Dataset schema (column name, dtype, and observed value/length range):

| column | dtype | observed range |
| --- | --- | --- |
| lang | stringclasses | 1 value |
| license | stringclasses | 13 values |
| stderr | stringlengths | 0–350 |
| commit | stringlengths | 40–40 |
| returncode | int64 | 0–128 |
| repos | stringlengths | 7–45.1k |
| new_contents | stringlengths | 0–1.87M |
| new_file | stringlengths | 6–292 |
| old_contents | stringlengths | 0–1.87M |
| message | stringlengths | 6–9.26k |
| old_file | stringlengths | 6–292 |
| subject | stringlengths | 0–4.45k |
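Each row pairs a file's contents before and after a single commit, together with the commit metadata described above. As an illustration only, here is a minimal sketch of how rows with these columns could be consumed, assuming the data has been exported as JSON Lines; the file name below is a placeholder, not part of the dataset:

```python
import json

# Placeholder path: assumes a JSON Lines export in which each line is one row
# with the columns listed above (lang, license, stderr, commit, returncode,
# repos, new_contents, new_file, old_contents, message, old_file, subject).
DATA_FILE = "commit_rows.jsonl"

with open(DATA_FILE, encoding="utf-8") as fh:
    for line in fh:
        row = json.loads(line)
        # The commit hash is always 40 characters (a full SHA-1), per the schema.
        short_sha = row["commit"][:8]
        # old_contents / new_contents hold the file before and after the commit.
        changed = row["old_contents"] != row["new_contents"]
        print(f"{short_sha} {row['new_file']}")
        print(f"  subject: {row['subject']}")
        print(f"  contents changed: {changed}")
```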
lang: Java
license: apache-2.0
commit: 0be5f6c7e48edc24f11712b303e6c7b59136384b
returncode: 0
repos: Olgajjj/java_pft,Olgajjj/java_pft
new_contents:
package ru.stqa.pft.sandbox;

public class Point {

    int x, y;

    public Point(int x, int y) {
        this.x = x;
        this.y = y;
    }

    // compute the distance from the current point to the given one
    public double calcDistance(Point p) {
        return Math.sqrt(Math.pow(this.x - p.x, 2) + Math.pow(this.y - p.y, 2));
    }

    @Override
    public String toString() {
        return "{" + this.x + ";" + this.y + "}";
    }
}
new_file: sandbox/src/main/java/ru/stqa/pft/sandbox/Point.java
old_contents:
package ru.stqa.pft.sandbox;

public class Point {

    int x, y;

    public Point(int x, int y) {
        this.x = x;
        this.y = y;
    }

    // compute the distance from the current point to the given one
    public double calcDistance(Point p) {
        return Math.sqrt(Math.pow(x - p.x, 2) + Math.pow(y - p.y, 2));
    }

    @Override
    public String toString() {
        return "{" + x + ";" + y + "}";
    }
}
message: hw1 final
old_file: sandbox/src/main/java/ru/stqa/pft/sandbox/Point.java
subject: hw1 final
lang: Java
license: apache-2.0
commit: 22db83fd604283f927cbdfdfcbfe227e57a141ef
returncode: 0
repos: blue42u/swift-t,JohnPJenkins/swift-t,JohnPJenkins/swift-t,basheersubei/swift-t,blue42u/swift-t,blue42u/swift-t,basheersubei/swift-t,blue42u/swift-t,swift-lang/swift-t,swift-lang/swift-t,JohnPJenkins/swift-t,swift-lang/swift-t,JohnPJenkins/swift-t,basheersubei/swift-t,blue42u/swift-t,basheersubei/swift-t,JohnPJenkins/swift-t,blue42u/swift-t,blue42u/swift-t,swift-lang/swift-t,basheersubei/swift-t,swift-lang/swift-t,JohnPJenkins/swift-t,swift-lang/swift-t,basheersubei/swift-t,basheersubei/swift-t,JohnPJenkins/swift-t,swift-lang/swift-t
new_contents:
/* * Copyright 2013 University of Chicago and Argonne National Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ package exm.stc.common.util; import java.util.ArrayList; import java.util.List; /** * Represent a pair of data. Supports equality and hash comparison. * @param <T1> * @param <T2> */ public class Pair<T1, T2> { public final T1 val1; public final T2 val2; public Pair(T1 first, T2 second) { super(); this.val1 = first; this.val2 = second; } public static <T1, T2> Pair<T1, T2> create(T1 f, T2 s) { return new Pair<T1, T2>(f, s); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((val1 == null) ? 0 : val1.hashCode()); result = prime * result + ((val2 == null) ? 0 : val2.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) { return false; } assert(obj instanceof Pair) : "Comparing pair with non-pair"; Pair<?, ?> other = (Pair<?, ?>) obj; if (val1 == null) { if (other.val1 != null) return false; } else if (other.val1 == null) { return false; } else if (!val1.equals(other.val1)) return false; if (val2 == null) { if (other.val2 != null) return false; } else if (other.val2 == null) { return false; } else if (!val2.equals(other.val2)) return false; return true; } @Override public String toString() { return "(" + val1 + ", " + val2 + ")"; } public static <T, S> List<T> extract1(List<Pair<T, S>> list) { ArrayList<T> res = new ArrayList<T>(list.size()); for (Pair<T, S> p: list) { res.add(p.val1); } return res; } public static <T, S> List<S> extract2(List<Pair<T, S>> list) { ArrayList<S> res = new ArrayList<S>(list.size()); for (Pair<T, S> p: list) { res.add(p.val2); } return res; } }
new_file: code/src/exm/stc/common/util/Pair.java
old_contents:
/* * Copyright 2013 University of Chicago and Argonne National Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ package exm.stc.common.util; import java.util.ArrayList; import java.util.List; /** * Represent a pair of data. Supports equality and hash comparison. * @param <T1> * @param <T2> */ public class Pair<T1, T2> { public final T1 val1; public final T2 val2; public Pair(T1 first, T2 second) { super(); this.val1 = first; this.val2 = second; } public static <T1, T2> Pair<T1, T2> create(T1 f, T2 s) { return new Pair<T1, T2>(f, s); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((val1 == null) ? 0 : val1.hashCode()); result = prime * result + ((val2 == null) ? 0 : val2.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) { return false; } assert(obj instanceof Pair) : "Comparing pair with non-pair"; Pair<?, ?> other = (Pair<?, ?>) obj; if (val1 == null) { if (other.val1 != null) return false; } else if (other.val2 == null) { return false; } else if (!val1.equals(other.val1)) return false; if (val2 == null) { if (other.val2 != null) return false; } else if (other.val2 == null) { return false; } else if (!val2.equals(other.val2)) return false; return true; } @Override public String toString() { return "(" + val1 + ", " + val2 + ")"; } public static <T, S> List<T> extract1(List<Pair<T, S>> list) { ArrayList<T> res = new ArrayList<T>(list.size()); for (Pair<T, S> p: list) { res.add(p.val1); } return res; } public static <T, S> List<S> extract2(List<Pair<T, S>> list) { ArrayList<S> res = new ArrayList<S>(list.size()); for (Pair<T, S> p: list) { res.add(p.val2); } return res; } }
message: Bugfix in Pair.equals git-svn-id: 47705994653588c662f4ea400dfe88107361c0e2@14597 dc4e9af1-7f46-4ead-bba6-71afc04862de
old_file: code/src/exm/stc/common/util/Pair.java
subject: Bugfix in Pair.equals
lang: Java
license: apache-2.0
commit: dfe6fe82eb1ff0f508b260b0209c6641aae9f69d
returncode: 0
repos: RedRoma/YelpJavaClient
new_contents:
/* * Copyright 2016 RedRoma, Inc.. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tech.redroma.yelp; import java.net.URL; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import tech.redroma.yelp.exceptions.YelpExcetion; import tech.redroma.yelp.oauth.OAuthTokenProvider; import tech.sirwellington.alchemy.generator.AlchemyGenerator; import tech.sirwellington.alchemy.http.AlchemyHttp; import tech.sirwellington.alchemy.http.HttpResponse; import tech.sirwellington.alchemy.http.exceptions.AlchemyHttpException; import tech.sirwellington.alchemy.http.mock.AlchemyHttpMock; import tech.sirwellington.alchemy.test.junit.runners.AlchemyTestRunner; import tech.sirwellington.alchemy.test.junit.runners.DontRepeat; import tech.sirwellington.alchemy.test.junit.runners.GeneratePojo; import tech.sirwellington.alchemy.test.junit.runners.GenerateString; import tech.sirwellington.alchemy.test.junit.runners.GenerateURL; import tech.sirwellington.alchemy.test.junit.runners.Repeat; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; import static tech.sirwellington.alchemy.generator.AlchemyGenerator.one; import static tech.sirwellington.alchemy.generator.CollectionGenerators.listOf; import static tech.sirwellington.alchemy.generator.GeolocationGenerators.latitudes; import static tech.sirwellington.alchemy.generator.GeolocationGenerators.longitudes; import static tech.sirwellington.alchemy.generator.ObjectGenerators.pojos; import static tech.sirwellington.alchemy.test.junit.ThrowableAssertion.assertThrows; import static tech.sirwellington.alchemy.test.junit.runners.GenerateString.Type.ALPHABETIC; /** * * @author SirWellington */ @Repeat(10) @RunWith(AlchemyTestRunner.class) public class YelpAPIImplTest { @Mock private AlchemyHttp http; @Mock private OAuthTokenProvider tokenProvider; @GenerateURL private URL baseURL; @GenerateString private String token; @Mock private HttpResponse httpResponse; private List<YelpBusiness> businesses; private YelpSearchRequest request; @GenerateString private String searchTerm; private double latitude; private double longitude; @GenerateString(ALPHABETIC) private String businessID; @GeneratePojo private YelpBusinessDetails businessDetails; private YelpAPIImpl instance; @Before public void setUp() throws Exception { setupData(); setupMocks(); instance = new YelpAPIImpl(http, tokenProvider, baseURL.toString()); } private void setupData() throws Exception { AlchemyGenerator<YelpBusiness> pojos = pojos(YelpBusiness.class); businesses = listOf(pojos); longitude = one(longitudes()); latitude = one(latitudes()); request = YelpSearchRequest.newBuilder() .withSearchTerm(searchTerm) .withCoordinate(Coordinate.of(latitude, longitude)) .build(); } private void setupMocks() throws Exception { when(tokenProvider.getToken()).thenReturn(token); when(httpResponse.bodyAsArrayOf(YelpBusiness.class)) .thenReturn(businesses); } @Test public void 
testGetBusinessDetails() throws Exception { String expectedURL = baseURL + YelpAPIImpl.URLS.BUSINESSES + "/" + businessID; http = AlchemyHttpMock.begin() .whenGet() .noBody() .at(expectedURL) .thenReturnPOJO(businessDetails) .build(); instance = new YelpAPIImpl(http, tokenProvider, baseURL.toString()); YelpBusinessDetails result = instance.getBusinessDetails(businessID); assertThat(result, is(businessDetails)); } @DontRepeat @Test public void testGetBusinessDetailsWhenFails() throws Exception { String expectedURL = baseURL + YelpAPIImpl.URLS.BUSINESSES + "/" + businessID; Exception ex = new AlchemyHttpException(); http = AlchemyHttpMock.begin() .whenGet() .noBody() .at(expectedURL) .thenThrow(ex) .build(); instance = new YelpAPIImpl(http, tokenProvider, baseURL.toString()); assertThrows(() -> instance.getBusinessDetails(businessID)) .isInstanceOf(YelpExcetion.class); } @Test public void testSearchForBusinesses() throws Exception { //We need a specific URL for each method, so a mock HTTP must be constructed //For each. String expectedURL = baseURL + YelpAPIImpl.URLS.BUSINESS_SEARCH; http = AlchemyHttpMock.begin() .whenGet() .anyBody() .at(expectedURL) .thenReturnResponse(httpResponse) .build(); instance = new YelpAPIImpl(http, tokenProvider, baseURL.toString()); List<YelpBusiness> results = instance.searchForBusinesses(request); assertThat(results, is(businesses)); AlchemyHttpMock.verifyAllRequestsMade(http); } @DontRepeat @Test public void testSearchForBusinessesWhenFails() throws Exception { String expectedURL = baseURL + YelpAPIImpl.URLS.BUSINESS_SEARCH; Exception ex = new AlchemyHttpException(); http = AlchemyHttpMock.begin() .whenGet() .anyBody() .at(expectedURL) .thenThrow(ex) .build(); instance = new YelpAPIImpl(http, tokenProvider, baseURL.toString()); assertThrows(() -> instance.searchForBusinesses(request)) .isInstanceOf(YelpExcetion.class); } }
new_file: src/test/java/tech/redroma/yelp/YelpAPIImplTest.java
old_contents:
/* * Copyright 2016 RedRoma, Inc.. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tech.redroma.yelp; import java.net.URL; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import tech.redroma.yelp.oauth.OAuthTokenProvider; import tech.sirwellington.alchemy.generator.AlchemyGenerator; import tech.sirwellington.alchemy.http.AlchemyHttp; import tech.sirwellington.alchemy.http.HttpResponse; import tech.sirwellington.alchemy.http.mock.AlchemyHttpMock; import tech.sirwellington.alchemy.test.junit.runners.AlchemyTestRunner; import tech.sirwellington.alchemy.test.junit.runners.GeneratePojo; import tech.sirwellington.alchemy.test.junit.runners.GenerateString; import tech.sirwellington.alchemy.test.junit.runners.GenerateURL; import tech.sirwellington.alchemy.test.junit.runners.Repeat; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; import static tech.sirwellington.alchemy.generator.AlchemyGenerator.one; import static tech.sirwellington.alchemy.generator.CollectionGenerators.listOf; import static tech.sirwellington.alchemy.generator.GeolocationGenerators.latitudes; import static tech.sirwellington.alchemy.generator.GeolocationGenerators.longitudes; import static tech.sirwellington.alchemy.generator.ObjectGenerators.pojos; import static tech.sirwellington.alchemy.test.junit.runners.GenerateString.Type.ALPHABETIC; /** * * @author SirWellington */ @Repeat(10) @RunWith(AlchemyTestRunner.class) public class YelpAPIImplTest { @Mock private AlchemyHttp http; @Mock private OAuthTokenProvider tokenProvider; @GenerateURL private URL baseURL; @GenerateString private String token; @Mock private HttpResponse httpResponse; private List<YelpBusiness> businesses; private YelpSearchRequest request; @GenerateString private String searchTerm; private double latitude; private double longitude; @GenerateString(ALPHABETIC) private String businessID; @GeneratePojo private YelpBusinessDetails businessDetails; private YelpAPIImpl instance; @Before public void setUp() throws Exception { setupData(); setupMocks(); instance = new YelpAPIImpl(http, tokenProvider, baseURL.toString()); } private void setupData() throws Exception { AlchemyGenerator<YelpBusiness> pojos = pojos(YelpBusiness.class); businesses = listOf(pojos); longitude = one(longitudes()); latitude = one(latitudes()); request = YelpSearchRequest.newBuilder() .withSearchTerm(searchTerm) .withCoordinate(Coordinate.of(latitude, longitude)) .build(); } private void setupMocks() throws Exception { when(tokenProvider.getToken()).thenReturn(token); when(httpResponse.bodyAsArrayOf(YelpBusiness.class)) .thenReturn(businesses); } @Test public void testGetBusinessDetails() throws Exception { String expectedURL = baseURL + YelpAPIImpl.URLS.BUSINESSES + "/" + businessID; http = AlchemyHttpMock.begin() .whenGet() .noBody() .at(expectedURL) .thenReturnPOJO(businessDetails) .build(); instance = new YelpAPIImpl(http, tokenProvider, 
baseURL.toString()); YelpBusinessDetails result = instance.getBusinessDetails(businessID); assertThat(result, is(businessDetails)); } @Test public void testSearchForBusinesses() throws Exception { //We need a specific URL for each method, so a mock HTTP must be constructed //For each. String expectedURL = baseURL + YelpAPIImpl.URLS.BUSINESS_SEARCH; http = AlchemyHttpMock.begin() .whenGet() .anyBody() .at(expectedURL) .thenReturnResponse(httpResponse) .build(); instance = new YelpAPIImpl(http, tokenProvider, baseURL.toString()); List<YelpBusiness> results = instance.searchForBusinesses(request); assertThat(results, is(businesses)); AlchemyHttpMock.verifyAllRequestsMade(http); } }
message: Adds additional unit test cases when things go wrong
old_file: src/test/java/tech/redroma/yelp/YelpAPIImplTest.java
subject: Adds additional unit test cases when things go wrong
lang: Java
license: apache-2.0
commit: 74018661fd363e79d9f551753e442e8121f3bbee
returncode: 0
repos: chongma/BootsFaces-OSP,asterd/BootsFaces-OSP,jepsar/BootsFaces-OSP,mtvweb/BootsFaces-OSP,TheCoder4eu/BootsFaces-OSP,chongma/BootsFaces-OSP,TheCoder4eu/BootsFaces-OSP,jepsar/BootsFaces-OSP,asterd/BootsFaces-OSP,mtvweb/BootsFaces-OSP
new_contents:
/** * Copyright 2014 Riccardo Massera (TheCoder4.Eu) * * This file is part of BootsFaces. * * BootsFaces is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * BootsFaces is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with BootsFaces. If not, see <http://www.gnu.org/licenses/>. */ package net.bootsfaces.component; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import javax.faces.application.ConfigurableNavigationHandler; import javax.faces.application.NavigationCase; import javax.faces.application.ProjectStage; import javax.faces.application.ResourceDependencies; import javax.faces.application.ResourceDependency; import javax.faces.application.ResourceHandler; import javax.faces.component.FacesComponent; import javax.faces.component.UIComponent; import javax.faces.component.UIParameter; //import javax.faces.component.html.HtmlOutcomeTargetButton; import javax.faces.component.html.HtmlOutcomeTargetLink; import javax.faces.context.FacesContext; import javax.faces.context.ResponseWriter; import net.bootsfaces.C; import net.bootsfaces.component.icon.IconRenderer; import net.bootsfaces.listeners.AddResourcesListener; import net.bootsfaces.render.A; import net.bootsfaces.render.H; import net.bootsfaces.render.JSEventHandlerRenderer; import net.bootsfaces.render.R; import net.bootsfaces.render.Tooltip; /** * * @author thecoder4.eu */ @ResourceDependencies({ @ResourceDependency(library = "bsf", name = "css/core.css", target = "head"), @ResourceDependency(library = "bsf", name = "css/tooltip.css", target = "head") }) @FacesComponent(C.NAVLINK_COMPONENT_TYPE) public class NavLink extends HtmlOutcomeTargetLink { /** * <p> * The standard component type for this component. * </p> */ public static final String COMPONENT_TYPE = C.NAVLINK_COMPONENT_TYPE; /** * <p> * The component family for this component. 
* </p> */ public static final String COMPONENT_FAMILY = C.BSFCOMPONENT; public static final String DIVIDER = "divider"; public static final String DIVIDERH = DIVIDER; // divider-horizontal public static final String DIVIDERV = DIVIDER.concat("-").concat("vertical"); // divider-vertical public static final String DROPDOWN = "dropdown"; public NavLink() { setRendererType(null); // this component renders itself AddResourcesListener.addResourceToHeadButAfterJQuery(C.BSF_LIBRARY, "jq/jquery.js"); Tooltip.addResourceFile(); } Map<String, Object> attrs; // !//boolean white=false; @Override public void encodeEnd(FacesContext context) throws IOException { if (!isRendered()) { return; } /* * <li><a href="#"><i class="icon-star"></i> Star</a></li> type=divider * => <li class="divider-vertical"></li> */ attrs = getAttributes(); // If there is the header attribute, we only render a Header String head = A.asString(attrs.get(A.HEADER)); if (head != null) { encodeHeader(context, head); } else { // if there is no href, no outcome and no value attributes // we render a divider if (attrs.get("value") == null && attrs.get("value") == null && attrs.get("value") == null) { encodeDivider(context); } else { encodeHTML(context); } } // if header Tooltip.activateTooltips(context, attrs, this); } public void encodeHeader(FacesContext context, String h) throws IOException { ResponseWriter rw = context.getResponseWriter(); rw.startElement(H.LI, this); rw.writeAttribute("id", getClientId(context), "id"); String styleClass = A.asString(attrs.get("styleClass")); if (null == styleClass) rw.writeAttribute("class", DROPDOWN + "-header", "class"); else rw.writeAttribute("class", DROPDOWN + "-header " + styleClass, "class"); String style = A.asString(attrs.get("style")); if (null != style) { rw.writeAttribute("style", style, "style"); } rw.writeAttribute(H.ROLE, "presentation", null); rw.writeText(h, null); rw.endElement(H.LI); } public void encodeDivider(FacesContext context) throws IOException { ResponseWriter rw = context.getResponseWriter(); rw.startElement(H.LI, this); Tooltip.generateTooltip(context, attrs, rw); // rw.writeAttribute("data-class", // this.getParent().getClass().getSimpleName()+"-"+this.getParent().getClass().getName(), // null); String styleClass = A.asString(attrs.get("styleClass")); if (null == styleClass) styleClass = ""; else styleClass += " "; if (this.getParent().getClass().getName().equals(NavBarLinks.COMPONENT_TYPE)) { rw.writeAttribute("class", styleClass + DIVIDERV, "class"); } else { rw.writeAttribute("class", styleClass + DIVIDERH, "class"); } String style = A.asString(attrs.get("style")); if (null != style) { rw.writeAttribute("style", style, "style"); } rw.writeAttribute(H.ROLE, "presentation", null); rw.endElement(H.LI); } public void encodeHTML(FacesContext context) throws IOException { ResponseWriter rw = context.getResponseWriter(); String value = A.asString(attrs.get("value")); String url = encodeHref(context); // else { rw.startElement(H.LI, this); rw.writeAttribute("id", getClientId(context), "id"); Tooltip.generateTooltip(context, attrs, rw); // rw.writeAttribute(H.TYPE, H.BUTTON, null); rw.writeAttribute("class", getStyleClasses(), "class"); String style = A.asString(attrs.get("style")); if (null != style) { rw.writeAttribute("style", style, "style"); } rw.startElement("a", this); style = A.asString(attrs.get("contentStyle")); if (null != style) { rw.writeAttribute("style", style, "style"); } String contentClass = A.asString(attrs.get("contentClass")); if (null != style) { 
rw.writeAttribute("class", contentClass, "class"); } if (url == null) { /* * Param Name: javax.faces.PROJECT_STAGE Default Value: The default * value is ProjectStage#Production but IDE can set it differently * in web.xml Expected Values: Development, Production, SystemTest, * UnitTest Since: 2.0 * * If we cannot get an outcome we use the Bootstrap Framework to * give a feedback to the Developer if this build is in the * Development Stage */ // R.encodeLabel(rw, this, "important", C.W_NONAVCASE_LINK); if (FacesContext.getCurrentInstance().getApplication().getProjectStage().equals(ProjectStage.Development)) { rw.writeAttribute(H.TOGGLE, H.TOOLTIP, null); rw.writeAttribute(H.TITLE, FacesContext.getCurrentInstance().getApplication().getProjectStage() + "WARNING! " + C.W_NONAVCASE_LINK, null); } url = "#"; } rw.writeAttribute("href", url, null); rw.writeAttribute(H.ROLE, "menuitem", null); rw.writeAttribute("tabindex", "-1", null); JSEventHandlerRenderer.generateJSEventHandlers(rw, this); String icon = A.asString(attrs.get(A.ICON)); String faicon = A.asString(attrs.get(A.ICONAWESOME)); boolean fa = false; // flag to indicate wether the selected icon set is // Font Awesome or not. if (faicon != null) { icon = faicon; fa = true; } if (icon != null) { Object ialign = attrs.get(A.ICON_ALIGN); // Default Left if (ialign != null && ialign.equals(A.RIGHT)) { rw.writeText(value + " ", null); IconRenderer.encodeIcon(rw, this, icon, fa); } else { IconRenderer.encodeIcon(rw, this, icon, fa); // !//R.encodeIcon(rw, this, icon, white); rw.writeText(" " + value, null); } } else { rw.writeText(value, null); } rw.endElement("a"); rw.endElement(H.LI); } private String getStyleClasses() { String c = ""; boolean active = A.toBool(attrs.get(A.ACTIVE)); if (active) { c += "active"; } String styleClass = A.asString(attrs.get("styleClass")); if (null != styleClass) c += " " + styleClass; return c; } private String encodeHref(FacesContext context) { String href = A.asString(attrs.get(A.HREF)); String url; if (href != null) { url = getResourceURL(context, href); return url; } else { String outcome = getOutcome(); outcome = (outcome == null) ? 
context.getViewRoot().getViewId() : outcome; ConfigurableNavigationHandler cnh = (ConfigurableNavigationHandler) context.getApplication() .getNavigationHandler(); NavigationCase navCase = cnh.getNavigationCase(context, null, outcome); if (navCase == null) { return null; } // throw new FacesException("The outcome '"+outcome+"' cannot be // resolved."); } String vId = navCase.getToViewId(context); Map<String, List<String>> params = getParams(navCase, this); url = context.getApplication().getViewHandler().getBookmarkableURL(context, vId, params, isIncludeViewParams() || navCase.isIncludeViewParams()); if (url != null) { // fragment String frag = A.asString(attrs.get(A.FRAGMENT)); if (frag != null) { url += "#" + frag; } return url; } // return url; } else { return "#"; } // return # } } protected String getResourceURL(FacesContext fc, String value) { return fc.getExternalContext().encodeResourceURL(value); /* if (value.contains(ResourceHandler.RESOURCE_IDENTIFIER)) { return value; } else { String url = fc.getApplication().getViewHandler().getResourceURL(fc, value); return fc.getExternalContext().encodeResourceURL(url); } */ } /** * Find all parameters to include by looking at nested uiparams and params * of navigation case */ protected Map<String, List<String>> getParams(NavigationCase navCase, NavLink button) { Map<String, List<String>> params = new LinkedHashMap<String, List<String>>(); // UIParams for (UIComponent child : button.getChildren()) { if (child.isRendered() && (child instanceof UIParameter)) { UIParameter uiParam = (UIParameter) child; if (!uiParam.isDisable()) { List<String> paramValues = params.get(uiParam.getName()); if (paramValues == null) { paramValues = new ArrayList<String>(); params.put(uiParam.getName(), paramValues); } paramValues.add(String.valueOf(uiParam.getValue())); } } } // NavCase Params Map<String, List<String>> navCaseParams = navCase.getParameters(); if (navCaseParams != null && !navCaseParams.isEmpty()) { for (Map.Entry<String, List<String>> entry : navCaseParams.entrySet()) { String key = entry.getKey(); // UIParams take precedence if (!params.containsKey(key)) { params.put(key, entry.getValue()); } } } return params; } @Override public String getFamily() { return COMPONENT_FAMILY; } }
new_file: src/main/java/net/bootsfaces/component/NavLink.java
old_contents:
/** * Copyright 2014 Riccardo Massera (TheCoder4.Eu) * * This file is part of BootsFaces. * * BootsFaces is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * BootsFaces is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with BootsFaces. If not, see <http://www.gnu.org/licenses/>. */ package net.bootsfaces.component; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import javax.faces.application.ConfigurableNavigationHandler; import javax.faces.application.NavigationCase; import javax.faces.application.ProjectStage; import javax.faces.application.ResourceDependencies; import javax.faces.application.ResourceDependency; import javax.faces.application.ResourceHandler; import javax.faces.component.FacesComponent; import javax.faces.component.UIComponent; import javax.faces.component.UIParameter; //import javax.faces.component.html.HtmlOutcomeTargetButton; import javax.faces.component.html.HtmlOutcomeTargetLink; import javax.faces.context.FacesContext; import javax.faces.context.ResponseWriter; import net.bootsfaces.C; import net.bootsfaces.component.icon.IconRenderer; import net.bootsfaces.listeners.AddResourcesListener; import net.bootsfaces.render.A; import net.bootsfaces.render.H; import net.bootsfaces.render.JSEventHandlerRenderer; import net.bootsfaces.render.R; import net.bootsfaces.render.Tooltip; /** * * @author thecoder4.eu */ @ResourceDependencies({ @ResourceDependency(library = "bsf", name = "css/core.css", target = "head"), @ResourceDependency(library = "bsf", name = "css/tooltip.css", target = "head") }) @FacesComponent(C.NAVLINK_COMPONENT_TYPE) public class NavLink extends HtmlOutcomeTargetLink { /** * <p> * The standard component type for this component. * </p> */ public static final String COMPONENT_TYPE = C.NAVLINK_COMPONENT_TYPE; /** * <p> * The component family for this component. 
* </p> */ public static final String COMPONENT_FAMILY = C.BSFCOMPONENT; public static final String DIVIDER = "divider"; public static final String DIVIDERH = DIVIDER; // divider-horizontal public static final String DIVIDERV = DIVIDER.concat("-").concat("vertical"); // divider-vertical public static final String DROPDOWN = "dropdown"; public NavLink() { setRendererType(null); // this component renders itself AddResourcesListener.addResourceToHeadButAfterJQuery(C.BSF_LIBRARY, "jq/jquery.js"); Tooltip.addResourceFile(); } Map<String, Object> attrs; // !//boolean white=false; @Override public void encodeEnd(FacesContext context) throws IOException { if (!isRendered()) { return; } /* * <li><a href="#"><i class="icon-star"></i> Star</a></li> type=divider * => <li class="divider-vertical"></li> */ attrs = getAttributes(); // If there is the header attribute, we only render a Header String head = A.asString(attrs.get(A.HEADER)); if (head != null) { encodeHeader(context, head); } else { // if there is no href, no outcome and no value attributes // we render a divider if (attrs.get("value") == null && attrs.get("value") == null && attrs.get("value") == null) { encodeDivider(context); } else { encodeHTML(context); } } // if header Tooltip.activateTooltips(context, attrs, this); } public void encodeHeader(FacesContext context, String h) throws IOException { ResponseWriter rw = context.getResponseWriter(); rw.startElement(H.LI, this); rw.writeAttribute("id", getClientId(context), "id"); String styleClass = A.asString(attrs.get("styleClass")); if (null == styleClass) rw.writeAttribute("class", DROPDOWN + "-header", "class"); else rw.writeAttribute("class", DROPDOWN + "-header " + styleClass, "class"); String style = A.asString(attrs.get("style")); if (null != style) { rw.writeAttribute("style", style, "style"); } rw.writeAttribute(H.ROLE, "presentation", null); rw.writeText(h, null); rw.endElement(H.LI); } public void encodeDivider(FacesContext context) throws IOException { ResponseWriter rw = context.getResponseWriter(); rw.startElement(H.LI, this); Tooltip.generateTooltip(context, attrs, rw); // rw.writeAttribute("data-class", // this.getParent().getClass().getSimpleName()+"-"+this.getParent().getClass().getName(), // null); String styleClass = A.asString(attrs.get("styleClass")); if (null == styleClass) styleClass = ""; else styleClass += " "; if (this.getParent().getClass().getName().equals(NavBarLinks.COMPONENT_TYPE)) { rw.writeAttribute("class", styleClass + DIVIDERV, "class"); } else { rw.writeAttribute("class", styleClass + DIVIDERH, "class"); } String style = A.asString(attrs.get("style")); if (null != style) { rw.writeAttribute("style", style, "style"); } rw.writeAttribute(H.ROLE, "presentation", null); rw.endElement(H.LI); } public void encodeHTML(FacesContext context) throws IOException { ResponseWriter rw = context.getResponseWriter(); String value = A.asString(attrs.get("value")); String url = encodeHref(context); // else { rw.startElement(H.LI, this); rw.writeAttribute("id", getClientId(context), "id"); Tooltip.generateTooltip(context, attrs, rw); // rw.writeAttribute(H.TYPE, H.BUTTON, null); rw.writeAttribute("class", getStyleClasses(), "class"); String style = A.asString(attrs.get("style")); if (null != style) { rw.writeAttribute("style", style, "style"); } rw.startElement("a", this); style = A.asString(attrs.get("contentStyle")); if (null != style) { rw.writeAttribute("style", style, "style"); } String contentClass = A.asString(attrs.get("contentClass")); if (null != style) { 
rw.writeAttribute("class", contentClass, "class"); } if (url == null) { /* * Param Name: javax.faces.PROJECT_STAGE Default Value: The default * value is ProjectStage#Production but IDE can set it differently * in web.xml Expected Values: Development, Production, SystemTest, * UnitTest Since: 2.0 * * If we cannot get an outcome we use the Bootstrap Framework to * give a feedback to the Developer if this build is in the * Development Stage */ // R.encodeLabel(rw, this, "important", C.W_NONAVCASE_LINK); if (FacesContext.getCurrentInstance().getApplication().getProjectStage().equals(ProjectStage.Development)) { rw.writeAttribute(H.TOGGLE, H.TOOLTIP, null); rw.writeAttribute(H.TITLE, FacesContext.getCurrentInstance().getApplication().getProjectStage() + "WARNING! " + C.W_NONAVCASE_LINK, null); } url = "#"; } rw.writeAttribute("href", url, null); rw.writeAttribute(H.ROLE, "menuitem", null); rw.writeAttribute("tabindex", "-1", null); JSEventHandlerRenderer.generateJSEventHandlers(rw, this); String icon = A.asString(attrs.get(A.ICON)); String faicon = A.asString(attrs.get(A.ICONAWESOME)); boolean fa = false; // flag to indicate wether the selected icon set is // Font Awesome or not. if (faicon != null) { icon = faicon; fa = true; } if (icon != null) { Object ialign = attrs.get(A.ICON_ALIGN); // Default Left if (ialign != null && ialign.equals(A.RIGHT)) { rw.writeText(value + " ", null); IconRenderer.encodeIcon(rw, this, icon, fa); } else { IconRenderer.encodeIcon(rw, this, icon, fa); // !//R.encodeIcon(rw, this, icon, white); rw.writeText(" " + value, null); } } else { rw.writeText(value, null); } rw.endElement("a"); rw.endElement(H.LI); } private String getStyleClasses() { String c = ""; boolean active = A.toBool(attrs.get(A.ACTIVE)); if (active) { c += "active"; } String styleClass = A.asString(attrs.get("styleClass")); if (null != styleClass) c += " " + styleClass; return c; } private String encodeHref(FacesContext context) { String href = A.asString(attrs.get(A.HREF)); String url; if (href != null) { url = getResourceURL(context, href); return url; } else { String outcome = getOutcome(); outcome = (outcome == null) ? 
context.getViewRoot().getViewId() : outcome; ConfigurableNavigationHandler cnh = (ConfigurableNavigationHandler) context.getApplication() .getNavigationHandler(); NavigationCase navCase = cnh.getNavigationCase(context, null, outcome); if (navCase == null) { return null; } // throw new FacesException("The outcome '"+outcome+"' cannot be // resolved."); } String vId = navCase.getToViewId(context); Map<String, List<String>> params = getParams(navCase, this); url = context.getApplication().getViewHandler().getBookmarkableURL(context, vId, params, isIncludeViewParams() || navCase.isIncludeViewParams()); if (url != null) { // fragment String frag = A.asString(attrs.get(A.FRAGMENT)); if (frag != null) { url += "#" + frag; } return url; } // return url; } else { return "#"; } // return # } } protected String getResourceURL(FacesContext fc, String value) { if (value.contains(ResourceHandler.RESOURCE_IDENTIFIER)) { return value; } else { String url = fc.getApplication().getViewHandler().getResourceURL(fc, value); return fc.getExternalContext().encodeResourceURL(url); } } /** * Find all parameters to include by looking at nested uiparams and params * of navigation case */ protected Map<String, List<String>> getParams(NavigationCase navCase, NavLink button) { Map<String, List<String>> params = new LinkedHashMap<String, List<String>>(); // UIParams for (UIComponent child : button.getChildren()) { if (child.isRendered() && (child instanceof UIParameter)) { UIParameter uiParam = (UIParameter) child; if (!uiParam.isDisable()) { List<String> paramValues = params.get(uiParam.getName()); if (paramValues == null) { paramValues = new ArrayList<String>(); params.put(uiParam.getName(), paramValues); } paramValues.add(String.valueOf(uiParam.getValue())); } } } // NavCase Params Map<String, List<String>> navCaseParams = navCase.getParameters(); if (navCaseParams != null && !navCaseParams.isEmpty()) { for (Map.Entry<String, List<String>> entry : navCaseParams.entrySet()) { String key = entry.getKey(); // UIParams take precedence if (!params.containsKey(key)) { params.put(key, entry.getValue()); } } } return params; } @Override public String getFamily() { return COMPONENT_FAMILY; } }
message: Removed unneeded code for href rendering in NavLink. ( #200 )
old_file: src/main/java/net/bootsfaces/component/NavLink.java
subject: Removed unneeded code for href rendering in NavLink. ( #200 )
lang: Java
license: apache-2.0
commit: 2e3e69ce02524657748d76d0d47686f1c97731b4
returncode: 0
repos: ronsigal/xerces,RackerWilliams/xercesj,RackerWilliams/xercesj,ronsigal/xerces,RackerWilliams/xercesj,jimma/xerces,ronsigal/xerces,jimma/xerces,jimma/xerces
new_contents:
/* * The Apache Software License, Version 1.1 * * * Copyright (c) 1999,2000 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Xerces" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation and was * originally based on software copyright (c) 1999, International * Business Machines, Inc., http://www.apache.org. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ package org.apache.xerces.validators.common; import org.apache.xerces.framework.XMLAttrList; import org.apache.xerces.framework.XMLContentSpec; import org.apache.xerces.framework.XMLDocumentHandler; import org.apache.xerces.framework.XMLDocumentScanner; import org.apache.xerces.framework.XMLErrorReporter; import org.apache.xerces.readers.DefaultEntityHandler; import org.apache.xerces.readers.XMLEntityHandler; import org.apache.xerces.utils.ChunkyCharArray; import org.apache.xerces.utils.Hash2intTable; import org.apache.xerces.utils.NamespacesScope; import org.apache.xerces.utils.QName; import org.apache.xerces.utils.StringPool; import org.apache.xerces.utils.XMLCharacterProperties; import org.apache.xerces.utils.XMLMessages; import org.apache.xerces.utils.ImplementationMessages; import org.apache.xerces.parsers.DOMParser; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.EntityResolver; import org.xml.sax.Locator; import org.xml.sax.helpers.LocatorImpl; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import java.io.IOException; import java.util.Enumeration; import java.util.Hashtable; import java.util.StringTokenizer; import java.util.Vector; import org.apache.xerces.validators.dtd.DTDGrammar; import org.apache.xerces.validators.schema.SchemaGrammar; import org.apache.xerces.validators.schema.SchemaMessageProvider; import org.apache.xerces.validators.schema.SchemaSymbols; import org.apache.xerces.validators.schema.TraverseSchema; import org.apache.xerces.validators.datatype.DatatypeValidator; import org.apache.xerces.validators.datatype.InvalidDatatypeValueException; /** * This class is the super all-in-one validator used by the parser. * * @version $Id$ */ public final class XMLValidator implements DefaultEntityHandler.EventHandler, XMLEntityHandler.CharDataHandler, XMLDocumentScanner.EventHandler, NamespacesScope.NamespacesHandler { // // Constants // // debugging private static final boolean PRINT_EXCEPTION_STACK_TRACE = false; private static final boolean DEBUG_PRINT_ATTRIBUTES = false; private static final boolean DEBUG_PRINT_CONTENT = false; private static final boolean DEBUG_SCHEMA_VALIDATION = false; private static final boolean DEBUG_ELEMENT_CHILDREN = false; // Chunk size constants private static final int CHUNK_SHIFT = 8; // 2^8 = 256 private static final int CHUNK_SIZE = (1 << CHUNK_SHIFT); private static final int CHUNK_MASK = CHUNK_SIZE - 1; private static final int INITIAL_CHUNK_COUNT = (1 << (10 - CHUNK_SHIFT)); // 2^10 = 1k // // Data // // REVISIT: The data should be regrouped and re-organized so that // it's easier to find a meaningful field. // debugging // private static boolean DEBUG = false; // other private Hashtable fIdDefs = null; private Hashtable fIdRefs = null; private Object fNullValue = null; // attribute validators // REVISIT: Validation. A validator per element declaration and // attribute declaration is required to accomodate // Schema facets on simple types. 
private AttributeValidator fAttValidatorCDATA = null; private AttributeValidator fAttValidatorID = new AttValidatorID(); private AttributeValidator fAttValidatorIDREF = new AttValidatorIDREF(); private AttributeValidator fAttValidatorIDREFS = new AttValidatorIDREFS(); private AttributeValidator fAttValidatorENTITY = new AttValidatorENTITY(); private AttributeValidator fAttValidatorENTITIES = new AttValidatorENTITIES(); private AttributeValidator fAttValidatorNMTOKEN = new AttValidatorNMTOKEN(); private AttributeValidator fAttValidatorNMTOKENS = new AttValidatorNMTOKENS(); private AttributeValidator fAttValidatorNOTATION = new AttValidatorNOTATION(); private AttributeValidator fAttValidatorENUMERATION = new AttValidatorENUMERATION(); private AttributeValidator fAttValidatorDATATYPE = null; // Package access for use by AttributeValidator classes. StringPool fStringPool = null; boolean fValidating = false; boolean fInElementContent = false; int fStandaloneReader = -1; // settings private boolean fValidationEnabled = false; private boolean fDynamicValidation = false; private boolean fValidationEnabledByDynamic = false; private boolean fDynamicDisabledByValidation = false; private boolean fWarningOnDuplicateAttDef = false; private boolean fWarningOnUndeclaredElements = false; // declarations private int fDeclaration[]; private XMLErrorReporter fErrorReporter = null; private DefaultEntityHandler fEntityHandler = null; private QName fCurrentElement = new QName(); //REVISIT: validation private int[] fScopeStack = new int[8]; private int[] fGrammarNameSpaceIndexStack = new int[8]; private int[] fElementTypeStack = new int[8]; private int[] fElementEntityStack = new int[8]; private int[] fElementIndexStack = new int[8]; private int[] fContentSpecTypeStack = new int[8]; private QName[] fElementChildren = new QName[32]; private int fElementChildrenLength = 0; private int[] fElementChildrenOffsetStack = new int[32]; private int fElementDepth = -1; private boolean fNamespacesEnabled = false; private NamespacesScope fNamespacesScope = null; private int fNamespacesPrefix = -1; private QName fRootElement = new QName(); private int fAttrListHandle = -1; private int fCurrentElementEntity = -1; private int fCurrentElementIndex = -1; private int fCurrentContentSpecType = -1; private boolean fSeenDoctypeDecl = false; private final int TOP_LEVEL_SCOPE = -1; private int fCurrentScope = TOP_LEVEL_SCOPE; private int fCurrentSchemaURI = -1; private int fEmptyURI = - 1; private int fXsiPrefix = - 1; private int fXsiURI = -2; private Grammar fGrammar = null; private int fGrammarNameSpaceIndex = -1; private GrammarResolver fGrammarResolver = null; // state and stuff private boolean fScanningDTD = false; private XMLDocumentScanner fDocumentScanner = null; private boolean fCalledStartDocument = false; private XMLDocumentHandler fDocumentHandler = null; private XMLDocumentHandler.DTDHandler fDTDHandler = null; private boolean fSeenRootElement = false; private XMLAttrList fAttrList = null; private int fXMLLang = -1; private LocatorImpl fAttrNameLocator = null; private boolean fCheckedForSchema = false; private boolean fDeclsAreExternal = false; private StringPool.CharArrayRange fCurrentElementCharArrayRange = null; private char[] fCharRefData = null; private boolean fSendCharDataAsCharArray = false; private boolean fBufferDatatype = false; private StringBuffer fDatatypeBuffer = new StringBuffer(); private QName fTempQName = new QName(); private XMLAttributeDecl fTempAttDecl = new XMLAttributeDecl(); private XMLElementDecl 
fTempElementDecl = new XMLElementDecl(); //REVISIT: ericye, use this temp QName whenever we can!! private boolean fGrammarIsDTDGrammar = false; private boolean fGrammarIsSchemaGrammar = false; // symbols private int fEMPTYSymbol = -1; private int fANYSymbol = -1; private int fMIXEDSymbol = -1; private int fCHILDRENSymbol = -1; private int fCDATASymbol = -1; private int fIDSymbol = -1; private int fIDREFSymbol = -1; private int fIDREFSSymbol = -1; private int fENTITYSymbol = -1; private int fENTITIESSymbol = -1; private int fNMTOKENSymbol = -1; private int fNMTOKENSSymbol = -1; private int fNOTATIONSymbol = -1; private int fENUMERATIONSymbol = -1; private int fREQUIREDSymbol = -1; private int fFIXEDSymbol = -1; private int fDATATYPESymbol = -1; private int fEpsilonIndex = -1; // // Constructors // /** Constructs an XML validator. */ public XMLValidator(StringPool stringPool, XMLErrorReporter errorReporter, DefaultEntityHandler entityHandler, XMLDocumentScanner documentScanner) { // keep references fStringPool = stringPool; fErrorReporter = errorReporter; fEntityHandler = entityHandler; fDocumentScanner = documentScanner; fEmptyURI = fStringPool.addSymbol(""); fXsiURI = fStringPool.addSymbol(SchemaSymbols.URI_XSI); // initialize fAttrList = new XMLAttrList(fStringPool); entityHandler.setEventHandler(this); entityHandler.setCharDataHandler(this); fDocumentScanner.setEventHandler(this); init(); } // <init>(StringPool,XMLErrorReporter,DefaultEntityHandler,XMLDocumentScanner) public void setGrammarResolver(GrammarResolver grammarResolver){ fGrammarResolver = grammarResolver; } // // Public methods // // initialization /** Set char data processing preference and handlers. */ public void initHandlers(boolean sendCharDataAsCharArray, XMLDocumentHandler docHandler, XMLDocumentHandler.DTDHandler dtdHandler) { fSendCharDataAsCharArray = sendCharDataAsCharArray; fEntityHandler.setSendCharDataAsCharArray(fSendCharDataAsCharArray); fDocumentHandler = docHandler; fDTDHandler = dtdHandler; } // initHandlers(boolean,XMLDocumentHandler,XMLDocumentHandler.DTDHandler) /** Reset or copy. */ public void resetOrCopy(StringPool stringPool) throws Exception { fAttrList = new XMLAttrList(stringPool); resetCommon(stringPool); } /** Reset. */ public void reset(StringPool stringPool) throws Exception { fAttrList.reset(stringPool); resetCommon(stringPool); } // settings /** * Turning on validation/dynamic turns on validation if it is off, and * this is remembered. Turning off validation DISABLES validation/dynamic * if it is on. Turning off validation/dynamic DOES NOT turn off * validation if it was explicitly turned on, only if it was turned on * BECAUSE OF the call to turn validation/dynamic on. Turning on * validation will REENABLE and turn validation/dynamic back on if it * was disabled by a call that turned off validation while * validation/dynamic was enabled. */ public void setValidationEnabled(boolean flag) throws Exception { fValidationEnabled = flag; fValidationEnabledByDynamic = false; if (fValidationEnabled) { if (fDynamicDisabledByValidation) { fDynamicValidation = true; fDynamicDisabledByValidation = false; } } else if (fDynamicValidation) { fDynamicValidation = false; fDynamicDisabledByValidation = true; } fValidating = fValidationEnabled; } /** Returns true if validation is enabled. */ public boolean getValidationEnabled() { return fValidationEnabled; } /** Sets whether validation is dynamic. 
*/ public void setDynamicValidationEnabled(boolean flag) throws Exception { fDynamicValidation = flag; fDynamicDisabledByValidation = false; if (!fDynamicValidation) { if (fValidationEnabledByDynamic) { fValidationEnabled = false; fValidationEnabledByDynamic = false; } } else if (!fValidationEnabled) { fValidationEnabled = true; fValidationEnabledByDynamic = true; } fValidating = fValidationEnabled; } /** Returns true if validation is dynamic. */ public boolean getDynamicValidationEnabled() { return fDynamicValidation; } /** Sets whether namespaces are enabled. */ public void setNamespacesEnabled(boolean flag) { fNamespacesEnabled = flag; } /** Returns true if namespaces are enabled. */ public boolean getNamespacesEnabled() { return fNamespacesEnabled; } /** Sets whether duplicate attribute definitions signal a warning. */ public void setWarningOnDuplicateAttDef(boolean flag) { fWarningOnDuplicateAttDef = flag; } /** Returns true if duplicate attribute definitions signal a warning. */ public boolean getWarningOnDuplicateAttDef() { return fWarningOnDuplicateAttDef; } /** Sets whether undeclared elements signal a warning. */ public void setWarningOnUndeclaredElements(boolean flag) { fWarningOnUndeclaredElements = flag; } /** Returns true if undeclared elements signal a warning. */ public boolean getWarningOnUndeclaredElements() { return fWarningOnUndeclaredElements; } // // DefaultEntityHandler.EventHandler methods // /** Start entity reference. */ public void startEntityReference(int entityName, int entityType, int entityContext) throws Exception { fDocumentHandler.startEntityReference(entityName, entityType, entityContext); } /** End entity reference. */ public void endEntityReference(int entityName, int entityType, int entityContext) throws Exception { fDocumentHandler.endEntityReference(entityName, entityType, entityContext); } /** Send end of input notification. */ public void sendEndOfInputNotifications(int entityName, boolean moreToFollow) throws Exception { fDocumentScanner.endOfInput(entityName, moreToFollow); /*** if (fScanningDTD) { fDTDImporter.sendEndOfInputNotifications(entityName, moreToFollow); } /***/ } /** Send reader change notifications. */ public void sendReaderChangeNotifications(XMLEntityHandler.EntityReader reader, int readerId) throws Exception { fDocumentScanner.readerChange(reader, readerId); /*** if (fScanningDTD) { fDTDImporter.sendReaderChangeNotifications(reader, readerId); } /***/ } /** External entity standalone check. */ public boolean externalEntityStandaloneCheck() { return (fStandaloneReader != -1 && fValidating); } /** Return true if validating. */ public boolean getValidating() { return fValidating; } // // XMLEntityHandler.CharDataHandler methods // /** Process characters. */ public void processCharacters(char[] chars, int offset, int length) throws Exception { if (fValidating) { if (fInElementContent || fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY) { charDataInContent(); } if (fBufferDatatype) { fDatatypeBuffer.append(chars, offset, length); } } fDocumentHandler.characters(chars, offset, length); } /** Process characters. */ public void processCharacters(int data) throws Exception { if (fValidating) { if (fInElementContent || fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY) { charDataInContent(); } if (fBufferDatatype) { fDatatypeBuffer.append(fStringPool.toString(data)); } } fDocumentHandler.characters(data); } /** Process whitespace. 
*/ public void processWhitespace(char[] chars, int offset, int length) throws Exception { if (fInElementContent) { if (fStandaloneReader != -1 && fValidating && getElementDeclIsExternal(fCurrentElementIndex)) { reportRecoverableXMLError(XMLMessages.MSG_WHITE_SPACE_IN_ELEMENT_CONTENT_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION); } fDocumentHandler.ignorableWhitespace(chars, offset, length); } else { if (fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY) { charDataInContent(); } fDocumentHandler.characters(chars, offset, length); } } // processWhitespace(char[],int,int) /** Process whitespace. */ public void processWhitespace(int data) throws Exception { if (fInElementContent) { if (fStandaloneReader != -1 && fValidating && getElementDeclIsExternal(fCurrentElementIndex)) { reportRecoverableXMLError(XMLMessages.MSG_WHITE_SPACE_IN_ELEMENT_CONTENT_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION); } fDocumentHandler.ignorableWhitespace(data); } else { if (fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY) { charDataInContent(); } fDocumentHandler.characters(data); } } // processWhitespace(int) // // XMLDocumentScanner.EventHandler methods // /** Scans element type. */ public void scanElementType(XMLEntityHandler.EntityReader entityReader, char fastchar, QName element) throws Exception { if (!fNamespacesEnabled) { element.clear(); element.localpart = entityReader.scanName(fastchar); element.rawname = element.localpart; } else { entityReader.scanQName(fastchar, element); if (entityReader.lookingAtChar(':', false)) { fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_TWO_COLONS_IN_QNAME, XMLMessages.P5_INVALID_CHARACTER, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); entityReader.skipPastNmtoken(' '); } } } // scanElementType(XMLEntityHandler.EntityReader,char,QName) /** Scans expected element type. */ public boolean scanExpectedElementType(XMLEntityHandler.EntityReader entityReader, char fastchar, QName element) throws Exception { if (fCurrentElementCharArrayRange == null) { fCurrentElementCharArrayRange = fStringPool.createCharArrayRange(); } fStringPool.getCharArrayRange(fCurrentElement.rawname, fCurrentElementCharArrayRange); return entityReader.scanExpectedName(fastchar, fCurrentElementCharArrayRange); } // scanExpectedElementType(XMLEntityHandler.EntityReader,char,QName) /** Scans attribute name. */ public void scanAttributeName(XMLEntityHandler.EntityReader entityReader, QName element, QName attribute) throws Exception { if (!fSeenRootElement) { fSeenRootElement = true; rootElementSpecified(element); fStringPool.resetShuffleCount(); } if (!fNamespacesEnabled) { attribute.clear(); attribute.localpart = entityReader.scanName('='); attribute.rawname = attribute.localpart; } else { entityReader.scanQName('=', attribute); if (entityReader.lookingAtChar(':', false)) { fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_TWO_COLONS_IN_QNAME, XMLMessages.P5_INVALID_CHARACTER, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); entityReader.skipPastNmtoken(' '); } } } // scanAttributeName(XMLEntityHandler.EntityReader,QName,QName) /** Call start document. */ public void callStartDocument() throws Exception { if (!fCalledStartDocument) { fDocumentHandler.startDocument(); fCalledStartDocument = true; } } /** Call end document. */ public void callEndDocument() throws Exception { if (fCalledStartDocument) { fDocumentHandler.endDocument(); } } /** Call XML declaration. 
*/ public void callXMLDecl(int version, int encoding, int standalone) throws Exception { fDocumentHandler.xmlDecl(version, encoding, standalone); } public void callStandaloneIsYes() throws Exception { // standalone = "yes". said XMLDocumentScanner. fStandaloneReader = fEntityHandler.getReaderId() ; } /** Call text declaration. */ public void callTextDecl(int version, int encoding) throws Exception { fDocumentHandler.textDecl(version, encoding); } /** * Signal the scanning of an element name in a start element tag. * * @param element Element name scanned. */ public void element(QName element) throws Exception { fAttrListHandle = -1; } /** * Signal the scanning of an attribute associated to the previous * start element tag. * * @param element Element name scanned. * @param attrName Attribute name scanned. * @param attrValue The string pool index of the attribute value. */ public boolean attribute(QName element, QName attrName, int attrValue) throws Exception { if (fAttrListHandle == -1) { fAttrListHandle = fAttrList.startAttrList(); } // if fAttrList.addAttr returns -1, indicates duplicate att in start tag of an element. // specified: true, search : true return fAttrList.addAttr(attrName, attrValue, fCDATASymbol, true, true) == -1; } /** Call start element. */ public void callStartElement(QName element) throws Exception { if ( DEBUG_SCHEMA_VALIDATION ) System.out.println("\n=======StartElement : " + fStringPool.toString(element.localpart)); // // Check after all specified attrs are scanned // (1) report error for REQUIRED attrs that are missing (V_TAGc) // (2) add default attrs (FIXED and NOT_FIXED) // if (!fSeenRootElement) { fSeenRootElement = true; rootElementSpecified(element); fStringPool.resetShuffleCount(); } fCheckedForSchema = true; if (fNamespacesEnabled) { bindNamespacesToElementAndAttributes(element, fAttrList); } validateElementAndAttributes(element, fAttrList); if (fAttrListHandle != -1) { fAttrList.endAttrList(); } fDocumentHandler.startElement(element, fAttrList, fAttrListHandle); fAttrListHandle = -1; //before we increment the element depth, add this element's QName to its enclosing element 's children list fElementDepth++; //if (fElementDepth >= 0) { if (fValidating) { // push current length onto stack if (fElementChildrenOffsetStack.length < fElementDepth) { int newarray[] = new int[fElementChildrenOffsetStack.length * 2]; System.arraycopy(fElementChildrenOffsetStack, 0, newarray, 0, fElementChildrenOffsetStack.length); fElementChildrenOffsetStack = newarray; } fElementChildrenOffsetStack[fElementDepth] = fElementChildrenLength; // add this element to children if (fElementChildren.length <= fElementChildrenLength) { QName[] newarray = new QName[fElementChildrenLength * 2]; System.arraycopy(fElementChildren, 0, newarray, 0, fElementChildren.length); fElementChildren = newarray; } QName qname = fElementChildren[fElementChildrenLength]; if (qname == null) { for (int i = fElementChildrenLength; i < fElementChildren.length; i++) { fElementChildren[i] = new QName(); } qname = fElementChildren[fElementChildrenLength]; } qname.setValues(element); fElementChildrenLength++; if (DEBUG_ELEMENT_CHILDREN) { printChildren(); printStack(); } } // One more level of depth //fElementDepth++; ensureStackCapacity(fElementDepth); fCurrentElement.setValues(element); fCurrentElementEntity = fEntityHandler.getReaderId(); fElementTypeStack[fElementDepth] = fCurrentElement.rawname; fElementEntityStack[fElementDepth] = fCurrentElementEntity; fElementIndexStack[fElementDepth] = fCurrentElementIndex; 
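// the remaining per-element state (content spec type, schema scope and
// grammar namespace) is pushed below so that callEndElement can restore the
// enclosing element's context when the matching end tag is seen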
fContentSpecTypeStack[fElementDepth] = fCurrentContentSpecType; //REVISIT: Validation if ( fCurrentElementIndex > -1 && fGrammarIsSchemaGrammar && fValidating) { fCurrentScope = ((SchemaGrammar) fGrammar).getElementDefinedScope(fCurrentElementIndex); } fScopeStack[fElementDepth] = fCurrentScope; fGrammarNameSpaceIndexStack[fElementDepth] = fGrammarNameSpaceIndex; } // callStartElement(QName) private void ensureStackCapacity ( int newElementDepth) { if (newElementDepth == fElementTypeStack.length) { int[] newStack = new int[newElementDepth * 2]; System.arraycopy(fScopeStack, 0, newStack, 0, newElementDepth); fScopeStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fGrammarNameSpaceIndexStack, 0, newStack, 0, newElementDepth); fGrammarNameSpaceIndexStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fElementTypeStack, 0, newStack, 0, newElementDepth); fElementTypeStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fElementEntityStack, 0, newStack, 0, newElementDepth); fElementEntityStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fElementIndexStack, 0, newStack, 0, newElementDepth); fElementIndexStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fContentSpecTypeStack, 0, newStack, 0, newElementDepth); fContentSpecTypeStack = newStack; } } /** Call end element. */ public void callEndElement(int readerId) throws Exception { if ( DEBUG_SCHEMA_VALIDATION ) System.out.println("=======EndElement : " + fStringPool.toString(fCurrentElement.localpart)+"\n"); int prefixIndex = fCurrentElement.prefix; // REVISIT: Validation int elementType = fCurrentElement.rawname; if (fCurrentElementEntity != readerId) { fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_ELEMENT_ENTITY_MISMATCH, XMLMessages.P78_NOT_WELLFORMED, new Object[] { fStringPool.toString(elementType) }, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); } fElementDepth--; if (fValidating) { int elementIndex = fCurrentElementIndex; if (elementIndex != -1 && fCurrentContentSpecType != -1) { QName children[] = fElementChildren; int childrenOffset = fElementChildrenOffsetStack[fElementDepth + 1] + 1; int childrenLength = fElementChildrenLength - childrenOffset; if (DEBUG_ELEMENT_CHILDREN) { System.out.println("endElement("+fStringPool.toString(fCurrentElement.rawname)+')'); System.out.print("offset: "); System.out.print(childrenOffset); System.out.print(", length: "); System.out.print(childrenLength); System.out.println(); printChildren(); printStack(); } int result = checkContent(elementIndex, children, childrenOffset, childrenLength); if ( DEBUG_SCHEMA_VALIDATION ) System.out.println("!!!!!!!!In XMLValidator, the return value from checkContent : " + result); if (result != -1) { int majorCode = result != childrenLength ? 
XMLMessages.MSG_CONTENT_INVALID : XMLMessages.MSG_CONTENT_INCOMPLETE; fGrammar.getElementDecl(elementIndex, fTempElementDecl); reportRecoverableXMLError(majorCode, 0, fStringPool.toString(elementType), XMLContentSpec.toString(fGrammar, fStringPool, fTempElementDecl.contentSpecIndex));// REVISIT: getContentSpecAsString(elementIndex)); } } fElementChildrenLength = fElementChildrenOffsetStack[fElementDepth + 1] + 1; } fDocumentHandler.endElement(fCurrentElement); if (fNamespacesEnabled) { fNamespacesScope.decreaseDepth(); } // now pop this element off the top of the element stack //if (fElementDepth-- < 0) { if (fElementDepth < -1) { throw new RuntimeException("FWK008 Element stack underflow"); } if (fElementDepth < 0) { fCurrentElement.clear(); fCurrentElementEntity = -1; fCurrentElementIndex = -1; fCurrentContentSpecType = -1; fInElementContent = false; // // Check after document is fully parsed // (1) check that there was an element with a matching id for every // IDREF and IDREFS attr (V_IDREF0) // if (fValidating && fIdRefs != null) { checkIdRefs(); } return; } //restore enclosing element to all the "current" variables // REVISIT: Validation. This information needs to be stored. fCurrentElement.prefix = -1; fCurrentElement.localpart = fElementTypeStack[fElementDepth]; fCurrentElement.rawname = fElementTypeStack[fElementDepth]; fCurrentElementEntity = fElementEntityStack[fElementDepth]; fCurrentElementIndex = fElementIndexStack[fElementDepth]; fCurrentContentSpecType = fContentSpecTypeStack[fElementDepth]; //REVISIT: Validation fCurrentScope = fScopeStack[fElementDepth]; //if ( DEBUG_SCHEMA_VALIDATION ) { /**** System.out.println("+++++ currentElement : " + fStringPool.toString(elementType)+ "\n fCurrentElementIndex : " + fCurrentElementIndex + "\n fCurrentScope : " + fCurrentScope + "\n fCurrentContentSpecType : " + fCurrentContentSpecType + "\n++++++++++++++++++++++++++++++++++++++++++++++++" ); /****/ //} // if enclosing element's Schema is different, need to switch "context" if ( fGrammarNameSpaceIndex != fGrammarNameSpaceIndexStack[fElementDepth] ) { fGrammarNameSpaceIndex = fGrammarNameSpaceIndexStack[fElementDepth]; switchGrammar(fGrammarNameSpaceIndex); } if (fValidating) { fBufferDatatype = false; } fInElementContent = (fCurrentContentSpecType == XMLElementDecl.TYPE_CHILDREN); } // callEndElement(int) /** Call start CDATA section. */ public void callStartCDATA() throws Exception { fDocumentHandler.startCDATA(); } /** Call end CDATA section. */ public void callEndCDATA() throws Exception { fDocumentHandler.endCDATA(); } /** Call characters. */ public void callCharacters(int ch) throws Exception { if (fCharRefData == null) { fCharRefData = new char[2]; } int count = (ch < 0x10000) ? 1 : 2; if (count == 1) { fCharRefData[0] = (char)ch; } else { fCharRefData[0] = (char)(((ch-0x00010000)>>10)+0xd800); fCharRefData[1] = (char)(((ch-0x00010000)&0x3ff)+0xdc00); } if (fValidating && (fInElementContent || fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY)) { charDataInContent(); } if (fSendCharDataAsCharArray) { fDocumentHandler.characters(fCharRefData, 0, count); } else { int index = fStringPool.addString(new String(fCharRefData, 0, count)); fDocumentHandler.characters(index); } } // callCharacters(int) /** Call processing instruction. */ public void callProcessingInstruction(int target, int data) throws Exception { fDocumentHandler.processingInstruction(target, data); } /** Call comment. 
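* <p>
* Passes the string pool handle of the comment text straight through to the
* document handler.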
*/ public void callComment(int comment) throws Exception { fDocumentHandler.comment(comment); } // // NamespacesScope.NamespacesHandler methods // /** Start a new namespace declaration scope. */ public void startNamespaceDeclScope(int prefix, int uri) throws Exception { fDocumentHandler.startNamespaceDeclScope(prefix, uri); } /** End a namespace declaration scope. */ public void endNamespaceDeclScope(int prefix) throws Exception { fDocumentHandler.endNamespaceDeclScope(prefix); } // attributes /** Normalize attribute value. */ public int normalizeAttValue(QName element, QName attribute, int attValue, int attType, boolean list, int enumHandle) throws Exception { AttributeValidator av = getValidatorForAttType(attType, list); if (av != null) { return av.normalize(element, attribute, attValue, attType, enumHandle); } return -1; } // normalizeAttValue(QName,QName,int,int,boolean,int):int // other /** Sets the root element. */ public void setRootElementType(QName rootElement) { fRootElement.setValues(rootElement); } /** * Returns true if the element declaration is external. * <p> * <strong>Note:</strong> This method is primarilly useful for * DTDs with internal and external subsets. */ private boolean getElementDeclIsExternal(int elementIndex) { /*if (elementIndex < 0 || elementIndex >= fElementCount) { return false; } int chunk = elementIndex >> CHUNK_SHIFT; int index = elementIndex & CHUNK_MASK; return (fElementDeclIsExternal[chunk][index] != 0); */ if (fGrammarIsDTDGrammar ) { return ((DTDGrammar) fGrammar).getElementDeclIsExternal(elementIndex); } return false; } /** Returns the content spec type for an element index. */ public int getContentSpecType(int elementIndex) { int contentSpecType = -1; if ( elementIndex > -1) { if ( fGrammar.getElementDecl(elementIndex,fTempElementDecl) ) { contentSpecType = fTempElementDecl.type; } } return contentSpecType; } /** Returns the content spec handle for an element index. */ public int getContentSpecHandle(int elementIndex) { int contentSpecHandle = -1; if ( elementIndex > -1) { if ( fGrammar.getElementDecl(elementIndex,fTempElementDecl) ) { contentSpecHandle = fTempElementDecl.contentSpecIndex; } } return contentSpecHandle; } // // Protected methods // // error reporting /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode) throws Exception { fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, null, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int) /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode, int stringIndex1) throws Exception { Object[] args = { fStringPool.toString(stringIndex1) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,int) /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode, String string1) throws Exception { Object[] args = { string1 }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,String) /** Report a recoverable xml error. 
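* <p>
* Both string pool indices are resolved to their text and passed as the
* message arguments for the reported error.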
*/ protected void reportRecoverableXMLError(int majorCode, int minorCode, int stringIndex1, int stringIndex2) throws Exception { Object[] args = { fStringPool.toString(stringIndex1), fStringPool.toString(stringIndex2) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,int,int) /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode, String string1, String string2) throws Exception { Object[] args = { string1, string2 }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,String,String) /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode, String string1, String string2, String string3) throws Exception { Object[] args = { string1, string2, string3 }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,String,String,String) // content spec /** * Returns information about which elements can be placed at a particular point * in the passed element's content model. * <p> * Note that the incoming content model to test must be valid at least up to * the insertion point. If not, then -1 will be returned and the info object * will not have been filled in. * <p> * If, on return, the info.isValidEOC flag is set, then the 'insert after' * elemement is a valid end of content, i.e. nothing needs to be inserted * after it to make the parent element's content model valid. * * @param elementIndex The index within the <code>ElementDeclPool</code> of the * element which is being querying. * @param fullyValid Only return elements that can be inserted and still * maintain the validity of subsequent elements past the * insertion point (if any). If the insertion point is at * the end, and this is true, then only elements that can * be legal final states will be returned. * @param info An object that contains the required input data for the method, * and which will contain the output information if successful. * * @return The value -1 if fully valid, else the 0 based index of the child * that first failed before the insertion point. If the value * returned is equal to the number of children, then the specified * children are valid but additional content is required to reach a * valid ending state. * * @exception Exception Thrown on error. * * @see InsertableElementsInfo */ protected int whatCanGoHere(int elementIndex, boolean fullyValid, InsertableElementsInfo info) throws Exception { // // Do some basic sanity checking on the info packet. First, make sure // that insertAt is not greater than the child count. It can be equal, // which means to get appendable elements, but not greater. Or, if // the current children array is null, that's bad too. // // Since the current children array must have a blank spot for where // the insert is going to be, the child count must always be at least // one. // // Make sure that the child count is not larger than the current children // array. It can be equal, which means get appendable elements, but not // greater. 
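//
// As a purely illustrative sketch (the local names below are hypothetical,
// not part of this class), a caller asking which elements could legally be
// appended after the last child might fill in the packet like this:
//
//   InsertableElementsInfo info = new InsertableElementsInfo();
//   info.curChildren = children;    // children seen so far, with a blank slot for the insert
//   info.childCount  = childCount;  // includes the blank slot
//   info.insertAt    = childCount;  // equal to the child count: ask for appendable elements
//   int firstFailure = whatCanGoHere(elementIndex, true, info);
//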
// if (info.insertAt > info.childCount || info.curChildren == null || info.childCount < 1 || info.childCount > info.curChildren.length) { fErrorReporter.reportError(fErrorReporter.getLocator(), ImplementationMessages.XERCES_IMPLEMENTATION_DOMAIN, ImplementationMessages.VAL_WCGHI, 0, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); } int retVal = 0; try { // Get the content model for this element final XMLContentModel cmElem = getContentModel(elementIndex); // And delegate this call to it retVal = cmElem.whatCanGoHere(fullyValid, info); } catch (CMException excToCatch) { // REVISIT - Translate caught error to the protected error handler interface int majorCode = excToCatch.getErrorCode(); fErrorReporter.reportError(fErrorReporter.getLocator(), ImplementationMessages.XERCES_IMPLEMENTATION_DOMAIN, majorCode, 0, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); throw excToCatch; } return retVal; } // whatCanGoHere(int,boolean,InsertableElementsInfo):int // attribute information /** Protected for use by AttributeValidator classes. */ protected boolean getAttDefIsExternal(QName element, QName attribute) { int attDefIndex = getAttDef(element, attribute); if (fGrammarIsDTDGrammar ) { return ((DTDGrammar) fGrammar).getAttributeDeclIsExternal(attDefIndex); } return false; } /** addId. */ protected boolean addId(int idIndex) { Integer key = new Integer(idIndex); if (fIdDefs == null) { fIdDefs = new Hashtable(); } else if (fIdDefs.containsKey(key)) { return false; } if (fNullValue == null) { fNullValue = new Object(); } fIdDefs.put(key, fNullValue/*new Integer(elementType)*/); return true; } // addId(int):boolean /** addIdRef. */ protected void addIdRef(int idIndex) { Integer key = new Integer(idIndex); if (fIdDefs != null && fIdDefs.containsKey(key)) { return; } if (fIdRefs == null) { fIdRefs = new Hashtable(); } else if (fIdRefs.containsKey(key)) { return; } if (fNullValue == null) { fNullValue = new Object(); } fIdRefs.put(key, fNullValue/*new Integer(elementType)*/); } // addIdRef(int) // // Private methods // // other /** Returns true if using a standalone reader. */ private boolean usingStandaloneReader() { return fStandaloneReader == -1 || fEntityHandler.getReaderId() == fStandaloneReader; } /** Returns a locator implementation. */ private LocatorImpl getLocatorImpl(LocatorImpl fillin) { Locator here = fErrorReporter.getLocator(); if (fillin == null) return new LocatorImpl(here); fillin.setPublicId(here.getPublicId()); fillin.setSystemId(here.getSystemId()); fillin.setLineNumber(here.getLineNumber()); fillin.setColumnNumber(here.getColumnNumber()); return fillin; } // getLocatorImpl(LocatorImpl):LocatorImpl // content models /** * This method will handle the querying of the content model for a * particular element. If the element does not have a content model, then * it will be created. */ private XMLContentModel getContentModel(int elementIndex) throws CMException { // See if a content model already exists first XMLContentModel cmRet = getElementContentModel(elementIndex); // If we have one, just return that. Otherwise, gotta create one if (cmRet != null) { return cmRet; } // Get the type of content this element has final int contentSpec = getContentSpecType(elementIndex); // And create the content model according to the spec type if (contentSpec == XMLElementDecl.TYPE_MIXED) { // // Just create a mixel content model object. This type of // content model is optimized for mixed content validation. 
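// (content models are built lazily: getElementContentModel is consulted
//  first, and a newly created model is handed back to setContentModel for
//  caching, so construction is intended to happen at most once per element
//  declaration)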
// //REVISIT, could not compile // XMLContentSpec specNode = new XMLContentSpec(); // int contentSpecIndex = getContentSpecHandle(elementIndex); // makeContentList(contentSpecIndex, specNode); // cmRet = new MixedContentModel(fCount, fContentList); } else if (contentSpec == XMLElementDecl.TYPE_CHILDREN) { // // This method will create an optimal model for the complexity // of the element's defined model. If its simple, it will create // a SimpleContentModel object. If its a simple list, it will // create a SimpleListContentModel object. If its complex, it // will create a DFAContentModel object. // //REVISIT: couldnot compile //cmRet = createChildModel(elementIndex); } else if (contentSpec == fDATATYPESymbol) { // cmRet = fSchemaImporter.createDatatypeContentModel(elementIndex); } else { throw new CMException(ImplementationMessages.VAL_CST); } // Add the new model to the content model for this element //REVISIT setContentModel(elementIndex, cmRet); return cmRet; } // getContentModel(int):XMLContentModel // initialization /** Reset pool. */ private void poolReset() { if (fIdDefs != null) { fIdDefs.clear(); } if (fIdRefs != null) { fIdRefs.clear(); } } // poolReset() /** Reset common. */ private void resetCommon(StringPool stringPool) throws Exception { fStringPool = stringPool; fValidating = fValidationEnabled; fValidationEnabledByDynamic = false; fDynamicDisabledByValidation = false; poolReset(); fCalledStartDocument = false; fStandaloneReader = -1; fElementChildrenLength = 0; fElementDepth = -1; fSeenRootElement = false; fSeenDoctypeDecl = false; fNamespacesScope = null; fNamespacesPrefix = -1; fRootElement.clear(); fAttrListHandle = -1; fCheckedForSchema = false; fCurrentScope = TOP_LEVEL_SCOPE; fCurrentSchemaURI = -1; fEmptyURI = - 1; fXsiPrefix = - 1; fGrammar = null; fGrammarNameSpaceIndex = -1; fGrammarResolver = null; fGrammarIsDTDGrammar = false; fGrammarIsSchemaGrammar = false; init(); } // resetCommon(StringPool) /** Initialize. 
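* <p>
* Interns the symbols that the validator compares against repeatedly during
* validation: the content spec type names, the attribute type names, the
* #REQUIRED/#FIXED default markers and the xml:lang attribute name.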
*/ private void init() { fEmptyURI = fStringPool.addSymbol(""); fXsiURI = fStringPool.addSymbol(SchemaSymbols.URI_XSI); fEMPTYSymbol = fStringPool.addSymbol("EMPTY"); fANYSymbol = fStringPool.addSymbol("ANY"); fMIXEDSymbol = fStringPool.addSymbol("MIXED"); fCHILDRENSymbol = fStringPool.addSymbol("CHILDREN"); fCDATASymbol = fStringPool.addSymbol("CDATA"); fIDSymbol = fStringPool.addSymbol("ID"); fIDREFSymbol = fStringPool.addSymbol("IDREF"); fIDREFSSymbol = fStringPool.addSymbol("IDREFS"); fENTITYSymbol = fStringPool.addSymbol("ENTITY"); fENTITIESSymbol = fStringPool.addSymbol("ENTITIES"); fNMTOKENSymbol = fStringPool.addSymbol("NMTOKEN"); fNMTOKENSSymbol = fStringPool.addSymbol("NMTOKENS"); fNOTATIONSymbol = fStringPool.addSymbol("NOTATION"); fENUMERATIONSymbol = fStringPool.addSymbol("ENUMERATION"); fREQUIREDSymbol = fStringPool.addSymbol("#REQUIRED"); fFIXEDSymbol = fStringPool.addSymbol("#FIXED"); fDATATYPESymbol = fStringPool.addSymbol("<<datatype>>"); fEpsilonIndex = fStringPool.addSymbol("<<CMNODE_EPSILON>>"); fXMLLang = fStringPool.addSymbol("xml:lang"); /** fEMPTYSymbol = XMLElementDecl.TYPE_EMPTY; fANYSymbol = XMLElementDecl.TYPE_ANY; fMIXEDSymbol = XMLElementDecl.TYPE_MIXED; fCHILDRENSymbol = XMLElementDecl.TYPE_CHILDREN; fCDATASymbol = XMLAttributeDecl.TYPE_CDATA; fIDSymbol = XMLAttributeDecl.TYPE_ID; fIDREFSymbol = XMLAttributeDecl.TYPE_IDREF; fIDREFSSymbol = XMLAttributeDecl.TYPE_IDREF; fENTITYSymbol = XMLAttributeDecl.TYPE_ENTITY; fENTITIESSymbol = XMLAttributeDecl.TYPE_ENTITY; fNMTOKENSymbol = XMLAttributeDecl.TYPE_NMTOKEN; fNMTOKENSSymbol = XMLAttributeDecl.TYPE_NMTOKEN; fNOTATIONSymbol = XMLAttributeDecl.TYPE_NOTATION; fENUMERATIONSymbol = XMLAttributeDecl.TYPE_ENUMERATION; fREQUIREDSymbol = XMLAttributeDecl.DEFAULT_TYPE_REQUIRED; fFIXEDSymbol = XMLAttributeDecl.DEFAULT_TYPE_FIXED; fDATATYPESymbol = XMLElementDecl.TYPE_SIMPLE; **/ } // init() // other // default attribute /** addDefaultAttributes. 
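* <p>
* Walks the attribute declarations for the element: reports REQUIRED
* attributes that were not specified, checks specified values against a
* FIXED default, warns about defaulted externally-declared attributes in a
* standalone document, and appends the remaining defaults to the attribute
* list. The (possibly newly started) attribute list handle is returned.
* It is invoked from validateElementAndAttributes as
* <code>fAttrListHandle = addDefaultAttributes(elementIndex, attrList,
* fAttrListHandle, fValidating, fStandaloneReader != -1);</code>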
*/ private int addDefaultAttributes(int elementIndex, XMLAttrList attrList, int attrIndex, boolean validationEnabled, boolean standalone) throws Exception { //System.out.println("XMLValidator#addDefaultAttributes"); //System.out.print(" "); //fGrammar.printAttributes(elementIndex); // // Check after all specified attrs are scanned // (1) report error for REQUIRED attrs that are missing (V_TAGc) // (2) check that FIXED attrs have matching value (V_TAGd) // (3) add default attrs (FIXED and NOT_FIXED) // fGrammar.getElementDecl(elementIndex,fTempElementDecl); //System.out.println("addDefaultAttributes: " + fStringPool.toString(fTempElementDecl.name.localpart)+ // "," + attrIndex + "," + validationEnabled); int elementNameIndex = fTempElementDecl.name.localpart; int attlistIndex = fGrammar.getFirstAttributeDeclIndex(elementIndex); int firstCheck = attrIndex; int lastCheck = -1; while (attlistIndex != -1) { //int adChunk = attlistIndex >> CHUNK_SHIFT; //int adIndex = attlistIndex & CHUNK_MASK; fGrammar.getAttributeDecl(attlistIndex, fTempAttDecl); // TO DO: For ericye Debug only /*** if (fTempAttDecl != null) { XMLElementDecl element = new XMLElementDecl(); fGrammar.getElementDecl(elementIndex, element); System.out.println("element: "+fStringPool.toString(element.name.localpart)); System.out.println("attlistIndex " + attlistIndex + "\n"+ "attName : '"+fStringPool.toString(fTempAttDecl.name.localpart) + "'\n" + "attType : "+fTempAttDecl.type + "\n" + "attDefaultType : "+fTempAttDecl.defaultType + "\n" + "attDefaultValue : '"+fTempAttDecl.defaultValue + "'\n" + attrList.getLength() +"\n" ); } /***/ int attPrefix = fTempAttDecl.name.prefix; int attName = fTempAttDecl.name.localpart; int attType = attributeTypeName(fTempAttDecl); int attDefType =fTempAttDecl.defaultType; int attValue = -1 ; if (fTempAttDecl.defaultValue != null ) { attValue = fStringPool.addSymbol(fTempAttDecl.defaultValue); } boolean specified = false; boolean required = attDefType == XMLAttributeDecl.DEFAULT_TYPE_REQUIRED; /**** if (fValidating && fGrammar != null && fGrammarIsDTDGrammar && attValue != -1) { normalizeAttValue(null, fTempAttDecl.name, attValue,attType,fTempAttDecl.list, fTempAttDecl.enumeration); } /****/ if (firstCheck != -1) { boolean cdata = attType == fCDATASymbol; if (!cdata || required || attValue != -1) { int i = attrList.getFirstAttr(firstCheck); while (i != -1 && (lastCheck == -1 || i <= lastCheck)) { //if (fStringPool.equalNames(attrList.getAttrName(i), attName)) { if ( fStringPool.equalNames(attrList.getAttrLocalpart(i), attName) && fStringPool.equalNames(attrList.getAttrURI(i), fTempAttDecl.name.uri) ) { if (validationEnabled && attDefType == XMLAttributeDecl.DEFAULT_TYPE_FIXED) { int alistValue = attrList.getAttValue(i); if (alistValue != attValue && !fStringPool.toString(alistValue).equals(fStringPool.toString(attValue))) { Object[] args = { fStringPool.toString(elementNameIndex), fStringPool.toString(attName), fStringPool.toString(alistValue), fStringPool.toString(attValue) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_FIXED_ATTVALUE_INVALID, XMLMessages.VC_FIXED_ATTRIBUTE_DEFAULT, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } specified = true; break; } i = attrList.getNextAttr(i); } } } if (!specified) { if (required) { if (validationEnabled) { Object[] args = { fStringPool.toString(elementNameIndex), fStringPool.toString(attName) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, 
XMLMessages.MSG_REQUIRED_ATTRIBUTE_NOT_SPECIFIED, XMLMessages.VC_REQUIRED_ATTRIBUTE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } else if (attValue != -1) { if (validationEnabled && standalone ) if ( fGrammarIsDTDGrammar && ((DTDGrammar) fGrammar).getAttributeDeclIsExternal(attlistIndex) ) { Object[] args = { fStringPool.toString(elementNameIndex), fStringPool.toString(attName) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_DEFAULTED_ATTRIBUTE_NOT_SPECIFIED, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } if (attType == fIDREFSymbol) { addIdRef(attValue); } else if (attType == fIDREFSSymbol) { StringTokenizer tokenizer = new StringTokenizer(fStringPool.toString(attValue)); while (tokenizer.hasMoreTokens()) { String idName = tokenizer.nextToken(); addIdRef(fStringPool.addSymbol(idName)); } } if (attrIndex == -1) { attrIndex = attrList.startAttrList(); } // REVISIT: Validation. What should the prefix be? fTempQName.setValues(attPrefix, attName, attName, fTempAttDecl.name.uri); int newAttr = attrList.addAttr(fTempQName, attValue, attType, false, false); if (lastCheck == -1) { lastCheck = newAttr; } } } attlistIndex = fGrammar.getNextAttributeDeclIndex(attlistIndex); } return attrIndex; } // addDefaultAttributes(int,XMLAttrList,int,boolean,boolean):int // content models /** Queries the content model for the specified element index. */ private XMLContentModel getElementContentModel(int elementIndex) throws CMException { XMLContentModel contentModel = null; if ( elementIndex > -1) { if ( fGrammar.getElementDecl(elementIndex,fTempElementDecl) ) { contentModel = fGrammar.getElementContentModel(elementIndex); } } //return fGrammar.getElementContentModel(elementIndex); return contentModel; } /** Sets the content model for the specified element index. */ private void setContentModel(int elementIndex, XMLContentModel cm) { // REVISIT: What's this method do? /*if (elementIndex < 0 || elementIndex >= fElementCount) { return; } int chunk = elementIndex >> CHUNK_SHIFT; int index = elementIndex & CHUNK_MASK; fContentModel[chunk][index] = cm; */ } // query attribute information /** Returns the validatator for an attribute type. 
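* <p>
* Lazily instantiates and caches the AttributeValidator matching the
* declared attribute type; for list-valued declarations the plural validator
* (IDREFS, ENTITIES, NMTOKENS) is returned instead of the singular one.
* Returns null for simple (datatype) attributes, whose values are checked
* through their datatype validators instead.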
*/ private AttributeValidator getValidatorForAttType(int attType, boolean list) { if (attType == XMLAttributeDecl.TYPE_CDATA) { if (fAttValidatorCDATA == null) { fAttValidatorCDATA = new AttValidatorCDATA(); } return fAttValidatorCDATA; } if (attType == XMLAttributeDecl.TYPE_ID) { if (fAttValidatorID == null) { fAttValidatorID = new AttValidatorID(); } return fAttValidatorID; } if (attType == XMLAttributeDecl.TYPE_IDREF) { if (!list) { if (fAttValidatorIDREF == null) { fAttValidatorIDREF = new AttValidatorIDREF(); } return fAttValidatorIDREF; } else { if (fAttValidatorIDREFS == null) { fAttValidatorIDREFS = new AttValidatorIDREFS(); } return fAttValidatorIDREFS; } } if (attType == XMLAttributeDecl.TYPE_ENTITY) { if (!list) { if (fAttValidatorENTITY == null) { fAttValidatorENTITY = new AttValidatorENTITY(); } return fAttValidatorENTITY; } else{ if (fAttValidatorENTITIES == null) { fAttValidatorENTITIES = new AttValidatorENTITIES(); } return fAttValidatorENTITIES; } } if (attType == XMLAttributeDecl.TYPE_NMTOKEN) { if (!list) { if (fAttValidatorNMTOKEN == null) { fAttValidatorNMTOKEN = new AttValidatorNMTOKEN(); } return fAttValidatorNMTOKEN; } else{ if (fAttValidatorNMTOKENS == null) { fAttValidatorNMTOKENS = new AttValidatorNMTOKENS(); } return fAttValidatorNMTOKENS; } } if (attType == XMLAttributeDecl.TYPE_NOTATION) { if (fAttValidatorNOTATION == null) { fAttValidatorNOTATION = new AttValidatorNOTATION(); } return fAttValidatorNOTATION; } if (attType == XMLAttributeDecl.TYPE_ENUMERATION) { if (fAttValidatorENUMERATION == null) { fAttValidatorENUMERATION = new AttValidatorENUMERATION(); } return fAttValidatorENUMERATION; } if (attType == XMLAttributeDecl.TYPE_SIMPLE) { if (fAttValidatorDATATYPE == null) { fAttValidatorDATATYPE = null; //REVISIT : !!! used to be fSchemaImporter.createDatatypeAttributeValidator(); } //return fAttValidatorDATATYPE; } return null; //throw new RuntimeException("getValidatorForAttType(" + fStringPool.toString(attType) + ")"); } /** Returns an attribute definition for an element type. */ private int getAttDef(QName element, QName attribute) { if (fGrammar != null) { int scope = fCurrentScope; if (element.uri > -1) { scope = TOP_LEVEL_SCOPE; } int elementIndex = fGrammar.getElementDeclIndex(element.localpart,scope); if (elementIndex == -1) { return -1; } int attDefIndex = fGrammar.getFirstAttributeDeclIndex(elementIndex); while (attDefIndex != -1) { fGrammar.getAttributeDecl(attDefIndex, fTempAttDecl); if (fTempAttDecl.name.localpart == attribute.localpart && fTempAttDecl.name.uri == attribute.uri ) { return attDefIndex; } attDefIndex = fGrammar.getNextAttributeDeclIndex(attDefIndex); } } return -1; } // getAttDef(QName,QName) /** Returns an attribute definition for an element type. */ private int getAttDefByElementIndex(int elementIndex, QName attribute) { if (fGrammar != null && elementIndex > -1) { if (elementIndex == -1) { return -1; } int attDefIndex = fGrammar.getFirstAttributeDeclIndex(elementIndex); while (attDefIndex != -1) { fGrammar.getAttributeDecl(attDefIndex, fTempAttDecl); if (fTempAttDecl.name.localpart == attribute.localpart && fTempAttDecl.name.uri == attribute.uri ) { return attDefIndex; } attDefIndex = fGrammar.getNextAttributeDeclIndex(attDefIndex); } } return -1; } // getAttDef(QName,QName) // validation /** Root element specified. 
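* <p>
* Called when the document element is seen: installs the default grammar
* from the grammar resolver (turning validation off again in dynamic mode
* when none is available), checks a DTD's declared root element type against
* the actual document element, and, when namespace processing is enabled,
* creates the namespace scope with the built-in xmlns and xml bindings.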
*/ private void rootElementSpecified(QName rootElement) throws Exception { // this is what it used to be //if (fDynamicValidation && !fSeenDoctypeDecl) { //fValidating = false; //} if (fValidating) { // initialize the grammar to be the default one. if (fGrammar == null) { fGrammar = fGrammarResolver.getGrammar(""); //TO DO, for ericye debug only if (fGrammar == null && DEBUG_SCHEMA_VALIDATION) { System.out.println("Oops! no grammar is found for validation"); } if (fDynamicValidation && fGrammar==null) { fValidating = false; } if (fGrammar != null) { if (fGrammar instanceof DTDGrammar) { fGrammarIsDTDGrammar = true; fGrammarIsSchemaGrammar = false; } else if ( fGrammar instanceof SchemaGrammar ) { fGrammarIsSchemaGrammar = true; fGrammarIsDTDGrammar = false; } fGrammarNameSpaceIndex = fEmptyURI; } } if ( fGrammarIsDTDGrammar && ((DTDGrammar) fGrammar).getRootElementQName(fRootElement) ) { String root1 = fStringPool.toString(fRootElement.rawname); String root2 = fStringPool.toString(rootElement.rawname); if (!root1.equals(root2)) { reportRecoverableXMLError(XMLMessages.MSG_ROOT_ELEMENT_TYPE, XMLMessages.VC_ROOT_ELEMENT_TYPE, fRootElement.rawname, rootElement.rawname); } } } if (fNamespacesEnabled) { if (fNamespacesScope == null) { fNamespacesScope = new NamespacesScope(this); fNamespacesPrefix = fStringPool.addSymbol("xmlns"); fNamespacesScope.setNamespaceForPrefix(fNamespacesPrefix, -1); int xmlSymbol = fStringPool.addSymbol("xml"); int xmlNamespace = fStringPool.addSymbol("http://www.w3.org/XML/1998/namespace"); fNamespacesScope.setNamespaceForPrefix(xmlSymbol, xmlNamespace); } } } // rootElementSpecified(QName) /** Switchs to correct validating symbol tables when Schema changes.*/ private void switchGrammar(int newGrammarNameSpaceIndex) { Grammar tempGrammar = fGrammarResolver.getGrammar(fStringPool.toString(newGrammarNameSpaceIndex)); if (tempGrammar == null) { System.out.println(fStringPool.toString(newGrammarNameSpaceIndex) + " grammar not found"); //TO DO report error here } else { fGrammar = tempGrammar; if (fGrammar instanceof DTDGrammar) { fGrammarIsDTDGrammar = true; fGrammarIsSchemaGrammar = false; } else if ( fGrammar instanceof SchemaGrammar ) { fGrammarIsSchemaGrammar = true; fGrammarIsDTDGrammar = false; } } } /** Binds namespaces to the element and attributes. */ private void bindNamespacesToElementAndAttributes(QName element, XMLAttrList attrList) throws Exception { fNamespacesScope.increaseDepth(); Vector schemaCandidateURIs = null; Hashtable locationUriPairs = null; if (fValidating) { schemaCandidateURIs = new Vector(); locationUriPairs = new Hashtable(); } if (fAttrListHandle != -1) { int index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attName = attrList.getAttrName(index); int attPrefix = attrList.getAttrPrefix(index); if (fStringPool.equalNames(attName, fXMLLang)) { /*** // NOTE: This check is done in the validateElementsAndAttributes // method. 
fDocumentScanner.checkXMLLangAttributeValue(attrList.getAttValue(index)); /***/ } else if (fStringPool.equalNames(attName, fNamespacesPrefix)) { int uri = fStringPool.addSymbol(attrList.getAttValue(index)); fNamespacesScope.setNamespaceForPrefix(StringPool.EMPTY_STRING, uri); } else { if (attPrefix == fNamespacesPrefix) { int nsPrefix = attrList.getAttrLocalpart(index); int uri = fStringPool.addSymbol(attrList.getAttValue(index)); boolean seeXsi = false; fNamespacesScope.setNamespaceForPrefix(nsPrefix, uri); String attrValue = fStringPool.toString(attrList.getAttValue(index)); if (attrValue.equals(SchemaSymbols.URI_XSI)) { fXsiPrefix = nsPrefix; seeXsi = true; } if (fValidating && !seeXsi) { schemaCandidateURIs.addElement( fStringPool.toString(uri) ); } } } index = attrList.getNextAttr(index); } // if validating, walk through the list again to deal with "xsi:...." if (fValidating) { index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attName = attrList.getAttrName(index); int attPrefix = attrList.getAttrPrefix(index); if (fStringPool.equalNames(attName, fNamespacesPrefix)) { // REVISIT } else { if ( DEBUG_SCHEMA_VALIDATION ) { System.out.println("deal with XSI"); System.out.println("before find XSI: "+fStringPool.toString(attPrefix) +","+fStringPool.toString(fXsiPrefix) ); } if (attPrefix == fXsiPrefix && fXsiPrefix != -1 ) { if (DEBUG_SCHEMA_VALIDATION) { System.out.println("find XSI: "+fStringPool.toString(attPrefix) +","+fStringPool.toString(attName) ); } int localpart = attrList.getAttrLocalpart(index); if (localpart == fStringPool.addSymbol(SchemaSymbols.XSI_SCHEMALOCACTION)) { parseSchemaLocation(fStringPool.toString(attrList.getAttValue(index)), locationUriPairs); } else if (localpart == fStringPool.addSymbol(SchemaSymbols.XSI_NONAMESPACESCHEMALOCACTION)) { locationUriPairs.put(fStringPool.toString(attrList.getAttValue(index)), ""); if (fNamespacesScope != null) { //bind prefix "" to URI "" in this case fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""), fStringPool.addSymbol("")); } } // REVISIT: should we break here? //break; } } index = attrList.getNextAttr(index); } // try to resolve all the grammars here Enumeration locations = locationUriPairs.keys(); while (locations.hasMoreElements()) { String loc = (String) locations.nextElement(); String uri = (String) locationUriPairs.get(loc); resolveSchemaGrammar( loc, uri); schemaCandidateURIs.removeElement(uri); } //TO DO: This should be a feature that can be turned on or off /***** for (int i=0; i< schemaCandidateURIs.size(); i++) { String uri = (String) schemaCandidateURIs.elementAt(i); resolveSchemaGrammar(uri); } /*****/ } } // bind element to URI int prefix = element.prefix != -1 ? element.prefix : 0; int uri = fNamespacesScope.getNamespaceForPrefix(prefix); if (element.prefix != -1 || uri != -1) { element.uri = uri; if (element.uri == -1) { Object[] args = { fStringPool.toString(element.prefix) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XMLNS_DOMAIN, XMLMessages.MSG_PREFIX_DECLARED, XMLMessages.NC_PREFIX_DECLARED, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } //REVISIT: is this the right place to check on if the Schema has changed? 
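// once the element's namespace URI has been resolved, switch to the grammar
// registered for that namespace if it differs from the grammar currently in
// use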
if ( fValidating && element.uri != fGrammarNameSpaceIndex && element.uri != -1 ) { fGrammarNameSpaceIndex = element.uri; switchGrammar(fGrammarNameSpaceIndex); } if (fAttrListHandle != -1) { int index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attName = attrList.getAttrName(index); if (!fStringPool.equalNames(attName, fNamespacesPrefix)) { int attPrefix = attrList.getAttrPrefix(index); if (attPrefix != fNamespacesPrefix) { prefix = attPrefix != -1 ? attPrefix : 0; int attrUri = fNamespacesScope.getNamespaceForPrefix(prefix); if (attPrefix != -1 || attrUri != -1) { //int attrUri = fNamespacesScope.getNamespaceForPrefix(attPrefix); if (attrUri == -1) { Object[] args = { fStringPool.toString(attPrefix) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XMLNS_DOMAIN, XMLMessages.MSG_PREFIX_DECLARED, XMLMessages.NC_PREFIX_DECLARED, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } attrList.setAttrURI(index, attrUri); } } } index = attrList.getNextAttr(index); } } } // bindNamespacesToElementAndAttributes(QName,XMLAttrList) void parseSchemaLocation(String schemaLocationStr, Hashtable locationUriPairs){ if (locationUriPairs != null) { StringTokenizer tokenizer = new StringTokenizer(schemaLocationStr, " \n\t\r", false); int tokenTotal = tokenizer.countTokens(); if (tokenTotal % 2 != 0 ) { // TO DO: report warning - malformed schemaLocation string } else { while (tokenizer.hasMoreTokens()) { String uri = tokenizer.nextToken(); String location = tokenizer.nextToken(); locationUriPairs.put(location, uri); } } } else { // TO DO: should report internal error here } }// parseSchemaLocaltion(String, Hashtable) private void resolveSchemaGrammar( String loc, String uri) throws Exception { SchemaGrammar grammar = (SchemaGrammar) fGrammarResolver.getGrammar(uri); if (grammar == null) { DOMParser parser = new DOMParser(); parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() ); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://xml.org/sax/features/namespaces", true); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } // expand it before passing it to the parser loc = fEntityHandler.expandSystemId(loc); try { parser.parse( loc ); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { //e.printStackTrace(); reportRecoverableXMLError(167, 144, e.getMessage() ); } Document document = parser.getDocument(); //Our Grammar TraverseSchema tst = null; try { if (DEBUG_SCHEMA_VALIDATION) { System.out.println("I am geting the Schema Document"); } Element root = document.getDocumentElement();// This is what we pass to TraverserSchema if (root == null) { reportRecoverableXMLError(167, 144, "Can't get back Schema document's root element :" + loc); } else { if (uri == null || !uri.equals(root.getAttribute(SchemaSymbols.ATT_TARGETNAMESPACE)) ) { reportRecoverableXMLError(167,144, "Schema in " + loc + " has a different target namespace " + "from the one specified in the instance document :" + uri); } grammar = new SchemaGrammar(); grammar.setGrammarDocument(document); tst = new TraverseSchema( root, fStringPool, (SchemaGrammar)grammar, fGrammarResolver, fErrorReporter, loc); fGrammarResolver.putGrammar(document.getDocumentElement().getAttribute("targetNamespace"), grammar); } } catch (Exception e) { 
e.printStackTrace(System.err); } } } private void resolveSchemaGrammar(String uri) throws Exception{ resolveSchemaGrammar(uri, uri); } static class Resolver implements EntityResolver { private static final String SYSTEM[] = { "http://www.w3.org/TR/2000/WD-xmlschema-1-20000407/structures.dtd", "http://www.w3.org/TR/2000/WD-xmlschema-1-20000407/datatypes.dtd", "http://www.w3.org/TR/2000/WD-xmlschema-1-20000407/versionInfo.ent", }; private static final String PATH[] = { "structures.dtd", "datatypes.dtd", "versionInfo.ent", }; public InputSource resolveEntity(String publicId, String systemId) throws IOException { // looking for the schema DTDs? for (int i = 0; i < SYSTEM.length; i++) { if (systemId.equals(SYSTEM[i])) { InputSource source = new InputSource(getClass().getResourceAsStream(PATH[i])); source.setPublicId(publicId); source.setSystemId(systemId); return source; } } // use default resolution return null; } // resolveEntity(String,String):InputSource } // class Resolver static class ErrorHandler implements org.xml.sax.ErrorHandler { /** Warning. */ public void warning(SAXParseException ex) { System.err.println("[Warning] "+ getLocationString(ex)+": "+ ex.getMessage()); } /** Error. */ public void error(SAXParseException ex) { System.err.println("[Error] "+ getLocationString(ex)+": "+ ex.getMessage()); } /** Fatal error. */ public void fatalError(SAXParseException ex) { System.err.println("[Fatal Error] "+ getLocationString(ex)+": "+ ex.getMessage()); //throw ex; } // // Private methods // /** Returns a string of the location. */ private String getLocationString(SAXParseException ex) { StringBuffer str = new StringBuffer(); String systemId_ = ex.getSystemId(); if (systemId_ != null) { int index = systemId_.lastIndexOf('/'); if (index != -1) systemId_ = systemId_.substring(index + 1); str.append(systemId_); } str.append(':'); str.append(ex.getLineNumber()); str.append(':'); str.append(ex.getColumnNumber()); return str.toString(); } // getLocationString(SAXParseException):String } private int attributeTypeName(XMLAttributeDecl attrDecl) { switch (attrDecl.type) { //case XMLAttributeDecl.TYPE_CDATA: case XMLAttributeDecl.TYPE_ENTITY: { return attrDecl.list ? fENTITIESSymbol : fENTITYSymbol; } case XMLAttributeDecl.TYPE_ENUMERATION: { String enumeration = fStringPool.stringListAsString(attrDecl.enumeration); return fStringPool.addString(enumeration); } case XMLAttributeDecl.TYPE_ID: { return fIDSymbol; } case XMLAttributeDecl.TYPE_IDREF: { return attrDecl.list ? fIDREFSSymbol : fIDREFSymbol; } case XMLAttributeDecl.TYPE_NMTOKEN: { return attrDecl.list ? fNMTOKENSSymbol : fNMTOKENSSymbol; } case XMLAttributeDecl.TYPE_NOTATION: { return fNOTATIONSymbol; } } return fCDATASymbol; } /** Validates element and attributes. */ private void validateElementAndAttributes(QName element, XMLAttrList attrList) throws Exception { if (fGrammar == null && !fValidating && !fNamespacesEnabled) { fCurrentElementIndex = -1; fCurrentContentSpecType = -1; fInElementContent = false; if (fAttrListHandle != -1) { fAttrList.endAttrList(); int index = fAttrList.getFirstAttr(fAttrListHandle); while (index != -1) { if (fStringPool.equalNames(fAttrList.getAttrName(index), fXMLLang)) { fDocumentScanner.checkXMLLangAttributeValue(fAttrList.getAttValue(index)); break; } index = fAttrList.getNextAttr(index); } } return; } int elementIndex = -1; //REVISIT, is it possible, fValidating is false and fGrammar is no null.??? 
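// the element declaration is looked up in the current schema scope first;
// if that fails, the scopes defined by the ancestors' complex types are
// searched, and finally the top-level (global) scope is tried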
if ( fGrammar != null ){ if (DEBUG_SCHEMA_VALIDATION) { System.out.println("localpart: '" + fStringPool.toString(element.localpart) +"' and scope : " + fCurrentScope); } if (element.uri == -1) { elementIndex = fGrammar.getElementDeclIndex(element.localpart,fCurrentScope); } else { elementIndex = fGrammar.getElementDeclIndex(element.localpart, TOP_LEVEL_SCOPE); } if (elementIndex == -1) { // if validating based on a Schema, try to resolve the element again by look it up in its ancestor types if (fGrammarIsSchemaGrammar && fCurrentElementIndex != -1) { TraverseSchema.ComplexTypeInfo baseTypeInfo = null; baseTypeInfo = ((SchemaGrammar)fGrammar).getElementComplexTypeInfo(fCurrentElementIndex); while (baseTypeInfo != null) { elementIndex = fGrammar.getElementDeclIndex(element.localpart, baseTypeInfo.scopeDefined); if (elementIndex > -1 ) { break; } baseTypeInfo = baseTypeInfo.baseComplexTypeInfo; } } //if still can't resolve it, try TOP_LEVEL_SCOPE AGAIN if (element.uri == -1 && elementIndex == -1) { elementIndex = fGrammar.getElementDeclIndex(element.localpart, TOP_LEVEL_SCOPE); // REVISIT: // this is a hack to handle the situation where namespace prefix "" is bound to nothing, and there // is a "noNamespaceSchemaLocation" specified, and element element.uri = fStringPool.addSymbol(""); } /****/ if (elementIndex == -1) if (DEBUG_SCHEMA_VALIDATION) System.out.println("!!! can not find elementDecl in the grammar, " + " the element localpart: " + element.localpart+"["+fStringPool.toString(element.localpart) +"]" + " the element uri: " + element.uri+"["+fStringPool.toString(element.uri) +"]" + " and the current enclosing scope: " + fCurrentScope ); /****/ } if (DEBUG_SCHEMA_VALIDATION) { fGrammar.getElementDecl(elementIndex, fTempElementDecl); System.out.println("elementIndex: " + elementIndex+" \n and itsName : '" + fStringPool.toString(fTempElementDecl.name.localpart) +"' \n its ContentType:" + fTempElementDecl.type +"\n its ContentSpecIndex : " + fTempElementDecl.contentSpecIndex +"\n"); } } // here need to check if we need to switch Grammar by asking SchemaGrammar whether // this element actually is of a type in another Schema. if (fGrammarIsSchemaGrammar && elementIndex != -1) { String anotherSchemaURI = ((SchemaGrammar)fGrammar).getElementFromAnotherSchemaURI(elementIndex); if (anotherSchemaURI != null) { fGrammarNameSpaceIndex = fCurrentSchemaURI = fStringPool.addSymbol(anotherSchemaURI); switchGrammar(fCurrentSchemaURI); } } int contentSpecType = getContentSpecType(elementIndex); if (contentSpecType == -1 && fValidating) { reportRecoverableXMLError(XMLMessages.MSG_ELEMENT_NOT_DECLARED, XMLMessages.VC_ELEMENT_VALID, element.rawname); } if (fGrammar != null && elementIndex != -1) { //REVISIT: broken fAttrListHandle = addDefaultAttributes(elementIndex, attrList, fAttrListHandle, fValidating, fStandaloneReader != -1); } if (fAttrListHandle != -1) { fAttrList.endAttrList(); } if (DEBUG_PRINT_ATTRIBUTES) { String elementStr = fStringPool.toString(element.rawname); System.out.print("startElement: <" + elementStr); if (fAttrListHandle != -1) { int index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { System.out.print(" " + fStringPool.toString(attrList.getAttrName(index)) + "=\"" + fStringPool.toString(attrList.getAttValue(index)) + "\""); index = attrList.getNextAttr(index); } } System.out.println(">"); } // REVISIT: Validation. Do we need to recheck for the xml:lang // attribute? It was already checked above -- perhaps // this is to check values that are defaulted in? 
If // so, this check could move to the attribute decl // callback so we can check the default value before // it is used. if (fAttrListHandle != -1) { int index = fAttrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attrNameIndex = attrList.getAttrName(index); if (fStringPool.equalNames(attrNameIndex, fXMLLang)) { fDocumentScanner.checkXMLLangAttributeValue(attrList.getAttValue(index)); // break; } // here, we validate every "user-defined" attributes int _xmlns = fStringPool.addSymbol("xmlns"); if (attrNameIndex != _xmlns && attrList.getAttrPrefix(index) != _xmlns) if (fValidating) { fAttrNameLocator = getLocatorImpl(fAttrNameLocator); fTempQName.setValues(attrList.getAttrPrefix(index), attrList.getAttrLocalpart(index), attrList.getAttrName(index), attrList.getAttrURI(index) ); int attDefIndex = getAttDefByElementIndex(elementIndex, fTempQName); if (fTempQName.uri != fXsiURI) if (attDefIndex == -1) { // REVISIT - cache the elem/attr tuple so that we only give // this error once for each unique occurrence Object[] args = { fStringPool.toString(element.rawname), fStringPool.toString(attrList.getAttrName(index)) }; System.out.println("[Error] attribute " + fStringPool.toString(attrList.getAttrName(index)) + " not found in element type " + fStringPool.toString(element.rawname)); /*****/ fErrorReporter.reportError(fAttrNameLocator, XMLMessages.XML_DOMAIN, XMLMessages.MSG_ATTRIBUTE_NOT_DECLARED, XMLMessages.VC_ATTRIBUTE_VALUE_TYPE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); /******/ } else { fGrammar.getAttributeDecl(attDefIndex, fTempAttDecl); int attributeType = attributeTypeName(fTempAttDecl); attrList.setAttType(index, attributeType); if (fGrammarIsDTDGrammar && (fTempAttDecl.type == XMLAttributeDecl.TYPE_ENTITY || fTempAttDecl.type == XMLAttributeDecl.TYPE_ENUMERATION || fTempAttDecl.type == XMLAttributeDecl.TYPE_ID || fTempAttDecl.type == XMLAttributeDecl.TYPE_IDREF || fTempAttDecl.type == XMLAttributeDecl.TYPE_NMTOKEN || fTempAttDecl.type == XMLAttributeDecl.TYPE_NOTATION) ) { validateDTDattribute(element, attrList.getAttValue(index), fTempAttDecl); } if (fTempAttDecl.datatypeValidator == null) { Object[] args = { fStringPool.toString(element.rawname), fStringPool.toString(attrList.getAttrName(index)) }; System.out.println("[Error] Datatypevalidator for attribute " + fStringPool.toString(attrList.getAttrName(index)) + " not found in element type " + fStringPool.toString(element.rawname)); //REVISIT : is this the right message? 
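// a declared attribute without a datatype validator is reported below;
// otherwise the attribute value is checked against its datatype validator
// and failures surface as recoverable schema datatype errors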
/****/ fErrorReporter.reportError(fAttrNameLocator, XMLMessages.XML_DOMAIN, XMLMessages.MSG_ATTRIBUTE_NOT_DECLARED, XMLMessages.VC_ATTRIBUTE_VALUE_TYPE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); /****/ } else{ try { fTempAttDecl.datatypeValidator.validate(fStringPool.toString(attrList.getAttValue(index)), null ); } catch (InvalidDatatypeValueException idve) { fErrorReporter.reportError(fErrorReporter.getLocator(), SchemaMessageProvider.SCHEMA_DOMAIN, SchemaMessageProvider.DatatypeError, SchemaMessageProvider.MSG_NONE, new Object [] { idve.getMessage() }, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } } } index = fAttrList.getNextAttr(index); } } if (fAttrListHandle != -1) { int index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attName = attrList.getAttrName(index); if (!fStringPool.equalNames(attName, fNamespacesPrefix)) { int attPrefix = attrList.getAttrPrefix(index); if (attPrefix != fNamespacesPrefix) { if (attPrefix != -1) { int uri = fNamespacesScope.getNamespaceForPrefix(attPrefix); if (uri == -1) { Object[] args = { fStringPool.toString(attPrefix) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XMLNS_DOMAIN, XMLMessages.MSG_PREFIX_DECLARED, XMLMessages.NC_PREFIX_DECLARED, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } attrList.setAttrURI(index, uri); } } } index = attrList.getNextAttr(index); } } fCurrentElementIndex = elementIndex; fCurrentContentSpecType = contentSpecType; if (fValidating && contentSpecType == XMLElementDecl.TYPE_SIMPLE) { fBufferDatatype = true; fDatatypeBuffer.setLength(0); } fInElementContent = (contentSpecType == XMLElementDecl.TYPE_CHILDREN); } // validateElementAndAttributes(QName,XMLAttrList) //validate attributes in DTD fashion private void validateDTDattribute(QName element, int attValue, XMLAttributeDecl attributeDecl) throws Exception{ AttributeValidator av = null; switch (attributeDecl.type) { case XMLAttributeDecl.TYPE_ENTITY: if (attributeDecl.list) { av = fAttValidatorENTITIES; } else { av = fAttValidatorENTITY; } break; case XMLAttributeDecl.TYPE_ENUMERATION: av = fAttValidatorENUMERATION; break; case XMLAttributeDecl.TYPE_ID: av = fAttValidatorID; break; case XMLAttributeDecl.TYPE_IDREF: if (attributeDecl.list) { av = fAttValidatorIDREFS; } else { av = fAttValidatorIDREF; } break; case XMLAttributeDecl.TYPE_NOTATION: av = fAttValidatorNOTATION; break; case XMLAttributeDecl.TYPE_NMTOKEN: if (attributeDecl.list) { av = fAttValidatorNMTOKENS; } else { av = fAttValidatorNMTOKEN; } break; } av.normalize(element, attributeDecl.name, attValue, attributeDecl.type, attributeDecl.enumeration); } /** Character data in content. */ private void charDataInContent() { if (DEBUG_ELEMENT_CHILDREN) { System.out.println("charDataInContent()"); } if (fElementChildren.length <= fElementChildrenLength) { QName[] newarray = new QName[fElementChildren.length * 2]; System.arraycopy(fElementChildren, 0, newarray, 0, fElementChildren.length); fElementChildren = newarray; } QName qname = fElementChildren[fElementChildrenLength]; if (qname == null) { for (int i = fElementChildrenLength; i < fElementChildren.length; i++) { fElementChildren[i] = new QName(); } qname = fElementChildren[fElementChildrenLength]; } qname.clear(); fElementChildrenLength++; } // charDataInCount() /** * Check that the content of an element is valid. * <p> * This is the method of primary concern to the validator. This method is called * upon the scanner reaching the end tag of an element. 
At that time, the * element's children must be structurally validated, so it calls this method. * The index of the element being checked (in the decl pool), is provided as * well as an array of element name indexes of the children. The validator must * confirm that this element can have these children in this order. * <p> * This can also be called to do 'what if' testing of content models just to see * if they would be valid. * <p> * Note that the element index is an index into the element decl pool, whereas * the children indexes are name indexes, i.e. into the string pool. * <p> * A value of -1 in the children array indicates a PCDATA node. All other * indexes will be positive and represent child elements. The count can be * zero, since some elements have the EMPTY content model and that must be * confirmed. * * @param elementIndex The index within the <code>ElementDeclPool</code> of this * element. * @param childCount The number of entries in the <code>children</code> array. * @param children The children of this element. Each integer is an index within * the <code>StringPool</code> of the child element name. An index * of -1 is used to indicate an occurrence of non-whitespace character * data. * * @return The value -1 if fully valid, else the 0 based index of the child * that first failed. If the value returned is equal to the number * of children, then additional content is required to reach a valid * ending state. * * @exception Exception Thrown on error. */ private int checkContent(int elementIndex, QName[] children, int childOffset, int childCount) throws Exception { // Get the element name index from the element // REVISIT: Validation final int elementType = fCurrentElement.rawname; if (DEBUG_PRINT_CONTENT) { String strTmp = fStringPool.toString(elementType); System.out.println("Name: "+strTmp+", "+ "Count: "+childCount+", "+ "ContentSpecType: " +fCurrentContentSpecType); //+getContentSpecAsString(elementIndex)); for (int index = childOffset; index < (childOffset+childCount) && index < 10; index++) { if (index == 0) { System.out.print(" ("); } String childName = (children[index].localpart == -1) ? "#PCDATA" : fStringPool.toString(children[index].localpart); if (index + 1 == childCount) { System.out.println(childName + ")"); } else if (index + 1 == 10) { System.out.println(childName + ",...)"); } else { System.out.print(childName + ","); } } } // Get out the content spec for this element final int contentType = fCurrentContentSpecType; // debugging //System.out.println("~~~~~~in checkContent, fCurrentContentSpecType : " + fCurrentContentSpecType); // // Deal with the possible types of content. We try to optimized here // by dealing specially with content models that don't require the // full DFA treatment. // if (contentType == XMLElementDecl.TYPE_EMPTY) { // // If the child count is greater than zero, then this is // an error right off the bat at index 0. // if (childCount != 0) { return 0; } } else if (contentType == XMLElementDecl.TYPE_ANY) { // // This one is open game so we don't pass any judgement on it // at all. Its assumed to fine since it can hold anything. 
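// For MIXED and CHILDREN content the check is delegated to the element's
// XMLContentModel: validateContent returns -1 when the children are valid,
// otherwise the zero-based index of the first child in error.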
// } else if (contentType == XMLElementDecl.TYPE_MIXED || contentType == XMLElementDecl.TYPE_CHILDREN) { // Get the content model for this element, faulting it in if needed XMLContentModel cmElem = null; try { cmElem = getContentModel(elementIndex); return cmElem.validateContent(children, childOffset, childCount); } catch(CMException excToCatch) { // REVISIT - Translate the caught exception to the protected error API int majorCode = excToCatch.getErrorCode(); fErrorReporter.reportError(fErrorReporter.getLocator(), ImplementationMessages.XERCES_IMPLEMENTATION_DOMAIN, majorCode, 0, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); } } else if (contentType == -1) { reportRecoverableXMLError(XMLMessages.MSG_ELEMENT_NOT_DECLARED, XMLMessages.VC_ELEMENT_VALID, elementType); } else if (contentType == XMLElementDecl.TYPE_SIMPLE ) { XMLContentModel cmElem = null; try { // REVISIT: this might not be right //cmElem = getContentModel(elementIndex); //fTempQName.rawname = fTempQName.localpart = fStringPool.addString(fDatatypeBuffer.toString()); //return cmElem.validateContent(1, new QName[] { fTempQName }); fGrammar.getElementDecl(elementIndex, fTempElementDecl); DatatypeValidator dv = fTempElementDecl.datatypeValidator; if (dv == null) { System.out.println("Internal Error: this element have a simpletype "+ "but no datatypevalidator was found, element "+fTempElementDecl.name +",locapart: "+fStringPool.toString(fTempElementDecl.name.localpart)); } else { dv.validate(fDatatypeBuffer.toString(), null); } } //catch (CMException cme) { // System.out.println("Internal Error in datatype validation"); //} catch (InvalidDatatypeValueException idve) { fErrorReporter.reportError(fErrorReporter.getLocator(), SchemaMessageProvider.SCHEMA_DOMAIN, SchemaMessageProvider.DatatypeError, SchemaMessageProvider.MSG_NONE, new Object [] { idve.getMessage() }, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } /* boolean DEBUG_DATATYPES = false; if (DEBUG_DATATYPES) { System.out.println("Checking content of datatype"); String strTmp = fStringPool.toString(elementTypeIndex); int contentSpecIndex = fElementDeclPool.getContentSpec(elementIndex); XMLContentSpec csn = new XMLContentSpec(); fElementDeclPool.getContentSpecNode(contentSpecIndex, csn); String contentSpecString = fStringPool.toString(csn.value); System.out.println ( "Name: " + strTmp + ", Count: " + childCount + ", ContentSpec: " + contentSpecString ); for (int index = 0; index < childCount && index < 10; index++) { if (index == 0) System.out.print(" ("); String childName = (children[index] == -1) ? 
"#PCDATA" : fStringPool.toString(children[index]); if (index + 1 == childCount) System.out.println(childName + ")"); else if (index + 1 == 10) System.out.println(childName + ",...)"); else System.out.print(childName + ","); } } try { // REVISIT - integrate w/ error handling int contentSpecIndex = fElementDeclPool.getContentSpec(elementIndex); XMLContentSpec csn = new XMLContentSpec(); fElementDeclPool.getContentSpecNode(contentSpecIndex, csn); String type = fStringPool.toString(csn.value); DatatypeValidator v = fDatatypeRegistry.getValidatorFor(type); if (v != null) v.validate(fDatatypeBuffer.toString()); else System.out.println("No validator for datatype "+type); } catch (InvalidDatatypeValueException idve) { System.out.println("Incorrect datatype: "+idve.getMessage()); } catch (Exception e) { e.printStackTrace(); System.out.println("Internal error in datatype validation"); } */ } else { fErrorReporter.reportError(fErrorReporter.getLocator(), ImplementationMessages.XERCES_IMPLEMENTATION_DOMAIN, ImplementationMessages.VAL_CST, 0, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); } // We succeeded return -1; } // checkContent(int,int,int[]):int /** * Check that all ID references were to ID attributes present in the document. * <p> * This method is a convenience call that allows the validator to do any id ref * checks above and beyond those done by the scanner. The scanner does the checks * specificied in the XML spec, i.e. that ID refs refer to ids which were * eventually defined somewhere in the document. * <p> * If the validator is for a Schema perhaps, which defines id semantics beyond * those of the XML specificiation, this is where that extra checking would be * done. For most validators, this is a no-op. * * @exception Exception Thrown on error. */ private void checkIdRefs() throws Exception { if (fIdRefs == null) return; Enumeration en = fIdRefs.keys(); while (en.hasMoreElements()) { Integer key = (Integer)en.nextElement(); if (fIdDefs == null || !fIdDefs.containsKey(key)) { Object[] args = { fStringPool.toString(key.intValue()) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_ELEMENT_WITH_ID_REQUIRED, XMLMessages.VC_IDREF, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } } // checkIdRefs() /** * Checks that all declared elements refer to declared elements * in their content models. This method calls out to the error * handler to indicate warnings. */ /*private void checkDeclaredElements() throws Exception { //****DEBUG**** if (DEBUG) print("(???) 
XMLValidator.checkDeclaredElements\n"); //****DEBUG**** for (int i = 0; i < fElementCount; i++) { int type = fGrammar.getContentSpecType(i); if (type == XMLElementDecl.TYPE_MIXED || type == XMLElementDecl.TYPE_CHILDREN) { int chunk = i >> CHUNK_SHIFT; int index = i & CHUNK_MASK; int contentSpecIndex = fContentSpec[chunk][index]; checkDeclaredElements(i, contentSpecIndex); } } } */ private void printChildren() { if (DEBUG_ELEMENT_CHILDREN) { System.out.print('['); for (int i = 0; i < fElementChildrenLength; i++) { System.out.print(' '); QName qname = fElementChildren[i]; if (qname != null) { System.out.print(fStringPool.toString(qname.rawname)); } else { System.out.print("null"); } if (i < fElementChildrenLength - 1) { System.out.print(", "); } System.out.flush(); } System.out.print(" ]"); System.out.println(); } } private void printStack() { if (DEBUG_ELEMENT_CHILDREN) { System.out.print('{'); for (int i = 0; i <= fElementDepth; i++) { System.out.print(' '); System.out.print(fElementChildrenOffsetStack[i]); if (i < fElementDepth) { System.out.print(", "); } System.out.flush(); } System.out.print(" }"); System.out.println(); } } // // Interfaces // /** * AttributeValidator. */ public interface AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValue, int attType, int enumHandle) throws Exception; } // interface AttributeValidator // // Classes // /** * AttValidatorCDATA. */ final class AttValidatorCDATA implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // Normalize attribute based upon attribute type... return attValueHandle; } } // class AttValidatorCDATA /** * AttValidatorID. */ final class AttValidatorID implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } if (!XMLCharacterProperties.validName(newAttValue)) { reportRecoverableXMLError(XMLMessages.MSG_ID_INVALID, XMLMessages.VC_ID, fStringPool.toString(attribute.rawname), newAttValue); } // // ID - check that the id value is unique within the document (V_TAG8) // if (element.rawname != -1 && !addId(attValueHandle)) { reportRecoverableXMLError(XMLMessages.MSG_ID_NOT_UNIQUE, XMLMessages.VC_ID, fStringPool.toString(attribute.rawname), newAttValue); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalong attribute definition. 
*/ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorID /** * AttValidatorIDREF. */ final class AttValidatorIDREF implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } if (!XMLCharacterProperties.validName(newAttValue)) { reportRecoverableXMLError(XMLMessages.MSG_IDREF_INVALID, XMLMessages.VC_IDREF, fStringPool.toString(attribute.rawname), newAttValue); } // // IDREF - remember the id value // if (element.rawname != -1) addIdRef(attValueHandle); } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorIDREF /** * AttValidatorIDREFS. */ final class AttValidatorIDREFS implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... 
// String attValue = fStringPool.toString(attValueHandle); StringTokenizer tokenizer = new StringTokenizer(attValue); StringBuffer sb = new StringBuffer(attValue.length()); boolean ok = true; if (tokenizer.hasMoreTokens()) { while (true) { String idName = tokenizer.nextToken(); if (fValidating) { if (!XMLCharacterProperties.validName(idName)) { ok = false; } // // IDREFS - remember the id values // if (element.rawname != -1) { addIdRef(fStringPool.addSymbol(idName)); } } sb.append(idName); if (!tokenizer.hasMoreTokens()) break; sb.append(' '); } } String newAttValue = sb.toString(); if (fValidating && (!ok || newAttValue.length() == 0)) { reportRecoverableXMLError(XMLMessages.MSG_IDREFS_INVALID, XMLMessages.VC_IDREF, fStringPool.toString(attribute.rawname), newAttValue); } if (!newAttValue.equals(attValue)) { attValueHandle = fStringPool.addString(newAttValue); if (fValidating && invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorIDREFS /** * AttValidatorENTITY. */ final class AttValidatorENTITY implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } // // ENTITY - check that the value is an unparsed entity name (V_TAGa) // if (!fEntityHandler.isUnparsedEntity(attValueHandle)) { reportRecoverableXMLError(XMLMessages.MSG_ENTITY_INVALID, XMLMessages.VC_ENTITY_NAME, fStringPool.toString(attribute.rawname), newAttValue); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorENTITY /** * AttValidatorENTITIES. */ final class AttValidatorENTITIES implements AttributeValidator { // // AttributeValidator methods // /** Normalize. 
*/ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); StringTokenizer tokenizer = new StringTokenizer(attValue); StringBuffer sb = new StringBuffer(attValue.length()); boolean ok = true; if (tokenizer.hasMoreTokens()) { while (true) { String entityName = tokenizer.nextToken(); // // ENTITIES - check that each value is an unparsed entity name (V_TAGa) // if (fValidating && !fEntityHandler.isUnparsedEntity(fStringPool.addSymbol(entityName))) { ok = false; } sb.append(entityName); if (!tokenizer.hasMoreTokens()) { break; } sb.append(' '); } } String newAttValue = sb.toString(); if (fValidating && (!ok || newAttValue.length() == 0)) { reportRecoverableXMLError(XMLMessages.MSG_ENTITIES_INVALID, XMLMessages.VC_ENTITY_NAME, fStringPool.toString(attribute.rawname), newAttValue); } if (!newAttValue.equals(attValue)) { attValueHandle = fStringPool.addString(newAttValue); if (fValidating && invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorENTITIES /** * AttValidatorNMTOKEN. */ final class AttValidatorNMTOKEN implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } if (!XMLCharacterProperties.validNmtoken(newAttValue)) { reportRecoverableXMLError(XMLMessages.MSG_NMTOKEN_INVALID, XMLMessages.VC_NAME_TOKEN, fStringPool.toString(attribute.rawname), newAttValue); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorNMTOKEN /** * AttValidatorNMTOKENS. 
*/ final class AttValidatorNMTOKENS implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); StringTokenizer tokenizer = new StringTokenizer(attValue); StringBuffer sb = new StringBuffer(attValue.length()); boolean ok = true; if (tokenizer.hasMoreTokens()) { while (true) { String nmtoken = tokenizer.nextToken(); if (fValidating && !XMLCharacterProperties.validNmtoken(nmtoken)) { ok = false; } sb.append(nmtoken); if (!tokenizer.hasMoreTokens()) { break; } sb.append(' '); } } String newAttValue = sb.toString(); if (fValidating && (!ok || newAttValue.length() == 0)) { reportRecoverableXMLError(XMLMessages.MSG_NMTOKENS_INVALID, XMLMessages.VC_NAME_TOKEN, fStringPool.toString(attribute.rawname), newAttValue); } if (!newAttValue.equals(attValue)) { attValueHandle = fStringPool.addString(newAttValue); if (fValidating && invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorNMTOKENS /** * AttValidatorNOTATION. */ final class AttValidatorNOTATION implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } // // NOTATION - check that the value is in the AttDef enumeration (V_TAGo) // if (!fStringPool.stringInList(enumHandle, attValueHandle)) { reportRecoverableXMLError(XMLMessages.MSG_ATTRIBUTE_VALUE_NOT_IN_LIST, XMLMessages.VC_NOTATION_ATTRIBUTES, fStringPool.toString(attribute.rawname), newAttValue, fStringPool.stringListAsString(enumHandle)); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? 
if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorNOTATION /** * AttValidatorENUMERATION. */ final class AttValidatorENUMERATION implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } // // ENUMERATION - check that value is in the AttDef enumeration (V_TAG9) // if (!fStringPool.stringInList(enumHandle, attValueHandle)) { reportRecoverableXMLError(XMLMessages.MSG_ATTRIBUTE_VALUE_NOT_IN_LIST, XMLMessages.VC_ENUMERATION, fStringPool.toString(attribute.rawname), newAttValue, fStringPool.stringListAsString(enumHandle)); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorENUMERATION } // class XMLValidator
src/org/apache/xerces/validators/common/XMLValidator.java
/* * The Apache Software License, Version 1.1 * * * Copyright (c) 1999,2000 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Xerces" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation and was * originally based on software copyright (c) 1999, International * Business Machines, Inc., http://www.apache.org. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ package org.apache.xerces.validators.common; import org.apache.xerces.framework.XMLAttrList; import org.apache.xerces.framework.XMLContentSpec; import org.apache.xerces.framework.XMLDocumentHandler; import org.apache.xerces.framework.XMLDocumentScanner; import org.apache.xerces.framework.XMLErrorReporter; import org.apache.xerces.readers.DefaultEntityHandler; import org.apache.xerces.readers.XMLEntityHandler; import org.apache.xerces.utils.ChunkyCharArray; import org.apache.xerces.utils.Hash2intTable; import org.apache.xerces.utils.NamespacesScope; import org.apache.xerces.utils.QName; import org.apache.xerces.utils.StringPool; import org.apache.xerces.utils.XMLCharacterProperties; import org.apache.xerces.utils.XMLMessages; import org.apache.xerces.utils.ImplementationMessages; import org.apache.xerces.parsers.DOMParser; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.EntityResolver; import org.xml.sax.Locator; import org.xml.sax.helpers.LocatorImpl; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import java.io.IOException; import java.util.Enumeration; import java.util.Hashtable; import java.util.StringTokenizer; import java.util.Vector; import org.apache.xerces.validators.dtd.DTDGrammar; import org.apache.xerces.validators.schema.SchemaGrammar; import org.apache.xerces.validators.schema.SchemaMessageProvider; import org.apache.xerces.validators.schema.SchemaSymbols; import org.apache.xerces.validators.schema.TraverseSchema; import org.apache.xerces.validators.datatype.DatatypeValidator; import org.apache.xerces.validators.datatype.InvalidDatatypeValueException; /** * This class is the super all-in-one validator used by the parser. * * @version $Id$ */ public final class XMLValidator implements DefaultEntityHandler.EventHandler, XMLEntityHandler.CharDataHandler, XMLDocumentScanner.EventHandler, NamespacesScope.NamespacesHandler { // // Constants // // debugging private static final boolean PRINT_EXCEPTION_STACK_TRACE = false; private static final boolean DEBUG_PRINT_ATTRIBUTES = false; private static final boolean DEBUG_PRINT_CONTENT = false; private static final boolean DEBUG_SCHEMA_VALIDATION = false; private static final boolean DEBUG_ELEMENT_CHILDREN = false; // Chunk size constants private static final int CHUNK_SHIFT = 8; // 2^8 = 256 private static final int CHUNK_SIZE = (1 << CHUNK_SHIFT); private static final int CHUNK_MASK = CHUNK_SIZE - 1; private static final int INITIAL_CHUNK_COUNT = (1 << (10 - CHUNK_SHIFT)); // 2^10 = 1k // // Data // // REVISIT: The data should be regrouped and re-organized so that // it's easier to find a meaningful field. // debugging // private static boolean DEBUG = false; // other private Hashtable fIdDefs = null; private Hashtable fIdRefs = null; private Object fNullValue = null; // attribute validators // REVISIT: Validation. A validator per element declaration and // attribute declaration is required to accomodate // Schema facets on simple types. 
private AttributeValidator fAttValidatorCDATA = null; private AttributeValidator fAttValidatorID = new AttValidatorID(); private AttributeValidator fAttValidatorIDREF = new AttValidatorIDREF(); private AttributeValidator fAttValidatorIDREFS = new AttValidatorIDREFS(); private AttributeValidator fAttValidatorENTITY = new AttValidatorENTITY(); private AttributeValidator fAttValidatorENTITIES = new AttValidatorENTITIES(); private AttributeValidator fAttValidatorNMTOKEN = new AttValidatorNMTOKEN(); private AttributeValidator fAttValidatorNMTOKENS = new AttValidatorNMTOKENS(); private AttributeValidator fAttValidatorNOTATION = new AttValidatorNOTATION(); private AttributeValidator fAttValidatorENUMERATION = new AttValidatorENUMERATION(); private AttributeValidator fAttValidatorDATATYPE = null; // Package access for use by AttributeValidator classes. StringPool fStringPool = null; boolean fValidating = false; boolean fInElementContent = false; int fStandaloneReader = -1; // settings private boolean fValidationEnabled = false; private boolean fDynamicValidation = false; private boolean fValidationEnabledByDynamic = false; private boolean fDynamicDisabledByValidation = false; private boolean fWarningOnDuplicateAttDef = false; private boolean fWarningOnUndeclaredElements = false; // declarations private int fDeclaration[]; private XMLErrorReporter fErrorReporter = null; private DefaultEntityHandler fEntityHandler = null; private QName fCurrentElement = new QName(); //REVISIT: validation private int[] fScopeStack = new int[8]; private int[] fGrammarNameSpaceIndexStack = new int[8]; private int[] fElementTypeStack = new int[8]; private int[] fElementEntityStack = new int[8]; private int[] fElementIndexStack = new int[8]; private int[] fContentSpecTypeStack = new int[8]; private QName[] fElementChildren = new QName[32]; private int fElementChildrenLength = 0; private int[] fElementChildrenOffsetStack = new int[32]; private int fElementDepth = -1; private boolean fNamespacesEnabled = false; private NamespacesScope fNamespacesScope = null; private int fNamespacesPrefix = -1; private QName fRootElement = new QName(); private int fAttrListHandle = -1; private int fCurrentElementEntity = -1; private int fCurrentElementIndex = -1; private int fCurrentContentSpecType = -1; private boolean fSeenDoctypeDecl = false; private final int TOP_LEVEL_SCOPE = -1; private int fCurrentScope = TOP_LEVEL_SCOPE; private int fCurrentSchemaURI = -1; private int fEmptyURI = - 1; private int fXsiPrefix = - 1; private int fXsiURI = -2; private Grammar fGrammar = null; private int fGrammarNameSpaceIndex = -1; private GrammarResolver fGrammarResolver = null; // state and stuff private boolean fScanningDTD = false; private XMLDocumentScanner fDocumentScanner = null; private boolean fCalledStartDocument = false; private XMLDocumentHandler fDocumentHandler = null; private XMLDocumentHandler.DTDHandler fDTDHandler = null; private boolean fSeenRootElement = false; private XMLAttrList fAttrList = null; private int fXMLLang = -1; private LocatorImpl fAttrNameLocator = null; private boolean fCheckedForSchema = false; private boolean fDeclsAreExternal = false; private StringPool.CharArrayRange fCurrentElementCharArrayRange = null; private char[] fCharRefData = null; private boolean fSendCharDataAsCharArray = false; private boolean fBufferDatatype = false; private StringBuffer fDatatypeBuffer = new StringBuffer(); private QName fTempQName = new QName(); private XMLAttributeDecl fTempAttDecl = new XMLAttributeDecl(); private XMLElementDecl 
fTempElementDecl = new XMLElementDecl(); //REVISIT: ericye, use this temp QName whenever we can!! private boolean fGrammarIsDTDGrammar = false; private boolean fGrammarIsSchemaGrammar = false; // symbols private int fEMPTYSymbol = -1; private int fANYSymbol = -1; private int fMIXEDSymbol = -1; private int fCHILDRENSymbol = -1; private int fCDATASymbol = -1; private int fIDSymbol = -1; private int fIDREFSymbol = -1; private int fIDREFSSymbol = -1; private int fENTITYSymbol = -1; private int fENTITIESSymbol = -1; private int fNMTOKENSymbol = -1; private int fNMTOKENSSymbol = -1; private int fNOTATIONSymbol = -1; private int fENUMERATIONSymbol = -1; private int fREQUIREDSymbol = -1; private int fFIXEDSymbol = -1; private int fDATATYPESymbol = -1; private int fEpsilonIndex = -1; // // Constructors // /** Constructs an XML validator. */ public XMLValidator(StringPool stringPool, XMLErrorReporter errorReporter, DefaultEntityHandler entityHandler, XMLDocumentScanner documentScanner) { // keep references fStringPool = stringPool; fErrorReporter = errorReporter; fEntityHandler = entityHandler; fDocumentScanner = documentScanner; fEmptyURI = fStringPool.addSymbol(""); fXsiURI = fStringPool.addSymbol(SchemaSymbols.URI_XSI); // initialize fAttrList = new XMLAttrList(fStringPool); entityHandler.setEventHandler(this); entityHandler.setCharDataHandler(this); fDocumentScanner.setEventHandler(this); init(); } // <init>(StringPool,XMLErrorReporter,DefaultEntityHandler,XMLDocumentScanner) public void setGrammarResolver(GrammarResolver grammarResolver){ fGrammarResolver = grammarResolver; } // // Public methods // // initialization /** Set char data processing preference and handlers. */ public void initHandlers(boolean sendCharDataAsCharArray, XMLDocumentHandler docHandler, XMLDocumentHandler.DTDHandler dtdHandler) { fSendCharDataAsCharArray = sendCharDataAsCharArray; fEntityHandler.setSendCharDataAsCharArray(fSendCharDataAsCharArray); fDocumentHandler = docHandler; fDTDHandler = dtdHandler; } // initHandlers(boolean,XMLDocumentHandler,XMLDocumentHandler.DTDHandler) /** Reset or copy. */ public void resetOrCopy(StringPool stringPool) throws Exception { fAttrList = new XMLAttrList(stringPool); resetCommon(stringPool); } /** Reset. */ public void reset(StringPool stringPool) throws Exception { fAttrList.reset(stringPool); resetCommon(stringPool); } // settings /** * Turning on validation/dynamic turns on validation if it is off, and * this is remembered. Turning off validation DISABLES validation/dynamic * if it is on. Turning off validation/dynamic DOES NOT turn off * validation if it was explicitly turned on, only if it was turned on * BECAUSE OF the call to turn validation/dynamic on. Turning on * validation will REENABLE and turn validation/dynamic back on if it * was disabled by a call that turned off validation while * validation/dynamic was enabled. */ public void setValidationEnabled(boolean flag) throws Exception { fValidationEnabled = flag; fValidationEnabledByDynamic = false; if (fValidationEnabled) { if (fDynamicDisabledByValidation) { fDynamicValidation = true; fDynamicDisabledByValidation = false; } } else if (fDynamicValidation) { fDynamicValidation = false; fDynamicDisabledByValidation = true; } fValidating = fValidationEnabled; } /** Returns true if validation is enabled. */ public boolean getValidationEnabled() { return fValidationEnabled; } /** Sets whether validation is dynamic. 
*/ public void setDynamicValidationEnabled(boolean flag) throws Exception { fDynamicValidation = flag; fDynamicDisabledByValidation = false; if (!fDynamicValidation) { if (fValidationEnabledByDynamic) { fValidationEnabled = false; fValidationEnabledByDynamic = false; } } else if (!fValidationEnabled) { fValidationEnabled = true; fValidationEnabledByDynamic = true; } fValidating = fValidationEnabled; } /** Returns true if validation is dynamic. */ public boolean getDynamicValidationEnabled() { return fDynamicValidation; } /** Sets whether namespaces are enabled. */ public void setNamespacesEnabled(boolean flag) { fNamespacesEnabled = flag; } /** Returns true if namespaces are enabled. */ public boolean getNamespacesEnabled() { return fNamespacesEnabled; } /** Sets whether duplicate attribute definitions signal a warning. */ public void setWarningOnDuplicateAttDef(boolean flag) { fWarningOnDuplicateAttDef = flag; } /** Returns true if duplicate attribute definitions signal a warning. */ public boolean getWarningOnDuplicateAttDef() { return fWarningOnDuplicateAttDef; } /** Sets whether undeclared elements signal a warning. */ public void setWarningOnUndeclaredElements(boolean flag) { fWarningOnUndeclaredElements = flag; } /** Returns true if undeclared elements signal a warning. */ public boolean getWarningOnUndeclaredElements() { return fWarningOnUndeclaredElements; } // // DefaultEntityHandler.EventHandler methods // /** Start entity reference. */ public void startEntityReference(int entityName, int entityType, int entityContext) throws Exception { fDocumentHandler.startEntityReference(entityName, entityType, entityContext); } /** End entity reference. */ public void endEntityReference(int entityName, int entityType, int entityContext) throws Exception { fDocumentHandler.endEntityReference(entityName, entityType, entityContext); } /** Send end of input notification. */ public void sendEndOfInputNotifications(int entityName, boolean moreToFollow) throws Exception { fDocumentScanner.endOfInput(entityName, moreToFollow); /*** if (fScanningDTD) { fDTDImporter.sendEndOfInputNotifications(entityName, moreToFollow); } /***/ } /** Send reader change notifications. */ public void sendReaderChangeNotifications(XMLEntityHandler.EntityReader reader, int readerId) throws Exception { fDocumentScanner.readerChange(reader, readerId); /*** if (fScanningDTD) { fDTDImporter.sendReaderChangeNotifications(reader, readerId); } /***/ } /** External entity standalone check. */ public boolean externalEntityStandaloneCheck() { return (fStandaloneReader != -1 && fValidating); } /** Return true if validating. */ public boolean getValidating() { return fValidating; } // // XMLEntityHandler.CharDataHandler methods // /** Process characters. */ public void processCharacters(char[] chars, int offset, int length) throws Exception { if (fValidating) { if (fInElementContent || fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY) { charDataInContent(); } if (fBufferDatatype) { fDatatypeBuffer.append(chars, offset, length); } } fDocumentHandler.characters(chars, offset, length); } /** Process characters. */ public void processCharacters(int data) throws Exception { if (fValidating) { if (fInElementContent || fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY) { charDataInContent(); } if (fBufferDatatype) { fDatatypeBuffer.append(fStringPool.toString(data)); } } fDocumentHandler.characters(data); } /** Process whitespace. 
*/ public void processWhitespace(char[] chars, int offset, int length) throws Exception { if (fInElementContent) { if (fStandaloneReader != -1 && fValidating && getElementDeclIsExternal(fCurrentElementIndex)) { reportRecoverableXMLError(XMLMessages.MSG_WHITE_SPACE_IN_ELEMENT_CONTENT_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION); } fDocumentHandler.ignorableWhitespace(chars, offset, length); } else { if (fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY) { charDataInContent(); } fDocumentHandler.characters(chars, offset, length); } } // processWhitespace(char[],int,int) /** Process whitespace. */ public void processWhitespace(int data) throws Exception { if (fInElementContent) { if (fStandaloneReader != -1 && fValidating && getElementDeclIsExternal(fCurrentElementIndex)) { reportRecoverableXMLError(XMLMessages.MSG_WHITE_SPACE_IN_ELEMENT_CONTENT_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION); } fDocumentHandler.ignorableWhitespace(data); } else { if (fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY) { charDataInContent(); } fDocumentHandler.characters(data); } } // processWhitespace(int) // // XMLDocumentScanner.EventHandler methods // /** Scans element type. */ public void scanElementType(XMLEntityHandler.EntityReader entityReader, char fastchar, QName element) throws Exception { if (!fNamespacesEnabled) { element.clear(); element.localpart = entityReader.scanName(fastchar); element.rawname = element.localpart; } else { entityReader.scanQName(fastchar, element); if (entityReader.lookingAtChar(':', false)) { fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_TWO_COLONS_IN_QNAME, XMLMessages.P5_INVALID_CHARACTER, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); entityReader.skipPastNmtoken(' '); } } } // scanElementType(XMLEntityHandler.EntityReader,char,QName) /** Scans expected element type. */ public boolean scanExpectedElementType(XMLEntityHandler.EntityReader entityReader, char fastchar, QName element) throws Exception { if (fCurrentElementCharArrayRange == null) { fCurrentElementCharArrayRange = fStringPool.createCharArrayRange(); } fStringPool.getCharArrayRange(fCurrentElement.rawname, fCurrentElementCharArrayRange); return entityReader.scanExpectedName(fastchar, fCurrentElementCharArrayRange); } // scanExpectedElementType(XMLEntityHandler.EntityReader,char,QName) /** Scans attribute name. */ public void scanAttributeName(XMLEntityHandler.EntityReader entityReader, QName element, QName attribute) throws Exception { if (!fSeenRootElement) { fSeenRootElement = true; rootElementSpecified(element); fStringPool.resetShuffleCount(); } if (!fNamespacesEnabled) { attribute.clear(); attribute.localpart = entityReader.scanName('='); attribute.rawname = attribute.localpart; } else { entityReader.scanQName('=', attribute); if (entityReader.lookingAtChar(':', false)) { fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_TWO_COLONS_IN_QNAME, XMLMessages.P5_INVALID_CHARACTER, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); entityReader.skipPastNmtoken(' '); } } } // scanAttributeName(XMLEntityHandler.EntityReader,QName,QName) /** Call start document. */ public void callStartDocument() throws Exception { if (!fCalledStartDocument) { fDocumentHandler.startDocument(); fCalledStartDocument = true; } } /** Call end document. */ public void callEndDocument() throws Exception { if (fCalledStartDocument) { fDocumentHandler.endDocument(); } } /** Call XML declaration. 
*/ public void callXMLDecl(int version, int encoding, int standalone) throws Exception { fDocumentHandler.xmlDecl(version, encoding, standalone); } public void callStandaloneIsYes() throws Exception { // standalone = "yes". said XMLDocumentScanner. fStandaloneReader = fEntityHandler.getReaderId() ; } /** Call text declaration. */ public void callTextDecl(int version, int encoding) throws Exception { fDocumentHandler.textDecl(version, encoding); } /** * Signal the scanning of an element name in a start element tag. * * @param element Element name scanned. */ public void element(QName element) throws Exception { fAttrListHandle = -1; } /** * Signal the scanning of an attribute associated to the previous * start element tag. * * @param element Element name scanned. * @param attrName Attribute name scanned. * @param attrValue The string pool index of the attribute value. */ public boolean attribute(QName element, QName attrName, int attrValue) throws Exception { if (fAttrListHandle == -1) { fAttrListHandle = fAttrList.startAttrList(); } // if fAttrList.addAttr returns -1, indicates duplicate att in start tag of an element. // specified: true, search : true return fAttrList.addAttr(attrName, attrValue, fCDATASymbol, true, true) == -1; } /** Call start element. */ public void callStartElement(QName element) throws Exception { if ( DEBUG_SCHEMA_VALIDATION ) System.out.println("\n=======StartElement : " + fStringPool.toString(element.localpart)); // // Check after all specified attrs are scanned // (1) report error for REQUIRED attrs that are missing (V_TAGc) // (2) add default attrs (FIXED and NOT_FIXED) // if (!fSeenRootElement) { fSeenRootElement = true; rootElementSpecified(element); fStringPool.resetShuffleCount(); } fCheckedForSchema = true; if (fNamespacesEnabled) { bindNamespacesToElementAndAttributes(element, fAttrList); } validateElementAndAttributes(element, fAttrList); if (fAttrListHandle != -1) { fAttrList.endAttrList(); } fDocumentHandler.startElement(element, fAttrList, fAttrListHandle); fAttrListHandle = -1; //before we increment the element depth, add this element's QName to its enclosing element 's children list fElementDepth++; //if (fElementDepth >= 0) { if (fValidating) { // push current length onto stack if (fElementChildrenOffsetStack.length < fElementDepth) { int newarray[] = new int[fElementChildrenOffsetStack.length * 2]; System.arraycopy(fElementChildrenOffsetStack, 0, newarray, 0, fElementChildrenOffsetStack.length); fElementChildrenOffsetStack = newarray; } fElementChildrenOffsetStack[fElementDepth] = fElementChildrenLength; // add this element to children if (fElementChildren.length <= fElementChildrenLength) { QName[] newarray = new QName[fElementChildrenLength * 2]; System.arraycopy(fElementChildren, 0, newarray, 0, fElementChildren.length); fElementChildren = newarray; } QName qname = fElementChildren[fElementChildrenLength]; if (qname == null) { for (int i = fElementChildrenLength; i < fElementChildren.length; i++) { fElementChildren[i] = new QName(); } qname = fElementChildren[fElementChildrenLength]; } qname.setValues(element); fElementChildrenLength++; if (DEBUG_ELEMENT_CHILDREN) { printChildren(); printStack(); } } // One more level of depth //fElementDepth++; ensureStackCapacity(fElementDepth); fCurrentElement.setValues(element); fCurrentElementEntity = fEntityHandler.getReaderId(); fElementTypeStack[fElementDepth] = fCurrentElement.rawname; fElementEntityStack[fElementDepth] = fCurrentElementEntity; fElementIndexStack[fElementDepth] = fCurrentElementIndex; 
fContentSpecTypeStack[fElementDepth] = fCurrentContentSpecType; //REVISIT: Validation if ( fCurrentElementIndex > -1 && fGrammarIsSchemaGrammar && fValidating) { fCurrentScope = ((SchemaGrammar) fGrammar).getElementDefinedScope(fCurrentElementIndex); } fScopeStack[fElementDepth] = fCurrentScope; fGrammarNameSpaceIndexStack[fElementDepth] = fGrammarNameSpaceIndex; } // callStartElement(QName) private void ensureStackCapacity ( int newElementDepth) { if (newElementDepth == fElementTypeStack.length) { int[] newStack = new int[newElementDepth * 2]; System.arraycopy(fScopeStack, 0, newStack, 0, newElementDepth); fScopeStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fGrammarNameSpaceIndexStack, 0, newStack, 0, newElementDepth); fGrammarNameSpaceIndexStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fElementTypeStack, 0, newStack, 0, newElementDepth); fElementTypeStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fElementEntityStack, 0, newStack, 0, newElementDepth); fElementEntityStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fElementIndexStack, 0, newStack, 0, newElementDepth); fElementIndexStack = newStack; newStack = new int[newElementDepth * 2]; System.arraycopy(fContentSpecTypeStack, 0, newStack, 0, newElementDepth); fContentSpecTypeStack = newStack; } } /** Call end element. */ public void callEndElement(int readerId) throws Exception { if ( DEBUG_SCHEMA_VALIDATION ) System.out.println("=======EndElement : " + fStringPool.toString(fCurrentElement.localpart)+"\n"); int prefixIndex = fCurrentElement.prefix; // REVISIT: Validation int elementType = fCurrentElement.rawname; if (fCurrentElementEntity != readerId) { fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_ELEMENT_ENTITY_MISMATCH, XMLMessages.P78_NOT_WELLFORMED, new Object[] { fStringPool.toString(elementType) }, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); } fElementDepth--; if (fValidating) { int elementIndex = fCurrentElementIndex; if (elementIndex != -1 && fCurrentContentSpecType != -1) { QName children[] = fElementChildren; int childrenOffset = fElementChildrenOffsetStack[fElementDepth + 1] + 1; int childrenLength = fElementChildrenLength - childrenOffset; if (DEBUG_ELEMENT_CHILDREN) { System.out.println("endElement("+fStringPool.toString(fCurrentElement.rawname)+')'); System.out.print("offset: "); System.out.print(childrenOffset); System.out.print(", length: "); System.out.print(childrenLength); System.out.println(); printChildren(); printStack(); } int result = checkContent(elementIndex, children, childrenOffset, childrenLength); if ( DEBUG_SCHEMA_VALIDATION ) System.out.println("!!!!!!!!In XMLValidator, the return value from checkContent : " + result); if (result != -1) { int majorCode = result != childrenLength ? 
XMLMessages.MSG_CONTENT_INVALID : XMLMessages.MSG_CONTENT_INCOMPLETE; fGrammar.getElementDecl(elementIndex, fTempElementDecl); reportRecoverableXMLError(majorCode, 0, fStringPool.toString(elementType), XMLContentSpec.toString(fGrammar, fStringPool, fTempElementDecl.contentSpecIndex));// REVISIT: getContentSpecAsString(elementIndex)); } } fElementChildrenLength = fElementChildrenOffsetStack[fElementDepth + 1] + 1; } fDocumentHandler.endElement(fCurrentElement); if (fNamespacesEnabled) { fNamespacesScope.decreaseDepth(); } // now pop this element off the top of the element stack //if (fElementDepth-- < 0) { if (fElementDepth < -1) { throw new RuntimeException("FWK008 Element stack underflow"); } if (fElementDepth < 0) { fCurrentElement.clear(); fCurrentElementEntity = -1; fCurrentElementIndex = -1; fCurrentContentSpecType = -1; fInElementContent = false; // // Check after document is fully parsed // (1) check that there was an element with a matching id for every // IDREF and IDREFS attr (V_IDREF0) // if (fValidating && fIdRefs != null) { checkIdRefs(); } return; } //restore enclosing element to all the "current" variables // REVISIT: Validation. This information needs to be stored. fCurrentElement.prefix = -1; fCurrentElement.localpart = fElementTypeStack[fElementDepth]; fCurrentElement.rawname = fElementTypeStack[fElementDepth]; fCurrentElementEntity = fElementEntityStack[fElementDepth]; fCurrentElementIndex = fElementIndexStack[fElementDepth]; fCurrentContentSpecType = fContentSpecTypeStack[fElementDepth]; //REVISIT: Validation fCurrentScope = fScopeStack[fElementDepth]; //if ( DEBUG_SCHEMA_VALIDATION ) { /**** System.out.println("+++++ currentElement : " + fStringPool.toString(elementType)+ "\n fCurrentElementIndex : " + fCurrentElementIndex + "\n fCurrentScope : " + fCurrentScope + "\n fCurrentContentSpecType : " + fCurrentContentSpecType + "\n++++++++++++++++++++++++++++++++++++++++++++++++" ); /****/ //} // if enclosing element's Schema is different, need to switch "context" if ( fGrammarNameSpaceIndex != fGrammarNameSpaceIndexStack[fElementDepth] ) { fGrammarNameSpaceIndex = fGrammarNameSpaceIndexStack[fElementDepth]; switchGrammar(fGrammarNameSpaceIndex); } if (fValidating) { fBufferDatatype = false; } fInElementContent = (fCurrentContentSpecType == XMLElementDecl.TYPE_CHILDREN); } // callEndElement(int) /** Call start CDATA section. */ public void callStartCDATA() throws Exception { fDocumentHandler.startCDATA(); } /** Call end CDATA section. */ public void callEndCDATA() throws Exception { fDocumentHandler.endCDATA(); } /** Call characters. */ public void callCharacters(int ch) throws Exception { if (fCharRefData == null) { fCharRefData = new char[2]; } int count = (ch < 0x10000) ? 1 : 2; if (count == 1) { fCharRefData[0] = (char)ch; } else { fCharRefData[0] = (char)(((ch-0x00010000)>>10)+0xd800); fCharRefData[1] = (char)(((ch-0x00010000)&0x3ff)+0xdc00); } if (fValidating && (fInElementContent || fCurrentContentSpecType == XMLElementDecl.TYPE_EMPTY)) { charDataInContent(); } if (fSendCharDataAsCharArray) { fDocumentHandler.characters(fCharRefData, 0, count); } else { int index = fStringPool.addString(new String(fCharRefData, 0, count)); fDocumentHandler.characters(index); } } // callCharacters(int) /** Call processing instruction. */ public void callProcessingInstruction(int target, int data) throws Exception { fDocumentHandler.processingInstruction(target, data); } /** Call comment. 
*/ public void callComment(int comment) throws Exception { fDocumentHandler.comment(comment); } // // NamespacesScope.NamespacesHandler methods // /** Start a new namespace declaration scope. */ public void startNamespaceDeclScope(int prefix, int uri) throws Exception { fDocumentHandler.startNamespaceDeclScope(prefix, uri); } /** End a namespace declaration scope. */ public void endNamespaceDeclScope(int prefix) throws Exception { fDocumentHandler.endNamespaceDeclScope(prefix); } // attributes /** Normalize attribute value. */ public int normalizeAttValue(QName element, QName attribute, int attValue, int attType, boolean list, int enumHandle) throws Exception { AttributeValidator av = getValidatorForAttType(attType, list); if (av != null) { return av.normalize(element, attribute, attValue, attType, enumHandle); } return -1; } // normalizeAttValue(QName,QName,int,int,boolean,int):int // other /** Sets the root element. */ public void setRootElementType(QName rootElement) { fRootElement.setValues(rootElement); } /** * Returns true if the element declaration is external. * <p> * <strong>Note:</strong> This method is primarilly useful for * DTDs with internal and external subsets. */ private boolean getElementDeclIsExternal(int elementIndex) { /*if (elementIndex < 0 || elementIndex >= fElementCount) { return false; } int chunk = elementIndex >> CHUNK_SHIFT; int index = elementIndex & CHUNK_MASK; return (fElementDeclIsExternal[chunk][index] != 0); */ if (fGrammarIsDTDGrammar ) { return ((DTDGrammar) fGrammar).getElementDeclIsExternal(elementIndex); } return false; } /** Returns the content spec type for an element index. */ public int getContentSpecType(int elementIndex) { int contentSpecType = -1; if ( elementIndex > -1) { if ( fGrammar.getElementDecl(elementIndex,fTempElementDecl) ) { contentSpecType = fTempElementDecl.type; } } return contentSpecType; } /** Returns the content spec handle for an element index. */ public int getContentSpecHandle(int elementIndex) { int contentSpecHandle = -1; if ( elementIndex > -1) { if ( fGrammar.getElementDecl(elementIndex,fTempElementDecl) ) { contentSpecHandle = fTempElementDecl.contentSpecIndex; } } return contentSpecHandle; } // // Protected methods // // error reporting /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode) throws Exception { fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, null, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int) /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode, int stringIndex1) throws Exception { Object[] args = { fStringPool.toString(stringIndex1) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,int) /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode, String string1) throws Exception { Object[] args = { string1 }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,String) /** Report a recoverable xml error. 
*/ protected void reportRecoverableXMLError(int majorCode, int minorCode, int stringIndex1, int stringIndex2) throws Exception { Object[] args = { fStringPool.toString(stringIndex1), fStringPool.toString(stringIndex2) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,int,int) /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode, String string1, String string2) throws Exception { Object[] args = { string1, string2 }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,String,String) /** Report a recoverable xml error. */ protected void reportRecoverableXMLError(int majorCode, int minorCode, String string1, String string2, String string3) throws Exception { Object[] args = { string1, string2, string3 }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, majorCode, minorCode, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } // reportRecoverableXMLError(int,int,String,String,String) // content spec /** * Returns information about which elements can be placed at a particular point * in the passed element's content model. * <p> * Note that the incoming content model to test must be valid at least up to * the insertion point. If not, then -1 will be returned and the info object * will not have been filled in. * <p> * If, on return, the info.isValidEOC flag is set, then the 'insert after' * elemement is a valid end of content, i.e. nothing needs to be inserted * after it to make the parent element's content model valid. * * @param elementIndex The index within the <code>ElementDeclPool</code> of the * element which is being querying. * @param fullyValid Only return elements that can be inserted and still * maintain the validity of subsequent elements past the * insertion point (if any). If the insertion point is at * the end, and this is true, then only elements that can * be legal final states will be returned. * @param info An object that contains the required input data for the method, * and which will contain the output information if successful. * * @return The value -1 if fully valid, else the 0 based index of the child * that first failed before the insertion point. If the value * returned is equal to the number of children, then the specified * children are valid but additional content is required to reach a * valid ending state. * * @exception Exception Thrown on error. * * @see InsertableElementsInfo */ protected int whatCanGoHere(int elementIndex, boolean fullyValid, InsertableElementsInfo info) throws Exception { // // Do some basic sanity checking on the info packet. First, make sure // that insertAt is not greater than the child count. It can be equal, // which means to get appendable elements, but not greater. Or, if // the current children array is null, that's bad too. // // Since the current children array must have a blank spot for where // the insert is going to be, the child count must always be at least // one. // // Make sure that the child count is not larger than the current children // array. It can be equal, which means get appendable elements, but not // greater. 
// if (info.insertAt > info.childCount || info.curChildren == null || info.childCount < 1 || info.childCount > info.curChildren.length) { fErrorReporter.reportError(fErrorReporter.getLocator(), ImplementationMessages.XERCES_IMPLEMENTATION_DOMAIN, ImplementationMessages.VAL_WCGHI, 0, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); } int retVal = 0; try { // Get the content model for this element final XMLContentModel cmElem = getContentModel(elementIndex); // And delegate this call to it retVal = cmElem.whatCanGoHere(fullyValid, info); } catch (CMException excToCatch) { // REVISIT - Translate caught error to the protected error handler interface int majorCode = excToCatch.getErrorCode(); fErrorReporter.reportError(fErrorReporter.getLocator(), ImplementationMessages.XERCES_IMPLEMENTATION_DOMAIN, majorCode, 0, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); throw excToCatch; } return retVal; } // whatCanGoHere(int,boolean,InsertableElementsInfo):int // attribute information /** Protected for use by AttributeValidator classes. */ protected boolean getAttDefIsExternal(QName element, QName attribute) { int attDefIndex = getAttDef(element, attribute); if (fGrammarIsDTDGrammar ) { return ((DTDGrammar) fGrammar).getAttributeDeclIsExternal(attDefIndex); } return false; } /** addId. */ protected boolean addId(int idIndex) { Integer key = new Integer(idIndex); if (fIdDefs == null) { fIdDefs = new Hashtable(); } else if (fIdDefs.containsKey(key)) { return false; } if (fNullValue == null) { fNullValue = new Object(); } fIdDefs.put(key, fNullValue/*new Integer(elementType)*/); return true; } // addId(int):boolean /** addIdRef. */ protected void addIdRef(int idIndex) { Integer key = new Integer(idIndex); if (fIdDefs != null && fIdDefs.containsKey(key)) { return; } if (fIdRefs == null) { fIdRefs = new Hashtable(); } else if (fIdRefs.containsKey(key)) { return; } if (fNullValue == null) { fNullValue = new Object(); } fIdRefs.put(key, fNullValue/*new Integer(elementType)*/); } // addIdRef(int) // // Private methods // // other /** Returns true if using a standalone reader. */ private boolean usingStandaloneReader() { return fStandaloneReader == -1 || fEntityHandler.getReaderId() == fStandaloneReader; } /** Returns a locator implementation. */ private LocatorImpl getLocatorImpl(LocatorImpl fillin) { Locator here = fErrorReporter.getLocator(); if (fillin == null) return new LocatorImpl(here); fillin.setPublicId(here.getPublicId()); fillin.setSystemId(here.getSystemId()); fillin.setLineNumber(here.getLineNumber()); fillin.setColumnNumber(here.getColumnNumber()); return fillin; } // getLocatorImpl(LocatorImpl):LocatorImpl // content models /** * This method will handle the querying of the content model for a * particular element. If the element does not have a content model, then * it will be created. */ private XMLContentModel getContentModel(int elementIndex) throws CMException { // See if a content model already exists first XMLContentModel cmRet = getElementContentModel(elementIndex); // If we have one, just return that. Otherwise, gotta create one if (cmRet != null) { return cmRet; } // Get the type of content this element has final int contentSpec = getContentSpecType(elementIndex); // And create the content model according to the spec type if (contentSpec == XMLElementDecl.TYPE_MIXED) { // // Just create a mixel content model object. This type of // content model is optimized for mixed content validation. 
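            // For illustration (not tied to a specific declaration): given a DTD
            // declaration such as (#PCDATA | a | b)*, a mixed content model accepts
            // character data plus any number of a and b children in any order.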
// //REVISIT, could not compile // XMLContentSpec specNode = new XMLContentSpec(); // int contentSpecIndex = getContentSpecHandle(elementIndex); // makeContentList(contentSpecIndex, specNode); // cmRet = new MixedContentModel(fCount, fContentList); } else if (contentSpec == XMLElementDecl.TYPE_CHILDREN) { // // This method will create an optimal model for the complexity // of the element's defined model. If its simple, it will create // a SimpleContentModel object. If its a simple list, it will // create a SimpleListContentModel object. If its complex, it // will create a DFAContentModel object. // //REVISIT: couldnot compile //cmRet = createChildModel(elementIndex); } else if (contentSpec == fDATATYPESymbol) { // cmRet = fSchemaImporter.createDatatypeContentModel(elementIndex); } else { throw new CMException(ImplementationMessages.VAL_CST); } // Add the new model to the content model for this element //REVISIT setContentModel(elementIndex, cmRet); return cmRet; } // getContentModel(int):XMLContentModel // initialization /** Reset pool. */ private void poolReset() { if (fIdDefs != null) { fIdDefs.clear(); } if (fIdRefs != null) { fIdRefs.clear(); } } // poolReset() /** Reset common. */ private void resetCommon(StringPool stringPool) throws Exception { fStringPool = stringPool; fValidating = fValidationEnabled; fValidationEnabledByDynamic = false; fDynamicDisabledByValidation = false; poolReset(); fCalledStartDocument = false; fStandaloneReader = -1; fElementChildrenLength = 0; fElementDepth = -1; fSeenRootElement = false; fSeenDoctypeDecl = false; fNamespacesScope = null; fNamespacesPrefix = -1; fRootElement.clear(); fAttrListHandle = -1; fCheckedForSchema = false; fCurrentScope = TOP_LEVEL_SCOPE; fCurrentSchemaURI = -1; fEmptyURI = - 1; fXsiPrefix = - 1; fGrammar = null; fGrammarNameSpaceIndex = -1; fGrammarResolver = null; fGrammarIsDTDGrammar = false; fGrammarIsSchemaGrammar = false; init(); } // resetCommon(StringPool) /** Initialize. 
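     * Interns the element content type names, attribute type names, and other
     * symbols used during validation (for example "EMPTY", "CDATA", "#REQUIRED",
     * "xml:lang") into the string pool.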
*/ private void init() { fEmptyURI = fStringPool.addSymbol(""); fXsiURI = fStringPool.addSymbol(SchemaSymbols.URI_XSI); fEMPTYSymbol = fStringPool.addSymbol("EMPTY"); fANYSymbol = fStringPool.addSymbol("ANY"); fMIXEDSymbol = fStringPool.addSymbol("MIXED"); fCHILDRENSymbol = fStringPool.addSymbol("CHILDREN"); fCDATASymbol = fStringPool.addSymbol("CDATA"); fIDSymbol = fStringPool.addSymbol("ID"); fIDREFSymbol = fStringPool.addSymbol("IDREF"); fIDREFSSymbol = fStringPool.addSymbol("IDREFS"); fENTITYSymbol = fStringPool.addSymbol("ENTITY"); fENTITIESSymbol = fStringPool.addSymbol("ENTITIES"); fNMTOKENSymbol = fStringPool.addSymbol("NMTOKEN"); fNMTOKENSSymbol = fStringPool.addSymbol("NMTOKENS"); fNOTATIONSymbol = fStringPool.addSymbol("NOTATION"); fENUMERATIONSymbol = fStringPool.addSymbol("ENUMERATION"); fREQUIREDSymbol = fStringPool.addSymbol("#REQUIRED"); fFIXEDSymbol = fStringPool.addSymbol("#FIXED"); fDATATYPESymbol = fStringPool.addSymbol("<<datatype>>"); fEpsilonIndex = fStringPool.addSymbol("<<CMNODE_EPSILON>>"); fXMLLang = fStringPool.addSymbol("xml:lang"); /** fEMPTYSymbol = XMLElementDecl.TYPE_EMPTY; fANYSymbol = XMLElementDecl.TYPE_ANY; fMIXEDSymbol = XMLElementDecl.TYPE_MIXED; fCHILDRENSymbol = XMLElementDecl.TYPE_CHILDREN; fCDATASymbol = XMLAttributeDecl.TYPE_CDATA; fIDSymbol = XMLAttributeDecl.TYPE_ID; fIDREFSymbol = XMLAttributeDecl.TYPE_IDREF; fIDREFSSymbol = XMLAttributeDecl.TYPE_IDREF; fENTITYSymbol = XMLAttributeDecl.TYPE_ENTITY; fENTITIESSymbol = XMLAttributeDecl.TYPE_ENTITY; fNMTOKENSymbol = XMLAttributeDecl.TYPE_NMTOKEN; fNMTOKENSSymbol = XMLAttributeDecl.TYPE_NMTOKEN; fNOTATIONSymbol = XMLAttributeDecl.TYPE_NOTATION; fENUMERATIONSymbol = XMLAttributeDecl.TYPE_ENUMERATION; fREQUIREDSymbol = XMLAttributeDecl.DEFAULT_TYPE_REQUIRED; fFIXEDSymbol = XMLAttributeDecl.DEFAULT_TYPE_FIXED; fDATATYPESymbol = XMLElementDecl.TYPE_SIMPLE; **/ } // init() // other // default attribute /** addDefaultAttributes. 
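     * <p>
     * Walks the attribute declarations for the element and, for each one: reports
     * missing REQUIRED attributes, checks that specified FIXED attributes carry
     * their declared value, and appends declared default values for attributes
     * that were not specified in the instance.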
*/ private int addDefaultAttributes(int elementIndex, XMLAttrList attrList, int attrIndex, boolean validationEnabled, boolean standalone) throws Exception { //System.out.println("XMLValidator#addDefaultAttributes"); //System.out.print(" "); //fGrammar.printAttributes(elementIndex); // // Check after all specified attrs are scanned // (1) report error for REQUIRED attrs that are missing (V_TAGc) // (2) check that FIXED attrs have matching value (V_TAGd) // (3) add default attrs (FIXED and NOT_FIXED) // fGrammar.getElementDecl(elementIndex,fTempElementDecl); //System.out.println("addDefaultAttributes: " + fStringPool.toString(fTempElementDecl.name.localpart)+ // "," + attrIndex + "," + validationEnabled); int elementNameIndex = fTempElementDecl.name.localpart; int attlistIndex = fGrammar.getFirstAttributeDeclIndex(elementIndex); int firstCheck = attrIndex; int lastCheck = -1; while (attlistIndex != -1) { //int adChunk = attlistIndex >> CHUNK_SHIFT; //int adIndex = attlistIndex & CHUNK_MASK; fGrammar.getAttributeDecl(attlistIndex, fTempAttDecl); // TO DO: For ericye Debug only /*** if (fTempAttDecl != null) { XMLElementDecl element = new XMLElementDecl(); fGrammar.getElementDecl(elementIndex, element); System.out.println("element: "+fStringPool.toString(element.name.localpart)); System.out.println("attlistIndex " + attlistIndex + "\n"+ "attName : '"+fStringPool.toString(fTempAttDecl.name.localpart) + "'\n" + "attType : "+fTempAttDecl.type + "\n" + "attDefaultType : "+fTempAttDecl.defaultType + "\n" + "attDefaultValue : '"+fTempAttDecl.defaultValue + "'\n" + attrList.getLength() +"\n" ); } /***/ int attPrefix = fTempAttDecl.name.prefix; int attName = fTempAttDecl.name.localpart; int attType = attributeTypeName(fTempAttDecl); int attDefType =fTempAttDecl.defaultType; int attValue = -1 ; if (fTempAttDecl.defaultValue != null ) { attValue = fStringPool.addSymbol(fTempAttDecl.defaultValue); } boolean specified = false; boolean required = attDefType == XMLAttributeDecl.DEFAULT_TYPE_REQUIRED; /**** if (fValidating && fGrammar != null && fGrammarIsDTDGrammar && attValue != -1) { normalizeAttValue(null, fTempAttDecl.name, attValue,attType,fTempAttDecl.list, fTempAttDecl.enumeration); } /****/ if (firstCheck != -1) { boolean cdata = attType == fCDATASymbol; if (!cdata || required || attValue != -1) { int i = attrList.getFirstAttr(firstCheck); while (i != -1 && (lastCheck == -1 || i <= lastCheck)) { //if (fStringPool.equalNames(attrList.getAttrName(i), attName)) { if ( fStringPool.equalNames(attrList.getAttrLocalpart(i), attName) && fStringPool.equalNames(attrList.getAttrURI(i), fTempAttDecl.name.uri) ) { if (validationEnabled && attDefType == XMLAttributeDecl.DEFAULT_TYPE_FIXED) { int alistValue = attrList.getAttValue(i); if (alistValue != attValue && !fStringPool.toString(alistValue).equals(fStringPool.toString(attValue))) { Object[] args = { fStringPool.toString(elementNameIndex), fStringPool.toString(attName), fStringPool.toString(alistValue), fStringPool.toString(attValue) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_FIXED_ATTVALUE_INVALID, XMLMessages.VC_FIXED_ATTRIBUTE_DEFAULT, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } specified = true; break; } i = attrList.getNextAttr(i); } } } if (!specified) { if (required) { if (validationEnabled) { Object[] args = { fStringPool.toString(elementNameIndex), fStringPool.toString(attName) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, 
XMLMessages.MSG_REQUIRED_ATTRIBUTE_NOT_SPECIFIED, XMLMessages.VC_REQUIRED_ATTRIBUTE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } else if (attValue != -1) { if (validationEnabled && standalone ) if ( fGrammarIsDTDGrammar && ((DTDGrammar) fGrammar).getAttributeDeclIsExternal(attlistIndex) ) { Object[] args = { fStringPool.toString(elementNameIndex), fStringPool.toString(attName) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_DEFAULTED_ATTRIBUTE_NOT_SPECIFIED, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } if (attType == fIDREFSymbol) { addIdRef(attValue); } else if (attType == fIDREFSSymbol) { StringTokenizer tokenizer = new StringTokenizer(fStringPool.toString(attValue)); while (tokenizer.hasMoreTokens()) { String idName = tokenizer.nextToken(); addIdRef(fStringPool.addSymbol(idName)); } } if (attrIndex == -1) { attrIndex = attrList.startAttrList(); } // REVISIT: Validation. What should the prefix be? fTempQName.setValues(attPrefix, attName, attName, fTempAttDecl.name.uri); int newAttr = attrList.addAttr(fTempQName, attValue, attType, false, false); if (lastCheck == -1) { lastCheck = newAttr; } } } attlistIndex = fGrammar.getNextAttributeDeclIndex(attlistIndex); } return attrIndex; } // addDefaultAttributes(int,XMLAttrList,int,boolean,boolean):int // content models /** Queries the content model for the specified element index. */ private XMLContentModel getElementContentModel(int elementIndex) throws CMException { XMLContentModel contentModel = null; if ( elementIndex > -1) { if ( fGrammar.getElementDecl(elementIndex,fTempElementDecl) ) { contentModel = fGrammar.getElementContentModel(elementIndex); } } //return fGrammar.getElementContentModel(elementIndex); return contentModel; } /** Sets the content model for the specified element index. */ private void setContentModel(int elementIndex, XMLContentModel cm) { // REVISIT: What's this method do? /*if (elementIndex < 0 || elementIndex >= fElementCount) { return; } int chunk = elementIndex >> CHUNK_SHIFT; int index = elementIndex & CHUNK_MASK; fContentModel[chunk][index] = cm; */ } // query attribute information /** Returns the validatator for an attribute type. 
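     * The shared validator instances are created lazily and cached; the
     * <code>list</code> flag selects the plural variants (IDREFS, ENTITIES,
     * NMTOKENS). Returns null when no validator applies to the given type.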
*/ private AttributeValidator getValidatorForAttType(int attType, boolean list) { if (attType == XMLAttributeDecl.TYPE_CDATA) { if (fAttValidatorCDATA == null) { fAttValidatorCDATA = new AttValidatorCDATA(); } return fAttValidatorCDATA; } if (attType == XMLAttributeDecl.TYPE_ID) { if (fAttValidatorID == null) { fAttValidatorID = new AttValidatorID(); } return fAttValidatorID; } if (attType == XMLAttributeDecl.TYPE_IDREF) { if (!list) { if (fAttValidatorIDREF == null) { fAttValidatorIDREF = new AttValidatorIDREF(); } return fAttValidatorIDREF; } else { if (fAttValidatorIDREFS == null) { fAttValidatorIDREFS = new AttValidatorIDREFS(); } return fAttValidatorIDREFS; } } if (attType == XMLAttributeDecl.TYPE_ENTITY) { if (!list) { if (fAttValidatorENTITY == null) { fAttValidatorENTITY = new AttValidatorENTITY(); } return fAttValidatorENTITY; } else{ if (fAttValidatorENTITIES == null) { fAttValidatorENTITIES = new AttValidatorENTITIES(); } return fAttValidatorENTITIES; } } if (attType == XMLAttributeDecl.TYPE_NMTOKEN) { if (!list) { if (fAttValidatorNMTOKEN == null) { fAttValidatorNMTOKEN = new AttValidatorNMTOKEN(); } return fAttValidatorNMTOKEN; } else{ if (fAttValidatorNMTOKENS == null) { fAttValidatorNMTOKENS = new AttValidatorNMTOKENS(); } return fAttValidatorNMTOKENS; } } if (attType == XMLAttributeDecl.TYPE_NOTATION) { if (fAttValidatorNOTATION == null) { fAttValidatorNOTATION = new AttValidatorNOTATION(); } return fAttValidatorNOTATION; } if (attType == XMLAttributeDecl.TYPE_ENUMERATION) { if (fAttValidatorENUMERATION == null) { fAttValidatorENUMERATION = new AttValidatorENUMERATION(); } return fAttValidatorENUMERATION; } if (attType == XMLAttributeDecl.TYPE_SIMPLE) { if (fAttValidatorDATATYPE == null) { fAttValidatorDATATYPE = null; //REVISIT : !!! used to be fSchemaImporter.createDatatypeAttributeValidator(); } //return fAttValidatorDATATYPE; } return null; //throw new RuntimeException("getValidatorForAttType(" + fStringPool.toString(attType) + ")"); } /** Returns an attribute definition for an element type. */ private int getAttDef(QName element, QName attribute) { if (fGrammar != null) { int scope = fCurrentScope; if (element.uri > -1) { scope = TOP_LEVEL_SCOPE; } int elementIndex = fGrammar.getElementDeclIndex(element.localpart,scope); if (elementIndex == -1) { return -1; } int attDefIndex = fGrammar.getFirstAttributeDeclIndex(elementIndex); while (attDefIndex != -1) { fGrammar.getAttributeDecl(attDefIndex, fTempAttDecl); if (fTempAttDecl.name.localpart == attribute.localpart && fTempAttDecl.name.uri == attribute.uri ) { return attDefIndex; } attDefIndex = fGrammar.getNextAttributeDeclIndex(attDefIndex); } } return -1; } // getAttDef(QName,QName) /** Returns an attribute definition for an element type. */ private int getAttDefByElementIndex(int elementIndex, QName attribute) { if (fGrammar != null && elementIndex > -1) { if (elementIndex == -1) { return -1; } int attDefIndex = fGrammar.getFirstAttributeDeclIndex(elementIndex); while (attDefIndex != -1) { fGrammar.getAttributeDecl(attDefIndex, fTempAttDecl); if (fTempAttDecl.name.localpart == attribute.localpart && fTempAttDecl.name.uri == attribute.uri ) { return attDefIndex; } attDefIndex = fGrammar.getNextAttributeDeclIndex(attDefIndex); } } return -1; } // getAttDef(QName,QName) // validation /** Root element specified. 
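     * Selects the default grammar from the resolver, disables validation under
     * dynamic validation when no grammar is available, checks the root element
     * name against the DOCTYPE declaration for DTD grammars, and sets up the
     * namespace scope (binding the "xmlns" and "xml" prefixes) when namespace
     * processing is enabled.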
*/ private void rootElementSpecified(QName rootElement) throws Exception { // this is what it used to be //if (fDynamicValidation && !fSeenDoctypeDecl) { //fValidating = false; //} if (fValidating) { // initialize the grammar to be the default one. if (fGrammar == null) { fGrammar = fGrammarResolver.getGrammar(""); //TO DO, for ericye debug only if (fGrammar == null && DEBUG_SCHEMA_VALIDATION) { System.out.println("Oops! no grammar is found for validation"); } if (fDynamicValidation && fGrammar==null) { fValidating = false; } if (fGrammar != null) { if (fGrammar instanceof DTDGrammar) { fGrammarIsDTDGrammar = true; fGrammarIsSchemaGrammar = false; } else if ( fGrammar instanceof SchemaGrammar ) { fGrammarIsSchemaGrammar = true; fGrammarIsDTDGrammar = false; } fGrammarNameSpaceIndex = fEmptyURI; } } if ( fGrammarIsDTDGrammar && ((DTDGrammar) fGrammar).getRootElementQName(fRootElement) ) { String root1 = fStringPool.toString(fRootElement.rawname); String root2 = fStringPool.toString(rootElement.rawname); if (!root1.equals(root2)) { reportRecoverableXMLError(XMLMessages.MSG_ROOT_ELEMENT_TYPE, XMLMessages.VC_ROOT_ELEMENT_TYPE, fRootElement.rawname, rootElement.rawname); } } } if (fNamespacesEnabled) { if (fNamespacesScope == null) { fNamespacesScope = new NamespacesScope(this); fNamespacesPrefix = fStringPool.addSymbol("xmlns"); fNamespacesScope.setNamespaceForPrefix(fNamespacesPrefix, -1); int xmlSymbol = fStringPool.addSymbol("xml"); int xmlNamespace = fStringPool.addSymbol("http://www.w3.org/XML/1998/namespace"); fNamespacesScope.setNamespaceForPrefix(xmlSymbol, xmlNamespace); } } } // rootElementSpecified(QName) /** Switchs to correct validating symbol tables when Schema changes.*/ private void switchGrammar(int newGrammarNameSpaceIndex) { Grammar tempGrammar = fGrammarResolver.getGrammar(fStringPool.toString(newGrammarNameSpaceIndex)); if (tempGrammar == null) { System.out.println(fStringPool.toString(newGrammarNameSpaceIndex) + " grammar not found"); //TO DO report error here } else { fGrammar = tempGrammar; if (fGrammar instanceof DTDGrammar) { fGrammarIsDTDGrammar = true; fGrammarIsSchemaGrammar = false; } else if ( fGrammar instanceof SchemaGrammar ) { fGrammarIsSchemaGrammar = true; fGrammarIsDTDGrammar = false; } } } /** Binds namespaces to the element and attributes. */ private void bindNamespacesToElementAndAttributes(QName element, XMLAttrList attrList) throws Exception { fNamespacesScope.increaseDepth(); Vector schemaCandidateURIs = null; Hashtable locationUriPairs = null; if (fValidating) { schemaCandidateURIs = new Vector(); locationUriPairs = new Hashtable(); } if (fAttrListHandle != -1) { int index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attName = attrList.getAttrName(index); int attPrefix = attrList.getAttrPrefix(index); if (fStringPool.equalNames(attName, fXMLLang)) { /*** // NOTE: This check is done in the validateElementsAndAttributes // method. 
fDocumentScanner.checkXMLLangAttributeValue(attrList.getAttValue(index)); /***/ } else if (fStringPool.equalNames(attName, fNamespacesPrefix)) { int uri = fStringPool.addSymbol(attrList.getAttValue(index)); fNamespacesScope.setNamespaceForPrefix(StringPool.EMPTY_STRING, uri); } else { if (attPrefix == fNamespacesPrefix) { int nsPrefix = attrList.getAttrLocalpart(index); int uri = fStringPool.addSymbol(attrList.getAttValue(index)); boolean seeXsi = false; fNamespacesScope.setNamespaceForPrefix(nsPrefix, uri); String attrValue = fStringPool.toString(attrList.getAttValue(index)); if (attrValue.equals(SchemaSymbols.URI_XSI)) { fXsiPrefix = nsPrefix; seeXsi = true; } if (fValidating && !seeXsi) { schemaCandidateURIs.addElement( fStringPool.toString(uri) ); } } } index = attrList.getNextAttr(index); } // if validating, walk through the list again to deal with "xsi:...." if (fValidating) { index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attName = attrList.getAttrName(index); int attPrefix = attrList.getAttrPrefix(index); if (fStringPool.equalNames(attName, fNamespacesPrefix)) { // REVISIT } else { if ( DEBUG_SCHEMA_VALIDATION ) { System.out.println("deal with XSI"); System.out.println("before find XSI: "+fStringPool.toString(attPrefix) +","+fStringPool.toString(fXsiPrefix) ); } if (attPrefix == fXsiPrefix && fXsiPrefix != -1 ) { if (DEBUG_SCHEMA_VALIDATION) { System.out.println("find XSI: "+fStringPool.toString(attPrefix) +","+fStringPool.toString(attName) ); } int localpart = attrList.getAttrLocalpart(index); if (localpart == fStringPool.addSymbol(SchemaSymbols.XSI_SCHEMALOCACTION)) { parseSchemaLocation(fStringPool.toString(attrList.getAttValue(index)), locationUriPairs); } else if (localpart == fStringPool.addSymbol(SchemaSymbols.XSI_NONAMESPACESCHEMALOCACTION)) { locationUriPairs.put(fStringPool.toString(attrList.getAttValue(index)), ""); if (fNamespacesScope != null) { //bind prefix "" to URI "" in this case fNamespacesScope.setNamespaceForPrefix( fStringPool.addSymbol(""), fStringPool.addSymbol("")); } } // REVISIT: should we break here? //break; } } index = attrList.getNextAttr(index); } // try to resolve all the grammars here Enumeration locations = locationUriPairs.keys(); while (locations.hasMoreElements()) { String loc = (String) locations.nextElement(); String uri = (String) locationUriPairs.get(loc); resolveSchemaGrammar( loc, uri); schemaCandidateURIs.removeElement(uri); } //TO DO: This should be a feature that can be turned on or off /***** for (int i=0; i< schemaCandidateURIs.size(); i++) { String uri = (String) schemaCandidateURIs.elementAt(i); resolveSchemaGrammar(uri); } /*****/ } } // bind element to URI int prefix = element.prefix != -1 ? element.prefix : 0; int uri = fNamespacesScope.getNamespaceForPrefix(prefix); if (element.prefix != -1 || uri != -1) { element.uri = uri; if (element.uri == -1) { Object[] args = { fStringPool.toString(element.prefix) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XMLNS_DOMAIN, XMLMessages.MSG_PREFIX_DECLARED, XMLMessages.NC_PREFIX_DECLARED, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } //REVISIT: is this the right place to check on if the Schema has changed? 
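        // When validating and the element belongs to a namespace other than the one
        // of the grammar currently in use, switch to the grammar registered for that
        // namespace before looking up the element declaration.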
if ( fValidating && element.uri != fGrammarNameSpaceIndex && element.uri != -1 ) { fGrammarNameSpaceIndex = element.uri; switchGrammar(fGrammarNameSpaceIndex); } if (fAttrListHandle != -1) { int index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attName = attrList.getAttrName(index); if (!fStringPool.equalNames(attName, fNamespacesPrefix)) { int attPrefix = attrList.getAttrPrefix(index); if (attPrefix != fNamespacesPrefix) { if (attPrefix != -1) { int attrUri = fNamespacesScope.getNamespaceForPrefix(attPrefix); if (attrUri == -1) { Object[] args = { fStringPool.toString(attPrefix) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XMLNS_DOMAIN, XMLMessages.MSG_PREFIX_DECLARED, XMLMessages.NC_PREFIX_DECLARED, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } attrList.setAttrURI(index, attrUri); } } } index = attrList.getNextAttr(index); } } } // bindNamespacesToElementAndAttributes(QName,XMLAttrList) void parseSchemaLocation(String schemaLocationStr, Hashtable locationUriPairs){ if (locationUriPairs != null) { StringTokenizer tokenizer = new StringTokenizer(schemaLocationStr, " \n\t\r", false); int tokenTotal = tokenizer.countTokens(); if (tokenTotal % 2 != 0 ) { // TO DO: report warning - malformed schemaLocation string } else { while (tokenizer.hasMoreTokens()) { String uri = tokenizer.nextToken(); String location = tokenizer.nextToken(); locationUriPairs.put(location, uri); } } } else { // TO DO: should report internal error here } }// parseSchemaLocaltion(String, Hashtable) private void resolveSchemaGrammar( String loc, String uri) throws Exception { SchemaGrammar grammar = (SchemaGrammar) fGrammarResolver.getGrammar(uri); if (grammar == null) { DOMParser parser = new DOMParser(); parser.setEntityResolver( new Resolver() ); parser.setErrorHandler( new ErrorHandler() ); try { parser.setFeature("http://xml.org/sax/features/validation", false); parser.setFeature("http://xml.org/sax/features/namespaces", true); parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false); }catch( org.xml.sax.SAXNotRecognizedException e ) { e.printStackTrace(); }catch( org.xml.sax.SAXNotSupportedException e ) { e.printStackTrace(); } // expand it before passing it to the parser loc = fEntityHandler.expandSystemId(loc); try { parser.parse( loc ); }catch( IOException e ) { e.printStackTrace(); }catch( SAXException e ) { //e.printStackTrace(); reportRecoverableXMLError(167, 144, e.getMessage() ); } Document document = parser.getDocument(); //Our Grammar TraverseSchema tst = null; try { if (DEBUG_SCHEMA_VALIDATION) { System.out.println("I am geting the Schema Document"); } Element root = document.getDocumentElement();// This is what we pass to TraverserSchema if (root == null) { reportRecoverableXMLError(167, 144, "Can't get back Schema document's root element :" + loc); } else { if (uri == null || !uri.equals(root.getAttribute(SchemaSymbols.ATT_TARGETNAMESPACE)) ) { reportRecoverableXMLError(167,144, "Schema in " + loc + " has a different target namespace " + "from the one specified in the instance document :" + uri); } grammar = new SchemaGrammar(); grammar.setGrammarDocument(document); tst = new TraverseSchema( root, fStringPool, (SchemaGrammar)grammar, fGrammarResolver, fErrorReporter, loc); fGrammarResolver.putGrammar(document.getDocumentElement().getAttribute("targetNamespace"), grammar); } } catch (Exception e) { e.printStackTrace(System.err); } } } private void resolveSchemaGrammar(String uri) throws Exception{ 
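        // Convenience overload: when no separate schema location is known, the
        // namespace URI itself is used as the document location.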
resolveSchemaGrammar(uri, uri); } static class Resolver implements EntityResolver { private static final String SYSTEM[] = { "http://www.w3.org/TR/2000/WD-xmlschema-1-20000407/structures.dtd", "http://www.w3.org/TR/2000/WD-xmlschema-1-20000407/datatypes.dtd", "http://www.w3.org/TR/2000/WD-xmlschema-1-20000407/versionInfo.ent", }; private static final String PATH[] = { "structures.dtd", "datatypes.dtd", "versionInfo.ent", }; public InputSource resolveEntity(String publicId, String systemId) throws IOException { // looking for the schema DTDs? for (int i = 0; i < SYSTEM.length; i++) { if (systemId.equals(SYSTEM[i])) { InputSource source = new InputSource(getClass().getResourceAsStream(PATH[i])); source.setPublicId(publicId); source.setSystemId(systemId); return source; } } // use default resolution return null; } // resolveEntity(String,String):InputSource } // class Resolver static class ErrorHandler implements org.xml.sax.ErrorHandler { /** Warning. */ public void warning(SAXParseException ex) { System.err.println("[Warning] "+ getLocationString(ex)+": "+ ex.getMessage()); } /** Error. */ public void error(SAXParseException ex) { System.err.println("[Error] "+ getLocationString(ex)+": "+ ex.getMessage()); } /** Fatal error. */ public void fatalError(SAXParseException ex) { System.err.println("[Fatal Error] "+ getLocationString(ex)+": "+ ex.getMessage()); //throw ex; } // // Private methods // /** Returns a string of the location. */ private String getLocationString(SAXParseException ex) { StringBuffer str = new StringBuffer(); String systemId_ = ex.getSystemId(); if (systemId_ != null) { int index = systemId_.lastIndexOf('/'); if (index != -1) systemId_ = systemId_.substring(index + 1); str.append(systemId_); } str.append(':'); str.append(ex.getLineNumber()); str.append(':'); str.append(ex.getColumnNumber()); return str.toString(); } // getLocationString(SAXParseException):String } private int attributeTypeName(XMLAttributeDecl attrDecl) { switch (attrDecl.type) { //case XMLAttributeDecl.TYPE_CDATA: case XMLAttributeDecl.TYPE_ENTITY: { return attrDecl.list ? fENTITIESSymbol : fENTITYSymbol; } case XMLAttributeDecl.TYPE_ENUMERATION: { String enumeration = fStringPool.stringListAsString(attrDecl.enumeration); return fStringPool.addString(enumeration); } case XMLAttributeDecl.TYPE_ID: { return fIDSymbol; } case XMLAttributeDecl.TYPE_IDREF: { return attrDecl.list ? fIDREFSSymbol : fIDREFSymbol; } case XMLAttributeDecl.TYPE_NMTOKEN: { return attrDecl.list ? fNMTOKENSSymbol : fNMTOKENSSymbol; } case XMLAttributeDecl.TYPE_NOTATION: { return fNOTATIONSymbol; } } return fCDATASymbol; } /** Validates element and attributes. */ private void validateElementAndAttributes(QName element, XMLAttrList attrList) throws Exception { if (fGrammar == null && !fValidating && !fNamespacesEnabled) { fCurrentElementIndex = -1; fCurrentContentSpecType = -1; fInElementContent = false; if (fAttrListHandle != -1) { fAttrList.endAttrList(); int index = fAttrList.getFirstAttr(fAttrListHandle); while (index != -1) { if (fStringPool.equalNames(fAttrList.getAttrName(index), fXMLLang)) { fDocumentScanner.checkXMLLangAttributeValue(fAttrList.getAttValue(index)); break; } index = fAttrList.getNextAttr(index); } } return; } int elementIndex = -1; //REVISIT, is it possible, fValidating is false and fGrammar is no null.??? 
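        // Element declaration lookup order: the current content scope (or the top
        // level for namespace-qualified names), then the scopes defined by the
        // enclosing element's base complex types, and finally the top level again
        // for unqualified names.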
if ( fGrammar != null ){ if (DEBUG_SCHEMA_VALIDATION) { System.out.println("localpart: '" + fStringPool.toString(element.localpart) +"' and scope : " + fCurrentScope); } if (element.uri == -1) { elementIndex = fGrammar.getElementDeclIndex(element.localpart,fCurrentScope); } else { elementIndex = fGrammar.getElementDeclIndex(element.localpart, TOP_LEVEL_SCOPE); } if (elementIndex == -1) { // if validating based on a Schema, try to resolve the element again by look it up in its ancestor types if (fGrammarIsSchemaGrammar && fCurrentElementIndex != -1) { TraverseSchema.ComplexTypeInfo baseTypeInfo = null; baseTypeInfo = ((SchemaGrammar)fGrammar).getElementComplexTypeInfo(fCurrentElementIndex); while (baseTypeInfo != null) { elementIndex = fGrammar.getElementDeclIndex(element.localpart, baseTypeInfo.scopeDefined); if (elementIndex > -1 ) { break; } baseTypeInfo = baseTypeInfo.baseComplexTypeInfo; } } //if still can't resolve it, try TOP_LEVEL_SCOPE AGAIN if (element.uri == -1 && elementIndex == -1) { elementIndex = fGrammar.getElementDeclIndex(element.localpart, TOP_LEVEL_SCOPE); // REVISIT: // this is a hack to handle the situation where namespace prefix "" is bound to nothing, and there // is a "noNamespaceSchemaLocation" specified, and element element.uri = fStringPool.addSymbol(""); } /****/ if (elementIndex == -1) if (DEBUG_SCHEMA_VALIDATION) System.out.println("!!! can not find elementDecl in the grammar, " + " the element localpart: " + element.localpart+"["+fStringPool.toString(element.localpart) +"]" + " the element uri: " + element.uri+"["+fStringPool.toString(element.uri) +"]" + " and the current enclosing scope: " + fCurrentScope ); /****/ } if (DEBUG_SCHEMA_VALIDATION) { fGrammar.getElementDecl(elementIndex, fTempElementDecl); System.out.println("elementIndex: " + elementIndex+" \n and itsName : '" + fStringPool.toString(fTempElementDecl.name.localpart) +"' \n its ContentType:" + fTempElementDecl.type +"\n its ContentSpecIndex : " + fTempElementDecl.contentSpecIndex +"\n"); } } // here need to check if we need to switch Grammar by asking SchemaGrammar whether // this element actually is of a type in another Schema. if (fGrammarIsSchemaGrammar && elementIndex != -1) { String anotherSchemaURI = ((SchemaGrammar)fGrammar).getElementFromAnotherSchemaURI(elementIndex); if (anotherSchemaURI != null) { fGrammarNameSpaceIndex = fCurrentSchemaURI = fStringPool.addSymbol(anotherSchemaURI); switchGrammar(fCurrentSchemaURI); } } int contentSpecType = getContentSpecType(elementIndex); if (contentSpecType == -1 && fValidating) { reportRecoverableXMLError(XMLMessages.MSG_ELEMENT_NOT_DECLARED, XMLMessages.VC_ELEMENT_VALID, element.rawname); } if (fGrammar != null && elementIndex != -1) { //REVISIT: broken fAttrListHandle = addDefaultAttributes(elementIndex, attrList, fAttrListHandle, fValidating, fStandaloneReader != -1); } if (fAttrListHandle != -1) { fAttrList.endAttrList(); } if (DEBUG_PRINT_ATTRIBUTES) { String elementStr = fStringPool.toString(element.rawname); System.out.print("startElement: <" + elementStr); if (fAttrListHandle != -1) { int index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { System.out.print(" " + fStringPool.toString(attrList.getAttrName(index)) + "=\"" + fStringPool.toString(attrList.getAttValue(index)) + "\""); index = attrList.getNextAttr(index); } } System.out.println(">"); } // REVISIT: Validation. Do we need to recheck for the xml:lang // attribute? It was already checked above -- perhaps // this is to check values that are defaulted in? 
If // so, this check could move to the attribute decl // callback so we can check the default value before // it is used. if (fAttrListHandle != -1) { int index = fAttrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attrNameIndex = attrList.getAttrName(index); if (fStringPool.equalNames(attrNameIndex, fXMLLang)) { fDocumentScanner.checkXMLLangAttributeValue(attrList.getAttValue(index)); // break; } // here, we validate every "user-defined" attributes int _xmlns = fStringPool.addSymbol("xmlns"); if (attrNameIndex != _xmlns && attrList.getAttrPrefix(index) != _xmlns) if (fValidating) { fAttrNameLocator = getLocatorImpl(fAttrNameLocator); fTempQName.setValues(attrList.getAttrPrefix(index), attrList.getAttrLocalpart(index), attrList.getAttrName(index), attrList.getAttrURI(index) ); int attDefIndex = getAttDefByElementIndex(elementIndex, fTempQName); if (fTempQName.uri != fXsiURI) if (attDefIndex == -1) { // REVISIT - cache the elem/attr tuple so that we only give // this error once for each unique occurrence Object[] args = { fStringPool.toString(element.rawname), fStringPool.toString(attrList.getAttrName(index)) }; System.out.println("[Error] attribute " + fStringPool.toString(attrList.getAttrName(index)) + " not found in element type " + fStringPool.toString(element.rawname)); /*****/ fErrorReporter.reportError(fAttrNameLocator, XMLMessages.XML_DOMAIN, XMLMessages.MSG_ATTRIBUTE_NOT_DECLARED, XMLMessages.VC_ATTRIBUTE_VALUE_TYPE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); /******/ } else { fGrammar.getAttributeDecl(attDefIndex, fTempAttDecl); int attributeType = attributeTypeName(fTempAttDecl); attrList.setAttType(index, attributeType); if (fGrammarIsDTDGrammar && (fTempAttDecl.type == XMLAttributeDecl.TYPE_ENTITY || fTempAttDecl.type == XMLAttributeDecl.TYPE_ENUMERATION || fTempAttDecl.type == XMLAttributeDecl.TYPE_ID || fTempAttDecl.type == XMLAttributeDecl.TYPE_IDREF || fTempAttDecl.type == XMLAttributeDecl.TYPE_NMTOKEN || fTempAttDecl.type == XMLAttributeDecl.TYPE_NOTATION) ) { validateDTDattribute(element, attrList.getAttValue(index), fTempAttDecl); } if (fTempAttDecl.datatypeValidator == null) { Object[] args = { fStringPool.toString(element.rawname), fStringPool.toString(attrList.getAttrName(index)) }; System.out.println("[Error] Datatypevalidator for attribute " + fStringPool.toString(attrList.getAttrName(index)) + " not found in element type " + fStringPool.toString(element.rawname)); //REVISIT : is this the right message? 
/****/ fErrorReporter.reportError(fAttrNameLocator, XMLMessages.XML_DOMAIN, XMLMessages.MSG_ATTRIBUTE_NOT_DECLARED, XMLMessages.VC_ATTRIBUTE_VALUE_TYPE, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); /****/ } else{ try { fTempAttDecl.datatypeValidator.validate(fStringPool.toString(attrList.getAttValue(index)), null ); } catch (InvalidDatatypeValueException idve) { fErrorReporter.reportError(fErrorReporter.getLocator(), SchemaMessageProvider.SCHEMA_DOMAIN, SchemaMessageProvider.DatatypeError, SchemaMessageProvider.MSG_NONE, new Object [] { idve.getMessage() }, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } } } index = fAttrList.getNextAttr(index); } } if (fAttrListHandle != -1) { int index = attrList.getFirstAttr(fAttrListHandle); while (index != -1) { int attName = attrList.getAttrName(index); if (!fStringPool.equalNames(attName, fNamespacesPrefix)) { int attPrefix = attrList.getAttrPrefix(index); if (attPrefix != fNamespacesPrefix) { if (attPrefix != -1) { int uri = fNamespacesScope.getNamespaceForPrefix(attPrefix); if (uri == -1) { Object[] args = { fStringPool.toString(attPrefix) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XMLNS_DOMAIN, XMLMessages.MSG_PREFIX_DECLARED, XMLMessages.NC_PREFIX_DECLARED, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } attrList.setAttrURI(index, uri); } } } index = attrList.getNextAttr(index); } } fCurrentElementIndex = elementIndex; fCurrentContentSpecType = contentSpecType; if (fValidating && contentSpecType == XMLElementDecl.TYPE_SIMPLE) { fBufferDatatype = true; fDatatypeBuffer.setLength(0); } fInElementContent = (contentSpecType == XMLElementDecl.TYPE_CHILDREN); } // validateElementAndAttributes(QName,XMLAttrList) //validate attributes in DTD fashion private void validateDTDattribute(QName element, int attValue, XMLAttributeDecl attributeDecl) throws Exception{ AttributeValidator av = null; switch (attributeDecl.type) { case XMLAttributeDecl.TYPE_ENTITY: if (attributeDecl.list) { av = fAttValidatorENTITIES; } else { av = fAttValidatorENTITY; } break; case XMLAttributeDecl.TYPE_ENUMERATION: av = fAttValidatorENUMERATION; break; case XMLAttributeDecl.TYPE_ID: av = fAttValidatorID; break; case XMLAttributeDecl.TYPE_IDREF: if (attributeDecl.list) { av = fAttValidatorIDREFS; } else { av = fAttValidatorIDREF; } break; case XMLAttributeDecl.TYPE_NOTATION: av = fAttValidatorNOTATION; break; case XMLAttributeDecl.TYPE_NMTOKEN: if (attributeDecl.list) { av = fAttValidatorNMTOKENS; } else { av = fAttValidatorNMTOKEN; } break; } av.normalize(element, attributeDecl.name, attValue, attributeDecl.type, attributeDecl.enumeration); } /** Character data in content. */ private void charDataInContent() { if (DEBUG_ELEMENT_CHILDREN) { System.out.println("charDataInContent()"); } if (fElementChildren.length <= fElementChildrenLength) { QName[] newarray = new QName[fElementChildren.length * 2]; System.arraycopy(fElementChildren, 0, newarray, 0, fElementChildren.length); fElementChildren = newarray; } QName qname = fElementChildren[fElementChildrenLength]; if (qname == null) { for (int i = fElementChildrenLength; i < fElementChildren.length; i++) { fElementChildren[i] = new QName(); } qname = fElementChildren[fElementChildrenLength]; } qname.clear(); fElementChildrenLength++; } // charDataInCount() /** * Check that the content of an element is valid. * <p> * This is the method of primary concern to the validator. This method is called * upon the scanner reaching the end tag of an element. 
At that time, the * element's children must be structurally validated, so it calls this method. * The index of the element being checked (in the decl pool), is provided as * well as an array of element name indexes of the children. The validator must * confirm that this element can have these children in this order. * <p> * This can also be called to do 'what if' testing of content models just to see * if they would be valid. * <p> * Note that the element index is an index into the element decl pool, whereas * the children indexes are name indexes, i.e. into the string pool. * <p> * A value of -1 in the children array indicates a PCDATA node. All other * indexes will be positive and represent child elements. The count can be * zero, since some elements have the EMPTY content model and that must be * confirmed. * * @param elementIndex The index within the <code>ElementDeclPool</code> of this * element. * @param childCount The number of entries in the <code>children</code> array. * @param children The children of this element. Each integer is an index within * the <code>StringPool</code> of the child element name. An index * of -1 is used to indicate an occurrence of non-whitespace character * data. * * @return The value -1 if fully valid, else the 0 based index of the child * that first failed. If the value returned is equal to the number * of children, then additional content is required to reach a valid * ending state. * * @exception Exception Thrown on error. */ private int checkContent(int elementIndex, QName[] children, int childOffset, int childCount) throws Exception { // Get the element name index from the element // REVISIT: Validation final int elementType = fCurrentElement.rawname; if (DEBUG_PRINT_CONTENT) { String strTmp = fStringPool.toString(elementType); System.out.println("Name: "+strTmp+", "+ "Count: "+childCount+", "+ "ContentSpecType: " +fCurrentContentSpecType); //+getContentSpecAsString(elementIndex)); for (int index = childOffset; index < (childOffset+childCount) && index < 10; index++) { if (index == 0) { System.out.print(" ("); } String childName = (children[index].localpart == -1) ? "#PCDATA" : fStringPool.toString(children[index].localpart); if (index + 1 == childCount) { System.out.println(childName + ")"); } else if (index + 1 == 10) { System.out.println(childName + ",...)"); } else { System.out.print(childName + ","); } } } // Get out the content spec for this element final int contentType = fCurrentContentSpecType; // debugging //System.out.println("~~~~~~in checkContent, fCurrentContentSpecType : " + fCurrentContentSpecType); // // Deal with the possible types of content. We try to optimized here // by dealing specially with content models that don't require the // full DFA treatment. // if (contentType == XMLElementDecl.TYPE_EMPTY) { // // If the child count is greater than zero, then this is // an error right off the bat at index 0. // if (childCount != 0) { return 0; } } else if (contentType == XMLElementDecl.TYPE_ANY) { // // This one is open game so we don't pass any judgement on it // at all. Its assumed to fine since it can hold anything. 
// } else if (contentType == XMLElementDecl.TYPE_MIXED || contentType == XMLElementDecl.TYPE_CHILDREN) { // Get the content model for this element, faulting it in if needed XMLContentModel cmElem = null; try { cmElem = getContentModel(elementIndex); return cmElem.validateContent(children, childOffset, childCount); } catch(CMException excToCatch) { // REVISIT - Translate the caught exception to the protected error API int majorCode = excToCatch.getErrorCode(); fErrorReporter.reportError(fErrorReporter.getLocator(), ImplementationMessages.XERCES_IMPLEMENTATION_DOMAIN, majorCode, 0, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); } } else if (contentType == -1) { reportRecoverableXMLError(XMLMessages.MSG_ELEMENT_NOT_DECLARED, XMLMessages.VC_ELEMENT_VALID, elementType); } else if (contentType == XMLElementDecl.TYPE_SIMPLE ) { XMLContentModel cmElem = null; try { // REVISIT: this might not be right //cmElem = getContentModel(elementIndex); //fTempQName.rawname = fTempQName.localpart = fStringPool.addString(fDatatypeBuffer.toString()); //return cmElem.validateContent(1, new QName[] { fTempQName }); fGrammar.getElementDecl(elementIndex, fTempElementDecl); DatatypeValidator dv = fTempElementDecl.datatypeValidator; if (dv == null) { System.out.println("Internal Error: this element have a simpletype "+ "but no datatypevalidator was found, element "+fTempElementDecl.name +",locapart: "+fStringPool.toString(fTempElementDecl.name.localpart)); } else { dv.validate(fDatatypeBuffer.toString(), null); } } //catch (CMException cme) { // System.out.println("Internal Error in datatype validation"); //} catch (InvalidDatatypeValueException idve) { fErrorReporter.reportError(fErrorReporter.getLocator(), SchemaMessageProvider.SCHEMA_DOMAIN, SchemaMessageProvider.DatatypeError, SchemaMessageProvider.MSG_NONE, new Object [] { idve.getMessage() }, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } /* boolean DEBUG_DATATYPES = false; if (DEBUG_DATATYPES) { System.out.println("Checking content of datatype"); String strTmp = fStringPool.toString(elementTypeIndex); int contentSpecIndex = fElementDeclPool.getContentSpec(elementIndex); XMLContentSpec csn = new XMLContentSpec(); fElementDeclPool.getContentSpecNode(contentSpecIndex, csn); String contentSpecString = fStringPool.toString(csn.value); System.out.println ( "Name: " + strTmp + ", Count: " + childCount + ", ContentSpec: " + contentSpecString ); for (int index = 0; index < childCount && index < 10; index++) { if (index == 0) System.out.print(" ("); String childName = (children[index] == -1) ? 
"#PCDATA" : fStringPool.toString(children[index]); if (index + 1 == childCount) System.out.println(childName + ")"); else if (index + 1 == 10) System.out.println(childName + ",...)"); else System.out.print(childName + ","); } } try { // REVISIT - integrate w/ error handling int contentSpecIndex = fElementDeclPool.getContentSpec(elementIndex); XMLContentSpec csn = new XMLContentSpec(); fElementDeclPool.getContentSpecNode(contentSpecIndex, csn); String type = fStringPool.toString(csn.value); DatatypeValidator v = fDatatypeRegistry.getValidatorFor(type); if (v != null) v.validate(fDatatypeBuffer.toString()); else System.out.println("No validator for datatype "+type); } catch (InvalidDatatypeValueException idve) { System.out.println("Incorrect datatype: "+idve.getMessage()); } catch (Exception e) { e.printStackTrace(); System.out.println("Internal error in datatype validation"); } */ } else { fErrorReporter.reportError(fErrorReporter.getLocator(), ImplementationMessages.XERCES_IMPLEMENTATION_DOMAIN, ImplementationMessages.VAL_CST, 0, null, XMLErrorReporter.ERRORTYPE_FATAL_ERROR); } // We succeeded return -1; } // checkContent(int,int,int[]):int /** * Check that all ID references were to ID attributes present in the document. * <p> * This method is a convenience call that allows the validator to do any id ref * checks above and beyond those done by the scanner. The scanner does the checks * specificied in the XML spec, i.e. that ID refs refer to ids which were * eventually defined somewhere in the document. * <p> * If the validator is for a Schema perhaps, which defines id semantics beyond * those of the XML specificiation, this is where that extra checking would be * done. For most validators, this is a no-op. * * @exception Exception Thrown on error. */ private void checkIdRefs() throws Exception { if (fIdRefs == null) return; Enumeration en = fIdRefs.keys(); while (en.hasMoreElements()) { Integer key = (Integer)en.nextElement(); if (fIdDefs == null || !fIdDefs.containsKey(key)) { Object[] args = { fStringPool.toString(key.intValue()) }; fErrorReporter.reportError(fErrorReporter.getLocator(), XMLMessages.XML_DOMAIN, XMLMessages.MSG_ELEMENT_WITH_ID_REQUIRED, XMLMessages.VC_IDREF, args, XMLErrorReporter.ERRORTYPE_RECOVERABLE_ERROR); } } } // checkIdRefs() /** * Checks that all declared elements refer to declared elements * in their content models. This method calls out to the error * handler to indicate warnings. */ /*private void checkDeclaredElements() throws Exception { //****DEBUG**** if (DEBUG) print("(???) 
XMLValidator.checkDeclaredElements\n"); //****DEBUG**** for (int i = 0; i < fElementCount; i++) { int type = fGrammar.getContentSpecType(i); if (type == XMLElementDecl.TYPE_MIXED || type == XMLElementDecl.TYPE_CHILDREN) { int chunk = i >> CHUNK_SHIFT; int index = i & CHUNK_MASK; int contentSpecIndex = fContentSpec[chunk][index]; checkDeclaredElements(i, contentSpecIndex); } } } */ private void printChildren() { if (DEBUG_ELEMENT_CHILDREN) { System.out.print('['); for (int i = 0; i < fElementChildrenLength; i++) { System.out.print(' '); QName qname = fElementChildren[i]; if (qname != null) { System.out.print(fStringPool.toString(qname.rawname)); } else { System.out.print("null"); } if (i < fElementChildrenLength - 1) { System.out.print(", "); } System.out.flush(); } System.out.print(" ]"); System.out.println(); } } private void printStack() { if (DEBUG_ELEMENT_CHILDREN) { System.out.print('{'); for (int i = 0; i <= fElementDepth; i++) { System.out.print(' '); System.out.print(fElementChildrenOffsetStack[i]); if (i < fElementDepth) { System.out.print(", "); } System.out.flush(); } System.out.print(" }"); System.out.println(); } } // // Interfaces // /** * AttributeValidator. */ public interface AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValue, int attType, int enumHandle) throws Exception; } // interface AttributeValidator // // Classes // /** * AttValidatorCDATA. */ final class AttValidatorCDATA implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // Normalize attribute based upon attribute type... return attValueHandle; } } // class AttValidatorCDATA /** * AttValidatorID. */ final class AttValidatorID implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } if (!XMLCharacterProperties.validName(newAttValue)) { reportRecoverableXMLError(XMLMessages.MSG_ID_INVALID, XMLMessages.VC_ID, fStringPool.toString(attribute.rawname), newAttValue); } // // ID - check that the id value is unique within the document (V_TAG8) // if (element.rawname != -1 && !addId(attValueHandle)) { reportRecoverableXMLError(XMLMessages.MSG_ID_NOT_UNIQUE, XMLMessages.VC_ID, fStringPool.toString(attribute.rawname), newAttValue); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalong attribute definition. 
*/ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorID /** * AttValidatorIDREF. */ final class AttValidatorIDREF implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } if (!XMLCharacterProperties.validName(newAttValue)) { reportRecoverableXMLError(XMLMessages.MSG_IDREF_INVALID, XMLMessages.VC_IDREF, fStringPool.toString(attribute.rawname), newAttValue); } // // IDREF - remember the id value // if (element.rawname != -1) addIdRef(attValueHandle); } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorIDREF /** * AttValidatorIDREFS. */ final class AttValidatorIDREFS implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... 
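            // Illustration (hypothetical value): an IDREFS attribute value of
            // "  ref1   ref2 " is normalized to "ref1 ref2"; each token is checked
            // as a valid Name and recorded as an ID reference via addIdRef.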
// String attValue = fStringPool.toString(attValueHandle); StringTokenizer tokenizer = new StringTokenizer(attValue); StringBuffer sb = new StringBuffer(attValue.length()); boolean ok = true; if (tokenizer.hasMoreTokens()) { while (true) { String idName = tokenizer.nextToken(); if (fValidating) { if (!XMLCharacterProperties.validName(idName)) { ok = false; } // // IDREFS - remember the id values // if (element.rawname != -1) { addIdRef(fStringPool.addSymbol(idName)); } } sb.append(idName); if (!tokenizer.hasMoreTokens()) break; sb.append(' '); } } String newAttValue = sb.toString(); if (fValidating && (!ok || newAttValue.length() == 0)) { reportRecoverableXMLError(XMLMessages.MSG_IDREFS_INVALID, XMLMessages.VC_IDREF, fStringPool.toString(attribute.rawname), newAttValue); } if (!newAttValue.equals(attValue)) { attValueHandle = fStringPool.addString(newAttValue); if (fValidating && invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorIDREFS /** * AttValidatorENTITY. */ final class AttValidatorENTITY implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } // // ENTITY - check that the value is an unparsed entity name (V_TAGa) // if (!fEntityHandler.isUnparsedEntity(attValueHandle)) { reportRecoverableXMLError(XMLMessages.MSG_ENTITY_INVALID, XMLMessages.VC_ENTITY_NAME, fStringPool.toString(attribute.rawname), newAttValue); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorENTITY /** * AttValidatorENTITIES. */ final class AttValidatorENTITIES implements AttributeValidator { // // AttributeValidator methods // /** Normalize. 
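     * Splits the value on white space, verifies that each token names an unparsed
     * entity, and rejoins the tokens separated by single spaces.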
*/ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); StringTokenizer tokenizer = new StringTokenizer(attValue); StringBuffer sb = new StringBuffer(attValue.length()); boolean ok = true; if (tokenizer.hasMoreTokens()) { while (true) { String entityName = tokenizer.nextToken(); // // ENTITIES - check that each value is an unparsed entity name (V_TAGa) // if (fValidating && !fEntityHandler.isUnparsedEntity(fStringPool.addSymbol(entityName))) { ok = false; } sb.append(entityName); if (!tokenizer.hasMoreTokens()) { break; } sb.append(' '); } } String newAttValue = sb.toString(); if (fValidating && (!ok || newAttValue.length() == 0)) { reportRecoverableXMLError(XMLMessages.MSG_ENTITIES_INVALID, XMLMessages.VC_ENTITY_NAME, fStringPool.toString(attribute.rawname), newAttValue); } if (!newAttValue.equals(attValue)) { attValueHandle = fStringPool.addString(newAttValue); if (fValidating && invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorENTITIES /** * AttValidatorNMTOKEN. */ final class AttValidatorNMTOKEN implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } if (!XMLCharacterProperties.validNmtoken(newAttValue)) { reportRecoverableXMLError(XMLMessages.MSG_NMTOKEN_INVALID, XMLMessages.VC_NAME_TOKEN, fStringPool.toString(attribute.rawname), newAttValue); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorNMTOKEN /** * AttValidatorNMTOKENS. 
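 * Validates list-valued NMTOKENS attributes: the value is split on white space,
 * each token must be a valid name token, and the tokens are rejoined separated
 * by single spaces.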
*/ final class AttValidatorNMTOKENS implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); StringTokenizer tokenizer = new StringTokenizer(attValue); StringBuffer sb = new StringBuffer(attValue.length()); boolean ok = true; if (tokenizer.hasMoreTokens()) { while (true) { String nmtoken = tokenizer.nextToken(); if (fValidating && !XMLCharacterProperties.validNmtoken(nmtoken)) { ok = false; } sb.append(nmtoken); if (!tokenizer.hasMoreTokens()) { break; } sb.append(' '); } } String newAttValue = sb.toString(); if (fValidating && (!ok || newAttValue.length() == 0)) { reportRecoverableXMLError(XMLMessages.MSG_NMTOKENS_INVALID, XMLMessages.VC_NAME_TOKEN, fStringPool.toString(attribute.rawname), newAttValue); } if (!newAttValue.equals(attValue)) { attValueHandle = fStringPool.addString(newAttValue); if (fValidating && invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorNMTOKENS /** * AttValidatorNOTATION. */ final class AttValidatorNOTATION implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } // // NOTATION - check that the value is in the AttDef enumeration (V_TAGo) // if (!fStringPool.stringInList(enumHandle, attValueHandle)) { reportRecoverableXMLError(XMLMessages.MSG_ATTRIBUTE_VALUE_NOT_IN_LIST, XMLMessages.VC_NOTATION_ATTRIBUTES, fStringPool.toString(attribute.rawname), newAttValue, fStringPool.stringListAsString(enumHandle)); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? 
if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorNOTATION /** * AttValidatorENUMERATION. */ final class AttValidatorENUMERATION implements AttributeValidator { // // AttributeValidator methods // /** Normalize. */ public int normalize(QName element, QName attribute, int attValueHandle, int attType, int enumHandle) throws Exception { // // Normalize attribute based upon attribute type... // String attValue = fStringPool.toString(attValueHandle); String newAttValue = attValue.trim(); if (fValidating) { // REVISIT - can we release the old string? if (newAttValue != attValue) { if (invalidStandaloneAttDef(element, attribute)) { reportRecoverableXMLError(XMLMessages.MSG_ATTVALUE_CHANGED_DURING_NORMALIZATION_WHEN_STANDALONE, XMLMessages.VC_STANDALONE_DOCUMENT_DECLARATION, fStringPool.toString(attribute.rawname), attValue, newAttValue); } attValueHandle = fStringPool.addSymbol(newAttValue); } else { attValueHandle = fStringPool.addSymbol(attValueHandle); } // // ENUMERATION - check that value is in the AttDef enumeration (V_TAG9) // if (!fStringPool.stringInList(enumHandle, attValueHandle)) { reportRecoverableXMLError(XMLMessages.MSG_ATTRIBUTE_VALUE_NOT_IN_LIST, XMLMessages.VC_ENUMERATION, fStringPool.toString(attribute.rawname), newAttValue, fStringPool.stringListAsString(enumHandle)); } } else if (newAttValue != attValue) { // REVISIT - can we release the old string? attValueHandle = fStringPool.addSymbol(newAttValue); } return attValueHandle; } // normalize(QName,QName,int,int,int):int // // Package methods // /** Returns true if invalid standalone attribute definition. */ boolean invalidStandaloneAttDef(QName element, QName attribute) { if (fStandaloneReader == -1) { return false; } // we are normalizing a default att value... this ok? if (element.rawname == -1) { return false; } return getAttDefIsExternal(element, attribute); } } // class AttValidatorENUMERATION } // class XMLValidator
follow up to AndyC's fix today; do the same thing for the attribute. --ericye git-svn-id: 21df804813e9d3638e43477f308dd0be51e5f30f@315812 13f79535-47bb-0310-9956-ffa450edef68
src/org/apache/xerces/validators/common/XMLValidator.java
follow up to AndyC's fix today; do the same thing for the attribute. --ericye
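All of the list-valued validators in this record (IDREFS, ENTITIES, NMTOKENS) share one normalization pattern: split the attribute value on whitespace, run a per-token check, and rejoin the tokens with single spaces, reporting a recoverable error if any token fails or the result is empty. The sketch below is a framework-free illustration of that pattern, not the Xerces implementation itself: it uses plain Strings and a pluggable per-token check in place of the string-pool handles and entity/character tables, and every class and method name in it is illustrative only.

import java.util.StringTokenizer;

/** Minimal sketch of the tokenize-and-rejoin normalization used by the
 *  list-valued attribute validators (IDREFS, ENTITIES, NMTOKENS). */
public class ListAttrNormalizerSketch {

    /** Collapses runs of whitespace to single spaces and reports (via ok[0])
     *  whether every token passed the supplied per-token check. */
    public static String normalize(String attValue,
                                   java.util.function.Predicate<String> tokenOk,
                                   boolean[] ok) {
        StringTokenizer tokenizer = new StringTokenizer(attValue);
        StringBuilder sb = new StringBuilder(attValue.length());
        ok[0] = true;
        while (tokenizer.hasMoreTokens()) {
            String token = tokenizer.nextToken();
            if (!tokenOk.test(token)) {
                ok[0] = false;            // the real validators report a recoverable error here
            }
            sb.append(token);
            if (tokenizer.hasMoreTokens()) {
                sb.append(' ');           // single space between tokens, as in the code above
            }
        }
        return sb.toString();             // differs from attValue only if whitespace was normalized
    }

    public static void main(String[] args) {
        boolean[] ok = new boolean[1];
        // rough stand-in for the real per-token Name / Nmtoken / entity check
        String normalized = normalize("  id1   id2\tid3 ",
                t -> t.matches("[A-Za-z_][A-Za-z0-9._-]*"), ok);
        System.out.println("'" + normalized + "' valid=" + ok[0]);  // 'id1 id2 id3' valid=true
    }
}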
Java
apache-2.0
d4dcd713f16edf0457f233260e7d0bf485b47a1a
0
apache/cordova-plugin-statusbar,apache/cordova-plugin-statusbar
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.cordova.statusbar; import android.graphics.Color; import android.os.Build; import android.view.View; import android.view.Window; import android.view.WindowManager; import androidx.appcompat.app.AppCompatActivity; import androidx.core.view.WindowCompat; import androidx.core.view.WindowInsetsControllerCompat; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaArgs; import org.apache.cordova.CordovaInterface; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.CordovaWebView; import org.apache.cordova.LOG; import org.apache.cordova.PluginResult; import org.json.JSONException; public class StatusBar extends CordovaPlugin { private static final String TAG = "StatusBar"; private static final String ACTION_HIDE = "hide"; private static final String ACTION_SHOW = "show"; private static final String ACTION_READY = "_ready"; private static final String ACTION_BACKGROUND_COLOR_BY_HEX_STRING = "backgroundColorByHexString"; private static final String ACTION_OVERLAYS_WEB_VIEW = "overlaysWebView"; private static final String ACTION_STYLE_DEFAULT = "styleDefault"; private static final String ACTION_STYLE_LIGHT_CONTENT = "styleLightContent"; private static final String STYLE_DEFAULT = "default"; private static final String STYLE_LIGHT_CONTENT = "lightcontent"; private AppCompatActivity activity; private Window window; /** * Sets the context of the Command. This can then be used to do things like * get file paths associated with the Activity. * * @param cordova The context of the main Activity. * @param webView The CordovaWebView Cordova is running in. */ @Override public void initialize(final CordovaInterface cordova, CordovaWebView webView) { LOG.v(TAG, "StatusBar: initialization"); super.initialize(cordova, webView); activity = this.cordova.getActivity(); window = activity.getWindow(); activity.runOnUiThread(() -> { // Clear flag FLAG_FORCE_NOT_FULLSCREEN which is set initially // by the Cordova. window.clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN); // Read 'StatusBarOverlaysWebView' from config.xml, default is true. setStatusBarTransparent(preferences.getBoolean("StatusBarOverlaysWebView", true)); // Read 'StatusBarBackgroundColor' from config.xml, default is #000000. setStatusBarBackgroundColor(preferences.getString("StatusBarBackgroundColor", "#000000")); // Read 'StatusBarStyle' from config.xml, default is 'lightcontent'. setStatusBarStyle( preferences.getString("StatusBarStyle", STYLE_LIGHT_CONTENT).toLowerCase() ); }); } /** * Executes the request and returns PluginResult. * * @param action The action to execute. * @param args JSONArry of arguments for the plugin. * @param callbackContext The callback id used when calling back into JavaScript. 
* @return True if the action was valid, false otherwise. */ @Override public boolean execute(final String action, final CordovaArgs args, final CallbackContext callbackContext) { LOG.v(TAG, "Executing action: " + action); switch (action) { case ACTION_READY: boolean statusBarVisible = (window.getAttributes().flags & WindowManager.LayoutParams.FLAG_FULLSCREEN) == 0; callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, statusBarVisible)); return true; case ACTION_SHOW: activity.runOnUiThread(() -> { int uiOptions = window.getDecorView().getSystemUiVisibility(); uiOptions &= ~View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN; uiOptions &= ~View.SYSTEM_UI_FLAG_FULLSCREEN; window.getDecorView().setSystemUiVisibility(uiOptions); // CB-11197 We still need to update LayoutParams to force status bar // to be hidden when entering e.g. text fields window.clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); }); return true; case ACTION_HIDE: activity.runOnUiThread(() -> { int uiOptions = window.getDecorView().getSystemUiVisibility() | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_FULLSCREEN; window.getDecorView().setSystemUiVisibility(uiOptions); // CB-11197 We still need to update LayoutParams to force status bar // to be hidden when entering e.g. text fields window.addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); }); return true; case ACTION_BACKGROUND_COLOR_BY_HEX_STRING: activity.runOnUiThread(() -> { try { setStatusBarBackgroundColor(args.getString(0)); } catch (JSONException ignore) { LOG.e(TAG, "Invalid hexString argument, use f.i. '#777777'"); } }); return true; case ACTION_OVERLAYS_WEB_VIEW: activity.runOnUiThread(() -> { try { setStatusBarTransparent(args.getBoolean(0)); } catch (JSONException ignore) { LOG.e(TAG, "Invalid boolean argument"); } }); return true; case ACTION_STYLE_DEFAULT: activity.runOnUiThread(() -> setStatusBarStyle(STYLE_DEFAULT)); return true; case ACTION_STYLE_LIGHT_CONTENT: activity.runOnUiThread(() -> setStatusBarStyle(STYLE_LIGHT_CONTENT)); return true; default: return false; } } private void setStatusBarBackgroundColor(final String colorPref) { if (colorPref.isEmpty()) return; int color; try { color = Color.parseColor(colorPref); } catch (IllegalArgumentException ignore) { LOG.e(TAG, "Invalid hexString argument, use f.i. '#999999'"); return; } window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); // SDK 19-30 window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); // SDK 21 window.setStatusBarColor(color); } private void setStatusBarTransparent(final boolean isTransparent) { final Window window = cordova.getActivity().getWindow(); int visibility = isTransparent ? View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN : View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_VISIBLE; window.getDecorView().setSystemUiVisibility(visibility); if (isTransparent) { window.setStatusBarColor(Color.TRANSPARENT); } } private void setStatusBarStyle(final String style) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && !style.isEmpty()) { View decorView = window.getDecorView(); WindowInsetsControllerCompat windowInsetsControllerCompat = WindowCompat.getInsetsController(window, decorView); if (style.equals(STYLE_DEFAULT)) { windowInsetsControllerCompat.setAppearanceLightStatusBars(true); } else if (style.equals(STYLE_LIGHT_CONTENT)) { windowInsetsControllerCompat.setAppearanceLightStatusBars(false); } else { LOG.e(TAG, "Invalid style, must be either 'default' or 'lightcontent'"); } } } }
src/android/StatusBar.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.cordova.statusbar; import android.app.Activity; import android.graphics.Color; import android.os.Build; import android.view.View; import android.view.Window; import android.view.WindowManager; import androidx.core.view.WindowCompat; import androidx.core.view.WindowInsetsControllerCompat; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaArgs; import org.apache.cordova.CordovaInterface; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.CordovaWebView; import org.apache.cordova.LOG; import org.apache.cordova.PluginResult; import org.json.JSONException; public class StatusBar extends CordovaPlugin { private static final String TAG = "StatusBar"; private static final String ACTION_HIDE = "hide"; private static final String ACTION_SHOW = "show"; private static final String ACTION_READY = "_ready"; private static final String ACTION_BACKGROUND_COLOR_BY_HEX_STRING = "backgroundColorByHexString"; private static final String ACTION_OVERLAYS_WEB_VIEW = "overlaysWebView"; private static final String ACTION_STYLE_DEFAULT = "styleDefault"; private static final String ACTION_STYLE_LIGHT_CONTENT = "styleLightContent"; private static final String STYLE_DEFAULT = "default"; private static final String STYLE_LIGHT_CONTENT = "lightcontent"; /** * Sets the context of the Command. This can then be used to do things like * get file paths associated with the Activity. * * @param cordova The context of the main Activity. * @param webView The CordovaWebView Cordova is running in. */ @Override public void initialize(final CordovaInterface cordova, CordovaWebView webView) { LOG.v(TAG, "StatusBar: initialization"); super.initialize(cordova, webView); this.cordova.getActivity().runOnUiThread(() -> { // Clear flag FLAG_FORCE_NOT_FULLSCREEN which is set initially // by the Cordova. Window window = cordova.getActivity().getWindow(); window.clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN); // Read 'StatusBarOverlaysWebView' from config.xml, default is true. setStatusBarTransparent(preferences.getBoolean("StatusBarOverlaysWebView", true)); // Read 'StatusBarBackgroundColor' from config.xml, default is #000000. setStatusBarBackgroundColor(preferences.getString("StatusBarBackgroundColor", "#000000")); // Read 'StatusBarStyle' from config.xml, default is 'lightcontent'. setStatusBarStyle( preferences.getString("StatusBarStyle", STYLE_LIGHT_CONTENT).toLowerCase() ); }); } /** * Executes the request and returns PluginResult. * * @param action The action to execute. * @param args JSONArry of arguments for the plugin. * @param callbackContext The callback id used when calling back into JavaScript. * @return True if the action was valid, false otherwise. 
*/ @Override public boolean execute(final String action, final CordovaArgs args, final CallbackContext callbackContext) { LOG.v(TAG, "Executing action: " + action); final Activity activity = this.cordova.getActivity(); final Window window = activity.getWindow(); switch (action) { case ACTION_READY: boolean statusBarVisible = (window.getAttributes().flags & WindowManager.LayoutParams.FLAG_FULLSCREEN) == 0; callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, statusBarVisible)); return true; case ACTION_SHOW: activity.runOnUiThread(() -> { int uiOptions = window.getDecorView().getSystemUiVisibility(); uiOptions &= ~View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN; uiOptions &= ~View.SYSTEM_UI_FLAG_FULLSCREEN; window.getDecorView().setSystemUiVisibility(uiOptions); // CB-11197 We still need to update LayoutParams to force status bar // to be hidden when entering e.g. text fields window.clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); }); return true; case ACTION_HIDE: activity.runOnUiThread(() -> { int uiOptions = window.getDecorView().getSystemUiVisibility() | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_FULLSCREEN; window.getDecorView().setSystemUiVisibility(uiOptions); // CB-11197 We still need to update LayoutParams to force status bar // to be hidden when entering e.g. text fields window.addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); }); return true; case ACTION_BACKGROUND_COLOR_BY_HEX_STRING: activity.runOnUiThread(() -> { try { setStatusBarBackgroundColor(args.getString(0)); } catch (JSONException ignore) { LOG.e(TAG, "Invalid hexString argument, use f.i. '#777777'"); } }); return true; case ACTION_OVERLAYS_WEB_VIEW: activity.runOnUiThread(() -> { try { setStatusBarTransparent(args.getBoolean(0)); } catch (JSONException ignore) { LOG.e(TAG, "Invalid boolean argument"); } }); return true; case ACTION_STYLE_DEFAULT: activity.runOnUiThread(() -> setStatusBarStyle(STYLE_DEFAULT)); return true; case ACTION_STYLE_LIGHT_CONTENT: activity.runOnUiThread(() -> setStatusBarStyle(STYLE_LIGHT_CONTENT)); return true; default: return false; } } private void setStatusBarBackgroundColor(final String colorPref) { if (colorPref.isEmpty()) return; int color; try { color = Color.parseColor(colorPref); } catch (IllegalArgumentException ignore) { LOG.e(TAG, "Invalid hexString argument, use f.i. '#999999'"); return; } final Window window = cordova.getActivity().getWindow(); window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); // SDK 19-30 window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); // SDK 21 window.setStatusBarColor(color); } private void setStatusBarTransparent(final boolean isTransparent) { final Window window = cordova.getActivity().getWindow(); int visibility = isTransparent ? 
View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN : View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_VISIBLE; window.getDecorView().setSystemUiVisibility(visibility); if (isTransparent) { window.setStatusBarColor(Color.TRANSPARENT); } } private void setStatusBarStyle(final String style) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && !style.isEmpty()) { Window window = cordova.getActivity().getWindow(); View decorView = window.getDecorView(); WindowInsetsControllerCompat windowInsetsControllerCompat = WindowCompat.getInsetsController(window, decorView); if (style.equals(STYLE_DEFAULT)) { windowInsetsControllerCompat.setAppearanceLightStatusBars(true); } else if (style.equals(STYLE_LIGHT_CONTENT)) { windowInsetsControllerCompat.setAppearanceLightStatusBars(false); } else { LOG.e(TAG, "Invalid style, must be either 'default' or 'lightcontent'"); } } } }
refactor(android): simplify window & activity (#249)
src/android/StatusBar.java
refactor(android): simplify window & activity (#249)
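The diff in this record moves the Activity and Window lookups out of every handler and into fields that are set once in initialize(). The sketch below shows that caching pattern in a framework-free form so it runs on a plain JVM; the Fake* types merely stand in for Android's Activity and Window, and none of the names are part of the Cordova API.

/** Framework-free sketch of the refactor above: collaborators that never change
 *  for the plugin's lifetime are resolved once in an init hook and cached in
 *  fields, instead of being re-derived inside every action handler. */
public class StatusBarRefactorSketch {

    static class FakeWindow {
        void clearFullscreenFlag() { System.out.println("fullscreen flag cleared"); }
    }

    static class FakeActivity {
        private final FakeWindow window = new FakeWindow();
        FakeWindow getWindow() { return window; }
    }

    private FakeActivity activity;   // cached, like StatusBar's 'activity' field
    private FakeWindow window;       // cached, like StatusBar's 'window' field

    /** Mirrors CordovaPlugin.initialize(): look the collaborators up exactly once. */
    void initialize(FakeActivity hostActivity) {
        this.activity = hostActivity;
        this.window = hostActivity.getWindow();
    }

    /** Handlers now use the cached fields; the old execute() called
     *  cordova.getActivity().getWindow() again on every invocation. */
    boolean execute(String action) {
        window.clearFullscreenFlag();
        return true;
    }

    public static void main(String[] args) {
        StatusBarRefactorSketch plugin = new StatusBarRefactorSketch();
        plugin.initialize(new FakeActivity());
        plugin.execute("show");
        plugin.execute("hide");
    }
}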
Java
apache-2.0
433778a076e5a418012d05cccd8f8968f16fa3af
0
samskivert/libgdx,ttencate/libgdx,fwolff/libgdx,toa5/libgdx,kotcrab/libgdx,bsmr-java/libgdx,sarkanyi/libgdx,js78/libgdx,josephknight/libgdx,xoppa/libgdx,realitix/libgdx,bsmr-java/libgdx,jsjolund/libgdx,yangweigbh/libgdx,bladecoder/libgdx,josephknight/libgdx,ttencate/libgdx,ttencate/libgdx,bladecoder/libgdx,libgdx/libgdx,bsmr-java/libgdx,gouessej/libgdx,josephknight/libgdx,MikkelTAndersen/libgdx,FredGithub/libgdx,BlueRiverInteractive/libgdx,czyzby/libgdx,MovingBlocks/libgdx,realitix/libgdx,toa5/libgdx,jsjolund/libgdx,bsmr-java/libgdx,cypherdare/libgdx,bgroenks96/libgdx,codepoke/libgdx,kotcrab/libgdx,FredGithub/libgdx,ttencate/libgdx,alex-dorokhov/libgdx,js78/libgdx,realitix/libgdx,sarkanyi/libgdx,gouessej/libgdx,tommyettinger/libgdx,bgroenks96/libgdx,samskivert/libgdx,hyvas/libgdx,codepoke/libgdx,bsmr-java/libgdx,MovingBlocks/libgdx,sarkanyi/libgdx,toa5/libgdx,tommyettinger/libgdx,toa5/libgdx,js78/libgdx,sarkanyi/libgdx,ttencate/libgdx,kotcrab/libgdx,xoppa/libgdx,stinsonga/libgdx,codepoke/libgdx,xoppa/libgdx,sarkanyi/libgdx,yangweigbh/libgdx,sarkanyi/libgdx,xoppa/libgdx,sarkanyi/libgdx,samskivert/libgdx,fwolff/libgdx,cypherdare/libgdx,fwolff/libgdx,fwolff/libgdx,toa5/libgdx,bsmr-java/libgdx,MikkelTAndersen/libgdx,NathanSweet/libgdx,NathanSweet/libgdx,MikkelTAndersen/libgdx,yangweigbh/libgdx,jsjolund/libgdx,toa5/libgdx,MikkelTAndersen/libgdx,realitix/libgdx,js78/libgdx,Zomby2D/libgdx,gouessej/libgdx,Zomby2D/libgdx,alex-dorokhov/libgdx,yangweigbh/libgdx,BlueRiverInteractive/libgdx,ttencate/libgdx,gouessej/libgdx,libgdx/libgdx,hyvas/libgdx,bgroenks96/libgdx,Zomby2D/libgdx,tommyettinger/libgdx,stinsonga/libgdx,josephknight/libgdx,js78/libgdx,xoppa/libgdx,xoppa/libgdx,alex-dorokhov/libgdx,fwolff/libgdx,kotcrab/libgdx,gouessej/libgdx,czyzby/libgdx,BlueRiverInteractive/libgdx,codepoke/libgdx,FredGithub/libgdx,BlueRiverInteractive/libgdx,FredGithub/libgdx,NathanSweet/libgdx,yangweigbh/libgdx,hyvas/libgdx,gouessej/libgdx,cypherdare/libgdx,bsmr-java/libgdx,stinsonga/libgdx,bgroenks96/libgdx,czyzby/libgdx,libgdx/libgdx,ttencate/libgdx,NathanSweet/libgdx,libgdx/libgdx,xoppa/libgdx,bgroenks96/libgdx,alex-dorokhov/libgdx,samskivert/libgdx,jsjolund/libgdx,js78/libgdx,FredGithub/libgdx,ttencate/libgdx,bgroenks96/libgdx,czyzby/libgdx,czyzby/libgdx,bgroenks96/libgdx,Zomby2D/libgdx,realitix/libgdx,samskivert/libgdx,MikkelTAndersen/libgdx,MovingBlocks/libgdx,yangweigbh/libgdx,MovingBlocks/libgdx,gouessej/libgdx,NathanSweet/libgdx,yangweigbh/libgdx,realitix/libgdx,js78/libgdx,xoppa/libgdx,hyvas/libgdx,stinsonga/libgdx,jsjolund/libgdx,BlueRiverInteractive/libgdx,MikkelTAndersen/libgdx,BlueRiverInteractive/libgdx,josephknight/libgdx,codepoke/libgdx,realitix/libgdx,MikkelTAndersen/libgdx,toa5/libgdx,realitix/libgdx,czyzby/libgdx,sarkanyi/libgdx,jsjolund/libgdx,MovingBlocks/libgdx,bsmr-java/libgdx,cypherdare/libgdx,hyvas/libgdx,bladecoder/libgdx,codepoke/libgdx,MikkelTAndersen/libgdx,czyzby/libgdx,BlueRiverInteractive/libgdx,js78/libgdx,samskivert/libgdx,hyvas/libgdx,fwolff/libgdx,libgdx/libgdx,hyvas/libgdx,jsjolund/libgdx,FredGithub/libgdx,Zomby2D/libgdx,stinsonga/libgdx,tommyettinger/libgdx,josephknight/libgdx,alex-dorokhov/libgdx,bgroenks96/libgdx,gouessej/libgdx,codepoke/libgdx,alex-dorokhov/libgdx,josephknight/libgdx,jsjolund/libgdx,bladecoder/libgdx,MovingBlocks/libgdx,BlueRiverInteractive/libgdx,toa5/libgdx,kotcrab/libgdx,codepoke/libgdx,bladecoder/libgdx,FredGithub/libgdx,kotcrab/libgdx,FredGithub/libgdx,josephknight/libgdx,hyvas/libgdx,samskivert/libgdx,alex-dorokhov/libgdx,czyzby/libgdx,alex-dorokhov/libgd
x,MovingBlocks/libgdx,tommyettinger/libgdx,fwolff/libgdx,fwolff/libgdx,yangweigbh/libgdx,kotcrab/libgdx,cypherdare/libgdx,samskivert/libgdx,MovingBlocks/libgdx,kotcrab/libgdx
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.scenes.scene2d.ui; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Input.Keys; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.NinePatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.math.Interpolation; import com.badlogic.gdx.math.MathUtils; import com.badlogic.gdx.scenes.scene2d.Stage; import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener.ChangeEvent; import com.badlogic.gdx.scenes.scene2d.utils.Disableable; import com.badlogic.gdx.scenes.scene2d.utils.Drawable; import com.badlogic.gdx.utils.Pools; /** A progress bar is a widget that visually displays the progress of some activity or a value within given range. The progress * bar has a range (min, max) and a stepping between each value it represents. The percentage of completeness typically starts out * as an empty progress bar and gradually becomes filled in as the task or variable value progresses. * <p> * {@link ChangeEvent} is fired when the progress bar knob is moved. Cancelling the event will move the knob to where it was * previously. * <p> * The preferred height of a progress bar is determined by the larger of the knob and background. The preferred width of progress * bar is 140, a relatively arbitrary size. * @author mzechner * @author Nathan Sweet */ public class ProgressBar extends Widget implements Disableable { private ProgressBarStyle style; private float min, max, stepSize; private float value, animateFromValue; float position; final boolean vertical; private float animateDuration, animateTime; private Interpolation animateInterpolation = Interpolation.linear; private float[] snapValues; private float threshold; boolean disabled; boolean shiftIgnoresSnap; private Interpolation visualInterpolation = Interpolation.linear; public ProgressBar (float min, float max, float stepSize, boolean vertical, Skin skin) { this(min, max, stepSize, vertical, skin.get("default-" + (vertical ? "vertical" : "horizontal"), ProgressBarStyle.class)); } public ProgressBar (float min, float max, float stepSize, boolean vertical, Skin skin, String styleName) { this(min, max, stepSize, vertical, skin.get(styleName, ProgressBarStyle.class)); } /** Creates a new progress bar. It's width is determined by the given prefWidth parameter, its height is determined by the * maximum of the height of either the progress bar {@link NinePatch} or progress bar handle {@link TextureRegion}. The min and * max values determine the range the values of this progress bar can take on, the stepSize parameter specifies the distance * between individual values. * <p> * E.g. min could be 4, max could be 10 and stepSize could be 0.2, giving you a total of 30 values, 4.0 4.2, 4.4 and so on. 
* @param min the minimum value * @param max the maximum value * @param stepSize the step size between values * @param style the {@link ProgressBarStyle} */ public ProgressBar (float min, float max, float stepSize, boolean vertical, ProgressBarStyle style) { if (min > max) throw new IllegalArgumentException("max must be > min. min,max: " + min + ", " + max); if (stepSize <= 0) throw new IllegalArgumentException("stepSize must be > 0: " + stepSize); setStyle(style); this.min = min; this.max = max; this.stepSize = stepSize; this.vertical = vertical; this.value = min; setSize(getPrefWidth(), getPrefHeight()); } public void setStyle (ProgressBarStyle style) { if (style == null) throw new IllegalArgumentException("style cannot be null."); this.style = style; invalidateHierarchy(); } /** Returns the progress bar's style. Modifying the returned style may not have an effect until * {@link #setStyle(ProgressBarStyle)} is called. */ public ProgressBarStyle getStyle () { return style; } @Override public void act (float delta) { super.act(delta); if (animateTime > 0) { animateTime -= delta; Stage stage = getStage(); if (stage != null && stage.getActionsRequestRendering()) Gdx.graphics.requestRendering(); } } @Override public void draw (Batch batch, float parentAlpha) { ProgressBarStyle style = this.style; boolean disabled = this.disabled; final Drawable knob = getKnobDrawable(); final Drawable bg = (disabled && style.disabledBackground != null) ? style.disabledBackground : style.background; final Drawable knobBefore = (disabled && style.disabledKnobBefore != null) ? style.disabledKnobBefore : style.knobBefore; final Drawable knobAfter = (disabled && style.disabledKnobAfter != null) ? style.disabledKnobAfter : style.knobAfter; Color color = getColor(); float x = getX(); float y = getY(); float width = getWidth(); float height = getHeight(); float knobHeight = knob == null ? 0 : knob.getMinHeight(); float knobWidth = knob == null ? 0 : knob.getMinWidth(); float percent = getVisualPercent(); batch.setColor(color.r, color.g, color.b, color.a * parentAlpha); if (vertical) { float positionHeight = height; float bgTopHeight = 0; if (bg != null) { bg.draw(batch, x + (int)((width - bg.getMinWidth()) * 0.5f), y, bg.getMinWidth(), height); bgTopHeight = bg.getTopHeight(); positionHeight -= bgTopHeight + bg.getBottomHeight(); } float knobHeightHalf = 0; if (min != max) { if (knob == null) { knobHeightHalf = knobBefore == null ? 
0 : knobBefore.getMinHeight() * 0.5f; position = (positionHeight - knobHeightHalf) * percent; position = Math.min(positionHeight - knobHeightHalf, position); } else { knobHeightHalf = knobHeight * 0.5f; position = (positionHeight - knobHeight) * percent; position = Math.min(positionHeight - knobHeight, position) + bg.getBottomHeight(); } position = Math.max(0, position); } if (knobBefore != null) { float offset = 0; if (bg != null) offset = bgTopHeight; knobBefore.draw(batch, x + (int)((width - knobBefore.getMinWidth()) * 0.5f), y + offset, knobBefore.getMinWidth(), (int)(position + knobHeightHalf)); } if (knobAfter != null) { knobAfter.draw(batch, x + (int)((width - knobAfter.getMinWidth()) * 0.5f), y + (int)(position + knobHeightHalf), knobAfter.getMinWidth(), height - (int)(position + knobHeightHalf)); } if (knob != null) knob.draw(batch, x + (int)((width - knobWidth) * 0.5f), (int)(y + position), knobWidth, knobHeight); } else { float positionWidth = width; float bgLeftWidth = 0; if (bg != null) { bg.draw(batch, x, y + (int)((height - bg.getMinHeight()) * 0.5f), width, bg.getMinHeight()); bgLeftWidth = bg.getLeftWidth(); positionWidth -= bgLeftWidth + bg.getRightWidth(); } float knobWidthHalf = 0; if (min != max) { if (knob == null) { knobWidthHalf = knobBefore == null ? 0 : knobBefore.getMinWidth() * 0.5f; position = (positionWidth - knobWidthHalf) * percent; position = Math.min(positionWidth - knobWidthHalf, position); } else { knobWidthHalf = knobWidth * 0.5f; position = (positionWidth - knobWidth) * percent; position = Math.min(positionWidth - knobWidth, position) + bgLeftWidth; } position = Math.max(0, position); } if (knobBefore != null) { float offset = 0; if (bg != null) offset = bgLeftWidth; knobBefore.draw(batch, x + offset, y + (int)((height - knobBefore.getMinHeight()) * 0.5f), (int)(position + knobWidthHalf), knobBefore.getMinHeight()); } if (knobAfter != null) { knobAfter.draw(batch, x + (int)(position + knobWidthHalf), y + (int)((height - knobAfter.getMinHeight()) * 0.5f), width - (int)(position + knobWidthHalf), knobAfter.getMinHeight()); } if (knob != null) knob.draw(batch, (int)(x + position), (int)(y + (height - knobHeight) * 0.5f), knobWidth, knobHeight); } } public float getValue () { return value; } /** If {@link #setAnimateDuration(float) animating} the progress bar value, this returns the value current displayed. */ public float getVisualValue () { if (animateTime > 0) return animateInterpolation.apply(animateFromValue, value, 1 - animateTime / animateDuration); return value; } public float getPercent () { return (value - min) / (max - min); } public float getVisualPercent () { return visualInterpolation.apply((getVisualValue() - min) / (max - min)); } protected Drawable getKnobDrawable () { return (disabled && style.disabledKnob != null) ? style.disabledKnob : style.knob; } /** Returns progress bar visual position within the range. */ protected float getKnobPosition () { return this.position; } /** Sets the progress bar position, rounded to the nearest step size and clamped to the minimum and maximum values. * {@link #clamp(float)} can be overridden to allow values outside of the progress bar's min/max range. * @return false if the value was not changed because the progress bar already had the value or it was canceled by a * listener. 
*/ public boolean setValue (float value) { value = clamp(Math.round(value / stepSize) * stepSize); if (!shiftIgnoresSnap || (!Gdx.input.isKeyPressed(Keys.SHIFT_LEFT) && !Gdx.input.isKeyPressed(Keys.SHIFT_RIGHT))) value = snap(value); float oldValue = this.value; if (value == oldValue) return false; float oldVisualValue = getVisualValue(); this.value = value; ChangeEvent changeEvent = Pools.obtain(ChangeEvent.class); boolean cancelled = fire(changeEvent); if (cancelled) this.value = oldValue; else if (animateDuration > 0) { animateFromValue = oldVisualValue; animateTime = animateDuration; } Pools.free(changeEvent); return !cancelled; } /** Clamps the value to the progress bar's min/max range. This can be overridden to allow a range different from the progress * bar knob's range. */ protected float clamp (float value) { return MathUtils.clamp(value, min, max); } /** Sets the range of this progress bar. The progress bar's current value is clamped to the range. */ public void setRange (float min, float max) { if (min > max) throw new IllegalArgumentException("min must be <= max"); this.min = min; this.max = max; if (value < min) setValue(min); else if (value > max) setValue(max); } public void setStepSize (float stepSize) { if (stepSize <= 0) throw new IllegalArgumentException("steps must be > 0: " + stepSize); this.stepSize = stepSize; } public float getPrefWidth () { if (vertical) { final Drawable knob = getKnobDrawable(); final Drawable bg = (disabled && style.disabledBackground != null) ? style.disabledBackground : style.background; return Math.max(knob == null ? 0 : knob.getMinWidth(), bg.getMinWidth()); } else return 140; } public float getPrefHeight () { if (vertical) return 140; else { final Drawable knob = getKnobDrawable(); final Drawable bg = (disabled && style.disabledBackground != null) ? style.disabledBackground : style.background; return Math.max(knob == null ? 0 : knob.getMinHeight(), bg == null ? 0 : bg.getMinHeight()); } } public float getMinValue () { return this.min; } public float getMaxValue () { return this.max; } public float getStepSize () { return this.stepSize; } /** If > 0, changes to the progress bar value via {@link #setValue(float)} will happen over this duration in seconds. */ public void setAnimateDuration (float duration) { this.animateDuration = duration; } /** Sets the interpolation to use for {@link #setAnimateDuration(float)}. */ public void setAnimateInterpolation (Interpolation animateInterpolation) { if (animateInterpolation == null) throw new IllegalArgumentException("animateInterpolation cannot be null."); this.animateInterpolation = animateInterpolation; } /** Sets the interpolation to use for display. */ public void setVisualInterpolation (Interpolation interpolation) { this.visualInterpolation = interpolation; } /** Will make this progress bar snap to the specified values, if the knob is within the threshold. * @param values May be null. */ public void setSnapToValues (float[] values, float threshold) { this.snapValues = values; this.threshold = threshold; } /** Returns a snapped value. */ private float snap (float value) { if (snapValues == null) return value; for (int i = 0; i < snapValues.length; i++) { if (Math.abs(value - snapValues[i]) <= threshold) return snapValues[i]; } return value; } public void setDisabled (boolean disabled) { this.disabled = disabled; } public boolean isDisabled () { return disabled; } /** The style for a progress bar, see {@link ProgressBar}. 
* @author mzechner * @author Nathan Sweet */ static public class ProgressBarStyle { /** The progress bar background, stretched only in one direction. Optional. */ public Drawable background; /** Optional. **/ public Drawable disabledBackground; /** Optional, centered on the background. */ public Drawable knob, disabledKnob; /** Optional. */ public Drawable knobBefore, knobAfter, disabledKnobBefore, disabledKnobAfter; public ProgressBarStyle () { } public ProgressBarStyle (Drawable background, Drawable knob) { this.background = background; this.knob = knob; } public ProgressBarStyle (ProgressBarStyle style) { this.background = style.background; this.disabledBackground = style.disabledBackground; this.knob = style.knob; this.disabledKnob = style.disabledKnob; this.knobBefore = style.knobBefore; this.knobAfter = style.knobAfter; this.disabledKnobBefore = style.disabledKnobBefore; this.disabledKnobAfter = style.disabledKnobAfter; } } }
gdx/src/com/badlogic/gdx/scenes/scene2d/ui/ProgressBar.java
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.scenes.scene2d.ui; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Input.Keys; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.NinePatch; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.math.Interpolation; import com.badlogic.gdx.math.MathUtils; import com.badlogic.gdx.scenes.scene2d.Stage; import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener.ChangeEvent; import com.badlogic.gdx.scenes.scene2d.utils.Disableable; import com.badlogic.gdx.scenes.scene2d.utils.Drawable; import com.badlogic.gdx.utils.Pools; /** A progress bar is a widget that visually displays the progress of some activity or a value within given range. The progress bar * has a range (min, max) and a stepping between each value it represents. The percentage of completeness typically starts out as * an empty progress bar and gradually becomes filled in as the task or variable value progresses. * <p> * {@link ChangeEvent} is fired when the progress bar knob is moved. Cancelling the event will move the knob to where it was * previously. * <p> * The preferred height of a progress bar is determined by the larger of the knob and background. The preferred width of progress * bar is 140, a relatively arbitrary size. * @author mzechner * @author Nathan Sweet */ public class ProgressBar extends Widget implements Disableable { private ProgressBarStyle style; private float min, max, stepSize; private float value, animateFromValue; float position; final boolean vertical; private float animateDuration, animateTime; private Interpolation animateInterpolation = Interpolation.linear; private float[] snapValues; private float threshold; boolean disabled; boolean shiftIgnoresSnap; private Interpolation visualInterpolation = Interpolation.linear; public ProgressBar (float min, float max, float stepSize, boolean vertical, Skin skin) { this(min, max, stepSize, vertical, skin.get("default-" + (vertical ? "vertical" : "horizontal"), ProgressBarStyle.class)); } public ProgressBar (float min, float max, float stepSize, boolean vertical, Skin skin, String styleName) { this(min, max, stepSize, vertical, skin.get(styleName, ProgressBarStyle.class)); } /** Creates a new progress bar. It's width is determined by the given prefWidth parameter, its height is determined by the * maximum of the height of either the progress bar {@link NinePatch} or progress bar handle {@link TextureRegion}. The min and * max values determine the range the values of this progress bar can take on, the stepSize parameter specifies the distance * between individual values. * <p> * E.g. min could be 4, max could be 10 and stepSize could be 0.2, giving you a total of 30 values, 4.0 4.2, 4.4 and so on. 
* @param min the minimum value * @param max the maximum value * @param stepSize the step size between values * @param style the {@link ProgressBarStyle} */ public ProgressBar (float min, float max, float stepSize, boolean vertical, ProgressBarStyle style) { if (min > max) throw new IllegalArgumentException("max must be > min. min,max: " + min + ", " + max); if (stepSize <= 0) throw new IllegalArgumentException("stepSize must be > 0: " + stepSize); setStyle(style); this.min = min; this.max = max; this.stepSize = stepSize; this.vertical = vertical; this.value = min; setSize(getPrefWidth(), getPrefHeight()); } public void setStyle (ProgressBarStyle style) { if (style == null) throw new IllegalArgumentException("style cannot be null."); this.style = style; invalidateHierarchy(); } /** Returns the progress bar's style. Modifying the returned style may not have an effect until * {@link #setStyle(ProgressBarStyle)} is called. */ public ProgressBarStyle getStyle () { return style; } @Override public void act (float delta) { super.act(delta); if (animateTime > 0) { animateTime -= delta; Stage stage = getStage(); if (stage != null && stage.getActionsRequestRendering()) Gdx.graphics.requestRendering(); } } @Override public void draw (Batch batch, float parentAlpha) { ProgressBarStyle style = this.style; boolean disabled = this.disabled; final Drawable knob = getKnobDrawable(); final Drawable bg = (disabled && style.disabledBackground != null) ? style.disabledBackground : style.background; final Drawable knobBefore = (disabled && style.disabledKnobBefore != null) ? style.disabledKnobBefore : style.knobBefore; final Drawable knobAfter = (disabled && style.disabledKnobAfter != null) ? style.disabledKnobAfter : style.knobAfter; Color color = getColor(); float x = getX(); float y = getY(); float width = getWidth(); float height = getHeight(); float knobHeight = knob == null ? 0 : knob.getMinHeight(); float knobWidth = knob == null ? 0 : knob.getMinWidth(); float percent = getVisualPercent(); batch.setColor(color.r, color.g, color.b, color.a * parentAlpha); if (vertical) { float positionHeight = height; float bgTopHeight = 0; if (bg != null) { bg.draw(batch, x + (int)((width - bg.getMinWidth()) * 0.5f), y, bg.getMinWidth(), height); bgTopHeight = bg.getTopHeight(); positionHeight -= bgTopHeight + bg.getBottomHeight(); } float knobHeightHalf = 0; if (min != max) { if (knob == null) { knobHeightHalf = knobBefore == null ? 
0 : knobBefore.getMinHeight() * 0.5f; position = (positionHeight - knobHeightHalf) * percent; position = Math.min(positionHeight - knobHeightHalf, position); } else { knobHeightHalf = knobHeight * 0.5f; position = (positionHeight - knobHeight) * percent; position = Math.min(positionHeight - knobHeight, position) + bg.getBottomHeight(); } position = Math.max(0, position); } if (knobBefore != null) { float offset = 0; if (bg != null) offset = bgTopHeight; knobBefore.draw(batch, x + (int)((width - knobBefore.getMinWidth()) * 0.5f), y + offset, knobBefore.getMinWidth(), (int)(position + knobHeightHalf)); } if (knobAfter != null) { knobAfter.draw(batch, x + (int)((width - knobAfter.getMinWidth()) * 0.5f), y + (int)(position + knobHeightHalf), knobAfter.getMinWidth(), height - (int)(position + knobHeightHalf)); } if (knob != null) knob.draw(batch, x + (int)((width - knobWidth) * 0.5f), (int)(y + position), knobWidth, knobHeight); } else { float positionWidth = width; float bgLeftWidth = 0; if (bg != null) { bg.draw(batch, x, y + (int)((height - bg.getMinHeight()) * 0.5f), width, bg.getMinHeight()); bgLeftWidth = bg.getLeftWidth(); positionWidth -= bgLeftWidth + bg.getRightWidth(); } float knobWidthHalf = 0; if (min != max) { if (knob == null) { knobWidthHalf = knobBefore == null ? 0 : knobBefore.getMinWidth() * 0.5f; position = (positionWidth - knobWidthHalf) * percent; position = Math.min(positionWidth - knobWidthHalf, position); } else { knobWidthHalf = knobWidth * 0.5f; position = (positionWidth - knobWidth) * percent; position = Math.min(positionWidth - knobWidth, position) + bgLeftWidth; } position = Math.max(0, position); } if (knobBefore != null) { float offset = 0; if (bg != null) offset = bgLeftWidth; knobBefore.draw(batch, x + offset, y + (int)((height - knobBefore.getMinHeight()) * 0.5f), (int)(position + knobWidthHalf), knobBefore.getMinHeight()); } if (knobAfter != null) { knobAfter.draw(batch, x + (int)(position + knobWidthHalf), y + (int)((height - knobAfter.getMinHeight()) * 0.5f), width - (int)(position + knobWidthHalf), knobAfter.getMinHeight()); } if (knob != null) knob.draw(batch, (int)(x + position), (int)(y + (height - knobHeight) * 0.5f), knobWidth, knobHeight); } } public float getValue () { return value; } /** If {@link #setAnimateDuration(float) animating} the progress bar value, this returns the value current displayed. */ public float getVisualValue () { if (animateTime > 0) return animateInterpolation.apply(animateFromValue, value, 1 - animateTime / animateDuration); return value; } public float getPercent () { return (value - min) / (max - min); } public float getVisualPercent () { return visualInterpolation.apply((getVisualValue() - min) / (max - min)); } protected Drawable getKnobDrawable () { return (disabled && style.disabledKnob != null) ? style.disabledKnob : style.knob; } /** Returns progress bar visual position within the range. */ protected float getKnobPosition () { return this.position; } /** Sets the progress bar position, rounded to the nearest step size and clamped to the minimum and maximum values. * {@link #clamp(float)} can be overridden to allow values outside of the progress bar's min/max range. * @return false if the value was not changed because the progress bar already had the value or it was canceled by a listener. 
*/ public boolean setValue (float value) { value = clamp(Math.round(value / stepSize) * stepSize); if (!shiftIgnoresSnap || (!Gdx.input.isKeyPressed(Keys.SHIFT_LEFT) && !Gdx.input.isKeyPressed(Keys.SHIFT_RIGHT))) value = snap(value); float oldValue = this.value; if (value == oldValue) return false; float oldVisualValue = getVisualValue(); this.value = value; ChangeEvent changeEvent = Pools.obtain(ChangeEvent.class); boolean cancelled = fire(changeEvent); if (cancelled) this.value = oldValue; else if (animateDuration > 0) { animateFromValue = oldVisualValue; animateTime = animateDuration; } Pools.free(changeEvent); return !cancelled; } /** Clamps the value to the progress bar's min/max range. This can be overridden to allow a range different from the progress * bar knob's range. */ protected float clamp (float value) { return MathUtils.clamp(value, min, max); } /** Sets the range of this progress bar. The progress bar's current value is clamped to the range. */ public void setRange (float min, float max) { if (min > max) throw new IllegalArgumentException("min must be <= max"); this.min = min; this.max = max; if (value < min) setValue(min); else if (value > max) setValue(max); } public void setStepSize (float stepSize) { if (stepSize <= 0) throw new IllegalArgumentException("steps must be > 0: " + stepSize); this.stepSize = stepSize; } public float getPrefWidth () { if (vertical) { final Drawable knob = getKnobDrawable(); final Drawable bg = (disabled && style.disabledBackground != null) ? style.disabledBackground : style.background; return Math.max(knob == null ? 0 : knob.getMinWidth(), bg.getMinWidth()); } else return 140; } public float getPrefHeight () { if (vertical) return 140; else { final Drawable knob = getKnobDrawable(); final Drawable bg = (disabled && style.disabledBackground != null) ? style.disabledBackground : style.background; return Math.max(knob == null ? 0 : knob.getMinHeight(), bg == null ? 0 : bg.getMinHeight()); } } public float getMinValue () { return this.min; } public float getMaxValue () { return this.max; } public float getStepSize () { return this.stepSize; } /** If > 0, changes to the progress bar value via {@link #setValue(float)} will happen over this duration in seconds. */ public void setAnimateDuration (float duration) { this.animateDuration = duration; } /** Sets the interpolation to use for {@link #setAnimateDuration(float)}. */ public void setAnimateInterpolation (Interpolation animateInterpolation) { if (animateInterpolation == null) throw new IllegalArgumentException("animateInterpolation cannot be null."); this.animateInterpolation = animateInterpolation; } /** Sets the interpolation to use for display. */ public void setVisualInterpolation (Interpolation interpolation) { this.visualInterpolation = interpolation; } /** Will make this progress bar snap to the specified values, if the knob is within the threshold. */ public void setSnapToValues (float[] values, float threshold) { this.snapValues = values; this.threshold = threshold; } /** Returns a snapped value. */ private float snap (float value) { if (snapValues == null) return value; for (int i = 0; i < snapValues.length; i++) { if (Math.abs(value - snapValues[i]) <= threshold) return snapValues[i]; } return value; } public void setDisabled (boolean disabled) { this.disabled = disabled; } public boolean isDisabled () { return disabled; } /** The style for a progress bar, see {@link ProgressBar}. 
* @author mzechner * @author Nathan Sweet */ static public class ProgressBarStyle { /** The progress bar background, stretched only in one direction. Optional. */ public Drawable background; /** Optional. **/ public Drawable disabledBackground; /** Optional, centered on the background. */ public Drawable knob, disabledKnob; /** Optional. */ public Drawable knobBefore, knobAfter, disabledKnobBefore, disabledKnobAfter; public ProgressBarStyle () { } public ProgressBarStyle (Drawable background, Drawable knob) { this.background = background; this.knob = knob; } public ProgressBarStyle (ProgressBarStyle style) { this.background = style.background; this.disabledBackground = style.disabledBackground; this.knob = style.knob; this.disabledKnob = style.disabledKnob; this.knobBefore = style.knobBefore; this.knobAfter = style.knobAfter; this.disabledKnobBefore = style.disabledKnobBefore; this.disabledKnobAfter = style.disabledKnobAfter; } } }
Javadoc.
gdx/src/com/badlogic/gdx/scenes/scene2d/ui/ProgressBar.java
Javadoc.
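The Javadoc touched by this commit describes the min/max/stepSize contract and the ChangeEvent fired when the value moves. A short usage sketch follows; it assumes a running libgdx application with a Stage and a Skin that defines the default horizontal ProgressBarStyle, and the method and variable names are illustrative only.

import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.ProgressBar;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;

/** Sketch only: requires a live libgdx application, a Stage, and a Skin that
 *  provides the "default-horizontal" ProgressBarStyle looked up by the constructor. */
final class ProgressBarUsageSketch {
    static ProgressBar addLoadingBar(Stage stage, Skin skin) {
        // range 0..100 with step 1, horizontal; setValue() rounds to the step size
        ProgressBar bar = new ProgressBar(0f, 100f, 1f, false, skin);
        bar.setAnimateDuration(0.25f);   // visual value eases toward the target over 0.25 s
        bar.addListener(new ChangeListener() {
            @Override
            public void changed(ChangeEvent event, Actor actor) {
                // fired from setValue(); cancelling the event would revert the value
                System.out.println("progress: " + bar.getValue());
            }
        });
        stage.addActor(bar);
        return bar;
    }
}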
Java
apache-2.0
05cd0d33c9d73d3488bcee36431ab8e94f9b98ba
0
jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics
/* * Java Genetic Algorithm Library (@!identifier!@). * Copyright (c) @!year!@ Franz Wilhelmstötter * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * * Author: * Franz Wilhelmstötter ([email protected]) * */ package org.jenetics; import java.io.IOException; import java.util.Random; import javolution.context.ObjectFactory; import javolution.lang.Realtime; import javolution.text.Text; import javolution.text.TextFormat; import javolution.xml.XMLFormat; import javolution.xml.XMLSerializable; import javolution.xml.stream.XMLStreamException; import org.jenetics.util.Factory; import org.jenetics.util.Mean; import org.jenetics.util.RandomRegistry; import org.jenetics.util.Validator; /** * @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a> * @version $Id$ */ public final class EnumGene<E extends Enum<E>> implements Gene<E, EnumGene<E>>, Mean<EnumGene<E>>, Realtime, XMLSerializable { private static final long serialVersionUID = 1L; private E _value; EnumGene() { } @Override public E getAllele() { return _value; } /** * Return the gene which lies between {@code this} and {@code that}, * according to it's ordinal number ({@link Enum#ordinal()}). * * @return the gene which lies between {@code this} and {@code that}. */ @Override public EnumGene<E> mean(final EnumGene<E> that) { Validator.nonNull(that, "Enum value"); if (that._value == _value) { return that; } else { final Class<?> type = that.getClass(); @SuppressWarnings("unchecked") final E[] values = (E[])type.getEnumConstants(); final int ordinal = (that.getAllele().ordinal() + _value.ordinal())/2; return newInstance(values[ordinal]); } } /** * @return always {@code true}. */ @Override public boolean isValid() { return true; } @Override public EnumGene<E> copy() { return valueOf(_value); } @Override public EnumGene<E> newInstance() { final Random random = RandomRegistry.getRandom(); final E[] values = _value.getDeclaringClass().getEnumConstants(); final int index = random.nextInt(values.length); return valueOf(values[index]); } /** * Return the {@link Factory} view of this gene. * * @return the {@link Factory} view of this gene. 
*/ Factory<EnumGene<E>> asFactory() { return this; } @Override public int hashCode() { int hash = 17; if (_value != null) { hash += 37*_value.ordinal() + 17; } return hash; } @Override public boolean equals(final Object obj) { if (obj == this) { return true; } if (!(obj instanceof EnumGene<?>)) { return false; } EnumGene<?> gene = (EnumGene<?>)obj; if (gene._value != null) { return gene._value.equals(_value); } else { return _value != null && _value.equals(gene._value); } } @SuppressWarnings({"rawtypes" }) private static final ObjectFactory<EnumGene> FACTORY = new ObjectFactory<EnumGene>() { @Override protected EnumGene<?> create() { return new EnumGene(); } }; static <T extends Enum<T>> EnumGene<T> newInstance(final T value) { @SuppressWarnings("unchecked") EnumGene<T> e = FACTORY.object(); e._value = value; return e; } public static <T extends Enum<T>> EnumGene<T> valueOf(final T value) { Validator.nonNull(value, "Enum value"); return newInstance(value); } @Override public Text toText() { return Text.valueOf(_value.toString()); } static final TextFormat<String> STRING_FORMAT = new TextFormat<String>() { @Override public Appendable format(final String value, final Appendable appendable) throws IOException { return appendable.append(value); } @Override public String parse(final CharSequence seq, final TextFormat.Cursor curs) { return seq.toString(); } }; @SuppressWarnings({ "unchecked", "rawtypes"}) static final XMLFormat<EnumGene> XML = new XMLFormat<EnumGene>(EnumGene.class) { private static final String TYPE = "type"; @Override public EnumGene newInstance( final Class<EnumGene> cls, final InputElement xml ) throws XMLStreamException { try { final String typeName = xml.getAttribute(TYPE, ""); final String value = xml.getText().toString(); final Class<Enum> type = (Class<Enum>)Class.forName(typeName); return EnumGene.valueOf(Enum.valueOf(type, value)); } catch (ClassNotFoundException e) { throw new XMLStreamException(e); } } @Override public void write(final EnumGene gene, final OutputElement xml) throws XMLStreamException { xml.setAttribute(TYPE, gene._value.getClass().getName()); xml.addText(gene._value.name()); } @Override public void read(final InputElement element, final EnumGene gene) { } }; }
src/main/java/org/jenetics/EnumGene.java
/* * Java Genetic Algorithm Library (@!identifier!@). * Copyright (c) @!year!@ Franz Wilhelmstötter * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * * Author: * Franz Wilhelmstötter ([email protected]) * */ package org.jenetics; import java.io.IOException; import java.util.Random; import javolution.context.ObjectFactory; import javolution.lang.Realtime; import javolution.text.Text; import javolution.text.TextFormat; import javolution.xml.XMLFormat; import javolution.xml.XMLSerializable; import javolution.xml.stream.XMLStreamException; import org.jenetics.util.Factory; import org.jenetics.util.Mean; import org.jenetics.util.RandomRegistry; import org.jenetics.util.Validator; /** * @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a> * @version $Id$ */ public final class EnumGene<E extends Enum<E>> implements Gene<E, EnumGene<E>>, Mean<EnumGene<E>>, Realtime, XMLSerializable { private static final long serialVersionUID = 1L; private E _value; EnumGene() { } @Override public E getAllele() { return _value; } /** * Return the gene which lies between {@code this} and {@code that}, * according to it's ordinal number ({@link Enum#ordinal()}). * * @return the gene which lies between {@code this} and {@code that}. */ @Override public EnumGene<E> mean(final EnumGene<E> that) { Validator.nonNull(that, "Enum value"); if (that._value == _value) { return that; } else { final Class<?> type = that.getClass(); @SuppressWarnings("unchecked") final E[] values = (E[])type.getEnumConstants(); final int ordinal = (that.getAllele().ordinal() + _value.ordinal())/2; return newInstance(values[ordinal]); } } @Override public boolean isValid() { return true; } @Override public EnumGene<E> copy() { return valueOf(_value); } @Override public EnumGene<E> newInstance() { final Random random = RandomRegistry.getRandom(); final E[] values = _value.getDeclaringClass().getEnumConstants(); final int index = random.nextInt(values.length); return valueOf(values[index]); } /** * Return the {@link Factory} view of this gene. * * @return the {@link Factory} view of this gene. 
*/ Factory<EnumGene<E>> asFactory() { return this; } @Override public int hashCode() { int hash = 17; if (_value != null) { hash += 37*_value.ordinal() + 17; } return hash; } @Override public boolean equals(final Object obj) { if (obj == this) { return true; } if (!(obj instanceof EnumGene<?>)) { return false; } EnumGene<?> gene = (EnumGene<?>)obj; if (gene._value != null) { return gene._value.equals(_value); } else { return _value != null && _value.equals(gene._value); } } @SuppressWarnings({"rawtypes" }) private static final ObjectFactory<EnumGene> FACTORY = new ObjectFactory<EnumGene>() { @Override protected EnumGene<?> create() { return new EnumGene(); } }; static <T extends Enum<T>> EnumGene<T> newInstance(final T value) { @SuppressWarnings("unchecked") EnumGene<T> e = FACTORY.object(); e._value = value; return e; } public static <T extends Enum<T>> EnumGene<T> valueOf(final T value) { Validator.nonNull(value, "Enum value"); return newInstance(value); } @Override public Text toText() { return Text.valueOf(_value.toString()); } static final TextFormat<String> STRING_FORMAT = new TextFormat<String>() { @Override public Appendable format(final String value, final Appendable appendable) throws IOException { return appendable.append(value); } @Override public String parse(final CharSequence seq, final TextFormat.Cursor curs) { return seq.toString(); } }; @SuppressWarnings({ "unchecked", "rawtypes"}) static final XMLFormat<EnumGene> XML = new XMLFormat<EnumGene>(EnumGene.class) { private static final String TYPE = "type"; @Override public EnumGene newInstance( final Class<EnumGene> cls, final InputElement xml ) throws XMLStreamException { try { final String typeName = xml.getAttribute(TYPE, ""); final String value = xml.getText().toString(); final Class<Enum> type = (Class<Enum>)Class.forName(typeName); return EnumGene.valueOf(Enum.valueOf(type, value)); } catch (ClassNotFoundException e) { throw new XMLStreamException(e); } } @Override public void write(final EnumGene gene, final OutputElement xml) throws XMLStreamException { xml.setAttribute(TYPE, gene._value.getClass().getName()); xml.addText(gene._value.name()); } @Override public void read(final InputElement element, final EnumGene gene) { } }; }
Improve javadoc.
src/main/java/org/jenetics/EnumGene.java
Improve javadoc.
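The `EnumGene` class in the jenetics record above wraps a Java enum constant as a gene: `valueOf` is the public factory (the constructor is package-private), `getAllele` returns the wrapped constant, and `newInstance` draws a uniformly random constant of the same enum type via `RandomRegistry`. A minimal usage sketch, assuming this version of the library is on the classpath; the `Suit` enum is a hypothetical example type, not part of the record:

```java
import org.jenetics.EnumGene;

public class EnumGeneExample {

    // Hypothetical example enum; EnumGene works with any E extends Enum<E>.
    enum Suit { CLUBS, DIAMONDS, HEARTS, SPADES }

    public static void main(String[] args) {
        // valueOf is the factory method shown in the record.
        EnumGene<Suit> gene = EnumGene.valueOf(Suit.HEARTS);

        // getAllele returns the wrapped enum constant.
        System.out.println(gene.getAllele());            // HEARTS

        // newInstance picks a random constant of the same enum type,
        // so the printed value varies between runs.
        System.out.println(gene.newInstance().getAllele());
    }
}
```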
Java
apache-2.0
f9fa85862d0380c468227b7f2268fc47a34bb831
0
x-meta/x-meta
package org.xmeta.util;

import ognl.Ognl;
import ognl.OgnlException;

import org.xmeta.Thing;

public class OgnlUtil {
    private static String CACHE = "__ognl_attrPathCache_";

    /**
     * Gets a value through an OGNL expression; the thing's attribute holds the OGNL
     * expression. Used this way, the parsed OGNL expression is cached on the thing.
     *
     * @param thing the thing whose attribute contains the OGNL expression
     * @param pathAttributeName the name of the attribute that holds the expression
     * @param root the root object the expression is evaluated against
     * @return the evaluated value
     * @throws OgnlException
     */
    public static Object getValue(Thing thing, String pathAttributeName, Object root) throws OgnlException{
        return getValue(thing, pathAttributeName, thing.getString(pathAttributeName), root);
    }

    /**
     * Gets a value through an OGNL expression; the thing's attribute holds the OGNL
     * expression. Used this way, the parsed OGNL expression is cached on the thing.
     *
     * @param thing the thing whose attribute contains the OGNL expression
     * @param pathAttributeName the name of the attribute that holds the expression
     * @param pathAttributeValue the attribute value; if it is null or empty, null is returned
     * @param root the root object the expression is evaluated against
     * @return the evaluated value
     * @throws OgnlException
     */
    public static Object getValue(Thing thing, String pathAttributeName, String pathAttributeValue, Object root) throws OgnlException{
        if(pathAttributeValue == null || "".equals(pathAttributeValue)){
            return null;
        }

        String key = CACHE + pathAttributeName;
        PathCache pathCache = (PathCache) thing.getData(key);
        if(pathCache == null || pathCache.lastModified != thing.getMetadata().getLastModified()){
            if(pathCache == null){
                pathCache = new PathCache();
                thing.setData(key, pathCache);
            }

            pathCache.lastModified = thing.getMetadata().getLastModified();
            pathCache.expression = Ognl.parseExpression(pathAttributeValue);
        }

        return Ognl.getValue(pathCache.expression, root);
    }

    static class PathCache{
        // Last-modified timestamp of the thing when the expression was cached
        long lastModified;

        // The parsed OGNL expression
        Object expression;
    }
}
org.xmeta.engine/src/main/java/org/xmeta/util/OgnlUtil.java
package org.xmeta.util;

import ognl.Ognl;
import ognl.OgnlException;

import org.xmeta.Thing;

public class OgnlUtil {
    private static String CACHE = "__ognl_attrPathCache_";

    /**
     * Gets a value through an OGNL expression; the thing's attribute holds the OGNL
     * expression. Used this way, the parsed OGNL expression is cached on the thing.
     *
     * @param thing the thing whose attribute contains the OGNL expression
     * @param pathAttributeName the name of the attribute that holds the expression
     * @param root the root object the expression is evaluated against
     * @return the evaluated value
     * @throws OgnlException
     */
    public static Object getValue(Thing thing, String pathAttributeName, Object root) throws OgnlException{
        return getValue(thing, pathAttributeName, thing.getString(pathAttributeName), root);
    }

    /**
     * Gets a value through an OGNL expression; the thing's attribute holds the OGNL
     * expression. Used this way, the parsed OGNL expression is cached on the thing.
     *
     * @param thing the thing whose attribute contains the OGNL expression
     * @param pathAttributeName the name of the attribute that holds the expression
     * @param pathAttributeValue the attribute value
     * @param root the root object the expression is evaluated against
     * @return the evaluated value
     * @throws OgnlException
     */
    public static Object getValue(Thing thing, String pathAttributeName, String pathAttributeValue, Object root) throws OgnlException{
        String key = CACHE + pathAttributeName;
        PathCache pathCache = (PathCache) thing.getData(key);
        if(pathCache == null || pathCache.lastModified != thing.getMetadata().getLastModified()){
            if(pathCache == null){
                pathCache = new PathCache();
                thing.setData(key, pathCache);
            }

            pathCache.lastModified = thing.getMetadata().getLastModified();
            pathCache.expression = Ognl.parseExpression(pathAttributeValue);
        }

        return Ognl.getValue(pathCache.expression, root);
    }

    static class PathCache{
        // Last-modified timestamp of the thing when the expression was cached
        long lastModified;

        // The parsed OGNL expression
        Object expression;
    }
}
Modified OgnlUtil
org.xmeta.engine/src/main/java/org/xmeta/util/OgnlUtil.java
Modified OgnlUtil
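The x-meta record above adds an empty-value guard to `OgnlUtil.getValue`, which parses the OGNL expression stored in one of the thing's attributes and caches the parsed expression on the `Thing` until its last-modified timestamp changes. A minimal caller sketch, assuming the classes from the record are available; the attribute name `valueExpr` and the root map are hypothetical:

```java
import java.util.HashMap;
import java.util.Map;

import ognl.OgnlException;

import org.xmeta.Thing;
import org.xmeta.util.OgnlUtil;

public class OgnlUtilExample {

    /**
     * Evaluates the OGNL expression stored in the thing's (hypothetical)
     * "valueExpr" attribute against a simple root map.
     */
    public static Object evaluate(Thing thing) throws OgnlException {
        // Root object the OGNL expression is evaluated against.
        Map<String, Object> root = new HashMap<String, Object>();
        root.put("user", "admin");

        // The first call parses and caches the expression on the thing;
        // subsequent calls reuse the cache until the thing is modified.
        return OgnlUtil.getValue(thing, "valueExpr", root);
    }
}
```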
Java
apache-2.0
9b1bf2de4a098a494142c27f0dc3f4d2b8327f80
0
chunlinyao/fop,chunlinyao/fop,apache/fop,apache/fop,chunlinyao/fop,apache/fop,apache/fop,chunlinyao/fop,chunlinyao/fop,apache/fop
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.fonts.truetype; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.apache.fontbox.cff.CFFStandardString; import org.apache.fontbox.cff.encoding.CFFEncoding; import org.apache.fop.fonts.MultiByteFont; import org.apache.fop.fonts.cff.CFFDataReader; import org.apache.fop.fonts.cff.CFFDataReader.CFFIndexData; import org.apache.fop.fonts.cff.CFFDataReader.DICTEntry; import org.apache.fop.fonts.cff.CFFDataReader.FDSelect; import org.apache.fop.fonts.cff.CFFDataReader.FontDict; import org.apache.fop.fonts.cff.CFFDataReader.Format0FDSelect; import org.apache.fop.fonts.cff.CFFDataReader.Format3FDSelect; /** * Reads an OpenType CFF file and generates a subset * The OpenType specification can be found at the Microsoft * Typography site: http://www.microsoft.com/typography/otspec/ */ public class OTFSubSetFile extends OTFFile { private byte[] output; private int currentPos = 0; private int realSize = 0; /** A map containing each glyph to be included in the subset * with their existing and new GID's **/ private LinkedHashMap<Integer, Integer> subsetGlyphs; /** A map of the new GID to SID used to construct the charset table **/ private LinkedHashMap<Integer, Integer> gidToSID; private CFFIndexData localIndexSubr; private CFFIndexData globalIndexSubr; /** List of subroutines to write to the local / global indexes in the subset font **/ private List<byte[]> subsetLocalIndexSubr; private List<byte[]> subsetGlobalIndexSubr; /** For fonts which have an FDSelect or ROS flag in Top Dict, this is used to store the * local subroutine indexes for each group as opposed to the above subsetLocalIndexSubr */ private ArrayList<List<byte[]>> fdSubrs; /** The subset FD Select table used to store the mappings between glyphs and their * associated FDFont object which point to a private dict and local subroutines. 
*/ private LinkedHashMap<Integer, FDIndexReference> subsetFDSelect; /** A list of unique subroutines from the global / local subroutine indexes */ private List<Integer> localUniques; private List<Integer> globalUniques; /** A store of the number of subroutines each global / local subroutine will store **/ private int subsetLocalSubrCount; private int subsetGlobalSubrCount; /** A list of char string data for each glyph to be stored in the subset font **/ private List<byte[]> subsetCharStringsIndex; /** The embedded name to change in the name table **/ private String embeddedName; /** An array used to hold the string index data for the subset font **/ private List<byte[]> stringIndexData = new ArrayList<byte[]>(); /** The CFF reader object used to read data and offsets from the original font file */ private CFFDataReader cffReader = null; /** The class used to represent this font **/ private MultiByteFont mbFont; /** The number of standard strings in CFF **/ private static final int NUM_STANDARD_STRINGS = 391; /** The operator used to identify a local subroutine reference */ private static final int LOCAL_SUBROUTINE = 10; /** The operator used to identify a global subroutine reference */ private static final int GLOBAL_SUBROUTINE = 29; public OTFSubSetFile() throws IOException { super(); } public void readFont(FontFileReader in, String embeddedName, String header, MultiByteFont mbFont) throws IOException { this.mbFont = mbFont; readFont(in, embeddedName, header, mbFont.getUsedGlyphs()); } /** * Reads and creates a subset of the font. * * @param in FontFileReader to read from * @param name Name to be checked for in the font file * @param header The header of the font file * @param glyphs Map of glyphs (glyphs has old index as (Integer) key and * new index as (Integer) value) * @throws IOException in case of an I/O problem */ void readFont(FontFileReader in, String embeddedName, String header, Map<Integer, Integer> usedGlyphs) throws IOException { fontFile = in; currentPos = 0; realSize = 0; this.embeddedName = embeddedName; //Sort by the new GID and store in a LinkedHashMap subsetGlyphs = sortByValue(usedGlyphs); output = new byte[in.getFileSize()]; initializeFont(in); cffReader = new CFFDataReader(fontFile); //Create the CIDFontType0C data createCFF(); } private LinkedHashMap<Integer, Integer> sortByValue(Map<Integer, Integer> map) { List<Entry<Integer, Integer>> list = new ArrayList<Entry<Integer, Integer>>(map.entrySet()); Collections.sort(list, new Comparator<Entry<Integer, Integer>>() { public int compare(Entry<Integer, Integer> o1, Entry<Integer, Integer> o2) { return ((Comparable<Integer>) o1.getValue()).compareTo(o2.getValue()); } }); LinkedHashMap<Integer, Integer> result = new LinkedHashMap<Integer, Integer>(); for (Entry<Integer, Integer> entry : list) { result.put(entry.getKey(), entry.getValue()); } return result; } private void createCFF() throws IOException { //Header writeBytes(cffReader.getHeader()); //Name Index writeIndex(Arrays.asList(embeddedName.getBytes())); //Keep offset of the topDICT so it can be updated once all data has been written int topDictOffset = currentPos; //Top DICT Index and Data byte[] topDictIndex = cffReader.getTopDictIndex().getByteData(); int offSize = topDictIndex[2]; writeBytes(topDictIndex, 0, 3 + (offSize * 2)); int topDictDataOffset = currentPos; writeTopDICT(); //Create the char string index data and related local / global subroutines if (cffReader.getFDSelect() == null) { createCharStringData(); } else { createCharStringDataCID(); } //If it is 
a CID-Keyed font, store each FD font and add each SID List<Integer> fontNameSIDs = null; List<Integer> subsetFDFonts = null; if (cffReader.getFDSelect() != null) { subsetFDFonts = getUsedFDFonts(); fontNameSIDs = storeFDStrings(subsetFDFonts); } //String index writeStringIndex(); //Global subroutine index writeIndex(subsetGlobalIndexSubr); //Encoding int encodingOffset = currentPos; writeEncoding(fileFont.getEncoding()); //Charset table int charsetOffset = currentPos; writeCharsetTable(cffReader.getFDSelect() != null); //FDSelect table int fdSelectOffset = currentPos; if (cffReader.getFDSelect() != null) { writeFDSelect(); } //Char Strings Index int charStringOffset = currentPos; writeIndex(subsetCharStringsIndex); if (cffReader.getFDSelect() == null) { //Keep offset to modify later with the local subroutine index offset int privateDictOffset = currentPos; writePrivateDict(); //Local subroutine index int localIndexOffset = currentPos; writeIndex(subsetLocalIndexSubr); //Update the offsets updateOffsets(topDictOffset, charsetOffset, charStringOffset, privateDictOffset, localIndexOffset, encodingOffset); } else { List<Integer> privateDictOffsets = writeCIDDictsAndSubrs(subsetFDFonts); int fdArrayOffset = writeFDArray(subsetFDFonts, privateDictOffsets, fontNameSIDs); updateCIDOffsets(topDictDataOffset, fdArrayOffset, fdSelectOffset, charsetOffset, charStringOffset, encodingOffset); } } private List<Integer> storeFDStrings(List<Integer> uniqueNewRefs) throws IOException { ArrayList<Integer> fontNameSIDs = new ArrayList<Integer>(); List<FontDict> fdFonts = cffReader.getFDFonts(); for (int i = 0; i < uniqueNewRefs.size(); i++) { FontDict fdFont = fdFonts.get(uniqueNewRefs.get(i)); byte[] fdFontByteData = fdFont.getByteData(); Map<String, DICTEntry> fdFontDict = cffReader.parseDictData(fdFontByteData); fontNameSIDs.add(stringIndexData.size() + NUM_STANDARD_STRINGS); stringIndexData.add(cffReader.getStringIndex().getValue(fdFontDict.get("FontName") .getOperands().get(0).intValue() - NUM_STANDARD_STRINGS)); } return fontNameSIDs; } private void writeBytes(byte[] out) { for (int i = 0; i < out.length; i++) { output[currentPos++] = out[i]; realSize++; } } private void writeBytes(byte[] out, int offset, int length) { for (int i = offset; i < offset + length; i++) { output[currentPos++] = out[i]; realSize++; } } private void writeEncoding(CFFEncoding encoding) throws IOException { LinkedHashMap<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); DICTEntry encodingEntry = topDICT.get("Encoding"); if (encodingEntry != null && encodingEntry.getOperands().get(0).intValue() != 0 && encodingEntry.getOperands().get(0).intValue() != 1) { writeByte(0); writeByte(gidToSID.size()); for (int gid : gidToSID.keySet()) { int code = encoding.getCode(gidToSID.get(gid)); writeByte(code); } } } private void writeTopDICT() throws IOException { LinkedHashMap<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); List<String> topDictStringEntries = Arrays.asList("version", "Notice", "Copyright", "FullName", "FamilyName", "Weight", "PostScript"); for (Map.Entry<String, DICTEntry> dictEntry : topDICT.entrySet()) { String dictKey = dictEntry.getKey(); DICTEntry entry = dictEntry.getValue(); //If the value is an SID, update the reference but keep the size the same if (dictKey.equals("ROS")) { writeROSEntry(entry); } else if (dictKey.equals("CIDCount")) { writeCIDCount(entry); } else if (topDictStringEntries.contains(dictKey)) { writeTopDictStringEntry(entry); } else { writeBytes(entry.getByteData()); } } } private 
void writeROSEntry(DICTEntry dictEntry) throws IOException { int sidA = dictEntry.getOperands().get(0).intValue(); if (sidA > 390) { stringIndexData.add(cffReader.getStringIndex().getValue(sidA - NUM_STANDARD_STRINGS)); } int sidAStringIndex = stringIndexData.size() + 390; int sidB = dictEntry.getOperands().get(1).intValue(); if (sidB > 390) { stringIndexData.add("Identity".getBytes()); } int sidBStringIndex = stringIndexData.size() + 390; byte[] cidEntryByteData = dictEntry.getByteData(); cidEntryByteData = updateOffset(cidEntryByteData, 0, dictEntry.getOperandLengths().get(0), sidAStringIndex); cidEntryByteData = updateOffset(cidEntryByteData, dictEntry.getOperandLengths().get(0), dictEntry.getOperandLengths().get(1), sidBStringIndex); cidEntryByteData = updateOffset(cidEntryByteData, dictEntry.getOperandLengths().get(0) + dictEntry.getOperandLengths().get(1), dictEntry.getOperandLengths().get(2), 139); writeBytes(cidEntryByteData); } private void writeCIDCount(DICTEntry dictEntry) throws IOException { byte[] cidCountByteData = dictEntry.getByteData(); cidCountByteData = updateOffset(cidCountByteData, 0, dictEntry.getOperandLengths().get(0), subsetGlyphs.size()); writeBytes(cidCountByteData); } private void writeTopDictStringEntry(DICTEntry dictEntry) throws IOException { int sid = dictEntry.getOperands().get(0).intValue(); if (sid > 391) { stringIndexData.add(cffReader.getStringIndex().getValue(sid - 391)); } byte[] newDictEntry = createNewRef(stringIndexData.size() + 390, dictEntry.getOperator(), dictEntry.getOperandLength()); writeBytes(newDictEntry); } private void writeStringIndex() throws IOException { Map<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); int charsetOffset = topDICT.get("charset").getOperands().get(0).intValue(); gidToSID = new LinkedHashMap<Integer, Integer>(); for (int gid : subsetGlyphs.keySet()) { int sid = cffReader.getSIDFromGID(charsetOffset, gid); //Check whether the SID falls into the standard string set if (sid < NUM_STANDARD_STRINGS) { gidToSID.put(subsetGlyphs.get(gid), sid); if (mbFont != null) { mbFont.mapUsedGlyphName(subsetGlyphs.get(gid), CFFStandardString.getName(sid)); } } else { int index = sid - NUM_STANDARD_STRINGS; if (index <= cffReader.getStringIndex().getNumObjects()) { if (mbFont != null) { mbFont.mapUsedGlyphName(subsetGlyphs.get(gid), new String(cffReader.getStringIndex().getValue(index))); } gidToSID.put(subsetGlyphs.get(gid), stringIndexData.size() + 391); stringIndexData.add(cffReader.getStringIndex().getValue(index)); } else { if (mbFont != null) { mbFont.mapUsedGlyphName(subsetGlyphs.get(gid), ".notdef"); } gidToSID.put(subsetGlyphs.get(gid), index); } } } //Write the String Index writeIndex(stringIndexData); } private void createCharStringDataCID() throws IOException { CFFIndexData charStringsIndex = cffReader.getCharStringIndex(); FDSelect fontDictionary = cffReader.getFDSelect(); if (fontDictionary instanceof Format0FDSelect) { throw new UnsupportedOperationException("OTF CFF CID Format0 currently not implemented"); } else if (fontDictionary instanceof Format3FDSelect) { Format3FDSelect fdSelect = (Format3FDSelect)fontDictionary; Map<Integer, Integer> subsetGroups = new HashMap<Integer, Integer>(); List<Integer> uniqueGroups = new ArrayList<Integer>(); for (int gid : subsetGlyphs.keySet()) { Integer[] ranges = fdSelect.getRanges().keySet().toArray(new Integer[0]); for (int i = 0; i < ranges.length; i++) { int nextRange = -1; if (i < ranges.length - 1) { nextRange = ranges[i + 1]; } else { nextRange = 
fdSelect.getSentinelGID(); } if (gid >= ranges[i] && gid < nextRange) { subsetGroups.put(gid, fdSelect.getRanges().get(ranges[i])); if (!uniqueGroups.contains(fdSelect.getRanges().get(ranges[i]))) { uniqueGroups.add(fdSelect.getRanges().get(ranges[i])); } } } } //Prepare resources globalIndexSubr = cffReader.getGlobalIndexSubr(); //Create the new char string index subsetCharStringsIndex = new ArrayList<byte[]>(); globalUniques = new ArrayList<Integer>(); subsetFDSelect = new LinkedHashMap<Integer, FDIndexReference>(); List<List<Integer>> foundLocalUniques = new ArrayList<List<Integer>>(); for (int i = 0; i < uniqueGroups.size(); i++) { foundLocalUniques.add(new ArrayList<Integer>()); } for (int gid : subsetGlyphs.keySet()) { int group = subsetGroups.get(gid); localIndexSubr = cffReader.getFDFonts().get(group).getLocalSubrData(); localUniques = foundLocalUniques.get(uniqueGroups.indexOf(subsetGroups.get(gid))); FDIndexReference newFDReference = new FDIndexReference( uniqueGroups.indexOf(subsetGroups.get(gid)), subsetGroups.get(gid)); subsetFDSelect.put(subsetGlyphs.get(gid), newFDReference); byte[] data = charStringsIndex.getValue(gid); preScanForSubsetIndexSize(data); } //Create the two lists which are to store the local and global subroutines subsetGlobalIndexSubr = new ArrayList<byte[]>(); fdSubrs = new ArrayList<List<byte[]>>(); subsetGlobalSubrCount = globalUniques.size(); globalUniques.clear(); localUniques = null; for (int l = 0; l < foundLocalUniques.size(); l++) { fdSubrs.add(new ArrayList<byte[]>()); } List<List<Integer>> foundLocalUniquesB = new ArrayList<List<Integer>>(); for (int k = 0; k < uniqueGroups.size(); k++) { foundLocalUniquesB.add(new ArrayList<Integer>()); } for (Integer gid : subsetGlyphs.keySet()) { int group = subsetGroups.get(gid); localIndexSubr = cffReader.getFDFonts().get(group).getLocalSubrData(); localUniques = foundLocalUniquesB.get(subsetFDSelect.get(subsetGlyphs.get(gid)).getNewFDIndex()); byte[] data = charStringsIndex.getValue(gid); subsetLocalIndexSubr = fdSubrs.get(subsetFDSelect.get(subsetGlyphs.get(gid)).getNewFDIndex()); subsetLocalSubrCount = foundLocalUniques.get(subsetFDSelect.get(subsetGlyphs.get(gid)).getNewFDIndex()).size(); data = readCharStringData(data, subsetLocalSubrCount); subsetCharStringsIndex.add(data); } } } private void writeFDSelect() { writeByte(0); //Format for (Integer gid : subsetFDSelect.keySet()) { writeByte(subsetFDSelect.get(gid).getNewFDIndex()); } } private List<Integer> getUsedFDFonts() { List<Integer> uniqueNewRefs = new ArrayList<Integer>(); for (int gid : subsetFDSelect.keySet()) { int fdIndex = subsetFDSelect.get(gid).getOldFDIndex(); if (!uniqueNewRefs.contains(fdIndex)) { uniqueNewRefs.add(fdIndex); } } return uniqueNewRefs; } private List<Integer> writeCIDDictsAndSubrs(List<Integer> uniqueNewRefs) throws IOException { List<Integer> privateDictOffsets = new ArrayList<Integer>(); List<FontDict> fdFonts = cffReader.getFDFonts(); for (int i = 0; i < uniqueNewRefs.size(); i++) { FontDict curFDFont = fdFonts.get(uniqueNewRefs.get(i)); HashMap<String, DICTEntry> fdPrivateDict = cffReader.parseDictData( curFDFont.getPrivateDictData()); int privateDictOffset = currentPos; privateDictOffsets.add(privateDictOffset); byte[] fdPrivateDictByteData = curFDFont.getPrivateDictData(); if (fdPrivateDict.get("Subrs") != null) { fdPrivateDictByteData = updateOffset(fdPrivateDictByteData, fdPrivateDict.get("Subrs").getOffset(), fdPrivateDict.get("Subrs").getOperandLength(), fdPrivateDictByteData.length); } 
writeBytes(fdPrivateDictByteData); writeIndex(fdSubrs.get(i)); } return privateDictOffsets; } private int writeFDArray(List<Integer> uniqueNewRefs, List<Integer> privateDictOffsets, List<Integer> fontNameSIDs) throws IOException { int offset = currentPos; List<FontDict> fdFonts = cffReader.getFDFonts(); writeCard16(uniqueNewRefs.size()); writeByte(1); //Offset size writeByte(1); //First offset int count = 1; for (int i = 0; i < uniqueNewRefs.size(); i++) { FontDict fdFont = fdFonts.get(uniqueNewRefs.get(i)); count += fdFont.getByteData().length; writeByte(count); } for (int i = 0; i < uniqueNewRefs.size(); i++) { FontDict fdFont = fdFonts.get(uniqueNewRefs.get(i)); byte[] fdFontByteData = fdFont.getByteData(); Map<String, DICTEntry> fdFontDict = cffReader.parseDictData(fdFontByteData); //Update the SID to the FontName fdFontByteData = updateOffset(fdFontByteData, fdFontDict.get("FontName").getOffset() - 1, fdFontDict.get("FontName").getOperandLengths().get(0), fontNameSIDs.get(i)); //Update the Private dict reference fdFontByteData = updateOffset(fdFontByteData, fdFontDict.get("Private").getOffset() + fdFontDict.get("Private").getOperandLengths().get(0), fdFontDict.get("Private").getOperandLengths().get(1), privateDictOffsets.get(i)); writeBytes(fdFontByteData); } return offset; } private class FDIndexReference { private int newFDIndex; private int oldFDIndex; public FDIndexReference(int newFDIndex, int oldFDIndex) { this.newFDIndex = newFDIndex; this.oldFDIndex = oldFDIndex; } public int getNewFDIndex() { return newFDIndex; } public int getOldFDIndex() { return oldFDIndex; } } private void createCharStringData() throws IOException { Map<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); CFFIndexData charStringsIndex = cffReader.getCharStringIndex(); DICTEntry privateEntry = topDICT.get("Private"); if (privateEntry != null) { int privateOffset = privateEntry.getOperands().get(1).intValue(); Map<String, DICTEntry> privateDICT = cffReader.getPrivateDict(privateEntry); if (privateDICT.get("Subrs") != null) { int localSubrOffset = privateOffset + privateDICT.get("Subrs").getOperands().get(0).intValue(); localIndexSubr = cffReader.readIndex(localSubrOffset); } else { localIndexSubr = cffReader.readIndex(null); } } globalIndexSubr = cffReader.getGlobalIndexSubr(); //Create the two lists which are to store the local and global subroutines subsetLocalIndexSubr = new ArrayList<byte[]>(); subsetGlobalIndexSubr = new ArrayList<byte[]>(); //Create the new char string index subsetCharStringsIndex = new ArrayList<byte[]>(); localUniques = new ArrayList<Integer>(); globalUniques = new ArrayList<Integer>(); for (int gid : subsetGlyphs.keySet()) { byte[] data = charStringsIndex.getValue(gid); preScanForSubsetIndexSize(data); } //Store the size of each subset index and clear the unique arrays subsetLocalSubrCount = localUniques.size(); subsetGlobalSubrCount = globalUniques.size(); localUniques.clear(); globalUniques.clear(); for (int gid : subsetGlyphs.keySet()) { byte[] data = charStringsIndex.getValue(gid); //Retrieve modified char string data and fill local / global subroutine arrays data = readCharStringData(data, subsetLocalSubrCount); subsetCharStringsIndex.add(data); } } private void preScanForSubsetIndexSize(byte[] data) throws IOException { boolean hasLocalSubroutines = localIndexSubr != null && localIndexSubr.getNumObjects() > 0; boolean hasGlobalSubroutines = globalIndexSubr != null && globalIndexSubr.getNumObjects() > 0; BytesNumber operand = new BytesNumber(-1, -1); for (int dataPos = 
0; dataPos < data.length; dataPos++) { int b0 = data[dataPos] & 0xff; if (b0 == LOCAL_SUBROUTINE && hasLocalSubroutines) { int subrNumber = getSubrNumber(localIndexSubr.getNumObjects(), operand.getNumber()); if (!localUniques.contains(subrNumber) && subrNumber < localIndexSubr.getNumObjects()) { localUniques.add(subrNumber); byte[] subr = localIndexSubr.getValue(subrNumber); preScanForSubsetIndexSize(subr); } operand.clearNumber(); } else if (b0 == GLOBAL_SUBROUTINE && hasGlobalSubroutines) { int subrNumber = getSubrNumber(globalIndexSubr.getNumObjects(), operand.getNumber()); if (!globalUniques.contains(subrNumber) && subrNumber < globalIndexSubr.getNumObjects()) { globalUniques.add(subrNumber); byte[] subr = globalIndexSubr.getValue(subrNumber); preScanForSubsetIndexSize(subr); } operand.clearNumber(); } else if ((b0 >= 0 && b0 <= 27) || (b0 >= 29 && b0 <= 31)) { operand.clearNumber(); if (b0 == 19 || b0 == 20) { dataPos += 1; } } else if (b0 == 28 || (b0 >= 32 && b0 <= 255)) { operand = readNumber(b0, data, dataPos); dataPos += operand.getNumBytes() - 1; } } } private int getSubrNumber(int numSubroutines, int operand) { int bias = getBias(numSubroutines); return bias + operand; } private byte[] readCharStringData(byte[] data, int subsetLocalSubrCount) throws IOException { boolean hasLocalSubroutines = localIndexSubr != null && localIndexSubr.getNumObjects() > 0; boolean hasGlobalSubroutines = globalIndexSubr != null && globalIndexSubr.getNumObjects() > 0; BytesNumber operand = new BytesNumber(-1, -1); for (int dataPos = 0; dataPos < data.length; dataPos++) { int b0 = data[dataPos] & 0xff; if (b0 == 10 && hasLocalSubroutines) { int subrNumber = getSubrNumber(localIndexSubr.getNumObjects(), operand.getNumber()); int newRef = getNewRefForReference(subrNumber, localUniques, localIndexSubr, subsetLocalIndexSubr, subsetLocalSubrCount); if (newRef != -1) { byte[] newData = constructNewRefData(dataPos, data, operand, subsetLocalSubrCount, newRef, new int[] {10}); dataPos -= data.length - newData.length; data = newData; } operand.clearNumber(); } else if (b0 == 29 && hasGlobalSubroutines) { int subrNumber = getSubrNumber(globalIndexSubr.getNumObjects(), operand.getNumber()); int newRef = getNewRefForReference(subrNumber, globalUniques, globalIndexSubr, subsetGlobalIndexSubr, subsetGlobalSubrCount); if (newRef != -1) { byte[] newData = constructNewRefData(dataPos, data, operand, subsetGlobalSubrCount, newRef, new int[] {29}); dataPos -= (data.length - newData.length); data = newData; } operand.clearNumber(); } else if ((b0 >= 0 && b0 <= 27) || (b0 >= 29 && b0 <= 31)) { operand.clearNumber(); if (b0 == 19 || b0 == 20) { dataPos += 1; } } else if (b0 == 28 || (b0 >= 32 && b0 <= 255)) { operand = readNumber(b0, data, dataPos); dataPos += operand.getNumBytes() - 1; } } //Return the data with the modified references to our arrays return data; } private int getNewRefForReference(int subrNumber, List<Integer> uniquesArray, CFFIndexData indexSubr, List<byte[]> subsetIndexSubr, int subrCount) throws IOException { int newRef = -1; if (!uniquesArray.contains(subrNumber)) { if (subrNumber < indexSubr.getNumObjects()) { byte[] subr = indexSubr.getValue(subrNumber); subr = readCharStringData(subr, subrCount); if (!uniquesArray.contains(subrNumber)) { uniquesArray.add(subrNumber); subsetIndexSubr.add(subr); newRef = subsetIndexSubr.size() - 1; } else { newRef = uniquesArray.indexOf(subrNumber); } } } else { newRef = uniquesArray.indexOf(subrNumber); } return newRef; } private int getBias(int subrCount) { if 
(subrCount < 1240) { return 107; } else if (subrCount < 33900) { return 1131; } else { return 32768; } } private byte[] constructNewRefData(int curDataPos, byte[] currentData, BytesNumber operand, int fullSubsetIndexSize, int curSubsetIndexSize, int[] operatorCode) { //Create the new array with the modified reference byte[] newData; int startRef = curDataPos - operand.getNumBytes(); int length = operand.getNumBytes() + 1; byte[] preBytes = new byte[startRef]; System.arraycopy(currentData, 0, preBytes, 0, startRef); int newBias = getBias(fullSubsetIndexSize); int newRef = curSubsetIndexSize - newBias; byte[] newRefBytes = createNewRef(newRef, operatorCode, -1); newData = concatArray(preBytes, newRefBytes); byte[] postBytes = new byte[currentData.length - (startRef + length)]; System.arraycopy(currentData, startRef + length, postBytes, 0, currentData.length - (startRef + length)); return concatArray(newData, postBytes); } public static byte[] createNewRef(int newRef, int[] operatorCode, int forceLength) { byte[] newRefBytes; int sizeOfOperator = operatorCode.length; if ((forceLength == -1 && newRef <= 107) || forceLength == 1) { newRefBytes = new byte[1 + sizeOfOperator]; //The index values are 0 indexed newRefBytes[0] = (byte)(newRef + 139); for (int i = 0; i < operatorCode.length; i++) { newRefBytes[1 + i] = (byte)operatorCode[i]; } } else if ((forceLength == -1 && newRef <= 1131) || forceLength == 2) { newRefBytes = new byte[2 + sizeOfOperator]; if (newRef <= 363) { newRefBytes[0] = (byte)247; } else if (newRef <= 619) { newRefBytes[0] = (byte)248; } else if (newRef <= 875) { newRefBytes[0] = (byte)249; } else { newRefBytes[0] = (byte)250; } newRefBytes[1] = (byte)(newRef - 108); for (int i = 0; i < operatorCode.length; i++) { newRefBytes[2 + i] = (byte)operatorCode[i]; } } else if ((forceLength == -1 && newRef <= 32767) || forceLength == 3) { newRefBytes = new byte[3 + sizeOfOperator]; newRefBytes[0] = 28; newRefBytes[1] = (byte)(newRef >> 8); newRefBytes[2] = (byte)newRef; for (int i = 0; i < operatorCode.length; i++) { newRefBytes[3 + i] = (byte)operatorCode[i]; } } else { newRefBytes = new byte[5 + sizeOfOperator]; newRefBytes[0] = 29; newRefBytes[1] = (byte)(newRef >> 24); newRefBytes[2] = (byte)(newRef >> 16); newRefBytes[3] = (byte)(newRef >> 8); newRefBytes[4] = (byte)newRef; for (int i = 0; i < operatorCode.length; i++) { newRefBytes[5 + i] = (byte)operatorCode[i]; } } return newRefBytes; } public static byte[] concatArray(byte[] a, byte[] b) { int aLen = a.length; int bLen = b.length; byte[] c = new byte[aLen + bLen]; System.arraycopy(a, 0, c, 0, aLen); System.arraycopy(b, 0, c, aLen, bLen); return c; } private int writeIndex(List<byte[]> dataArray) { int hdrTotal = 3; //2 byte number of items this.writeCard16(dataArray.size()); //Offset Size: 1 byte = 256, 2 bytes = 65536 etc. 
int totLength = 0; for (int i = 0; i < dataArray.size(); i++) { totLength += dataArray.get(i).length; } int offSize = 1; if (totLength <= (1 << 8)) { offSize = 1; } else if (totLength <= (1 << 16)) { offSize = 2; } else if (totLength <= (1 << 24)) { offSize = 3; } else { offSize = 4; } this.writeByte(offSize); //Count the first offset 1 hdrTotal += offSize; int total = 0; for (int i = 0; i < dataArray.size(); i++) { hdrTotal += offSize; int length = dataArray.get(i).length; switch (offSize) { case 1: if (i == 0) { writeByte(1); } total += length; writeByte(total + 1); break; case 2: if (i == 0) { writeCard16(1); } total += length; writeCard16(total + 1); break; case 3: if (i == 0) { writeThreeByteNumber(1); } total += length; writeThreeByteNumber(total + 1); break; case 4: if (i == 0) { writeULong(1); } total += length; writeULong(total + 1); break; default: throw new AssertionError("Offset Size was not an expected value."); } } for (int i = 0; i < dataArray.size(); i++) { writeBytes(dataArray.get(i)); } return hdrTotal + total; } private BytesNumber readNumber(int b0, byte[] input, int curPos) throws IOException { if (b0 == 28) { int b1 = input[curPos + 1] & 0xff; int b2 = input[curPos + 2] & 0xff; return new BytesNumber(Integer.valueOf((short) (b1 << 8 | b2)), 3); } else if (b0 >= 32 && b0 <= 246) { return new BytesNumber(Integer.valueOf(b0 - 139), 1); } else if (b0 >= 247 && b0 <= 250) { int b1 = input[curPos + 1] & 0xff; return new BytesNumber(Integer.valueOf((b0 - 247) * 256 + b1 + 108), 2); } else if (b0 >= 251 && b0 <= 254) { int b1 = input[curPos + 1] & 0xff; return new BytesNumber(Integer.valueOf(-(b0 - 251) * 256 - b1 - 108), 2); } else if (b0 == 255) { int b1 = input[curPos + 1] & 0xff; int b2 = input[curPos + 2] & 0xff; return new BytesNumber(Integer.valueOf((short)(b1 << 8 | b2)), 5); } else { throw new IllegalArgumentException(); } } /** * A class used to store the last number operand and also it's size in bytes */ private static final class BytesNumber { private int number; private int numBytes; public BytesNumber(int number, int numBytes) { this.number = number; this.numBytes = numBytes; } public int getNumber() { return this.number; } public int getNumBytes() { return this.numBytes; } public void clearNumber() { this.number = -1; this.numBytes = -1; } } private void writeCharsetTable(boolean cidFont) throws IOException { writeByte(0); for (int gid : gidToSID.keySet()) { if (cidFont && gid == 0) { continue; } writeCard16((cidFont) ? 
gid : gidToSID.get(gid)); } } private void writePrivateDict() throws IOException { Map<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); DICTEntry privateEntry = topDICT.get("Private"); if (privateEntry != null) { writeBytes(cffReader.getPrivateDictBytes(privateEntry)); } } private void updateOffsets(int topDictOffset, int charsetOffset, int charStringOffset, int privateDictOffset, int localIndexOffset, int encodingOffset) throws IOException { Map<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); Map<String, DICTEntry> privateDICT = null; DICTEntry privateEntry = topDICT.get("Private"); if (privateEntry != null) { privateDICT = cffReader.getPrivateDict(privateEntry); } int dataPos = 3 + (cffReader.getTopDictIndex().getOffSize() * cffReader.getTopDictIndex().getOffsets().length); int dataTopDictOffset = topDictOffset + dataPos; updateFixedOffsets(topDICT, dataTopDictOffset, charsetOffset, charStringOffset, encodingOffset); if (privateDICT != null) { //Private index offset in the top dict int oldPrivateOffset = dataTopDictOffset + privateEntry.getOffset(); output = updateOffset(output, oldPrivateOffset + privateEntry.getOperandLengths().get(0), privateEntry.getOperandLengths().get(1), privateDictOffset); //Update the local subroutine index offset in the private dict DICTEntry subroutines = privateDICT.get("Subrs"); int oldLocalSubrOffset = privateDictOffset + subroutines.getOffset(); //Value needs to be converted to -139 etc. int encodeValue = 0; if (subroutines.getOperandLength() == 1) { encodeValue = 139; } output = updateOffset(output, oldLocalSubrOffset, subroutines.getOperandLength(), (localIndexOffset - privateDictOffset) + encodeValue); } } private void updateFixedOffsets(Map<String, DICTEntry> topDICT, int dataTopDictOffset, int charsetOffset, int charStringOffset, int encodingOffset) { //Charset offset in the top dict DICTEntry charset = topDICT.get("charset"); int oldCharsetOffset = dataTopDictOffset + charset.getOffset(); output = updateOffset(output, oldCharsetOffset, charset.getOperandLength(), charsetOffset); //Char string index offset in the private dict DICTEntry charString = topDICT.get("CharStrings"); int oldCharStringOffset = dataTopDictOffset + charString.getOffset(); output = updateOffset(output, oldCharStringOffset, charString.getOperandLength(), charStringOffset); DICTEntry encodingEntry = topDICT.get("Encoding"); if (encodingEntry != null && encodingEntry.getOperands().get(0).intValue() != 0 && encodingEntry.getOperands().get(0).intValue() != 1) { int oldEncodingOffset = dataTopDictOffset + encodingEntry.getOffset(); output = updateOffset(output, oldEncodingOffset, encodingEntry.getOperandLength(), encodingOffset); } } private void updateCIDOffsets(int topDictDataOffset, int fdArrayOffset, int fdSelectOffset, int charsetOffset, int charStringOffset, int encodingOffset) { LinkedHashMap<String, DICTEntry> topDict = cffReader.getTopDictEntries(); DICTEntry fdArrayEntry = topDict.get("FDArray"); if (fdArrayEntry != null) { output = updateOffset(output, topDictDataOffset + fdArrayEntry.getOffset() - 1, fdArrayEntry.getOperandLength(), fdArrayOffset); } DICTEntry fdSelect = topDict.get("FDSelect"); if (fdSelect != null) { output = updateOffset(output, topDictDataOffset + fdSelect.getOffset() - 1, fdSelect.getOperandLength(), fdSelectOffset); } updateFixedOffsets(topDict, topDictDataOffset, charsetOffset, charStringOffset, encodingOffset); } private byte[] updateOffset(byte[] out, int position, int length, int replacement) { switch (length) { case 1: 
out[position] = (byte)(replacement & 0xFF); break; case 2: if (replacement <= 363) { out[position] = (byte)247; } else if (replacement <= 619) { out[position] = (byte)248; } else if (replacement <= 875) { out[position] = (byte)249; } else { out[position] = (byte)250; } out[position + 1] = (byte)(replacement - 108); break; case 3: out[position] = (byte)28; out[position + 1] = (byte)((replacement >> 8) & 0xFF); out[position + 2] = (byte)(replacement & 0xFF); break; case 5: out[position] = (byte)29; out[position + 1] = (byte)((replacement >> 24) & 0xFF); out[position + 2] = (byte)((replacement >> 16) & 0xFF); out[position + 3] = (byte)((replacement >> 8) & 0xFF); out[position + 4] = (byte)(replacement & 0xFF); break; default: } return out; } /** * Appends a byte to the output array, * updates currentPost but not realSize */ private void writeByte(int b) { output[currentPos++] = (byte)b; realSize++; } /** * Appends a USHORT to the output array, * updates currentPost but not realSize */ private void writeCard16(int s) { byte b1 = (byte)((s >> 8) & 0xff); byte b2 = (byte)(s & 0xff); writeByte(b1); writeByte(b2); } private void writeThreeByteNumber(int s) { byte b1 = (byte)((s >> 16) & 0xFF); byte b2 = (byte)((s >> 8) & 0xFF); byte b3 = (byte)(s & 0xFF); output[currentPos++] = b1; output[currentPos++] = b2; output[currentPos++] = b3; realSize += 3; } /** * Appends a ULONG to the output array, * at the given position */ private void writeULong(int s) { byte b1 = (byte)((s >> 24) & 0xff); byte b2 = (byte)((s >> 16) & 0xff); byte b3 = (byte)((s >> 8) & 0xff); byte b4 = (byte)(s & 0xff); output[currentPos++] = b1; output[currentPos++] = b2; output[currentPos++] = b3; output[currentPos++] = b4; realSize += 4; } /** * Returns a subset of the fonts (readFont() MUST be called first in order to create the * subset). * @return byte array */ public byte[] getFontSubset() { byte[] ret = new byte[realSize]; System.arraycopy(output, 0, ret, 0, realSize); return ret; } }
src/java/org/apache/fop/fonts/truetype/OTFSubSetFile.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.fonts.truetype; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import org.apache.fontbox.cff.CFFStandardString; import org.apache.fontbox.cff.encoding.CFFEncoding; import org.apache.fop.fonts.MultiByteFont; import org.apache.fop.fonts.cff.CFFDataReader; import org.apache.fop.fonts.cff.CFFDataReader.CFFIndexData; import org.apache.fop.fonts.cff.CFFDataReader.DICTEntry; import org.apache.fop.fonts.cff.CFFDataReader.FDSelect; import org.apache.fop.fonts.cff.CFFDataReader.FontDict; import org.apache.fop.fonts.cff.CFFDataReader.Format0FDSelect; import org.apache.fop.fonts.cff.CFFDataReader.Format3FDSelect; /** * Reads an OpenType CFF file and generates a subset * The OpenType specification can be found at the Microsoft * Typography site: http://www.microsoft.com/typography/otspec/ */ public class OTFSubSetFile extends OTFFile { private byte[] output; private int currentPos = 0; private int realSize = 0; /** A map containing each glyph to be included in the subset * with their existing and new GID's **/ private LinkedHashMap<Integer, Integer> subsetGlyphs; /** A map of the new GID to SID used to construct the charset table **/ private LinkedHashMap<Integer, Integer> gidToSID; private CFFIndexData localIndexSubr; private CFFIndexData globalIndexSubr; /** List of subroutines to write to the local / global indexes in the subset font **/ private List<byte[]> subsetLocalIndexSubr; private List<byte[]> subsetGlobalIndexSubr; /** For fonts which have an FDSelect or ROS flag in Top Dict, this is used to store the * local subroutine indexes for each group as opposed to the above subsetLocalIndexSubr */ private ArrayList<List<byte[]>> fdSubrs; /** The subset FD Select table used to store the mappings between glyphs and their * associated FDFont object which point to a private dict and local subroutines. 
*/ private LinkedHashMap<Integer, FDIndexReference> subsetFDSelect; /** A list of unique subroutines from the global / local subroutine indexes */ private List<Integer> localUniques; private List<Integer> globalUniques; /** A store of the number of subroutines each global / local subroutine will store **/ private int subsetLocalSubrCount; private int subsetGlobalSubrCount; /** A list of char string data for each glyph to be stored in the subset font **/ private List<byte[]> subsetCharStringsIndex; /** The embedded name to change in the name table **/ private String embeddedName; /** An array used to hold the string index data for the subset font **/ private List<byte[]> stringIndexData = new ArrayList<byte[]>(); /** The CFF reader object used to read data and offsets from the original font file */ private CFFDataReader cffReader = null; /** The class used to represent this font **/ private MultiByteFont mbFont; /** The number of standard strings in CFF **/ private static final int NUM_STANDARD_STRINGS = 391; /** The operator used to identify a local subroutine reference */ private static final int LOCAL_SUBROUTINE = 10; /** The operator used to identify a global subroutine reference */ private static final int GLOBAL_SUBROUTINE = 29; public OTFSubSetFile() throws IOException { super(); } public void readFont(FontFileReader in, String embeddedName, String header, MultiByteFont mbFont) throws IOException { this.mbFont = mbFont; readFont(in, embeddedName, header, mbFont.getUsedGlyphs()); } /** * Reads and creates a subset of the font. * * @param in FontFileReader to read from * @param name Name to be checked for in the font file * @param header The header of the font file * @param glyphs Map of glyphs (glyphs has old index as (Integer) key and * new index as (Integer) value) * @throws IOException in case of an I/O problem */ void readFont(FontFileReader in, String embeddedName, String header, Map<Integer, Integer> usedGlyphs) throws IOException { fontFile = in; currentPos = 0; realSize = 0; this.embeddedName = embeddedName; //Sort by the new GID and store in a LinkedHashMap subsetGlyphs = sortByValue(usedGlyphs); output = new byte[in.getFileSize()]; initializeFont(in); cffReader = new CFFDataReader(fontFile); //Create the CIDFontType0C data createCFF(); } private LinkedHashMap<Integer, Integer> sortByValue(Map<Integer, Integer> map) { List<Entry<Integer, Integer>> list = new ArrayList<Entry<Integer, Integer>>(map.entrySet()); Collections.sort(list, new Comparator<Entry<Integer, Integer>>() { public int compare(Entry<Integer, Integer> o1, Entry<Integer, Integer> o2) { return ((Comparable<Integer>) o1.getValue()).compareTo(o2.getValue()); } }); LinkedHashMap<Integer, Integer> result = new LinkedHashMap<Integer, Integer>(); for (Entry<Integer, Integer> entry : list) { result.put(entry.getKey(), entry.getValue()); } return result; } private void createCFF() throws IOException { //Header writeBytes(cffReader.getHeader()); //Name Index writeIndex(Arrays.asList(embeddedName.getBytes())); //Keep offset of the topDICT so it can be updated once all data has been written int topDictOffset = currentPos; //Top DICT Index and Data byte[] topDictIndex = cffReader.getTopDictIndex().getByteData(); int offSize = topDictIndex[2]; writeBytes(topDictIndex, 0, 3 + (offSize * 2)); int topDictDataOffset = currentPos; writeTopDICT(); //Create the char string index data and related local / global subroutines if (cffReader.getFDSelect() == null) { createCharStringData(); } else { createCharStringDataCID(); } //If it is 
a CID-Keyed font, store each FD font and add each SID List<Integer> fontNameSIDs = null; List<Integer> subsetFDFonts = null; if (cffReader.getFDSelect() != null) { subsetFDFonts = getUsedFDFonts(); fontNameSIDs = storeFDStrings(subsetFDFonts); } //String index writeStringIndex(); //Global subroutine index writeIndex(subsetGlobalIndexSubr); //Encoding int encodingOffset = currentPos; writeEncoding(fileFont.getEncoding()); //Charset table int charsetOffset = currentPos; writeCharsetTable(cffReader.getFDSelect() != null); //FDSelect table int fdSelectOffset = currentPos; if (cffReader.getFDSelect() != null) { writeFDSelect(); } //Char Strings Index int charStringOffset = currentPos; writeIndex(subsetCharStringsIndex); if (cffReader.getFDSelect() == null) { //Keep offset to modify later with the local subroutine index offset int privateDictOffset = currentPos; writePrivateDict(); //Local subroutine index int localIndexOffset = currentPos; writeIndex(subsetLocalIndexSubr); //Update the offsets updateOffsets(topDictOffset, charsetOffset, charStringOffset, privateDictOffset, localIndexOffset, encodingOffset); } else { List<Integer> privateDictOffsets = writeCIDDictsAndSubrs(subsetFDFonts); int fdArrayOffset = writeFDArray(subsetFDFonts, privateDictOffsets, fontNameSIDs); updateCIDOffsets(topDictDataOffset, fdArrayOffset, fdSelectOffset, charsetOffset, charStringOffset, encodingOffset); } } private List<Integer> storeFDStrings(List<Integer> uniqueNewRefs) throws IOException { ArrayList<Integer> fontNameSIDs = new ArrayList<Integer>(); List<FontDict> fdFonts = cffReader.getFDFonts(); for (int i = 0; i < uniqueNewRefs.size(); i++) { FontDict fdFont = fdFonts.get(uniqueNewRefs.get(i)); byte[] fdFontByteData = fdFont.getByteData(); Map<String, DICTEntry> fdFontDict = cffReader.parseDictData(fdFontByteData); fontNameSIDs.add(stringIndexData.size() + NUM_STANDARD_STRINGS); stringIndexData.add(cffReader.getStringIndex().getValue(fdFontDict.get("FontName") .getOperands().get(0).intValue() - NUM_STANDARD_STRINGS)); } return fontNameSIDs; } private void writeBytes(byte[] out) { for (int i = 0; i < out.length; i++) { output[currentPos++] = out[i]; realSize++; } } private void writeBytes(byte[] out, int offset, int length) { for (int i = offset; i < offset + length; i++) { output[currentPos++] = out[i]; realSize++; } } private void writeEncoding(CFFEncoding encoding) throws IOException { LinkedHashMap<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); DICTEntry encodingEntry = topDICT.get("Encoding"); if (encodingEntry != null && encodingEntry.getOperands().get(0).intValue() != 0 && encodingEntry.getOperands().get(0).intValue() != 1) { writeByte(0); writeByte(gidToSID.size()); for (int gid : gidToSID.keySet()) { int code = encoding.getCode(gidToSID.get(gid)); writeByte(code); } } } private void writeTopDICT() throws IOException { LinkedHashMap<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); List<String> topDictStringEntries = Arrays.asList("version", "Notice", "Copyright", "FullName", "FamilyName", "Weight", "PostScript"); for (Map.Entry<String, DICTEntry> dictEntry : topDICT.entrySet()) { String dictKey = dictEntry.getKey(); DICTEntry entry = dictEntry.getValue(); //If the value is an SID, update the reference but keep the size the same if (dictKey.equals("ROS")) { writeROSEntry(entry); } else if (dictKey.equals("CIDCount")) { writeCIDCount(entry); } else if (topDictStringEntries.contains(dictKey)) { writeTopDictStringEntry(entry); } else { writeBytes(entry.getByteData()); } } } private 
void writeROSEntry(DICTEntry dictEntry) throws IOException { int sidA = dictEntry.getOperands().get(0).intValue(); if (sidA > 390) { stringIndexData.add(cffReader.getStringIndex().getValue(sidA - NUM_STANDARD_STRINGS)); } int sidAStringIndex = stringIndexData.size() + 390; int sidB = dictEntry.getOperands().get(1).intValue(); if (sidB > 390) { stringIndexData.add("Identity".getBytes()); } int sidBStringIndex = stringIndexData.size() + 390; byte[] cidEntryByteData = dictEntry.getByteData(); cidEntryByteData = updateOffset(cidEntryByteData, 0, dictEntry.getOperandLengths().get(0), sidAStringIndex); cidEntryByteData = updateOffset(cidEntryByteData, dictEntry.getOperandLengths().get(0), dictEntry.getOperandLengths().get(1), sidBStringIndex); cidEntryByteData = updateOffset(cidEntryByteData, dictEntry.getOperandLengths().get(0) + dictEntry.getOperandLengths().get(1), dictEntry.getOperandLengths().get(2), 139); writeBytes(cidEntryByteData); } private void writeCIDCount(DICTEntry dictEntry) throws IOException { byte[] cidCountByteData = dictEntry.getByteData(); cidCountByteData = updateOffset(cidCountByteData, 0, dictEntry.getOperandLengths().get(0), subsetGlyphs.size()); writeBytes(cidCountByteData); } private void writeTopDictStringEntry(DICTEntry dictEntry) throws IOException { int sid = dictEntry.getOperands().get(0).intValue(); if (sid > 391) { stringIndexData.add(cffReader.getStringIndex().getValue(sid - 391)); } byte[] newDictEntry = createNewRef(stringIndexData.size() + 390, dictEntry.getOperator(), dictEntry.getOperandLength()); writeBytes(newDictEntry); } private void writeStringIndex() throws IOException { Map<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); int charsetOffset = topDICT.get("charset").getOperands().get(0).intValue(); gidToSID = new LinkedHashMap<Integer, Integer>(); for (int gid : subsetGlyphs.keySet()) { int sid = cffReader.getSIDFromGID(charsetOffset, gid); //Check whether the SID falls into the standard string set if (sid < NUM_STANDARD_STRINGS) { gidToSID.put(subsetGlyphs.get(gid), sid); if (mbFont != null) { mbFont.mapUsedGlyphName(subsetGlyphs.get(gid), CFFStandardString.getName(sid)); } } else { int index = sid - NUM_STANDARD_STRINGS; if (index <= cffReader.getStringIndex().getNumObjects()) { if (mbFont != null) { mbFont.mapUsedGlyphName(subsetGlyphs.get(gid), new String(cffReader.getStringIndex().getValue(index))); } gidToSID.put(subsetGlyphs.get(gid), stringIndexData.size() + 391); stringIndexData.add(cffReader.getStringIndex().getValue(index)); } else { if (mbFont != null) { mbFont.mapUsedGlyphName(subsetGlyphs.get(gid), ".notdef"); } gidToSID.put(subsetGlyphs.get(gid), index); } } } //Write the String Index writeIndex(stringIndexData); } private void createCharStringDataCID() throws IOException { CFFIndexData charStringsIndex = cffReader.getCharStringIndex(); FDSelect fontDictionary = cffReader.getFDSelect(); if (fontDictionary instanceof Format0FDSelect) { throw new UnsupportedOperationException("OTF CFF CID Format0 currently not implemented"); } else if (fontDictionary instanceof Format3FDSelect) { Format3FDSelect fdSelect = (Format3FDSelect)fontDictionary; Map<Integer, Integer> subsetGroups = new HashMap<Integer, Integer>(); List<Integer> uniqueGroups = new ArrayList<Integer>(); for (int gid : subsetGlyphs.keySet()) { Integer[] ranges = fdSelect.getRanges().keySet().toArray(new Integer[0]); for (int i = 0; i < ranges.length; i++) { int nextRange = -1; if (i < ranges.length - 1) { nextRange = ranges[i + 1]; } else { nextRange = 
fdSelect.getSentinelGID(); } if (gid >= ranges[i] && gid < nextRange) { subsetGroups.put(gid, fdSelect.getRanges().get(ranges[i])); if (!uniqueGroups.contains(fdSelect.getRanges().get(ranges[i]))) { uniqueGroups.add(fdSelect.getRanges().get(ranges[i])); } } } } //Prepare resources globalIndexSubr = cffReader.getGlobalIndexSubr(); //Create the new char string index subsetCharStringsIndex = new ArrayList<byte[]>(); globalUniques = new ArrayList<Integer>(); subsetFDSelect = new LinkedHashMap<Integer, FDIndexReference>(); List<List<Integer>> foundLocalUniques = new ArrayList<List<Integer>>(); for (int i = 0; i < uniqueGroups.size(); i++) { foundLocalUniques.add(new ArrayList<Integer>()); } for (int gid : subsetGlyphs.keySet()) { int group = subsetGroups.get(gid); localIndexSubr = cffReader.getFDFonts().get(group).getLocalSubrData(); localUniques = foundLocalUniques.get(uniqueGroups.indexOf(subsetGroups.get(gid))); FDIndexReference newFDReference = new FDIndexReference( uniqueGroups.indexOf(subsetGroups.get(gid)), subsetGroups.get(gid)); subsetFDSelect.put(subsetGlyphs.get(gid), newFDReference); byte[] data = charStringsIndex.getValue(gid); preScanForSubsetIndexSize(data); } //Create the two lists which are to store the local and global subroutines subsetGlobalIndexSubr = new ArrayList<byte[]>(); fdSubrs = new ArrayList<List<byte[]>>(); subsetGlobalSubrCount = globalUniques.size(); globalUniques.clear(); localUniques = null; for (int l = 0; l < foundLocalUniques.size(); l++) { fdSubrs.add(new ArrayList<byte[]>()); } List<List<Integer>> foundLocalUniquesB = new ArrayList<List<Integer>>(); for (int k = 0; k < uniqueGroups.size(); k++) { foundLocalUniquesB.add(new ArrayList<Integer>()); } for (Integer gid : subsetGlyphs.keySet()) { int group = subsetGroups.get(gid); localIndexSubr = cffReader.getFDFonts().get(group).getLocalSubrData(); localUniques = foundLocalUniquesB.get(subsetFDSelect.get(subsetGlyphs.get(gid)).getNewFDIndex()); byte[] data = charStringsIndex.getValue(gid); subsetLocalIndexSubr = fdSubrs.get(subsetFDSelect.get(subsetGlyphs.get(gid)).getNewFDIndex()); subsetLocalSubrCount = foundLocalUniques.get(subsetFDSelect.get(subsetGlyphs.get(gid)).getNewFDIndex()).size(); data = readCharStringData(data, subsetLocalSubrCount); subsetCharStringsIndex.add(data); } } } private void writeFDSelect() { writeByte(0); //Format for (Integer gid : subsetFDSelect.keySet()) { writeByte(subsetFDSelect.get(gid).getNewFDIndex()); } } private List<Integer> getUsedFDFonts() { List<Integer> uniqueNewRefs = new ArrayList<Integer>(); for (int gid : subsetFDSelect.keySet()) { int fdIndex = subsetFDSelect.get(gid).getOldFDIndex(); if (!uniqueNewRefs.contains(fdIndex)) { uniqueNewRefs.add(fdIndex); } } return uniqueNewRefs; } private List<Integer> writeCIDDictsAndSubrs(List<Integer> uniqueNewRefs) throws IOException { List<Integer> privateDictOffsets = new ArrayList<Integer>(); List<FontDict> fdFonts = cffReader.getFDFonts(); for (int i = 0; i < uniqueNewRefs.size(); i++) { FontDict curFDFont = fdFonts.get(uniqueNewRefs.get(i)); HashMap<String, DICTEntry> fdPrivateDict = cffReader.parseDictData( curFDFont.getPrivateDictData()); int privateDictOffset = currentPos; privateDictOffsets.add(privateDictOffset); byte[] fdPrivateDictByteData = curFDFont.getPrivateDictData(); if (fdPrivateDict.get("Subrs") != null) { fdPrivateDictByteData = updateOffset(fdPrivateDictByteData, fdPrivateDict.get("Subrs").getOffset(), fdPrivateDict.get("Subrs").getOperandLength(), fdPrivateDictByteData.length); } 
writeBytes(fdPrivateDictByteData); writeIndex(fdSubrs.get(i)); } return privateDictOffsets; } private int writeFDArray(List<Integer> uniqueNewRefs, List<Integer> privateDictOffsets, List<Integer> fontNameSIDs) throws IOException { int offset = currentPos; List<FontDict> fdFonts = cffReader.getFDFonts(); writeCard16(uniqueNewRefs.size()); writeByte(1); //Offset size writeByte(1); //First offset int count = 1; for (int i = 0; i < uniqueNewRefs.size(); i++) { FontDict fdFont = fdFonts.get(uniqueNewRefs.get(i)); count += fdFont.getByteData().length; writeByte(count); } for (int i = 0; i < uniqueNewRefs.size(); i++) { FontDict fdFont = fdFonts.get(uniqueNewRefs.get(i)); byte[] fdFontByteData = fdFont.getByteData(); Map<String, DICTEntry> fdFontDict = cffReader.parseDictData(fdFontByteData); //Update the SID to the FontName fdFontByteData = updateOffset(fdFontByteData, fdFontDict.get("FontName").getOffset() - 1, fdFontDict.get("FontName").getOperandLengths().get(0), fontNameSIDs.get(i)); //Update the Private dict reference fdFontByteData = updateOffset(fdFontByteData, fdFontDict.get("Private").getOffset() + fdFontDict.get("Private").getOperandLengths().get(0), fdFontDict.get("Private").getOperandLengths().get(1), privateDictOffsets.get(i)); writeBytes(fdFontByteData); } return offset; } private class FDIndexReference { private int newFDIndex; private int oldFDIndex; public FDIndexReference(int newFDIndex, int oldFDIndex) { this.newFDIndex = newFDIndex; this.oldFDIndex = oldFDIndex; } public int getNewFDIndex() { return newFDIndex; } public int getOldFDIndex() { return oldFDIndex; } } private void createCharStringData() throws IOException { Map<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); CFFIndexData charStringsIndex = cffReader.getCharStringIndex(); DICTEntry privateEntry = topDICT.get("Private"); if (privateEntry != null) { int privateOffset = privateEntry.getOperands().get(1).intValue(); Map<String, DICTEntry> privateDICT = cffReader.getPrivateDict(privateEntry); int localSubrOffset = privateOffset + privateDICT.get("Subrs").getOperands().get(0).intValue(); localIndexSubr = cffReader.readIndex(localSubrOffset); } globalIndexSubr = cffReader.getGlobalIndexSubr(); //Create the two lists which are to store the local and global subroutines subsetLocalIndexSubr = new ArrayList<byte[]>(); subsetGlobalIndexSubr = new ArrayList<byte[]>(); //Create the new char string index subsetCharStringsIndex = new ArrayList<byte[]>(); localUniques = new ArrayList<Integer>(); globalUniques = new ArrayList<Integer>(); for (int gid : subsetGlyphs.keySet()) { byte[] data = charStringsIndex.getValue(gid); preScanForSubsetIndexSize(data); } //Store the size of each subset index and clear the unique arrays subsetLocalSubrCount = localUniques.size(); subsetGlobalSubrCount = globalUniques.size(); localUniques.clear(); globalUniques.clear(); for (int gid : subsetGlyphs.keySet()) { byte[] data = charStringsIndex.getValue(gid); //Retrieve modified char string data and fill local / global subroutine arrays data = readCharStringData(data, subsetLocalSubrCount); subsetCharStringsIndex.add(data); } } private void preScanForSubsetIndexSize(byte[] data) throws IOException { boolean hasLocalSubroutines = localIndexSubr != null && localIndexSubr.getNumObjects() > 0; boolean hasGlobalSubroutines = globalIndexSubr != null && globalIndexSubr.getNumObjects() > 0; BytesNumber operand = new BytesNumber(-1, -1); for (int dataPos = 0; dataPos < data.length; dataPos++) { int b0 = data[dataPos] & 0xff; if (b0 == 
LOCAL_SUBROUTINE && hasLocalSubroutines) { int subrNumber = getSubrNumber(localIndexSubr.getNumObjects(), operand.getNumber()); if (!localUniques.contains(subrNumber) && subrNumber < localIndexSubr.getNumObjects()) { localUniques.add(subrNumber); byte[] subr = localIndexSubr.getValue(subrNumber); preScanForSubsetIndexSize(subr); } operand.clearNumber(); } else if (b0 == GLOBAL_SUBROUTINE && hasGlobalSubroutines) { int subrNumber = getSubrNumber(globalIndexSubr.getNumObjects(), operand.getNumber()); if (!globalUniques.contains(subrNumber) && subrNumber < globalIndexSubr.getNumObjects()) { globalUniques.add(subrNumber); byte[] subr = globalIndexSubr.getValue(subrNumber); preScanForSubsetIndexSize(subr); } operand.clearNumber(); } else if ((b0 >= 0 && b0 <= 27) || (b0 >= 29 && b0 <= 31)) { operand.clearNumber(); if (b0 == 19 || b0 == 20) { dataPos += 1; } } else if (b0 == 28 || (b0 >= 32 && b0 <= 255)) { operand = readNumber(b0, data, dataPos); dataPos += operand.getNumBytes() - 1; } } } private int getSubrNumber(int numSubroutines, int operand) { int bias = getBias(numSubroutines); return bias + operand; } private byte[] readCharStringData(byte[] data, int subsetLocalSubrCount) throws IOException { boolean hasLocalSubroutines = localIndexSubr != null && localIndexSubr.getNumObjects() > 0; boolean hasGlobalSubroutines = globalIndexSubr != null && globalIndexSubr.getNumObjects() > 0; BytesNumber operand = new BytesNumber(-1, -1); for (int dataPos = 0; dataPos < data.length; dataPos++) { int b0 = data[dataPos] & 0xff; if (b0 == 10 && hasLocalSubroutines) { int subrNumber = getSubrNumber(localIndexSubr.getNumObjects(), operand.getNumber()); int newRef = getNewRefForReference(subrNumber, localUniques, localIndexSubr, subsetLocalIndexSubr, subsetLocalSubrCount); if (newRef != -1) { byte[] newData = constructNewRefData(dataPos, data, operand, subsetLocalSubrCount, newRef, new int[] {10}); dataPos -= data.length - newData.length; data = newData; } operand.clearNumber(); } else if (b0 == 29 && hasGlobalSubroutines) { int subrNumber = getSubrNumber(globalIndexSubr.getNumObjects(), operand.getNumber()); int newRef = getNewRefForReference(subrNumber, globalUniques, globalIndexSubr, subsetGlobalIndexSubr, subsetGlobalSubrCount); if (newRef != -1) { byte[] newData = constructNewRefData(dataPos, data, operand, subsetGlobalSubrCount, newRef, new int[] {29}); dataPos -= (data.length - newData.length); data = newData; } operand.clearNumber(); } else if ((b0 >= 0 && b0 <= 27) || (b0 >= 29 && b0 <= 31)) { operand.clearNumber(); if (b0 == 19 || b0 == 20) { dataPos += 1; } } else if (b0 == 28 || (b0 >= 32 && b0 <= 255)) { operand = readNumber(b0, data, dataPos); dataPos += operand.getNumBytes() - 1; } } //Return the data with the modified references to our arrays return data; } private int getNewRefForReference(int subrNumber, List<Integer> uniquesArray, CFFIndexData indexSubr, List<byte[]> subsetIndexSubr, int subrCount) throws IOException { int newRef = -1; if (!uniquesArray.contains(subrNumber)) { if (subrNumber < indexSubr.getNumObjects()) { byte[] subr = indexSubr.getValue(subrNumber); subr = readCharStringData(subr, subrCount); if (!uniquesArray.contains(subrNumber)) { uniquesArray.add(subrNumber); subsetIndexSubr.add(subr); newRef = subsetIndexSubr.size() - 1; } else { newRef = uniquesArray.indexOf(subrNumber); } } } else { newRef = uniquesArray.indexOf(subrNumber); } return newRef; } private int getBias(int subrCount) { if (subrCount < 1240) { return 107; } else if (subrCount < 33900) { return 1131; } 
else { return 32768; } } private byte[] constructNewRefData(int curDataPos, byte[] currentData, BytesNumber operand, int fullSubsetIndexSize, int curSubsetIndexSize, int[] operatorCode) { //Create the new array with the modified reference byte[] newData; int startRef = curDataPos - operand.getNumBytes(); int length = operand.getNumBytes() + 1; byte[] preBytes = new byte[startRef]; System.arraycopy(currentData, 0, preBytes, 0, startRef); int newBias = getBias(fullSubsetIndexSize); int newRef = curSubsetIndexSize - newBias; byte[] newRefBytes = createNewRef(newRef, operatorCode, -1); newData = concatArray(preBytes, newRefBytes); byte[] postBytes = new byte[currentData.length - (startRef + length)]; System.arraycopy(currentData, startRef + length, postBytes, 0, currentData.length - (startRef + length)); return concatArray(newData, postBytes); } public static byte[] createNewRef(int newRef, int[] operatorCode, int forceLength) { byte[] newRefBytes; int sizeOfOperator = operatorCode.length; if ((forceLength == -1 && newRef <= 107) || forceLength == 1) { newRefBytes = new byte[1 + sizeOfOperator]; //The index values are 0 indexed newRefBytes[0] = (byte)(newRef + 139); for (int i = 0; i < operatorCode.length; i++) { newRefBytes[1 + i] = (byte)operatorCode[i]; } } else if ((forceLength == -1 && newRef <= 1131) || forceLength == 2) { newRefBytes = new byte[2 + sizeOfOperator]; if (newRef <= 363) { newRefBytes[0] = (byte)247; } else if (newRef <= 619) { newRefBytes[0] = (byte)248; } else if (newRef <= 875) { newRefBytes[0] = (byte)249; } else { newRefBytes[0] = (byte)250; } newRefBytes[1] = (byte)(newRef - 108); for (int i = 0; i < operatorCode.length; i++) { newRefBytes[2 + i] = (byte)operatorCode[i]; } } else if ((forceLength == -1 && newRef <= 32767) || forceLength == 3) { newRefBytes = new byte[3 + sizeOfOperator]; newRefBytes[0] = 28; newRefBytes[1] = (byte)(newRef >> 8); newRefBytes[2] = (byte)newRef; for (int i = 0; i < operatorCode.length; i++) { newRefBytes[3 + i] = (byte)operatorCode[i]; } } else { newRefBytes = new byte[5 + sizeOfOperator]; newRefBytes[0] = 29; newRefBytes[1] = (byte)(newRef >> 24); newRefBytes[2] = (byte)(newRef >> 16); newRefBytes[3] = (byte)(newRef >> 8); newRefBytes[4] = (byte)newRef; for (int i = 0; i < operatorCode.length; i++) { newRefBytes[5 + i] = (byte)operatorCode[i]; } } return newRefBytes; } public static byte[] concatArray(byte[] a, byte[] b) { int aLen = a.length; int bLen = b.length; byte[] c = new byte[aLen + bLen]; System.arraycopy(a, 0, c, 0, aLen); System.arraycopy(b, 0, c, aLen, bLen); return c; } private int writeIndex(List<byte[]> dataArray) { int hdrTotal = 3; //2 byte number of items this.writeCard16(dataArray.size()); //Offset Size: 1 byte = 256, 2 bytes = 65536 etc. 
int totLength = 0; for (int i = 0; i < dataArray.size(); i++) { totLength += dataArray.get(i).length; } int offSize = 1; if (totLength <= (1 << 8)) { offSize = 1; } else if (totLength <= (1 << 16)) { offSize = 2; } else if (totLength <= (1 << 24)) { offSize = 3; } else { offSize = 4; } this.writeByte(offSize); //Count the first offset 1 hdrTotal += offSize; int total = 0; for (int i = 0; i < dataArray.size(); i++) { hdrTotal += offSize; int length = dataArray.get(i).length; switch (offSize) { case 1: if (i == 0) { writeByte(1); } total += length; writeByte(total + 1); break; case 2: if (i == 0) { writeCard16(1); } total += length; writeCard16(total + 1); break; case 3: if (i == 0) { writeThreeByteNumber(1); } total += length; writeThreeByteNumber(total + 1); break; case 4: if (i == 0) { writeULong(1); } total += length; writeULong(total + 1); break; default: throw new AssertionError("Offset Size was not an expected value."); } } for (int i = 0; i < dataArray.size(); i++) { writeBytes(dataArray.get(i)); } return hdrTotal + total; } private BytesNumber readNumber(int b0, byte[] input, int curPos) throws IOException { if (b0 == 28) { int b1 = input[curPos + 1] & 0xff; int b2 = input[curPos + 2] & 0xff; return new BytesNumber(Integer.valueOf((short) (b1 << 8 | b2)), 3); } else if (b0 >= 32 && b0 <= 246) { return new BytesNumber(Integer.valueOf(b0 - 139), 1); } else if (b0 >= 247 && b0 <= 250) { int b1 = input[curPos + 1] & 0xff; return new BytesNumber(Integer.valueOf((b0 - 247) * 256 + b1 + 108), 2); } else if (b0 >= 251 && b0 <= 254) { int b1 = input[curPos + 1] & 0xff; return new BytesNumber(Integer.valueOf(-(b0 - 251) * 256 - b1 - 108), 2); } else if (b0 == 255) { int b1 = input[curPos + 1] & 0xff; int b2 = input[curPos + 2] & 0xff; return new BytesNumber(Integer.valueOf((short)(b1 << 8 | b2)), 5); } else { throw new IllegalArgumentException(); } } /** * A class used to store the last number operand and also it's size in bytes */ private static final class BytesNumber { private int number; private int numBytes; public BytesNumber(int number, int numBytes) { this.number = number; this.numBytes = numBytes; } public int getNumber() { return this.number; } public int getNumBytes() { return this.numBytes; } public void clearNumber() { this.number = -1; this.numBytes = -1; } } private void writeCharsetTable(boolean cidFont) throws IOException { writeByte(0); for (int gid : gidToSID.keySet()) { if (cidFont && gid == 0) { continue; } writeCard16((cidFont) ? 
gid : gidToSID.get(gid)); } } private void writePrivateDict() throws IOException { Map<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); DICTEntry privateEntry = topDICT.get("Private"); if (privateEntry != null) { writeBytes(cffReader.getPrivateDictBytes(privateEntry)); } } private void updateOffsets(int topDictOffset, int charsetOffset, int charStringOffset, int privateDictOffset, int localIndexOffset, int encodingOffset) throws IOException { Map<String, DICTEntry> topDICT = cffReader.getTopDictEntries(); Map<String, DICTEntry> privateDICT = null; DICTEntry privateEntry = topDICT.get("Private"); if (privateEntry != null) { privateDICT = cffReader.getPrivateDict(privateEntry); } int dataPos = 3 + (cffReader.getTopDictIndex().getOffSize() * cffReader.getTopDictIndex().getOffsets().length); int dataTopDictOffset = topDictOffset + dataPos; updateFixedOffsets(topDICT, dataTopDictOffset, charsetOffset, charStringOffset, encodingOffset); if (privateDICT != null) { //Private index offset in the top dict int oldPrivateOffset = dataTopDictOffset + privateEntry.getOffset(); output = updateOffset(output, oldPrivateOffset + privateEntry.getOperandLengths().get(0), privateEntry.getOperandLengths().get(1), privateDictOffset); //Update the local subroutine index offset in the private dict DICTEntry subroutines = privateDICT.get("Subrs"); int oldLocalSubrOffset = privateDictOffset + subroutines.getOffset(); //Value needs to be converted to -139 etc. int encodeValue = 0; if (subroutines.getOperandLength() == 1) { encodeValue = 139; } output = updateOffset(output, oldLocalSubrOffset, subroutines.getOperandLength(), (localIndexOffset - privateDictOffset) + encodeValue); } } private void updateFixedOffsets(Map<String, DICTEntry> topDICT, int dataTopDictOffset, int charsetOffset, int charStringOffset, int encodingOffset) { //Charset offset in the top dict DICTEntry charset = topDICT.get("charset"); int oldCharsetOffset = dataTopDictOffset + charset.getOffset(); output = updateOffset(output, oldCharsetOffset, charset.getOperandLength(), charsetOffset); //Char string index offset in the private dict DICTEntry charString = topDICT.get("CharStrings"); int oldCharStringOffset = dataTopDictOffset + charString.getOffset(); output = updateOffset(output, oldCharStringOffset, charString.getOperandLength(), charStringOffset); DICTEntry encodingEntry = topDICT.get("Encoding"); if (encodingEntry != null && encodingEntry.getOperands().get(0).intValue() != 0 && encodingEntry.getOperands().get(0).intValue() != 1) { int oldEncodingOffset = dataTopDictOffset + encodingEntry.getOffset(); output = updateOffset(output, oldEncodingOffset, encodingEntry.getOperandLength(), encodingOffset); } } private void updateCIDOffsets(int topDictDataOffset, int fdArrayOffset, int fdSelectOffset, int charsetOffset, int charStringOffset, int encodingOffset) { LinkedHashMap<String, DICTEntry> topDict = cffReader.getTopDictEntries(); DICTEntry fdArrayEntry = topDict.get("FDArray"); if (fdArrayEntry != null) { output = updateOffset(output, topDictDataOffset + fdArrayEntry.getOffset() - 1, fdArrayEntry.getOperandLength(), fdArrayOffset); } DICTEntry fdSelect = topDict.get("FDSelect"); if (fdSelect != null) { output = updateOffset(output, topDictDataOffset + fdSelect.getOffset() - 1, fdSelect.getOperandLength(), fdSelectOffset); } updateFixedOffsets(topDict, topDictDataOffset, charsetOffset, charStringOffset, encodingOffset); } private byte[] updateOffset(byte[] out, int position, int length, int replacement) { switch (length) { case 1: 
out[position] = (byte)(replacement & 0xFF); break; case 2: if (replacement <= 363) { out[position] = (byte)247; } else if (replacement <= 619) { out[position] = (byte)248; } else if (replacement <= 875) { out[position] = (byte)249; } else { out[position] = (byte)250; } out[position + 1] = (byte)(replacement - 108); break; case 3: out[position] = (byte)28; out[position + 1] = (byte)((replacement >> 8) & 0xFF); out[position + 2] = (byte)(replacement & 0xFF); break; case 5: out[position] = (byte)29; out[position + 1] = (byte)((replacement >> 24) & 0xFF); out[position + 2] = (byte)((replacement >> 16) & 0xFF); out[position + 3] = (byte)((replacement >> 8) & 0xFF); out[position + 4] = (byte)(replacement & 0xFF); break; default: } return out; } /** * Appends a byte to the output array, * updates currentPost but not realSize */ private void writeByte(int b) { output[currentPos++] = (byte)b; realSize++; } /** * Appends a USHORT to the output array, * updates currentPost but not realSize */ private void writeCard16(int s) { byte b1 = (byte)((s >> 8) & 0xff); byte b2 = (byte)(s & 0xff); writeByte(b1); writeByte(b2); } private void writeThreeByteNumber(int s) { byte b1 = (byte)((s >> 16) & 0xFF); byte b2 = (byte)((s >> 8) & 0xFF); byte b3 = (byte)(s & 0xFF); output[currentPos++] = b1; output[currentPos++] = b2; output[currentPos++] = b3; realSize += 3; } /** * Appends a ULONG to the output array, * at the given position */ private void writeULong(int s) { byte b1 = (byte)((s >> 24) & 0xff); byte b2 = (byte)((s >> 16) & 0xff); byte b3 = (byte)((s >> 8) & 0xff); byte b4 = (byte)(s & 0xff); output[currentPos++] = b1; output[currentPos++] = b2; output[currentPos++] = b3; output[currentPos++] = b4; realSize += 4; } /** * Returns a subset of the fonts (readFont() MUST be called first in order to create the * subset). * @return byte array */ public byte[] getFontSubset() { byte[] ret = new byte[realSize]; System.arraycopy(output, 0, ret, 0, realSize); return ret; } }
FOP-2323: NPE caused by missing local subroutine index in private dictionary of OTF font git-svn-id: 102839466c3b40dd9c7e25c0a1a6d26afc40150a@1547330 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/fop/fonts/truetype/OTFSubSetFile.java
FOP-2323: NPE caused by missing local subroutine index in private dictionary of OTF font
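The createCharStringData method in the OTFSubSetFile contents above dereferences privateDICT.get("Subrs") without checking that the entry exists, which matches the failure this commit message describes: a Private DICT with no local subroutine index leads to an NPE. The fragment below is only a sketch of the kind of guard such a fix needs, written against the identifiers visible in the file above; it is not the actual FOP-2323 patch.

// Sketch only, not the committed FOP-2323 fix: the "Subrs" entry of the
// Private DICT is optional, so only read the local subroutine index when
// it is actually present.
DICTEntry privateEntry = topDICT.get("Private");
if (privateEntry != null) {
    int privateOffset = privateEntry.getOperands().get(1).intValue();
    Map<String, DICTEntry> privateDICT = cffReader.getPrivateDict(privateEntry);
    DICTEntry subrs = privateDICT.get("Subrs");
    if (subrs != null) {
        int localSubrOffset = privateOffset + subrs.getOperands().get(0).intValue();
        localIndexSubr = cffReader.readIndex(localSubrOffset);
    }
}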
Java
apache-2.0
4d46761b9e69b39c22aa530af1126e1e84991799
0
cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x
/* * Copyright 2002-2005 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.servlet.tags; import java.beans.PropertyEditor; import java.io.IOException; import javax.servlet.jsp.JspException; import javax.servlet.jsp.tagext.TagSupport; import org.springframework.web.util.ExpressionEvaluationUtils; import org.springframework.web.util.HtmlUtils; import org.springframework.web.util.TagUtils; /** * Tag for transforming reference data values from form controllers and * other objects inside a <code>spring:bind</code> tag. * * <p>The BindTag has a PropertyEditor that it uses to transform properties of * a bean to a String, useable in HTML forms. This tag uses that PropertyEditor * to transform objects passed into this tag. * * @author Alef Arendsen * @author Juergen Hoeller * @since 20.09.2003 * @see BindTag */ public class TransformTag extends HtmlEscapingAwareTag { /** the value to transform using the appropriate property editor */ private Object value; /** the variable to put the result in */ private String var; /** the scope of the variable the result will be put in */ private String scope = TagUtils.SCOPE_PAGE; /** * Set the value to transform, using the appropriate PropertyEditor * from the enclosing BindTag. * <p>The value can either be a plain value to transform (a hard-coded String * value in a JSP or a JSP expression), or a JSP EL expression to be evaluated * (transforming the result of the expression). * <p>Like all of Spring's JSP tags, this tag is capable of parsing EL expressions * itself, on any JSP version. Note, however, that EL expressions in a JSP 2.0 page * will be evaluated by the JSP container, with the result getting passed in here. * For this reason, the type of this property is Object (accepting any result * object from a pre-evaluated expression) rather than String. */ public void setValue(Object value) { this.value = value; } /** * Set PageContext attribute name under which to expose * a variable that contains the result of the transformation. * @see #setScope * @see javax.servlet.jsp.PageContext#setAttribute */ public void setVar(String var) { this.var = var; } /** * Set the scope to export the variable to. * Default is SCOPE_PAGE ("page"). * @see #setVar * @see org.springframework.web.util.TagUtils#SCOPE_PAGE * @see javax.servlet.jsp.PageContext#setAttribute */ public void setScope(String scope) { this.scope = scope; } protected final int doStartTagInternal() throws JspException { Object resolvedValue = this.value; if (this.value instanceof String) { String strValue = (String) this.value; resolvedValue = ExpressionEvaluationUtils.evaluate("value", strValue, pageContext); } if (resolvedValue != null) { // Find the BindTag, if applicable. BindTag tag = (BindTag) TagSupport.findAncestorWithClass(this, BindTag.class); if (tag == null) { // The tag can only be used within a BindTag. throw new JspException("TransformTag can only be used within BindTag"); } // OK, get the property editor. 
PropertyEditor editor = tag.getEditor(); String result = null; if (editor != null) { // If an editor was found, edit the value. editor.setValue(resolvedValue); result = editor.getAsText(); } else { // Else, just do a toString. result = resolvedValue.toString(); } result = isHtmlEscape() ? HtmlUtils.htmlEscape(result) : result; String resolvedVar = ExpressionEvaluationUtils.evaluateString("var", this.var, pageContext); if (resolvedVar != null) { String resolvedScope = ExpressionEvaluationUtils.evaluateString("scope", this.scope, pageContext); pageContext.setAttribute(resolvedVar, result, TagUtils.getScope(resolvedScope)); } else { try { // Else, just print it out. pageContext.getOut().print(result); } catch (IOException ex) { throw new JspException(ex); } } } return SKIP_BODY; } }
src/org/springframework/web/servlet/tags/TransformTag.java
/* * Copyright 2002-2005 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.servlet.tags; import java.beans.PropertyEditor; import java.io.IOException; import javax.servlet.jsp.JspException; import javax.servlet.jsp.tagext.TagSupport; import org.springframework.web.util.ExpressionEvaluationUtils; import org.springframework.web.util.HtmlUtils; import org.springframework.web.util.TagUtils; /** * Tag for transforming reference data values from form controllers and * other objects inside a <code>spring:bind</code> tag. * * <p>The BindTag has a PropertyEditor that it uses to transform properties of * a bean to a String, useable in HTML forms. This tag uses that PropertyEditor * to transform objects passed into this tag. * * @author Alef Arendsen * @author Juergen Hoeller * @since 20.09.2003 * @see BindTag */ public class TransformTag extends HtmlEscapingAwareTag { /** the value to transform using the appropriate property editor */ private Object value; /** the variable to put the result in */ private String var; /** the scope of the variable the result will be put in */ private String scope = TagUtils.SCOPE_PAGE; /** * Set the value to transform, using the appropriate PropertyEditor * from the enclosing BindTag. * <p>The value can either be a plain value to transform (a hard-coded String * value in a JSP or a JSP expression), or a JSP EL expression to be evaluated * (transforming the result of the expression). * <p>Like all of Spring's JSP tags, this tag is capable of parsing EL expressions * itself, on any JSP version. Note, however, that EL expressions in a JSP 2.0 page * will be evaluated by the JSP container, with the result getting passed in here. * For this reason, the type of this property is Object (accepting any result * object from a pre-evaluated expression) rather than String. */ public void setValue(Object value) { this.value = value; } /** * Set PageContext attribute name under which to expose * a variable that contains the result of the transformation. * @see #setScope * @see javax.servlet.jsp.PageContext#setAttribute */ public void setVar(String var) { this.var = var; } /** * Set the scope to export the variable to. * Default is SCOPE_PAGE ("page"). * @see #setVar * @see org.springframework.web.util.TagUtils#SCOPE_PAGE * @see javax.servlet.jsp.PageContext#setAttribute */ public void setScope(String scope) { this.scope = scope; } protected final int doStartTagInternal() throws JspException { Object resolvedValue = this.value; if (this.value instanceof String) { String strValue = (String) this.value; resolvedValue = ExpressionEvaluationUtils.evaluate("value", strValue, Object.class, pageContext); } if (resolvedValue != null) { // Find the BindTag, if applicable. BindTag tag = (BindTag) TagSupport.findAncestorWithClass(this, BindTag.class); if (tag == null) { // The tag can only be used within a BindTag. throw new JspException("TransformTag can only be used within BindTag"); } // OK, get the property editor. 
PropertyEditor editor = tag.getEditor(); String result = null; if (editor != null) { // If an editor was found, edit the value. editor.setValue(resolvedValue); result = editor.getAsText(); } else { // Else, just do a toString. result = resolvedValue.toString(); } result = isHtmlEscape() ? HtmlUtils.htmlEscape(result) : result; String resolvedVar = ExpressionEvaluationUtils.evaluateString("var", this.var, pageContext); if (resolvedVar != null) { String resolvedScope = ExpressionEvaluationUtils.evaluateString("scope", this.scope, pageContext); pageContext.setAttribute(resolvedVar, result, TagUtils.getScope(resolvedScope)); } else { try { // Else, just print it out. pageContext.getOut().print(result); } catch (IOException ex) { throw new JspException(ex); } } } return SKIP_BODY; } }
use simpler "evaluate" signature git-svn-id: b619a0c99665f88f1afe72824344cefe9a1c8c90@8375 fd5a2b45-1f63-4059-99e9-3c7cb7fd75c8
src/org/springframework/web/servlet/tags/TransformTag.java
use simpler "evaluate" signature
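The change this commit message refers to sits in doStartTagInternal() of TransformTag.java: the explicit result-type argument to ExpressionEvaluationUtils.evaluate is dropped in favour of the shorter overload. Both calls below are taken verbatim from the new and old contents above.

// old contents: result type passed explicitly
resolvedValue = ExpressionEvaluationUtils.evaluate("value", strValue, Object.class, pageContext);
// new contents: the simpler signature the commit message refers to
resolvedValue = ExpressionEvaluationUtils.evaluate("value", strValue, pageContext);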
Java
artistic-2.0
3af58b24a6cbfd9fe9febde22311dd453a1175a0
0
SpeedxPz/open2jam,open2jamorg/open2jam
package org.open2jam.render; import java.awt.Canvas; import java.awt.event.KeyEvent; import java.util.ArrayList; import javax.swing.JOptionPane; import java.util.Map; import org.open2jam.parser.ResourcesHandler; import org.open2jam.entities.*; public class Render extends Canvas implements GameWindowCallback { /** The window that is being used to render the game */ private GameWindow window; /** The time at which the last rendering looped started from the point of view of the game logic */ private long lastLoopTime = SystemTimer.getTime(); /** The time since the last record of fps */ private long lastFpsTime = 0; /** The recorded fps */ private int fps; private java.util.List<Entity> entities; private Entity key_0; public Render(int renderingType) { // create a window based on a chosen rendering method ResourceFactory.get().setRenderingType(renderingType); window = ResourceFactory.get().getGameWindow(); window.setResolution(800,600); window.setGameWindowCallback(this); window.setTitle("Render"); window.startRendering(); } /** * Intialise the common elements for the game. * this is called by the window render */ public void initialise() { entities = new java.util.ArrayList<Entity>(); EntityBuilder eb = new EntityBuilder(); try { javax.xml.parsers.SAXParser saxParser = javax.xml.parsers.SAXParserFactory.newInstance().newSAXParser(); saxParser.parse( new java.io.File("resources.xml"), new ResourcesHandler(eb) ); } catch (Exception err) { err.printStackTrace(); } Map<String,Entity> entites_map = eb.getResult(); Entity e = entites_map.get("note0"); e.setX(50); e.setY(50); entities.add(e); e = entites_map.get("note1"); e.setX(100); e.setY(50); entities.add(e); e = entites_map.get("note2"); e.setX(150); e.setY(50); entities.add(e); e = entites_map.get("note3"); e.setX(200); e.setY(50); entities.add(e); e = entites_map.get("note4"); e.setX(250); e.setY(50); entities.add(e); e = entites_map.get("note5"); e.setX(300); e.setY(50); entities.add(e); e = entites_map.get("note6"); e.setX(350); e.setY(50); entities.add(e); } /** * Notification that a frame is being rendered. Responsible for * running game logic and rendering the scene. */ public void frameRendering() { SystemTimer.sleep(lastLoopTime+10-SystemTimer.getTime()); // work out how long its been since the last update, this // will be used to calculate how far the entities should // move this loop long delta = SystemTimer.getTime() - lastLoopTime; lastLoopTime = SystemTimer.getTime(); lastFpsTime += delta; fps++; // update our FPS counter if a second has passed if (lastFpsTime >= 1000) { window.setTitle("Render"+" (FPS: "+fps+")"); lastFpsTime = 0; fps = 0; } // cycle round asking each entity to move itself for (int i=0;i<entities.size();i++)entities.get(i).move(delta); // cycle round drawing all the entities we have in the game for (int i=0;i<entities.size();i++)entities.get(i).draw(); boolean skp = window.isKeyPressed(KeyEvent.VK_SPACE); if(skp && entities.size() == 0){ entities.add(key_0.clone()); } } public boolean removeEntity(Entity e){ return entities.remove(e); } /** * Notifcation that the game window has been closed */ public void windowClosed() { System.exit(0); } /** * The entry point into the game. We'll simply create an * instance of class which will start the display and game * loop. * * @param argv The arguments that are passed into our game */ public static void main(String argv[]) { new Render(ResourceFactory.OPENGL_LWJGL); } }
ojn_render/org/open2jam/render/Render.java
package org.open2jam.render; import java.awt.Canvas; import java.awt.event.KeyEvent; import java.util.ArrayList; import javax.swing.JOptionPane; import java.util.Map; import org.open2jam.parser.ResourcesHandler; import org.open2jam.entities.*; public class Render extends Canvas implements GameWindowCallback { /** The window that is being used to render the game */ private GameWindow window; /** The time at which the last rendering looped started from the point of view of the game logic */ private long lastLoopTime = SystemTimer.getTime(); /** The time since the last record of fps */ private long lastFpsTime = 0; /** The recorded fps */ private int fps; private java.util.List<Entity> entities; private Entity key_0; public Render(int renderingType) { // create a window based on a chosen rendering method ResourceFactory.get().setRenderingType(renderingType); window = ResourceFactory.get().getGameWindow(); window.setResolution(800,600); window.setGameWindowCallback(this); window.setTitle("Render"); window.startRendering(); } /** * Intialise the common elements for the game. * this is called by the window render */ public void initialise() { entities = new java.util.ArrayList<Entity>(); EntityBuilder eb = new EntityBuilder(); try { javax.xml.parsers.SAXParser saxParser = javax.xml.parsers.SAXParserFactory.newInstance().newSAXParser(); saxParser.parse( new java.io.File("resources.xml"), new ResourcesHandler(eb) ); } catch (Exception err) { err.printStackTrace(); } Map<String,Entity> entites_map = eb.getResult(); Entity e = entites_map.get("note0"); e.setX(50); e.setY(50); entities.add(e); e = entites_map.get("note1"); e.setX(100); e.setY(50); entities.add(e); e = entites_map.get("note2"); e.setX(150); e.setY(50); entities.add(e); e = entites_map.get("note3"); e.setX(200); e.setY(50); entities.add(e); e = entites_map.get("note4"); e.setX(250); e.setY(50); entities.add(e); e = entites_map.get("note5"); e.setX(300); e.setY(50); entities.add(e); e = entites_map.get("note6"); e.setX(350); e.setY(50); entities.add(e); } /** * Notification that a frame is being rendered. Responsible for * running game logic and rendering the scene. */ public void frameRendering() { SystemTimer.sleep(lastLoopTime+10-SystemTimer.getTime()); // work out how long its been since the last update, this // will be used to calculate how far the entities should // move this loop long delta = SystemTimer.getTime() - lastLoopTime; lastLoopTime = SystemTimer.getTime(); lastFpsTime += delta; fps++; // update our FPS counter if a second has passed if (lastFpsTime >= 1000) { window.setTitle("Render"+" (FPS: "+fps+")"); lastFpsTime = 0; fps = 0; } // cycle round asking each entity to move itself for (int i=0;i<entities.size();i++)entities.get(i).move(delta); // cycle round drawing all the entities we have in the game for (int i=0;i<entities.size();i++)entities.get(i).draw(); boolean skp = window.isKeyPressed(KeyEvent.VK_SPACE); if(skp && entities.size() == 0){ entities.add(key_0.clone()); } } public boolean removeEntity(Entity e){ return entities.remove(e); } /** * Notifcation that the game window has been closed */ public void windowClosed() { System.exit(0); } /** * The entry point into the game. We'll simply create an * instance of class which will start the display and game * loop. 
* * @param argv The arguments that are passed into our game */ public static void main(String argv[]) { int result = JOptionPane.showOptionDialog(null,"Java2D or OpenGL?","Java2D or OpenGL?",JOptionPane.YES_NO_CANCEL_OPTION,JOptionPane.QUESTION_MESSAGE,null,new String[] {"Java2D","LWJGL"},null); if (result == 0) { new Render(ResourceFactory.JAVA2D); } else if (result == 1) { new Render(ResourceFactory.OPENGL_LWJGL); } } }
java2d is not working, so I'll remove it for now
ojn_render/org/open2jam/render/Render.java
java2d is not working, so I'll remove it for now
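The Render.java diff above matches this commit message: the old main() asked the user to choose between Java2D and LWJGL through a JOptionPane dialog, while the new main() constructs the renderer with the LWJGL backend only. Condensed from the two versions above:

// old contents: backend chosen through a dialog
int result = JOptionPane.showOptionDialog(null, "Java2D or OpenGL?", "Java2D or OpenGL?",
        JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE, null,
        new String[] {"Java2D", "LWJGL"}, null);
if (result == 0) {
    new Render(ResourceFactory.JAVA2D);
} else if (result == 1) {
    new Render(ResourceFactory.OPENGL_LWJGL);
}

// new contents: Java2D path removed for now, LWJGL used unconditionally
new Render(ResourceFactory.OPENGL_LWJGL);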
Java
bsd-3-clause
2b23f2e5622ecca0a3418af08a84ea5fdd04f84a
0
alexbirkett/kiwi-java
package no.birkett.kiwi; import java.util.*; /** * Created by alex on 30/01/15. */ public class Solver { private static class Tag { Symbol marker; Symbol other; public Tag(){ marker = new Symbol(); other = new Symbol(); } } private static class EditInfo { Tag tag; Constraint constraint; double constant; public EditInfo(Constraint constraint, Tag tag, double constant){ this.constraint = constraint; this.tag = tag; this.constant = constant; } } private Map<Constraint, Tag> cns = new LinkedHashMap<Constraint, Tag>(); private Map<Symbol, Row> rows = new LinkedHashMap<Symbol, Row>(); private Map<Variable, Symbol> vars = new LinkedHashMap<Variable, Symbol>(); private Map<Variable, EditInfo> edits = new LinkedHashMap<Variable, EditInfo>(); private List<Symbol> infeasibleRows = new ArrayList<Symbol>(); private Row objective = new Row(); private Row artificial; private long idTick = 1; /** * Add a constraint to the solver. * * @param constraint * @throws DuplicateConstraintException The given constraint has already been added to the solver. * @throws UnsatisfiableConstraintException The given constraint is required and cannot be satisfied. */ public void addConstraint(Constraint constraint) throws DuplicateConstraintException, UnsatisfiableConstraintException { if (cns.containsKey(constraint)) { throw new DuplicateConstraintException(constraint); } Tag tag = new Tag(); Row row = createRow(constraint, tag); Symbol subject = chooseSubject(row, tag); if(subject.getType() == Symbol.Type.INVALID && allDummies(row)){ if (!Util.nearZero(row.getConstant())) { throw new UnsatisfiableConstraintException(constraint); } else { subject = tag.marker; } } if (subject.getType() == Symbol.Type.INVALID) { if (!addWithArtificialVariable(row)) { throw new UnsatisfiableConstraintException(constraint); } } else { row.solveFor(subject); substitute(subject, row); this.rows.put(subject, row); } this.cns.put(constraint, tag); optimize(objective); } public void removeConstraint(Constraint constraint) throws UnknownConstraintException, InternalSolverError{ Tag tag = cns.get(constraint); if(tag == null){ throw new UnknownConstraintException(constraint); } cns.remove(constraint); removeConstraintEffects(constraint, tag); Row row = rows.get(tag.marker); if(row != null){ rows.remove(tag.marker); } else{ row = getMarkerLeavingRow(tag.marker); if(row == null){ throw new InternalSolverError("internal solver error"); } //This looks wrong! 
changes made below //Symbol leaving = tag.marker; //rows.remove(tag.marker); Symbol leaving = null; for(Symbol s: rows.keySet()){ if(rows.get(s) == row){ leaving = s; } } if(leaving == null){ throw new InternalSolverError("internal solver error"); } rows.remove(leaving); row.solveFor(leaving, tag.marker); substitute(tag.marker, row); } optimize(objective); } void removeConstraintEffects(Constraint constraint, Tag tag){ if(tag.marker.getType() == Symbol.Type.ERROR){ removeMarkerEffects(tag.marker, constraint.getStrength()); } else if(tag.other.getType() == Symbol.Type.ERROR){ removeMarkerEffects(tag.other, constraint.getStrength()); } } void removeMarkerEffects(Symbol marker, double strength){ Row row = rows.get(marker); if(row != null){ objective.insert(row, -strength); }else { objective.insert(marker, -strength); } } Row getMarkerLeavingRow(Symbol marker){ double dmax = Double.MAX_VALUE; double r1 = dmax; double r2 = dmax; Row first = null; Row second = null; Row third = null; for(Symbol s: rows.keySet()){ Row candidateRow = rows.get(s); double c = candidateRow.coefficientFor(marker); if(c == 0.0){ continue; } if(s.getType() == Symbol.Type.EXTERNAL){ third = candidateRow; } else if(c < 0.0){ double r = - candidateRow.getConstant() / c; if(r < r1){ r1 = r; first = candidateRow; } } else{ double r = candidateRow.getConstant() / c; if(r < r2){ r2 = r; second = candidateRow; } } } if(first != null){ return first; } if(second != null){ return second; } return third; } public boolean hasConstraint(Constraint constraint){ return cns.containsKey(constraint); } public void addEditVariable(Variable variable, double strength) throws DuplicateEditVariableException, RequiredFailureException{ if(edits.containsKey(variable)){ throw new DuplicateEditVariableException(); } strength = Strength.clip(strength); if(strength == Strength.REQUIRED){ throw new RequiredFailureException(); } List<Term> terms = new ArrayList<>(); terms.add(new Term(variable)); Constraint constraint = new Constraint(new Expression(terms), RelationalOperator.OP_EQ, strength); try { addConstraint(constraint); } catch (DuplicateConstraintException e) { e.printStackTrace(); } catch (UnsatisfiableConstraintException e) { e.printStackTrace(); } EditInfo info = new EditInfo(constraint, cns.get(constraint), 0.0); edits.put(variable, info); } public void removeEditVariable(Variable variable) throws UnknownEditVariableException{ EditInfo edit = edits.get(variable); if(edit == null){ throw new UnknownEditVariableException(); } try { removeConstraint(edit.constraint); } catch (UnknownConstraintException e) { e.printStackTrace(); } edits.remove(variable); } public boolean hasEditVariable(Variable variable){ return edits.containsKey(variable); } public void suggestValue(Variable variable, double value) throws UnknownEditVariableException{ EditInfo info = edits.get(variable); if(info == null){ throw new UnknownEditVariableException(); } double delta = value - info.constant; info.constant = value; Row row = rows.get(info.tag.marker); if(row != null){ if(row.add(-delta) < 0.0){ infeasibleRows.add(info.tag.marker); } dualOptimize(); return; } row = rows.get(info.tag.other); if(row != null){ if(row.add(delta) < 0.0){ infeasibleRows.add(info.tag.other); } dualOptimize(); return; } for(Symbol s: rows.keySet()){ Row currentRow = rows.get(s); double coefficient = currentRow.coefficientFor(info.tag.marker); if(coefficient != 0.0 && currentRow.add(delta * coefficient) < 0.0 && s.getType() != Symbol.Type.EXTERNAL){ infeasibleRows.add(s); } } dualOptimize(); } 
/** * Update the values of the external solver variables. */ public void updateVariables() { for (Map.Entry<Variable, Symbol> varEntry : vars.entrySet()) { Variable variable = varEntry.getKey(); Row row = this.rows.get(varEntry.getValue()); if (row == null) { variable.setValue(0); } else { variable.setValue(row.getConstant()); } } } /** * Create a new Row object for the given constraint. * <p/> * The terms in the constraint will be converted to cells in the row. * Any term in the constraint with a coefficient of zero is ignored. * This method uses the `getVarSymbol` method to get the symbol for * the variables added to the row. If the symbol for a given cell * variable is basic, the cell variable will be substituted with the * basic row. * <p/> * The necessary slack and error variables will be added to the row. * If the constant for the row is negative, the sign for the row * will be inverted so the constant becomes positive. * <p/> * The tag will be updated with the marker and error symbols to use * for tracking the movement of the constraint in the tableau. */ Row createRow(Constraint constraint, Tag tag) { Expression expression = constraint.getExpression(); Row row = new Row(expression.getConstant()); for (Term term : expression.getTerms()) { if (!Util.nearZero(term.getCoefficient())) { Symbol symbol = getVarSymbol(term.getVariable()); Row otherRow = rows.get(symbol); if (otherRow == null) { row.insert(symbol, term.getCoefficient()); } else { row.insert(otherRow, term.getCoefficient()); } } } switch (constraint.getOp()) { case OP_LE: case OP_GE: { double coeff = constraint.getOp() == RelationalOperator.OP_LE ? 1.0 : -1.0; Symbol slack = new Symbol(Symbol.Type.SLACK, idTick++); tag.marker = slack; row.insert(slack, coeff); if (constraint.getStrength() < Strength.REQUIRED) { Symbol error = new Symbol(Symbol.Type.ERROR, idTick++); tag.other = error; row.insert(error, -coeff); this.objective.insert(error, constraint.getStrength()); } break; } case OP_EQ: { if (constraint.getStrength() < Strength.REQUIRED) { Symbol errplus = new Symbol(Symbol.Type.ERROR, idTick++); Symbol errminus = new Symbol(Symbol.Type.ERROR, idTick++); tag.marker = errplus; tag.other = errminus; row.insert(errplus, -1.0); // v = eplus - eminus row.insert(errminus, 1.0); // v - eplus + eminus = 0 this.objective.insert(errplus, constraint.getStrength()); this.objective.insert(errminus, constraint.getStrength()); } else { Symbol dummy = new Symbol(Symbol.Type.DUMMY, idTick++); tag.marker = dummy; row.insert(dummy); } break; } } // Ensure the row as a positive constant. if (row.getConstant() < 0.0) { row.reverseSign(); } return row; } /** * Choose the subject for solving for the row * <p/> * This method will choose the best subject for using as the solve * target for the row. An invalid symbol will be returned if there * is no valid target. * The symbols are chosen according to the following precedence: * 1) The first symbol representing an external variable. * 2) A negative slack or error tag variable. * If a subject cannot be found, an invalid symbol will be returned. 
*/ private static Symbol chooseSubject(Row row, Tag tag) { for (Map.Entry<Symbol, Double> cell : row.getCells().entrySet()) { if (cell.getKey().getType() == Symbol.Type.EXTERNAL) { return cell.getKey(); } } if (tag.marker.getType() == Symbol.Type.SLACK || tag.marker.getType() == Symbol.Type.ERROR) { if (row.coefficientFor(tag.marker) < 0.0) return tag.marker; } if (tag.other != null && (tag.other.getType() == Symbol.Type.SLACK || tag.other.getType() == Symbol.Type.ERROR)) { if (row.coefficientFor(tag.other) < 0.0) return tag.other; } return new Symbol(); } /** * Add the row to the tableau using an artificial variable. * <p/> * This will return false if the constraint cannot be satisfied. */ private boolean addWithArtificialVariable(Row row) { //TODO check this // Create and add the artificial variable to the tableau Symbol art = new Symbol(Symbol.Type.SLACK, idTick++); rows.put(art, row.deepCopy()); this.artificial = row.deepCopy(); // Optimize the artificial objective. This is successful // only if the artificial objective is optimized to zero. optimize(this.artificial); boolean success = Util.nearZero(artificial.getConstant()); artificial = null; // If the artificial variable is basic, pivot the row so that // it becomes basic. If the row is constant, exit early. Row rowptr = this.rows.get(art); if (rowptr != null) { /**this looks wrong!!!*/ //rows.remove(rowptr); LinkedList<Symbol> deleteQueue = new LinkedList<>(); for(Symbol s: rows.keySet()){ if(rows.get(s) == rowptr){ deleteQueue.add(s); } } while(!deleteQueue.isEmpty()){ rows.remove(deleteQueue.pop()); } deleteQueue.clear(); if (rowptr.getCells().isEmpty()) { return success; } Symbol entering = anyPivotableSymbol(rowptr); if (entering.getType() == Symbol.Type.INVALID) { return false; // unsatisfiable (will this ever happen?) } rowptr.solveFor(art, entering); substitute(entering, rowptr); this.rows.put(entering, rowptr); } // Remove the artificial variable from the tableau. for (Map.Entry<Symbol, Row> rowEntry : rows.entrySet()) { rowEntry.getValue().remove(art); } objective.remove(art); return success; } /** * Substitute the parametric symbol with the given row. * <p/> * This method will substitute all instances of the parametric symbol * in the tableau and the objective function with the given row. */ void substitute(Symbol symbol, Row row) { for (Map.Entry<Symbol, Row> rowEntry : rows.entrySet()) { rowEntry.getValue().substitute(symbol, row); if (rowEntry.getKey().getType() != Symbol.Type.EXTERNAL && rowEntry.getValue().getConstant() < 0.0) { infeasibleRows.add(rowEntry.getKey()); } } objective.substitute(symbol, row); if (artificial != null) { artificial.substitute(symbol, row); } } /** * Optimize the system for the given objective function. * <p/> * This method performs iterations of Phase 2 of the simplex method * until the objective function reaches a minimum. * * @throws InternalSolverError The value of the objective function is unbounded. 
*/ void optimize(Row objective) { while (true) { Symbol entering = getEnteringSymbol(objective); if (entering.getType() == Symbol.Type.INVALID) { return; } Row entry = getLeavingRow(entering); if(entry == null){ throw new InternalSolverError("The objective is unbounded."); } Symbol leaving = null; for(Symbol key: rows.keySet()){ if(rows.get(key) == entry){ leaving = key; } } Symbol entryKey = null; for(Symbol key: rows.keySet()){ if(rows.get(key) == entry){ entryKey = key; } } rows.remove(entryKey); entry.solveFor(leaving, entering); substitute(entering, entry); rows.put(entering, entry); } } void dualOptimize() throws InternalSolverError{ while(!infeasibleRows.isEmpty()){ Symbol leaving = infeasibleRows.remove(infeasibleRows.size() - 1); Row row = rows.get(leaving); if(row != null && row.getConstant() < 0.0){ Symbol entering = getDualEnteringSymbol(row); if(entering.getType() == Symbol.Type.INVALID){ throw new InternalSolverError("internal solver error"); } rows.remove(leaving); row.solveFor(leaving, entering); substitute(entering, row); rows.put(entering, row); } } } /** * Compute the entering variable for a pivot operation. * <p/> * This method will return first symbol in the objective function which * is non-dummy and has a coefficient less than zero. If no symbol meets * the criteria, it means the objective function is at a minimum, and an * invalid symbol is returned. */ private static Symbol getEnteringSymbol(Row objective) { for (Map.Entry<Symbol, Double> cell : objective.getCells().entrySet()) { if (cell.getKey().getType() != Symbol.Type.DUMMY && cell.getValue() < 0.0) { return cell.getKey(); } } return new Symbol(); } private Symbol getDualEnteringSymbol(Row row){ Symbol entering = new Symbol(); double ratio = Double.MAX_VALUE; for(Symbol s: row.getCells().keySet()){ if(s.getType() != Symbol.Type.DUMMY){ double currentCell = row.getCells().get(s); if(currentCell > 0.0){ double coefficient = objective.coefficientFor(s); double r = coefficient / currentCell; if(r < ratio){ ratio = r; entering = s; } } } } return entering; } /** * Get the first Slack or Error symbol in the row. * <p/> * If no such symbol is present, and Invalid symbol will be returned. */ private Symbol anyPivotableSymbol(Row row) { Symbol symbol = null; for (Map.Entry<Symbol, Double> entry : row.getCells().entrySet()) { if (entry.getKey().getType() == Symbol.Type.SLACK || entry.getKey().getType() == Symbol.Type.ERROR) { symbol = entry.getKey(); } } if (symbol == null) { symbol = new Symbol(); } return symbol; } /** * Compute the row which holds the exit symbol for a pivot. * <p/> * This documentation is copied from the C++ version and is outdated * <p/> * <p/> * This method will return an iterator to the row in the row map * which holds the exit symbol. If no appropriate exit symbol is * found, the end() iterator will be returned. This indicates that * the objective function is unbounded. */ private Row getLeavingRow(Symbol entering) { double ratio = Double.MAX_VALUE; Row row = null; for(Symbol key: rows.keySet()){ if(key.getType() != Symbol.Type.EXTERNAL){ Row candidateRow = rows.get(key); double temp = candidateRow.coefficientFor(entering); if(temp < 0){ double temp_ratio = (-candidateRow.getConstant() / temp); if(temp_ratio < ratio){ ratio = temp_ratio; row = candidateRow; } } } } return row; } /** * Get the symbol for the given variable. * <p/> * If a symbol does not exist for the variable, one will be created. 
*/ private Symbol getVarSymbol(Variable variable) { Symbol symbol; if (vars.containsKey(variable)) { symbol = vars.get(variable); } else { symbol = new Symbol(Symbol.Type.EXTERNAL, idTick++); symbol.setVariableName(variable.getName()); vars.put(variable, symbol); } return symbol; } /** * Test whether a row is composed of all dummy variables. */ private static boolean allDummies(Row row) { for (Map.Entry<Symbol, Double> cell : row.getCells().entrySet()) { if (cell.getKey().getType() != Symbol.Type.DUMMY) { return false; } } return true; } }
src/main/java/no/birkett/kiwi/Solver.java
package no.birkett.kiwi; import java.util.*; /** * Created by alex on 30/01/15. */ public class Solver { private static class Tag { Symbol marker; Symbol other; public Tag(){ marker = new Symbol(); other = new Symbol(); } } private static class EditInfo { Tag tag; Constraint constraint; double constant; public EditInfo(Constraint constraint, Tag tag, double constant){ this.constraint = constraint; this.tag = tag; this.constant = constant; } } private Map<Constraint, Tag> cns = new LinkedHashMap<Constraint, Tag>(); private Map<Symbol, Row> rows = new LinkedHashMap<Symbol, Row>(); private Map<Variable, Symbol> vars = new LinkedHashMap<Variable, Symbol>(); private Map<Variable, EditInfo> edits = new LinkedHashMap<Variable, EditInfo>(); private List<Symbol> infeasibleRows = new ArrayList<Symbol>(); private Row objective = new Row(); private Row artificial; private long idTick = 1; /** * Add a constraint to the solver. * * @param constraint * @throws DuplicateConstraintException The given constraint has already been added to the solver. * @throws UnsatisfiableConstraintException The given constraint is required and cannot be satisfied. */ public void addConstraint(Constraint constraint) throws DuplicateConstraintException, UnsatisfiableConstraintException { if (cns.containsKey(constraint)) { throw new DuplicateConstraintException(constraint); } Tag tag = new Tag(); Row row = createRow(constraint, tag); Symbol subject = chooseSubject(row, tag); if(subject.getType() == Symbol.Type.INVALID && allDummies(row)){ if (!Util.nearZero(row.getConstant())) { throw new UnsatisfiableConstraintException(constraint); } else { subject = tag.marker; } } if (subject.getType() == Symbol.Type.INVALID) { if (!addWithArtificialVariable(row)) { throw new UnsatisfiableConstraintException(constraint); } } else { row.solveFor(subject); substitute(subject, row); this.rows.put(subject, row); } this.cns.put(constraint, tag); optimize(objective); } public void removeConstraint(Constraint constraint) throws UnknownConstraintException, InternalSolverError{ Tag tag = cns.get(constraint); if(tag == null){ throw new UnknownConstraintException(constraint); } cns.remove(constraint); removeConstraintEffects(constraint, tag); Row row = rows.get(tag.marker); if(row != null){ rows.remove(tag.marker); } else{ row = getMarkerLeavingRow(tag.marker); if(row == null){ throw new InternalSolverError("internal solver error"); } //This looks wrong! 
changes made below //Symbol leaving = tag.marker; //rows.remove(tag.marker); Symbol leaving = null; for(Symbol s: rows.keySet()){ if(rows.get(s) == row){ leaving = s; } } if(leaving == null){ throw new InternalSolverError("internal solver error"); } rows.remove(leaving); row.solveFor(leaving, tag.marker); substitute(tag.marker, row); } optimize(objective); } void removeConstraintEffects(Constraint constraint, Tag tag){ if(tag.marker.getType() == Symbol.Type.ERROR){ removeMarkerEffects(tag.marker, constraint.getStrength()); } else if(tag.other.getType() == Symbol.Type.ERROR){ removeMarkerEffects(tag.other, constraint.getStrength()); } } void removeMarkerEffects(Symbol marker, double strength){ Row row = rows.get(marker); if(row != null){ objective.insert(row, -strength); }else { objective.insert(marker, -strength); } } Row getMarkerLeavingRow(Symbol marker){ double dmax = Double.MAX_VALUE; double r1 = dmax; double r2 = dmax; Row first = null; Row second = null; Row third = null; for(Symbol s: rows.keySet()){ Row candidateRow = rows.get(s); double c = candidateRow.coefficientFor(marker); if(c == 0.0){ continue; } if(s.getType() == Symbol.Type.EXTERNAL){ third = candidateRow; } else if(c < 0.0){ double r = - candidateRow.getConstant() / c; if(r < r1){ r1 = r; first = candidateRow; } } else{ double r = candidateRow.getConstant() / c; if(r < r2){ r2 = r; second = candidateRow; } } } if(first != null){ return first; } if(second != null){ return second; } return third; } public boolean hasConstraint(Constraint constraint){ return cns.containsKey(constraint); } public void addEditVariable(Variable variable, double strength) throws DuplicateEditVariableException, RequiredFailureException{ if(edits.containsKey(variable)){ throw new DuplicateEditVariableException(); } strength = Strength.clip(strength); if(strength == Strength.REQUIRED){ throw new RequiredFailureException(); } List<Term> terms = new ArrayList<>(); terms.add(new Term(variable)); Constraint constraint = new Constraint(new Expression(terms), RelationalOperator.OP_EQ, strength); try { addConstraint(constraint); } catch (DuplicateConstraintException e) { e.printStackTrace(); } catch (UnsatisfiableConstraintException e) { e.printStackTrace(); } EditInfo info = new EditInfo(constraint, cns.get(constraint), 0.0); edits.put(variable, info); } public void removeEditVariable(Variable variable) throws UnknownEditVariableException{ EditInfo edit = edits.get(variable); if(edit == null){ throw new UnknownEditVariableException(); } try { removeConstraint(edit.constraint); } catch (UnknownConstraintException e) { e.printStackTrace(); } edits.remove(variable); } public boolean hasEditVariable(Variable variable){ return edits.containsKey(variable); } public void suggestValue(Variable variable, double value) throws UnknownEditVariableException{ EditInfo info = edits.get(variable); if(info == null){ throw new UnknownEditVariableException(); } double delta = value - info.constant; info.constant = value; Row row = rows.get(info.tag.marker); if(row != null){ if(row.add(-delta) < 0.0){ infeasibleRows.add(info.tag.marker); } dualOptimize(); return; } row = rows.get(info.tag.other); if(row != null){ if(row.add(delta) < 0.0){ infeasibleRows.add(info.tag.other); } dualOptimize(); return; } for(Symbol s: rows.keySet()){ Row currentRow = rows.get(s); double coefficient = currentRow.coefficientFor(info.tag.marker); if(coefficient != 0.0 && currentRow.add(delta * coefficient) < 0.0 && s.getType() != Symbol.Type.EXTERNAL){ infeasibleRows.add(s); } } dualOptimize(); } 
/** * Update the values of the external solver variables. */ public void updateVariables() { for (Map.Entry<Variable, Symbol> varEntry : vars.entrySet()) { Variable variable = varEntry.getKey(); Row row = this.rows.get(varEntry.getValue()); if (row == null) { variable.setValue(0); } else { variable.setValue(row.getConstant()); } } } /** * Create a new Row object for the given constraint. * <p/> * The terms in the constraint will be converted to cells in the row. * Any term in the constraint with a coefficient of zero is ignored. * This method uses the `getVarSymbol` method to get the symbol for * the variables added to the row. If the symbol for a given cell * variable is basic, the cell variable will be substituted with the * basic row. * <p/> * The necessary slack and error variables will be added to the row. * If the constant for the row is negative, the sign for the row * will be inverted so the constant becomes positive. * <p/> * The tag will be updated with the marker and error symbols to use * for tracking the movement of the constraint in the tableau. */ Row createRow(Constraint constraint, Tag tag) { Expression expression = constraint.getExpression(); Row row = new Row(expression.getConstant()); for (Term term : expression.getTerms()) { if (!Util.nearZero(term.getCoefficient())) { Symbol symbol = getVarSymbol(term.getVariable()); Row otherRow = rows.get(symbol); if (otherRow == null) { row.insert(symbol, term.getCoefficient()); } else { row.insert(otherRow, term.getCoefficient()); } } } switch (constraint.getOp()) { case OP_LE: case OP_GE: { double coeff = constraint.getOp() == RelationalOperator.OP_LE ? 1.0 : -1.0; Symbol slack = new Symbol(Symbol.Type.SLACK, idTick++); tag.marker = slack; row.insert(slack, coeff); if (constraint.getStrength() < Strength.REQUIRED) { Symbol error = new Symbol(Symbol.Type.ERROR, idTick++); tag.other = error; row.insert(error, -coeff); this.objective.insert(error, constraint.getStrength()); } break; } case OP_EQ: { if (constraint.getStrength() < Strength.REQUIRED) { Symbol errplus = new Symbol(Symbol.Type.ERROR, idTick++); Symbol errminus = new Symbol(Symbol.Type.ERROR, idTick++); tag.marker = errplus; tag.other = errminus; row.insert(errplus, -1.0); // v = eplus - eminus row.insert(errminus, 1.0); // v - eplus + eminus = 0 this.objective.insert(errplus, constraint.getStrength()); this.objective.insert(errminus, constraint.getStrength()); } else { Symbol dummy = new Symbol(Symbol.Type.DUMMY, idTick++); tag.marker = dummy; row.insert(dummy); } break; } } // Ensure the row as a positive constant. if (row.getConstant() < 0.0) { row.reverseSign(); } return row; } /** * Choose the subject for solving for the row * <p/> * This method will choose the best subject for using as the solve * target for the row. An invalid symbol will be returned if there * is no valid target. * The symbols are chosen according to the following precedence: * 1) The first symbol representing an external variable. * 2) A negative slack or error tag variable. * If a subject cannot be found, an invalid symbol will be returned. 
*/ private static Symbol chooseSubject(Row row, Tag tag) { for (Map.Entry<Symbol, Double> cell : row.getCells().entrySet()) { if (cell.getKey().getType() == Symbol.Type.EXTERNAL) { return cell.getKey(); } } if (tag.marker.getType() == Symbol.Type.SLACK || tag.marker.getType() == Symbol.Type.ERROR) { if (row.coefficientFor(tag.marker) < 0.0) return tag.marker; } if (tag.other != null && (tag.other.getType() == Symbol.Type.SLACK || tag.other.getType() == Symbol.Type.ERROR)) { if (row.coefficientFor(tag.other) < 0.0) return tag.other; } return new Symbol(); } /** * Add the row to the tableau using an artificial variable. * <p/> * This will return false if the constraint cannot be satisfied. */ private boolean addWithArtificialVariable(Row row) { //TODO check this // Create and add the artificial variable to the tableau Symbol art = new Symbol(Symbol.Type.SLACK, idTick++); rows.put(art, row.deepCopy()); this.artificial = row.deepCopy(); // Optimize the artificial objective. This is successful // only if the artificial objective is optimized to zero. optimize(this.artificial); boolean success = Util.nearZero(artificial.getConstant()); artificial = null; // If the artificial variable is basic, pivot the row so that // it becomes basic. If the row is constant, exit early. Row rowptr = this.rows.get(art); if (rowptr != null) { /**this looks wrong!!!*/ //rows.remove(rowptr); LinkedList<Symbol> deleteQueue = new LinkedList<>(); for(Symbol s: rows.keySet()){ if(rows.get(s) == rowptr){ deleteQueue.add(s); } } while(!deleteQueue.isEmpty()){ rows.remove(deleteQueue.pop()); } deleteQueue.clear(); if (rowptr.getCells().isEmpty()) { return success; } Symbol entering = anyPivotableSymbol(rowptr); if (entering.getType() == Symbol.Type.INVALID) { return false; // unsatisfiable (will this ever happen?) } rowptr.solveFor(art, entering); substitute(entering, rowptr); this.rows.put(entering, rowptr); } // Remove the artificial variable from the tableau. for (Map.Entry<Symbol, Row> rowEntry : rows.entrySet()) { rowEntry.getValue().remove(art); } objective.remove(art); return success; } /** * Substitute the parametric symbol with the given row. * <p/> * This method will substitute all instances of the parametric symbol * in the tableau and the objective function with the given row. */ void substitute(Symbol symbol, Row row) { for (Map.Entry<Symbol, Row> rowEntry : rows.entrySet()) { rowEntry.getValue().substitute(symbol, row); if (rowEntry.getKey().getType() != Symbol.Type.EXTERNAL && rowEntry.getValue().getConstant() < 0.0) { infeasibleRows.add(rowEntry.getKey()); } } objective.substitute(symbol, row); if (artificial != null) { artificial.substitute(symbol, row); } } /** * Optimize the system for the given objective function. * <p/> * This method performs iterations of Phase 2 of the simplex method * until the objective function reaches a minimum. * * @throws InternalSolverError The value of the objective function is unbounded. 
*/ void optimize(Row objective) { while (true) { Symbol entering = getEnteringSymbol(objective); if (entering.getType() == Symbol.Type.INVALID) { return; } Row entry = getLeavingRow(entering); if(entry == null){ throw new InternalSolverError("The objective is unbounded."); } Symbol leaving = null; for(Symbol key: rows.keySet()){ if(rows.get(key) == entry){ leaving = key; } } Symbol entryKey = null; for(Symbol key: rows.keySet()){ if(rows.get(key) == entry){ entryKey = key; } } rows.remove(entryKey); entry.solveFor(leaving, entering); substitute(entering, entry); rows.put(entering, entry); } } void dualOptimize() throws InternalSolverError{ while(!infeasibleRows.isEmpty()){ Symbol leaving = infeasibleRows.remove(infeasibleRows.size() - 1); Row row = rows.get(leaving); if(row != null && row.getConstant() < 0.0){ Symbol entering = getDualEnteringSymbol(row); if(entering.getType() == Symbol.Type.INVALID){ throw new InternalSolverError("internal solver error"); } rows.remove(entering); row.solveFor(leaving, entering); substitute(entering, row); rows.put(entering, row); } } } /** * Compute the entering variable for a pivot operation. * <p/> * This method will return first symbol in the objective function which * is non-dummy and has a coefficient less than zero. If no symbol meets * the criteria, it means the objective function is at a minimum, and an * invalid symbol is returned. */ private static Symbol getEnteringSymbol(Row objective) { for (Map.Entry<Symbol, Double> cell : objective.getCells().entrySet()) { if (cell.getKey().getType() != Symbol.Type.DUMMY && cell.getValue() < 0.0) { return cell.getKey(); } } return new Symbol(); } private Symbol getDualEnteringSymbol(Row row){ Symbol entering = new Symbol(); double ratio = Double.MAX_VALUE; for(Symbol s: row.getCells().keySet()){ if(s.getType() != Symbol.Type.DUMMY){ double currentCell = row.getCells().get(s); if(currentCell > 0.0){ double coefficient = objective.coefficientFor(s); double r = coefficient / currentCell; if(r < ratio){ ratio = r; entering = s; } } } } return entering; } /** * Get the first Slack or Error symbol in the row. * <p/> * If no such symbol is present, and Invalid symbol will be returned. */ private Symbol anyPivotableSymbol(Row row) { Symbol symbol = null; for (Map.Entry<Symbol, Double> entry : row.getCells().entrySet()) { if (entry.getKey().getType() == Symbol.Type.SLACK || entry.getKey().getType() == Symbol.Type.ERROR) { symbol = entry.getKey(); } } if (symbol == null) { symbol = new Symbol(); } return symbol; } /** * Compute the row which holds the exit symbol for a pivot. * <p/> * This documentation is copied from the C++ version and is outdated * <p/> * <p/> * This method will return an iterator to the row in the row map * which holds the exit symbol. If no appropriate exit symbol is * found, the end() iterator will be returned. This indicates that * the objective function is unbounded. */ private Row getLeavingRow(Symbol entering) { double ratio = Double.MAX_VALUE; Row row = null; for(Symbol key: rows.keySet()){ if(key.getType() != Symbol.Type.EXTERNAL){ Row candidateRow = rows.get(key); double temp = candidateRow.coefficientFor(entering); if(temp < 0){ double temp_ratio = (-candidateRow.getConstant() / temp); if(temp_ratio < ratio){ ratio = temp_ratio; row = candidateRow; } } } } return row; } /** * Get the symbol for the given variable. * <p/> * If a symbol does not exist for the variable, one will be created. 
*/ private Symbol getVarSymbol(Variable variable) { Symbol symbol; if (vars.containsKey(variable)) { symbol = vars.get(variable); } else { symbol = new Symbol(Symbol.Type.EXTERNAL, idTick++); symbol.setVariableName(variable.getName()); vars.put(variable, symbol); } return symbol; } /** * Test whether a row is composed of all dummy variables. */ private static boolean allDummies(Row row) { for (Map.Entry<Symbol, Double> cell : row.getCells().entrySet()) { if (cell.getKey().getType() != Symbol.Type.DUMMY) { return false; } } return true; } }
Erase the leaving symbols, not the entering symbols in dualOptimize. Matches the Kiwi C++ implementation: https://github.com/nucleic/kiwi/blob/0989ff015fdea5c59c90b1fd87383dfe6f733257/kiwi/solverimpl.h#L615
src/main/java/no/birkett/kiwi/Solver.java
Erase the leaving symbols, not the entering symbols in dualOptimize. Matches the Kiwi C++ implementation: https://github.com/nucleic/kiwi/blob/0989ff015fdea5c59c90b1fd87383dfe6f733257/kiwi/solverimpl.h#L615
Java
mit
1e6c16525d79bcc4e9351edaf5ec6c4221026fdd
0
nking/curvature-scale-space-corners-and-transformations,nking/curvature-scale-space-corners-and-transformations
package algorithms.imageProcessing; import algorithms.QuickSort; import algorithms.compGeometry.LinesAndAngles; import algorithms.util.PairIntArray; import gnu.trove.list.TDoubleList; import gnu.trove.list.TIntList; import gnu.trove.list.array.TDoubleArrayList; import gnu.trove.list.array.TIntArrayList; import gnu.trove.map.TIntDoubleMap; import gnu.trove.map.TIntObjectMap; import gnu.trove.map.hash.TIntObjectHashMap; import gnu.trove.set.TIntSet; import gnu.trove.set.hash.TIntHashSet; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.logging.Logger; /** NOTE: NOT READY FOR USE YET. TODO: need to change read pattern of difference sat, and optimization. "Efficient Partial Shape Matching of Outer Contours: by Donoser - called IS-Match, integral shape match - a silhouette of ordered points are sampled making it an "order preserved assignment problem". - a chord angle descriptor is local and global and is invariant to similarity transformations. - the method returns partial sub matches so works with articulated data and occluded shapes - uses an efficient integral image based matching algorithm - the multi-objective optimization uses principles of Paretto efficiency, defined with the fraction of the total matched and the summed differences of angles. - the final result returned is the sequences and the total fraction matched and summed absolute differences, instead of the Salukwadze distance of a Paretto frontier. * point sampling: (a) same number of points over each contour - can handle similarity transforms, but not occlusion (b) OR, equidistant points - can handle occlusion, but not scale ** equidistant is used here. NOTE: changes will be made soon to accomodate search of remaining points when there are unequal number of points. The runtime complexity for building the integral image is O(m*n) where n and m are the number of sampled points on the input shapes. The runtime complexity for the search of the integral image of summed differences and analysis will be added here: * * @author nichole */ public class PartialShapeMatcher { /** * in sampling the boundaries of the shapes, one can * choose to use the same number for each (which can result * in very different spacings for different sized curves) * or one can choose a set distance between sampling * points. * dp is the set distance between sampling points. The authors use 3 as an example. */ protected int dp = 5; protected Logger log = Logger.getLogger(this.getClass().getName()); public void overrideSamplingDistance(int d) { this.dp = d; } /** * NOT READY FOR USE. A shape is defined as the CW ordered sequence of points P_1...P_N and the shape to match has points Q_1...Q_N. The spacings used within this method are equidistant and the default is 5, so override that if a different number is needed. The fixed equidistant spacing is invariant to rotation and translation, but not to scale, so if the user needs to solve for scale, need to do so outside of this method, that is, apply scale changes to the datasets before use of this method.. 
* @param p * @param q */ public Sequences match(PairIntArray p, PairIntArray q) { log.info("p.n=" + p.getN() + " q.n=" + q.getN()); int diffN = p.getN() - q.getN(); // --- make difference matrices --- //md[0:n2-1][0:n1-1][0:n1-1] float[][][] md; int n1, n2; if (diffN <= 0) { n1 = p.getN(); n2 = q.getN(); md = createDifferenceMatrices(p, q); } else { n1 = q.getN(); n2 = p.getN(); md = createDifferenceMatrices(q, p); } /* the matrices in md can be analyzed for best global solution and separately for best local solution. This method will return results for a local solution to create the point correspondence list. Note that the local best could be two different kinds of models, so might write two different methods for the results. (1) the assumption of same object but with some amount of occlusion, hence gaps in correspondence. (2) the assumption of same object but with some parts being differently oriented, for an example, the scissors opened versus closed. */ List<Sequence> sequences = new ArrayList<Sequence>(); List<Sequence> discarded = new ArrayList<Sequence>(); extractSimilar(md, sequences, discarded); /* need sum of differences in sequence and the fraction of the whole. paretto efficiency is that all are at a compromise of best state, such that increasing the state of one did not worsen the state of another. prefer: -- smaller total difference for largest fraction of whole -- 2ndly, largest total coverage Note that for the rigid model (exvepting scale transformation) one would want to maximize the 3nd point, coverage, first with a consistent transformation. The articulated model chooses the 2nd point, second to get best fits of components first. */ Sequences sequences0 = matchArticulated( sequences, n1, n2); //addFeasibleDiscarded(sequences0, discarded); if (diffN <= 0) { return sequences0; } transpose(sequences0, n1, n2); return sequences0; } protected void extractSimilar(float[][][] md, List<Sequence> sequences, List<Sequence> discarded) { //md[0:n2-1][0:n1-1][0:n1-1] int n2 = md.length; int n1 = md[0].length; int rMax = (int)Math.sqrt(n1); if (rMax < 1) { rMax = 1; } // 23 degrees is 0.4014 double thresh = 23. * Math.PI/180.; /* TODO: will apply a different pattern of reading the blocks and merging results next. 
*/ MinDiffs mins = new MinDiffs(n1); for (int r = 2; r <= rMax; ++r) { findMinDifferenceMatrix(md, r, thresh, mins); } // 10 degrees is 0.175 double tolerance = 0.25; DiffMatrixResults equivBest = new DiffMatrixResults(n1); for (int r = 2; r <= rMax; ++r) { findEquivalentBest(md, r, mins, thresh, tolerance, n1, n2, equivBest); } StringBuilder sb = new StringBuilder(); for (int i = 0; i < n1; ++i) { sb.append(String.format("[%4d]: ", i)); TIntList list = equivBest.indexes[i]; if (list == null) { sb.append(" NA"); } else { list.sort(); for (int j = 0; j < list.size(); ++j) { sb.append(Integer.toString(list.get(j))); sb.append(","); } } sb.append(" | "); log.info(sb.toString()); sb.delete(0, sb.length()); } // ----- find sequential correspondences ---- for (int idx1 = 0; idx1 < n1; ++idx1) { TIntList list = equivBest.indexes[idx1]; if (list == null) { continue; } TDoubleList diffList = equivBest.diffs[idx1]; QuickSort.sortBy1stArg(list, diffList); double sumAbsDiff = 0; for (int j = 0; j < list.size(); ++j) { int idx2 = list.get(j); double diff = diffList.get(j); sumAbsDiff += Math.abs(diff); Sequence s = new Sequence(); s.startIdx1 = idx1; s.startIdx2 = idx2; s.stopIdx2 = idx2; //search through higher index lists to aggregate int nextLIdx = idx1 + 1; while (nextLIdx < n1) { TIntList list2 = equivBest.indexes[nextLIdx]; if (list2 == null) { break; } TIntSet set2 = equivBest.indexSets[nextLIdx]; int idx3 = s.stopIdx2 + 1; if (set2.contains(idx3)) { s.stopIdx2 = idx3; // NOTE: an expensive operation which will be // replaced with new block pattern reading: int rmIdx = list2.indexOf(idx3); diff = equivBest.diffs[nextLIdx].get(rmIdx); sumAbsDiff += Math.abs(diff); list2.removeAt(rmIdx); equivBest.diffs[nextLIdx].removeAt(rmIdx); set2.remove(idx3); } else { break; } nextLIdx++; } int n = s.stopIdx2 - s.startIdx2 + 1; s.fractionOfWhole = (float)n/(float)n1; s.absAvgSumDiffs = (float)(sumAbsDiff/(float)n); if (s.stopIdx2 - s.startIdx2 > 1) { if (s.absAvgSumDiffs <= tolerance) { sequences.add(s); log.info(String.format( "seq %d:%d to %d frac=%.4f avg diff=%.4f", s.startIdx1, s.startIdx2, s.stopIdx2, s.fractionOfWhole, s.absAvgSumDiffs)); } else if (s.absAvgSumDiffs <= 3*tolerance) { discarded.add(s); } } } } log.info(sequences.size() + " sequences"); } protected Sequences matchArticulated(List<Sequence> sequences, int n1, int n2) { //(1) ascending sort ordered by startIdx1 and then // descending fraction of whole Collections.sort(sequences, new SequenceComparator()); // (1.5) descending sort of fraction, then diff, then startIdx List<Sequence> list2 = new ArrayList<Sequence>(sequences); float maxAvgDiff = findMaxAvgDiff(list2); System.out.println("list2.sz=" + list2.size()); Collections.sort(list2, new SequenceComparator3(maxAvgDiff)); //Collections.sort(list2, new SequenceComparator2()); //(2) a lookup for items in list2 // belonging to >= startIdx1. TreeMap<Integer, TIntList> startLookup = new TreeMap<Integer, TIntList>(); for (int i = 0; i < list2.size(); ++i) { Sequence s1 = list2.get(i); Integer key = Integer.valueOf(s1.startIdx1); for (int j = 0; j < list2.size(); ++j) { Sequence s2 = list2.get(j); if (s2.startIdx1 < key.intValue()) { continue; } if (!startLookup.containsKey(key)) { startLookup.put(key, new TIntArrayList()); } // TODO: revisit this...avoiding adding entire list if (startLookup.get(key).size() > n1/4) { break; } startLookup.get(key).add(j); } log.fine("FSORT: " + i + " " + s1.toString()); } //(2.5) a lookup for items in list2 // belonging to >= startIdx2. 
TreeMap<Integer, TIntList> startLookup2 = new TreeMap<Integer, TIntList>(); for (int i = 0; i < list2.size(); ++i) { Sequence s1 = list2.get(i); Integer key = Integer.valueOf(s1.startIdx2); for (int j = 0; j < list2.size(); ++j) { Sequence s2 = list2.get(j); if (s2.startIdx2 < key.intValue()) { continue; } if (!startLookup2.containsKey(key)) { startLookup2.put(key, new TIntArrayList()); } // TODO: revisit this...avoiding adding entire list if (startLookup2.get(key).size() > n2/4) { break; } startLookup2.get(key).add(j); } } // (3) create "tracks" of sequences Set<Sequence> added = new HashSet<Sequence>(); List<Sequences> tracks = new ArrayList<Sequences>(); for (int i = 0; i < sequences.size(); ++i) { Sequence s = sequences.get(i); if (added.contains(s)) { continue; } added.add(s); Sequences currentTrack = new Sequences(); tracks.add(currentTrack); currentTrack.sequences.add(s); /* (next.startIdx1 > startIdx1 + (stopIdx2 - stopIdx1)) */ Sequence lastSequence = s; while (true) { int nextStartIdx1 = lastSequence.startIdx1 + (lastSequence.stopIdx2 - lastSequence.startIdx2) + 1; // if nextStartIdx1 is larger than n1, it will // be missing from startLookup and we've // ended the progression through idx1 Entry<Integer, TIntList> entry = startLookup.ceilingEntry( Integer.valueOf(nextStartIdx1)); if (entry == null) { break; } TIntList list2Indexes = entry.getValue(); boolean appended = false; for (int j = 0; j < list2Indexes.size(); ++j) { int list2Idx = list2Indexes.get(j); Sequence s2 = list2.get(list2Idx); if (s2.startIdx2 <= lastSequence.stopIdx2) { continue; } /* if (!verifyConsistenCW(currentTrack, s2)) { continue; }*/ currentTrack.sequences.add(s2); added.add(s2); appended = true; lastSequence = s2; break; } if (!appended) { break; } } // if first sequence startIdx2 is > 0, need to // search the region before startIdx2 also if (currentTrack.sequences.get(0).startIdx2 > 1) { int nextStartIdx2 = 0; while (true) { Entry<Integer, TIntList> entry = startLookup2.ceilingEntry( Integer.valueOf(nextStartIdx2)); if (entry == null) { break; } TIntList list2Indexes = entry.getValue(); boolean appended = false; for (int j = 0; j < list2Indexes.size(); ++j) { int list2Idx = list2Indexes.get(j); Sequence s2 = list2.get(list2Idx); if (s2.stopIdx2 >= currentTrack.sequences.get(0).startIdx2) { continue; } // check for range clash... 
if (intersectsExistingRange1(currentTrack.sequences, s2)) { continue; } currentTrack.sequences.add(s2); added.add(s2); appended = true; lastSequence = s2; break; } if (!appended) { break; } nextStartIdx2 = lastSequence.stopIdx2 + 1; if (nextStartIdx2 > currentTrack.sequences.get(0).startIdx2) { break; } } } } filterForConsistentClockwise(tracks); // calculate the stats for each track (== Sequences) for (Sequences track : tracks) { int sumLen = 0; float sumFrac = 0; double sumDiffs = 0; for (Sequence s : track.sequences) { int len = s.stopIdx2 - s.startIdx2 + 1; float diff = s.absAvgSumDiffs * len; sumLen += len; sumDiffs += diff; sumFrac += s.fractionOfWhole; } track.absSumDiffs = sumDiffs; track.avgSumDiffs = (float)(sumDiffs/(float)sumLen); track.fractionOfWhole = sumFrac; } // sorting here needs to prefer higher fraction and // longer segments too Collections.sort(tracks, new TrackComparator(n1)); for (int i = 0; i < tracks.size(); ++i) { Sequences track = tracks.get(i); log.info("track " + i + ": " + track.toString()); } return tracks.get(0); } protected double matchRigidWithOcclusion(List<Sequence> srquences, int n1) { throw new UnsupportedOperationException("not yet implemented"); } /** * index0 is rotations of q, index1 is p.n, index2 is q.n returns a[0:q.n-1][0:p.n-1][0:p.n-1] */ protected float[][][] createDifferenceMatrices( PairIntArray p, PairIntArray q) { if (p.getN() > q.getN()) { throw new IllegalArgumentException( "q.n must be >= p.n"); } /* | a_1_1...a_1_N | | a_2_1...a_2_N | ... | a_N_1...a_N_N | elements on the diagonal are zero to shift to different first point as reference, can shift down k-1 rows and left k-1 columns. */ //log.fine("a1:"); float[][] a1 = createDescriptorMatrix(p, p.getN()); //log.fine("a2:"); float[][] a2 = createDescriptorMatrix(q, q.getN()); /* - find rxr sized blocks similar to one another by starting at main diagonal element A_1(s,s) and A_2(m,m) which have a small angular difference value 1 D_a(s,m,r) = ---- * summation_i_0_to_(r-1) r^2 * summation_j_0_(r-1) of [A_1(s+i,s+j) - A_2(m+i,m+j)]^2 //s range 0 to M-1 //m range 0 to M-1, M<=N //r range 2 to sqrt(min(n, n)) - to calculate all D_a(s,m,r) uses concept of "integral image" by Viola and Jones (for their data, I(x,y)=i(x,y)+I(x-1,y)+I(x,y-1)-I(x-1,y-1)) - N integral images int_1...int_N of size MXM are built for N descriptor difference matrices M_D^n where the number of sampled points on the two shapes is N and M, respectively where M_D^n = A_1(1:M,1:M) - A_2(n:n+M-1,n:n+M-1) then, all matching triplets {s,m,r} which provide a difference value D_a(s,m,r) below a fixed threshold are calculated. --------------------------- (1) make difference matrices. there will be N A_2 matrices in which each is shifted left and up by 1 (or some other value). M_D^n = A_1(1:M,1:M) - A_2(n:n+M-1,n:n+M-1) shifting A_2 by 0 through n where n is (N-M+1?), but shifting it by N instead would cover all orientation angles. (2) make Summary Area Tables of the N M_D^m matrices. (3) search: starting on the diagonals of the integral images made from the N M_D^n matrices, D_α(s, m, r) can be calculated for every block of any size starting at any point on the diagonal in constant time. 
*/ /* MXM NXN 30 31 32 33 20 21 22 20 21 22 23 10 11 12 10 11 12 13 00 01 02 00 01 02 03 p_i_j - q_i_j 01 02 03 00 20 21 22 31 32 33 30 10 11 12 21 22 23 20 00 01 02 11 12 13 10 p_i_j - q_(i+1)_(j+1) 12 13 10 11 20 21 22 02 03 00 01 10 11 12 32 33 30 31 00 01 02 22 23 20 21 p_i_j - q_(i+2)_(j+2) 23 20 21 22 20 21 22 13 10 11 12 10 11 12 03 00 01 02 00 01 02 33 30 31 32 p_i_j - q_(i+3)_(j+3) */ // --- make difference matrices --- int n1 = p.getN(); int n2 = q.getN(); float[][][] md = new float[n2][][]; float[][] prevA2Shifted = null; for (int i = 0; i < n2; ++i) { float[][] shifted2; if (prevA2Shifted == null) { shifted2 = copy(a2); } else { // shifts by 1 to left and up by 1 rotate(prevA2Shifted); shifted2 = prevA2Shifted; } //M_D^n = A_1(1:M,1:M) - A_2(n:n+M-1,n:n+M-1) md[i] = subtract(a1, shifted2); assert(md[i].length == n1); assert(md[i][0].length == n1); prevA2Shifted = shifted2; } // ---- make summary area table for md----- for (int i = 0; i < md.length; ++i) { applySummedAreaTableConversion(md[i]); } //printDiagonal(md[0], mdCoords[0]); log.fine("md.length=" + md.length); return md; } /** given the shape points for p and q, create a matrix of descriptors, describing the difference in chord angles. The chord descriptor is invariant to translation, rotation, and scale: - a chord is a line joining 2 region points - uses the relative orientation between 2 chords angle a_i_j is from chord P_i_P_j to reference point P_i to another sampled point and chord P_j_P_(j-d) and P_j d is the number of points before j in the sequence of points P. a_i_j is the angle between the 2 chords P_i_P_j and P_j_P_(j-d) */ protected float[][] createDescriptorMatrix(PairIntArray p, int n) { float[][] a = new float[n][]; for (int i = 0; i < n; ++i) { a[i] = new float[n]; } /* P1 Pmid P2 */ log.fine("n=" + n); for (int i1 = 0; i1 < n; ++i1) { int start = i1 + 1 + dp; for (int ii = start; ii < (start + n - 1 - dp); ++ii) { int i2 = ii; int imid = i2 - dp; // wrap around if (imid > (n - 1)) { imid -= n; } // wrap around if (i2 > (n - 1)) { i2 -= n; } //log.fine("i1=" + i1 + " imid=" + imid + " i2=" + i2); double angleA = LinesAndAngles.calcClockwiseAngle( p.getX(i1), p.getY(i1), p.getX(i2), p.getY(i2), p.getX(imid), p.getY(imid) ); /* String str = String.format( "[%d](%d,%d) [%d](%d,%d) [%d](%d,%d) a=%.4f", i1, p.getX(i1), p.getY(i1), i2, p.getX(i2), p.getY(i2), imid, p.getX(imid), p.getY(imid), (float) angleA * 180. 
/ Math.PI); log.fine(str); */ a[i1][i2] = (float)angleA; } } return a; } protected int distanceSqEucl(int x1, int y1, int x2, int y2) { int diffX = x1 - x2; int diffY = y1 - y2; return (diffX * diffX + diffY * diffY); } private float[][] copy(float[][] a) { float[][] a2 = new float[a.length][]; for (int i = 0; i < a2.length; ++i) { a2[i] = Arrays.copyOf(a[i], a[i].length); } return a2; } private void rotate(float[][] prevShifted) { // shift x left by 1 first for (int y = 0; y < prevShifted[0].length; ++y) { float tmp0 = prevShifted[0][y]; for (int x = 0; x < (prevShifted.length- 1); ++x){ prevShifted[x][y] = prevShifted[x + 1][y]; } prevShifted[prevShifted.length - 1][y] = tmp0; } // shift y down by 1 for (int x = 0; x < prevShifted.length; ++x) { float tmp0 = prevShifted[x][0]; for (int y = 0; y < (prevShifted[x].length - 1); ++y){ prevShifted[x][y] = prevShifted[x][y + 1]; } prevShifted[x][prevShifted[x].length - 1] = tmp0; } } private float[][] subtract(float[][] a1, float[][] a2) { /* MXM NXN 20 21 22 10 11 10 11 12 00 01 00 01 02 01 02 00 10 11 21 22 20 00 01 11 12 10 12 10 11 10 11 02 00 01 00 01 22 20 21 subtracting only the MXM portion */ assert(a1.length == a1[0].length); assert(a2.length == a2[0].length); int n1 = a1.length; int n2 = a2.length; assert(n1 <= n2); float[][] output = new float[n1][]; for (int i = 0; i < n1; ++i) { output[i] = new float[n1]; for (int j = 0; j < n1; ++j) { output[i][j] = a1[i][j] - a2[i][j]; } } return output; } private void print(String label, float[][] a) { StringBuilder sb = new StringBuilder(label); sb.append("\n"); for (int j = 0; j < a[0].length; ++j) { sb.append(String.format("row: %3d", j)); for (int i = 0; i < a.length; ++i) { sb.append(String.format(" %.4f,", a[i][j])); } log.fine(sb.toString()); sb.delete(0, sb.length()); } } protected void applySummedAreaTableConversion(float[][] mdI) { for (int x = 0; x < mdI.length; ++x) { for (int y = 0; y < mdI[x].length; ++y) { if (x > 0 && y > 0) { mdI[x][y] += (mdI[x - 1][y] + mdI[x][y - 1] - mdI[x - 1][y - 1]); } else if (x > 0) { mdI[x][y] += mdI[x - 1][y]; } else if (y > 0) { mdI[x][y] += mdI[x][y - 1]; } } } } private void filterForConsistentClockwise( List<Sequences> tracks) { TIntList rmList = new TIntArrayList(); for (int i = 0; i < tracks.size(); ++i) { Sequences sequences = tracks.get(i); if (sequences.sequences.isEmpty()) { rmList.add(i); continue; } Collections.sort(sequences.sequences, new SequenceComparator4()); /* all startIdx1 should be increasing, and wrap around should be considered. then, all startIdx2 should be increasing and wrap around whould be considered. */ Sequence s0 = sequences.sequences.get(0); boolean notValid = false; int ns = sequences.sequences.size(); // check startIdx1 then startIdx2 for (int check = 0; check < 2; ++check) { boolean wrapped = false; int prev = (check == 0) ? s0.startIdx1 : s0.startIdx2; for (int j = 1; j <= ns; ++j) { Sequence s; if (j == ns) { if (check == 0) { break; } s = sequences.sequences.get(0); } else { s = sequences.sequences.get(j); } int idx = (check == 0) ? 
s.startIdx1 : s.startIdx2; if (idx == prev) { rmList.add(i); notValid = true; break; } else if (idx < prev) { if (wrapped) { rmList.add(i); notValid = true; break; } wrapped = true; prev = idx; } prev = idx; } // end loop over j sequences in a track if (notValid) { break; } } // end loop over check } log.info("removing " + rmList.size() + " tracks from " + tracks.size()); for (int i = (rmList.size() - 1); i > -1; --i) { int rmIdx = rmList.get(i); tracks.remove(rmIdx); } } private void transpose(Sequences sequences, int n1, int n2) { sequences.fractionOfWhole *= ((float)n1/(float)n2); List<Sequence> sqs = sequences.getList(); for (Sequence s : sqs) { int n = s.stopIdx2 - s.startIdx2; int startIdx2 = s.startIdx1; s.startIdx1 = s.startIdx2; s.startIdx2 = startIdx2; s.stopIdx2 = s.startIdx2 + n; } } private class DiffMatrixResults { private TIntSet[] indexSets = null; // indexes' indexes are i and values are j private TIntList[] indexes = null; private TDoubleList[] diffs = null; public DiffMatrixResults(int n) { indexes = new TIntList[n]; diffs = new TDoubleList[n]; indexSets = new TIntSet[n]; } public void add(int i, int j, double diff) { if (indexes[i] == null) { indexes[i] = new TIntArrayList(); diffs[i] = new TDoubleArrayList(); indexSets[i] = new TIntHashSet(); } if (!indexSets[i].contains(j)) { indexSets[i].add(j); indexes[i].add(j); diffs[i].add(diff); } } } public static class Sequence { int startIdx1; int startIdx2 = -1; int stopIdx2 = -1; float absAvgSumDiffs; float fractionOfWhole; public int getStartIdx1() { return startIdx1; } public int getStartIdx2() { return startIdx2; } public int getStopIdx2() { return stopIdx2; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(String.format( "(%d:%d to %d, f=%.4f d=%.4f)", startIdx1, startIdx2, stopIdx2, fractionOfWhole, absAvgSumDiffs)); return sb.toString(); } } private class MinDiffs { // first dimension index: md[*][][] int[] idxs0; // i is the index of this array and represents the index // of point in p array // j is i + idxs[0] and represents the index // of point in q array float[] mins; public MinDiffs(int n) { idxs0 = new int[n]; mins = new float[n]; Arrays.fill(idxs0, -1); Arrays.fill(mins, Float.MAX_VALUE); } } public static class Sequences { List<Sequence> sequences = new ArrayList<Sequence>(); float fractionOfWhole; double absSumDiffs; float avgSumDiffs; public List<Sequence> getList() { return sequences; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(String.format("frac=%.4f", fractionOfWhole)); sb.append(String.format(", avgDiff=%.4f, sumDiff=%.4f", avgSumDiffs, absSumDiffs)); for (Sequence s : sequences) { sb.append("\n").append(s.toString()); } return sb.toString(); } } private class TrackComparator implements Comparator<Sequences> { final int maxNPoints; public TrackComparator(int n1) { this.maxNPoints = n1; } @Override public int compare(Sequences o1, Sequences o2) { // adding a term to prefer the larger // fraction, but in a smaller number of // larger segments. // hard wiring a minimum size of 5 for segments float ns = (float)(maxNPoints/5); float ns1 = 1.f - ((float)o1.sequences.size()/ns); float ns2 = 1.f - ((float)o2.sequences.size()/ns); //NOTE: this may need to change for cases where, // for example, have one very large segment that // is the right answer and several smaller matches // that are false due to occlusion... presumably // other sequences have as many false matches, but // this needs alot more testing. 
float s1 = o1.fractionOfWhole * ns1; float s2 = o2.fractionOfWhole * ns2; if (s1 > s2) { return -1; } else if (s1 < s2) { return 1; } if (o1.fractionOfWhole > o2.fractionOfWhole) { return -1; } else if (o1.fractionOfWhole < o2.fractionOfWhole) { return 1; } if (o1.absSumDiffs < o2.absSumDiffs) { return -1; } else if (o1.absSumDiffs > o2.absSumDiffs) { return 1; } return 0; } } /** * comparator for a preferring high fraction and low differences, then descending sort of fraction, * then diff, then startIdx */ private class SequenceComparator3 implements Comparator<Sequence> { private final float maxDiff; public SequenceComparator3(float maxDiff) { this.maxDiff = maxDiff; } @Override public int compare(Sequence o1, Sequence o2) { float d1 = 1.f - (o1.absAvgSumDiffs/maxDiff); float d2 = 1.f - (o2.absAvgSumDiffs/maxDiff); float s1 = o1.fractionOfWhole + d1; float s2 = o2.fractionOfWhole + d2; if (s1 > s2) { return -1; } else if (s1 < s2) { return 1; } if (o1.fractionOfWhole > o2.fractionOfWhole) { return -1; } else if (o1.fractionOfWhole < o2.fractionOfWhole) { return 1; } if (o1.absAvgSumDiffs < o2.absAvgSumDiffs) { return -1; } else if (o1.absAvgSumDiffs > o2.absAvgSumDiffs) { return 1; } if (o1.startIdx1 < o2.startIdx1) { return -1; } else if (o1.startIdx1 > o2.startIdx1) { return 1; } return 0; } } /** * comparator for descending sort of fraction, * then diff, then startIdx */ private class SequenceComparator2 implements Comparator<Sequence> { @Override public int compare(Sequence o1, Sequence o2) { if (o1.fractionOfWhole > o2.fractionOfWhole) { return -1; } else if (o1.fractionOfWhole < o2.fractionOfWhole) { return 1; } if (o1.absAvgSumDiffs < o2.absAvgSumDiffs) { return -1; } else if (o1.absAvgSumDiffs > o2.absAvgSumDiffs) { return 1; } if (o1.startIdx1 < o2.startIdx1) { return -1; } else if (o1.startIdx1 > o2.startIdx1) { return 1; } return 0; } } /** * comparator to sort by ascending startIdx, then * descending fraction of whole */ private class SequenceComparator implements Comparator<Sequence> { @Override public int compare(Sequence o1, Sequence o2) { if (o1.startIdx1 < o2.startIdx1) { return -1; } else if (o1.startIdx1 > o2.startIdx1) { return 1; } if (o1.fractionOfWhole > o2.fractionOfWhole) { return -1; } else if (o1.fractionOfWhole < o2.fractionOfWhole) { return 1; } if (o1.startIdx2 < o2.startIdx2) { return -1; } else if (o1.startIdx2 > o2.startIdx2) { return 1; } if (o1.absAvgSumDiffs < o2.absAvgSumDiffs) { return -1; } else if (o1.absAvgSumDiffs > o2.absAvgSumDiffs) { return 1; } // should not arrive here return 0; } } /** * comparator for descending sort startIdx1, * then startIdx2 */ private class SequenceComparator4 implements Comparator<Sequence> { @Override public int compare(Sequence o1, Sequence o2) { if (o1.startIdx1 < o2.startIdx1) { return -1; } else if (o1.startIdx1 > o2.startIdx1) { return 1; } if (o1.startIdx2 < o2.startIdx2) { return -1; } else if (o1.startIdx2 > o2.startIdx2) { return 1; } return 0; } } /** * * @param md 3 dimensional array of difference matrices * @param r block size * @return */ private void findMinDifferenceMatrix( float[][][] md, int r, double threshold, MinDiffs output) { if (r < 1) { throw new IllegalArgumentException("r cannot be < 1"); } double c = 1./(double)(r*r); //md[0:n2-1][0:n1-1][0:n1-1] int n1 = md[0].length; int n2 = md.length; int[] idxs0 = output.idxs0; float[] mins = output.mins; int count = 0; for (int jOffset = 0; jOffset < md.length; jOffset++) { log.info(String.format("block=%d md[%d]", r, jOffset)); float[][] a = 
md[jOffset]; float sum = 0; //for (int i = 0; i < a.length; i+=r) { for (int i = (r - 1); i < a.length; i++) { float s1; if ((i - r) > -1) { s1 = a[i][i] - a[i-r][i] - a[i][i-r] + a[i-r][i-r]; log.finest( String.format( " [%d,%d] %.4f, %.4f, %.4f, %.4f => %.4f", i, i, a[i][i], a[i-r][i], a[i][i-r], a[i-r][i-r], s1*c)); } else { s1 = a[i][i]; log.finest( String.format(" [%d,%d] %.4f => %.4f", i, i, a[i][i], s1*c)); } s1 *= c; log.info(String.format(" [%2d,%2d<-%2d] => %.4f", i, ((i + jOffset) < n2) ? i + jOffset : (i + jOffset) - n2, ((i + jOffset - r + 1) < n2) ? i + jOffset - r + 1 : (i + jOffset - r + 1) - n2, s1*c)); float absS1 = s1; if (absS1 < 0) { absS1 *= -1; } if (absS1 > threshold) { continue; } // note, idx from q is i + iOffset count++; sum += absS1; if (absS1 < Math.abs(mins[i])) { int idx2 = i + jOffset; if (idx2 >= n1) { idx2 -= n1; } mins[i] = s1; idxs0[i] = jOffset; // fill in the rest of the diagonal in this block for (int k = (i-1); k > (i-r); k--) { if (k < 0) { break; } if (absS1 < Math.abs(mins[k])) { idx2 = k + jOffset; if (idx2 >= n1) { idx2 -= n1; } mins[k] = s1; idxs0[k] = jOffset; } } } } if (count == 0) { sum = Integer.MAX_VALUE; } log.info(String.format( "SUM=%.4f block=%d md[%d]", sum, r, jOffset)); } log.fine("OFFSETS=" + Arrays.toString(idxs0)); log.fine("mins=" + Arrays.toString(mins)); } /** * * @param md * @param r * @param mins * @param threshold * @param tolerance * @param n1 * @param n2 * @param output contains pairs of i and jOffset, where * j is i + jOffset */ private void findEquivalentBest(float[][][] md, int r, MinDiffs mins, double threshold, double tolerance, int n1, int n2, DiffMatrixResults output) { //md[0:n2-1][0:n1-1][0:n1-1] assert(md.length == n2); double c = 1./(double)(r*r); // capture all "best" within mins[i] += tolerance for (int jOffset = 0; jOffset < n2; jOffset++) { float[][] a = md[jOffset]; for (int i = 0; i < n1; i+=r) { if (mins.idxs0[i] == -1) { // there is no best for this p index continue; } // mins.mins[i] is the best for index i (== P_i) // mins.idxs0[i] is jOffset of best // j is index i + jOffset float s1; if ((i - r) > -1) { s1 = a[i][i] - a[i-r][i] - a[i][i-r] + a[i-r][i-r]; } else { s1 = a[i][i]; } s1 *= c; float absS1 = s1; if (absS1 < 0) { absS1 *= -1; } if (absS1 > threshold) { continue; } double best = mins.mins[i]; if (Math.abs(s1 - best) > tolerance) { continue; } int idx2 = jOffset + i; if (idx2 >= n2) { idx2 -= n2; } output.add(i, idx2, s1); // fill in the rest of the diagonal in this block for (int k = (i-1); k > (i-r); k--) { if (k < 0) { break; } if ((k - r) > -1) { s1 = a[k][k] - a[k-r][k] - a[k][k-r] + a[k-r][k-r]; } else { s1 = a[k][k]; } s1 *= c; absS1 = s1; if (absS1 < 0) { absS1 *= -1; } if (absS1 > threshold) { continue; } if (Math.abs(s1 - best) > tolerance) { continue; } idx2 = jOffset + k; if (idx2 >= n1) { idx2 -= n1; } output.add(k, idx2, s1); } } } } private float findMaxAvgDiff(List<Sequence> sequences) { float max = Float.MIN_VALUE; for (Sequence s : sequences) { float d = s.absAvgSumDiffs; if (d > max) { max = d; } } return max; } private boolean intersectsExistingRange1( List<Sequence> existingList, Sequence s) { int stopIdx1 = s.startIdx1 + (s.stopIdx2 - s.startIdx2); for (Sequence s0 : existingList) { int s0stopIdx1 = s0.startIdx1 + (s0.stopIdx2 - s0.startIdx2); if (s.startIdx1 >= s0.startIdx1 && s.startIdx1 <= s0stopIdx1) { return true; } if (stopIdx1 >= s0.startIdx1 && stopIdx1 <= s0stopIdx1) { return true; } if (s.startIdx1 <= s0.startIdx1 && stopIdx1 >= s0stopIdx1) { return true; } } 
return false; } }
src/algorithms/imageProcessing/PartialShapeMatcher.java
package algorithms.imageProcessing; import algorithms.QuickSort; import algorithms.compGeometry.LinesAndAngles; import algorithms.util.PairIntArray; import gnu.trove.list.TDoubleList; import gnu.trove.list.TIntList; import gnu.trove.list.array.TDoubleArrayList; import gnu.trove.list.array.TIntArrayList; import gnu.trove.map.TIntDoubleMap; import gnu.trove.map.TIntObjectMap; import gnu.trove.map.hash.TIntObjectHashMap; import gnu.trove.set.TIntSet; import gnu.trove.set.hash.TIntHashSet; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.logging.Logger; /** NOTE: NOT READY FOR USE YET. TODO: need to change read pattern of difference sat, and optimization. "Efficient Partial Shape Matching of Outer Contours: by Donoser - called IS-Match, integral shape match - a silhouette of ordered points are sampled making it an "order preserved assignment problem". - a chord angle descriptor is local and global and is invariant to similarity transformations. - the method returns partial sub matches so works with articulated data and occluded shapes - uses an efficient integral image based matching algorithm - the multi-objective optimization uses principles of Paretto efficiency, defined with the fraction of the total matched and the summed differences of angles. - the final result returned is the sequences and the total fraction matched and summed absolute differences, instead of the Salukwadze distance of a Paretto frontier. * point sampling: (a) same number of points over each contour - can handle similarity transforms, but not occlusion (b) OR, equidistant points - can handle occlusion, but not scale ** equidistant is used here. NOTE: changes will be made soon to accomodate search of remaining points when there are unequal number of points. The runtime complexity for building the integral image is O(m*n) where n and m are the number of sampled points on the input shapes. The runtime complexity for the search of the integral image of summed differences and analysis will be added here: * * @author nichole */ public class PartialShapeMatcher { /** * in sampling the boundaries of the shapes, one can * choose to use the same number for each (which can result * in very different spacings for different sized curves) * or one can choose a set distance between sampling * points. * dp is the set distance between sampling points. The authors use 3 as an example. */ protected int dp = 5; protected Logger log = Logger.getLogger(this.getClass().getName()); public void overrideSamplingDistance(int d) { this.dp = d; } /** * NOT READY FOR USE. A shape is defined as the CW ordered sequence of points P_1...P_N and the shape to match has points Q_1...Q_N. The spacings used within this method are equidistant and the default is 5, so override that if a different number is needed. The fixed equidistant spacing is invariant to rotation and translation, but not to scale, so if the user needs to solve for scale, need to do so outside of this method, that is, apply scale changes to the datasets before use of this method.. 
* @param p * @param q */ public Sequences match(PairIntArray p, PairIntArray q) { log.info("p.n=" + p.getN() + " q.n=" + q.getN()); int diffN = p.getN() - q.getN(); // --- make difference matrices --- //md[0:n2-1][0:n1-1][0:n1-1] float[][][] md; int n1, n2; if (diffN <= 0) { n1 = p.getN(); n2 = q.getN(); md = createDifferenceMatrices(p, q); } else { n1 = q.getN(); n2 = p.getN(); md = createDifferenceMatrices(q, p); } /* the matrices in md can be analyzed for best global solution and separately for best local solution. This method will return results for a local solution to create the point correspondence list. Note that the local best could be two different kinds of models, so might write two different methods for the results. (1) the assumption of same object but with some amount of occlusion, hence gaps in correspondence. (2) the assumption of same object but with some parts being differently oriented, for an example, the scissors opened versus closed. */ List<Sequence> sequences = extractSimilar(md); /* need sum of differences in sequence and the fraction of the whole. paretto efficiency is that all are at a compromise of best state, such that increasing the state of one did not worsen the state of another. prefer: -- smaller total difference for largest fraction of whole -- 2ndly, largest total coverage Note that for the rigid model (exvepting scale transformation) one would want to maximize the 3nd point, coverage, first with a consistent transformation. The articulated model chooses the 2nd point, second to get best fits of components first. */ Sequences sequences0 = matchArticulated( sequences, n1, n2); if (diffN <= 0) { return sequences0; } transpose(sequences0, n1, n2); return sequences0; } protected List<Sequence> extractSimilar(float[][][] md) { //md[0:n2-1][0:n1-1][0:n1-1] int n2 = md.length; int n1 = md[0].length; int rMax = (int)Math.sqrt(n1); if (rMax < 1) { rMax = 1; } // 23 degrees is 0.4014 double thresh = 23. * Math.PI/180.; /* TODO: will apply a different pattern of reading the blocks and merging results next. 
*/ MinDiffs mins = new MinDiffs(n1); for (int r = 2; r <= rMax; ++r) { findMinDifferenceMatrix(md, r, thresh, mins); } // 10 degrees is 0.175 double tolerance = 0.25; DiffMatrixResults equivBest = new DiffMatrixResults(n1); for (int r = 2; r <= rMax; ++r) { findEquivalentBest(md, r, mins, thresh, tolerance, n1, n2, equivBest); } /* StringBuilder sb = new StringBuilder(); for (int i = 0; i < n1; ++i) { sb.append(String.format("[%4d]: ", i)); TIntList list = equivBest.indexes[i]; if (list == null) { sb.append(" NA"); } else { list.sort(); for (int j = 0; j < list.size(); ++j) { sb.append(Integer.toString(list.get(j))); sb.append(","); } } sb.append(" | "); log.fine(sb.toString()); sb.delete(0, sb.length()); } */ // ----- find sequential correspondences ---- List<Sequence> sequences = new ArrayList<Sequence>(); for (int idx1 = 0; idx1 < n1; ++idx1) { TIntList list = equivBest.indexes[idx1]; if (list == null) { continue; } TDoubleList diffList = equivBest.diffs[idx1]; QuickSort.sortBy1stArg(list, diffList); double sumAbsDiff = 0; for (int j = 0; j < list.size(); ++j) { int idx2 = list.get(j); double diff = diffList.get(j); sumAbsDiff += Math.abs(diff); Sequence s = new Sequence(); s.startIdx1 = idx1; s.startIdx2 = idx2; s.stopIdx2 = idx2; //search through higher index lists to aggregate int nextLIdx = idx1 + 1; while (nextLIdx < n1) { TIntList list2 = equivBest.indexes[nextLIdx]; if (list2 == null) { break; } TIntSet set2 = equivBest.indexSets[nextLIdx]; int idx3 = s.stopIdx2 + 1; if (set2.contains(idx3)) { s.stopIdx2 = idx3; // NOTE: an expensive operation which will be // replaced with new block pattern reading: int rmIdx = list2.indexOf(idx3); diff = equivBest.diffs[nextLIdx].get(rmIdx); sumAbsDiff += Math.abs(diff); list2.removeAt(rmIdx); equivBest.diffs[nextLIdx].removeAt(rmIdx); set2.remove(idx3); } else { break; } nextLIdx++; } int n = s.stopIdx2 - s.startIdx2 + 1; s.fractionOfWhole = (float)n/(float)n1; s.absAvgSumDiffs = (float)(sumAbsDiff/(float)n); if (s.stopIdx2 - s.startIdx2 > 1) { sequences.add(s); log.fine(String.format( "seq %d:%d to %d frac=%.4f avg diff=%.4f", s.startIdx1, s.startIdx2, s.stopIdx2, s.fractionOfWhole, s.absAvgSumDiffs)); } } } log.fine(sequences.size() + " sequences"); return sequences; } protected Sequences matchArticulated(List<Sequence> sequences, int n1, int n2) { //(1) ascending sort ordered by startIdx1 and then // descending fraction of whole Collections.sort(sequences, new SequenceComparator()); // (1.5) descending sort of fraction, then diff, then startIdx List<Sequence> list2 = new ArrayList<Sequence>(sequences); float maxAvgDiff = findMaxAvgDiff(list2); System.out.println("list2.sz=" + list2.size()); Collections.sort(list2, new SequenceComparator3(maxAvgDiff)); //Collections.sort(list2, new SequenceComparator2()); //(2) a lookup for items in list2 // belonging to >= startIdx1. TreeMap<Integer, TIntList> startLookup = new TreeMap<Integer, TIntList>(); for (int i = 0; i < list2.size(); ++i) { Sequence s1 = list2.get(i); Integer key = Integer.valueOf(s1.startIdx1); for (int j = 0; j < list2.size(); ++j) { Sequence s2 = list2.get(j); if (s2.startIdx1 < key.intValue()) { continue; } if (!startLookup.containsKey(key)) { startLookup.put(key, new TIntArrayList()); } // TODO: revisit this...avoiding adding entire list if (startLookup.get(key).size() > n1/4) { break; } startLookup.get(key).add(j); } log.fine("FSORT: " + i + " " + s1.toString()); } //(2.5) a lookup for items in list2 // belonging to >= startIdx2. 
TreeMap<Integer, TIntList> startLookup2 = new TreeMap<Integer, TIntList>(); for (int i = 0; i < list2.size(); ++i) { Sequence s1 = list2.get(i); Integer key = Integer.valueOf(s1.startIdx2); for (int j = 0; j < list2.size(); ++j) { Sequence s2 = list2.get(j); if (s2.startIdx2 < key.intValue()) { continue; } if (!startLookup2.containsKey(key)) { startLookup2.put(key, new TIntArrayList()); } // TODO: revisit this...avoiding adding entire list if (startLookup2.get(key).size() > n2/4) { break; } startLookup2.get(key).add(j); } } // (3) create "tracks" of sequences Set<Sequence> added = new HashSet<Sequence>(); List<Sequences> tracks = new ArrayList<Sequences>(); for (int i = 0; i < sequences.size(); ++i) { Sequence s = sequences.get(i); if (added.contains(s)) { continue; } added.add(s); Sequences currentTrack = new Sequences(); tracks.add(currentTrack); currentTrack.sequences.add(s); /* (next.startIdx1 > startIdx1 + (stopIdx2 - stopIdx1)) */ Sequence lastSequence = s; while (true) { int nextStartIdx1 = lastSequence.startIdx1 + (lastSequence.stopIdx2 - lastSequence.startIdx2) + 1; // if nextStartIdx1 is larger than n1, it will // be missing from startLookup and we've // ended the progression through idx1 Entry<Integer, TIntList> entry = startLookup.ceilingEntry( Integer.valueOf(nextStartIdx1)); if (entry == null) { break; } TIntList list2Indexes = entry.getValue(); boolean appended = false; for (int j = 0; j < list2Indexes.size(); ++j) { int list2Idx = list2Indexes.get(j); Sequence s2 = list2.get(list2Idx); if (s2.startIdx2 <= lastSequence.stopIdx2) { continue; } /* if (!verifyConsistenCW(currentTrack, s2)) { continue; }*/ currentTrack.sequences.add(s2); added.add(s2); appended = true; lastSequence = s2; break; } if (!appended) { break; } } // if first sequence startIdx2 is > 0, need to // search the region before startIdx2 also if (currentTrack.sequences.get(0).startIdx2 > 1) { int nextStartIdx2 = 0; while (true) { Entry<Integer, TIntList> entry = startLookup2.ceilingEntry( Integer.valueOf(nextStartIdx2)); if (entry == null) { break; } TIntList list2Indexes = entry.getValue(); boolean appended = false; for (int j = 0; j < list2Indexes.size(); ++j) { int list2Idx = list2Indexes.get(j); Sequence s2 = list2.get(list2Idx); if (s2.stopIdx2 >= currentTrack.sequences.get(0).startIdx2) { continue; } // check for range clash... 
if (intersectsExistingRange1(currentTrack.sequences, s2)) { continue; } currentTrack.sequences.add(s2); added.add(s2); appended = true; lastSequence = s2; break; } if (!appended) { break; } nextStartIdx2 = lastSequence.stopIdx2 + 1; if (nextStartIdx2 > currentTrack.sequences.get(0).startIdx2) { break; } } } } filterForConsistentClockwise(tracks); // calculate the stats for each track (== Sequences) for (Sequences track : tracks) { int sumLen = 0; float sumFrac = 0; double sumDiffs = 0; for (Sequence s : track.sequences) { int len = s.stopIdx2 - s.startIdx2 + 1; float diff = s.absAvgSumDiffs * len; sumLen += len; sumDiffs += diff; sumFrac += s.fractionOfWhole; } track.absSumDiffs = sumDiffs; track.avgSumDiffs = (float)(sumDiffs/(float)sumLen); track.fractionOfWhole = sumFrac; } // sorting here needs to prefer higher fraction and // longer segments too Collections.sort(tracks, new TrackComparator(n1)); for (int i = 0; i < tracks.size(); ++i) { Sequences track = tracks.get(i); log.info("track " + i + ": " + track.toString()); } return tracks.get(0); } protected double matchRigidWithOcclusion(List<Sequence> srquences, int n1) { throw new UnsupportedOperationException("not yet implemented"); } /** * index0 is rotations of q, index1 is p.n, index2 is q.n returns a[0:q.n-1][0:p.n-1][0:p.n-1] */ protected float[][][] createDifferenceMatrices( PairIntArray p, PairIntArray q) { if (p.getN() > q.getN()) { throw new IllegalArgumentException( "q.n must be >= p.n"); } /* | a_1_1...a_1_N | | a_2_1...a_2_N | ... | a_N_1...a_N_N | elements on the diagonal are zero to shift to different first point as reference, can shift down k-1 rows and left k-1 columns. */ //log.fine("a1:"); float[][] a1 = createDescriptorMatrix(p, p.getN()); //log.fine("a2:"); float[][] a2 = createDescriptorMatrix(q, q.getN()); /* - find rxr sized blocks similar to one another by starting at main diagonal element A_1(s,s) and A_2(m,m) which have a small angular difference value 1 D_a(s,m,r) = ---- * summation_i_0_to_(r-1) r^2 * summation_j_0_(r-1) of [A_1(s+i,s+j) - A_2(m+i,m+j)]^2 //s range 0 to M-1 //m range 0 to M-1, M<=N //r range 2 to sqrt(min(n, n)) - to calculate all D_a(s,m,r) uses concept of "integral image" by Viola and Jones (for their data, I(x,y)=i(x,y)+I(x-1,y)+I(x,y-1)-I(x-1,y-1)) - N integral images int_1...int_N of size MXM are built for N descriptor difference matrices M_D^n where the number of sampled points on the two shapes is N and M, respectively where M_D^n = A_1(1:M,1:M) - A_2(n:n+M-1,n:n+M-1) then, all matching triplets {s,m,r} which provide a difference value D_a(s,m,r) below a fixed threshold are calculated. --------------------------- (1) make difference matrices. there will be N A_2 matrices in which each is shifted left and up by 1 (or some other value). M_D^n = A_1(1:M,1:M) - A_2(n:n+M-1,n:n+M-1) shifting A_2 by 0 through n where n is (N-M+1?), but shifting it by N instead would cover all orientation angles. (2) make Summary Area Tables of the N M_D^m matrices. (3) search: starting on the diagonals of the integral images made from the N M_D^n matrices, D_α(s, m, r) can be calculated for every block of any size starting at any point on the diagonal in constant time. 
*/ /* MXM NXN 30 31 32 33 20 21 22 20 21 22 23 10 11 12 10 11 12 13 00 01 02 00 01 02 03 p_i_j - q_i_j 01 02 03 00 20 21 22 31 32 33 30 10 11 12 21 22 23 20 00 01 02 11 12 13 10 p_i_j - q_(i+1)_(j+1) 12 13 10 11 20 21 22 02 03 00 01 10 11 12 32 33 30 31 00 01 02 22 23 20 21 p_i_j - q_(i+2)_(j+2) 23 20 21 22 20 21 22 13 10 11 12 10 11 12 03 00 01 02 00 01 02 33 30 31 32 p_i_j - q_(i+2)_(j+2) */ // --- make difference matrices --- int n1 = p.getN(); int n2 = q.getN(); float[][][] md = new float[n2][][]; float[][] prevA2Shifted = null; for (int i = 0; i < n2; ++i) { float[][] shifted2; if (prevA2Shifted == null) { shifted2 = copy(a2); } else { // shifts by 1 to left and up by 1 rotate(prevA2Shifted); shifted2 = prevA2Shifted; } //M_D^n = A_1(1:M,1:M) - A_2(n:n+M-1,n:n+M-1) md[i] = subtract(a1, shifted2); assert(md[i].length == n1); assert(md[i][0].length == n1); prevA2Shifted = shifted2; } // ---- make summary area table for md----- for (int i = 0; i < md.length; ++i) { applySummedAreaTableConversion(md[i]); } //printDiagonal(md[0], mdCoords[0]); log.fine("md.length=" + md.length); return md; } /** given the shape points for p and q, create a matrix of descriptors, describing the difference in chord angles. The chord descriptor is invariant to translation, rotation, and scale: - a chord is a line joining 2 region points - uses the relative orientation between 2 chords angle a_i_j is from chord P_i_P_j to reference point P_i to another sampled point and chord P_j_P_(j-d) and P_j d is the number of points before j in the sequence of points P. a_i_j is the angle between the 2 chords P_i_P_j and P_j_P_(j-d) */ protected float[][] createDescriptorMatrix(PairIntArray p, int n) { float[][] a = new float[n][]; for (int i = 0; i < n; ++i) { a[i] = new float[n]; } /* P1 Pmid P2 */ log.fine("n=" + n); for (int i1 = 0; i1 < n; ++i1) { int start = i1 + 1 + dp; for (int ii = start; ii < (start + n - 1 - dp); ++ii) { int i2 = ii; int imid = i2 - dp; // wrap around if (imid > (n - 1)) { imid -= n; } // wrap around if (i2 > (n - 1)) { i2 -= n; } //log.fine("i1=" + i1 + " imid=" + imid + " i2=" + i2); double angleA = LinesAndAngles.calcClockwiseAngle( p.getX(i1), p.getY(i1), p.getX(i2), p.getY(i2), p.getX(imid), p.getY(imid) ); /* String str = String.format( "[%d](%d,%d) [%d](%d,%d) [%d](%d,%d) a=%.4f", i1, p.getX(i1), p.getY(i1), i2, p.getX(i2), p.getY(i2), imid, p.getX(imid), p.getY(imid), (float) angleA * 180. 
/ Math.PI); log.fine(str); */ a[i1][i2] = (float)angleA; } } return a; } protected int distanceSqEucl(int x1, int y1, int x2, int y2) { int diffX = x1 - x2; int diffY = y1 - y2; return (diffX * diffX + diffY * diffY); } private float[][] copy(float[][] a) { float[][] a2 = new float[a.length][]; for (int i = 0; i < a2.length; ++i) { a2[i] = Arrays.copyOf(a[i], a[i].length); } return a2; } private void rotate(float[][] prevShifted) { // shift x left by 1 first for (int y = 0; y < prevShifted[0].length; ++y) { float tmp0 = prevShifted[0][y]; for (int x = 0; x < (prevShifted.length- 1); ++x){ prevShifted[x][y] = prevShifted[x + 1][y]; } prevShifted[prevShifted.length - 1][y] = tmp0; } // shift y down by 1 for (int x = 0; x < prevShifted.length; ++x) { float tmp0 = prevShifted[x][0]; for (int y = 0; y < (prevShifted[x].length - 1); ++y){ prevShifted[x][y] = prevShifted[x][y + 1]; } prevShifted[x][prevShifted[x].length - 1] = tmp0; } } private float[][] subtract(float[][] a1, float[][] a2) { /* MXM NXN 20 21 22 10 11 10 11 12 00 01 00 01 02 01 02 00 10 11 21 22 20 00 01 11 12 10 12 10 11 10 11 02 00 01 00 01 22 20 21 subtracting only the MXM portion */ assert(a1.length == a1[0].length); assert(a2.length == a2[0].length); int n1 = a1.length; int n2 = a2.length; assert(n1 <= n2); float[][] output = new float[n1][]; for (int i = 0; i < n1; ++i) { output[i] = new float[n1]; for (int j = 0; j < n1; ++j) { output[i][j] = a1[i][j] - a2[i][j]; } } return output; } private void print(String label, float[][] a) { StringBuilder sb = new StringBuilder(label); sb.append("\n"); for (int j = 0; j < a[0].length; ++j) { sb.append(String.format("row: %3d", j)); for (int i = 0; i < a.length; ++i) { sb.append(String.format(" %.4f,", a[i][j])); } log.fine(sb.toString()); sb.delete(0, sb.length()); } } protected void applySummedAreaTableConversion(float[][] mdI) { for (int x = 0; x < mdI.length; ++x) { for (int y = 0; y < mdI[x].length; ++y) { if (x > 0 && y > 0) { mdI[x][y] += (mdI[x - 1][y] + mdI[x][y - 1] - mdI[x - 1][y - 1]); } else if (x > 0) { mdI[x][y] += mdI[x - 1][y]; } else if (y > 0) { mdI[x][y] += mdI[x][y - 1]; } } } } private void filterForConsistentClockwise( List<Sequences> tracks) { TIntList rmList = new TIntArrayList(); for (int i = 0; i < tracks.size(); ++i) { Sequences sequences = tracks.get(i); if (sequences.sequences.isEmpty()) { rmList.add(i); continue; } Collections.sort(sequences.sequences, new SequenceComparator4()); /* all startIdx1 should be increasing, and wrap around should be considered. then, all startIdx2 should be increasing and wrap around whould be considered. */ Sequence s0 = sequences.sequences.get(0); boolean notValid = false; int ns = sequences.sequences.size(); // check startIdx1 then startIdx2 for (int check = 0; check < 2; ++check) { boolean wrapped = false; int prev = (check == 0) ? s0.startIdx1 : s0.startIdx2; for (int j = 1; j <= ns; ++j) { Sequence s; if (j == ns) { if (check == 0) { break; } s = sequences.sequences.get(0); } else { s = sequences.sequences.get(j); } int idx = (check == 0) ? 
s.startIdx1 : s.startIdx2; if (idx == prev) { rmList.add(i); notValid = true; break; } else if (idx < prev) { if (wrapped) { rmList.add(i); notValid = true; break; } wrapped = true; prev = idx; } prev = idx; } // end loop over j sequences in a track if (notValid) { break; } } // end loop over check } log.info("removing " + rmList.size() + " tracks from " + tracks.size()); for (int i = (rmList.size() - 1); i > -1; --i) { int rmIdx = rmList.get(i); tracks.remove(rmIdx); } } private void transpose(Sequences sequences, int n1, int n2) { sequences.fractionOfWhole *= ((float)n1/(float)n2); List<Sequence> sqs = sequences.getList(); for (Sequence s : sqs) { int n = s.stopIdx2 - s.startIdx2; int startIdx2 = s.startIdx1; s.startIdx1 = s.startIdx2; s.startIdx2 = startIdx2; s.stopIdx2 = s.startIdx2 + n; } } private class DiffMatrixResults { private TIntSet[] indexSets = null; // indexes' indexes are i and values are j private TIntList[] indexes = null; private TDoubleList[] diffs = null; public DiffMatrixResults(int n) { indexes = new TIntList[n]; diffs = new TDoubleList[n]; indexSets = new TIntSet[n]; } public void add(int i, int j, double diff) { if (indexes[i] == null) { indexes[i] = new TIntArrayList(); diffs[i] = new TDoubleArrayList(); indexSets[i] = new TIntHashSet(); } if (!indexSets[i].contains(j)) { indexSets[i].add(j); indexes[i].add(j); diffs[i].add(diff); } } } public static class Sequence { int startIdx1; int startIdx2 = -1; int stopIdx2 = -1; float absAvgSumDiffs; float fractionOfWhole; public int getStartIdx1() { return startIdx1; } public int getStartIdx2() { return startIdx2; } public int getStopIdx2() { return stopIdx2; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(String.format( "(%d:%d to %d, f=%.4f d=%.4f)", startIdx1, startIdx2, stopIdx2, fractionOfWhole, absAvgSumDiffs)); return sb.toString(); } } private class MinDiffs { // first dimension index: md[*][][] int[] idxs0; // i is the index of this array and represents the index // of point in p array // j is i + idxs[0] and represents the index // of point in q array float[] mins; public MinDiffs(int n) { idxs0 = new int[n]; mins = new float[n]; Arrays.fill(idxs0, -1); Arrays.fill(mins, Float.MAX_VALUE); } } public static class Sequences { List<Sequence> sequences = new ArrayList<Sequence>(); float fractionOfWhole; double absSumDiffs; float avgSumDiffs; public List<Sequence> getList() { return sequences; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(String.format("frac=%.4f", fractionOfWhole)); sb.append(String.format(", avgDiff=%.4f, sumDiff=%.4f", avgSumDiffs, absSumDiffs)); for (Sequence s : sequences) { sb.append("\n").append(s.toString()); } return sb.toString(); } } private class TrackComparator implements Comparator<Sequences> { final int maxNPoints; public TrackComparator(int n1) { this.maxNPoints = n1; } @Override public int compare(Sequences o1, Sequences o2) { // adding a term to prefer the larger // fraction, but in a smaller number of // larger segments. // hard wiring a minimum size of 5 for segments float ns = (float)(maxNPoints/5); float ns1 = 1.f - ((float)o1.sequences.size()/ns); float ns2 = 1.f - ((float)o2.sequences.size()/ns); //NOTE: this may need to change for cases where, // for example, have one very large segment that // is the right answer and several smaller matches // that are false due to occlusion... presumably // other sequences have as many false matches, but // this needs alot more testing. 
float s1 = o1.fractionOfWhole * ns1; float s2 = o2.fractionOfWhole * ns2; if (s1 > s2) { return -1; } else if (s1 < s2) { return 1; } if (o1.fractionOfWhole > o2.fractionOfWhole) { return -1; } else if (o1.fractionOfWhole < o2.fractionOfWhole) { return 1; } if (o1.absSumDiffs < o2.absSumDiffs) { return -1; } else if (o1.absSumDiffs > o2.absSumDiffs) { return 1; } return 0; } } /** * comparator for a preferring high fraction and low differences, then descending sort of fraction, * then diff, then startIdx */ private class SequenceComparator3 implements Comparator<Sequence> { private final float maxDiff; public SequenceComparator3(float maxDiff) { this.maxDiff = maxDiff; } @Override public int compare(Sequence o1, Sequence o2) { float d1 = 1.f - (o1.absAvgSumDiffs/maxDiff); float d2 = 1.f - (o2.absAvgSumDiffs/maxDiff); float s1 = o1.fractionOfWhole + d1; float s2 = o2.fractionOfWhole + d2; if (s1 > s2) { return -1; } else if (s1 < s2) { return 1; } if (o1.fractionOfWhole > o2.fractionOfWhole) { return -1; } else if (o1.fractionOfWhole < o2.fractionOfWhole) { return 1; } if (o1.absAvgSumDiffs < o2.absAvgSumDiffs) { return -1; } else if (o1.absAvgSumDiffs > o2.absAvgSumDiffs) { return 1; } if (o1.startIdx1 < o2.startIdx1) { return -1; } else if (o1.startIdx1 > o2.startIdx1) { return 1; } return 0; } } /** * comparator for descending sort of fraction, * then diff, then startIdx */ private class SequenceComparator2 implements Comparator<Sequence> { @Override public int compare(Sequence o1, Sequence o2) { if (o1.fractionOfWhole > o2.fractionOfWhole) { return -1; } else if (o1.fractionOfWhole < o2.fractionOfWhole) { return 1; } if (o1.absAvgSumDiffs < o2.absAvgSumDiffs) { return -1; } else if (o1.absAvgSumDiffs > o2.absAvgSumDiffs) { return 1; } if (o1.startIdx1 < o2.startIdx1) { return -1; } else if (o1.startIdx1 > o2.startIdx1) { return 1; } return 0; } } /** * comparator to sort by ascending startIdx, then * descending fraction of whole */ private class SequenceComparator implements Comparator<Sequence> { @Override public int compare(Sequence o1, Sequence o2) { if (o1.startIdx1 < o2.startIdx1) { return -1; } else if (o1.startIdx1 > o2.startIdx1) { return 1; } if (o1.fractionOfWhole > o2.fractionOfWhole) { return -1; } else if (o1.fractionOfWhole < o2.fractionOfWhole) { return 1; } if (o1.startIdx2 < o2.startIdx2) { return -1; } else if (o1.startIdx2 > o2.startIdx2) { return 1; } if (o1.absAvgSumDiffs < o2.absAvgSumDiffs) { return -1; } else if (o1.absAvgSumDiffs > o2.absAvgSumDiffs) { return 1; } // should not arrive here return 0; } } /** * comparator for descending sort startIdx1, * then startIdx2 */ private class SequenceComparator4 implements Comparator<Sequence> { @Override public int compare(Sequence o1, Sequence o2) { if (o1.startIdx1 < o2.startIdx1) { return -1; } else if (o1.startIdx1 > o2.startIdx1) { return 1; } if (o1.startIdx2 < o2.startIdx2) { return -1; } else if (o1.startIdx2 > o2.startIdx2) { return 1; } return 0; } } /** * * @param md 3 dimensional array of difference matrices * @param r block size * @return */ private void findMinDifferenceMatrix( float[][][] md, int r, double threshold, MinDiffs output) { double c = 1./(double)(r*r); //md[0:n2-1][0:n1-1][0:n1-1] int n1 = md[0].length; int[] idxs0 = output.idxs0; float[] mins = output.mins; int count = 0; for (int jOffset = 0; jOffset < md.length; jOffset++) { log.fine("md[" + jOffset + "]:"); float[][] a = md[jOffset]; float sum = 0; for (int i = 0; i < a.length; i+=r) { float s1; if ((i - r) > -1) { s1 = a[i][i] - a[i-r][i] 
- a[i][i-r] + a[i-r][i-r]; log.fine( String.format( " [%d,%d] %.4f, %.4f, %.4f, %.4f => %.4f", i, i, a[i][i], a[i-r][i], a[i][i-r], a[i-r][i-r], s1*c)); } else { s1 = a[i][i]; log.fine( String.format( " [%d,%d] %.4f => %.4f", i, i, a[i][i], s1*c)); } s1 *= c; float absS1 = s1; if (absS1 < 0) { absS1 *= -1; } if (absS1 > threshold) { continue; } // note, idx from q is i + iOffset count++; sum += absS1; if (absS1 < Math.abs(mins[i])) { int idx2 = i + jOffset; if (idx2 >= n1) { idx2 -= n1; } mins[i] = s1; idxs0[i] = jOffset; // fill in the rest of the diagonal in this block for (int k = (i-1); k > (i-r); k--) { if (k < 0) { break; } if (mins[i] < mins[k]) { idx2 = k + jOffset; if (idx2 >= n1) { idx2 -= n1; } mins[k] = s1; idxs0[k] = jOffset; } } } } if (count == 0) { sum = Integer.MAX_VALUE; } log.fine("SUM=" + sum); } log.fine("OFFSETS=" + Arrays.toString(idxs0)); log.fine("mins=" + Arrays.toString(mins)); } /** * * @param md * @param r * @param mins * @param threshold * @param tolerance * @param n1 * @param n2 * @param output contains pairs of i and jOffset, where * j is i + jOffset */ private void findEquivalentBest(float[][][] md, int r, MinDiffs mins, double threshold, double tolerance, int n1, int n2, DiffMatrixResults output) { //md[0:n2-1][0:n1-1][0:n1-1] assert(md.length == n2); double c = 1./(double)(r*r); // capture all "best" within mins[i] += tolerance for (int jOffset = 0; jOffset < n2; jOffset++) { float[][] a = md[jOffset]; for (int i = 0; i < n1; i+=r) { if (mins.idxs0[i] == -1) { // there is no best for this p index continue; } // mins.mins[i] is the best for index i (== P_i) // mins.idxs0[i] is jOffset of best // j is index i + jOffset float s1; if ((i - r) > -1) { s1 = a[i][i] - a[i-r][i] - a[i][i-r] + a[i-r][i-r]; } else { s1 = a[i][i]; } s1 *= c; float absS1 = s1; if (absS1 < 0) { absS1 *= -1; } if (absS1 > threshold) { continue; } double best = mins.mins[i]; if (Math.abs(s1 - best) > tolerance) { continue; } int idx2 = jOffset + i; if (idx2 >= n2) { idx2 -= n2; } output.add(i, idx2, s1); // fill in the rest of the diagonal in this block for (int k = (i-1); k > (i-r); k--) { if (k < 0) { break; } if ((k - r) > -1) { s1 = a[k][k] - a[k-r][k] - a[k][k-r] + a[k-r][k-r]; } else { s1 = a[k][k]; } s1 *= c; absS1 = s1; if (absS1 < 0) { absS1 *= -1; } if (absS1 > threshold) { continue; } if (Math.abs(s1 - best) > tolerance) { continue; } idx2 = jOffset + k; if (idx2 >= n1) { idx2 -= n1; } output.add(k, idx2, s1); } } } } private float findMaxAvgDiff(List<Sequence> sequences) { float max = Float.MIN_VALUE; for (Sequence s : sequences) { float d = s.absAvgSumDiffs; if (d > max) { max = d; } } return max; } private boolean intersectsExistingRange1( List<Sequence> existingList, Sequence s) { int stopIdx1 = s.startIdx1 + (s.stopIdx2 - s.startIdx2); for (Sequence s0 : existingList) { int s0stopIdx1 = s0.startIdx1 + (s0.stopIdx2 - s0.startIdx2); if (s.startIdx1 >= s0.startIdx1 && s.startIdx1 <= s0stopIdx1) { return true; } if (stopIdx1 >= s0.startIdx1 && stopIdx1 <= s0stopIdx1) { return true; } if (s.startIdx1 <= s0.startIdx1 && stopIdx1 >= s0stopIdx1) { return true; } } return false; } }
more towards refactoring the reading of blocks in PartialShapeMatcher
src/algorithms/imageProcessing/PartialShapeMatcher.java
more towards refactoring the reading of blocks in PartialShapeMatcher
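The descriptor-difference code in the record above leans on a summed-area table (the "integral image" of Viola and Jones) so that the sum over any r x r block ending on the diagonal can be read back in constant time. As a reading aid only, here is a minimal standalone Java sketch of that idea; the class and method names are invented for this illustration and are not part of PartialShapeMatcher.

class SummedAreaTableSketch {

    // In-place conversion: a[x][y] becomes the sum of all entries with indices <= (x, y).
    static void toSummedAreaTable(float[][] a) {
        for (int x = 0; x < a.length; ++x) {
            for (int y = 0; y < a[x].length; ++y) {
                if (x > 0 && y > 0) {
                    a[x][y] += a[x - 1][y] + a[x][y - 1] - a[x - 1][y - 1];
                } else if (x > 0) {
                    a[x][y] += a[x - 1][y];
                } else if (y > 0) {
                    a[x][y] += a[x][y - 1];
                }
            }
        }
    }

    // Sum of the r x r block ending at (i, i): the same four-corner read used when
    // scanning the diagonal of each difference matrix in the record above.
    static float diagonalBlockSum(float[][] sat, int i, int r) {
        if (i - r > -1) {
            return sat[i][i] - sat[i - r][i] - sat[i][i - r] + sat[i - r][i - r];
        }
        return sat[i][i];
    }
}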
Java
mit
5cab123cf07cff67d985c61bb02d2c24168d0736
0
qiniu/java-sdk
package com.qiniu.storage; import com.qiniu.common.QiniuException; import com.qiniu.http.Client; import com.qiniu.http.Response; import com.qiniu.util.StringMap; import java.io.File; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; /** * 并发分片上传 * <p> * 分片上传 v1 * 参考文档:<a href="https://developer.qiniu.com/kodo/7443/shard-to-upload">分片上传</a> * <p/> * 上传通过将一个文件分割为固定大小的块(4M),每次上传一个块的内容(服务端只分块,没有分片)。 * 等待所有块都上传完成之后,再将这些块拼接起来,构成一个完整的文件。 * <p/> * <p> * 分片上传 v2 * 参考文档:<a href="https://developer.qiniu.com/kodo/6364/multipartupload-interface">分片上传</a> * <p/> * 上传通过将一个文件分割为固定大小的块(大小可配置,通过 Configuration.resumableUploadAPIV2BlockSize),每次上传一个块的内容。 * 等待所有块都上传完成之后,再将这些块拼接起来,构成一个完整的文件。 * <p/> * <p> * 另外分片上传还支持纪录上传进度,如果本次上传被暂停,那么下次还可以从上次 * 上次完成的文件偏移位置,继续开始上传,这样就实现了断点续传功能。 * <p> * 服务端网络较稳定,较大文件(如500M以上)才需要将块记录保存下来。 * 小文件没有必要,可以有效地实现大文件的上传。 */ public class ConcurrentResumeUploader extends ResumeUploader { /** * 构建分片上传文件的对象【兼容老版本】 * 分片上传时,每个上传操作会占用 blockSize 大小内存,blockSize 也即分片大小, * 在分片 v1 中 blockSize 为 4M; * 分片 v2 可自定义 blockSize,定义方式为:Configuration.resumableUploadAPIV2BlockSize,范围为:1M ~ 1GB,分片 v2 需要注意每个文件最大分片数量为 10000; * 当采用并发分片时,占用内存大小和当时启用并发任务数量有关,即:blockSize * 并发数量, * 并发任务数量配置方式:Configuration.resumableUploadMaxConcurrentTaskCount * <p> * 支持分片上传 v1/v2,支持断点续传,支持并发 * * @param client 上传 client【必须】 * @param upToken 上传凭证【必须】 * @param key 文件保存名称【可选】 * @param file 文件【必须】 * @param params 自定义参数【可选】 * 自定义文件 metadata 信息,key 需要增加前缀 x-qn-meta- :如 params.put("x-qn-meta-key", "foo") * 用户自定义变量,key 需要增加前缀 x: :如 params.put("x:foo", "foo") * @param mime 文件 mime type【可选】 * @param recorder 断点续传信息记录对象【可选】 * @param configuration 上传配置信息【必须】 */ public ConcurrentResumeUploader(Client client, String upToken, String key, File file, StringMap params, String mime, Recorder recorder, Configuration configuration) { super(client, upToken, key, file, params, mime, recorder, configuration); } /** * 构建分片上传文件流的对象【兼容老版本】 * 分片上传时,每个上传操作会占用 blockSize 大小内存,blockSize 也即分片大小, * 在分片 v1 中 blockSize 为 4M; * 分片 v2 可自定义 blockSize,定义方式为:Configuration.resumableUploadAPIV2BlockSize,范围为:1M ~ 1GB,分片 v2 需要注意每个文件最大分片数量为 10000; * 当采用并发分片时,占用内存大小和当时启用并发任务数量有关,即:blockSize * 并发数量, * 并发任务数量配置方式:Configuration.resumableUploadMaxConcurrentTaskCount * <p> * 支持分片上传 v1/v2,支持并发 * 不支持断点续传,不支持定义file name * * @param client 上传 client 【必须】 * @param upToken 上传凭证 【必须】 * @param key 文件保存名称 【可选】 * @param stream 文件流 【必须】 * @param params 自定义参数【可选】 * 自定义文件 metadata 信息,key 需要增加前缀 x-qn-meta- :如 params.put("x-qn-meta-key", "foo") * 用户自定义变量,key 需要增加前缀 x: :如 params.put("x:foo", "foo") * @param mime 文件 mime type【可选】 * @param configuration 上传配置信息 【必须】 */ public ConcurrentResumeUploader(Client client, String upToken, String key, InputStream stream, StringMap params, String mime, Configuration configuration) { super(client, upToken, key, stream, null, params, mime, configuration); } /** * 构建分片上传文件流的对象 * 分片上传时,每个上传操作会占用 blockSize 大小内存,blockSize 也即分片大小, * 在分片 v1 中 blockSize 为 4M; * 分片 v2 可自定义 blockSize,定义方式为:Configuration.resumableUploadAPIV2BlockSize,范围为:1M ~ 1GB,分片 v2 需要注意每个文件最大分片数量为 10000; * 当采用并发分片时,占用内存大小和当时启用并发任务数量有关,即:blockSize * 并发数量, * 并发任务数量配置方式:Configuration.resumableUploadMaxConcurrentTaskCount * <p> * 支持分片上传 v1/v2,支持并发,支持定义file name * 不支持断点续传 * * @param client 上传 client 【必须】 * @param upToken 上传凭证 【必须】 * @param key 文件保存名称 【可选】 * @param stream 文件流 【必须】 * @param fileName 文件名 【可选】 * @param params 
自定义参数【可选】 * 自定义文件 metadata 信息,key 需要增加前缀 x-qn-meta- :如 params.put("x-qn-meta-key", "foo") * 用户自定义变量,key 需要增加前缀 x: :如 params.put("x:foo", "foo") * @param mime 文件 mime type 【可选】 * @param configuration 上传配置信息 【必须】 */ public ConcurrentResumeUploader(Client client, String upToken, String key, InputStream stream, String fileName, StringMap params, String mime, Configuration configuration) { super(client, upToken, key, stream, fileName, params, mime, configuration); } @Override Response uploadData() throws QiniuException { // 处理参数 int maxConcurrentTaskCount = config.resumableUploadMaxConcurrentTaskCount; ExecutorService pool = config.resumableUploadConcurrentTaskExecutorService; if (maxConcurrentTaskCount < 1) { maxConcurrentTaskCount = 1; } if (pool == null) { pool = Executors.newFixedThreadPool(maxConcurrentTaskCount); } // 开启并发任务 System.out.println("并发上传 task count:" + maxConcurrentTaskCount); List<Future<Response>> futures = new ArrayList<>(); for (int i = 0; i < maxConcurrentTaskCount; i++) { Future<Response> future = pool.submit(new Callable<Response>() { @Override public Response call() throws Exception { return ConcurrentResumeUploader.super.uploadData(); } }); futures.add(future); } // 等待所有并发任务完成 Response response = null; QiniuException exception = null; for (Future<Response> future : futures) { while (!future.isDone()) { try { Thread.sleep(500); } catch (InterruptedException ignored) { } } try { Response responseP = future.get(); if (response == null || (responseP != null && responseP.isOK())) { response = responseP; } } catch (Exception e) { exception = new QiniuException(e); } System.out.println("并发上传 task complete, index:" + futures.indexOf(future)); } // 所有块上传完成说明上传成功 if (uploadPerformer.isAllBlocksUploaded()) { return response; } // 未完成 如果有异常则抛出异常,理论上未完成必定有异常 if (exception != null) { throw exception; } return response; } }
src/main/java/com/qiniu/storage/ConcurrentResumeUploader.java
package com.qiniu.storage; import com.qiniu.common.QiniuException; import com.qiniu.http.Client; import com.qiniu.http.Response; import com.qiniu.util.StringMap; import java.io.File; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; /** * 并发分片上传 * <p> * 分片上传 v1 * 参考文档:<a href="https://developer.qiniu.com/kodo/7443/shard-to-upload">分片上传</a> * <p/> * 上传通过将一个文件分割为固定大小的块(4M),每次上传一个块的内容(服务端只分块,没有分片)。 * 等待所有块都上传完成之后,再将这些块拼接起来,构成一个完整的文件。 * <p/> * <p> * 分片上传 v2 * 参考文档:<a href="https://developer.qiniu.com/kodo/6364/multipartupload-interface">分片上传</a> * <p/> * 上传通过将一个文件分割为固定大小的块(大小可配置,通过 Configuration.resumableUploadAPIV2BlockSize),每次上传一个块的内容。 * 等待所有块都上传完成之后,再将这些块拼接起来,构成一个完整的文件。 * <p/> * <p> * 另外分片上传还支持纪录上传进度,如果本次上传被暂停,那么下次还可以从上次 * 上次完成的文件偏移位置,继续开始上传,这样就实现了断点续传功能。 * <p> * 服务端网络较稳定,较大文件(如500M以上)才需要将块记录保存下来。 * 小文件没有必要,可以有效地实现大文件的上传。 */ public class ConcurrentResumeUploader extends ResumeUploader { /** * 构建分片上传文件的对象【兼容老版本】 * 分片上传时,每个上传操作会占用 blockSize 大小内存,blockSize 也即分片大小, * 在分片 v1 中 blockSize 为 4M; * 分片 v2 可自定义 blockSize,定义方式为:Configuration.resumableUploadAPIV2BlockSize,范围为:1M ~ 1GB,分片 v2 需要注意每个文件最大分片数量为 10000; * 当采用并发分片时,占用内存大小和当时启用并发任务数量有关,即:blockSize * 并发数量, * 并发任务数量配置方式:Configuration.resumableUploadMaxConcurrentTaskCount * <p> * 支持分片上传 v1/v2,支持断点续传,支持并发 * * @param client 上传 client【必须】 * @param upToken 上传凭证【必须】 * @param key 文件保存名称【可选】 * @param file 文件【必须】 * @param params 自定义参数【可选】 * 自定义文件 metadata 信息,key 需要增加前缀 x-qn-meta- :如 params.put("x-qn-meta-key", "foo") * 用户自定义变量,key 需要增加前缀 x: :如 params.put("x:foo", "foo") * @param mime 文件 mime type【可选】 * @param recorder 断点续传信息记录对象【可选】 * @param configuration 上传配置信息【必须】 */ public ConcurrentResumeUploader(Client client, String upToken, String key, File file, StringMap params, String mime, Recorder recorder, Configuration configuration) { super(client, upToken, key, file, params, mime, recorder, configuration); } /** * 构建分片上传文件流的对象【兼容老版本】 * 分片上传时,每个上传操作会占用 blockSize 大小内存,blockSize 也即分片大小, * 在分片 v1 中 blockSize 为 4M; * 分片 v2 可自定义 blockSize,定义方式为:Configuration.resumableUploadAPIV2BlockSize,范围为:1M ~ 1GB,分片 v2 需要注意每个文件最大分片数量为 10000; * 当采用并发分片时,占用内存大小和当时启用并发任务数量有关,即:blockSize * 并发数量, * 并发任务数量配置方式:Configuration.resumableUploadMaxConcurrentTaskCount * <p> * 支持分片上传 v1/v2,支持并发 * 不支持断点续传,不支持定义file name * * @param client 上传 client 【必须】 * @param upToken 上传凭证 【必须】 * @param key 文件保存名称 【可选】 * @param stream 文件流 【必须】 * @param params 自定义参数【可选】 * 自定义文件 metadata 信息,key 需要增加前缀 x-qn-meta- :如 params.put("x-qn-meta-key", "foo") * 用户自定义变量,key 需要增加前缀 x: :如 params.put("x:foo", "foo") * @param mime 文件 mime type【可选】 * @param configuration 上传配置信息 【必须】 */ public ConcurrentResumeUploader(Client client, String upToken, String key, InputStream stream, StringMap params, String mime, Configuration configuration) { super(client, upToken, key, stream, null, params, mime, configuration); } /** * 构建分片上传文件流的对象 * 分片上传时,每个上传操作会占用 blockSize 大小内存,blockSize 也即分片大小, * 在分片 v1 中 blockSize 为 4M; * 分片 v2 可自定义 blockSize,定义方式为:Configuration.resumableUploadAPIV2BlockSize,范围为:1M ~ 1GB,分片 v2 需要注意每个文件最大分片数量为 10000; * 当采用并发分片时,占用内存大小和当时启用并发任务数量有关,即:blockSize * 并发数量, * 并发任务数量配置方式:Configuration.resumableUploadMaxConcurrentTaskCount * <p> * 支持分片上传 v1/v2,支持并发,支持定义file name * 不支持断点续传 * * @param client 上传 client 【必须】 * @param upToken 上传凭证 【必须】 * @param key 文件保存名称 【可选】 * @param stream 文件流 【必须】 * @param fileName 文件名 【可选】 * @param params 
自定义参数【可选】 * 自定义文件 metadata 信息,key 需要增加前缀 x-qn-meta- :如 params.put("x-qn-meta-key", "foo") * 用户自定义变量,key 需要增加前缀 x: :如 params.put("x:foo", "foo") * @param mime 文件 mime type 【可选】 * @param configuration 上传配置信息 【必须】 */ public ConcurrentResumeUploader(Client client, String upToken, String key, InputStream stream, String fileName, StringMap params, String mime, Configuration configuration) { super(client, upToken, key, stream, fileName, params, mime, configuration); } @Override Response uploadData() throws QiniuException { // 处理参数 int maxConcurrentTaskCount = config.resumableUploadMaxConcurrentTaskCount; ExecutorService pool = config.resumableUploadConcurrentTaskExecutorService; if (maxConcurrentTaskCount < 1) { maxConcurrentTaskCount = 1; } if (pool == null) { pool = Executors.newFixedThreadPool(maxConcurrentTaskCount); } // 开启并发任务 System.out.println("并发上传 task count:" + maxConcurrentTaskCount); List<Future<Response>> futures = new ArrayList<>(); for (int i = 0; i < maxConcurrentTaskCount; i++) { Future<Response> future = pool.submit(new Callable<Response>() { @Override public Response call() throws Exception { return ConcurrentResumeUploader.super.uploadData(); } }); futures.add(future); } // 等待所有并发任务完成 Response response = null; QiniuException exception = null; for (Future<Response> future : futures) { while (!future.isDone()) { try { Thread.sleep(500); } catch (InterruptedException ignored) { } } try { Response responseP = future.get(); if (response == null || (responseP != null && responseP.isOK())) { response = responseP; } } catch (Exception e) { exception = new QiniuException(e); } System.out.println("并发上传 task complete, index:" + futures.indexOf(future)); } // 所有块上传完成说明上传成功 if (uploadPerformer.isAllBlocksUploaded()) { return response; } // 未完成 如果有异常则有异常,理论上未完成必定有异常 if (exception != null) { throw exception; } return response; } }
modify code desc
src/main/java/com/qiniu/storage/ConcurrentResumeUploader.java
modify code desc
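The uploadData override in the record above fans the same upload routine out over a fixed-size thread pool and then waits on every Future before deciding whether the whole upload succeeded. The following standalone sketch shows only that fan-out/await pattern; the class and method names are invented here and are not part of the Qiniu SDK.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;

class ConcurrentFanOutSketch {

    // Submit N copies of the same task to a fixed pool, collect every result,
    // and surface the first failure only if nothing succeeded.
    static <T> List<T> runConcurrently(Callable<T> task, int concurrency) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(concurrency);
        try {
            List<Future<T>> futures = new ArrayList<>();
            for (int i = 0; i < concurrency; i++) {
                futures.add(pool.submit(task));
            }
            List<T> results = new ArrayList<>();
            Exception firstFailure = null;
            for (Future<T> f : futures) {
                try {
                    results.add(f.get()); // blocks until this task finishes
                } catch (ExecutionException e) {
                    if (firstFailure == null) {
                        firstFailure = e;
                    }
                }
            }
            if (results.isEmpty() && firstFailure != null) {
                throw firstFailure;
            }
            return results;
        } finally {
            pool.shutdown();
        }
    }
}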
Java
mit
9f75ef121bfd95f7bdc5a8c864f13a0c5b5743a5
0
emc-mongoose/mongoose
package com.emc.mongoose.common.net; import it.unimi.dsi.fastutil.ints.Int2ObjectMap; import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; import sun.rmi.server.UnicastRef; import sun.rmi.transport.Channel; import sun.rmi.transport.LiveRef; import sun.rmi.transport.tcp.TCPEndpoint; import java.io.IOException; import java.net.Inet4Address; import java.net.InetAddress; import java.net.MalformedURLException; import java.net.NetworkInterface; import java.net.SocketException; import java.net.URI; import java.net.URISyntaxException; import java.rmi.Naming; import java.rmi.NotBoundException; import java.rmi.RemoteException; import java.rmi.registry.LocateRegistry; import java.rmi.registry.Registry; import java.rmi.server.RMISocketFactory; import java.rmi.server.RemoteObjectInvocationHandler; import java.rmi.server.UnicastRemoteObject; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import static java.lang.reflect.Proxy.getInvocationHandler; /** Created on 28.09.16. */ public abstract class ServiceUtil { private static Int2ObjectMap<Registry> REGISTRY_MAP = new Int2ObjectOpenHashMap<>(); private static final String RMI_SCHEME = "rmi"; private static final String KEY_RMI_HOSTNAME = "java.rmi.server.hostname"; private static final Map<String, Service> SVC_MAP = new HashMap<>(); private static synchronized void ensureRmiRegistryIsAvailableAt(final int port) throws RemoteException { if(!REGISTRY_MAP.containsKey(port)) { try { REGISTRY_MAP.put(port, LocateRegistry.createRegistry(port)); } catch(final RemoteException e) { REGISTRY_MAP.put(port, LocateRegistry.getRegistry(port)); } } } private static void ensureRmiUseFixedPort(final int port) throws IOException, IllegalStateException { final RMISocketFactory prevSocketFactory = RMISocketFactory.getSocketFactory(); if(prevSocketFactory == null) { RMISocketFactory.setSocketFactory(new FixedPortRmiSocketFactory(port)); } else if(!(prevSocketFactory instanceof FixedPortRmiSocketFactory)) { throw new IllegalStateException("Invalid RMI socket factory was set"); } } public static URI getLocalSvcUri(final String svcName, final int port) throws URISyntaxException { final String hostName = getHostAddr(); return new URI(RMI_SCHEME, null, hostName, port, "/" + svcName, null, null); } private static URI getRemoteSvcUri(final String addr, final String svcName) throws URISyntaxException { final int port; final int portPos = addr.lastIndexOf(":"); if(portPos < 0) { throw new URISyntaxException(addr, "No port information in the address"); } else { port = Integer.parseInt(addr.substring(portPos + 1)); } return getRemoteSvcUri(addr.substring(0, portPos), port, svcName); } private static URI getRemoteSvcUri(final String addr, final int port, final String svcName) throws URISyntaxException { return new URI(RMI_SCHEME, null, addr, port, "/" + svcName, null, null); } public static String getHostAddr() { String hostName = System.getProperty(KEY_RMI_HOSTNAME); if(hostName != null) { return hostName; } InetAddress addr = null; try { final Enumeration<NetworkInterface> netIfaces = NetworkInterface.getNetworkInterfaces(); NetworkInterface nextNetIface; String nextNetIfaceName; while(netIfaces.hasMoreElements()) { nextNetIface = netIfaces.nextElement(); nextNetIfaceName = nextNetIface.getDisplayName(); if(!nextNetIface.isLoopback() && nextNetIface.isUp()) { final Enumeration<InetAddress> addrs = nextNetIface.getInetAddresses(); while(addrs.hasMoreElements()) { addr = addrs.nextElement(); if(Inet4Address.class.isInstance(addr)) { break; } } } } } 
catch(final SocketException e) { e.printStackTrace(System.err); } if(addr == null) { addr = InetAddress.getLoopbackAddress(); } return addr.getHostAddress(); } public static String create(final Service svc, final int port) { try { ensureRmiRegistryIsAvailableAt(port); ensureRmiUseFixedPort(port); UnicastRemoteObject.exportObject(svc, port); final String svcName = svc.getName(); final String svcUri = getLocalSvcUri(svcName, port).toString(); synchronized(SVC_MAP) { if(!SVC_MAP.containsKey(svcName + ":" + port)) { Naming.rebind(svcUri, svc); SVC_MAP.put(svcName + ":" + port, svc); } else { throw new AssertionError("Service already registered"); } } return svcUri; } catch(final IOException | URISyntaxException e) { e.printStackTrace(System.err); return null; } } @SuppressWarnings("unchecked") public static <S extends Service> S resolve(final String addr, final String name) throws NotBoundException, IOException, URISyntaxException { final String svcUri = getRemoteSvcUri(addr, name).toString(); return (S) Naming.lookup(svcUri); } @SuppressWarnings("unchecked") public static <S extends Service> S resolve( final String addr, final int port, final String name ) throws NotBoundException, IOException, URISyntaxException { final String svcUri = getRemoteSvcUri(addr, port, name).toString(); return (S) Naming.lookup(svcUri); } public static String close(final Service svc) throws RemoteException, MalformedURLException { final String svcName = svc.getName(); String svcUri = null; try { UnicastRemoteObject.unexportObject(svc, true); } finally { try { svcUri = getLocalSvcUri(svcName, svc.getRegistryPort()).toString(); Naming.unbind(svcUri); synchronized(SVC_MAP) { if(null == SVC_MAP.remove(svcName + ":" + svc.getRegistryPort())) { System.err.println( "Failed to remove the service \"" + svcName + "\"" ); } } } catch(final NotBoundException | URISyntaxException e) { e.printStackTrace(System.err); } } return svcUri; } public static void shutdown() { synchronized(SVC_MAP) { for(final Service svc : SVC_MAP.values()) { try { System.out.println("Service closed: " + close(svc)); } catch(final RemoteException | MalformedURLException e) { e.printStackTrace(System.err); } } SVC_MAP.clear(); } REGISTRY_MAP.clear(); } public static String getAddress(final Service svc) throws RemoteException { final RemoteObjectInvocationHandler h = (RemoteObjectInvocationHandler) getInvocationHandler(svc); final LiveRef ref = ((UnicastRef) h.getRef()).getLiveRef(); final Channel channel = ref.getChannel(); final TCPEndpoint endpoint = (TCPEndpoint) channel.getEndpoint(); return endpoint.getHost() + ":" + endpoint.getPort(); } }
common/src/main/java/com/emc/mongoose/common/net/ServiceUtil.java
package com.emc.mongoose.common.net; import it.unimi.dsi.fastutil.ints.Int2ObjectMap; import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; import sun.rmi.server.UnicastRef; import sun.rmi.transport.Channel; import sun.rmi.transport.LiveRef; import sun.rmi.transport.tcp.TCPEndpoint; import java.io.IOException; import java.net.Inet4Address; import java.net.InetAddress; import java.net.MalformedURLException; import java.net.NetworkInterface; import java.net.SocketException; import java.net.URI; import java.net.URISyntaxException; import java.rmi.Naming; import java.rmi.NotBoundException; import java.rmi.RemoteException; import java.rmi.registry.LocateRegistry; import java.rmi.registry.Registry; import java.rmi.server.RMISocketFactory; import java.rmi.server.RemoteObjectInvocationHandler; import java.rmi.server.UnicastRemoteObject; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import static java.lang.reflect.Proxy.getInvocationHandler; /** Created on 28.09.16. */ public abstract class ServiceUtil { private static Int2ObjectMap<Registry> REGISTRY_MAP = new Int2ObjectOpenHashMap<>(); private static final String RMI_SCHEME = "rmi"; private static final String KEY_RMI_HOSTNAME = "java.rmi.server.hostname"; private static final Map<String, Service> SVC_MAP = new HashMap<>(); private static synchronized void ensureRmiRegistryIsAvailableAt(final int port) throws RemoteException { if(!REGISTRY_MAP.containsKey(port)) { try { REGISTRY_MAP.put(port, LocateRegistry.createRegistry(port)); } catch(final RemoteException e) { REGISTRY_MAP.put(port, LocateRegistry.getRegistry(port)); } } } private static void ensureRmiUseFixedPort(final int port) throws IOException { System.out.println("Set fixed port for RMI: " + port); RMISocketFactory.setSocketFactory(new FixedPortRmiSocketFactory(port)); } public static URI getLocalSvcUri(final String svcName, final int port) throws URISyntaxException { final String hostName = getHostAddr(); return new URI(RMI_SCHEME, null, hostName, port, "/" + svcName, null, null); } private static URI getRemoteSvcUri(final String addr, final String svcName) throws URISyntaxException { final int port; final int portPos = addr.lastIndexOf(":"); if(portPos < 0) { throw new URISyntaxException(addr, "No port information in the address"); } else { port = Integer.parseInt(addr.substring(portPos + 1)); } return getRemoteSvcUri(addr.substring(0, portPos), port, svcName); } private static URI getRemoteSvcUri(final String addr, final int port, final String svcName) throws URISyntaxException { return new URI(RMI_SCHEME, null, addr, port, "/" + svcName, null, null); } public static String getHostAddr() { String hostName = System.getProperty(KEY_RMI_HOSTNAME); if(hostName != null) { return hostName; } InetAddress addr = null; try { final Enumeration<NetworkInterface> netIfaces = NetworkInterface.getNetworkInterfaces(); NetworkInterface nextNetIface; String nextNetIfaceName; while(netIfaces.hasMoreElements()) { nextNetIface = netIfaces.nextElement(); nextNetIfaceName = nextNetIface.getDisplayName(); if(!nextNetIface.isLoopback() && nextNetIface.isUp()) { final Enumeration<InetAddress> addrs = nextNetIface.getInetAddresses(); while(addrs.hasMoreElements()) { addr = addrs.nextElement(); if(Inet4Address.class.isInstance(addr)) { break; } } } } } catch(final SocketException e) { e.printStackTrace(System.err); } if(addr == null) { addr = InetAddress.getLoopbackAddress(); } return addr.getHostAddress(); } public static String create(final Service svc, final int port) { 
try { ensureRmiRegistryIsAvailableAt(port); ensureRmiUseFixedPort(port); UnicastRemoteObject.exportObject(svc, port); final String svcName = svc.getName(); final String svcUri = getLocalSvcUri(svcName, port).toString(); synchronized(SVC_MAP) { if(!SVC_MAP.containsKey(svcName + ":" + port)) { Naming.rebind(svcUri, svc); SVC_MAP.put(svcName + ":" + port, svc); } else { throw new AssertionError("Service already registered"); } } return svcUri; } catch(final IOException | URISyntaxException e) { e.printStackTrace(System.err); return null; } } @SuppressWarnings("unchecked") public static <S extends Service> S resolve(final String addr, final String name) throws NotBoundException, IOException, URISyntaxException { final String svcUri = getRemoteSvcUri(addr, name).toString(); return (S) Naming.lookup(svcUri); } @SuppressWarnings("unchecked") public static <S extends Service> S resolve( final String addr, final int port, final String name ) throws NotBoundException, IOException, URISyntaxException { final String svcUri = getRemoteSvcUri(addr, port, name).toString(); return (S) Naming.lookup(svcUri); } public static String close(final Service svc) throws RemoteException, MalformedURLException { final String svcName = svc.getName(); String svcUri = null; try { UnicastRemoteObject.unexportObject(svc, true); } finally { try { svcUri = getLocalSvcUri(svcName, svc.getRegistryPort()).toString(); Naming.unbind(svcUri); synchronized(SVC_MAP) { if(null == SVC_MAP.remove(svcName + ":" + svc.getRegistryPort())) { System.err.println( "Failed to remove the service \"" + svcName + "\"" ); } } } catch(final NotBoundException | URISyntaxException e) { e.printStackTrace(System.err); } } return svcUri; } public static void shutdown() { synchronized(SVC_MAP) { for(final Service svc : SVC_MAP.values()) { try { System.out.println("Service closed: " + close(svc)); } catch(final RemoteException | MalformedURLException e) { e.printStackTrace(System.err); } } SVC_MAP.clear(); } REGISTRY_MAP.clear(); } public static String getAddress(final Service svc) throws RemoteException { final RemoteObjectInvocationHandler h = (RemoteObjectInvocationHandler) getInvocationHandler(svc); final LiveRef ref = ((UnicastRef) h.getRef()).getLiveRef(); final Channel channel = ref.getChannel(); final TCPEndpoint endpoint = (TCPEndpoint) channel.getEndpoint(); return endpoint.getHost() + ":" + endpoint.getPort(); } }
fixed RMI port experiment
common/src/main/java/com/emc/mongoose/common/net/ServiceUtil.java
fixed RMI port experiment
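The change recorded above replaces an unconditional RMISocketFactory.setSocketFactory call with a guard, because the JVM-wide RMI socket factory may only be installed once. A minimal sketch of such an idempotent guard follows; the class and method names are invented for illustration and this is not the Mongoose code itself.

import java.io.IOException;
import java.rmi.server.RMISocketFactory;

class RmiSocketFactoryGuardSketch {

    // Install the desired factory only if none is set yet; a second setSocketFactory
    // call would fail, so an already-installed different factory is reported instead.
    static synchronized void ensureFactory(RMISocketFactory desired) throws IOException {
        RMISocketFactory current = RMISocketFactory.getSocketFactory();
        if (current == null) {
            RMISocketFactory.setSocketFactory(desired);
        } else if (current != desired) {
            throw new IllegalStateException(
                "a different global RMI socket factory is already installed");
        }
    }
}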
Java
mit
b06b2490b2d027af54264a50abeaf5ce94e13032
0
Coderhypo/ayanami,Coderhypo/ayanami,Coderhypo/ayanami
package xyz.acmer.entity.problem; import xyz.acmer.entity.user.User; import javax.persistence.*; import java.util.Date; /** * 提交状态表 * Created by hypo on 16-2-11. */ @Entity public class Status { /** * 站内runid */ private Long runId; /** * 提交者 */ private User submiter; /** * 题目 */ private Problem problem; /** * 返回状态 */ private String result; /** * 消耗内存 */ private String memory; /** * 运行时间 */ private String time; /** * 提交语言 */ private String language; /** * 代码长度 */ private String length; /** * 提交时间 */ private Date submitTime; public Status(User submiter, Problem problem, String result, String memory, String time, String language, String length, Date submitTime) { this.submiter = submiter; this.problem = problem; this.result = result; this.memory = memory; this.time = time; this.language = language; this.length = length; this.submitTime = submitTime; } @Id @GeneratedValue @Column(name = "run_id") public Long getRunId() { return runId; } public void setRunId(Long runId) { this.runId = runId; } @OneToOne(fetch = FetchType.EAGER) @JoinColumn(name = "user_id") public User getSubmiter() { return submiter; } public void setSubmiter(User submiter) { this.submiter = submiter; } @OneToOne(fetch = FetchType.EAGER) @JoinColumn(name = "problem_id") public Problem getProblem() { return problem; } public void setProblem(Problem problem) { this.problem = problem; } @Column(name = "result", length = 40) public String getResult() { return result; } public void setResult(String result) { this.result = result; } @Column(name = "memory", length = 100) public String getMemory() { return memory; } public void setMemory(String memory) { this.memory = memory; } @Column(name = "time", length = 100) public String getTime() { return time; } public void setTime(String time) { this.time = time; } @Column(name = "language", length = 20) public String getLanguage() { return language; } public void setLanguage(String language) { this.language = language; } @Column(name = "length", length = 50) public String getLength() { return length; } public void setLength(String length) { this.length = length; } @Column(name = "submit_time", nullable = false, columnDefinition = "TIMESTAMP") public Date getSubmitTime() { return submitTime; } public void setSubmitTime(Date submitTime) { this.submitTime = submitTime; } }
src/main/java/xyz/acmer/entity/problem/Status.java
package xyz.acmer.entity.problem; import xyz.acmer.entity.user.User; import javax.persistence.*; import java.util.Date; /** * 提交状态表 * Created by hypo on 16-2-11. */ @Entity public class Status { /** * 站内runid */ private Long runId; /** * 提交者 */ private User submiter; /** * 题目 */ private Problem problem; /** * 返回状态 */ private String result; /** * 消耗内存 */ private String memory; /** * 运行时间 */ private String time; /** * 提交语言 */ private String language; /** * 代码长度 */ private String length; /** * 提交时间 */ private Date submitTime; public Status(User submiter, Problem problem, String result, String memory, String time, String language, String length, Date submitTime) { this.submiter = submiter; this.problem = problem; this.result = result; this.memory = memory; this.time = time; this.language = language; this.length = length; this.submitTime = submitTime; } @Id @GeneratedValue public Long getRunId() { return runId; } public void setRunId(Long runId) { this.runId = runId; } @ManyToOne(optional = false) @JoinColumn(name = "submiter") public User getSubmiter() { return submiter; } public void setSubmiter(User submiter) { this.submiter = submiter; } @ManyToOne(optional = false) @JoinColumn(name = "problem") public Problem getProblem() { return problem; } public void setProblem(Problem problem) { this.problem = problem; } @Column(name = "result", length = 40) public String getResult() { return result; } public void setResult(String result) { this.result = result; } @Column(name = "memory", length = 100) public String getMemory() { return memory; } public void setMemory(String memory) { this.memory = memory; } @Column(name = "time", length = 100) public String getTime() { return time; } public void setTime(String time) { this.time = time; } @Column(name = "language", length = 20) public String getLanguage() { return language; } public void setLanguage(String language) { this.language = language; } @Column(name = "length", length = 50) public String getLength() { return length; } public void setLength(String length) { this.length = length; } @Column(name = "submit_time", nullable = false, columnDefinition = "TIMESTAMP") public Date getSubmitTime() { return submitTime; } public void setSubmitTime(Date submitTime) { this.submitTime = submitTime; } }
update status
src/main/java/xyz/acmer/entity/problem/Status.java
update status
Java
mit
0c1fefa39a938b5fe73500e452ffd34a1f568ae0
0
literacyapp-org/literacyapp-web,elimu-ai/webapp,elimu-ai/webapp,literacyapp-org/literacyapp-web,elimu-ai/webapp,literacyapp-org/literacyapp-web,elimu-ai/webapp,literacyapp-org/literacyapp-web
package ai.elimu.web.content; import ai.elimu.dao.AllophoneDao; import ai.elimu.dao.AudioContributionEventDao; import java.security.Principal; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.Logger; import ai.elimu.dao.AudioDao; import ai.elimu.dao.ContributorDao; import ai.elimu.dao.EmojiDao; import ai.elimu.dao.ImageDao; import ai.elimu.dao.LetterDao; import ai.elimu.dao.LetterSoundCorrespondenceDao; import ai.elimu.dao.NumberContributionEventDao; import ai.elimu.dao.NumberDao; import ai.elimu.dao.StoryBookContributionEventDao; import ai.elimu.dao.StoryBookDao; import ai.elimu.dao.SyllableDao; import ai.elimu.dao.VideoDao; import ai.elimu.dao.WordContributionEventDao; import ai.elimu.dao.WordDao; import ai.elimu.model.contributor.Contributor; import ai.elimu.model.v2.enums.Environment; import ai.elimu.web.context.EnvironmentContextLoaderListener; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.logging.log4j.LogManager; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.web.savedrequest.DefaultSavedRequest; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @Controller @RequestMapping("/content") public class MainContentController { private final Logger logger = LogManager.getLogger(); @Autowired private LetterDao letterDao; @Autowired private AllophoneDao allophoneDao; @Autowired private LetterSoundCorrespondenceDao letterSoundCorrespondenceDao; @Autowired private NumberDao numberDao; @Autowired private SyllableDao syllableDao; @Autowired private WordDao wordDao; @Autowired private EmojiDao emojiDao; @Autowired private StoryBookDao storyBookDao; @Autowired private AudioDao audioDao; @Autowired private ImageDao imageDao; @Autowired private VideoDao videoDao; @Autowired private StoryBookContributionEventDao storyBookContributionEventDao; @Autowired private AudioContributionEventDao audioContributionEventDao; @Autowired private WordContributionEventDao wordContributionEventDao; @Autowired private NumberContributionEventDao numberContributionEventDao; @Autowired private ContributorDao contributorDao; @RequestMapping(method = RequestMethod.GET) public String handleRequest( HttpServletRequest request, HttpSession session, Principal principal, Model model) { logger.info("handleRequest"); // Check if the Contributor has not yet provided all required details Contributor contributor = (Contributor) session.getAttribute("contributor"); if (StringUtils.isBlank(contributor.getEmail())) { return "redirect:/content/contributor/add-email"; } else if (StringUtils.isBlank(contributor.getFirstName()) || StringUtils.isBlank(contributor.getLastName())) { return "redirect:/content/contributor/edit-name"; } else if (StringUtils.isBlank(contributor.getMotivation()) && EnvironmentContextLoaderListener.env!=Environment.DEV) { return "redirect:/content/contributor/edit-motivation"; } else { // Redirect to originally requested URL DefaultSavedRequest defaultSavedRequest = (DefaultSavedRequest) session.getAttribute("SPRING_SECURITY_SAVED_REQUEST"); logger.info("defaultSavedRequest: " + defaultSavedRequest); if (defaultSavedRequest != null) { logger.info("Redirecting to " + defaultSavedRequest.getServletPath()); return "redirect:" + 
defaultSavedRequest.getServletPath(); } } model.addAttribute("letterCount", letterDao.readCount()); model.addAttribute("allophoneCount", allophoneDao.readCount()); model.addAttribute("letterSoundCorrespondenceCount", letterSoundCorrespondenceDao.readCount()); model.addAttribute("numberCount", numberDao.readCount()); model.addAttribute("syllableCount", syllableDao.readCount()); model.addAttribute("wordCount", wordDao.readCount()); model.addAttribute("emojiCount", emojiDao.readCount()); model.addAttribute("storyBookCount", storyBookDao.readCount()); model.addAttribute("audioCount", audioDao.readCount()); model.addAttribute("imageCount", imageDao.readCount()); model.addAttribute("videoCount", videoDao.readCount()); List<Contributor> contributorsWithStoryBookContributions = contributorDao.readAllWithStoryBookContributions(); logger.info("contributorsWithStoryBookContributions.size(): " + contributorsWithStoryBookContributions.size()); model.addAttribute("contributorsWithStoryBookContributions", contributorsWithStoryBookContributions); Map<Long, Long> storyBookContributionsCountMap = new HashMap<>(); for (Contributor contributorWithContributions : contributorsWithStoryBookContributions) { storyBookContributionsCountMap.put(contributorWithContributions.getId(), storyBookContributionEventDao.readCount(contributorWithContributions)); } model.addAttribute("storyBookContributionsCountMap", storyBookContributionsCountMap); List<Contributor> contributorsWithAudioContributions = contributorDao.readAllWithAudioContributions(); logger.info("contributorsWithAudioContributions.size(): " + contributorsWithAudioContributions.size()); model.addAttribute("contributorsWithAudioContributions", contributorsWithAudioContributions); Map<Long, Long> audioContributionsCountMap = new HashMap<>(); for (Contributor contributorWithContributions : contributorsWithAudioContributions) { audioContributionsCountMap.put(contributorWithContributions.getId(), audioContributionEventDao.readCount(contributorWithContributions)); } model.addAttribute("audioContributionsCountMap", audioContributionsCountMap); List<Contributor> contributorsWithWordContributions = contributorDao.readAllWithWordContributions(); logger.info("contributorsWithWordContributions.size(): " + contributorsWithWordContributions.size()); model.addAttribute("contributorsWithWordContributions", contributorsWithWordContributions); Map<Long, Long> wordContributionsCountMap = new HashMap<>(); for (Contributor contributorWithContributions : contributorsWithWordContributions) { wordContributionsCountMap.put(contributorWithContributions.getId(), wordContributionEventDao.readCount(contributorWithContributions)); } model.addAttribute("wordContributionsCountMap", wordContributionsCountMap); List<Contributor> contributorsWithNumberContributions = contributorDao.readAllWithNumberContributions(); logger.info("contributorsWithNumberContributions.size(): " + contributorsWithNumberContributions.size()); model.addAttribute("contributorsWithNumberContributions", contributorsWithNumberContributions); Map<Long, Long> numberContributionsCountMap = new HashMap<>(); for (Contributor contributorWithContributions : contributorsWithNumberContributions) { numberContributionsCountMap.put(contributorWithContributions.getId(), numberContributionEventDao.readCount(contributorWithContributions)); } model.addAttribute("numberContributionsCountMap", numberContributionsCountMap); return "content/main"; } }
src/main/java/ai/elimu/web/content/MainContentController.java
package ai.elimu.web.content; import ai.elimu.dao.AllophoneDao; import ai.elimu.dao.AudioContributionEventDao; import java.security.Principal; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.Logger; import ai.elimu.dao.AudioDao; import ai.elimu.dao.ContributorDao; import ai.elimu.dao.EmojiDao; import ai.elimu.dao.ImageDao; import ai.elimu.dao.LetterDao; import ai.elimu.dao.LetterSoundCorrespondenceDao; import ai.elimu.dao.NumberContributionEventDao; import ai.elimu.dao.NumberDao; import ai.elimu.dao.StoryBookContributionEventDao; import ai.elimu.dao.StoryBookDao; import ai.elimu.dao.SyllableDao; import ai.elimu.dao.VideoDao; import ai.elimu.dao.WordContributionEventDao; import ai.elimu.dao.WordDao; import ai.elimu.model.contributor.Contributor; import ai.elimu.model.v2.enums.Environment; import ai.elimu.web.context.EnvironmentContextLoaderListener; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.logging.log4j.LogManager; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.web.savedrequest.DefaultSavedRequest; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @Controller @RequestMapping("/content") public class MainContentController { private final Logger logger = LogManager.getLogger(); @Autowired private LetterDao letterDao; @Autowired private AllophoneDao allophoneDao; @Autowired private LetterSoundCorrespondenceDao letterSoundCorrespondenceDao; @Autowired private NumberDao numberDao; @Autowired private SyllableDao syllableDao; @Autowired private WordDao wordDao; @Autowired private EmojiDao emojiDao; @Autowired private StoryBookDao storyBookDao; @Autowired private AudioDao audioDao; @Autowired private ImageDao imageDao; @Autowired private VideoDao videoDao; @Autowired private StoryBookContributionEventDao storyBookContributionEventDao; @Autowired private AudioContributionEventDao audioContributionEventDao; @Autowired private WordContributionEventDao wordContributionEventDao; @Autowired private NumberContributionEventDao numberContributionEventDao; @Autowired private ContributorDao contributorDao; @RequestMapping(method = RequestMethod.GET) public String handleRequest( HttpServletRequest request, HttpSession session, Principal principal, Model model) { logger.info("handleRequest"); // Check if the Contributor has not yet provided all required details Contributor contributor = (Contributor) session.getAttribute("contributor"); if (StringUtils.isBlank(contributor.getEmail())) { return "redirect:/content/contributor/add-email"; } else if (StringUtils.isBlank(contributor.getFirstName()) || StringUtils.isBlank(contributor.getLastName())) { return "redirect:/content/contributor/edit-name"; } else if (StringUtils.isBlank(contributor.getMotivation()) && EnvironmentContextLoaderListener.env!=Environment.DEV) { return "redirect:/content/contributor/edit-motivation"; } else { // Redirect to originally requested URL DefaultSavedRequest defaultSavedRequest = (DefaultSavedRequest) session.getAttribute("SPRING_SECURITY_SAVED_REQUEST"); logger.info("defaultSavedRequest: " + defaultSavedRequest); if (defaultSavedRequest != null) { logger.info("Redirecting to " + defaultSavedRequest.getServletPath()); return "redirect:" + 
defaultSavedRequest.getServletPath(); } } model.addAttribute("letterCount", letterDao.readCount()); model.addAttribute("allophoneCount", allophoneDao.readCount()); model.addAttribute("letterSoundCorrespondenceCount", letterSoundCorrespondenceDao.readCount()); model.addAttribute("numberCount", numberDao.readCount()); model.addAttribute("syllableCount", syllableDao.readCount()); model.addAttribute("wordCount", wordDao.readCount()); model.addAttribute("emojiCount", emojiDao.readCount()); model.addAttribute("storyBookCount", storyBookDao.readCount()); model.addAttribute("audioCount", audioDao.readCount()); model.addAttribute("imageCount", imageDao.readCount()); model.addAttribute("videoCount", videoDao.readCount()); List<Contributor> contributorsWithStoryBookContributions = contributorDao.readAllWithStoryBookContributions(); logger.info("contributorsWithStoryBookContributions.size(): " + contributorsWithStoryBookContributions.size()); model.addAttribute("contributorsWithStoryBookContributions", contributorsWithStoryBookContributions); Map<Long, Long> storyBookContributionsCountMap = new HashMap<>(); for (Contributor contributorWithContributions : contributorsWithStoryBookContributions) { storyBookContributionsCountMap.put(contributorWithContributions.getId(), storyBookContributionEventDao.readCount(contributorWithContributions)); } model.addAttribute("storyBookContributionsCountMap", storyBookContributionsCountMap); List<Contributor> contributorsWithAudioContributions = contributorDao.readAllWithAudioContributions(); logger.info("contributorsWithAudioContributions.size(): " + contributorsWithAudioContributions.size()); model.addAttribute("contributorsWithAudioContributions", contributorsWithAudioContributions); Map<Long, Long> audioContributionsCountMap = new HashMap<>(); for (Contributor contributorWithContributions : contributorsWithAudioContributions) { audioContributionsCountMap.put(contributorWithContributions.getId(), audioContributionEventDao.readCount(contributorWithContributions)); } model.addAttribute("audioContributionsCountMap", audioContributionsCountMap); List<Contributor> contributorsWithWordContributions = contributorDao.readAllWithWordContributions(); logger.info("contributorsWithWordContributions.size(): " + contributorsWithWordContributions.size()); model.addAttribute("contributorsWithWordContributions", contributorsWithWordContributions); Map<Long, Long> wordContributionsCountMap = new HashMap<>(); for (Contributor contributorWithContributions : contributorsWithWordContributions) { wordContributionsCountMap.put(contributorWithContributions.getId(), wordContributionEventDao.readCount(contributorWithContributions)); } model.addAttribute("wordContributionsCountMap", wordContributionsCountMap); List<Contributor> contributorsWithNumberContributions = contributorDao.readAllWithNumberContributions(); logger.info("contributorsWithNumberContributions.size(): " + contributorsWithNumberContributions.size()); model.addAttribute("contributorsWithNumberContributions", contributorsWithNumberContributions); Map<Long, Long> numberContributionsCountMap = new HashMap<>(); for (Contributor contributorWithContributions : contributorsWithNumberContributions) { numberContributionsCountMap.put(contributorWithContributions.getId(), numberContributionEventDao.readCount(contributorWithContributions)); } model.addAttribute("numberContributionsCountMap", numberContributionsCountMap); return "content/main"; } }
#1401 Skip Edit Contribution page for dev - deletion of unwanted line
src/main/java/ai/elimu/web/content/MainContentController.java
#1401 Skip Edit Contribution page for dev - deletion of unwanted line
Java
mit
dd2e36d8bb90c3afafd1b4cf75563d853fc66aff
0
Mashape/unirest-java,Mashape/unirest-java
/**
 * The MIT License
 *
 * Copyright for portions of OpenUnirest/unirest-java are held by Mashape (c) 2013 as part of Kong/unirest-java.
 * All other copyright for OpenUnirest/unirest-java are held by OpenUnirest (c) 2018.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

package unirest;

import java.io.InputStream;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import java.util.function.Function;

public interface HttpRequest<R extends HttpRequest> {

    /**
     * add a route param that replaces the matching {name}
     * For example routeParam("name", "fred") will replace {name} in
     * https://localhost/users/{user}
     * to
     * https://localhost/users/fred
     *
     * @param name the name of the param (do not include curly braces {})
     * @param value the value to replace the placeholder with
     * @return this request builder
     */
    R routeParam(String name, String value);

    /**
     * Basic auth credentials
     * @param username the username
     * @param password the password
     * @return this request builder
     */
    R basicAuth(String username, String password);

    /**
     * The Accept header to send (e.g. application/json)
     * @param value a valid mime type for the Accept header
     * @return this request builder
     */
    R accept(String value);

    /**
     * Add an http header
     * @param name name of the header
     * @param value value for the header
     * @return this request builder
     */
    R header(String name, String value);

    /**
     * Add headers as a map
     * @param headerMap a map of headers
     * @return this request builder
     */
    R headers(Map<String, String> headerMap);

    /**
     * add a query param to the url. The value will be URL-Encoded
     * @param name the name of the param
     * @param value the value of the param
     * @return this request builder
     */
    R queryString(String name, Object value);

    /**
     * Add multiple params with the same param name.
     * queryString("name", Arrays.asList("bob", "linda")) will result in
     * ?name=bob&amp;name=linda
     * @param name the name of the param
     * @param value a collection of values
     * @return this request builder
     */
    R queryString(String name, Collection<?> value);

    /**
     * Add query params as a map of name value pairs
     * @param parameters a map of params
     * @return this request builder
     */
    R queryString(Map<String, Object> parameters);

    /**
     * Pass an ObjectMapper for the request. This will override any globally
     * configured ObjectMapper
     * @param mapper the ObjectMapper
     * @return this request builder
     */
    R withObjectMapper(ObjectMapper mapper);

    /**
     * Executes the request and returns the response with the body mapped into a String
     * @return response
     */
    HttpResponse<String> asString();

    /**
     * Executes the request asynchronously and returns the response with the body mapped into a String
     * @return a CompletableFuture of a response
     */
    CompletableFuture<HttpResponse<String>> asStringAsync();

    /**
     * Executes the request asynchronously and returns the response with the body mapped into a String
     * @param callback a callback handler
     * @return a CompletableFuture of a response
     */
    CompletableFuture<HttpResponse<String>> asStringAsync(Callback<String> callback);

    /**
     * Executes the request and returns the response with the body mapped into a JsonNode
     * @return response
     */
    HttpResponse<JsonNode> asJson();

    /**
     * Executes the request asynchronously and returns the response with the body mapped into a JsonNode
     * @return a CompletableFuture of a response
     */
    CompletableFuture<HttpResponse<JsonNode>> asJsonAsync();

    /**
     * Executes the request asynchronously and returns the response with the body mapped into a JsonNode
     * @param callback a callback handler
     * @return a CompletableFuture of a response
     */
    CompletableFuture<HttpResponse<JsonNode>> asJsonAsync(Callback<JsonNode> callback);

    /**
     * Executes the request and returns the response with the body mapped into T by a configured ObjectMapper
     * @param responseClass the class to return. This will be passed to the ObjectMapper
     * @param <T> the return type
     * @return a response
     */
    <T> HttpResponse<T> asObject(Class<? extends T> responseClass);

    /**
     * Executes the request and returns the response with the body mapped into T by a configured ObjectMapper
     * @param genericType the generic type to return. This will be passed to the ObjectMapper
     * @param <T> the return type
     * @return a response
     */
    <T> HttpResponse<T> asObject(GenericType<T> genericType);

    /**
     * Execute the request and pass the raw response to a function for mapping.
     * This raw response contains the original InputStream and is suitable for
     * reading large responses.
     * @param function the function to map the response into an object of T
     * @param <T> The type of the response mapping
     * @return A HttpResponse containing T as the body
     */
    <T> HttpResponse<T> asObject(Function<RawResponse, T> function);

    /**
     * Executes the request asynchronously and returns the response with the body mapped into T by a configured ObjectMapper
     * @param responseClass the class type to map to
     * @param <T> the return type
     * @return a CompletableFuture of a response
     */
    <T> CompletableFuture<HttpResponse<T>> asObjectAsync(Class<? extends T> responseClass);

    <T> CompletableFuture<HttpResponse<T>> asObjectAsync(Class<? extends T> responseClass, Callback<T> callback);

    <T> CompletableFuture<HttpResponse<T>> asObjectAsync(GenericType<T> genericType);

    <T> CompletableFuture<HttpResponse<T>> asObjectAsync(GenericType<T> genericType, Callback<T> callback);

    <T> CompletableFuture<HttpResponse<T>> asObjectAsync(Function<RawResponse, T> function);

    HttpResponse<InputStream> asBinary();

    CompletableFuture<HttpResponse<InputStream>> asBinaryAsync();

    CompletableFuture<HttpResponse<InputStream>> asBinaryAsync(Callback<InputStream> callback);

    void thenConsume(Consumer<RawResponse> consumer);

    void thenConsumeAsync(Consumer<RawResponse> consumer);

    HttpMethod getHttpMethod();

    String getUrl();

    Headers getHeaders();

    Body getBody();
}
src/main/java/unirest/HttpRequest.java
/** * The MIT License * * Copyright for portions of OpenUnirest/uniresr-java are held by Mashape (c) 2013 as part of Kong/unirest-java. * All other copyright for OpenUnirest/unirest-java are held by OpenUnirest (c) 2018. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package unirest; import java.io.InputStream; import java.util.Collection; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.function.Consumer; import java.util.function.Function; public interface HttpRequest<R extends HttpRequest> { /** * add a route param that replaces the matching {name} * For example routeParam("name", "fred") will replace {name} in * https://localhost/users/{user} * to * https://localhost/users/fred * * @param name the name of the param (do not include curly braces {} * @param value the value to replace the placeholder with * @return this request builder */ R routeParam(String name, String value); /** * Basic auth credentials * @param username the username * @param password the password * @return this request builder */ R basicAuth(String username, String password); /** * The Accept heder to send (e.g. application/json * @param value a valid mime type for the Accept header * @return this request builder */ R accept(String value); /** * Add a http header * @param name name of the header * @param value value for the header * @return this request builder */ R header(String name, String value); /** * Add headers as a map * @param headerMap a map of headers * @return this request builder */ R headers(Map<String, String> headerMap); /** * add a query param to the url. The value will be URL-Encoded * @param name the name of the param * @param value the value of the param * @return this request builder */ R queryString(String name, Object value); /** * Add multiple param with the same param name. * queryString("name", Arrays.asList("bob", "linda")) will result in * ?name=bob&amp;name=linda * @param name the name of the param * @param value a collection of values * @return this request builder */ R queryString(String name, Collection<?> value); /** * Add query params as a map of name value pairs * @param parameters a map of params * @return this request builder */ R queryString(Map<String, Object> parameters); /** * Pass a ObjectMapper for the request. 
This will override any globally * configured ObjectMapper * @param mapper the ObjectMapper * @return this request builder */ R withObjectMapper(ObjectMapper mapper); /** * Executes the request and returns the response with the body mapped into a String * @return response */ HttpResponse<String> asString(); /** * Executes the request asynchronously and returns the response with the body mapped into a String * @return a CompletableFuture of a response */ CompletableFuture<HttpResponse<String>> asStringAsync(); /** * Executes the request asynchronously and returns the response with the body mapped into a String * @param callback a callback handler * @return a CompletableFuture of a response */ CompletableFuture<HttpResponse<String>> asStringAsync(Callback<String> callback); /** * Executes the request and returns the response with the body mapped into a JsonNode * @return response */ HttpResponse<JsonNode> asJson(); /** * Executes the request asynchronously and returns the response with the body mapped into a JsonNode * @return a CompletableFuture of a response */ CompletableFuture<HttpResponse<JsonNode>> asJsonAsync(); /** * Executes the request asynchronously and returns the response with the body mapped into a JsonNode * @param callback a callback handler * @return a CompletableFuture of a response */ CompletableFuture<HttpResponse<JsonNode>> asJsonAsync(Callback<JsonNode> callback); /** * Executes the request and returns the response with the body mapped into T by a configured ObjectMapper * @param responseClass the class to return. This will be passed to the ObjectMapper * @param <T> the return type * @return a response */ <T> HttpResponse<T> asObject(Class<? extends T> responseClass); /** * Executes the request and returns the response with the body mapped into T by a configured ObjectMapper * @param genericType the genertic type to return. This will be passed to the ObjectMapper * @param <T> the return type * @return a response */ <T> HttpResponse<T> asObject(GenericType<T> genericType); /** * Executes the request asynchronously and returns response with the body mapped into T by a configured ObjectMapper * @param responseClass the class type to map to * @param <T> the return type * @return a CompletableFuture of a response */ <T> CompletableFuture<HttpResponse<T>> asObjectAsync(Class<? extends T> responseClass); <T> CompletableFuture<HttpResponse<T>> asObjectAsync(Class<? extends T> responseClass, Callback<T> callback); <T> CompletableFuture<HttpResponse<T>> asObjectAsync(GenericType<T> genericType); <T> CompletableFuture<HttpResponse<T>> asObjectAsync(GenericType<T> genericType, Callback<T> callback); <T> HttpResponse<T> asObject(Function<RawResponse, T> function); <T> CompletableFuture<HttpResponse<T>> asObjectAsync(Function<RawResponse, T> function); HttpResponse<InputStream> asBinary(); CompletableFuture<HttpResponse<InputStream>> asBinaryAsync(); CompletableFuture<HttpResponse<InputStream>> asBinaryAsync(Callback<InputStream> callback); void thenConsume(Consumer<RawResponse> consumer); void thenConsumeAsync(Consumer<RawResponse> consumer); HttpMethod getHttpMethod(); String getUrl(); Headers getHeaders(); Body getBody(); }
more docs
src/main/java/unirest/HttpRequest.java
more docs
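Note: the HttpRequest interface in the record above is a fluent builder, so each configuration method returns the request itself before a terminal asString/asJson/asObject call executes it. The sketch below is only an illustration of how those documented methods compose; the static Unirest entry point, the example URL, and the Book POJO are assumptions for the sketch and are not part of the commit.

// Usage sketch only: assumes the library's usual static "Unirest" entry point
// in the same "unirest" package; URL and Book type are made up for illustration.
import java.util.concurrent.CompletableFuture;
import unirest.*;

public class HttpRequestUsageSketch {

    // Hypothetical response payload type for the asObjectAsync example.
    static class Book {
        public String title;
    }

    public static void main(String[] args) {
        // Fluent, synchronous call: every builder method returns the request itself.
        HttpResponse<JsonNode> json = Unirest.get("http://example.com/books/{id}")   // made-up URL
                .routeParam("id", "42")            // replaces {id} in the URL
                .queryString("format", "full")     // appended as ?format=full, URL-encoded
                .header("X-Trace", "abc123")
                .accept("application/json")
                .asJson();
        System.out.println(json.getBody());

        // Asynchronous call mapped to a POJO; requires a globally configured ObjectMapper.
        CompletableFuture<HttpResponse<Book>> future =
                Unirest.get("http://example.com/books/{id}")
                        .routeParam("id", "7")
                        .asObjectAsync(Book.class);
    }
}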
Java
agpl-3.0
e11e5cb0c86b51c7970232a9e7757a9926f590e7
0
imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms
package com.imcode.imcms.servlet.superadmin; import com.imcode.util.HumanReadable; import com.imcode.util.MultipartHttpServletRequest; import imcode.server.Imcms; import imcode.server.ImcmsServices; import imcode.server.WebAppGlobalConstants; import imcode.server.user.UserDomainObject; import imcode.util.Utility; import imcode.util.io.FileUtility; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.DirectoryFileFilter; import org.apache.commons.io.filefilter.NotFileFilter; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import javax.servlet.ServletException; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.*; import java.util.*; public class FileAdmin extends HttpServlet { private final static Logger log = Logger.getLogger( "FileAdmin" ); private static final int BUFFER_SIZE = 65536; private static final String ADMIN_TEMPLATE_FILE_ADMIN_COPY_OVERWRIGHT_WARNING = "FileAdminCopyOverwriteWarning.html"; private static final String ADMIN_TEMPLATE_FILE_ADMIN_MOVE_OVERWRITE_WARNING = "FileAdminMoveOverwriteWarning.html"; public void doGet( HttpServletRequest req, HttpServletResponse res ) throws ServletException, IOException { UserDomainObject user = Utility.getLoggedOnUser( req ); if ( !user.isSuperAdmin() ) { Utility.redirectToStartDocument( req, res ); return; } Utility.setNoCache( res ); File dir1 = null; File dir2 = null; File[] roots = getRoots(); if (roots.length > 0) { dir1 = roots[0]; if (roots.length > 1) { dir2 = roots[1]; } } outputFileAdmin( res, user, dir1, dir2 ); } /** * Check to see if the path is a child to one of the rootpaths */ private boolean isUnderRoot( File path, File[] roots ) { for ( int i = 0; i < roots.length; i++ ) { if ( FileUtility.directoryIsAncestorOfOrEqualTo( roots[i], path ) ) { return true; } } return false; } public void doPost( HttpServletRequest req, HttpServletResponse res ) throws ServletException, IOException { ImcmsServices imcref = Imcms.getServices(); UserDomainObject user = Utility.getLoggedOnUser( req ); if ( !user.isSuperAdmin() ) { Utility.redirectToStartDocument( req, res ); return; } Utility.setNoCache( res ); MultipartHttpServletRequest mp = new MultipartHttpServletRequest(req); if ( mp.getParameter( "cancel" ) != null ) { res.sendRedirect( "AdminManager" ); return; } File[] roots = getRoots(); File dir1 = getContextRelativeDirectoryFromRequest(mp, "dir1"); File dir2 = getContextRelativeDirectoryFromRequest(mp, "dir2"); if ( !isUnderRoot( dir1, roots ) || !isUnderRoot( dir2, roots )) { doGet( req, res ); return ; } String[] files1 = mp.getParameterValues( "files1" ); String[] files2 = mp.getParameterValues( "files2" ); String name = mp.getParameter( "name" ); boolean outputHasBeenHandled = false; if ( mp.getParameter( "change1" ) != null ) { //UserDomainObject wants to change dir1 dir1 = changeDir( files1, dir1, roots ); } else if ( mp.getParameter( "change2" ) != null ) { //UserDomainObject wants to change dir2 dir2 = changeDir( files2, dir2, roots ); } else if ( mp.getParameter( "mkdir1" ) != null ) { outputHasBeenHandled = makeDirectory( name, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "mkdir2" ) != null ) { outputHasBeenHandled = makeDirectory( name, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "delete1" ) != null ) { outputHasBeenHandled = delete( 
dir1, files1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "delete2" ) != null ) { outputHasBeenHandled = delete( dir2, files2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "deleteok" ) != null ) { deleteOk( mp, roots ); } else if ( mp.getParameter( "upload1" ) != null ) { outputHasBeenHandled = upload( mp, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "upload2" ) != null ) { outputHasBeenHandled = upload( mp, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "download1" ) != null ) { outputHasBeenHandled = download( files1, dir1, res ); } else if ( mp.getParameter( "download2" ) != null ) { outputHasBeenHandled = download( files2, dir2, res ); } else if ( mp.getParameter( "rename1" ) != null ) { outputHasBeenHandled = rename( files1, name, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "rename2" ) != null ) { outputHasBeenHandled = rename( files2, name, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "copy1" ) != null ) { outputHasBeenHandled = copy( files1, dir1, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "copy2" ) != null ) { outputHasBeenHandled = copy( files2, dir2, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "copyok" ) != null ) { copyOk( mp, roots ); } else if ( mp.getParameter( "move1" ) != null ) { outputHasBeenHandled = move( files1, dir1, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "move2" ) != null ) { outputHasBeenHandled = move( files2, dir2, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "moveok" ) != null ) { moveOk( mp, roots ); } if ( !outputHasBeenHandled ) { outputFileAdmin( res, user, dir1, dir2 ); } } private File getContextRelativeDirectoryFromRequest(HttpServletRequest request, String parameter) throws IOException { File webappPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath() ; String dirParameter = request.getParameter( parameter ); return new File( webappPath, dirParameter ).getCanonicalFile(); } private File[] getRoots() { String rootpaths = Imcms.getServices().getConfig().getFileAdminRootPaths(); List rootList = new ArrayList(); if ( rootpaths != null ) { StringTokenizer st = new StringTokenizer( rootpaths, ":;" ); int tokenCount = st.countTokens(); for ( int i = 0; i < tokenCount; i++ ) { String oneRoot = st.nextToken().trim(); File oneRootFile = FileUtility.getFileFromWebappRelativePath( oneRoot ); if ( oneRootFile.isDirectory() ) { rootList.add( oneRootFile ); } } } return (File[])rootList.toArray( new File[rootList.size()] ); } private boolean move( String[] files, File sourceDir, File destDir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; if ( files != null && !sourceDir.equals( destDir ) ) { File[] sourceFileTree = makeFileTreeList( makeAbsoluteFileList( sourceDir, files ), false ); File[] relativeSourceFileTree = makeRelativeFileList( sourceDir, sourceFileTree ); StringBuffer optionList = new StringBuffer(); StringBuffer fileList = new StringBuffer(); File webAppPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(); for ( int i = 0; i < relativeSourceFileTree.length; i++ ) { File destFile = new File( destDir, relativeSourceFileTree[i].getPath() ); fileList.append( relativeSourceFileTree[i] ).append( File.pathSeparator ); if ( destFile.exists() ) { String optionString = createWarningFileOptionString(destFile); 
optionList.append( "<option>" ).append( optionString ).append( "</option>" ); } } if ( optionList.length() > 0 ) { outputMoveOverwriteWarning( optionList.toString(), sourceDir, destDir, fileList.toString(), dir1, dir2, res, user, imcref ); handledOutput = true; } else { File[] destFiles = makeAbsoluteFileList( destDir, relativeSourceFileTree ); for ( int i = 0; i < sourceFileTree.length; i++ ) { File destFile = destFiles[i]; destFile.getParentFile().mkdirs(); File sourceFile = sourceFileTree[i]; if ( sourceFile.isFile() ) { FileUtils.copyFile( sourceFile, destFile ); } if ( sourceFile.length() == destFile.length() ) { FileUtils.forceDelete(sourceFile); } } } } return handledOutput; } private String createWarningFileOptionString(File destFile) { File webAppPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(); return FileUtility.relativizeFile(webAppPath, destFile).getPath() + ( destFile.isDirectory() ? File.separator : " [" + destFile.length() + "]" ); } private boolean copy( String[] files, File sourceDir, File destDir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; if ( files != null && !sourceDir.equals( destDir ) ) { File[] sourceFileTree = makeFileTreeList( makeAbsoluteFileList( sourceDir, files ), true ); File[] relativeSourceFileTree = makeRelativeFileList( sourceDir, sourceFileTree ); StringBuffer optionList = new StringBuffer(); StringBuffer fileList = new StringBuffer(); for ( int i = 0; i < relativeSourceFileTree.length; i++ ) { File destFile = new File( destDir, relativeSourceFileTree[i].getPath() ); fileList.append( relativeSourceFileTree[i] ).append( File.pathSeparator ); if ( destFile.exists() ) { String optionString = createWarningFileOptionString(destFile); optionList.append( "<option>" ).append( optionString ).append( "</option>" ); } } if ( optionList.length() > 0 ) { ouputCopyOverwriteWarning( optionList.toString(), sourceDir, destDir, fileList.toString(), dir1, dir2, res, user, imcref ); handledOutput = true; } else { File[] destFileTree = makeAbsoluteFileList( destDir, relativeSourceFileTree ); for ( int i = 0; i < sourceFileTree.length; i++ ) { File sourceFile = sourceFileTree[i]; File destFile = destFileTree[i]; if ( sourceFile.isDirectory() ) { destFile.mkdir(); continue; } FileUtils.copyFile( sourceFile, destFile ); } } } return handledOutput; } private boolean rename( String[] files, String name, File dir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; if ( files != null && files.length == 1 ) { //Has the user chosen just one file? if ( name != null && name.length() > 0 ) { File oldFilename = new File( dir, files[0] ); File newFilename = new File( dir, name ); if ( oldFilename.exists() ) { oldFilename.renameTo( newFilename ); } } else { outputBlankFilenameError( dir1, dir2, res, user, imcref ); handledOutput = true; } } return handledOutput; } private boolean download( String[] files, File dir, HttpServletResponse res ) throws IOException { boolean handledOutput = false; if ( files != null && files.length == 1 ) { //Has the user chosen just one file? 
File file = new File( dir, files[0] ); try { res.setContentType( "application/octet-stream" ); res.setHeader( "Content-Disposition", "attachment; filename=\"" + file.getName() + '\"' ); BufferedInputStream fin = new BufferedInputStream( new FileInputStream( file ) ); res.setContentLength( fin.available() ); byte[] buffer = new byte[BUFFER_SIZE]; ServletOutputStream out = res.getOutputStream(); for ( int bytes_read; ( bytes_read = fin.read( buffer ) ) != -1; ) { out.write( buffer, 0, bytes_read ); } handledOutput = true; } catch ( FileNotFoundException ex ) { // FIXME: Error dialog? log.debug( "Download failed", ex ); } } return handledOutput; } private boolean delete( File dir, String[] files, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; File[] farray = makeFileTreeList( makeAbsoluteFileList( dir, files ), false ); File[] filelist = makeRelativeFileList( dir, farray ); if ( filelist != null && filelist.length > 0 ) { outputDeleteWarning( filelist, dir1, dir2, dir, res, user, imcref ); handledOutput = true; } return handledOutput; } private boolean makeDirectory( String name, File dir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; if ( name != null && name.length() > 0 ) { File newname = new File( dir, name ); if ( !newname.exists() ) { newname.mkdir(); } } else { outputBlankFilenameError( dir1, dir2, res, user, imcref ); handledOutput = true; } return handledOutput; } private boolean upload( MultipartHttpServletRequest mp, File destDir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; String fileContents = mp.getParameter( "file" ); if ( fileContents == null || fileContents.length() < 1 ) { outputBlankFileError( dir1, dir2, res, user, imcref ); handledOutput = true; return handledOutput; } String filename = mp.getParameterFileItem( "file" ).getName() ; File file = new File( destDir, filename ); File uniqueFile = findUniqueFilename( file ); if ( file.equals( uniqueFile ) || file.renameTo( uniqueFile ) ) { FileOutputStream fout = new FileOutputStream( file ); byte[] bytes = fileContents.getBytes( "8859_1" ); fout.write( bytes ); fout.flush(); fout.close(); if ( !file.equals( uniqueFile ) ) { outputFileExistedAndTheOriginalWasRenamedNotice( dir1, dir2, uniqueFile.getName(), res, user, imcref ); handledOutput = true; } } else { // FIXME: Output failed-to-rename-original-file error dialog handledOutput = false; } return handledOutput; } private void outputFileAdmin( HttpServletResponse res, UserDomainObject user, File dir1, File dir2 ) throws IOException { Utility.setDefaultHtmlContentType( res ); res.getOutputStream().print( parseFileAdmin( user, dir1, dir2 ) ); } static File findUniqueFilename(File file) { File uniqueFile = file; int counter = 1; String previousSuffix = ""; while ( uniqueFile.exists() ) { String filenameWithoutSuffix = StringUtils.substringBeforeLast(uniqueFile.getName(), previousSuffix); String suffix = "." 
+ counter; counter++; uniqueFile = new File(uniqueFile.getParentFile(), filenameWithoutSuffix + suffix); previousSuffix = suffix; } return uniqueFile; } private void outputMoveOverwriteWarning( String option_list, File sourceDir, File destDir, String file_list, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { outputWarning(option_list, sourceDir, destDir, file_list, dir1, dir2, res, imcref, ADMIN_TEMPLATE_FILE_ADMIN_MOVE_OVERWRITE_WARNING, user); } private void ouputCopyOverwriteWarning( String option_list, File sourceDir, File destDir, String file_list, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { outputWarning(option_list, sourceDir, destDir, file_list, dir1, dir2, res, imcref, ADMIN_TEMPLATE_FILE_ADMIN_COPY_OVERWRIGHT_WARNING, user); } private void outputWarning(String option_list, File sourceDir, File destDir, String file_list, File dir1, File dir2, HttpServletResponse res, ImcmsServices imcref, String template, UserDomainObject user) throws IOException { List vec = new ArrayList(); vec.add( "#filelist#" ); vec.add( option_list ); vec.add( "#source#" ); vec.add( getContextRelativeAbsolutePathToDirectory(sourceDir) ); vec.add( "#dest#" ); vec.add( getContextRelativeAbsolutePathToDirectory(destDir) ); vec.add( "#files#" ); vec.add( file_list ); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( template, user, vec ) ); } private void outputFileExistedAndTheOriginalWasRenamedNotice( File dir1, File dir2, String newFilename, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { List vec = new ArrayList(); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); vec.add( "#filename#" ); vec.add( newFilename ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( "FileAdminFileExisted.html", user, vec ) ); } private void outputBlankFileError( File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { List vec = new ArrayList(); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( "FileAdminFileBlank.html", user, vec ) ); } private void outputDeleteWarning( File[] filelist, File dir1, File dir2, File sourceDir, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { StringBuffer files = new StringBuffer(); StringBuffer optionlist = new StringBuffer(); for ( int i = 0; i < filelist.length; i++ ) { File foo = new File( sourceDir, filelist[i].getPath() ); String bar = createWarningFileOptionString(foo); optionlist.append( "<option>" ).append( bar ).append( "</option>" ); files.append( filelist[i] ).append( File.pathSeparator ); } List vec = new ArrayList(); vec.add( "#filelist#" ); vec.add( optionlist.toString() ); vec.add( "#files#" ); vec.add( StringEscapeUtils.escapeHtml(files.toString()) ); 
vec.add( "#source#" ); vec.add( getContextRelativeAbsolutePathToDirectory(sourceDir) ); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( "FileAdminDeleteWarning.html", user, vec ) ); } private void outputBlankFilenameError( File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { List vec = new ArrayList(); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( "FileAdminNameBlank.html", user, vec ) ); } private interface FromSourceFileToDestinationFileCommand { void execute(File source, File destination) throws IOException; } private void moveOk( HttpServletRequest mp, File[] roots ) throws IOException { fromSourceToDestination(mp, roots, new FromSourceFileToDestinationFileCommand() { public void execute(File source, File dest) throws IOException { dest.getParentFile().mkdirs(); if ( source.isFile() ) { FileUtils.copyFile( source, dest ); } if ( source.length() == dest.length() ) { FileUtils.forceDelete( source ); } } }); } private void fromSourceToDestination(HttpServletRequest mp, File[] roots, FromSourceFileToDestinationFileCommand command) throws IOException { File srcdir = getContextRelativeDirectoryFromRequest(mp, "source" ); File dstdir = getContextRelativeDirectoryFromRequest(mp, "dest" ); String files = mp.getParameter( "files" ); if ( isUnderRoot( srcdir, roots ) && isUnderRoot( dstdir, roots ) ) { StringTokenizer st = new StringTokenizer( files, ":;" ); while ( st.hasMoreTokens() ) { String foo = st.nextToken(); File source = new File( srcdir, foo ); File dest = new File( dstdir, foo ); command.execute(source, dest); } } } private void copyOk( HttpServletRequest mp, File[] roots ) throws IOException { fromSourceToDestination(mp, roots, new FromSourceFileToDestinationFileCommand() { public void execute(File source, File destination) throws IOException { if ( source.isDirectory() ) { destination.mkdir(); } else { FileUtils.copyFile( source, destination ); } } }); } private void deleteOk( HttpServletRequest mp, File[] roots ) throws IOException { String files = mp.getParameter( "files" ) ; File path = getContextRelativeDirectoryFromRequest(mp, "source") ; if ( null != files && null != path ) { StringTokenizer st = new StringTokenizer( files, ":;" ); while ( st.hasMoreTokens() ) { File foo = new File( path, st.nextToken() ); if ( foo.exists() && isUnderRoot( foo.getParentFile(), roots ) ) { FileUtils.forceDelete(foo); } } } } private File changeDir( String[] files, File dir, File[] roots ) throws IOException { File resultDir = dir; if ( files != null && files.length == 1 ) { //Has the user chosen just one dir? String filename = files[0]; if ( filename.startsWith(File.separator) ) { File newDir = new File( WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(), filename) ; if ( isUnderRoot( newDir, roots ) ) { resultDir = newDir; } } else { //Is the dir one of the roots? File newDir = new File( dir, filename ); //No? Treat it like a relative path... if ( newDir.isDirectory() && isUnderRoot( newDir, roots ) ) { //It IS a directory, i hope? 
resultDir = newDir; } } } return resultDir.getCanonicalFile(); } private File[] makeAbsoluteFileList( File parent, String[] filePaths ) { File[] files = new File[filePaths.length]; for ( int i = 0; i < filePaths.length; i++ ) { String filePath = filePaths[i]; files[i] = new File( filePath ); } return makeAbsoluteFileList( parent, files ); } /** * Takes a list of files that are supposed to share a common parent, and returns them in an array. */ private File[] makeAbsoluteFileList( File parent, File[] files ) { if ( files == null || parent == null ) { return null; } LinkedList list = new LinkedList(); for ( int i = 0; i < files.length; i++ ) { String filename = files[i].getPath(); if ( !( "..".equals( filename ) || new File( filename ).isAbsolute() ) ) { list.add( new File( parent, filename ) ); } } return (File[])list.toArray( new File[list.size()] ); } /** * Takes a list of files that share a common parent, orphans them, and returns them in an array. */ private File[] makeRelativeFileList( File relativeParentDir, File[] files ) { if ( files == null || relativeParentDir == null ) { return null; } File[] relativeFileList = new File[files.length]; for ( int i = 0; i < files.length; i++ ) { relativeFileList[i] = FileUtility.relativizeFile( relativeParentDir, files[i] ); } return relativeFileList; } /** * Takes a list of files and dirs in one dir, and recursively adds the files of the subdirs. */ private File[] makeFileTreeList( File[] files, boolean dirfirst ) { if ( files == null ) { return new File[0]; } LinkedList list = new LinkedList(); for ( int i = 0; i < files.length; i++ ) { if ( dirfirst ) { list.add( files[i] ); } if ( files[i].isDirectory() ) { File[] sub_list = makeFileTreeList( files[i].listFiles(), dirfirst ); for ( int j = 0; j < sub_list.length; j++ ) { list.add( sub_list[j] ); } } if ( !dirfirst ) { list.add( files[i] ); } } File[] result = new File[list.size()]; for ( int i = 0; i < result.length; i++ ) { result[i] = (File)list.removeFirst(); } return result; } private String parseFileAdmin( UserDomainObject user, File fd1, File fd2 ) throws IOException { ImcmsServices imcref = Imcms.getServices(); File[] rootlist = getRoots(); List vec = new ArrayList(); if ( fd1 != null ) { vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(fd1) ); String optionlist = createDirectoryOptionList( rootlist, fd1 ); vec.add( "#files1#" ); vec.add( optionlist ); } else { vec.add( "#dir1#" ); vec.add( "" ); vec.add( "#files1#" ); vec.add( "" ); } if ( fd2 != null ) { vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(fd2) ); String optionlist = createDirectoryOptionList( rootlist, fd2 ); vec.add( "#files2#" ); vec.add( optionlist ); } else { vec.add( "#dir2#" ); vec.add( "" ); vec.add( "#files2#" ); vec.add( "" ); } return imcref.getAdminTemplate( "FileAdmin.html", user, vec ); } private String getContextRelativeAbsolutePathToDirectory(File dir) throws IOException { File webappPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(); return File.separator + getPathRelativeTo( dir, webappPath ) + File.separator; } private String getPathRelativeTo( File file, File root ) throws IOException { if ( !FileUtility.directoryIsAncestorOfOrEqualTo( root, file ) ) { return file.getCanonicalPath(); } if ( file.equals( root ) ) { return ""; } return FileUtility.relativizeFile( root, file ).getPath() ; } private String createDirectoryOptionList( File[] rootlist, File directory ) throws IOException { StringBuffer optionlist = new StringBuffer(); File 
webappPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(); for ( int i = 0; i < rootlist.length; i++ ) { String dirname = getPathRelativeTo( rootlist[i], webappPath ) ; optionlist.append(getDirectoryOption(File.separator + dirname + File.separator, File.separator + dirname + File.separator) ); } File parent = directory.getParentFile(); if ( isUnderRoot( parent, rootlist )) { optionlist.append(getDirectoryOption(".." + File.separator, ".." + File.separator) ); } File[] dirlist = directory.listFiles( (FileFilter)DirectoryFileFilter.INSTANCE ); Arrays.sort(dirlist, getFileComparator()); for ( int i = 0; null != dirlist && i < dirlist.length; i++ ) { optionlist.append(getDirectoryOption(dirlist[i].getName() + File.separator, dirlist[i].getName() + File.separator)); } File[] filelist = directory.listFiles( (FileFilter)new NotFileFilter( DirectoryFileFilter.INSTANCE ) ); Arrays.sort(filelist, getFileComparator()); for ( int i = 0; null != filelist && i < filelist.length; i++ ) { String formatedFileSize = HumanReadable.getHumanReadableByteSize( filelist[i].length() ); String filename = filelist[i].getName(); String fileNameAndSize = filename + " [" + formatedFileSize + "]"; optionlist.append("<option value=\""); optionlist.append(StringEscapeUtils.escapeHtml(filename)); optionlist.append("\">"); optionlist.append(StringEscapeUtils.escapeHtml(fileNameAndSize)); optionlist.append("</option>"); } return optionlist.toString(); } private String getDirectoryOption(String value, String text) { return "<option style=\"background-color:#f0f0f0\" value=\"" + StringEscapeUtils.escapeHtml(value) + "\">" + StringEscapeUtils.escapeHtml(text) + "</option>"; } private Comparator getFileComparator() { return new Comparator() { public int compare(Object a, Object b) { File filea = (File)a; File fileb = (File)b; //--- Sort directories before files, // otherwise alphabetical ignoring case. if (filea.isDirectory() && !fileb.isDirectory()) { return -1; } else if (!filea.isDirectory() && fileb.isDirectory()) { return 1; } else { return filea.getName().compareToIgnoreCase(fileb.getName()); } } }; } }
server/src/com/imcode/imcms/servlet/superadmin/FileAdmin.java
package com.imcode.imcms.servlet.superadmin; import com.imcode.util.HumanReadable; import com.imcode.util.MultipartHttpServletRequest; import imcode.server.Imcms; import imcode.server.ImcmsServices; import imcode.server.WebAppGlobalConstants; import imcode.server.user.UserDomainObject; import imcode.util.Utility; import imcode.util.io.FileUtility; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.DirectoryFileFilter; import org.apache.commons.io.filefilter.NotFileFilter; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import javax.servlet.ServletException; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.*; import java.util.*; public class FileAdmin extends HttpServlet { private final static Logger log = Logger.getLogger( "FileAdmin" ); private static final int BUFFER_SIZE = 65536; private static final String ADMIN_TEMPLATE_FILE_ADMIN_COPY_OVERWRIGHT_WARNING = "FileAdminCopyOverwriteWarning.html"; private static final String ADMIN_TEMPLATE_FILE_ADMIN_MOVE_OVERWRITE_WARNING = "FileAdminMoveOverwriteWarning.html"; public void doGet( HttpServletRequest req, HttpServletResponse res ) throws ServletException, IOException { UserDomainObject user = Utility.getLoggedOnUser( req ); if ( !user.isSuperAdmin() ) { Utility.redirectToStartDocument( req, res ); return; } Utility.setNoCache( res ); File dir1 = null; File dir2 = null; File[] roots = getRoots(); if (roots.length > 0) { dir1 = roots[0]; if (roots.length > 1) { dir2 = roots[1]; } } outputFileAdmin( res, user, dir1, dir2 ); } /** * Check to see if the path is a child to one of the rootpaths */ private boolean isUnderRoot( File path, File[] roots ) { for ( int i = 0; i < roots.length; i++ ) { if ( FileUtility.directoryIsAncestorOfOrEqualTo( roots[i], path ) ) { return true; } } return false; } public void doPost( HttpServletRequest req, HttpServletResponse res ) throws ServletException, IOException { ImcmsServices imcref = Imcms.getServices(); UserDomainObject user = Utility.getLoggedOnUser( req ); if ( !user.isSuperAdmin() ) { Utility.redirectToStartDocument( req, res ); return; } Utility.setNoCache( res ); MultipartHttpServletRequest mp = new MultipartHttpServletRequest(req); if ( mp.getParameter( "cancel" ) != null ) { res.sendRedirect( "AdminManager" ); return; } File[] roots = getRoots(); File dir1 = getContextRelativeDirectoryFromRequest(mp, "dir1"); File dir2 = getContextRelativeDirectoryFromRequest(mp, "dir2"); if ( !isUnderRoot( dir1, roots ) || !isUnderRoot( dir2, roots )) { doGet( req, res ); return ; } String[] files1 = mp.getParameterValues( "files1" ); String[] files2 = mp.getParameterValues( "files2" ); String name = mp.getParameter( "name" ); boolean outputHasBeenHandled = false; if ( mp.getParameter( "change1" ) != null ) { //UserDomainObject wants to change dir1 dir1 = changeDir( files1, dir1, roots ); } else if ( mp.getParameter( "change2" ) != null ) { //UserDomainObject wants to change dir2 dir2 = changeDir( files2, dir2, roots ); } else if ( mp.getParameter( "mkdir1" ) != null ) { outputHasBeenHandled = makeDirectory( name, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "mkdir2" ) != null ) { outputHasBeenHandled = makeDirectory( name, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "delete1" ) != null ) { outputHasBeenHandled = delete( 
dir1, files1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "delete2" ) != null ) { outputHasBeenHandled = delete( dir2, files2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "deleteok" ) != null ) { deleteOk( mp, roots ); } else if ( mp.getParameter( "upload1" ) != null ) { outputHasBeenHandled = upload( mp, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "upload2" ) != null ) { outputHasBeenHandled = upload( mp, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "download1" ) != null ) { outputHasBeenHandled = download( files1, dir1, res ); } else if ( mp.getParameter( "download2" ) != null ) { outputHasBeenHandled = download( files2, dir2, res ); } else if ( mp.getParameter( "rename1" ) != null ) { outputHasBeenHandled = rename( files1, name, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "rename2" ) != null ) { outputHasBeenHandled = rename( files2, name, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "copy1" ) != null ) { outputHasBeenHandled = copy( files1, dir1, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "copy2" ) != null ) { outputHasBeenHandled = copy( files2, dir2, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "copyok" ) != null ) { copyOk( mp, roots ); } else if ( mp.getParameter( "move1" ) != null ) { outputHasBeenHandled = move( files1, dir1, dir2, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "move2" ) != null ) { outputHasBeenHandled = move( files2, dir2, dir1, dir1, dir2, res, user, imcref ); } else if ( mp.getParameter( "moveok" ) != null ) { moveOk( mp, roots ); } if ( !outputHasBeenHandled ) { outputFileAdmin( res, user, dir1, dir2 ); } } private File getContextRelativeDirectoryFromRequest(HttpServletRequest request, String parameter) throws IOException { File webappPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath() ; String dirParameter = request.getParameter( parameter ); return new File( webappPath, dirParameter ).getCanonicalFile(); } private File[] getRoots() { String rootpaths = Imcms.getServices().getConfig().getFileAdminRootPaths(); List rootList = new ArrayList(); if ( rootpaths != null ) { StringTokenizer st = new StringTokenizer( rootpaths, ":;" ); int tokenCount = st.countTokens(); for ( int i = 0; i < tokenCount; i++ ) { String oneRoot = st.nextToken().trim(); File oneRootFile = FileUtility.getFileFromWebappRelativePath( oneRoot ); if ( oneRootFile.isDirectory() ) { rootList.add( oneRootFile ); } } } return (File[])rootList.toArray( new File[rootList.size()] ); } private boolean move( String[] files, File sourceDir, File destDir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; if ( files != null && !sourceDir.equals( destDir ) ) { File[] sourceFileTree = makeFileTreeList( makeAbsoluteFileList( sourceDir, files ), false ); File[] relativeSourceFileTree = makeRelativeFileList( sourceDir, sourceFileTree ); StringBuffer optionList = new StringBuffer(); StringBuffer fileList = new StringBuffer(); File webAppPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(); for ( int i = 0; i < relativeSourceFileTree.length; i++ ) { File destFile = new File( destDir, relativeSourceFileTree[i].getPath() ); fileList.append( relativeSourceFileTree[i] ).append( File.pathSeparator ); if ( destFile.exists() ) { String optionString = createWarningFileOptionString(destFile); 
optionList.append( "<option>" ).append( optionString ).append( "</option>" ); } } if ( optionList.length() > 0 ) { outputMoveOverwriteWarning( optionList.toString(), sourceDir, destDir, fileList.toString(), dir1, dir2, res, user, imcref ); handledOutput = true; } else { File[] destFiles = makeAbsoluteFileList( destDir, relativeSourceFileTree ); for ( int i = 0; i < sourceFileTree.length; i++ ) { File destFile = destFiles[i]; destFile.getParentFile().mkdirs(); File sourceFile = sourceFileTree[i]; if ( sourceFile.isFile() ) { FileUtils.copyFile( sourceFile, destFile ); } if ( sourceFile.length() == destFile.length() ) { FileUtils.forceDelete(sourceFile); } } } } return handledOutput; } private String createWarningFileOptionString(File destFile) { File webAppPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(); return FileUtility.relativizeFile(webAppPath, destFile).getPath() + ( destFile.isDirectory() ? File.separator : " [" + destFile.length() + "]" ); } private boolean copy( String[] files, File sourceDir, File destDir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; if ( files != null && !sourceDir.equals( destDir ) ) { File[] sourceFileTree = makeFileTreeList( makeAbsoluteFileList( sourceDir, files ), true ); File[] relativeSourceFileTree = makeRelativeFileList( sourceDir, sourceFileTree ); StringBuffer optionList = new StringBuffer(); StringBuffer fileList = new StringBuffer(); for ( int i = 0; i < relativeSourceFileTree.length; i++ ) { File destFile = new File( destDir, relativeSourceFileTree[i].getPath() ); fileList.append( relativeSourceFileTree[i] ).append( File.pathSeparator ); if ( destFile.exists() ) { String optionString = createWarningFileOptionString(destFile); optionList.append( "<option>" ).append( optionString ).append( "</option>" ); } } if ( optionList.length() > 0 ) { ouputCopyOverwriteWarning( optionList.toString(), sourceDir, destDir, fileList.toString(), dir1, dir2, res, user, imcref ); handledOutput = true; } else { File[] destFileTree = makeAbsoluteFileList( destDir, relativeSourceFileTree ); for ( int i = 0; i < sourceFileTree.length; i++ ) { File sourceFile = sourceFileTree[i]; File destFile = destFileTree[i]; if ( sourceFile.isDirectory() ) { destFile.mkdir(); continue; } FileUtils.copyFile( sourceFile, destFile ); } } } return handledOutput; } private boolean rename( String[] files, String name, File dir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; if ( files != null && files.length == 1 ) { //Has the user chosen just one file? if ( name != null && name.length() > 0 ) { File oldFilename = new File( dir, files[0] ); File newFilename = new File( dir, name ); if ( oldFilename.exists() ) { oldFilename.renameTo( newFilename ); } } else { outputBlankFilenameError( dir1, dir2, res, user, imcref ); handledOutput = true; } } return handledOutput; } private boolean download( String[] files, File dir, HttpServletResponse res ) throws IOException { boolean handledOutput = false; if ( files != null && files.length == 1 ) { //Has the user chosen just one file? 
File file = new File( dir, files[0] ); try { res.setContentType( "application/octet-stream" ); res.setHeader( "Content-Disposition", "attachment; filename=\"" + file.getName() + '\"' ); BufferedInputStream fin = new BufferedInputStream( new FileInputStream( file ) ); res.setContentLength( fin.available() ); byte[] buffer = new byte[BUFFER_SIZE]; ServletOutputStream out = res.getOutputStream(); for ( int bytes_read; ( bytes_read = fin.read( buffer ) ) != -1; ) { out.write( buffer, 0, bytes_read ); } handledOutput = true; } catch ( FileNotFoundException ex ) { // FIXME: Error dialog? log.debug( "Download failed", ex ); } } return handledOutput; } private boolean delete( File dir, String[] files, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; File[] farray = makeFileTreeList( makeAbsoluteFileList( dir, files ), false ); File[] filelist = makeRelativeFileList( dir, farray ); if ( filelist != null && filelist.length > 0 ) { outputDeleteWarning( filelist, dir1, dir2, dir, res, user, imcref ); handledOutput = true; } return handledOutput; } private boolean makeDirectory( String name, File dir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; if ( name != null && name.length() > 0 ) { File newname = new File( dir, name ); if ( !newname.exists() ) { newname.mkdir(); } } else { outputBlankFilenameError( dir1, dir2, res, user, imcref ); handledOutput = true; } return handledOutput; } private boolean upload( MultipartHttpServletRequest mp, File destDir, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { boolean handledOutput = false; String fileContents = mp.getParameter( "file" ); if ( fileContents == null || fileContents.length() < 1 ) { outputBlankFileError( dir1, dir2, res, user, imcref ); handledOutput = true; return handledOutput; } String filename = mp.getParameterFileItem( "file" ).getName() ; File file = new File( destDir, filename ); File uniqueFile = findUniqueFilename( file ); if ( file.equals( uniqueFile ) || file.renameTo( uniqueFile ) ) { FileOutputStream fout = new FileOutputStream( file ); byte[] bytes = fileContents.getBytes( "8859_1" ); fout.write( bytes ); fout.flush(); fout.close(); if ( !file.equals( uniqueFile ) ) { outputFileExistedAndTheOriginalWasRenamedNotice( dir1, dir2, uniqueFile.getName(), res, user, imcref ); handledOutput = true; } } else { // FIXME: Output failed-to-rename-original-file error dialog handledOutput = false; } return handledOutput; } private void outputFileAdmin( HttpServletResponse res, UserDomainObject user, File dir1, File dir2 ) throws IOException { Utility.setDefaultHtmlContentType( res ); res.getOutputStream().print( parseFileAdmin( user, dir1, dir2 ) ); } static File findUniqueFilename(File file) { File uniqueFile = file; int counter = 1; String previousSuffix = ""; while ( uniqueFile.exists() ) { String filenameWithoutSuffix = StringUtils.substringBeforeLast(uniqueFile.getName(), previousSuffix); String suffix = "." 
+ counter; counter++; uniqueFile = new File(uniqueFile.getParentFile(), filenameWithoutSuffix + suffix); previousSuffix = suffix; } return uniqueFile; } private void outputMoveOverwriteWarning( String option_list, File sourceDir, File destDir, String file_list, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { outputWarning(option_list, sourceDir, destDir, file_list, dir1, dir2, res, imcref, ADMIN_TEMPLATE_FILE_ADMIN_MOVE_OVERWRITE_WARNING, user); } private void ouputCopyOverwriteWarning( String option_list, File sourceDir, File destDir, String file_list, File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { outputWarning(option_list, sourceDir, destDir, file_list, dir1, dir2, res, imcref, ADMIN_TEMPLATE_FILE_ADMIN_COPY_OVERWRIGHT_WARNING, user); } private void outputWarning(String option_list, File sourceDir, File destDir, String file_list, File dir1, File dir2, HttpServletResponse res, ImcmsServices imcref, String template, UserDomainObject user) throws IOException { List vec = new ArrayList(); vec.add( "#filelist#" ); vec.add( option_list ); vec.add( "#source#" ); vec.add( getContextRelativeAbsolutePathToDirectory(sourceDir) ); vec.add( "#dest#" ); vec.add( getContextRelativeAbsolutePathToDirectory(destDir) ); vec.add( "#files#" ); vec.add( file_list ); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( template, user, vec ) ); } private void outputFileExistedAndTheOriginalWasRenamedNotice( File dir1, File dir2, String newFilename, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { List vec = new ArrayList(); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); vec.add( "#filename#" ); vec.add( newFilename ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( "FileAdminFileExisted.html", user, vec ) ); } private void outputBlankFileError( File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { List vec = new ArrayList(); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( "FileAdminFileBlank.html", user, vec ) ); } private void outputDeleteWarning( File[] filelist, File dir1, File dir2, File sourceDir, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { StringBuffer files = new StringBuffer(); StringBuffer optionlist = new StringBuffer(); for ( int i = 0; i < filelist.length; i++ ) { File foo = new File( sourceDir, filelist[i].getPath() ); String bar = createWarningFileOptionString(foo); optionlist.append( "<option>" ).append( bar ).append( "</option>" ); files.append( filelist[i] ).append( File.pathSeparator ); } List vec = new ArrayList(); vec.add( "#filelist#" ); vec.add( optionlist.toString() ); vec.add( "#files#" ); vec.add( StringEscapeUtils.escapeHtml(files.toString()) ); 
vec.add( "#source#" ); vec.add( getContextRelativeAbsolutePathToDirectory(sourceDir) ); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( "FileAdminDeleteWarning.html", user, vec ) ); } private void outputBlankFilenameError( File dir1, File dir2, HttpServletResponse res, UserDomainObject user, ImcmsServices imcref ) throws IOException { List vec = new ArrayList(); vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir1) ); vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(dir2) ); Utility.setDefaultHtmlContentType( res ); ServletOutputStream out = res.getOutputStream(); out.print( imcref.getAdminTemplate( "FileAdminNameBlank.html", user, vec ) ); } private interface FromSourceFileToDestinationFileCommand { void execute(File source, File destination) throws IOException; } private void moveOk( HttpServletRequest mp, File[] roots ) throws IOException { fromSourceToDestination(mp, roots, new FromSourceFileToDestinationFileCommand() { public void execute(File source, File dest) throws IOException { dest.getParentFile().mkdirs(); if ( source.isFile() ) { FileUtils.copyFile( source, dest ); } if ( source.length() == dest.length() ) { FileUtils.forceDelete( source ); } } }); } private void fromSourceToDestination(HttpServletRequest mp, File[] roots, FromSourceFileToDestinationFileCommand command) throws IOException { File srcdir = getContextRelativeDirectoryFromRequest(mp, "source" ); File dstdir = getContextRelativeDirectoryFromRequest(mp, "dest" ); String files = mp.getParameter( "files" ); if ( isUnderRoot( srcdir, roots ) && isUnderRoot( dstdir, roots ) ) { StringTokenizer st = new StringTokenizer( files, ":;" ); while ( st.hasMoreTokens() ) { String foo = st.nextToken(); File source = new File( srcdir, foo ); File dest = new File( dstdir, foo ); command.execute(source, dest); } } } private void copyOk( HttpServletRequest mp, File[] roots ) throws IOException { fromSourceToDestination(mp, roots, new FromSourceFileToDestinationFileCommand() { public void execute(File source, File destination) throws IOException { if ( source.isDirectory() ) { destination.mkdir(); } else { FileUtils.copyFile( source, destination ); } } }); } private void deleteOk( HttpServletRequest mp, File[] roots ) throws IOException { String files = mp.getParameter( "files" ) ; File path = getContextRelativeDirectoryFromRequest(mp, "source") ; if ( null != files && null != path ) { StringTokenizer st = new StringTokenizer( files, ":;" ); while ( st.hasMoreTokens() ) { File foo = new File( path, st.nextToken() ); if ( foo.exists() && isUnderRoot( foo.getParentFile(), roots ) ) { FileUtils.forceDelete(foo); } } } } private File changeDir( String[] files, File dir, File[] roots ) throws IOException { File resultDir = dir; if ( files != null && files.length == 1 ) { //Has the user chosen just one dir? String filename = files[0]; if ( new File( filename ).isAbsolute() ) { File newDir = new File( WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(), filename) ; if ( isUnderRoot( newDir, roots ) ) { resultDir = newDir; } } else { //Is the dir one of the roots? File newDir = new File( dir, filename ); //No? Treat it like a relative path... if ( newDir.isDirectory() && isUnderRoot( newDir, roots ) ) { //It IS a directory, i hope? 
resultDir = newDir; } } } return resultDir.getCanonicalFile(); } private File[] makeAbsoluteFileList( File parent, String[] filePaths ) { File[] files = new File[filePaths.length]; for ( int i = 0; i < filePaths.length; i++ ) { String filePath = filePaths[i]; files[i] = new File( filePath ); } return makeAbsoluteFileList( parent, files ); } /** * Takes a list of files that are supposed to share a common parent, and returns them in an array. */ private File[] makeAbsoluteFileList( File parent, File[] files ) { if ( files == null || parent == null ) { return null; } LinkedList list = new LinkedList(); for ( int i = 0; i < files.length; i++ ) { String filename = files[i].getPath(); if ( !( "..".equals( filename ) || new File( filename ).isAbsolute() ) ) { list.add( new File( parent, filename ) ); } } return (File[])list.toArray( new File[list.size()] ); } /** * Takes a list of files that share a common parent, orphans them, and returns them in an array. */ private File[] makeRelativeFileList( File relativeParentDir, File[] files ) { if ( files == null || relativeParentDir == null ) { return null; } File[] relativeFileList = new File[files.length]; for ( int i = 0; i < files.length; i++ ) { relativeFileList[i] = FileUtility.relativizeFile( relativeParentDir, files[i] ); } return relativeFileList; } /** * Takes a list of files and dirs in one dir, and recursively adds the files of the subdirs. */ private File[] makeFileTreeList( File[] files, boolean dirfirst ) { if ( files == null ) { return new File[0]; } LinkedList list = new LinkedList(); for ( int i = 0; i < files.length; i++ ) { if ( dirfirst ) { list.add( files[i] ); } if ( files[i].isDirectory() ) { File[] sub_list = makeFileTreeList( files[i].listFiles(), dirfirst ); for ( int j = 0; j < sub_list.length; j++ ) { list.add( sub_list[j] ); } } if ( !dirfirst ) { list.add( files[i] ); } } File[] result = new File[list.size()]; for ( int i = 0; i < result.length; i++ ) { result[i] = (File)list.removeFirst(); } return result; } private String parseFileAdmin( UserDomainObject user, File fd1, File fd2 ) throws IOException { ImcmsServices imcref = Imcms.getServices(); File[] rootlist = getRoots(); List vec = new ArrayList(); if ( fd1 != null ) { vec.add( "#dir1#" ); vec.add( getContextRelativeAbsolutePathToDirectory(fd1) ); String optionlist = createDirectoryOptionList( rootlist, fd1 ); vec.add( "#files1#" ); vec.add( optionlist ); } else { vec.add( "#dir1#" ); vec.add( "" ); vec.add( "#files1#" ); vec.add( "" ); } if ( fd2 != null ) { vec.add( "#dir2#" ); vec.add( getContextRelativeAbsolutePathToDirectory(fd2) ); String optionlist = createDirectoryOptionList( rootlist, fd2 ); vec.add( "#files2#" ); vec.add( optionlist ); } else { vec.add( "#dir2#" ); vec.add( "" ); vec.add( "#files2#" ); vec.add( "" ); } return imcref.getAdminTemplate( "FileAdmin.html", user, vec ); } private String getContextRelativeAbsolutePathToDirectory(File dir) throws IOException { File webappPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(); return File.separator + getPathRelativeTo( dir, webappPath ) + File.separator; } private String getPathRelativeTo( File file, File root ) throws IOException { if ( !FileUtility.directoryIsAncestorOfOrEqualTo( root, file ) ) { return file.getCanonicalPath(); } if ( file.equals( root ) ) { return ""; } return FileUtility.relativizeFile( root, file ).getPath() ; } private String createDirectoryOptionList( File[] rootlist, File directory ) throws IOException { StringBuffer optionlist = new StringBuffer(); File 
webappPath = WebAppGlobalConstants.getInstance().getAbsoluteWebAppPath(); for ( int i = 0; i < rootlist.length; i++ ) { String dirname = getPathRelativeTo( rootlist[i], webappPath ) ; optionlist.append(getDirectoryOption(File.separator + dirname + File.separator, File.separator + dirname + File.separator) ); } File parent = directory.getParentFile(); if ( isUnderRoot( parent, rootlist )) { optionlist.append(getDirectoryOption(".." + File.separator, ".." + File.separator) ); } File[] dirlist = directory.listFiles( (FileFilter)DirectoryFileFilter.INSTANCE ); Arrays.sort(dirlist, getFileComparator()); for ( int i = 0; null != dirlist && i < dirlist.length; i++ ) { optionlist.append(getDirectoryOption(dirlist[i].getName() + File.separator, dirlist[i].getName() + File.separator)); } File[] filelist = directory.listFiles( (FileFilter)new NotFileFilter( DirectoryFileFilter.INSTANCE ) ); Arrays.sort(filelist, getFileComparator()); for ( int i = 0; null != filelist && i < filelist.length; i++ ) { String formatedFileSize = HumanReadable.getHumanReadableByteSize( filelist[i].length() ); String filename = filelist[i].getName(); String fileNameAndSize = filename + " [" + formatedFileSize + "]"; optionlist.append("<option value=\""); optionlist.append(StringEscapeUtils.escapeHtml(filename)); optionlist.append("\">"); optionlist.append(StringEscapeUtils.escapeHtml(fileNameAndSize)); optionlist.append("</option>"); } return optionlist.toString(); } private String getDirectoryOption(String value, String text) { return "<option style=\"background-color:#f0f0f0\" value=\"" + StringEscapeUtils.escapeHtml(value) + "\">" + StringEscapeUtils.escapeHtml(text) + "</option>"; } private Comparator getFileComparator() { return new Comparator() { public int compare(Object a, Object b) { File filea = (File)a; File fileb = (File)b; //--- Sort directories before files, // otherwise alphabetical ignoring case. if (filea.isDirectory() && !fileb.isDirectory()) { return -1; } else if (!filea.isDirectory() && fileb.isDirectory()) { return 1; } else { return filea.getName().compareToIgnoreCase(fileb.getName()); } } }; } }
Issue 3525: Fix directory-changing on Windows. git-svn-id: b7e9aa1d6cd963481915708f70423d437278b157@4546 bd66a97b-2aff-0310-9095-89ca5cabf5a6
server/src/com/imcode/imcms/servlet/superadmin/FileAdmin.java
Issue 3525: Fix directory-changing on Windows.
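For context on the Windows issue referenced above: the pre-change changeDir shown in the old_contents relies on java.io.File.isAbsolute(), which treats a leading "/" as absolute on Unix but not on Windows (an absolute Windows path needs a drive letter or UNC prefix), so root-relative option values such as "/images/" take the wrong branch there. The sketch below is a hypothetical, self-contained illustration of a platform-independent check; the helper name resolve and its signature are invented for this note and are not the project's actual patch.

    import java.io.File;

    public class PortableDirCheck {
        // Hypothetical helper: treat a leading separator as "relative to the web-app root"
        // on every platform instead of relying on File.isAbsolute(), which returns false
        // for "/images/" on Windows.
        static File resolve(File webAppRoot, File currentDir, String chosen) {
            boolean rootRelative = chosen.startsWith("/") || chosen.startsWith(File.separator);
            File candidate = rootRelative
                    ? new File(webAppRoot, chosen)    // "/images/" -> <webAppRoot>/images
                    : new File(currentDir, chosen);   // "sub/"     -> <currentDir>/sub
            return candidate.isDirectory() ? candidate : currentDir;
        }

        public static void main(String[] args) {
            File root = new File(System.getProperty("user.dir"));
            System.out.println(resolve(root, root, "/"));           // resolves against the root
            System.out.println(resolve(root, root, "no-such-dir")); // falls back to currentDir
        }
    }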
Java
agpl-3.0
7c42f2a4bb8f975e365f298b4feb63bf92dc8186
0
ibcn-cloudlet/dianne,ibcn-cloudlet/dianne,ibcn-cloudlet/dianne,ibcn-cloudlet/dianne,ibcn-cloudlet/dianne,ibcn-cloudlet/dianne,ibcn-cloudlet/dianne
/******************************************************************************* * DIANNE - Framework for distributed artificial neural networks * Copyright (C) 2015 iMinds - IBCN - UGent * * This file is part of DIANNE. * * DIANNE is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * Contributors: * Tim Verbelen, Steven Bohez, Elias De Coninck *******************************************************************************/ package be.iminds.iot.dianne.tensor; import java.util.Arrays; /** * Represents an n-dimensional tensor in Java * * The actual implementation should be done in native code * * * @author tverbele * */ public class Tensor { public long address; public Tensor(){ this(null, null); } public Tensor(int... dims) { this(null, dims); } public Tensor(int d0, int[] dims){ int[] d = new int[dims.length+1]; d[0] = d0; System.arraycopy(dims, 0, d, 1, dims.length); this.address = init(null, d); } public Tensor(float[] data, int... dims) { this.address = init(data, dims); } private Tensor(long address){ this.address = address; } /** * @return the number of dimensions of this tensor */ public native int dim(); /** * @return the dimensions of this tensor */ public native int[] dims(); /** * @return the total size of the tensor */ public native int size(); /** * the size of the d'th dimension * @param d the dimension to query the size * @return the size of the dimension */ public native int size(final int d); /** * reshape the dimensions of this tensor, the underlying data remains the same */ public native void reshape(final int... d); public void reshape(final int[] d, final int df){ int[] dn = Arrays.copyOf(d, d.length+1); dn[d.length] = df; reshape(dn); } /** * get a value of the tensor * @param d indices of the element * @return the element specified by the index */ public native float get(final int... d); /** * get (a copy of) the raw data for this tensor, this way that the tensor * can be reconstructed with the createTensor(data, dims) factory method */ public native float[] get(); /** * set a value of the tensor * @param v the new value * @param d the indices of the element to set */ public native void set(final float v, final int... d); /** * copy a complete array of raw data into this tensor */ public native void set(final float[] data); /** * fill with fixed value * @param v the new value */ public native void fill(final float v); /** * fill with random values uniformely distributed between 0 and 1 */ public native void rand(); /** * fill with random values Gaussian ("normally") distributed with mean 0.0 and standard deviation 1.0 */ public native void randn(); /** * fill with 0 or 1 sampled using Bernoulli distribution with 0 <= p <= 1 */ public native void bernoulli(float p); /** * check if other tensor has same dimensions */ public native boolean sameDim(final Tensor other); /** * check if other tensor has these dimensions */ public native boolean hasDim(final int... 
dims); /** * clone this tensor into other tensor, create new one if null or different number of elements * @param other the tensor to clone into * @return the cloned tensor */ public native Tensor copyInto(final Tensor other); /** * clone this tensor - creates a deep copy of this tensor */ public Tensor clone(){ return copyInto(null); } /** * Return a subtensor narrowing dimension dim from index to index+size-1 */ public native Tensor narrow(final int dim, final int index, final int size); /** * Return a subtensor narrowing according to the ranges array. This is interpreted * as narrowing dimension 1 from ranges[0] with size ranges[1], narrowing dimension 2 from * ranges[2] with size ranges[3], etc. */ public Tensor narrow(final int... ranges){ Tensor n = this; for(int i=0;i<ranges.length-1;i+=2){ n = n.narrow(i/2, ranges[i], ranges[i+1]); } return n; } /** * Return a slice at the given index in dimension dim, dimension dim will be removed */ public native Tensor select(final int dim, final int index); /** * calculate the transpose of the tensor */ public native Tensor transpose(Tensor res, final int d1, final int d2); /** * return the diag vec of the tensor */ public native Tensor diag(Tensor res); public boolean equals(Object other){ if(other == null) return false; else if(!(other instanceof Tensor)) return false; else return equals((Tensor) other); } /** * return whether two tensors are equal (note: they have to be the same type to be equal!) * @param other object to compare to * @return true if the other object represents an equal tensor */ public boolean equals(Tensor other){ if(other == null) return false; else if(other.address == this.address) return true; else return equals(other, 0.0f); } /** * equals with threshold (note: they have to be the same type to be equal!) * @param other object to compare to * @return true if the other object represents an equal tensor with values within threshold range */ public boolean equals(Tensor other, float threshold){ if(!this.sameDim(other)) return false; else return equalsData(other, threshold); } @Override public int hashCode(){ return (int)address; } @Override public String toString(){ StringBuilder b = new StringBuilder(); b.append(Arrays.toString(dims())); float[] data = get(); if(data.length > 20){ b.append(Arrays.toString(Arrays.copyOf(data, 20))); b.insert(b.length()-1, "..."); } else { b.append(Arrays.toString(data)); } return b.toString(); } public void finalize(){ free(); } private native long init(float[] data, int[] dims); private native void free(); private native boolean equalsData(Tensor other, float threshold); }
be.iminds.iot.dianne.tensor/src/be/iminds/iot/dianne/tensor/Tensor.java
/******************************************************************************* * DIANNE - Framework for distributed artificial neural networks * Copyright (C) 2015 iMinds - IBCN - UGent * * This file is part of DIANNE. * * DIANNE is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * Contributors: * Tim Verbelen, Steven Bohez, Elias De Coninck *******************************************************************************/ package be.iminds.iot.dianne.tensor; import java.util.Arrays; /** * Represents an n-dimensional tensor in Java * * The actual implementation should be done in native code * * * @author tverbele * */ public class Tensor { public long address; public Tensor(){ this(null, null); } public Tensor(int... dims) { this(null, dims); } public Tensor(int d0, int[] dims){ int[] d = new int[dims.length+1]; d[0] = d0; System.arraycopy(dims, 0, d, 1, dims.length); this.address = init(null, d); } public Tensor(float[] data, int... dims) { this.address = init(data, dims); } private Tensor(long address){ this.address = address; } /** * @return the number of dimensions of this tensor */ public native int dim(); /** * @return the dimensions of this tensor */ public native int[] dims(); /** * @return the total size of the tensor */ public native int size(); /** * the size of the d'th dimension * @param d the dimension to query the size * @return the size of the dimension */ public native int size(final int d); /** * reshape the dimensions of this tensor, the underlying data remains the same */ public native void reshape(final int... d); public void reshape(final int[] d, final int df){ int[] dn = Arrays.copyOf(d, d.length+1); dn[d.length] = df; reshape(dn); } /** * get a value of the tensor * @param d indices of the element * @return the element specified by the index */ public native float get(final int... d); /** * get (a copy of) the raw data for this tensor, this way that the tensor * can be reconstructed with the createTensor(data, dims) factory method */ public native float[] get(); /** * set a value of the tensor * @param v the new value * @param d the indices of the element to set */ public native void set(final float v, final int... d); /** * copy a complete array of raw data into this tensor */ public native void set(final float[] data); /** * fill with fixed value * @param v the new value */ public native void fill(final float v); /** * fill with random values uniformely distributed between 0 and 1 */ public native void rand(); /** * fill with random values Gaussian ("normally") distributed with mean 0.0 and standard deviation 1.0 */ public native void randn(); /** * fill with 0 or 1 sampled using Bernoulli distribution with 0 <= p <= 1 */ public native void bernoulli(float p); /** * check if other tensor has same dimensions */ public native boolean sameDim(final Tensor other); /** * check if other tensor has these dimensions */ public native boolean hasDim(final int... 
dims); /** * clone this tensor into other tensor, create new one if null or different number of elements * @param other the tensor to clone into * @return the cloned tensor */ public native Tensor copyInto(final Tensor other); /** * Return a subtensor narrowing dimension dim from index to index+size-1 */ public native Tensor narrow(final int dim, final int index, final int size); /** * Return a subtensor narrowing according to the ranges array. This is interpreted * as narrowing dimension 1 from ranges[0] with size ranges[1], narrowing dimension 2 from * ranges[2] with size ranges[3], etc. */ public Tensor narrow(final int... ranges){ Tensor n = this; for(int i=0;i<ranges.length-1;i+=2){ n = n.narrow(i/2, ranges[i], ranges[i+1]); } return n; } /** * Return a slice at the given index in dimension dim, dimension dim will be removed */ public native Tensor select(final int dim, final int index); /** * calculate the transpose of the tensor */ public native Tensor transpose(Tensor res, final int d1, final int d2); /** * return the diag vec of the tensor */ public native Tensor diag(Tensor res); public boolean equals(Object other){ if(other == null) return false; else if(!(other instanceof Tensor)) return false; else return equals((Tensor) other); } /** * return whether two tensors are equal (note: they have to be the same type to be equal!) * @param other object to compare to * @return true if the other object represents an equal tensor */ public boolean equals(Tensor other){ if(other == null) return false; else if(other.address == this.address) return true; else return equals(other, 0.0f); } /** * equals with threshold (note: they have to be the same type to be equal!) * @param other object to compare to * @return true if the other object represents an equal tensor with values within threshold range */ public boolean equals(Tensor other, float threshold){ if(!this.sameDim(other)) return false; else return equalsData(other, threshold); } @Override public int hashCode(){ return (int)address; } @Override public String toString(){ StringBuilder b = new StringBuilder(); b.append(Arrays.toString(dims())); float[] data = get(); if(data.length > 20){ b.append(Arrays.toString(Arrays.copyOf(data, 20))); b.insert(b.length()-1, "..."); } else { b.append(Arrays.toString(data)); } return b.toString(); } public void finalize(){ free(); } private native long init(float[] data, int[] dims); private native void free(); private native boolean equalsData(Tensor other, float threshold); }
add Tensor clone convenience method
be.iminds.iot.dianne.tensor/src/be/iminds/iot/dianne/tensor/Tensor.java
add Tensor clone convenience method
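The functional addition in this commit is the clone() convenience method, which simply delegates to copyInto(null) to produce a deep copy. A brief usage sketch follows; it is illustrative only, since Tensor is backed by native code and the matching native library must be loadable for the calls to execute.

    import be.iminds.iot.dianne.tensor.Tensor;

    public class TensorCloneExample {
        public static void main(String[] args) {
            // Build a 2x2 tensor from raw data, then take a deep copy via the new clone().
            Tensor a = new Tensor(new float[]{1f, 2f, 3f, 4f}, 2, 2);
            Tensor b = a.clone();              // shorthand for a.copyInto(null)
            b.fill(0f);                        // mutating the copy...
            System.out.println(a.equals(b));   // ...does not affect the original: prints false
        }
    }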
Java
lgpl-2.1
4b21ff00a1449af3d9646ea34b71fc586d0c9210
0
beast-dev/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,4ment/beast-mcmc,beast-dev/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,adamallo/beast-mcmc,4ment/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc
/* * SubtreeLeapOperator.java * * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.evomodel.operators; import dr.evolution.tree.NodeRef; import dr.evolution.tree.Tree; import dr.evolution.util.Taxon; import dr.evolution.util.TaxonList; import dr.evomodel.tree.TreeModel; import dr.evomodelxml.operators.SubtreeLeapOperatorParser; import dr.evomodelxml.operators.TipLeapOperatorParser; import dr.inference.distribution.CauchyDistribution; import dr.inference.operators.AdaptableMCMCOperator; import dr.inference.operators.AdaptationMode; import dr.math.MathUtils; import dr.math.distributions.Distribution; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Implements the Subtree Leap move. * * This move picks a node at random (except for the root) and then moves the parent to any location * that is a certain patristic distance from its starting point (the distance is drawn from a Gaussian). * * It is always possible for the node to move up (potentially becoming the root) but the destination can't * be younger than the original node. All possible destinations are collected and then picked amongst * uniformly. 
* * @author Andrew Rambaut * @author Luiz Max Carvalho * @author Mathieu Fourment * @version $Id$ */ public class SubtreeLeapOperator extends AbstractAdaptableTreeOperator { public enum DistanceKernelType { NORMAL("normal") { @Override double getDelta(double size) { return Math.abs(MathUtils.nextGaussian() * size); } }, CAUCHY("Cauchy") { @Override double getDelta(double size) { Distribution distK = new CauchyDistribution(0, size); double u = MathUtils.nextDouble(); return Math.abs(distK.quantile(u)); } }; DistanceKernelType(String name) { this.name = name; } @Override public String toString() { return name; } String name; abstract double getDelta(double size); } private double size; private final TreeModel tree; private final DistanceKernelType distanceKernel; private final boolean slideOnly; private final List<NodeRef> tips; /** * Constructor * * @param tree the tree * @param weight the weight * @param size scaling on a unit Gaussian to draw the patristic distance from * @param targetAcceptance the desired acceptance probability * @param distanceKernel the distribution from which to draw the patristic distance * @param mode coercion mode */ public SubtreeLeapOperator(TreeModel tree, double weight, double size, DistanceKernelType distanceKernel, AdaptationMode mode, double targetAcceptance) { this(tree, weight, size, distanceKernel, false, mode, targetAcceptance); } /** * Constructor * * @param tree the tree * @param weight the weight * @param size scaling on a unit Gaussian to draw the patristic distance from * @param targetAcceptance the desired acceptance probability * @param distanceKernel the distribution from which to draw the patristic distance * @param slideOnly if true, only slide up and down the tree, never across (mimics SubtreeSlide) * @param mode coercion mode */ public SubtreeLeapOperator(TreeModel tree, double weight, double size, DistanceKernelType distanceKernel, boolean slideOnly, AdaptationMode mode, double targetAcceptance) { super(mode, targetAcceptance); this.tree = tree; setWeight(weight); this.size = size; this.distanceKernel = distanceKernel; this.slideOnly = slideOnly; this.tips = null; } /** * Constructor that takes a taxon set to pick from for the move. * * @param tree the tree * @param taxa some taxa * @param weight the weight * @param size scaling on a unit Gaussian to draw the patristic distance from * @param mode coercion mode */ public SubtreeLeapOperator(TreeModel tree, TaxonList taxa, double weight, double size, DistanceKernelType distanceKernel, AdaptationMode mode, double targetAcceptance) { super(mode, targetAcceptance); this.tree = tree; setWeight(weight); this.size = size; this.distanceKernel = distanceKernel; this.slideOnly = false; this.tips = new ArrayList<NodeRef>(); for (Taxon taxon : taxa) { boolean found = false; for (int i = 0; i < tree.getExternalNodeCount(); i++) { NodeRef tip = tree.getExternalNode(i); if (tree.getNodeTaxon(tip).equals(taxon)) { tips.add(tip); found = true; break; } } if (!found) { throw new IllegalArgumentException("Taxon, " + taxon.getId() + ", not found in tree with id " + tree.getId()); } } } /** * Do a subtree leap move. 
* * @return the log-transformed hastings ratio */ public double doOperation() { double logq; final double delta = distanceKernel.getDelta(size); final NodeRef root = tree.getRoot(); NodeRef node; if (tips == null) { // Pick a node (but not the root) do { // choose a random node avoiding root node = tree.getNode(MathUtils.nextInt(tree.getNodeCount())); } while (node == root); } else { // Pick a tip from the specified set of tips. node = tips.get(MathUtils.nextInt(tips.size())); } // get its parent - this is the node we will prune/graft final NodeRef parent = tree.getParent(node); // get the node's sibling final NodeRef sibling = getOtherChild(tree, parent, node); // and its grand parent final NodeRef grandParent = tree.getParent(parent); final Map<NodeRef, Double> destinations = getDestinations(node, parent, sibling, delta, slideOnly); final List<NodeRef> destinationNodes = new ArrayList<NodeRef>(destinations.keySet()); // pick uniformly from this list int r = MathUtils.nextInt(destinations.size()); double forwardProbability = 1.0 / destinations.size(); final NodeRef j = destinationNodes.get(r); final double newHeight = destinations.get(j); final NodeRef jParent = tree.getParent(j); if (jParent != null && newHeight > tree.getNodeHeight(jParent)) { throw new IllegalArgumentException("height error"); } if (newHeight < tree.getNodeHeight(j)) { throw new IllegalArgumentException("height error"); } tree.beginTreeEdit(); if (j == parent || jParent == parent) { // the subtree is not actually moving but the height will change } else { if (grandParent == null) { // if the parent of the original node is the root then the sibling becomes // the root. tree.removeChild(parent, sibling); tree.setRoot(sibling); } else { // remove the parent of node by connecting its sibling to its grandparent. 
tree.removeChild(parent, sibling); tree.removeChild(grandParent, parent); tree.addChild(grandParent, sibling); } if (jParent == null) { // adding the node to the root of the tree tree.addChild(parent, j); tree.setRoot(parent); } else { // remove destination edge j from its parent tree.removeChild(jParent, j); // add destination edge to the parent of node tree.addChild(parent, j); // and add the parent of i as a child of the former parent of j tree.addChild(jParent, parent); } } tree.endTreeEdit(); tree.setNodeHeight(parent, newHeight); if (tree.getParent(parent) != null && newHeight > tree.getNodeHeight(tree.getParent(parent))) { throw new IllegalArgumentException("height error"); } if (newHeight < tree.getNodeHeight(node)) { throw new IllegalArgumentException("height error"); } if (newHeight < tree.getNodeHeight(getOtherChild(tree, parent, node))) { throw new IllegalArgumentException("height error"); } final Map<NodeRef, Double> reverseDestinations = getDestinations(node, parent, getOtherChild(tree, parent, node), delta, slideOnly); double reverseProbability = 1.0 / reverseDestinations.size(); // hastings ratio = reverse Prob / forward Prob logq = Math.log(reverseProbability) - Math.log(forwardProbability); return logq; } private Map<NodeRef, Double> getDestinations(NodeRef node, NodeRef parent, NodeRef sibling, double delta, boolean slideOnly) { final Map<NodeRef, Double> destinations = new LinkedHashMap<NodeRef, Double>(); // get the parent's height final double height = tree.getNodeHeight(parent); final double heightBelow = height - delta; if (heightBelow > tree.getNodeHeight(node)) { // the destination height below the parent is compatible with the node // see if there are any destinations on the sibling's branch final List<NodeRef> edges = new ArrayList<NodeRef>(); getIntersectingEdges(tree, sibling, heightBelow, edges); // add the intersecting edges and the height for (NodeRef n : edges) { destinations.put(n, heightBelow); } } final double heightAbove = height + delta; NodeRef node1 = parent; // walk up to root boolean done = false; while (!done) { NodeRef parent1 = tree.getParent(node1); if (parent1 != null) { final double height1 = tree.getNodeHeight(parent1); if (height1 < heightAbove) { if (!slideOnly) { // if we are not just sliding up or down... 
// We haven't reached the height above the original height so go down // the sibling subtree to look for other possible destinations NodeRef sibling1 = getOtherChild(tree, parent1, node1); double heightBelow1 = height1 - (heightAbove - height1); if (heightBelow1 > tree.getNodeHeight(node)) { final List<NodeRef> edges = new ArrayList<NodeRef>(); getIntersectingEdges(tree, sibling1, heightBelow1, edges); // add the intersecting edges and the height for (NodeRef n : edges) { destinations.put(n, heightBelow1); } } } } else { // add the current node as a destination destinations.put(node1, heightAbove); done = true; } node1 = parent1; } else { // node1 is the root - add it as a destination and stop loop destinations.put(node1, heightAbove); done = true; } } return destinations; } private int getIntersectingEdges(Tree tree, NodeRef node, double height, List<NodeRef> edges) { final NodeRef parent = tree.getParent(node); if (tree.getNodeHeight(parent) < height) return 0; if (tree.getNodeHeight(node) < height) { edges.add(node); return 1; } int count = 0; for (int i = 0; i < tree.getChildCount(node); i++) { count += getIntersectingEdges(tree, tree.getChild(node, i), height, edges); } return count; } public double getSize() { return size; } public void setSize(double size) { this.size = size; } @Override protected void setAdaptableParameterValue(double value) { setSize(Math.exp(value)); } @Override protected double getAdaptableParameterValue() { return Math.log(getSize()); } @Override public double getRawParameter() { return getSize(); } public String getAdaptableParameterName() { return "size"; } public String getOperatorName() { if (tips == null) { return SubtreeLeapOperatorParser.SUBTREE_LEAP + "(" + tree.getId() + ")"; } else { return TipLeapOperatorParser.TIP_LEAP + "(" + tree.getId() + ")"; } } }
src/dr/evomodel/operators/SubtreeLeapOperator.java
/* * SubtreeLeapOperator.java * * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.evomodel.operators; import dr.evolution.tree.NodeRef; import dr.evolution.tree.Tree; import dr.evolution.util.Taxon; import dr.evolution.util.TaxonList; import dr.evomodel.tree.TreeModel; import dr.evomodelxml.operators.SubtreeLeapOperatorParser; import dr.evomodelxml.operators.TipLeapOperatorParser; import dr.inference.distribution.CauchyDistribution; import dr.inference.operators.AdaptableMCMCOperator; import dr.inference.operators.AdaptationMode; import dr.math.MathUtils; import dr.math.distributions.Distribution; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Implements the Subtree Leap move. * * This move picks a node at random (except for the root) and then moves the parent to any location * that is a certain patristic distance from its starting point (the distance is drawn from a Gaussian). * * It is always possible for the node to move up (potentially becoming the root) but the destination can't * be younger than the original node. All possible destinations are collected and then picked amongst * uniformly. 
* * @author Andrew Rambaut * @author Luiz Max Carvalho * @author Mathieu Fourment * @version $Id$ */ public class SubtreeLeapOperator extends AbstractAdaptableTreeOperator { public enum DistanceKernelType { NORMAL("normal") { @Override double getDelta(double size) { return Math.abs(MathUtils.nextGaussian() * size); } }, CAUCHY("Cauchy") { @Override double getDelta(double size) { Distribution distK = new CauchyDistribution(0, size); double u = MathUtils.nextDouble(); return Math.abs(distK.quantile(u)); } }; DistanceKernelType(String name) { this.name = name; } @Override public String toString() { return name; } String name; abstract double getDelta(double size); } private double size; private final TreeModel tree; private final DistanceKernelType distanceKernel; private final boolean slideOnly; private final List<NodeRef> tips; /** * Constructor * * @param tree the tree * @param weight the weight * @param size scaling on a unit Gaussian to draw the patristic distance from * @param targetAcceptance the desired acceptance probability * @param distanceKernel the distribution from which to draw the patristic distance * @param mode coercion mode */ public SubtreeLeapOperator(TreeModel tree, double weight, double size, DistanceKernelType distanceKernel, AdaptationMode mode, double targetAcceptance) { this(tree, weight, size, distanceKernel, false, mode, targetAcceptance); } /** * Constructor * * @param tree the tree * @param weight the weight * @param size scaling on a unit Gaussian to draw the patristic distance from * @param targetAcceptance the desired acceptance probability * @param distanceKernel the distribution from which to draw the patristic distance * @param slideOnly if true, only slide up and down the tree, never across (mimics SubtreeSlide) * @param mode coercion mode */ public SubtreeLeapOperator(TreeModel tree, double weight, double size, DistanceKernelType distanceKernel, boolean slideOnly, AdaptationMode mode, double targetAcceptance) { super(mode, targetAcceptance); this.tree = tree; setWeight(weight); this.size = size; this.distanceKernel = distanceKernel; this.slideOnly = slideOnly; this.tips = null; } /** * Constructor that takes a taxon set to pick from for the move. * * @param tree the tree * @param taxa some taxa * @param weight the weight * @param size scaling on a unit Gaussian to draw the patristic distance from * @param mode coercion mode */ public SubtreeLeapOperator(TreeModel tree, TaxonList taxa, double weight, double size, DistanceKernelType distanceKernel, AdaptationMode mode, double targetAcceptance) { super(mode, targetAcceptance); this.tree = tree; setWeight(weight); this.size = size; this.distanceKernel = distanceKernel; this.slideOnly = false; this.tips = new ArrayList<NodeRef>(); for (Taxon taxon : taxa) { boolean found = false; for (int i = 0; i < tree.getExternalNodeCount(); i++) { NodeRef tip = tree.getExternalNode(i); if (tree.getNodeTaxon(tip).equals(taxon)) { tips.add(tip); found = true; break; } } if (!found) { throw new IllegalArgumentException("Taxon, " + taxon.getId() + ", not found in tree with id " + tree.getId()); } } } /** * Do a subtree leap move. 
* * @return the log-transformed hastings ratio */ public double doOperation() { double logq; final double delta = distanceKernel.getDelta(size); final NodeRef root = tree.getRoot(); NodeRef node; if (tips == null) { // Pick a node (but not the root) do { // choose a random node avoiding root node = tree.getNode(MathUtils.nextInt(tree.getNodeCount())); } while (node == root); } else { // Pick a tip from the specified set of tips. node = tips.get(MathUtils.nextInt(tips.size())); } // get its parent - this is the node we will prune/graft final NodeRef parent = tree.getParent(node); // get the node's sibling final NodeRef sibling = getOtherChild(tree, parent, node); // and its grand parent final NodeRef grandParent = tree.getParent(parent); final Map<NodeRef, Double> destinations = getDestinations(node, parent, sibling, delta, slideOnly); final List<NodeRef> destinationNodes = new ArrayList<NodeRef>(destinations.keySet()); // pick uniformly from this list int r = MathUtils.nextInt(destinations.size()); double forwardProbability = 1.0 / destinations.size(); final NodeRef j = destinationNodes.get(r); final double newHeight = destinations.get(j); final NodeRef jParent = tree.getParent(j); if (jParent != null && newHeight > tree.getNodeHeight(jParent)) { throw new IllegalArgumentException("height error"); } if (newHeight < tree.getNodeHeight(j)) { throw new IllegalArgumentException("height error"); } tree.beginTreeEdit(); if (j == parent || jParent == parent) { // the subtree is not actually moving but the height will change } else { if (grandParent == null) { // if the parent of the original node is the root then the sibling becomes // the root. tree.removeChild(parent, sibling); tree.setRoot(sibling); } else { // remove the parent of node by connecting its sibling to its grandparent. 
tree.removeChild(parent, sibling); tree.removeChild(grandParent, parent); tree.addChild(grandParent, sibling); } if (jParent == null) { // adding the node to the root of the tree tree.addChild(parent, j); tree.setRoot(parent); } else { // remove destination edge j from its parent tree.removeChild(jParent, j); // add destination edge to the parent of node tree.addChild(parent, j); // and add the parent of i as a child of the former parent of j tree.addChild(jParent, parent); } } tree.endTreeEdit(); tree.setNodeHeight(parent, newHeight); if (tree.getParent(parent) != null && newHeight > tree.getNodeHeight(tree.getParent(parent))) { throw new IllegalArgumentException("height error"); } if (newHeight < tree.getNodeHeight(node)) { throw new IllegalArgumentException("height error"); } if (newHeight < tree.getNodeHeight(getOtherChild(tree, parent, node))) { throw new IllegalArgumentException("height error"); } final Map<NodeRef, Double> reverseDestinations = getDestinations(node, parent, getOtherChild(tree, parent, node), delta, slideOnly); double reverseProbability = 1.0 / reverseDestinations.size(); // hastings ratio = reverse Prob / forward Prob logq = Math.log(reverseProbability) - Math.log(forwardProbability); return logq; } private Map<NodeRef, Double> getDestinations(NodeRef node, NodeRef parent, NodeRef sibling, double delta, boolean slideOnly) { final Map<NodeRef, Double> destinations = new LinkedHashMap<NodeRef, Double>(); // get the parent's height final double height = tree.getNodeHeight(parent); final double heightBelow = height - delta; if (!slideOnly && heightBelow > tree.getNodeHeight(node)) { // the destination height below the parent is compatible with the node // see if there are any destinations on the sibling's branch final List<NodeRef> edges = new ArrayList<NodeRef>(); getIntersectingEdges(tree, sibling, heightBelow, edges); // add the intersecting edges and the height for (NodeRef n : edges) { destinations.put(n, heightBelow); } } final double heightAbove = height + delta; NodeRef node1 = parent; // walk up to root boolean done = false; while (!done) { NodeRef parent1 = tree.getParent(node1); if (parent1 != null) { final double height1 = tree.getNodeHeight(parent1); if (!slideOnly && height1 < heightAbove) { // haven't reached the height above the original height so go down // the sibling subtree NodeRef sibling1 = getOtherChild(tree, parent1, node1); double heightBelow1 = height1 - (heightAbove - height1); if (heightBelow1 > tree.getNodeHeight(node)) { final List<NodeRef> edges = new ArrayList<NodeRef>(); getIntersectingEdges(tree, sibling1, heightBelow1, edges); // add the intersecting edges and the height for (NodeRef n : edges) { destinations.put(n, heightBelow1); } } } else { // add the current node as a destination destinations.put(node1, heightAbove); done = true; } node1 = parent1; } else { // node1 is the root - add it as a destination and stop loop destinations.put(node1, heightAbove); done = true; } } return destinations; } private int getIntersectingEdges(Tree tree, NodeRef node, double height, List<NodeRef> edges) { final NodeRef parent = tree.getParent(node); if (tree.getNodeHeight(parent) < height) return 0; if (tree.getNodeHeight(node) < height) { edges.add(node); return 1; } int count = 0; for (int i = 0; i < tree.getChildCount(node); i++) { count += getIntersectingEdges(tree, tree.getChild(node, i), height, edges); } return count; } public double getSize() { return size; } public void setSize(double size) { this.size = size; } @Override protected void 
setAdaptableParameterValue(double value) { setSize(Math.exp(value)); } @Override protected double getAdaptableParameterValue() { return Math.log(getSize()); } @Override public double getRawParameter() { return getSize(); } public String getAdaptableParameterName() { return "size"; } public String getOperatorName() { if (tips == null) { return SubtreeLeapOperatorParser.SUBTREE_LEAP + "(" + tree.getId() + ")"; } else { return TipLeapOperatorParser.TIP_LEAP + "(" + tree.getId() + ")"; } } }
Fixing STL's slide only function.
src/dr/evomodel/operators/SubtreeLeapOperator.java
Fixing STL's slide only function.
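To clarify the slide-only fix: in the old_contents the !slideOnly guard was folded into both height conditions, so with slideOnly enabled the operator never collected the downward destinations and its upward walk stopped at the very first ancestor, even when the proposed height lay above that ancestor's parent, which could trip the "height error" checks. The new_contents drives the walk by height alone and uses the flag only to skip the sideways, cross-branch exploration. The following is a deliberately simplified, hypothetical model of that guard placement, not the operator itself:

    public class GuardPlacementDemo {
        // Old shape: the feature flag is part of the climb condition, so when slideOnly
        // is true the walk ends at the first ancestor regardless of its height.
        static int oldWalk(double[] ancestorHeights, double heightAbove, boolean slideOnly) {
            int i = 0;
            while (i < ancestorHeights.length && (!slideOnly && ancestorHeights[i] < heightAbove)) {
                i++; // the sideways exploration would happen here
            }
            return i; // index of the ancestor picked as the destination
        }

        // New shape: the climb is driven by height alone; the flag only disables the
        // sideways exploration, so slide-only moves still reach the correct ancestor.
        static int newWalk(double[] ancestorHeights, double heightAbove, boolean slideOnly) {
            int i = 0;
            while (i < ancestorHeights.length && ancestorHeights[i] < heightAbove) {
                if (!slideOnly) {
                    // explore the sibling subtree for cross-branch destinations
                }
                i++;
            }
            return i;
        }

        public static void main(String[] args) {
            double[] heights = {1.0, 2.0, 3.0};
            System.out.println(oldWalk(heights, 2.5, true)); // 0 -> stops immediately
            System.out.println(newWalk(heights, 2.5, true)); // 2 -> climbs past ancestors below 2.5
        }
    }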
Java
lgpl-2.1
fb681b1fdcdca4585dc1dbd61f743ad683290c13
0
certusoft/swingx,certusoft/swingx
/* * $Id$ * * Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle, * Santa Clara, California 95054, U.S.A. All rights reserved. */ package org.jdesktop.swingx; import java.awt.Component; import java.awt.event.ActionEvent; import java.util.regex.Pattern; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.JCheckBox; import javax.swing.JLabel; import javax.swing.JOptionPane; /** * Simple FindPanel for usage in a JXDialog. * * * @author ?? * @author Jeanette Winzenburg */ public class JXFindPanel extends AbstractPatternPanel { public static final String FIND_NEXT_ACTION_COMMAND = "findNext"; public static final String FIND_PREVIOUS_ACTION_COMMAND = "findPrevious"; protected Searchable searchable; protected JCheckBox wrapCheck; protected JCheckBox backCheck; private boolean initialized; // protected JButton findNext; // protected JButton findPrevious; public JXFindPanel() { this(null); } public JXFindPanel(Searchable searchable) { setSearchable(searchable); initActions(); } /** * Sets the Searchable targeted of this find widget. * Triggers a search with null pattern to release the old * searchable, if any. * * @param searchable */ public void setSearchable(Searchable searchable) { if ((this.searchable != null) && this.searchable.equals(searchable)) return; Searchable old = this.searchable; if (old != null) { old.search((Pattern) null); } this.searchable = searchable; getPatternModel().setFoundIndex(-1); firePropertyChange("searchable", old, this.searchable); } public void addNotify() { init(); super.addNotify(); } protected void init() { if (initialized) return; initialized = true; initComponents(); build(); bind(); setName(getUIString(SEARCH_TITLE)); } //------------------ support synch the model <--> components protected void bind() { super.bind(); getActionContainerFactory().configureButton(wrapCheck, getAction(PatternModel.MATCH_WRAP_ACTION_COMMAND), null); getActionContainerFactory().configureButton(backCheck, getAction(PatternModel.MATCH_BACKWARDS_ACTION_COMMAND), null); // findNext.setAction(getAction(EXECUTE_FIND_NEXT_ACTION_COMMAND)); // findPrevious.setAction(getAction(EXECUTE_FIND_PREVIOUS_ACTION_COMMAND)); } /** * called from listening to empty property of PatternModel. * * this implementation calls super and additionally synchs the * enabled state of FIND_NEXT_ACTION_COMMAND, FIND_PREVIOUS_ACTION_COMMAND * to !empty. */ @Override protected void refreshEmptyFromModel() { super.refreshEmptyFromModel(); boolean enabled = !getPatternModel().isEmpty(); getAction(FIND_NEXT_ACTION_COMMAND).setEnabled(enabled); getAction(FIND_PREVIOUS_ACTION_COMMAND).setEnabled(enabled); } //--------------------- action callbacks /** * Action callback for Find action. * Find next/previous match using current setting of direction flag. * */ public void match() { doFind(); } /** * Action callback for FindNext action. * Sets direction flag to forward and calls find. */ public void findNext() { getPatternModel().setBackwards(false); doFind(); } /** * Action callback for FindPrevious action. * Sets direction flag to previous and calls find. 
*/ public void findPrevious() { getPatternModel().setBackwards(true); doFind(); } protected void doFind() { if (searchable == null) return; int foundIndex = doSearch(); boolean notFound = (foundIndex == -1) && !getPatternModel().isEmpty(); if (notFound) { if (getPatternModel().isWrapping()) { notFound = doSearch() == -1; } } if (notFound) { showNotFoundMessage(); } else { showFoundMessage(); } } /** * @return */ protected int doSearch() { int foundIndex = searchable.search(getPatternModel().getPattern(), getPatternModel().getFoundIndex(), getPatternModel().isBackwards()); getPatternModel().setFoundIndex(foundIndex); return foundIndex; } protected void showFoundMessage() { } /** * */ protected void showNotFoundMessage() { JOptionPane.showMessageDialog(this, "Value not found"); } //-------------------------- initial /** * */ protected void initExecutables() { getActionMap().put(FIND_NEXT_ACTION_COMMAND, createBoundAction(FIND_NEXT_ACTION_COMMAND, "findNext")); getActionMap().put(FIND_PREVIOUS_ACTION_COMMAND, createBoundAction(FIND_PREVIOUS_ACTION_COMMAND, "findPrevious")); super.initExecutables(); } //----------------------------- init ui /** create components. * */ protected void initComponents() { super.initComponents(); wrapCheck = new JCheckBox(); backCheck = new JCheckBox(); // findNext = new JButton(); // findPrevious = new JButton(); } protected void build() { Box lBox = new Box(BoxLayout.LINE_AXIS); lBox.add(searchLabel); lBox.add(new JLabel(":")); lBox.add(new JLabel(" ")); lBox.setAlignmentY(Component.TOP_ALIGNMENT); Box rBox = new Box(BoxLayout.PAGE_AXIS); rBox.add(searchField); rBox.add(matchCheck); rBox.add(wrapCheck); rBox.add(backCheck); // just want to see... // rBox.add(findNext); // rBox.add(findPrevious); rBox.setAlignmentY(Component.TOP_ALIGNMENT); setLayout(new BoxLayout(this, BoxLayout.LINE_AXIS)); add(lBox); add(rBox); } //----------------------- obsolete actions - no longer use //----------------------- kept here to remember adding names etc to resources private abstract class CheckAction extends AbstractAction { public CheckAction(String name) { super(name); } public void actionPerformed(ActionEvent evt) { } } private class MatchAction extends CheckAction { public MatchAction() { super("Match upper/lower case"); putValue(Action.MNEMONIC_KEY, new Integer('M')); } } private class WrapAction extends CheckAction { public WrapAction() { super("Wrap around"); putValue(Action.MNEMONIC_KEY, new Integer('W')); } } private class BackwardAction extends CheckAction { public BackwardAction() { super("Search Backwards"); putValue(Action.MNEMONIC_KEY, new Integer('B')); } } }
src/java/org/jdesktop/swingx/JXFindPanel.java
/* * $Id$ * * Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle, * Santa Clara, California 95054, U.S.A. All rights reserved. */ package org.jdesktop.swingx; import java.awt.Component; import java.awt.event.ActionEvent; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.JCheckBox; import javax.swing.JLabel; import javax.swing.JOptionPane; /** * Simple FindPanel for usage in a JXDialog. * * * @author ?? * @author Jeanette Winzenburg */ public class JXFindPanel extends AbstractPatternPanel { public static final String FIND_NEXT_ACTION_COMMAND = "findNext"; public static final String FIND_PREVIOUS_ACTION_COMMAND = "findPrevious"; protected Searchable searchable; protected JCheckBox wrapCheck; protected JCheckBox backCheck; private boolean initialized; // protected JButton findNext; // protected JButton findPrevious; public JXFindPanel() { this(null); } public JXFindPanel(Searchable searchable) { setSearchable(searchable); initActions(); } /** * Sets the Searchable targeted with this dialog. * * @param searchable */ public void setSearchable(Searchable searchable) { if ((this.searchable != null) && this.searchable.equals(searchable)) return; Object old = this.searchable; this.searchable = searchable; getPatternModel().setFoundIndex(-1); firePropertyChange("searchable", old, this.searchable); } public void addNotify() { init(); super.addNotify(); } protected void init() { if (initialized) return; initialized = true; initComponents(); build(); bind(); setName(getUIString(SEARCH_TITLE)); } //------------------ support synch the model <--> components protected void bind() { super.bind(); getActionContainerFactory().configureButton(wrapCheck, getAction(PatternModel.MATCH_WRAP_ACTION_COMMAND), null); getActionContainerFactory().configureButton(backCheck, getAction(PatternModel.MATCH_BACKWARDS_ACTION_COMMAND), null); // findNext.setAction(getAction(EXECUTE_FIND_NEXT_ACTION_COMMAND)); // findPrevious.setAction(getAction(EXECUTE_FIND_PREVIOUS_ACTION_COMMAND)); } /** * called from listening to empty property of PatternModel. * * this implementation calls super and additionally synchs the * enabled state of FIND_NEXT_ACTION_COMMAND, FIND_PREVIOUS_ACTION_COMMAND * to !empty. */ @Override protected void refreshEmptyFromModel() { super.refreshEmptyFromModel(); boolean enabled = !getPatternModel().isEmpty(); getAction(FIND_NEXT_ACTION_COMMAND).setEnabled(enabled); getAction(FIND_PREVIOUS_ACTION_COMMAND).setEnabled(enabled); } //--------------------- action callbacks /** * Action callback for Find action. * Find next/previous match using current setting of direction flag. * */ public void match() { doFind(); } /** * Action callback for FindNext action. * Sets direction flag to forward and calls find. */ public void findNext() { getPatternModel().setBackwards(false); match(); } /** * Action callback for FindPrevious action. * Sets direction flag to previous and calls find. 
*/ public void findPrevious() { getPatternModel().setBackwards(true); match(); } protected void doFind() { if (searchable == null) return; int foundIndex = doSearch(); if ((foundIndex == -1) && !getPatternModel().isEmpty()){ boolean notFound = true; if (getPatternModel().isWrapping()) { notFound = doSearch() == -1; } if (notFound) { showNotFoundMessage(); } } } /** * @return */ protected int doSearch() { int foundIndex = searchable.search(getPatternModel().getPattern(), getPatternModel().getFoundIndex(), getPatternModel().isBackwards()); getPatternModel().setFoundIndex(foundIndex); return foundIndex; } /** * */ protected void showNotFoundMessage() { JOptionPane.showMessageDialog(this, "Value not found"); } //-------------------------- initial /** * */ protected void initExecutables() { getActionMap().put(FIND_NEXT_ACTION_COMMAND, createBoundAction(FIND_NEXT_ACTION_COMMAND, "findNext")); getActionMap().put(FIND_PREVIOUS_ACTION_COMMAND, createBoundAction(FIND_PREVIOUS_ACTION_COMMAND, "findPrevious")); super.initExecutables(); } //----------------------------- init ui /** create components. * */ protected void initComponents() { super.initComponents(); wrapCheck = new JCheckBox(); backCheck = new JCheckBox(); // findNext = new JButton(); // findPrevious = new JButton(); } protected void build() { Box lBox = new Box(BoxLayout.LINE_AXIS); lBox.add(searchLabel); lBox.add(new JLabel(":")); lBox.add(new JLabel(" ")); lBox.setAlignmentY(Component.TOP_ALIGNMENT); Box rBox = new Box(BoxLayout.PAGE_AXIS); rBox.add(searchField); rBox.add(matchCheck); rBox.add(wrapCheck); rBox.add(backCheck); // just want to see... // rBox.add(findNext); // rBox.add(findPrevious); rBox.setAlignmentY(Component.TOP_ALIGNMENT); setLayout(new BoxLayout(this, BoxLayout.LINE_AXIS)); add(lBox); add(rBox); } //----------------------- obsolete actions - no longer use //----------------------- kept here to remember adding names etc to resources private abstract class CheckAction extends AbstractAction { public CheckAction(String name) { super(name); } public void actionPerformed(ActionEvent evt) { } } private class MatchAction extends CheckAction { public MatchAction() { super("Match upper/lower case"); putValue(Action.MNEMONIC_KEY, new Integer('M')); } } private class WrapAction extends CheckAction { public WrapAction() { super("Wrap around"); putValue(Action.MNEMONIC_KEY, new Integer('W')); } } private class BackwardAction extends CheckAction { public BackwardAction() { super("Search Backwards"); putValue(Action.MNEMONIC_KEY, new Integer('B')); } } }
minor cleanup: - added hook for customizing found state - release old searchable on setSearchable
src/java/org/jdesktop/swingx/JXFindPanel.java
minor cleanup: - added hook for customizing found state - release old searchable on setSearchable
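Two things change in this commit: setSearchable now releases the previous Searchable by issuing a search with a null pattern before swapping it out, and doFind now calls a new, empty showFoundMessage() hook as the counterpart of showNotFoundMessage(). A small, hypothetical subclass shows one way the hook can be used, replacing the modal "not found" dialog with an inline status label; it assumes the SwingX classes are on the classpath, and the label placement is illustrative only.

    import javax.swing.JLabel;
    import org.jdesktop.swingx.JXFindPanel;

    public class StatusFindPanel extends JXFindPanel {
        private final JLabel status = new JLabel(" ");

        public StatusFindPanel() {
            add(status); // shown alongside the panel's own components; placement is illustrative
        }

        @Override
        protected void showFoundMessage() {
            status.setText("Match found");
        }

        @Override
        protected void showNotFoundMessage() {
            status.setText("Value not found"); // no modal dialog
        }
    }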
Java
apache-2.0
144e043543afa5071a178b4f6acf43beb2ac7f96
0
SpineEventEngine/core-java,SpineEventEngine/core-java,SpineEventEngine/core-java
/* * Copyright 2018, TeamDev Ltd. All rights reserved. * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.server.entity.storage; import io.spine.annotation.Internal; import io.spine.server.entity.Entity; import io.spine.server.entity.storage.EntityColumn.MemoizedValue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.Lists.newLinkedList; import static io.spine.server.entity.storage.ColumnRecords.getAnnotatedVersion; import static io.spine.util.Exceptions.newIllegalStateException; import static io.spine.validate.Validate.checkNotEmptyOrBlank; import static java.lang.String.format; /** * A utility class for working with {@linkplain EntityColumn entity columns}. * * <p>The methods of all {@link Entity entities} that fit * <a href="http://download.oracle.com/otndocs/jcp/7224-javabeans-1.01-fr-spec-oth-JSpec/"> * the Java Bean</a> getter spec and annotated with {@link Column} * are considered {@linkplain EntityColumn columns}. * * <p>Inherited columns are taken into account too, but building entity hierarchies is strongly * discouraged. * * <p>Note that the returned type of an {@link EntityColumn} getter must either be primitive or * serializable, otherwise a runtime exception is thrown when trying to get an instance of * {@link EntityColumn}. * * @author Dmytro Dashenkov * @see EntityColumn */ @Internal public class Columns { private static final String SPINE_PACKAGE = "io.spine."; private static final String NON_PUBLIC_CLASS_WARNING = "Passed entity class %s is not public. Storage fields won't be extracted."; private static final String NON_PUBLIC_INTERNAL_CLASS_WARNING = "Passed entity class %s is probably a Spine internal non-public entity. " + "Storage fields won't be extracted."; /** * Prevent instantiation of this utility class. */ private Columns() { } /** * Ensures that the entity columns are valid for the specified entity class. * * <p>This method tries to extract {@linkplain EntityColumn entity columns} from the given class, * performing all checks along the way. * * <p>If extraction is performed without errors, the check is passed, if not - failed. * * @param entityClass the class to check entity columns */ public static void checkColumnDefinitions(Class<? 
extends Entity> entityClass) { checkNotNull(entityClass); obtainColumns(entityClass); } /** * Retrieves an {@link EntityColumn} instance of the given name and from the given entity class. * * <p>If no column is found, an {@link IllegalArgumentException} is thrown. * * @param entityClass the class containing the {@link EntityColumn} definition * @param columnName the entity column {@linkplain EntityColumn#getName() name} * @return an instance of {@link EntityColumn} with the given name * @throws IllegalArgumentException if the {@link EntityColumn} is not found */ static EntityColumn findColumn(Class<? extends Entity> entityClass, String columnName) { checkNotNull(entityClass); checkNotEmptyOrBlank(columnName, "entity column name"); final Collection<EntityColumn> entityColumns = obtainColumns(entityClass); for (EntityColumn column : entityColumns) { if (column.getName() .equals(columnName)) { return column; } } throw new IllegalArgumentException( format("Could not find an EntityColumn description for %s.%s.", entityClass.getCanonicalName(), columnName)); } /** * Retrieves {@linkplain EntityColumn columns} for the given {@code Entity} class. * * <p>Performs checks for entity column definitions correctness along the way. * * <p>If check for correctness fails, throws {@link IllegalStateException}. * * @param entityClass the class containing the {@link EntityColumn} definition * @return a {@link Collection} of {@link EntityColumn} corresponded to entity class * @throws IllegalStateException if entity column definitions are incorrect */ static Collection<EntityColumn> obtainColumns(Class<? extends Entity> entityClass) { checkNotNull(entityClass); final BeanInfo entityDescriptor; try { entityDescriptor = Introspector.getBeanInfo(entityClass); } catch (IntrospectionException e) { throw new IllegalStateException(e); } final Collection<EntityColumn> entityColumns = newLinkedList(); for (PropertyDescriptor property : entityDescriptor.getPropertyDescriptors()) { final Method getter = property.getReadMethod(); final boolean isEntityColumn = getAnnotatedVersion(getter).isPresent(); if (isEntityColumn) { final EntityColumn column = EntityColumn.from(getter); entityColumns.add(column); } } checkRepeatedColumnNames(entityColumns, entityClass); return entityColumns; } /** * Generates the {@linkplain EntityColumn column} values for the given {@linkplain Entity}. * * <p>Retrieves {@linkplain EntityColumn columns} for the given {@code Entity} class, then generates * {@linkplain MemoizedValue memoized values} from them. * * @param entity an {@link Entity} to get the {@linkplain EntityColumn column} values from * @param <E> the type of the {@link Entity} * @return a {@link Map} of the column {@linkplain EntityColumn#getStoredName() * names for storing} to their {@linkplain MemoizedValue memoized values} * @see MemoizedValue */ static <E extends Entity<?, ?>> Map<String, EntityColumn.MemoizedValue> extractColumnValues(E entity) { checkNotNull(entity); final Collection<EntityColumn> entityColumns = obtainColumns(entity.getClass()); return extractColumnValues(entity, entityColumns); } /** * Generates the {@linkplain EntityColumn column} values for the given {@linkplain Entity}. * * <p>Uses given {@linkplain EntityColumn entity columns} for the value extraction. * * <p>This way the process of {@linkplain Columns#obtainColumns(Class) obtaining columns} from * the given {@link Entity} class can be skipped. 
* * @param entity an {@link Entity} to get the {@linkplain EntityColumn column} values from * @param entityColumns {@linkplain EntityColumn entity columns} which values should be extracted * @param <E> the type of the {@link Entity} * @return a {@link Map} of the column {@linkplain EntityColumn#getStoredName() * names for storing} to their {@linkplain MemoizedValue memoized values} * @see MemoizedValue */ static <E extends Entity<?, ?>> Map<String, MemoizedValue> extractColumnValues( E entity, Collection<EntityColumn> entityColumns) { checkNotNull(entityColumns); checkNotNull(entity); final Class<? extends Entity> entityClass = entity.getClass(); if (!isPublic(entityClass)) { return Collections.emptyMap(); } final Map<String, MemoizedValue> values = recordColumnValuesToMap(entityColumns, entity); return values; } /** * Checks if the given {@link Entity entity class} is public. * * <p>Outputs a message to the log if the class is non-public. * * @param entityClass {@link Entity entity class} to check * @return {@code true} if class is public and {@code false} otherwise */ private static boolean isPublic(Class<? extends Entity> entityClass) { checkNotNull(entityClass); final int modifiers = entityClass.getModifiers(); if (!Modifier.isPublic(modifiers)) { logNonPublicClass(entityClass); return false; } return true; } /** * Generates {@linkplain MemoizedValue memoized values} for the given {@linkplain EntityColumn columns} * of the given {@linkplain Entity}. * * <p>Records the result to {@link Map}. * * @param columns {@link Collection collection} of columns to extract values from * @param entity {@link Entity} from which to extract the values * @return a {@link Map} of the column {@linkplain EntityColumn#getStoredName() * names for storing} to their {@linkplain MemoizedValue memoized values} * @see MemoizedValue */ private static <E extends Entity<?, ?>> Map<String, MemoizedValue> recordColumnValuesToMap( Collection<EntityColumn> columns, E entity) { final Map<String, MemoizedValue> values = new HashMap<>(columns.size()); for (EntityColumn column : columns) { final String name = column.getStoredName(); final MemoizedValue value = column.memoizeFor(entity); values.put(name, value); } return values; } /** * Ensures that the specified columns have no repeated names. * * <p>If the check fails, throws {@link IllegalStateException}. * * @param columns the columns to check * @param entityClass the entity class for the columns * @throws IllegalStateException if columns contain repeated names */ private static void checkRepeatedColumnNames(Iterable<EntityColumn> columns, Class<? extends Entity> entityClass) { final Collection<String> checkedNames = newLinkedList(); for (EntityColumn column : columns) { final String columnName = column.getStoredName(); if (checkedNames.contains(columnName)) { throw newIllegalStateException( "The entity `%s` has columns with the same name for storing `%s`.", entityClass.getName(), columnName); } checkedNames.add(columnName); } } /** * Writes the non-public {@code Entity} class warning into the log unless the passed class * represents one of the Spine internal {@link Entity} implementations. */ private static void logNonPublicClass(Class<? 
extends Entity> cls) { final String className = cls.getCanonicalName(); final boolean internal = className.startsWith(SPINE_PACKAGE); if (internal) { log().trace(format(NON_PUBLIC_INTERNAL_CLASS_WARNING, className)); } else { log().warn(format(NON_PUBLIC_CLASS_WARNING, className)); } } private static Logger log() { return LogSingleton.INSTANCE.value; } private enum LogSingleton { INSTANCE; @SuppressWarnings("NonSerializableFieldInSerializableClass") private final Logger value = LoggerFactory.getLogger(Columns.class); } }
server/src/main/java/io/spine/server/entity/storage/Columns.java
/* * Copyright 2018, TeamDev Ltd. All rights reserved. * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.server.entity.storage; import io.spine.annotation.Internal; import io.spine.server.entity.Entity; import io.spine.server.entity.storage.EntityColumn.MemoizedValue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.Lists.newLinkedList; import static io.spine.server.entity.storage.ColumnRecords.getAnnotatedVersion; import static io.spine.util.Exceptions.newIllegalStateException; import static io.spine.validate.Validate.checkNotEmptyOrBlank; import static java.lang.String.format; /** * A utility class for working with {@linkplain EntityColumn entity columns}. * * <p>The methods of all {@link Entity entities} that fit * <a href="http://download.oracle.com/otndocs/jcp/7224-javabeans-1.01-fr-spec-oth-JSpec/"> * the Java Bean</a> getter spec and annotated with {@link Column} * are considered {@linkplain EntityColumn columns}. * * <p>Inherited columns are taken into account too, but building entity hierarchies is strongly * discouraged. * * <p>Note that the returned type of an {@link EntityColumn} getter must either be primitive or * serializable, otherwise a runtime exception is thrown when trying to get an instance of * {@link EntityColumn}. * * @author Dmytro Dashenkov * @see EntityColumn */ @Internal public class Columns { private static final String SPINE_PACKAGE = "io.spine."; private static final String NON_PUBLIC_CLASS_WARNING = "Passed entity class %s is not public. Storage fields won't be extracted."; private static final String NON_PUBLIC_INTERNAL_CLASS_WARNING = "Passed entity class %s is probably a Spine internal non-public entity. " + "Storage fields won't be extracted."; /** * Prevent instantiation of this utility class. */ private Columns() { } /** * Ensures that the entity columns are valid for the specified entity class. * * <p>This method tries to extract {@linkplain EntityColumn entity columns} from the given class, * performing all checks along the way. * * <p>If extraction is performed without errors, the check is passed, if not - failed. * * @param entityClass the class to check entity columns */ public static void checkColumnDefinitions(Class<? 
extends Entity> entityClass) { checkNotNull(entityClass); obtainColumns(entityClass); } /** * Retrieves an {@link EntityColumn} instance of the given name and from the given entity class. * * <p>If no column is found, an {@link IllegalArgumentException} is thrown. * * @param entityClass the class containing the {@link EntityColumn} definition * @param columnName the entity column {@linkplain EntityColumn#getName() name} * @return an instance of {@link EntityColumn} with the given name * @throws IllegalArgumentException if the {@link EntityColumn} is not found */ static EntityColumn findColumn(Class<? extends Entity> entityClass, String columnName) { checkNotNull(entityClass); checkNotEmptyOrBlank(columnName, "entity column name"); final Collection<EntityColumn> entityColumns = obtainColumns(entityClass); for (EntityColumn column : entityColumns) { if (column.getName() .equals(columnName)) { return column; } } throw new IllegalArgumentException( format("Could not find an EntityColumn description for %s.%s.", entityClass.getCanonicalName(), columnName)); } /** * Retrieves {@linkplain EntityColumn columns} for the given {@code Entity} class. * * <p>Performs checks for entity column definitions correctness along the way. * * <p>If check for correctness fails, throws {@link IllegalStateException}. * * @param entityClass the class containing the {@link EntityColumn} definition * @return a {@link Collection} of {@link EntityColumn} corresponded to entity class * @throws IllegalStateException if entity column definitions are incorrect */ static Collection<EntityColumn> obtainColumns(Class<? extends Entity> entityClass) { checkNotNull(entityClass); final BeanInfo entityDescriptor; try { entityDescriptor = Introspector.getBeanInfo(entityClass); } catch (IntrospectionException e) { throw new IllegalStateException(e); } final Collection<EntityColumn> entityColumns = newLinkedList(); for (PropertyDescriptor property : entityDescriptor.getPropertyDescriptors()) { final Method getter = property.getReadMethod(); final boolean isEntityColumn = getAnnotatedVersion(getter).isPresent(); if (isEntityColumn) { final EntityColumn column = EntityColumn.from(getter); entityColumns.add(column); } } checkRepeatedColumnNames(entityColumns, entityClass); return entityColumns; } /** * Generates the {@linkplain EntityColumn column} values for the given {@linkplain Entity}. * * <p>Retrieves {@linkplain EntityColumn columns} for the given {@code Entity} class, then generates * {@linkplain MemoizedValue memoized values} from them. * * @param entity an {@link Entity} to get the {@linkplain EntityColumn column} values from * @param <E> the type of the {@link Entity} * @return a {@link Map} of the column {@linkplain EntityColumn#getStoredName() * names for storing} to their {@linkplain MemoizedValue memoized values} * @see MemoizedValue */ static <E extends Entity<?, ?>> Map<String, EntityColumn.MemoizedValue> extractColumnValues(E entity) { checkNotNull(entity); final Collection<EntityColumn> entityColumns = obtainColumns(entity.getClass()); return extractColumnValues(entity, entityColumns); } /** * Generates the {@linkplain EntityColumn column} values for the given {@linkplain Entity}. * * <p>Uses given {@linkplain EntityColumn entity columns} for the value extraction. 
* * @param entity an {@link Entity} to get the {@linkplain EntityColumn column} values from * @param entityColumns {@linkplain EntityColumn entity columns} which values should be extracted * @param <E> the type of the {@link Entity} * @return a {@link Map} of the column {@linkplain EntityColumn#getStoredName() * names for storing} to their {@linkplain MemoizedValue memoized values} * @see MemoizedValue */ static <E extends Entity<?, ?>> Map<String, MemoizedValue> extractColumnValues( E entity, Collection<EntityColumn> entityColumns) { checkNotNull(entityColumns); checkNotNull(entity); final Class<? extends Entity> entityClass = entity.getClass(); if (!isPublic(entityClass)) { return Collections.emptyMap(); } final Map<String, MemoizedValue> values = recordColumnValuesToMap(entityColumns, entity); return values; } /** * Checks if the given {@link Entity entity class} is public. * * <p>Outputs a message to the log if the class is non-public. * * @param entityClass {@link Entity entity class} to check * @return {@code true} if class is public and {@code false} otherwise */ private static boolean isPublic(Class<? extends Entity> entityClass) { checkNotNull(entityClass); final int modifiers = entityClass.getModifiers(); if (!Modifier.isPublic(modifiers)) { logNonPublicClass(entityClass); return false; } return true; } /** * Generates {@linkplain MemoizedValue memoized values} for the given {@linkplain EntityColumn columns} * of the given {@linkplain Entity}. * * <p>Records the result to {@link Map}. * * @param columns {@link Collection collection} of columns to extract values from * @param entity {@link Entity} from which to extract the values * @return a {@link Map} of the column {@linkplain EntityColumn#getStoredName() * names for storing} to their {@linkplain MemoizedValue memoized values} * @see MemoizedValue */ private static <E extends Entity<?, ?>> Map<String, MemoizedValue> recordColumnValuesToMap( Collection<EntityColumn> columns, E entity) { final Map<String, MemoizedValue> values = new HashMap<>(columns.size()); for (EntityColumn column : columns) { final String name = column.getStoredName(); final MemoizedValue value = column.memoizeFor(entity); values.put(name, value); } return values; } /** * Ensures that the specified columns have no repeated names. * * <p>If the check fails, throws {@link IllegalStateException}. * * @param columns the columns to check * @param entityClass the entity class for the columns * @throws IllegalStateException if columns contain repeated names */ private static void checkRepeatedColumnNames(Iterable<EntityColumn> columns, Class<? extends Entity> entityClass) { final Collection<String> checkedNames = newLinkedList(); for (EntityColumn column : columns) { final String columnName = column.getStoredName(); if (checkedNames.contains(columnName)) { throw newIllegalStateException( "The entity `%s` has columns with the same name for storing `%s`.", entityClass.getName(), columnName); } checkedNames.add(columnName); } } /** * Writes the non-public {@code Entity} class warning into the log unless the passed class * represents one of the Spine internal {@link Entity} implementations. */ private static void logNonPublicClass(Class<? 
extends Entity> cls) { final String className = cls.getCanonicalName(); final boolean internal = className.startsWith(SPINE_PACKAGE); if (internal) { log().trace(format(NON_PUBLIC_INTERNAL_CLASS_WARNING, className)); } else { log().warn(format(NON_PUBLIC_CLASS_WARNING, className)); } } private static Logger log() { return LogSingleton.INSTANCE.value; } private enum LogSingleton { INSTANCE; @SuppressWarnings("NonSerializableFieldInSerializableClass") private final Logger value = LoggerFactory.getLogger(Columns.class); } }
Clarify doc
server/src/main/java/io/spine/server/entity/storage/Columns.java
Clarify doc
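A minimal sketch to illustrate the contract documented in Columns.java above: a public JavaBean-style getter annotated with @Column becomes an EntityColumn, and Columns.checkColumnDefinitions can verify the definitions eagerly. The class TaskEntity and its base class are hypothetical and not part of the commit; only @Column, Entity and checkColumnDefinitions come from the code shown above.

package io.spine.example;

import io.spine.server.entity.storage.Column;
import io.spine.server.entity.storage.Columns;

// Hypothetical entity: a real one would extend a concrete Spine Entity base
// class (omitted here); the getter pattern is what Columns introspects.
public class TaskEntity /* extends <concrete Entity base> */ {

    // Picked up by Columns.obtainColumns() through Introspector.getBeanInfo();
    // the return type must be primitive or serializable.
    @Column
    public boolean isArchived() {
        return false; // hypothetical stored flag
    }

    // Note: two getters mapping to the same stored name would make
    // checkRepeatedColumnNames() throw an IllegalStateException.
}

// Eager validation via the public API from the diff (commented out because the
// concrete entity base class is omitted in this sketch):
// Columns.checkColumnDefinitions(TaskEntity.class);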
Java
apache-2.0
6339b8f54751c3c6af4a8911c0f98196f5ec600e
0
AnySoftKeyboard/AnySoftKeyboard,OmerMachluf/Mykeyboard,AnySoftKeyboard/AnySoftKeyboard,AnySoftKeyboard/AnySoftKeyboard,AnySoftKeyboard/AnySoftKeyboard,OmerMachluf/Mykeyboard,OmerMachluf/Mykeyboard,OmerMachluf/Mykeyboard,OmerMachluf/Mykeyboard,AnySoftKeyboard/AnySoftKeyboard,OmerMachluf/Mykeyboard,AnySoftKeyboard/AnySoftKeyboard,OmerMachluf/Mykeyboard,AnySoftKeyboard/AnySoftKeyboard
package com.menny.android.anysoftkeyboard.keyboards; import java.util.HashMap; import android.content.res.Resources; import android.content.res.XmlResourceParser; import android.inputmethodservice.Keyboard; import android.util.Log; import android.view.inputmethod.EditorInfo; import com.menny.android.anysoftkeyboard.AnyKeyboardContextProvider; import com.menny.android.anysoftkeyboard.AnySoftKeyboard; import com.menny.android.anysoftkeyboard.R; import com.menny.android.anysoftkeyboard.Workarounds; import com.menny.android.anysoftkeyboard.Dictionary.Dictionary; public abstract class AnyKeyboard extends Keyboard { protected class ShiftedKeyData { public final char ShiftCharacter; public final AnyKey KeyboardKey; public ShiftedKeyData(AnyKey key) { KeyboardKey = key; ShiftCharacter = (char) key.codes[1]; } } public final static int KEYCODE_LANG_CHANGE = -99; public final static int KEYCODE_SMILEY = -10; //public final static int KEYCODE_DOT_COM = -80; public interface HardKeyboardAction { int getKeyCode(); boolean isAltActive(); boolean isShiftActive(); void setNewKeyCode(int keyCode); } public interface HardKeyboardTranslator { /* * Gets the current state of the hard keyboard, and may change the output key-code. */ void translatePhysicalCharacter(HardKeyboardAction action); } private static final int SHIFT_OFF = 0; private static final int SHIFT_ON = 1; private static final int SHIFT_LOCKED = 2; private int mShiftState = SHIFT_OFF; private final String mKeyboardPrefId; private final String mKeyboardName; private final boolean mLeftToRightLanguageDirection; private final Dictionary.Language mDefaultDictionaryLanguage; private final int mKeyboardIconId; //private final String mKeyboardPrefId; private HashMap<Character, ShiftedKeyData> mSpecialShiftKeys; // private Drawable mShiftLockIcon; // private Drawable mShiftLockPreviewIcon; // private Drawable mOldShiftIcon; // private Drawable mOldShiftPreviewIcon; // private Key mShiftKey; private Key mEnterKey; private Key mSmileyKey; private Key mQuestionMarkKey; private final AnyKeyboardContextProvider mKeyboardContext; protected AnyKeyboard(AnyKeyboardContextProvider context, String keyboardPrefId, int xmlLayoutResId, boolean supportsShift, /*mapping XML id will be added here,*/ int keyboardNameId, /*String keyboardEnabledPref,*/ boolean leftToRightLanguageDirection, Dictionary.Language defaultDictionaryLanguage, int keyboardIconId) { super(context.getApplicationContext(), xmlLayoutResId); mKeyboardContext = context; mKeyboardPrefId = keyboardPrefId; //mSupportsShift = supportsShift; if (keyboardNameId > 0) mKeyboardName = context.getApplicationContext().getResources().getString(keyboardNameId); else mKeyboardName = ""; mLeftToRightLanguageDirection = leftToRightLanguageDirection; mDefaultDictionaryLanguage = defaultDictionaryLanguage; mKeyboardIconId = keyboardIconId; //mKeyboardPrefId = keyboardEnabledPref; Log.i("AnySoftKeyboard", "Done creating keyboard: "+mKeyboardName); //TODO: parsing of the mapping xml: //XmlResourceParser p = getResources().getXml(id from the constructor parameter); //parse to a HashMap? 
//mTopKeys = new ArrayList<Key>(); // mShiftLockIcon = context.getApplicationContext().getResources().getDrawable(R.drawable.sym_keyboard_shift_locked); // mShiftLockPreviewIcon = context.getApplicationContext().getResources().getDrawable(R.drawable.sym_keyboard_feedback_shift_locked); // mShiftLockPreviewIcon.setBounds(0, 0, mShiftLockPreviewIcon.getIntrinsicWidth(),mShiftLockPreviewIcon.getIntrinsicHeight()); } protected AnyKeyboardContextProvider getKeyboardContext() { return mKeyboardContext; } public Dictionary.Language getDefaultDictionaryLanguage() { return mDefaultDictionaryLanguage; } //this function is called from within the super constructor. @Override protected Key createKeyFromXml(Resources res, Row parent, int x, int y, XmlResourceParser parser) { if (mSpecialShiftKeys == null) mSpecialShiftKeys = new HashMap<Character, ShiftedKeyData>(); AnyKey key = new AnyKey(res, parent, x, y, parser); if ((key.codes != null) && (key.codes.length > 0)) { //creating less sensitive keys if required switch(key.codes[0]) { case 10://enter case KEYCODE_DELETE://delete case KEYCODE_SHIFT://shift key = new LessSensitiveAnyKey(res, parent, x, y, parser); } if (key.codes[0] == 10) { mEnterKey = key; } else if ((key.codes[0] == AnyKeyboard.KEYCODE_SMILEY) && (parent.rowEdgeFlags == Keyboard.EDGE_BOTTOM)) { mSmileyKey = key; } else if ((key.codes[0] == 63) && (parent.rowEdgeFlags == Keyboard.EDGE_BOTTOM)) { mQuestionMarkKey = key; } else if ((key.codes[0] == Keyboard.KEYCODE_MODE_CHANGE) || (key.codes[0] == AnyKeyboard.KEYCODE_LANG_CHANGE)) { if (AnySoftKeyboard.mChangeKeysMode.equals("2")) { key.label = null; key.height = 0; key.width = 0; } else if (AnySoftKeyboard.mChangeKeysMode.equals("3")) { String keyText = (key.codes[0] == Keyboard.KEYCODE_MODE_CHANGE)? res.getString(R.string.change_symbols_regular) : res.getString(R.string.change_lang_regular); key.label = keyText; //key.height *= 1.5; } else { String keyText = (key.codes[0] == Keyboard.KEYCODE_MODE_CHANGE)? 
res.getString(R.string.change_symbols_wide) : res.getString(R.string.change_lang_wide); key.label = keyText; } } else { //setting the character label if (isAlphabetKey(key)) { key.label = ""+((char)key.codes[0]); } } } if (AnySoftKeyboard.getDEBUG()) Log.v("AnySoftKeyboard", "Key '"+key.codes[0]+"' will have - width: "+key.width+", height:"+key.height+", text: '"+key.label+"'."); setPopupKeyChars(key); if ((key.codes != null) && (key.codes.length > 1)) { int primaryCode = key.codes[0]; if ((primaryCode>0) && (primaryCode<Character.MAX_VALUE)) { Character primary = new Character((char)primaryCode); ShiftedKeyData keyData = new ShiftedKeyData(key); if (!mSpecialShiftKeys.containsKey(primary)) mSpecialShiftKeys.put(primary, keyData); if (AnySoftKeyboard.getDEBUG()) Log.v("AnySoftKeyboard", "Adding mapping ("+primary+"->"+keyData.ShiftCharacter+") to mSpecialShiftKeys."); } } return key; } @Override protected Row createRowFromXml(Resources res, XmlResourceParser parser) { Row aRow = super.createRowFromXml(res, parser); if ((aRow.rowEdgeFlags&EDGE_TOP) != 0) { //top row if (AnySoftKeyboard.mChangeKeysMode.equals("2")) aRow.defaultHeight = 0; else if (AnySoftKeyboard.mChangeKeysMode.equals("3")) aRow.defaultHeight *= 1.5; } return aRow; } private boolean isAlphabetKey(Key key) { return (!key.modifier) && (!key.sticky) && (!key.repeatable) && (key.icon == null) && (key.codes[0] > 0); } public boolean isLetter(char keyValue) { return (Character.isLetter(keyValue) || (keyValue == '\'')); } /** * This looks at the ime options given by the current editor, to set the * appropriate label on the keyboard's enter key (if it has one). */ public void setImeOptions(Resources res, int options) { if (AnySoftKeyboard.getDEBUG()) Log.d("AnySoftKeyboard", "AnyKeyboard.setImeOptions"); if (mEnterKey == null) { return; } switch (options&(EditorInfo.IME_MASK_ACTION|EditorInfo.IME_FLAG_NO_ENTER_ACTION)) { case EditorInfo.IME_ACTION_GO: mEnterKey.iconPreview = null; mEnterKey.icon = null; //there is a problem with LTR languages mEnterKey.label = Workarounds.workaroundCorrectStringDirection(res.getText(R.string.label_go_key)); break; case EditorInfo.IME_ACTION_NEXT: mEnterKey.iconPreview = null; mEnterKey.icon = null; //there is a problem with LTR languages mEnterKey.label = Workarounds.workaroundCorrectStringDirection(res.getText(R.string.label_next_key)); break; case EditorInfo.IME_ACTION_SEARCH: mEnterKey.icon = res.getDrawable(R.drawable.sym_keyboard_search); mEnterKey.label = null; break; case EditorInfo.IME_ACTION_SEND: mEnterKey.iconPreview = null; mEnterKey.icon = null; //there is a problem with LTR languages mEnterKey.label = Workarounds.workaroundCorrectStringDirection(res.getText(R.string.label_send_key)); break; default: mEnterKey.icon = res.getDrawable(R.drawable.sym_keyboard_return); mEnterKey.label = null; break; } } public String getKeyboardName() { //TODO: this should be taken from the strings.xml, right? 
return mKeyboardName; } public boolean isLeftToRightLanguage() { return mLeftToRightLanguageDirection; } /* * This function is overridden by other alphabet keyboards, for nifty icons */ public int getKeyboardIcon() { return mKeyboardIconId; } // public String getKeyboardKey() // { // return mKeyboardPrefId; // } // public boolean isLetter(char letterCode) // { // if (Character.isLetter(letterCode)) // return true; // else // return false; // } // public void addSuggestions(String currentWord, ArrayList<String> list) // { // } public void setShiftLocked(boolean shiftLocked) { // if (mShiftKey != null) { // if (shiftLocked) { // mShiftKey.on = true; // mShiftKey.icon = mShiftLockIcon; // mShiftState = SHIFT_LOCKED; // } else { // mShiftKey.on = false; // mShiftKey.icon = mShiftLockIcon; // mShiftState = SHIFT_ON; // } // } } @Override public boolean isShifted() { // if (mShiftKey != null) { // return mShiftState != SHIFT_OFF; // } else { return super.isShifted(); // } } // @Override // public boolean setShifted(boolean shiftState) { // boolean shiftChanged = false; // if (mShiftKey != null) { // if (shiftState == false) { // shiftChanged = mShiftState != SHIFT_OFF; // mShiftState = SHIFT_OFF; // mShiftKey.on = false; // mShiftKey.icon = mOldShiftIcon; // } else { // if (mShiftState == SHIFT_OFF) { // shiftChanged = mShiftState == SHIFT_OFF; // mShiftState = SHIFT_ON; // mShiftKey.icon = mShiftLockIcon; // } // } // } else { // return super.setShifted(shiftState); // } // return shiftChanged; // } @Override public boolean setShifted(boolean shiftState) { boolean result = super.setShifted(shiftState); if (AnySoftKeyboard.getDEBUG()) Log.d("AnySoftKeyboard", "setShifted: shiftState:"+shiftState+". result:"+result); mShiftState = shiftState? SHIFT_ON : SHIFT_OFF; if (result) {//layout changed. Need to change labels. //going over the special keys only. for(ShiftedKeyData data : mSpecialShiftKeys.values()) { onKeyShifted(data, shiftState); } // for(Key aKey : getKeys()) // { // onKeyShifted(aKey, shiftState); // } } return result; } public boolean isShiftLocked() { return mShiftState == SHIFT_LOCKED; } protected void onKeyShifted(ShiftedKeyData data, boolean shiftState) { AnyKey aKey = data.KeyboardKey; aKey.label = shiftState? ""+data.ShiftCharacter : ""+((char)aKey.codes[0]); // if (aKey.codes.length > 1) // { // aKey.label = shiftState? 
""+((char)aKey.codes[1]) : ""+((char)aKey.codes[0]); // Log.v("AnySoftKeyboard", "setShifted: changed key:"+aKey.label); // } // else // { // Log.v("AnySoftKeyboard", "setShifted: not changed key:"+aKey.label); // } } protected void setPopupKeyChars(Key aKey) { if ((aKey.codes != null) && (aKey.codes.length > 0)) { switch(((char)aKey.codes[0])) { case '\''://in the generic bottom row aKey.popupResId = R.xml.popup; aKey.popupCharacters = "\""; break; case '-': aKey.popupResId = R.xml.popup; aKey.popupCharacters = "\'\""; break; case '.'://in the generic bottom row aKey.popupResId = R.xml.popup; aKey.popupCharacters = ";:-_\u00b7"; break; case ','://in the generic bottom row aKey.popupResId = R.xml.popup; aKey.popupCharacters = "()"; break; case '_': aKey.popupResId = R.xml.popup; aKey.popupCharacters = ",-"; break; //the two below are switched in regular and Internet mode case '?'://in the generic bottom row aKey.popupResId = R.xml.popup; aKey.popupCharacters = "!/@\u00bf\u00a1"; break; case '@'://in the generic Internet mode aKey.popupResId = R.xml.popup; aKey.popupCharacters = "!/?\u00bf\u00a1"; break; } } } public void setTextVariation(Resources res, int inputType) { if (AnySoftKeyboard.getDEBUG()) Log.d("AnySoftKeyboard", "setTextVariation"); int variation = inputType & EditorInfo.TYPE_MASK_VARIATION; //if ((keyboardType == NextKeyboardType.Any) && // mInternetKeyboard.isEnabled() && // (variation == EditorInfo.TYPE_TEXT_VARIATION_EMAIL_ADDRESS // || variation == EditorInfo.TYPE_TEXT_VARIATION_URI)) { switch (variation) { case EditorInfo.TYPE_TEXT_VARIATION_EMAIL_ADDRESS: case EditorInfo.TYPE_TEXT_VARIATION_URI: if (mSmileyKey != null) { Log.d("AnySoftKeyboard", "Changing smiley key to domains."); mSmileyKey.iconPreview = null;// res.getDrawable(sym_keyboard_key_domain_preview); mSmileyKey.icon = res.getDrawable(R.drawable.sym_keyboard_key_domain); mSmileyKey.label = null; mSmileyKey.text = ".com"; mSmileyKey.popupResId = R.xml.popup_domains; } if (mQuestionMarkKey != null) { Log.d("AnySoftKeyboard", "Changing question mark key to AT."); mQuestionMarkKey.codes[0] = (int)'@'; mQuestionMarkKey.label = "@"; mQuestionMarkKey.popupCharacters = "!/?\u00bf\u00a1"; } break; default: if (mSmileyKey != null) { Log.d("AnySoftKeyboard", "Changing smiley key to smiley."); mSmileyKey.icon = res.getDrawable(R.drawable.sym_keyboard_smiley); mSmileyKey.label = null; mSmileyKey.text = ":-) "; mSmileyKey.popupResId = R.xml.popup_smileys; } if (mQuestionMarkKey != null) { Log.d("AnySoftKeyboard", "Changing question mark key to question."); mQuestionMarkKey.codes[0] = (int)'?'; mQuestionMarkKey.label = "?"; mQuestionMarkKey.popupCharacters = "!/@\u00bf\u00a1"; } break; } } public int getShiftedKeyValue(int primaryCode) { if ((primaryCode>0) && (primaryCode<Character.MAX_VALUE)) { Character c = new Character((char)primaryCode); if (mSpecialShiftKeys.containsKey(c)) { char shifted = mSpecialShiftKeys.get(c).ShiftCharacter; if (AnySoftKeyboard.getDEBUG()) Log.v("AnySoftKeyboard", "Returned the shifted mapping ("+c+"->"+shifted+") from mSpecialShiftKeys."); return shifted; } } //else...best try. 
return Character.toUpperCase(primaryCode); } class AnyKey extends Keyboard.Key { //private boolean mShiftLockEnabled; public AnyKey(Resources res, Keyboard.Row parent, int x, int y, XmlResourceParser parser) { super(res, parent, x, y, parser); if (popupCharacters != null && popupCharacters.length() == 0) { // If there is a keyboard with no keys specified in popupCharacters popupResId = 0; } } // void enableShiftLock() { // mShiftLockEnabled = true; // } // // @Override // public void onReleased(boolean inside) { // if (!mShiftLockEnabled) { // super.onReleased(inside); // } else { // pressed = !pressed; // } // } } class LessSensitiveAnyKey extends AnyKey { private int mStartX; private int mStartY; private int mEndX; private int mEndY; public LessSensitiveAnyKey(Resources res, Keyboard.Row parent, int x, int y, XmlResourceParser parser) { super(res, parent, x, y, parser); mStartX = this.x; mStartY = this.y; mEndX = this.width + this.x; mEndY = this.height + this.y; switch(codes[0]) { case 10://the enter key! //we want to "click" it only if it in the lower 80% mStartY += (this.height * 0.2); break; case KEYCODE_DELETE: //we want to "click" it only if it in the middle 80% //and in the right 80% mStartY += (this.height * 0.1); mEndY -= (this.height * 0.2); mStartX += (this.width * 0.1); break; case KEYCODE_SHIFT: //we want to "click" it only if it in the left 80% mEndX -= (this.width * 0.15); break; } } /** * Overriding this method so that we can reduce the target area for certain keys. */ @Override public boolean isInside(int clickedX, int clickedY) { return clickedX >= mStartX && clickedX <= mEndX && clickedY >= mStartY && clickedY <= mEndY; // int startX = this.x; // int startY = this.y; // int endX = this.width + this.x; // int endY = this.height + this.y; // // boolean isInside = false; // switch(codes[0]) // { // case 10://the enter key! // //we want to "click" it only if it in the lower 80% // startY += (this.height * 0.2); // isInside = checkIfInside(startX, startY, endX, endY, clickedX, clickedY); // break; // case KEYCODE_DELETE: // //we want to "click" it only if it in the middle 80% // //and in the right 80% // startY += (this.height * 0.1); // endY -= (this.height * 0.2); // startX += (this.width * 0.15); // isInside = checkIfInside(startX, startY, endX, endY, clickedX, clickedY); // break; // case KEYCODE_SHIFT: // //we want to "click" it only if it in the left 80% // endX -= (this.width * 0.2); // isInside = checkIfInside(startX, startY, endX, endY, clickedX, clickedY); // break; // default: // isInside = super.isInside(clickedX, clickedY); // break; // } // // //Log.d("AnySoftKeyboard", "Key "+codes[0]+" x:"+this.x+", y:"+this.y+", height:"+this.height+", width:"+this.width+", clickedX:"+clickedX+", clickedY:"+clickedY+" result:"+isInside); // // return isInside; } // private boolean checkIfInside(int startX, int startY, // int endX, int endY, // int clickedX, int clickedY) // { // return clickedX >= startX && // clickedX <= endX && // clickedY >= startY && // clickedY <= endY; // } } public String getKeyboardPrefId() { return mKeyboardPrefId; } }
src/com/menny/android/anysoftkeyboard/keyboards/AnyKeyboard.java
package com.menny.android.anysoftkeyboard.keyboards; import java.util.HashMap; import android.content.res.Resources; import android.content.res.XmlResourceParser; import android.inputmethodservice.Keyboard; import android.util.Log; import android.view.inputmethod.EditorInfo; import com.menny.android.anysoftkeyboard.AnyKeyboardContextProvider; import com.menny.android.anysoftkeyboard.AnySoftKeyboard; import com.menny.android.anysoftkeyboard.R; import com.menny.android.anysoftkeyboard.Workarounds; import com.menny.android.anysoftkeyboard.Dictionary.Dictionary; public abstract class AnyKeyboard extends Keyboard { protected class ShiftedKeyData { public final char ShiftCharacter; public final AnyKey KeyboardKey; public ShiftedKeyData(AnyKey key) { KeyboardKey = key; ShiftCharacter = (char) key.codes[1]; } } public final static int KEYCODE_LANG_CHANGE = -99; public final static int KEYCODE_SMILEY = -10; //public final static int KEYCODE_DOT_COM = -80; public interface HardKeyboardAction { int getKeyCode(); boolean isAltActive(); boolean isShiftActive(); void setNewKeyCode(int keyCode); } public interface HardKeyboardTranslator { /* * Gets the current state of the hard keyboard, and may change the output key-code. */ void translatePhysicalCharacter(HardKeyboardAction action); } private static final int SHIFT_OFF = 0; private static final int SHIFT_ON = 1; private static final int SHIFT_LOCKED = 2; private int mShiftState = SHIFT_OFF; private final String mKeyboardPrefId; private final String mKeyboardName; private final boolean mLeftToRightLanguageDirection; private final Dictionary.Language mDefaultDictionaryLanguage; private final int mKeyboardIconId; //private final String mKeyboardPrefId; private HashMap<Character, ShiftedKeyData> mSpecialShiftKeys; // private Drawable mShiftLockIcon; // private Drawable mShiftLockPreviewIcon; // private Drawable mOldShiftIcon; // private Drawable mOldShiftPreviewIcon; // private Key mShiftKey; private Key mEnterKey; private Key mSmileyKey; private Key mQuestionMarkKey; private final AnyKeyboardContextProvider mKeyboardContext; protected AnyKeyboard(AnyKeyboardContextProvider context, String keyboardPrefId, int xmlLayoutResId, boolean supportsShift, /*mapping XML id will be added here,*/ int keyboardNameId, /*String keyboardEnabledPref,*/ boolean leftToRightLanguageDirection, Dictionary.Language defaultDictionaryLanguage, int keyboardIconId) { super(context.getApplicationContext(), xmlLayoutResId); mKeyboardContext = context; mKeyboardPrefId = keyboardPrefId; //mSupportsShift = supportsShift; if (keyboardNameId > 0) mKeyboardName = context.getApplicationContext().getResources().getString(keyboardNameId); else mKeyboardName = ""; mLeftToRightLanguageDirection = leftToRightLanguageDirection; mDefaultDictionaryLanguage = defaultDictionaryLanguage; mKeyboardIconId = keyboardIconId; //mKeyboardPrefId = keyboardEnabledPref; Log.i("AnySoftKeyboard", "Done creating keyboard: "+mKeyboardName); //TODO: parsing of the mapping xml: //XmlResourceParser p = getResources().getXml(id from the constructor parameter); //parse to a HashMap? 
//mTopKeys = new ArrayList<Key>(); // mShiftLockIcon = context.getApplicationContext().getResources().getDrawable(R.drawable.sym_keyboard_shift_locked); // mShiftLockPreviewIcon = context.getApplicationContext().getResources().getDrawable(R.drawable.sym_keyboard_feedback_shift_locked); // mShiftLockPreviewIcon.setBounds(0, 0, mShiftLockPreviewIcon.getIntrinsicWidth(),mShiftLockPreviewIcon.getIntrinsicHeight()); } protected AnyKeyboardContextProvider getKeyboardContext() { return mKeyboardContext; } public Dictionary.Language getDefaultDictionaryLanguage() { return mDefaultDictionaryLanguage; } //this function is called from within the super constructor. @Override protected Key createKeyFromXml(Resources res, Row parent, int x, int y, XmlResourceParser parser) { if (mSpecialShiftKeys == null) mSpecialShiftKeys = new HashMap<Character, ShiftedKeyData>(); AnyKey key = new AnyKey(res, parent, x, y, parser); if ((key.codes != null) && (key.codes.length > 0)) { //creating less sensitive keys if required switch(key.codes[0]) { case 10://enter case KEYCODE_DELETE://delete case KEYCODE_SHIFT://shift key = new LessSensitiveAnyKey(res, parent, x, y, parser); } if (key.codes[0] == 10) { mEnterKey = key; } else if ((key.codes[0] == AnyKeyboard.KEYCODE_SMILEY) && (parent.rowEdgeFlags == Keyboard.EDGE_BOTTOM)) { mSmileyKey = key; } else if ((key.codes[0] == 63) && (parent.rowEdgeFlags == Keyboard.EDGE_BOTTOM)) { mQuestionMarkKey = key; } else if ((key.codes[0] == Keyboard.KEYCODE_MODE_CHANGE) || (key.codes[0] == AnyKeyboard.KEYCODE_LANG_CHANGE)) { if (AnySoftKeyboard.mChangeKeysMode.equals("2")) { key.label = null; key.height = 0; key.width = 0; } else if (AnySoftKeyboard.mChangeKeysMode.equals("3")) { String keyText = (key.codes[0] == Keyboard.KEYCODE_MODE_CHANGE)? res.getString(R.string.change_symbols_regular) : res.getString(R.string.change_lang_regular); key.label = keyText; //key.height *= 1.5; } else { String keyText = (key.codes[0] == Keyboard.KEYCODE_MODE_CHANGE)? 
res.getString(R.string.change_symbols_wide) : res.getString(R.string.change_lang_wide); key.label = keyText; } } else { //setting the character label if (isAlphabetKey(key)) { key.label = ""+((char)key.codes[0]); } } } if (AnySoftKeyboard.getDEBUG()) Log.v("AnySoftKeyboard", "Key '"+key.codes[0]+"' will have - width: "+key.width+", height:"+key.height+", text: '"+key.label+"'."); setPopupKeyChars(key); if ((key.codes != null) && (key.codes.length > 1)) { int primaryCode = key.codes[0]; if ((primaryCode>0) && (primaryCode<Character.MAX_VALUE)) { Character primary = new Character((char)primaryCode); ShiftedKeyData keyData = new ShiftedKeyData(key); if (!mSpecialShiftKeys.containsKey(primary)) mSpecialShiftKeys.put(primary, keyData); if (AnySoftKeyboard.getDEBUG()) Log.v("AnySoftKeyboard", "Adding mapping ("+primary+"->"+keyData.ShiftCharacter+") to mSpecialShiftKeys."); } } return key; } @Override protected Row createRowFromXml(Resources res, XmlResourceParser parser) { Row aRow = super.createRowFromXml(res, parser); if ((aRow.rowEdgeFlags&EDGE_TOP) != 0) { //top row if (AnySoftKeyboard.mChangeKeysMode.equals("2")) aRow.defaultHeight = 0; else if (AnySoftKeyboard.mChangeKeysMode.equals("3")) aRow.defaultHeight *= 1.5; } return aRow; } private boolean isAlphabetKey(Key key) { return (!key.modifier) && (!key.sticky) && (!key.repeatable) && (key.icon == null) && (key.codes[0] > 0); } public boolean isLetter(char keyValue) { return (Character.isLetter(keyValue) || (keyValue == '\'')); } /** * This looks at the ime options given by the current editor, to set the * appropriate label on the keyboard's enter key (if it has one). */ public void setImeOptions(Resources res, int options) { if (AnySoftKeyboard.getDEBUG()) Log.d("AnySoftKeyboard", "AnyKeyboard.setImeOptions"); if (mEnterKey == null) { return; } switch (options&(EditorInfo.IME_MASK_ACTION|EditorInfo.IME_FLAG_NO_ENTER_ACTION)) { case EditorInfo.IME_ACTION_GO: mEnterKey.iconPreview = null; mEnterKey.icon = null; //there is a problem with LTR languages mEnterKey.label = Workarounds.workaroundCorrectStringDirection(res.getText(R.string.label_go_key)); break; case EditorInfo.IME_ACTION_NEXT: mEnterKey.iconPreview = null; mEnterKey.icon = null; //there is a problem with LTR languages mEnterKey.label = Workarounds.workaroundCorrectStringDirection(res.getText(R.string.label_next_key)); break; case EditorInfo.IME_ACTION_SEARCH: mEnterKey.icon = res.getDrawable(R.drawable.sym_keyboard_search); mEnterKey.label = null; break; case EditorInfo.IME_ACTION_SEND: mEnterKey.iconPreview = null; mEnterKey.icon = null; //there is a problem with LTR languages mEnterKey.label = Workarounds.workaroundCorrectStringDirection(res.getText(R.string.label_send_key)); break; default: mEnterKey.icon = res.getDrawable(R.drawable.sym_keyboard_return); mEnterKey.label = null; break; } } public String getKeyboardName() { //TODO: this should be taken from the strings.xml, right? 
return mKeyboardName; } public boolean isLeftToRightLanguage() { return mLeftToRightLanguageDirection; } /* * This function is overridden by other alphabet keyboards, for nifty icons */ public int getKeyboardIcon() { return mKeyboardIconId; } // public String getKeyboardKey() // { // return mKeyboardPrefId; // } // public boolean isLetter(char letterCode) // { // if (Character.isLetter(letterCode)) // return true; // else // return false; // } // public void addSuggestions(String currentWord, ArrayList<String> list) // { // } public void setShiftLocked(boolean shiftLocked) { // if (mShiftKey != null) { // if (shiftLocked) { // mShiftKey.on = true; // mShiftKey.icon = mShiftLockIcon; // mShiftState = SHIFT_LOCKED; // } else { // mShiftKey.on = false; // mShiftKey.icon = mShiftLockIcon; // mShiftState = SHIFT_ON; // } // } } @Override public boolean isShifted() { // if (mShiftKey != null) { // return mShiftState != SHIFT_OFF; // } else { return super.isShifted(); // } } // @Override // public boolean setShifted(boolean shiftState) { // boolean shiftChanged = false; // if (mShiftKey != null) { // if (shiftState == false) { // shiftChanged = mShiftState != SHIFT_OFF; // mShiftState = SHIFT_OFF; // mShiftKey.on = false; // mShiftKey.icon = mOldShiftIcon; // } else { // if (mShiftState == SHIFT_OFF) { // shiftChanged = mShiftState == SHIFT_OFF; // mShiftState = SHIFT_ON; // mShiftKey.icon = mShiftLockIcon; // } // } // } else { // return super.setShifted(shiftState); // } // return shiftChanged; // } @Override public boolean setShifted(boolean shiftState) { boolean result = super.setShifted(shiftState); if (AnySoftKeyboard.getDEBUG()) Log.d("AnySoftKeyboard", "setShifted: shiftState:"+shiftState+". result:"+result); mShiftState = shiftState? SHIFT_ON : SHIFT_OFF; if (result) {//layout changed. Need to change labels. //going over the special keys only. for(ShiftedKeyData data : mSpecialShiftKeys.values()) { onKeyShifted(data, shiftState); } // for(Key aKey : getKeys()) // { // onKeyShifted(aKey, shiftState); // } } return result; } public boolean isShiftLocked() { return mShiftState == SHIFT_LOCKED; } protected void onKeyShifted(ShiftedKeyData data, boolean shiftState) { AnyKey aKey = data.KeyboardKey; aKey.label = shiftState? ""+data.ShiftCharacter : ""+((char)aKey.codes[0]); // if (aKey.codes.length > 1) // { // aKey.label = shiftState? 
""+((char)aKey.codes[1]) : ""+((char)aKey.codes[0]); // Log.v("AnySoftKeyboard", "setShifted: changed key:"+aKey.label); // } // else // { // Log.v("AnySoftKeyboard", "setShifted: not changed key:"+aKey.label); // } } protected void setPopupKeyChars(Key aKey) { if ((aKey.codes != null) && (aKey.codes.length > 0)) { switch(((char)aKey.codes[0])) { case '\''://in the generic bottom row aKey.popupResId = R.xml.popup; aKey.popupCharacters = "\""; break; case '-': aKey.popupResId = R.xml.popup; aKey.popupCharacters = "\'\""; break; case '.'://in the generic bottom row aKey.popupResId = R.xml.popup; aKey.popupCharacters = ";:-_\u00b7"; break; //ISSUE 96: duplicate // case ','://in the generic bottom row // aKey.popupResId = R.xml.popup; // aKey.popupCharacters = "'\""; // break; case '_': aKey.popupResId = R.xml.popup; aKey.popupCharacters = ",-"; break; //the two below are switched in regular and Internet mode case '?'://in the generic bottom row aKey.popupResId = R.xml.popup; aKey.popupCharacters = "!/@\u00bf\u00a1()"; break; case '@'://in the generic Internet mode aKey.popupResId = R.xml.popup; aKey.popupCharacters = "!/?\u00bf\u00a1()"; break; } } } public void setTextVariation(Resources res, int inputType) { if (AnySoftKeyboard.getDEBUG()) Log.d("AnySoftKeyboard", "setTextVariation"); int variation = inputType & EditorInfo.TYPE_MASK_VARIATION; //if ((keyboardType == NextKeyboardType.Any) && // mInternetKeyboard.isEnabled() && // (variation == EditorInfo.TYPE_TEXT_VARIATION_EMAIL_ADDRESS // || variation == EditorInfo.TYPE_TEXT_VARIATION_URI)) { switch (variation) { case EditorInfo.TYPE_TEXT_VARIATION_EMAIL_ADDRESS: case EditorInfo.TYPE_TEXT_VARIATION_URI: if (mSmileyKey != null) { Log.d("AnySoftKeyboard", "Changing smiley key to domains."); mSmileyKey.iconPreview = null;// res.getDrawable(sym_keyboard_key_domain_preview); mSmileyKey.icon = res.getDrawable(R.drawable.sym_keyboard_key_domain); mSmileyKey.label = null; mSmileyKey.text = ".com"; mSmileyKey.popupResId = R.xml.popup_domains; } if (mQuestionMarkKey != null) { Log.d("AnySoftKeyboard", "Changing question mark key to AT."); mQuestionMarkKey.codes[0] = (int)'@'; mQuestionMarkKey.label = "@"; mQuestionMarkKey.popupCharacters = "!/?\u00bf\u00a1"; } break; default: if (mSmileyKey != null) { Log.d("AnySoftKeyboard", "Changing smiley key to smiley."); mSmileyKey.icon = res.getDrawable(R.drawable.sym_keyboard_smiley); mSmileyKey.label = null; mSmileyKey.text = ":-) "; mSmileyKey.popupResId = R.xml.popup_smileys; } if (mQuestionMarkKey != null) { Log.d("AnySoftKeyboard", "Changing question mark key to question."); mQuestionMarkKey.codes[0] = (int)'?'; mQuestionMarkKey.label = "?"; mQuestionMarkKey.popupCharacters = "!/@\u00bf\u00a1"; } break; } } public int getShiftedKeyValue(int primaryCode) { if ((primaryCode>0) && (primaryCode<Character.MAX_VALUE)) { Character c = new Character((char)primaryCode); if (mSpecialShiftKeys.containsKey(c)) { char shifted = mSpecialShiftKeys.get(c).ShiftCharacter; if (AnySoftKeyboard.getDEBUG()) Log.v("AnySoftKeyboard", "Returned the shifted mapping ("+c+"->"+shifted+") from mSpecialShiftKeys."); return shifted; } } //else...best try. 
return Character.toUpperCase(primaryCode); } class AnyKey extends Keyboard.Key { //private boolean mShiftLockEnabled; public AnyKey(Resources res, Keyboard.Row parent, int x, int y, XmlResourceParser parser) { super(res, parent, x, y, parser); if (popupCharacters != null && popupCharacters.length() == 0) { // If there is a keyboard with no keys specified in popupCharacters popupResId = 0; } } // void enableShiftLock() { // mShiftLockEnabled = true; // } // // @Override // public void onReleased(boolean inside) { // if (!mShiftLockEnabled) { // super.onReleased(inside); // } else { // pressed = !pressed; // } // } } class LessSensitiveAnyKey extends AnyKey { private int mStartX; private int mStartY; private int mEndX; private int mEndY; public LessSensitiveAnyKey(Resources res, Keyboard.Row parent, int x, int y, XmlResourceParser parser) { super(res, parent, x, y, parser); mStartX = this.x; mStartY = this.y; mEndX = this.width + this.x; mEndY = this.height + this.y; switch(codes[0]) { case 10://the enter key! //we want to "click" it only if it in the lower 80% mStartY += (this.height * 0.2); break; case KEYCODE_DELETE: //we want to "click" it only if it in the middle 80% //and in the right 80% mStartY += (this.height * 0.1); mEndY -= (this.height * 0.2); mStartX += (this.width * 0.1); break; case KEYCODE_SHIFT: //we want to "click" it only if it in the left 80% mEndX -= (this.width * 0.15); break; } } /** * Overriding this method so that we can reduce the target area for certain keys. */ @Override public boolean isInside(int clickedX, int clickedY) { return clickedX >= mStartX && clickedX <= mEndX && clickedY >= mStartY && clickedY <= mEndY; // int startX = this.x; // int startY = this.y; // int endX = this.width + this.x; // int endY = this.height + this.y; // // boolean isInside = false; // switch(codes[0]) // { // case 10://the enter key! // //we want to "click" it only if it in the lower 80% // startY += (this.height * 0.2); // isInside = checkIfInside(startX, startY, endX, endY, clickedX, clickedY); // break; // case KEYCODE_DELETE: // //we want to "click" it only if it in the middle 80% // //and in the right 80% // startY += (this.height * 0.1); // endY -= (this.height * 0.2); // startX += (this.width * 0.15); // isInside = checkIfInside(startX, startY, endX, endY, clickedX, clickedY); // break; // case KEYCODE_SHIFT: // //we want to "click" it only if it in the left 80% // endX -= (this.width * 0.2); // isInside = checkIfInside(startX, startY, endX, endY, clickedX, clickedY); // break; // default: // isInside = super.isInside(clickedX, clickedY); // break; // } // // //Log.d("AnySoftKeyboard", "Key "+codes[0]+" x:"+this.x+", y:"+this.y+", height:"+this.height+", width:"+this.width+", clickedX:"+clickedX+", clickedY:"+clickedY+" result:"+isInside); // // return isInside; } // private boolean checkIfInside(int startX, int startY, // int endX, int endY, // int clickedX, int clickedY) // { // return clickedX >= startX && // clickedX <= endX && // clickedY >= startY && // clickedY <= endY; // } } public String getKeyboardPrefId() { return mKeyboardPrefId; } }
Issue 141
src/com/menny/android/anysoftkeyboard/keyboards/AnyKeyboard.java
Issue 141
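To make the shift handling in AnyKeyboard easier to follow, here is a small standalone sketch of the fallback logic in getShiftedKeyValue: keys that declare an explicit shifted code are looked up in a map, and everything else falls back to Character.toUpperCase. The class name and the map contents are hypothetical; only the lookup-then-uppercase behaviour mirrors the code above.

import java.util.HashMap;
import java.util.Map;

// Minimal sketch of the shift-mapping fallback used by getShiftedKeyValue():
// keys that define an explicit second code (codes[1]) get that character on
// shift, everything else falls back to Character.toUpperCase().
public class ShiftLookupSketch {

    // Hypothetical mapping, mirroring mSpecialShiftKeys (primary -> shifted).
    private static final Map<Character, Character> SPECIAL_SHIFT =
            new HashMap<Character, Character>();
    static {
        SPECIAL_SHIFT.put('1', '!'); // example: digit key carrying a symbol
        SPECIAL_SHIFT.put('/', '?');
    }

    static int shifted(int primaryCode) {
        if (primaryCode > 0 && primaryCode < Character.MAX_VALUE) {
            Character c = (char) primaryCode;
            Character mapped = SPECIAL_SHIFT.get(c);
            if (mapped != null) {
                return mapped;
            }
        }
        // Best effort for plain letters, as in the original code.
        return Character.toUpperCase(primaryCode);
    }

    public static void main(String[] args) {
        System.out.println((char) shifted('a')); // prints A
        System.out.println((char) shifted('/')); // prints ?
    }
}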
Java
apache-2.0
de7cd49ae6d67a226b7745eec056ccb03c0702e0
0
joansmith/supernode,bitsofproof/supernode
/* * Copyright 2012 Tamas Blummer [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.bitsofproof.supernode.model; import java.math.BigInteger; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import com.bitsofproof.supernode.core.AddressConverter; import com.bitsofproof.supernode.core.Chain; import com.bitsofproof.supernode.core.Difficulty; import com.bitsofproof.supernode.core.Hash; import com.bitsofproof.supernode.core.Script; import com.bitsofproof.supernode.core.Script.Opcode; import com.bitsofproof.supernode.core.ValidationException; import com.mysema.query.jpa.impl.JPADeleteClause; import com.mysema.query.jpa.impl.JPAQuery; @Component ("jpaBlockStore") class JpaBlockStore implements BlockStore { private static final Logger log = LoggerFactory.getLogger (JpaBlockStore.class); private static final long MAX_BLOCK_SIGOPS = 20000; @Autowired private Chain chain; @PersistenceContext private EntityManager entityManager; @Autowired private PlatformTransactionManager transactionManager; private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock (); private CachedHead currentHead = null; private final Map<String, CachedBlock> cachedBlocks = new HashMap<String, CachedBlock> (); private final Map<Long, CachedHead> cachedHeads = new HashMap<Long, CachedHead> (); private final ExecutorService inputProcessor = Executors.newFixedThreadPool (Runtime.getRuntime ().availableProcessors () * 2); private final ExecutorService transactionsProcessor = Executors.newFixedThreadPool (Runtime.getRuntime ().availableProcessors () * 2); private static class CachedHead { private Long id; private CachedBlock last; private double chainWork; private long height; private CachedHead previous; private final Set<CachedBlock> blocks = new HashSet<CachedBlock> (); public Long getId () { return id; } public void setId (Long id) { this.id = id; } public double getChainWork () { return chainWork; } public long getHeight () { return height; } public void setChainWork (double chainWork) { this.chainWork = chainWork; } public void setHeight (long height) { this.height = height; } public Set<CachedBlock> getBlocks () { return blocks; } public 
CachedHead getPrevious () { return previous; } public void setPrevious (CachedHead previous) { this.previous = previous; } public CachedBlock getLast () { return last; } public void setLast (CachedBlock last) { this.last = last; } } private static class CachedBlock { public CachedBlock (String hash, Long id, CachedBlock previous, long time) { this.hash = hash; this.id = id; this.previous = previous; this.time = time; } private final String hash; private final Long id; private final CachedBlock previous; private final long time; public Long getId () { return id; } public CachedBlock getPrevious () { return previous; } public long getTime () { return time; } public String getHash () { return hash; } @Override public int hashCode () { return hash.hashCode (); } } @Transactional (propagation = Propagation.REQUIRED) @Override public void cache () { try { lock.writeLock ().lock (); log.trace ("Filling chain cache with heads"); QHead head = QHead.head; JPAQuery q = new JPAQuery (entityManager); for ( Head h : q.from (head).list (head) ) { CachedHead sh = new CachedHead (); sh.setId (h.getId ()); sh.setChainWork (h.getChainWork ()); sh.setHeight (h.getHeight ()); if ( h.getPrevious () != null ) { sh.setPrevious (cachedHeads.get (h.getId ())); } cachedHeads.put (h.getId (), sh); if ( currentHead == null || currentHead.getChainWork () < sh.getChainWork () ) { currentHead = sh; } } log.trace ("Filling chain cache with stored blocks"); QBlk block = QBlk.blk; q = new JPAQuery (entityManager); for ( Blk b : q.from (block).list (block) ) { CachedBlock cb = null; if ( b.getPrevious () != null ) { cb = new CachedBlock (b.getHash (), b.getId (), cachedBlocks.get (b.getPrevious ().getHash ()), b.getCreateTime ()); } else { cb = new CachedBlock (b.getHash (), b.getId (), null, b.getCreateTime ()); } cachedBlocks.put (b.getHash (), cb); CachedHead h = cachedHeads.get (b.getHead ().getId ()); h.getBlocks ().add (cb); h.setLast (cb); } } finally { lock.writeLock ().unlock (); } } @Override @Transactional (propagation = Propagation.MANDATORY) public Blk getGenesisBlock () { try { lock.readLock ().lock (); QBlk block = QBlk.blk; JPAQuery q = new JPAQuery (entityManager); return q.from (block).orderBy (block.id.asc ()).limit (1).uniqueResult (block); } finally { lock.readLock ().unlock (); } } @Override public boolean isStoredBlock (String hash) { try { lock.readLock ().lock (); return cachedBlocks.get (hash) != null; } finally { lock.readLock ().unlock (); } } @Override public long getChainHeight () { try { lock.readLock ().lock (); return currentHead.getHeight (); } finally { lock.readLock ().unlock (); } } @Override public List<String> getInventory (List<String> locator, String last, int limit) { try { lock.readLock ().lock (); List<String> inventory = new LinkedList<String> (); CachedBlock curr = currentHead.getLast (); CachedBlock prev = curr.getPrevious (); if ( !last.equals (Hash.ZERO_HASH.toString ()) ) { while ( prev != null && !curr.equals (last) ) { curr = prev; prev = curr.getPrevious (); } } do { if ( locator.contains (curr) ) { break; } inventory.add (0, curr.getHash ()); if ( inventory.size () > limit ) { inventory.remove (limit); } curr = prev; if ( prev != null ) { prev = curr.getPrevious (); } } while ( curr != null ); return inventory; } finally { lock.readLock ().unlock (); } } @Override public List<String> getLocator () { try { lock.readLock ().lock (); List<String> locator = new ArrayList<String> (); CachedBlock curr = currentHead.getLast (); locator.add (curr.getHash ()); CachedBlock prev = 
curr.getPrevious (); for ( int i = 0, step = 1; prev != null; ++i ) { for ( int j = 0; prev != null && j < step; ++j ) { curr = prev; prev = curr.getPrevious (); } locator.add (curr.getHash ()); if ( i >= 10 ) { step *= 2; } } return locator; } finally { lock.readLock ().unlock (); } } @Override @Transactional (propagation = Propagation.MANDATORY) public List<TxIn> getSpent (List<String> addresses) { List<TxIn> spent = new ArrayList<TxIn> (); try { lock.readLock ().lock (); QTxOut txout = QTxOut.txOut; QTxIn txin = QTxIn.txIn; QOwner owner = QOwner.owner; JPAQuery query = new JPAQuery (entityManager); for ( TxIn in : query.from (txin).join (txin.source, txout).join (txout.owners, owner).where (owner.address.in (addresses)).list (txin) ) { CachedBlock blockOfIn = cachedBlocks.get (in.getTransaction ().getBlock ().getHash ()); if ( isBlockOnBranch (blockOfIn, currentHead) ) { spent.add (in); } } return spent; } finally { lock.readLock ().unlock (); } } @Override @Transactional (propagation = Propagation.MANDATORY) public List<TxOut> getReceived (List<String> addresses) { List<TxOut> received = new ArrayList<TxOut> (); try { lock.readLock ().lock (); QTxOut txout = QTxOut.txOut; QOwner owner = QOwner.owner; JPAQuery query = new JPAQuery (entityManager); for ( TxOut out : query.from (txout).join (txout.owners, owner).where (owner.address.in (addresses)).list (txout) ) { CachedBlock blockOfOut = cachedBlocks.get (out.getTransaction ().getBlock ().getHash ()); if ( isBlockOnBranch (blockOfOut, currentHead) ) { received.add (out); } } return received; } finally { lock.readLock ().unlock (); } } @Override @Transactional (propagation = Propagation.MANDATORY) public List<TxOut> getUnspentOutput (List<String> addresses) { try { lock.readLock ().lock (); QTxOut txout = QTxOut.txOut; QOwner owner = QOwner.owner; QUTxOut utxo = QUTxOut.uTxOut; JPAQuery query = new JPAQuery (entityManager); return query.from (utxo).join (utxo.txout, txout).join (txout.owners, owner).where (owner.address.in (addresses)).list (txout); } finally { lock.readLock ().unlock (); } } private static class TransactionContext { Blk block; BigInteger blkSumInput = BigInteger.ZERO; BigInteger blkSumOutput = BigInteger.ZERO; int nsigs = 0; boolean coinbase = true; Map<String, HashMap<Long, TxOut>> resolvedInputs = new HashMap<String, HashMap<Long, TxOut>> (); } @Transactional (propagation = Propagation.REQUIRED, rollbackFor = { Exception.class }) @Override public void storeBlock (Blk b) throws ValidationException { try { lock.writeLock ().lock (); lockedStoreBlock (b); } catch ( ValidationException e ) { throw e; } catch ( Exception e ) { throw new ValidationException ("OTHER exception " + b.toWireDump (), e); } finally { lock.writeLock ().unlock (); } } private void lockedStoreBlock (Blk b) throws ValidationException { CachedBlock cached = cachedBlocks.get (b.getHash ()); if ( cached != null ) { return; } // find previous block CachedBlock cachedPrevious = cachedBlocks.get (b.getPreviousHash ()); if ( cachedPrevious != null ) { Blk prev = null; prev = entityManager.find (Blk.class, (cachedPrevious).getId ()); b.setPrevious (prev); if ( b.getCreateTime () > (System.currentTimeMillis () / 1000) * 2 * 60 * 60 ) { throw new ValidationException ("Future generation attempt " + b.getHash ()); } Head head; if ( prev.getHead ().getLeaf ().equals (prev.getHash ()) ) { // continuing head = prev.getHead (); head.setLeaf (b.getHash ()); head.setHeight (head.getHeight () + 1); head.setChainWork (prev.getChainWork () + Difficulty.getDifficulty 
(b.getDifficultyTarget ())); head = entityManager.merge (head); } else { // branching head = new Head (); CachedBlock trunkBlock = cachedPrevious; while ( !isBlockOnBranch (trunkBlock, currentHead) ) { trunkBlock = trunkBlock.getPrevious (); } head.setTrunk (trunkBlock.hash); head.setPrevious (prev.getHead ()); head.setLeaf (b.getHash ()); head.setHeight (head.getHeight () + 1); head.setChainWork (prev.getChainWork () + Difficulty.getDifficulty (b.getDifficultyTarget ())); entityManager.persist (head); } b.setHead (head); b.setHeight (head.getHeight ()); b.setChainWork (head.getChainWork ()); if ( b.getHeight () >= chain.getDifficultyReviewBlocks () && b.getHeight () % chain.getDifficultyReviewBlocks () == 0 ) { CachedBlock c = null; CachedBlock p = cachedPrevious; for ( int i = 0; i < chain.getDifficultyReviewBlocks () - 1; ++i ) { c = p; p = c.getPrevious (); } long next = Difficulty.getNextTarget (prev.getCreateTime () - p.getTime (), prev.getDifficultyTarget (), chain.getTargetBlockTime ()); if ( chain.isProduction () && next != b.getDifficultyTarget () ) { throw new ValidationException ("Difficulty does not match expectation " + b.getHash () + " " + b.toWireDump ()); } } else { if ( chain.isProduction () && b.getDifficultyTarget () != prev.getDifficultyTarget () ) { throw new ValidationException ("Illegal attempt to change difficulty " + b.getHash ()); } } b.checkHash (); if ( chain.isProduction () && new Hash (b.getHash ()).toBigInteger ().compareTo (Difficulty.getTarget (b.getDifficultyTarget ())) > 0 ) { throw new ValidationException ("Insufficuent proof of work for current difficulty " + b.getHash () + " " + b.toWireDump ()); } b.parseTransactions (); if ( b.getTransactions ().isEmpty () ) { throw new ValidationException ("Block must have transactions " + b.getHash () + " " + b.toWireDump ()); } b.checkMerkleRoot (); final TransactionContext tcontext = new TransactionContext (); tcontext.block = b; log.trace ("resolving inputs for block " + b.getHash ()); Set<String> needTx = new HashSet<String> (); for ( Tx t : b.getTransactions () ) { HashMap<Long, TxOut> outs = tcontext.resolvedInputs.get (t.getHash ()); if ( outs == null ) { outs = new HashMap<Long, TxOut> (); tcontext.resolvedInputs.put (t.getHash (), outs); } for ( TxOut o : t.getOutputs () ) { outs.put (o.getIx (), o); } for ( TxIn i : t.getInputs () ) { if ( !i.getSourceHash ().equals (Hash.ZERO_HASH_STRING) ) { needTx.add (i.getSourceHash ()); } } } if ( !needTx.isEmpty () ) { resolveWithUTXO (tcontext, needTx); } boolean skip = true; for ( Tx t : b.getTransactions () ) { if ( skip ) // skip coinbase { skip = false; } else { checkTxInputsExist (tcontext, t); } } log.trace ("validating block " + b.getHash ()); List<Callable<TransactionValidationException>> callables = new ArrayList<Callable<TransactionValidationException>> (); for ( final Tx t : b.getTransactions () ) { if ( tcontext.coinbase ) { try { validateTransaction (tcontext, t); } catch ( TransactionValidationException e ) { throw new ValidationException (e.getMessage () + " " + t.toWireDump (), e); } } else { callables.add (new Callable<TransactionValidationException> () { @Override public TransactionValidationException call () { try { validateTransaction (tcontext, t); } catch ( TransactionValidationException e ) { return e; } catch ( Exception e ) { return new TransactionValidationException (e, t); } return null; } }); } } try { for ( Future<TransactionValidationException> e : transactionsProcessor.invokeAll (callables) ) { try { if ( e.get () != null ) { throw 
new ValidationException (e.get ().getMessage () + " " + e.get ().getTx ().toWireDump (), e.get ()); } } catch ( ExecutionException e1 ) { throw new ValidationException ("corrupted transaction processor", e1); } } } catch ( InterruptedException e1 ) { throw new ValidationException ("interrupted", e1); } // block reward could actually be less... as in 0000000000004c78956f8643262f3622acf22486b120421f893c0553702ba7b5 if ( tcontext.blkSumOutput.subtract (tcontext.blkSumInput).longValue () > ((50L * Tx.COIN) >> (b.getHeight () / 210000L)) ) { throw new ValidationException ("Invalid block reward " + b.getHash () + " " + b.toWireDump ()); } // this is last loop before persist since modifying the entities. for ( Tx t : b.getTransactions () ) { t.setBlock (b); for ( TxIn i : t.getInputs () ) { if ( !i.getSourceHash ().equals (Hash.ZERO_HASH_STRING) ) { i.setSource (tcontext.resolvedInputs.get (i.getSourceHash ()).get (i.getIx ())); } } for ( TxOut o : t.getOutputs () ) { addOwners (o); } } log.trace ("storing block " + b.getHash ()); entityManager.persist (b); // modify transient caches only after persistent changes CachedBlock m = new CachedBlock (b.getHash (), b.getId (), cachedBlocks.get (b.getPrevious ().getHash ()), b.getCreateTime ()); cachedBlocks.put (b.getHash (), m); CachedHead usingHead = cachedHeads.get (head.getId ()); if ( usingHead == null ) { cachedHeads.put (head.getId (), usingHead = new CachedHead ()); } usingHead.setLast (m); usingHead.setChainWork (b.getChainWork ()); usingHead.setHeight (b.getHeight ()); usingHead.getBlocks ().add (m); if ( usingHead.getChainWork () > currentHead.getChainWork () ) { // we have a new trunk // if branching from main we have to revert, then forward unspent cache if ( isBlockOnBranch (cachedBlocks.get (head.getTrunk ()), currentHead) ) { CachedBlock p = currentHead.getLast (); CachedBlock q = p.previous; while ( !q.hash.equals (head.getTrunk ()) ) { backwardUTXO (entityManager.find (Blk.class, p.id)); p = q; q = p.previous; } List<Long> pathToNewHead = new ArrayList<Long> (); p = cachedBlocks.get (usingHead.getLast ()); q = p.previous; while ( !q.hash.equals (head.getTrunk ()) ) { pathToNewHead.add (p.getId ()); } Collections.reverse (pathToNewHead); // spend what now came to trunk for ( Long id : pathToNewHead ) { forwardUTXO (entityManager.find (Blk.class, id)); } } } else if ( b.getHead ().getId () == currentHead.getId () ) { // spend if on the trunk forwardUTXO (b); } // now this is the new trunk currentHead = usingHead; log.trace ("stored block " + b.getHeight () + " " + b.getHash ()); } } private void backwardUTXO (Blk b) throws ValidationException { Set<TxOut> sources = new HashSet<TxOut> (); for ( Tx t : b.getTransactions () ) { sources.addAll (t.getOutputs ()); for ( TxIn in : t.getInputs () ) { if ( in.getSource () != null ) { if ( !sources.contains (in.getSource ()) ) { UTxOut utxo = new UTxOut (); utxo.setHash (in.getSource ().getTransaction ().getHash ()); utxo.setIx (in.getSource ().getIx ()); utxo.setTxout (in.getSource ()); entityManager.persist (utxo); } else { sources.remove (in.getSource ()); } } } } if ( !sources.isEmpty () ) { QUTxOut utxo = QUTxOut.uTxOut; JPADeleteClause d = new JPADeleteClause (entityManager, utxo); if ( d.where (utxo.txout.in (sources)).execute () != sources.size () ) { throw new ValidationException ("FATAL Inconsistent UTXO"); } } } private void forwardUTXO (Blk b) throws ValidationException { Set<TxOut> sources = new HashSet<TxOut> (); for ( Tx t : b.getTransactions () ) { for ( TxIn in : t.getInputs () ) 
{ if ( in.getSource () != null ) { sources.add (in.getSource ()); } } for ( TxOut out : t.getOutputs () ) { if ( !sources.contains (out) ) { UTxOut utxo = new UTxOut (); utxo.setHash (out.getTransaction ().getHash ()); utxo.setIx (out.getIx ()); utxo.setTxout (out); entityManager.persist (utxo); } else { sources.remove (out); } } } if ( !sources.isEmpty () ) { QUTxOut utxo = QUTxOut.uTxOut; JPADeleteClause d = new JPADeleteClause (entityManager, utxo); if ( d.where (utxo.txout.in (sources)).execute () != sources.size () ) { throw new ValidationException ("FATAL Inconsistent UTXO"); } } } private boolean isBlockOnBranch (CachedBlock block, CachedHead branch) { if ( branch.getBlocks ().contains (block) ) { return true; } if ( branch.getPrevious () == null ) { return false; } return isBlockOnBranch (block, branch.getPrevious ()); } private void resolveInputs (TransactionContext tcontext, Tx t) throws ValidationException { Set<String> needTx = new HashSet<String> (); for ( final TxIn i : t.getInputs () ) { HashMap<Long, TxOut> resolved; if ( (resolved = tcontext.resolvedInputs.get (i.getSourceHash ())) == null ) { resolved = new HashMap<Long, TxOut> (); tcontext.resolvedInputs.put (i.getSourceHash (), resolved); needTx.add (i.getSourceHash ()); } } if ( !needTx.isEmpty () ) { resolveWithUTXO (tcontext, needTx); checkTxInputsExist (tcontext, t); } } private void checkTxInputsExist (TransactionContext tcontext, Tx t) throws ValidationException { for ( final TxIn i : t.getInputs () ) { HashMap<Long, TxOut> resolved = tcontext.resolvedInputs.get (i.getSourceHash ()); if ( resolved == null ) { throw new ValidationException ("Transaction refers to unknown or spent transaction " + i.getSourceHash () + " " + t.toWireDump ()); } TxOut out = resolved.get (i.getIx ()); if ( out == null ) { throw new ValidationException ("Transaction refers to unknown or spent output " + i.getSourceHash () + " [" + i.getIx () + "] " + t.toWireDump ()); } if ( tcontext.block != null && out.getTransaction ().getHash ().equals (Hash.ZERO_HASH_STRING) ) { if ( out.getTransaction ().getBlock ().getHeight () > tcontext.block.getHeight () - 100 ) { throw new ValidationException ("coinbase spent too early " + t.toWireDump ()); } } } } private void resolveWithUTXO (TransactionContext tcontext, Set<String> needTx) { QUTxOut utxo = QUTxOut.uTxOut; QTxOut txout = QTxOut.txOut; JPAQuery query = new JPAQuery (entityManager); for ( Object[] o : query.from (utxo).join (utxo.txout, txout).where (utxo.hash.in (needTx)).list (utxo.hash, txout) ) { String hash = (String) o[0]; TxOut out = (TxOut) o[1]; HashMap<Long, TxOut> resolved = tcontext.resolvedInputs.get (hash); if ( resolved == null ) { resolved = new HashMap<Long, TxOut> (); tcontext.resolvedInputs.put (hash, resolved); } resolved.put (out.getIx (), out); } } private void validateTransaction (final TransactionContext tcontext, final Tx t) throws TransactionValidationException { if ( tcontext.block != null && tcontext.coinbase ) { if ( t.getInputs ().size () != 1 || !t.getInputs ().get (0).getSourceHash ().equals (Hash.ZERO_HASH.toString ()) || (chain.isProduction () && tcontext.block.getHeight () > 209378 && (t.getInputs ().get (0).getIx () != 0 || t.getInputs ().get (0) .getSequence () != 0xFFFFFFFFL)) ) { throw new TransactionValidationException ("first transaction must be coinbase ", t); } if ( t.getInputs ().get (0).getScript ().length > 100 || t.getInputs ().get (0).getScript ().length < 2 ) { throw new TransactionValidationException ("coinbase scriptsig must be in 2-100 ", t); 
} tcontext.coinbase = false; for ( TxOut o : t.getOutputs () ) { try { // some miner add 0 with garbage... if ( chain.isProduction () && o.getValue () != 0 && tcontext.block.getHeight () > 180000 && !Script.isStandard (o.getScript ()) ) { throw new TransactionValidationException ("Nonstandard script rejected", t); } tcontext.blkSumOutput = tcontext.blkSumOutput.add (BigInteger.valueOf (o.getValue ())); tcontext.nsigs += Script.sigOpCount (o.getScript ()); } catch ( ValidationException e ) { throw new TransactionValidationException (e, t); } } if ( tcontext.nsigs > MAX_BLOCK_SIGOPS ) { throw new TransactionValidationException ("too many signatures in this block ", t); } } else { if ( t.getInputs ().size () == 1 && t.getInputs ().get (0).getSourceHash ().equals (Hash.ZERO_HASH.toString ()) ) { throw new TransactionValidationException ("coinbase only first in a block", t); } if ( t.getOutputs ().isEmpty () ) { throw new TransactionValidationException ("Transaction must have outputs ", t); } if ( t.getInputs ().isEmpty () ) { throw new TransactionValidationException ("Transaction must have inputs ", t); } if ( tcontext.block != null && tcontext.block.getHeight () > 200000 ) { // BIP 0034 if ( t.getVersion () != 1 ) { throw new TransactionValidationException ("Transaction version must be 1", t); } if ( tcontext.block.getVersion () == 2 && tcontext.coinbase ) { try { if ( Script.intValue (Script.parse (t.getInputs ().get (0).getScript ()).get (0).data) != tcontext.block.getHeight () ) { throw new TransactionValidationException ("Block height mismatch in coinbase", t); } } catch ( ValidationException e ) { throw new TransactionValidationException (e, t); } } } long sumOut = 0; for ( TxOut o : t.getOutputs () ) { if ( o.getScript ().length > 520 ) { if ( tcontext.block != null && tcontext.block.getHeight () < 200000 ) { log.trace ("Old DoS at [" + tcontext.block.getHeight () + "]" + tcontext.block.getHash ()); } else { throw new TransactionValidationException ("script too long ", t); } } if ( chain.isProduction () ) { try { if ( tcontext.block.getHeight () > 180000 && !Script.isStandard (o.getScript ()) ) { throw new TransactionValidationException ("Nonstandard script rejected", t); } } catch ( ValidationException e ) { throw new TransactionValidationException (e, t); } } if ( tcontext.block != null ) { try { tcontext.nsigs += Script.sigOpCount (o.getScript ()); } catch ( ValidationException e ) { throw new TransactionValidationException (e, t); } if ( tcontext.nsigs > MAX_BLOCK_SIGOPS ) { throw new TransactionValidationException ("too many signatures in this block ", t); } } if ( o.getValue () < 0 || o.getValue () > Tx.MAX_MONEY ) { throw new TransactionValidationException ("Transaction output not in money range ", t); } tcontext.blkSumOutput = tcontext.blkSumOutput.add (BigInteger.valueOf (o.getValue ())); sumOut += o.getValue (); if ( sumOut < 0 || sumOut > Tx.MAX_MONEY ) { throw new TransactionValidationException ("Transaction output not in money range ", t); } } long sumIn = 0; int inNumber = 0; List<Callable<TransactionValidationException>> callables = new ArrayList<Callable<TransactionValidationException>> (); for ( final TxIn i : t.getInputs () ) { if ( i.getScript ().length > 520 ) { if ( tcontext.block == null || tcontext.block.getHeight () > 200000 ) { throw new TransactionValidationException ("script too long ", t); } } // this needs to be reset since looping gain over txin i.setSource (tcontext.resolvedInputs.get (i.getSourceHash ()).get (i.getIx ())); sumIn += i.getSource ().getValue 
(); final int nr = inNumber; callables.add (new Callable<TransactionValidationException> () { @Override public TransactionValidationException call () throws Exception { try { if ( !new Script (t, nr).evaluate (chain.isProduction ()) ) { return new TransactionValidationException ("The transaction script does not evaluate to true in input", t, nr); } synchronized ( tcontext ) { tcontext.blkSumInput = tcontext.blkSumInput.add (BigInteger.valueOf (i.getSource ().getValue ())); } } catch ( Exception e ) { return new TransactionValidationException (e, t, nr); } return null; } }); ++inNumber; } if ( sumOut > sumIn ) { throw new TransactionValidationException ("Transaction value out more than in", t); } if ( tcontext.block == null && (sumIn - sumOut) < Tx.COIN / 10000 ) { throw new TransactionValidationException ("There is no free lunch.", t); } List<Future<TransactionValidationException>> results; try { results = inputProcessor.invokeAll (callables); } catch ( InterruptedException e1 ) { throw new TransactionValidationException (e1, t); } for ( Future<TransactionValidationException> r : results ) { TransactionValidationException ex; try { ex = r.get (); } catch ( InterruptedException e ) { throw new TransactionValidationException (e, t); } catch ( ExecutionException e ) { throw new TransactionValidationException (e, t); } if ( ex != null ) { throw ex; } } } } private void addOwners (TxOut out) throws TransactionValidationException { List<Owner> owners = new ArrayList<Owner> (); parseOwners (out.getScript (), out, owners); out.setOwners (owners); } private void parseOwners (byte[] script, TxOut out, List<Owner> owners) throws TransactionValidationException { List<Script.Token> parsed; try { parsed = Script.parse (out.getScript ()); if ( parsed.size () == 2 && parsed.get (0).data != null && parsed.get (1).op == Opcode.OP_CHECKSIG ) { // pay to key Owner o = new Owner (); o.setAddress (AddressConverter.toSatoshiStyle (Hash.keyHash (parsed.get (0).data), false, chain)); o.setOutpoint (out); owners.add (o); out.setVotes (1L); } else if ( parsed.size () == 5 && parsed.get (0).op == Opcode.OP_DUP && parsed.get (1).op == Opcode.OP_HASH160 && parsed.get (2).data != null && parsed.get (3).op == Opcode.OP_EQUALVERIFY && parsed.get (4).op == Opcode.OP_CHECKSIG ) { // pay to address Owner o = new Owner (); o.setAddress (AddressConverter.toSatoshiStyle (parsed.get (2).data, false, chain)); o.setOutpoint (out); owners.add (o); out.setVotes (1L); } else if ( parsed.size () == 3 && parsed.get (0).op == Opcode.OP_HASH160 && parsed.get (1).data != null && parsed.get (1).data.length == 20 && parsed.get (2).op == Opcode.OP_EQUAL ) { byte[] hash = parsed.get (1).data; if ( hash.length == 20 ) { // BIP 0013 Owner o = new Owner (); o.setAddress (AddressConverter.toSatoshiStyle (hash, true, chain)); o.setOutpoint (out); owners.add (o); out.setVotes (1L); } } else { for ( int i = 0; i < parsed.size (); ++i ) { if ( parsed.get (i).op == Opcode.OP_CHECKMULTISIG || parsed.get (i).op == Opcode.OP_CHECKMULTISIGVERIFY ) { if ( chain.isProduction () ) { int nkeys = parsed.get (i - 1).op.ordinal () - Opcode.OP_1.ordinal () + 1; for ( int j = 0; j < nkeys; ++j ) { Owner o = new Owner (); o.setAddress (AddressConverter.toSatoshiStyle (Hash.keyHash (parsed.get (i - j - 2).data), true, chain)); o.setOutpoint (out); owners.add (o); } out.setVotes ((long) parsed.get (i - nkeys - 2).op.ordinal () - Opcode.OP_1.ordinal () + 1); return; } } } } } catch ( ValidationException e ) { throw new TransactionValidationException (e, 
out.getTransaction ()); } } @Override public String getHeadHash () { try { lock.readLock ().lock (); return currentHead.getLast ().getHash (); } finally { lock.readLock ().unlock (); } } @Override public boolean isEmpty () { try { lock.readLock ().lock (); QHead head = QHead.head; JPAQuery q = new JPAQuery (entityManager); return q.from (head).list (head).isEmpty (); } finally { lock.readLock ().unlock (); } } @Transactional (propagation = Propagation.REQUIRED, rollbackFor = { Exception.class }) @Override public void resetStore (Chain chain) throws TransactionValidationException { Blk genesis = chain.getGenesis (); addOwners (genesis.getTransactions ().get (0).getOutputs ().get (0)); Head h = new Head (); h.setLeaf (genesis.getHash ()); h.setHeight (0); h.setChainWork (Difficulty.getDifficulty (genesis.getDifficultyTarget ())); entityManager.persist (h); genesis.setHead (h); entityManager.persist (genesis); UTxOut utxo = new UTxOut (); utxo.setHash (genesis.getTransactions ().get (0).getHash ()); utxo.setIx (0); utxo.setTxout (genesis.getTransactions ().get (0).getOutputs ().get (0)); entityManager.persist (utxo); } @Transactional (propagation = Propagation.MANDATORY) @Override public Blk getBlock (String hash) { CachedBlock cached = null; try { lock.readLock ().lock (); cached = cachedBlocks.get (hash); if ( cached == null ) { return null; } } finally { lock.readLock ().unlock (); } return entityManager.find (Blk.class, cached.getId ()); } @Transactional (propagation = Propagation.REQUIRED) @Override public void validateTransaction (Tx t) throws ValidationException { try { lock.readLock ().lock (); TransactionContext tcontext = new TransactionContext (); tcontext.block = null; tcontext.coinbase = false; tcontext.nsigs = 0; resolveInputs (tcontext, t); validateTransaction (tcontext, t); } finally { lock.readLock ().unlock (); } } }
src/main/java/com/bitsofproof/supernode/model/JpaBlockStore.java
/* * Copyright 2012 Tamas Blummer [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.bitsofproof.supernode.model; import java.math.BigInteger; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import com.bitsofproof.supernode.core.AddressConverter; import com.bitsofproof.supernode.core.Chain; import com.bitsofproof.supernode.core.Difficulty; import com.bitsofproof.supernode.core.Hash; import com.bitsofproof.supernode.core.Script; import com.bitsofproof.supernode.core.Script.Opcode; import com.bitsofproof.supernode.core.ValidationException; import com.mysema.query.jpa.impl.JPADeleteClause; import com.mysema.query.jpa.impl.JPAQuery; @Component ("jpaBlockStore") class JpaBlockStore implements BlockStore { private static final Logger log = LoggerFactory.getLogger (JpaBlockStore.class); private static final long MAX_BLOCK_SIGOPS = 20000; @Autowired private Chain chain; @PersistenceContext private EntityManager entityManager; @Autowired private PlatformTransactionManager transactionManager; private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock (); private CachedHead currentHead = null; private final Map<String, CachedBlock> cachedBlocks = new HashMap<String, CachedBlock> (); private final Map<Long, CachedHead> cachedHeads = new HashMap<Long, CachedHead> (); private final ExecutorService inputProcessor = Executors.newFixedThreadPool (Runtime.getRuntime ().availableProcessors () * 2); private final ExecutorService transactionsProcessor = Executors.newFixedThreadPool (Runtime.getRuntime ().availableProcessors () * 2); private static class CachedHead { private Long id; private CachedBlock last; private double chainWork; private long height; private CachedHead previous; private final Set<CachedBlock> blocks = new HashSet<CachedBlock> (); public Long getId () { return id; } public void setId (Long id) { this.id = id; } public double getChainWork () { return chainWork; } public long getHeight () { return height; } public void setChainWork (double chainWork) { this.chainWork = chainWork; } public void setHeight (long height) { this.height = height; } public Set<CachedBlock> getBlocks () { return blocks; } public 
CachedHead getPrevious () { return previous; } public void setPrevious (CachedHead previous) { this.previous = previous; } public CachedBlock getLast () { return last; } public void setLast (CachedBlock last) { this.last = last; } } private static class CachedBlock { public CachedBlock (String hash, Long id, CachedBlock previous, long time) { this.hash = hash; this.id = id; this.previous = previous; this.time = time; } private final String hash; private final Long id; private final CachedBlock previous; private final long time; public Long getId () { return id; } public CachedBlock getPrevious () { return previous; } public long getTime () { return time; } public String getHash () { return hash; } @Override public int hashCode () { return hash.hashCode (); } } @Transactional (propagation = Propagation.REQUIRED) @Override public void cache () { try { lock.writeLock ().lock (); log.trace ("Filling chain cache with heads"); QHead head = QHead.head; JPAQuery q = new JPAQuery (entityManager); for ( Head h : q.from (head).list (head) ) { CachedHead sh = new CachedHead (); sh.setId (h.getId ()); sh.setChainWork (h.getChainWork ()); sh.setHeight (h.getHeight ()); if ( h.getPrevious () != null ) { sh.setPrevious (cachedHeads.get (h.getId ())); } cachedHeads.put (h.getId (), sh); if ( currentHead == null || currentHead.getChainWork () < sh.getChainWork () ) { currentHead = sh; } } log.trace ("Filling chain cache with stored blocks"); QBlk block = QBlk.blk; q = new JPAQuery (entityManager); for ( Blk b : q.from (block).list (block) ) { CachedBlock cb = null; if ( b.getPrevious () != null ) { cb = new CachedBlock (b.getHash (), b.getId (), cachedBlocks.get (b.getPrevious ().getHash ()), b.getCreateTime ()); } else { cb = new CachedBlock (b.getHash (), b.getId (), null, b.getCreateTime ()); } cachedBlocks.put (b.getHash (), cb); CachedHead h = cachedHeads.get (b.getHead ().getId ()); h.getBlocks ().add (cb); h.setLast (cb); } } finally { lock.writeLock ().unlock (); } } @Override @Transactional (propagation = Propagation.MANDATORY) public Blk getGenesisBlock () { try { lock.readLock ().lock (); QBlk block = QBlk.blk; JPAQuery q = new JPAQuery (entityManager); return q.from (block).orderBy (block.id.asc ()).limit (1).uniqueResult (block); } finally { lock.readLock ().unlock (); } } @Override public boolean isStoredBlock (String hash) { try { lock.readLock ().lock (); return cachedBlocks.get (hash) != null; } finally { lock.readLock ().unlock (); } } @Override public long getChainHeight () { try { lock.readLock ().lock (); return currentHead.getHeight (); } finally { lock.readLock ().unlock (); } } @Override public List<String> getInventory (List<String> locator, String last, int limit) { try { lock.readLock ().lock (); List<String> inventory = new LinkedList<String> (); CachedBlock curr = currentHead.getLast (); CachedBlock prev = curr.getPrevious (); if ( !last.equals (Hash.ZERO_HASH.toString ()) ) { while ( prev != null && !curr.equals (last) ) { curr = prev; prev = curr.getPrevious (); } } do { if ( locator.contains (curr) ) { break; } inventory.add (0, curr.getHash ()); if ( inventory.size () > limit ) { inventory.remove (limit); } curr = prev; if ( prev != null ) { prev = curr.getPrevious (); } } while ( curr != null ); return inventory; } finally { lock.readLock ().unlock (); } } @Override public List<String> getLocator () { try { lock.readLock ().lock (); List<String> locator = new ArrayList<String> (); CachedBlock curr = currentHead.getLast (); locator.add (curr.getHash ()); CachedBlock prev = 
curr.getPrevious (); for ( int i = 0, step = 1; prev != null; ++i ) { for ( int j = 0; prev != null && j < step; ++j ) { curr = prev; prev = curr.getPrevious (); } locator.add (curr.getHash ()); if ( i >= 10 ) { step *= 2; } } return locator; } finally { lock.readLock ().unlock (); } } @Override @Transactional (propagation = Propagation.MANDATORY) public List<TxIn> getSpent (List<String> addresses) { List<TxIn> spent = new ArrayList<TxIn> (); try { lock.readLock ().lock (); QTxOut txout = QTxOut.txOut; QTxIn txin = QTxIn.txIn; QOwner owner = QOwner.owner; JPAQuery query = new JPAQuery (entityManager); for ( TxIn in : query.from (txin).join (txin.source, txout).join (txout.owners, owner).where (owner.address.in (addresses)).list (txin) ) { CachedBlock blockOfIn = cachedBlocks.get (in.getTransaction ().getBlock ().getHash ()); if ( isBlockOnBranch (blockOfIn, currentHead) ) { spent.add (in); } } return spent; } finally { lock.readLock ().unlock (); } } @Override @Transactional (propagation = Propagation.MANDATORY) public List<TxOut> getReceived (List<String> addresses) { List<TxOut> received = new ArrayList<TxOut> (); try { lock.readLock ().lock (); QTxOut txout = QTxOut.txOut; QOwner owner = QOwner.owner; JPAQuery query = new JPAQuery (entityManager); for ( TxOut out : query.from (txout).join (txout.owners, owner).where (owner.address.in (addresses)).list (txout) ) { CachedBlock blockOfOut = cachedBlocks.get (out.getTransaction ().getBlock ().getHash ()); if ( isBlockOnBranch (blockOfOut, currentHead) ) { received.add (out); } } return received; } finally { lock.readLock ().unlock (); } } @Override @Transactional (propagation = Propagation.MANDATORY) public List<TxOut> getUnspentOutput (List<String> addresses) { try { lock.readLock ().lock (); QTxOut txout = QTxOut.txOut; QOwner owner = QOwner.owner; QUTxOut utxo = QUTxOut.uTxOut; JPAQuery query = new JPAQuery (entityManager); return query.from (utxo).join (utxo.txout, txout).join (txout.owners, owner).where (owner.address.in (addresses)).list (txout); } finally { lock.readLock ().unlock (); } } private static class TransactionContext { Blk block; BigInteger blkSumInput = BigInteger.ZERO; BigInteger blkSumOutput = BigInteger.ZERO; int nsigs = 0; boolean coinbase = true; Map<String, HashMap<Long, TxOut>> resolvedInputs = new HashMap<String, HashMap<Long, TxOut>> (); } @Transactional (propagation = Propagation.REQUIRED, rollbackFor = { Exception.class }) @Override public void storeBlock (Blk b) throws ValidationException { try { lock.writeLock ().lock (); lockedStoreBlock (b); } catch ( ValidationException e ) { throw e; } catch ( Exception e ) { throw new ValidationException ("OTHER exception " + b.toWireDump (), e); } finally { lock.writeLock ().unlock (); } } private void lockedStoreBlock (Blk b) throws ValidationException { CachedBlock cached = cachedBlocks.get (b.getHash ()); if ( cached != null ) { return; } // find previous block CachedBlock cachedPrevious = cachedBlocks.get (b.getPreviousHash ()); if ( cachedPrevious != null ) { Blk prev = null; prev = entityManager.find (Blk.class, (cachedPrevious).getId ()); b.setPrevious (prev); if ( b.getCreateTime () > (System.currentTimeMillis () / 1000) * 2 * 60 * 60 ) { throw new ValidationException ("Future generation attempt " + b.getHash ()); } Head head; if ( prev.getHead ().getLeaf ().equals (prev.getHash ()) ) { // continuing head = prev.getHead (); head.setLeaf (b.getHash ()); head.setHeight (head.getHeight () + 1); head.setChainWork (prev.getChainWork () + Difficulty.getDifficulty 
(b.getDifficultyTarget ())); head = entityManager.merge (head); } else { // branching head = new Head (); CachedBlock trunkBlock = cachedPrevious; while ( !isBlockOnBranch (trunkBlock, currentHead) ) { trunkBlock = trunkBlock.getPrevious (); } head.setTrunk (trunkBlock.hash); head.setPrevious (prev.getHead ()); head.setLeaf (b.getHash ()); head.setHeight (head.getHeight () + 1); head.setChainWork (prev.getChainWork () + Difficulty.getDifficulty (b.getDifficultyTarget ())); entityManager.persist (head); } b.setHead (head); b.setHeight (head.getHeight ()); b.setChainWork (head.getChainWork ()); if ( b.getHeight () >= chain.getDifficultyReviewBlocks () && b.getHeight () % chain.getDifficultyReviewBlocks () == 0 ) { CachedBlock c = null; CachedBlock p = cachedPrevious; for ( int i = 0; i < chain.getDifficultyReviewBlocks () - 1; ++i ) { c = p; p = c.getPrevious (); } long next = Difficulty.getNextTarget (prev.getCreateTime () - p.getTime (), prev.getDifficultyTarget (), chain.getTargetBlockTime ()); if ( chain.isProduction () && next != b.getDifficultyTarget () ) { throw new ValidationException ("Difficulty does not match expectation " + b.getHash () + " " + b.toWireDump ()); } } else { if ( chain.isProduction () && b.getDifficultyTarget () != prev.getDifficultyTarget () ) { throw new ValidationException ("Illegal attempt to change difficulty " + b.getHash ()); } } b.checkHash (); if ( chain.isProduction () && new Hash (b.getHash ()).toBigInteger ().compareTo (Difficulty.getTarget (b.getDifficultyTarget ())) > 0 ) { throw new ValidationException ("Insufficuent proof of work for current difficulty " + b.getHash () + " " + b.toWireDump ()); } b.parseTransactions (); if ( b.getTransactions ().isEmpty () ) { throw new ValidationException ("Block must have transactions " + b.getHash () + " " + b.toWireDump ()); } b.checkMerkleRoot (); final TransactionContext tcontext = new TransactionContext (); tcontext.block = b; log.trace ("resolving inputs for block " + b.getHash ()); Set<String> needTx = new HashSet<String> (); for ( Tx t : b.getTransactions () ) { HashMap<Long, TxOut> outs = tcontext.resolvedInputs.get (t.getHash ()); if ( outs == null ) { outs = new HashMap<Long, TxOut> (); tcontext.resolvedInputs.put (t.getHash (), outs); } for ( TxOut o : t.getOutputs () ) { outs.put (o.getIx (), o); } for ( TxIn i : t.getInputs () ) { if ( !i.getSourceHash ().equals (Hash.ZERO_HASH_STRING) ) { needTx.add (i.getSourceHash ()); } } } if ( !needTx.isEmpty () ) { resolveWithUTXO (tcontext, needTx); } boolean skip = true; for ( Tx t : b.getTransactions () ) { if ( skip ) // skip coinbase { skip = false; } else { checkTxInputsExist (tcontext, t); } } log.trace ("validating block " + b.getHash ()); List<Callable<TransactionValidationException>> callables = new ArrayList<Callable<TransactionValidationException>> (); for ( final Tx t : b.getTransactions () ) { if ( tcontext.coinbase ) { try { validateTransaction (tcontext, t); } catch ( TransactionValidationException e ) { throw new ValidationException (e.getMessage () + " " + t.toWireDump (), e); } } else { callables.add (new Callable<TransactionValidationException> () { @Override public TransactionValidationException call () { try { validateTransaction (tcontext, t); } catch ( TransactionValidationException e ) { return e; } catch ( Exception e ) { return new TransactionValidationException (e, t); } return null; } }); } } try { for ( Future<TransactionValidationException> e : transactionsProcessor.invokeAll (callables) ) { try { if ( e.get () != null ) { throw 
new ValidationException (e.get ().getMessage () + " " + e.get ().getTx ().toWireDump (), e.get ()); } } catch ( ExecutionException e1 ) { throw new ValidationException ("corrupted transaction processor", e1); } } } catch ( InterruptedException e1 ) { throw new ValidationException ("interrupted", e1); } // block reward could actually be less... as in 0000000000004c78956f8643262f3622acf22486b120421f893c0553702ba7b5 if ( tcontext.blkSumOutput.subtract (tcontext.blkSumInput).longValue () > ((50L * Tx.COIN) >> (b.getHeight () / 210000L)) ) { throw new ValidationException ("Invalid block reward " + b.getHash () + " " + b.toWireDump ()); } // this is last loop before persist since modifying the entities. for ( Tx t : b.getTransactions () ) { t.setBlock (b); for ( TxIn i : t.getInputs () ) { if ( !i.getSourceHash ().equals (Hash.ZERO_HASH_STRING) ) { i.setSource (tcontext.resolvedInputs.get (i.getSourceHash ()).get (i.getIx ())); } } for ( TxOut o : t.getOutputs () ) { addOwners (o); } } log.trace ("storing block " + b.getHash ()); entityManager.persist (b); // modify transient caches only after persistent changes CachedBlock m = new CachedBlock (b.getHash (), b.getId (), cachedBlocks.get (b.getPrevious ().getHash ()), b.getCreateTime ()); cachedBlocks.put (b.getHash (), m); CachedHead usingHead = cachedHeads.get (head.getId ()); if ( usingHead == null ) { cachedHeads.put (head.getId (), usingHead = new CachedHead ()); } usingHead.setLast (m); usingHead.setChainWork (b.getChainWork ()); usingHead.setHeight (b.getHeight ()); usingHead.getBlocks ().add (m); if ( usingHead.getChainWork () > currentHead.getChainWork () ) { // we have a new trunk // if branching from main we have to revert, then forward unspent cache if ( isBlockOnBranch (cachedBlocks.get (head.getTrunk ()), currentHead) ) { CachedBlock p = currentHead.getLast (); CachedBlock q = p.previous; while ( !q.hash.equals (head.getTrunk ()) ) { backwardUTXO (entityManager.find (Blk.class, p.id)); p = q; q = p.previous; } List<Long> pathToNewHead = new ArrayList<Long> (); p = cachedBlocks.get (usingHead.getLast ()); q = p.previous; while ( !q.hash.equals (head.getTrunk ()) ) { pathToNewHead.add (p.getId ()); } Collections.reverse (pathToNewHead); // spend what now came to trunk for ( Long id : pathToNewHead ) { forwardUTXO (entityManager.find (Blk.class, id)); } } } else if ( b.getHead ().getId () == currentHead.getId () ) { // spend if on the trunk forwardUTXO (b); } // now this is the new trunk currentHead = usingHead; log.trace ("stored block " + b.getHeight () + " " + b.getHash ()); } } private void backwardUTXO (Blk b) throws ValidationException { Set<TxOut> sources = new HashSet<TxOut> (); for ( Tx t : b.getTransactions () ) { sources.addAll (t.getOutputs ()); for ( TxIn in : t.getInputs () ) { if ( in.getSource () != null ) { if ( !sources.contains (in.getSource ()) ) { UTxOut utxo = new UTxOut (); utxo.setHash (in.getSource ().getTransaction ().getHash ()); utxo.setIx (in.getSource ().getIx ()); utxo.setTxout (in.getSource ()); entityManager.persist (utxo); } else { sources.remove (in.getSource ()); } } } } if ( !sources.isEmpty () ) { QUTxOut utxo = QUTxOut.uTxOut; JPADeleteClause d = new JPADeleteClause (entityManager, utxo); if ( d.where (utxo.txout.in (sources)).execute () != sources.size () ) { throw new ValidationException ("FATAL Inconsistent UTXO"); } } } private void forwardUTXO (Blk b) throws ValidationException { Set<TxOut> sources = new HashSet<TxOut> (); for ( Tx t : b.getTransactions () ) { for ( TxIn in : t.getInputs () ) 
{ if ( in.getSource () != null ) { sources.add (in.getSource ()); } } for ( TxOut out : t.getOutputs () ) { if ( !sources.contains (out) ) { UTxOut utxo = new UTxOut (); utxo.setHash (out.getTransaction ().getHash ()); utxo.setIx (out.getIx ()); utxo.setTxout (out); entityManager.persist (utxo); } else { sources.remove (out); } } } if ( !sources.isEmpty () ) { QUTxOut utxo = QUTxOut.uTxOut; JPADeleteClause d = new JPADeleteClause (entityManager, utxo); if ( d.where (utxo.txout.in (sources)).execute () != sources.size () ) { throw new ValidationException ("FATAL Inconsistent UTXO"); } } } private boolean isBlockOnBranch (CachedBlock block, CachedHead branch) { if ( branch.getBlocks ().contains (block) ) { return true; } if ( branch.getPrevious () == null ) { return false; } return isBlockOnBranch (block, branch.getPrevious ()); } private void resolveInputs (TransactionContext tcontext, Tx t) throws ValidationException { Set<String> needTx = new HashSet<String> (); for ( final TxIn i : t.getInputs () ) { HashMap<Long, TxOut> resolved; if ( (resolved = tcontext.resolvedInputs.get (i.getSourceHash ())) == null ) { resolved = new HashMap<Long, TxOut> (); tcontext.resolvedInputs.put (i.getSourceHash (), resolved); needTx.add (i.getSourceHash ()); } } if ( !needTx.isEmpty () ) { resolveWithUTXO (tcontext, needTx); checkTxInputsExist (tcontext, t); } } private void checkTxInputsExist (TransactionContext tcontext, Tx t) throws ValidationException { for ( final TxIn i : t.getInputs () ) { HashMap<Long, TxOut> resolved = tcontext.resolvedInputs.get (i.getSourceHash ()); if ( resolved == null ) { throw new ValidationException ("Transaction refers to unknown or spent transaction " + i.getSourceHash () + " " + t.toWireDump ()); } TxOut out = resolved.get (i.getIx ()); if ( out == null ) { throw new ValidationException ("Transaction refers to unknown or spent output " + i.getSourceHash () + " [" + i.getIx () + "] " + t.toWireDump ()); } if ( tcontext.block != null && out.getTransaction ().getHash ().equals (Hash.ZERO_HASH_STRING) ) { if ( out.getTransaction ().getBlock ().getHeight () > tcontext.block.getHeight () - 100 ) { throw new ValidationException ("coinbase spent too early " + t.toWireDump ()); } } } } private void resolveWithUTXO (TransactionContext tcontext, Set<String> needTx) { QUTxOut utxo = QUTxOut.uTxOut; JPAQuery query = new JPAQuery (entityManager); for ( UTxOut u : query.from (utxo).where (utxo.hash.in (needTx)).list (utxo) ) { HashMap<Long, TxOut> resolved = tcontext.resolvedInputs.get (u.getHash ()); if ( resolved == null ) { resolved = new HashMap<Long, TxOut> (); tcontext.resolvedInputs.put (u.getHash (), resolved); } resolved.put (u.getTxout ().getIx (), u.getTxout ()); } } private void validateTransaction (final TransactionContext tcontext, final Tx t) throws TransactionValidationException { if ( tcontext.block != null && tcontext.coinbase ) { if ( t.getInputs ().size () != 1 || !t.getInputs ().get (0).getSourceHash ().equals (Hash.ZERO_HASH.toString ()) || (chain.isProduction () && tcontext.block.getHeight () > 209378 && (t.getInputs ().get (0).getIx () != 0 || t.getInputs ().get (0) .getSequence () != 0xFFFFFFFFL)) ) { throw new TransactionValidationException ("first transaction must be coinbase ", t); } if ( t.getInputs ().get (0).getScript ().length > 100 || t.getInputs ().get (0).getScript ().length < 2 ) { throw new TransactionValidationException ("coinbase scriptsig must be in 2-100 ", t); } tcontext.coinbase = false; for ( TxOut o : t.getOutputs () ) { try { // some miner 
add 0 with garbage... if ( chain.isProduction () && o.getValue () != 0 && tcontext.block.getHeight () > 180000 && !Script.isStandard (o.getScript ()) ) { throw new TransactionValidationException ("Nonstandard script rejected", t); } tcontext.blkSumOutput = tcontext.blkSumOutput.add (BigInteger.valueOf (o.getValue ())); tcontext.nsigs += Script.sigOpCount (o.getScript ()); } catch ( ValidationException e ) { throw new TransactionValidationException (e, t); } } if ( tcontext.nsigs > MAX_BLOCK_SIGOPS ) { throw new TransactionValidationException ("too many signatures in this block ", t); } } else { if ( t.getInputs ().size () == 1 && t.getInputs ().get (0).getSourceHash ().equals (Hash.ZERO_HASH.toString ()) ) { throw new TransactionValidationException ("coinbase only first in a block", t); } if ( t.getOutputs ().isEmpty () ) { throw new TransactionValidationException ("Transaction must have outputs ", t); } if ( t.getInputs ().isEmpty () ) { throw new TransactionValidationException ("Transaction must have inputs ", t); } if ( tcontext.block != null && tcontext.block.getHeight () > 200000 ) { // BIP 0034 if ( t.getVersion () != 1 ) { throw new TransactionValidationException ("Transaction version must be 1", t); } if ( tcontext.block.getVersion () == 2 && tcontext.coinbase ) { try { if ( Script.intValue (Script.parse (t.getInputs ().get (0).getScript ()).get (0).data) != tcontext.block.getHeight () ) { throw new TransactionValidationException ("Block height mismatch in coinbase", t); } } catch ( ValidationException e ) { throw new TransactionValidationException (e, t); } } } long sumOut = 0; for ( TxOut o : t.getOutputs () ) { if ( o.getScript ().length > 520 ) { if ( tcontext.block != null && tcontext.block.getHeight () < 200000 ) { log.trace ("Old DoS at [" + tcontext.block.getHeight () + "]" + tcontext.block.getHash ()); } else { throw new TransactionValidationException ("script too long ", t); } } if ( chain.isProduction () ) { try { if ( tcontext.block.getHeight () > 180000 && !Script.isStandard (o.getScript ()) ) { throw new TransactionValidationException ("Nonstandard script rejected", t); } } catch ( ValidationException e ) { throw new TransactionValidationException (e, t); } } if ( tcontext.block != null ) { try { tcontext.nsigs += Script.sigOpCount (o.getScript ()); } catch ( ValidationException e ) { throw new TransactionValidationException (e, t); } if ( tcontext.nsigs > MAX_BLOCK_SIGOPS ) { throw new TransactionValidationException ("too many signatures in this block ", t); } } if ( o.getValue () < 0 || o.getValue () > Tx.MAX_MONEY ) { throw new TransactionValidationException ("Transaction output not in money range ", t); } tcontext.blkSumOutput = tcontext.blkSumOutput.add (BigInteger.valueOf (o.getValue ())); sumOut += o.getValue (); if ( sumOut < 0 || sumOut > Tx.MAX_MONEY ) { throw new TransactionValidationException ("Transaction output not in money range ", t); } } long sumIn = 0; int inNumber = 0; List<Callable<TransactionValidationException>> callables = new ArrayList<Callable<TransactionValidationException>> (); for ( final TxIn i : t.getInputs () ) { if ( i.getScript ().length > 520 ) { if ( tcontext.block == null || tcontext.block.getHeight () > 200000 ) { throw new TransactionValidationException ("script too long ", t); } } // this needs to be reset since looping gain over txin i.setSource (tcontext.resolvedInputs.get (i.getSourceHash ()).get (i.getIx ())); sumIn += i.getSource ().getValue (); final int nr = inNumber; callables.add (new 
Callable<TransactionValidationException> () { @Override public TransactionValidationException call () throws Exception { try { if ( !new Script (t, nr).evaluate (chain.isProduction ()) ) { return new TransactionValidationException ("The transaction script does not evaluate to true in input", t, nr); } synchronized ( tcontext ) { tcontext.blkSumInput = tcontext.blkSumInput.add (BigInteger.valueOf (i.getSource ().getValue ())); } } catch ( Exception e ) { return new TransactionValidationException (e, t, nr); } return null; } }); ++inNumber; } if ( sumOut > sumIn ) { throw new TransactionValidationException ("Transaction value out more than in", t); } if ( tcontext.block == null && (sumIn - sumOut) < Tx.COIN / 10000 ) { throw new TransactionValidationException ("There is no free lunch.", t); } List<Future<TransactionValidationException>> results; try { results = inputProcessor.invokeAll (callables); } catch ( InterruptedException e1 ) { throw new TransactionValidationException (e1, t); } for ( Future<TransactionValidationException> r : results ) { TransactionValidationException ex; try { ex = r.get (); } catch ( InterruptedException e ) { throw new TransactionValidationException (e, t); } catch ( ExecutionException e ) { throw new TransactionValidationException (e, t); } if ( ex != null ) { throw ex; } } } } private void addOwners (TxOut out) throws TransactionValidationException { List<Owner> owners = new ArrayList<Owner> (); parseOwners (out.getScript (), out, owners); out.setOwners (owners); } private void parseOwners (byte[] script, TxOut out, List<Owner> owners) throws TransactionValidationException { List<Script.Token> parsed; try { parsed = Script.parse (out.getScript ()); if ( parsed.size () == 2 && parsed.get (0).data != null && parsed.get (1).op == Opcode.OP_CHECKSIG ) { // pay to key Owner o = new Owner (); o.setAddress (AddressConverter.toSatoshiStyle (Hash.keyHash (parsed.get (0).data), false, chain)); o.setOutpoint (out); owners.add (o); out.setVotes (1L); } else if ( parsed.size () == 5 && parsed.get (0).op == Opcode.OP_DUP && parsed.get (1).op == Opcode.OP_HASH160 && parsed.get (2).data != null && parsed.get (3).op == Opcode.OP_EQUALVERIFY && parsed.get (4).op == Opcode.OP_CHECKSIG ) { // pay to address Owner o = new Owner (); o.setAddress (AddressConverter.toSatoshiStyle (parsed.get (2).data, false, chain)); o.setOutpoint (out); owners.add (o); out.setVotes (1L); } else if ( parsed.size () == 3 && parsed.get (0).op == Opcode.OP_HASH160 && parsed.get (1).data != null && parsed.get (1).data.length == 20 && parsed.get (2).op == Opcode.OP_EQUAL ) { byte[] hash = parsed.get (1).data; if ( hash.length == 20 ) { // BIP 0013 Owner o = new Owner (); o.setAddress (AddressConverter.toSatoshiStyle (hash, true, chain)); o.setOutpoint (out); owners.add (o); out.setVotes (1L); } } else { for ( int i = 0; i < parsed.size (); ++i ) { if ( parsed.get (i).op == Opcode.OP_CHECKMULTISIG || parsed.get (i).op == Opcode.OP_CHECKMULTISIGVERIFY ) { if ( chain.isProduction () ) { int nkeys = parsed.get (i - 1).op.ordinal () - Opcode.OP_1.ordinal () + 1; for ( int j = 0; j < nkeys; ++j ) { Owner o = new Owner (); o.setAddress (AddressConverter.toSatoshiStyle (Hash.keyHash (parsed.get (i - j - 2).data), true, chain)); o.setOutpoint (out); owners.add (o); } out.setVotes ((long) parsed.get (i - nkeys - 2).op.ordinal () - Opcode.OP_1.ordinal () + 1); return; } } } } } catch ( ValidationException e ) { throw new TransactionValidationException (e, out.getTransaction ()); } } @Override public String 
getHeadHash () { try { lock.readLock ().lock (); return currentHead.getLast ().getHash (); } finally { lock.readLock ().unlock (); } } @Override public boolean isEmpty () { try { lock.readLock ().lock (); QHead head = QHead.head; JPAQuery q = new JPAQuery (entityManager); return q.from (head).list (head).isEmpty (); } finally { lock.readLock ().unlock (); } } @Transactional (propagation = Propagation.REQUIRED, rollbackFor = { Exception.class }) @Override public void resetStore (Chain chain) throws TransactionValidationException { Blk genesis = chain.getGenesis (); addOwners (genesis.getTransactions ().get (0).getOutputs ().get (0)); Head h = new Head (); h.setLeaf (genesis.getHash ()); h.setHeight (0); h.setChainWork (Difficulty.getDifficulty (genesis.getDifficultyTarget ())); entityManager.persist (h); genesis.setHead (h); entityManager.persist (genesis); UTxOut utxo = new UTxOut (); utxo.setHash (genesis.getTransactions ().get (0).getHash ()); utxo.setIx (0); utxo.setTxout (genesis.getTransactions ().get (0).getOutputs ().get (0)); entityManager.persist (utxo); } @Transactional (propagation = Propagation.MANDATORY) @Override public Blk getBlock (String hash) { CachedBlock cached = null; try { lock.readLock ().lock (); cached = cachedBlocks.get (hash); if ( cached == null ) { return null; } } finally { lock.readLock ().unlock (); } return entityManager.find (Blk.class, cached.getId ()); } @Transactional (propagation = Propagation.REQUIRED) @Override public void validateTransaction (Tx t) throws ValidationException { try { lock.readLock ().lock (); TransactionContext tcontext = new TransactionContext (); tcontext.block = null; tcontext.coinbase = false; tcontext.nsigs = 0; resolveInputs (tcontext, t); validateTransaction (tcontext, t); } finally { lock.readLock ().unlock (); } } }
trying join with UTXO and TXOUT
src/main/java/com/bitsofproof/supernode/model/JpaBlockStore.java
trying join with UTXO and TXOUT
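The subject line "trying join with UTXO and TXOUT" points at the change in resolveWithUTXO: the old contents list UTxOut entities and navigate u.getTxout () row by row, while the new contents join the UTXO table to TXOUT and project the source hash together with the output in a single QueryDSL query. The following is a minimal before/after sketch of that pattern, not a drop-in replacement; the entity and metamodel types (UTxOut, TxOut, QUTxOut, QTxOut) and the injected EntityManager are the ones defined by the store above, and the class and method names used here are illustrative only.

import java.util.Set;

import javax.persistence.EntityManager;

import com.mysema.query.jpa.impl.JPAQuery;

// Illustrative sketch of the query change; entity and Q-types come from the supernode model above.
class UtxoJoinSketch
{
	private EntityManager entityManager; // injected, as in JpaBlockStore

	// Old shape: list UTxOut rows, then touch u.getTxout () for each one,
	// which costs an extra lazy fetch per unspent output.
	void resolveBeforeJoin (Set<String> needTx)
	{
		QUTxOut utxo = QUTxOut.uTxOut;
		for ( UTxOut u : new JPAQuery (entityManager).from (utxo).where (utxo.hash.in (needTx)).list (utxo) )
		{
			TxOut out = u.getTxout ();
			// ... index out by (u.getHash (), out.getIx ())
		}
	}

	// New shape: join UTXO with TXOUT and project hash + TxOut in one query,
	// so the outputs arrive without per-row navigation.
	void resolveWithJoin (Set<String> needTx)
	{
		QUTxOut utxo = QUTxOut.uTxOut;
		QTxOut txout = QTxOut.txOut;
		for ( Object[] row : new JPAQuery (entityManager).from (utxo).join (utxo.txout, txout)
				.where (utxo.hash.in (needTx)).list (utxo.hash, txout) )
		{
			String hash = (String) row[0];
			TxOut out = (TxOut) row[1];
			// ... index out by (hash, out.getIx ())
		}
	}
}

Projecting utxo.hash alongside the joined TxOut keeps the lookup to one round trip and avoids the per-row lazy loads of the earlier version, while the caller can still key the resolved outputs by source hash and output index.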
Java
apache-2.0
95b31ccd5716acda30f4b5a016718eb23eb1348a
0
cprice404/config,melrief/config,typesafehub/config,jasonchaffee/config,meln1k/config,yvvq/config,twitter-forks/config,roznalex/config,malexandert/config,leakingtapan/config,wmudge/config,roznalex/config,zeq9069/config,melrief/config,leakingtapan/config,cprice404/config,rmuhamedgaliev/config,rmuhamedgaliev/config,twitter-forks/config,wmudge/config,shrutig/config,fpringvaldsen/config,zeq9069/config,malexandert/config,shrutig/config,meln1k/config,jasonchaffee/config,Applied-Duality/config,typesafehub/config,fpringvaldsen/config,Applied-Duality/config,yvvq/config
/** * Copyright (C) 2011-2012 Typesafe Inc. <http://typesafe.com> */ package com.typesafe.config.impl; import java.io.File; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.Callable; import com.typesafe.config.Config; import com.typesafe.config.ConfigException; import com.typesafe.config.ConfigIncluder; import com.typesafe.config.ConfigObject; import com.typesafe.config.ConfigOrigin; import com.typesafe.config.ConfigParseOptions; import com.typesafe.config.ConfigParseable; import com.typesafe.config.ConfigValue; import com.typesafe.config.impl.SimpleIncluder.NameSource; /** This is public but is only supposed to be used by the "config" package */ public class ConfigImpl { private static class LoaderCache { private Config currentSystemProperties; private WeakReference<ClassLoader> currentLoader; private Map<String, Config> cache; LoaderCache() { this.currentSystemProperties = null; this.currentLoader = new WeakReference<ClassLoader>(null); this.cache = new HashMap<String, Config>(); } // for now, caching as long as the loader remains the same, // drop entire cache if it changes. synchronized Config getOrElseUpdate(ClassLoader loader, String key, Callable<Config> updater) { if (loader != currentLoader.get()) { // reset the cache if we start using a different loader cache.clear(); currentLoader = new WeakReference<ClassLoader>(loader); } Config systemProperties = systemPropertiesAsConfig(); if (systemProperties != currentSystemProperties) { cache.clear(); currentSystemProperties = systemProperties; } Config config = cache.get(key); if (config == null) { try { config = updater.call(); } catch (RuntimeException e) { throw e; // this will include ConfigException } catch (Exception e) { throw new ConfigException.Generic(e.getMessage(), e); } if (config == null) throw new ConfigException.BugOrBroken("null config from cache updater"); cache.put(key, config); } return config; } } private static class LoaderCacheHolder { static final LoaderCache cache = new LoaderCache(); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config computeCachedConfig(ClassLoader loader, String key, Callable<Config> updater) { LoaderCache cache; try { cache = LoaderCacheHolder.cache; } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } return cache.getOrElseUpdate(loader, key, updater); } static class FileNameSource implements SimpleIncluder.NameSource { @Override public ConfigParseable nameToParseable(String name, ConfigParseOptions parseOptions) { return Parseable.newFile(new File(name), parseOptions); } }; static class ClasspathNameSource implements SimpleIncluder.NameSource { @Override public ConfigParseable nameToParseable(String name, ConfigParseOptions parseOptions) { return Parseable.newResources(name, parseOptions); } }; static class ClasspathNameSourceWithClass implements SimpleIncluder.NameSource { final private Class<?> klass; public ClasspathNameSourceWithClass(Class<?> klass) { this.klass = klass; } @Override public ConfigParseable nameToParseable(String name, ConfigParseOptions parseOptions) { return Parseable.newResources(klass, name, parseOptions); } }; /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigObject parseResourcesAnySyntax(Class<?> klass, String resourceBasename, 
ConfigParseOptions baseOptions) { NameSource source = new ClasspathNameSourceWithClass(klass); return SimpleIncluder.fromBasename(source, resourceBasename, baseOptions); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigObject parseResourcesAnySyntax(String resourceBasename, ConfigParseOptions baseOptions) { NameSource source = new ClasspathNameSource(); return SimpleIncluder.fromBasename(source, resourceBasename, baseOptions); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigObject parseFileAnySyntax(File basename, ConfigParseOptions baseOptions) { NameSource source = new FileNameSource(); return SimpleIncluder.fromBasename(source, basename.getPath(), baseOptions); } static AbstractConfigObject emptyObject(String originDescription) { ConfigOrigin origin = originDescription != null ? SimpleConfigOrigin .newSimple(originDescription) : null; return emptyObject(origin); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config emptyConfig(String originDescription) { return emptyObject(originDescription).toConfig(); } static AbstractConfigObject empty(ConfigOrigin origin) { return emptyObject(origin); } // default origin for values created with fromAnyRef and no origin specified final private static ConfigOrigin defaultValueOrigin = SimpleConfigOrigin .newSimple("hardcoded value"); final private static ConfigBoolean defaultTrueValue = new ConfigBoolean( defaultValueOrigin, true); final private static ConfigBoolean defaultFalseValue = new ConfigBoolean( defaultValueOrigin, false); final private static ConfigNull defaultNullValue = new ConfigNull( defaultValueOrigin); final private static SimpleConfigList defaultEmptyList = new SimpleConfigList( defaultValueOrigin, Collections.<AbstractConfigValue> emptyList()); final private static SimpleConfigObject defaultEmptyObject = SimpleConfigObject .empty(defaultValueOrigin); private static SimpleConfigList emptyList(ConfigOrigin origin) { if (origin == null || origin == defaultValueOrigin) return defaultEmptyList; else return new SimpleConfigList(origin, Collections.<AbstractConfigValue> emptyList()); } private static AbstractConfigObject emptyObject(ConfigOrigin origin) { // we want null origin to go to SimpleConfigObject.empty() to get the // origin "empty config" rather than "hardcoded value" if (origin == defaultValueOrigin) return defaultEmptyObject; else return SimpleConfigObject.empty(origin); } private static ConfigOrigin valueOrigin(String originDescription) { if (originDescription == null) return defaultValueOrigin; else return SimpleConfigOrigin.newSimple(originDescription); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigValue fromAnyRef(Object object, String originDescription) { ConfigOrigin origin = valueOrigin(originDescription); return fromAnyRef(object, origin, FromMapMode.KEYS_ARE_KEYS); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigObject fromPathMap( Map<String, ? 
extends Object> pathMap, String originDescription) { ConfigOrigin origin = valueOrigin(originDescription); return (ConfigObject) fromAnyRef(pathMap, origin, FromMapMode.KEYS_ARE_PATHS); } static AbstractConfigValue fromAnyRef(Object object, ConfigOrigin origin, FromMapMode mapMode) { if (origin == null) throw new ConfigException.BugOrBroken( "origin not supposed to be null"); if (object == null) { if (origin != defaultValueOrigin) return new ConfigNull(origin); else return defaultNullValue; } else if (object instanceof Boolean) { if (origin != defaultValueOrigin) { return new ConfigBoolean(origin, (Boolean) object); } else if ((Boolean) object) { return defaultTrueValue; } else { return defaultFalseValue; } } else if (object instanceof String) { return new ConfigString(origin, (String) object); } else if (object instanceof Number) { // here we always keep the same type that was passed to us, // rather than figuring out if a Long would fit in an Int // or a Double has no fractional part. i.e. deliberately // not using ConfigNumber.newNumber() when we have a // Double, Integer, or Long. if (object instanceof Double) { return new ConfigDouble(origin, (Double) object, null); } else if (object instanceof Integer) { return new ConfigInt(origin, (Integer) object, null); } else if (object instanceof Long) { return new ConfigLong(origin, (Long) object, null); } else { return ConfigNumber.newNumber(origin, ((Number) object).doubleValue(), null); } } else if (object instanceof Map) { if (((Map<?, ?>) object).isEmpty()) return emptyObject(origin); if (mapMode == FromMapMode.KEYS_ARE_KEYS) { Map<String, AbstractConfigValue> values = new HashMap<String, AbstractConfigValue>(); for (Map.Entry<?, ?> entry : ((Map<?, ?>) object).entrySet()) { Object key = entry.getKey(); if (!(key instanceof String)) throw new ConfigException.BugOrBroken( "bug in method caller: not valid to create ConfigObject from map with non-String key: " + key); AbstractConfigValue value = fromAnyRef(entry.getValue(), origin, mapMode); values.put((String) key, value); } return new SimpleConfigObject(origin, values); } else { return PropertiesParser.fromPathMap(origin, (Map<?, ?>) object); } } else if (object instanceof Iterable) { Iterator<?> i = ((Iterable<?>) object).iterator(); if (!i.hasNext()) return emptyList(origin); List<AbstractConfigValue> values = new ArrayList<AbstractConfigValue>(); while (i.hasNext()) { AbstractConfigValue v = fromAnyRef(i.next(), origin, mapMode); values.add(v); } return new SimpleConfigList(origin, values); } else { throw new ConfigException.BugOrBroken( "bug in method caller: not valid to create ConfigValue from: " + object); } } private static class DefaultIncluderHolder { static final ConfigIncluder defaultIncluder = new SimpleIncluder(null); } static ConfigIncluder defaultIncluder() { try { return DefaultIncluderHolder.defaultIncluder; } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } } private static Properties getSystemProperties() { // Avoid ConcurrentModificationException due to parallel setting of system properties by copying properties final Properties systemProperties = System.getProperties(); final Properties systemPropertiesCopy = new Properties(); synchronized (systemProperties) { systemPropertiesCopy.putAll(systemProperties); } return systemPropertiesCopy; } private static AbstractConfigObject loadSystemProperties() { return (AbstractConfigObject) Parseable.newProperties(getSystemProperties(), 
ConfigParseOptions.defaults().setOriginDescription("system properties")).parse(); } private static class SystemPropertiesHolder { // this isn't final due to the reloadSystemPropertiesConfig() hack below static volatile AbstractConfigObject systemProperties = loadSystemProperties(); } static AbstractConfigObject systemPropertiesAsConfigObject() { try { return SystemPropertiesHolder.systemProperties; } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config systemPropertiesAsConfig() { return systemPropertiesAsConfigObject().toConfig(); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static void reloadSystemPropertiesConfig() { // ConfigFactory.invalidateCaches() relies on this having the side // effect that it drops all caches SystemPropertiesHolder.systemProperties = loadSystemProperties(); } private static AbstractConfigObject loadEnvVariables() { Map<String, String> env = System.getenv(); Map<String, AbstractConfigValue> m = new HashMap<String, AbstractConfigValue>(); for (Map.Entry<String, String> entry : env.entrySet()) { String key = entry.getKey(); m.put(key, new ConfigString(SimpleConfigOrigin.newSimple("env var " + key), entry .getValue())); } return new SimpleConfigObject(SimpleConfigOrigin.newSimple("env variables"), m, ResolveStatus.RESOLVED, false /* ignoresFallbacks */); } private static class EnvVariablesHolder { static final AbstractConfigObject envVariables = loadEnvVariables(); } static AbstractConfigObject envVariablesAsConfigObject() { try { return EnvVariablesHolder.envVariables; } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config envVariablesAsConfig() { return envVariablesAsConfigObject().toConfig(); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config defaultReference(final ClassLoader loader) { return computeCachedConfig(loader, "defaultReference", new Callable<Config>() { @Override public Config call() { Config unresolvedResources = Parseable .newResources("reference.conf", ConfigParseOptions.defaults().setClassLoader(loader)) .parse().toConfig(); return systemPropertiesAsConfig().withFallback(unresolvedResources).resolve(); } }); } private static class DebugHolder { private static String LOADS = "loads"; private static Map<String, Boolean> loadDiagnostics() { Map<String, Boolean> result = new HashMap<String, Boolean>(); result.put(LOADS, false); // People do -Dconfig.trace=foo,bar to enable tracing of different things String s = System.getProperty("config.trace"); if (s == null) { return result; } else { String[] keys = s.split(","); for (String k : keys) { if (k.equals(LOADS)) { result.put(LOADS, true); } else { System.err.println("config.trace property contains unknown trace topic '" + k + "'"); } } return result; } } private static final Map<String, Boolean> diagnostics = loadDiagnostics(); private static final boolean traceLoadsEnabled = diagnostics.get(LOADS); static boolean traceLoadsEnabled() { return traceLoadsEnabled; } } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static boolean traceLoadsEnabled() { try { return DebugHolder.traceLoadsEnabled(); } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } } public static void trace(String 
message) { System.err.println(message); } // the basic idea here is to add the "what" and have a canonical // toplevel error message. the "original" exception may however have extra // detail about what happened. call this if you have a better "what" than // further down on the stack. static ConfigException.NotResolved improveNotResolved(Path what, ConfigException.NotResolved original) { String newMessage = what.render() + " has not been resolved, you need to call Config#resolve()," + " see API docs for Config#resolve()"; if (newMessage.equals(original.getMessage())) return original; else return new ConfigException.NotResolved(newMessage, original); } }
config/src/main/java/com/typesafe/config/impl/ConfigImpl.java
/** * Copyright (C) 2011-2012 Typesafe Inc. <http://typesafe.com> */ package com.typesafe.config.impl; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.Callable; import com.typesafe.config.Config; import com.typesafe.config.ConfigException; import com.typesafe.config.ConfigIncluder; import com.typesafe.config.ConfigObject; import com.typesafe.config.ConfigOrigin; import com.typesafe.config.ConfigParseOptions; import com.typesafe.config.ConfigParseable; import com.typesafe.config.ConfigValue; import com.typesafe.config.impl.SimpleIncluder.NameSource; /** This is public but is only supposed to be used by the "config" package */ public class ConfigImpl { private static class LoaderCache { private Config currentSystemProperties; private ClassLoader currentLoader; private Map<String, Config> cache; LoaderCache() { this.currentSystemProperties = null; this.currentLoader = null; this.cache = new HashMap<String, Config>(); } // for now, caching as long as the loader remains the same, // drop entire cache if it changes. synchronized Config getOrElseUpdate(ClassLoader loader, String key, Callable<Config> updater) { if (loader != currentLoader) { // reset the cache if we start using a different loader cache.clear(); currentLoader = loader; } Config systemProperties = systemPropertiesAsConfig(); if (systemProperties != currentSystemProperties) { cache.clear(); currentSystemProperties = systemProperties; } Config config = cache.get(key); if (config == null) { try { config = updater.call(); } catch (RuntimeException e) { throw e; // this will include ConfigException } catch (Exception e) { throw new ConfigException.Generic(e.getMessage(), e); } if (config == null) throw new ConfigException.BugOrBroken("null config from cache updater"); cache.put(key, config); } return config; } } private static class LoaderCacheHolder { static final LoaderCache cache = new LoaderCache(); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config computeCachedConfig(ClassLoader loader, String key, Callable<Config> updater) { LoaderCache cache; try { cache = LoaderCacheHolder.cache; } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } return cache.getOrElseUpdate(loader, key, updater); } static class FileNameSource implements SimpleIncluder.NameSource { @Override public ConfigParseable nameToParseable(String name, ConfigParseOptions parseOptions) { return Parseable.newFile(new File(name), parseOptions); } }; static class ClasspathNameSource implements SimpleIncluder.NameSource { @Override public ConfigParseable nameToParseable(String name, ConfigParseOptions parseOptions) { return Parseable.newResources(name, parseOptions); } }; static class ClasspathNameSourceWithClass implements SimpleIncluder.NameSource { final private Class<?> klass; public ClasspathNameSourceWithClass(Class<?> klass) { this.klass = klass; } @Override public ConfigParseable nameToParseable(String name, ConfigParseOptions parseOptions) { return Parseable.newResources(klass, name, parseOptions); } }; /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigObject parseResourcesAnySyntax(Class<?> klass, String resourceBasename, ConfigParseOptions baseOptions) { NameSource source = new ClasspathNameSourceWithClass(klass); return 
SimpleIncluder.fromBasename(source, resourceBasename, baseOptions); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigObject parseResourcesAnySyntax(String resourceBasename, ConfigParseOptions baseOptions) { NameSource source = new ClasspathNameSource(); return SimpleIncluder.fromBasename(source, resourceBasename, baseOptions); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigObject parseFileAnySyntax(File basename, ConfigParseOptions baseOptions) { NameSource source = new FileNameSource(); return SimpleIncluder.fromBasename(source, basename.getPath(), baseOptions); } static AbstractConfigObject emptyObject(String originDescription) { ConfigOrigin origin = originDescription != null ? SimpleConfigOrigin .newSimple(originDescription) : null; return emptyObject(origin); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config emptyConfig(String originDescription) { return emptyObject(originDescription).toConfig(); } static AbstractConfigObject empty(ConfigOrigin origin) { return emptyObject(origin); } // default origin for values created with fromAnyRef and no origin specified final private static ConfigOrigin defaultValueOrigin = SimpleConfigOrigin .newSimple("hardcoded value"); final private static ConfigBoolean defaultTrueValue = new ConfigBoolean( defaultValueOrigin, true); final private static ConfigBoolean defaultFalseValue = new ConfigBoolean( defaultValueOrigin, false); final private static ConfigNull defaultNullValue = new ConfigNull( defaultValueOrigin); final private static SimpleConfigList defaultEmptyList = new SimpleConfigList( defaultValueOrigin, Collections.<AbstractConfigValue> emptyList()); final private static SimpleConfigObject defaultEmptyObject = SimpleConfigObject .empty(defaultValueOrigin); private static SimpleConfigList emptyList(ConfigOrigin origin) { if (origin == null || origin == defaultValueOrigin) return defaultEmptyList; else return new SimpleConfigList(origin, Collections.<AbstractConfigValue> emptyList()); } private static AbstractConfigObject emptyObject(ConfigOrigin origin) { // we want null origin to go to SimpleConfigObject.empty() to get the // origin "empty config" rather than "hardcoded value" if (origin == defaultValueOrigin) return defaultEmptyObject; else return SimpleConfigObject.empty(origin); } private static ConfigOrigin valueOrigin(String originDescription) { if (originDescription == null) return defaultValueOrigin; else return SimpleConfigOrigin.newSimple(originDescription); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigValue fromAnyRef(Object object, String originDescription) { ConfigOrigin origin = valueOrigin(originDescription); return fromAnyRef(object, origin, FromMapMode.KEYS_ARE_KEYS); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static ConfigObject fromPathMap( Map<String, ? 
extends Object> pathMap, String originDescription) { ConfigOrigin origin = valueOrigin(originDescription); return (ConfigObject) fromAnyRef(pathMap, origin, FromMapMode.KEYS_ARE_PATHS); } static AbstractConfigValue fromAnyRef(Object object, ConfigOrigin origin, FromMapMode mapMode) { if (origin == null) throw new ConfigException.BugOrBroken( "origin not supposed to be null"); if (object == null) { if (origin != defaultValueOrigin) return new ConfigNull(origin); else return defaultNullValue; } else if (object instanceof Boolean) { if (origin != defaultValueOrigin) { return new ConfigBoolean(origin, (Boolean) object); } else if ((Boolean) object) { return defaultTrueValue; } else { return defaultFalseValue; } } else if (object instanceof String) { return new ConfigString(origin, (String) object); } else if (object instanceof Number) { // here we always keep the same type that was passed to us, // rather than figuring out if a Long would fit in an Int // or a Double has no fractional part. i.e. deliberately // not using ConfigNumber.newNumber() when we have a // Double, Integer, or Long. if (object instanceof Double) { return new ConfigDouble(origin, (Double) object, null); } else if (object instanceof Integer) { return new ConfigInt(origin, (Integer) object, null); } else if (object instanceof Long) { return new ConfigLong(origin, (Long) object, null); } else { return ConfigNumber.newNumber(origin, ((Number) object).doubleValue(), null); } } else if (object instanceof Map) { if (((Map<?, ?>) object).isEmpty()) return emptyObject(origin); if (mapMode == FromMapMode.KEYS_ARE_KEYS) { Map<String, AbstractConfigValue> values = new HashMap<String, AbstractConfigValue>(); for (Map.Entry<?, ?> entry : ((Map<?, ?>) object).entrySet()) { Object key = entry.getKey(); if (!(key instanceof String)) throw new ConfigException.BugOrBroken( "bug in method caller: not valid to create ConfigObject from map with non-String key: " + key); AbstractConfigValue value = fromAnyRef(entry.getValue(), origin, mapMode); values.put((String) key, value); } return new SimpleConfigObject(origin, values); } else { return PropertiesParser.fromPathMap(origin, (Map<?, ?>) object); } } else if (object instanceof Iterable) { Iterator<?> i = ((Iterable<?>) object).iterator(); if (!i.hasNext()) return emptyList(origin); List<AbstractConfigValue> values = new ArrayList<AbstractConfigValue>(); while (i.hasNext()) { AbstractConfigValue v = fromAnyRef(i.next(), origin, mapMode); values.add(v); } return new SimpleConfigList(origin, values); } else { throw new ConfigException.BugOrBroken( "bug in method caller: not valid to create ConfigValue from: " + object); } } private static class DefaultIncluderHolder { static final ConfigIncluder defaultIncluder = new SimpleIncluder(null); } static ConfigIncluder defaultIncluder() { try { return DefaultIncluderHolder.defaultIncluder; } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } } private static Properties getSystemProperties() { // Avoid ConcurrentModificationException due to parallel setting of system properties by copying properties final Properties systemProperties = System.getProperties(); final Properties systemPropertiesCopy = new Properties(); synchronized (systemProperties) { systemPropertiesCopy.putAll(systemProperties); } return systemPropertiesCopy; } private static AbstractConfigObject loadSystemProperties() { return (AbstractConfigObject) Parseable.newProperties(getSystemProperties(), 
ConfigParseOptions.defaults().setOriginDescription("system properties")).parse(); } private static class SystemPropertiesHolder { // this isn't final due to the reloadSystemPropertiesConfig() hack below static volatile AbstractConfigObject systemProperties = loadSystemProperties(); } static AbstractConfigObject systemPropertiesAsConfigObject() { try { return SystemPropertiesHolder.systemProperties; } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config systemPropertiesAsConfig() { return systemPropertiesAsConfigObject().toConfig(); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static void reloadSystemPropertiesConfig() { // ConfigFactory.invalidateCaches() relies on this having the side // effect that it drops all caches SystemPropertiesHolder.systemProperties = loadSystemProperties(); } private static AbstractConfigObject loadEnvVariables() { Map<String, String> env = System.getenv(); Map<String, AbstractConfigValue> m = new HashMap<String, AbstractConfigValue>(); for (Map.Entry<String, String> entry : env.entrySet()) { String key = entry.getKey(); m.put(key, new ConfigString(SimpleConfigOrigin.newSimple("env var " + key), entry .getValue())); } return new SimpleConfigObject(SimpleConfigOrigin.newSimple("env variables"), m, ResolveStatus.RESOLVED, false /* ignoresFallbacks */); } private static class EnvVariablesHolder { static final AbstractConfigObject envVariables = loadEnvVariables(); } static AbstractConfigObject envVariablesAsConfigObject() { try { return EnvVariablesHolder.envVariables; } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config envVariablesAsConfig() { return envVariablesAsConfigObject().toConfig(); } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static Config defaultReference(final ClassLoader loader) { return computeCachedConfig(loader, "defaultReference", new Callable<Config>() { @Override public Config call() { Config unresolvedResources = Parseable .newResources("reference.conf", ConfigParseOptions.defaults().setClassLoader(loader)) .parse().toConfig(); return systemPropertiesAsConfig().withFallback(unresolvedResources).resolve(); } }); } private static class DebugHolder { private static String LOADS = "loads"; private static Map<String, Boolean> loadDiagnostics() { Map<String, Boolean> result = new HashMap<String, Boolean>(); result.put(LOADS, false); // People do -Dconfig.trace=foo,bar to enable tracing of different things String s = System.getProperty("config.trace"); if (s == null) { return result; } else { String[] keys = s.split(","); for (String k : keys) { if (k.equals(LOADS)) { result.put(LOADS, true); } else { System.err.println("config.trace property contains unknown trace topic '" + k + "'"); } } return result; } } private static final Map<String, Boolean> diagnostics = loadDiagnostics(); private static final boolean traceLoadsEnabled = diagnostics.get(LOADS); static boolean traceLoadsEnabled() { return traceLoadsEnabled; } } /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ public static boolean traceLoadsEnabled() { try { return DebugHolder.traceLoadsEnabled(); } catch (ExceptionInInitializerError e) { throw ConfigImplUtil.extractInitializerError(e); } } public static void trace(String 
message) { System.err.println(message); } // the basic idea here is to add the "what" and have a canonical // toplevel error message. the "original" exception may however have extra // detail about what happened. call this if you have a better "what" than // further down on the stack. static ConfigException.NotResolved improveNotResolved(Path what, ConfigException.NotResolved original) { String newMessage = what.render() + " has not been resolved, you need to call Config#resolve()," + " see API docs for Config#resolve()"; if (newMessage.equals(original.getMessage())) return original; else return new ConfigException.NotResolved(newMessage, original); } }
Use a WeakReference to cache class loaders

Fixes #171
config/src/main/java/com/typesafe/config/impl/ConfigImpl.java
Use a WeakReference to cache class loaders
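The commit above changes the loader cache so that the ClassLoader is held through a WeakReference; an unloaded class loader can then be garbage collected instead of being pinned by the config cache. Below is a minimal, self-contained sketch of that pattern; the class and method names (LoaderKeyedCache, getOrElseUpdate's standalone form) are illustrative only and are not part of the typesafe config API.

import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;

// Illustrative sketch (not library API): entries are keyed by string, but the
// whole cache is tied to a single ClassLoader that is held only weakly.
final class LoaderKeyedCache<V> {
    private WeakReference<ClassLoader> currentLoader =
            new WeakReference<ClassLoader>(null);
    private final Map<String, V> cache = new HashMap<String, V>();

    synchronized V getOrElseUpdate(ClassLoader loader, String key, Callable<V> updater)
            throws Exception {
        if (loader != currentLoader.get()) {
            // A different (or already collected) loader invalidates everything cached so far.
            cache.clear();
            currentLoader = new WeakReference<ClassLoader>(loader);
        }
        V value = cache.get(key);
        if (value == null) {
            value = updater.call();
            cache.put(key, value);
        }
        return value;
    }
}

Compared with the old contents of ConfigImpl, only the field type changes (ClassLoader becomes WeakReference<ClassLoader>) together with the loader != currentLoader.get() comparison and the re-wrapping on a loader change; the rest of the caching logic is unchanged.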
Java
apache-2.0
bd70528441890456283e5e2ecf8ca59493b7cdf6
0
IhorZak/ALog
/* * Copyright 2022 Ihor Zakhozhyi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ua.pp.ihorzak.alog; import android.annotation.SuppressLint; import java.io.File; import java.io.FileOutputStream; import java.io.PrintWriter; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.concurrent.Executor; import java.util.concurrent.Executors; /** * {@link ALogPrinter} implementation that uses file to print logging messages. * * @author Ihor Zakhozhyi <[email protected]> */ final class FileALogPrinter implements ALogPrinter { private static final String THREAD_NAME = "FileALogPrinter"; private static final String FORMAT_DATE_TIME_PATTERN = "yyyy-MM-dd HH:mm:ss.SSS"; private final String mFilePath; private final boolean mAppend; private final Executor mExecutor; private final DateFormat mDateFormat; private PrintWriter mPrintWriter; private boolean mHasInitializationFailed; /** * Constructor. * * @param filePath Path of the file to print logging messages into. * @param append True if file should be appended in case it exists, false if file content * should be overwritten in case it exists. */ @SuppressLint("SimpleDateFormat") FileALogPrinter(String filePath, boolean append) { mFilePath = filePath; mAppend = append; mExecutor = Executors.newSingleThreadExecutor(runnable -> new Thread(runnable, THREAD_NAME)); mDateFormat = new SimpleDateFormat(FORMAT_DATE_TIME_PATTERN); mHasInitializationFailed = false; } @SuppressWarnings("ResultOfMethodCallIgnored") @Override public void print(ALogLevel level, CharSequence tag, CharSequence message) { if (!mHasInitializationFailed) { mExecutor.execute( () -> { if (mPrintWriter == null) { try { File file = new File(mFilePath); File directoryFile = file.getParentFile(); if (directoryFile != null) { directoryFile.mkdirs(); } mPrintWriter = new PrintWriter(new FileOutputStream(file, mAppend)); } catch (Throwable throwable) { mHasInitializationFailed = true; throwable.printStackTrace(); } } if (mPrintWriter != null) { mPrintWriter.append(mDateFormat.format(new Date())) .append(' ') .append(level.getLabel()) .append(' ') .append(tag) .append(' ') .append(message); if (message.length() == 0 || message.charAt(message.length() - 1) != '\n') { mPrintWriter.println(); } mPrintWriter.flush(); } } ); } } }
library/src/main/java/ua/pp/ihorzak/alog/FileALogPrinter.java
/* * Copyright 2022 Ihor Zakhozhyi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ua.pp.ihorzak.alog; import android.annotation.SuppressLint; import java.io.File; import java.io.FileOutputStream; import java.io.PrintWriter; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.concurrent.Executor; import java.util.concurrent.Executors; /** * {@link ALogPrinter} implementation that uses file to print logging messages. * * @author Ihor Zakhozhyi <[email protected]> */ final class FileALogPrinter implements ALogPrinter { private static final String THREAD_NAME = "FileALogPrinter"; private static final String FORMAT_DATE_TIME_PATTERN = "yyyy-MM-dd HH:mm:ss.SSS"; private final String mFilePath; private final boolean mAppend; private final Executor mExecutor; private final DateFormat mDateFormat; private PrintWriter mPrintWriter; private boolean mHasInitializationFailed; /** * Constructor. * * @param filePath Path of the file to print logging messages into. * @param append True if file should be appended in case it exists, false if file content * should be overwritten in case it exists. */ @SuppressLint("SimpleDateFormat") FileALogPrinter(String filePath, boolean append) { mFilePath = filePath; mAppend = append; mExecutor = Executors.newSingleThreadExecutor(runnable -> new Thread(runnable, THREAD_NAME)); mDateFormat = new SimpleDateFormat(FORMAT_DATE_TIME_PATTERN); mHasInitializationFailed = false; } @SuppressWarnings("ResultOfMethodCallIgnored") @Override public void print(ALogLevel level, CharSequence tag, CharSequence message) { if (!mHasInitializationFailed) { mExecutor.execute( () -> { if (mPrintWriter == null) { try { File file = new File(mFilePath); File directoryFile = file.getParentFile(); if (directoryFile != null) { directoryFile.mkdirs(); } mPrintWriter = new PrintWriter(new FileOutputStream(file, mAppend)); } catch (Throwable throwable) { mHasInitializationFailed = true; throwable.printStackTrace(); } } if (mPrintWriter != null) { mPrintWriter.append(mDateFormat.format(new Date())) .append(' ') .append(level.getLabel()) .append(' ') .append(tag) .append(' ') .append(message) .append('\n') .flush(); } } ); } } }
Fix end new line handling in FileALogPrinter
library/src/main/java/ua/pp/ihorzak/alog/FileALogPrinter.java
Fix end new line handling in FileALogPrinter
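The fix above replaces the unconditional trailing '\n' of the old FileALogPrinter with a check that a line terminator is written only when the message does not already end with one. A small sketch of that behaviour, assuming a plain PrintWriter and an illustrative helper name:

import java.io.PrintWriter;

final class NewlineAwareLogWriter {
    // Append the message and terminate the line exactly once:
    // messages that already end with '\n' are not given a second newline.
    static void printMessage(PrintWriter writer, CharSequence message) {
        writer.append(message);
        if (message.length() == 0 || message.charAt(message.length() - 1) != '\n') {
            writer.println();
        }
        writer.flush();
    }
}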
Java
apache-2.0
819a9250c9c5ddee37af71668b99785abb92b431
0
ublanquet/training-java,ublanquet/training-java,ublanquet/training-java
package persistance.dao; import persistance.model.Company; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.time.LocalDateTime; import java.util.ArrayList; public class DaoCompany implements DaoCompanyI { /** * create company in db. * @param c company obj * @return generated id */ public Long create(Company c) { Connection connect = Utils.getConnection(); long generatedKey = 0; try { PreparedStatement p = connect.prepareStatement("INSERT INTO company " + "(name) " + "VALUES (?)", Statement.RETURN_GENERATED_KEYS); p.setString(1, c.getName()); long affectedRows = p.executeUpdate(); if (affectedRows > 0) { generatedKey = Utils.getGeneratedKey(p); c.setId(generatedKey); } p.close(); LOGGER.info(" Company created, generated ID : " + generatedKey); } catch (SQLException e) { LOGGER.error("Error creating company" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return generatedKey; } /** * uate company in db. * @param c company obj */ public void update(Company c) { Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("UPDATE company SET " + "name = ? " + " WHERE company.id = ?"); p.setString(1, c.getName()); long affectedRows = p.executeUpdate(); p.close(); LOGGER.info(affectedRows + " rows updated"); } catch (SQLException e) { LOGGER.error("Error updating entry" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } } /** * retrieve all companies. * @param min offset * @param max nb to return * @return companies list */ public ArrayList<Company> selectAll(Long min, Long max) { ArrayList<Company> resultList = new ArrayList<>(); ResultSet rs; Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("SELECT * FROM company " + "LIMIT ?, ?"); p.setLong(1, min); p.setLong(2, max); rs = p.executeQuery(); while (rs.next()) { Company c = new Company(rs.getLong("id"), rs.getString("name")); resultList.add(c); } rs.close(); p.close(); } catch (SQLException e) { LOGGER.error("Error retrieving companies" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return resultList; } /** * retrieve all companies. * @return companies list */ public ArrayList<Company> selectAll() { ArrayList<Company> resultList = new ArrayList<>(); ResultSet rs; Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("SELECT * FROM company;"); rs = p.executeQuery(); while (rs.next()) { Company c = new Company(rs.getLong("id"), rs.getString("name")); resultList.add(c); } rs.close(); p.close(); } catch (SQLException e) { LOGGER.error("Error retrieving companies" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return resultList; } /** * retrieve company by id. 
* @param id id * @return company obj */ public Company getById(Long id) { ResultSet rs; Company c = new Company(); Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("SELECT * FROM company WHERE company.id = ?"); p.setLong(1, id); rs = p.executeQuery(); while (rs.next()) { LocalDateTime intro = rs.getTimestamp("introduced") == null ? null : rs.getTimestamp("introduced").toLocalDateTime(); LocalDateTime disco = rs.getTimestamp("discontinued") == null ? null : rs.getTimestamp("discontinued").toLocalDateTime(); c = new Company(rs.getLong("id"), rs.getString("name")); } rs.close(); p.close(); } catch (SQLException e) { LOGGER.error("Error retrieving company of ID " + id + "%n" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return c; } /** * delete from db. * @param id id * @return deleted rows numbers */ public int delete(Long id) { Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("DELETE FROM company WHERE company.id = ?"); p.setLong(1, id); int affectedRows = p.executeUpdate(); p.close(); LOGGER.info(affectedRows + " company deleted, id : " + id); return affectedRows; } catch (SQLException e) { LOGGER.error("Error deleting company of ID " + id + "%n" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return 0; } }
src/main/java/persistance/dao/DaoCompany.java
package persistance.dao; import persistance.model.Company; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.time.LocalDateTime; import java.util.ArrayList; public class DaoCompany implements DaoCompanyI { /** * create company in db. * @param c company obj * @return generated id */ public Long create(Company c) { Connection connect = Utils.getConnection(); long generatedKey = 0; try { PreparedStatement p = connect.prepareStatement("INSERT INTO company " + "(name) " + "VALUES (?)", Statement.RETURN_GENERATED_KEYS); p.setString(1, c.getName()); long affectedRows = p.executeUpdate(); if (affectedRows > 0) { generatedKey = Utils.getGeneratedKey(p); c.setId(generatedKey); } p.close(); LOGGER.info(" Company created, generated ID : " + generatedKey); } catch (SQLException e) { LOGGER.error("Error creating company" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return generatedKey; } /** * uate company in db. * @param c company obj */ public void update(Company c) { Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("UPDATE company SET " + "name = ? " + " WHERE company.id = ?"); p.setString(1, c.getName()); long affectedRows = p.executeUpdate(); p.close(); LOGGER.info(affectedRows + " rows updated"); } catch (SQLException e) { LOGGER.error("Error updating entry" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } } /** * retrieve all companies. * @param min offset * @param max nb to return * @return companies list */ public ArrayList<Company> selectAll(Long min, Long max) { ArrayList<Company> resultList = new ArrayList<>(); ResultSet rs; Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("SELECT * FROM company " + "LIMIT ?, ?"); p.setLong(1, min); p.setLong(2, max); rs = p.executeQuery(); while (rs.next()) { Company c = new Company(rs.getLong("id"), rs.getString("name")); resultList.add(c); } rs.close(); p.close(); } catch (SQLException e) { LOGGER.error("Error retrieving companies" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return resultList; } /** * retrieve all companies. * @return companies list */ public ArrayList<Company> selectAll() { ArrayList<Company> resultList = new ArrayList<>(); ResultSet rs; Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("SELECT * FROM company;"); rs = p.executeQuery(); while (rs.next()) { Company c = new Company(rs.getLong("id"), rs.getString("name")); resultList.add(c); } rs.close(); p.close(); } catch (SQLException e) { LOGGER.error("Error retrieving companies" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return resultList; } /** * retrieve company by id. 
* @param id id * @return company obj */ public Company getById(Long id) { ResultSet rs; Company c = new Company(); Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("SELECT * FROM company WHERE company.id = ?"); p.setLong(1, id); rs = p.executeQuery(); while (rs.next()) { LocalDateTime intro = rs.getTimestamp("introduced") == null ? null : rs.getTimestamp("introduced").toLocalDateTime(); LocalDateTime disco = rs.getTimestamp("discontinued") == null ? null : rs.getTimestamp("discontinued").toLocalDateTime(); c = new Company(rs.getLong("id"), rs.getString("name")); } rs.close(); p.close(); } catch (SQLException e) { LOGGER.error("Error retrieving company of ID " + id + "%n" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return c; } /** * delete from db. * @param id id * @return deleted rows numbers */ public int delete(Long id) { Connection connect = Utils.getConnection(); try { PreparedStatement p = connect.prepareStatement("DELETE FROM company WHERE company.id = ?"); p.setLong(1, id); int affectedRows = p.executeUpdate(); p.close(); LOGGER.info(affectedRows + " company deleted, id : " + id); return affectedRows; } catch (SQLException e) { LOGGER.error("Error deleting company of ID " + id + "%n" + e.getMessage() + e.getSQLState() + e.getStackTrace()); } finally { try { connect.close(); } catch (SQLException ex) { LOGGER.error("Error closing connection"); } } return 0; } /** * start transaction. */ public void startTransaction() { Utils.startTransaction(); } /** * commit transaction. */ public void commitTransaction() { Utils.commitTransaction(); } }
[refactor]persistance utils cleanup
src/main/java/persistance/dao/DaoCompany.java
[refactor]persistance utils cleanup
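For reference, the cleanup above removes two pass-through helpers from DaoCompany; they only delegated to the project's Utils transaction methods, which are assumed to be defined elsewhere in the repository. The removed code, taken from the old contents:

// Removed by this commit: DaoCompany no longer exposes transaction control itself.
/**
 * start transaction.
 */
public void startTransaction() {
    Utils.startTransaction();
}

/**
 * commit transaction.
 */
public void commitTransaction() {
    Utils.commitTransaction();
}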
Java
apache-2.0
6689f958d34d9f1fb3875fe32e48a254b1530fe7
0
laborautonomo/jitsi,martin7890/jitsi,dkcreinoso/jitsi,459below/jitsi,marclaporte/jitsi,damencho/jitsi,laborautonomo/jitsi,jibaro/jitsi,cobratbq/jitsi,Metaswitch/jitsi,iant-gmbh/jitsi,iant-gmbh/jitsi,level7systems/jitsi,iant-gmbh/jitsi,bhatvv/jitsi,marclaporte/jitsi,dkcreinoso/jitsi,martin7890/jitsi,bhatvv/jitsi,Metaswitch/jitsi,pplatek/jitsi,ibauersachs/jitsi,bebo/jitsi,cobratbq/jitsi,pplatek/jitsi,HelioGuilherme66/jitsi,martin7890/jitsi,jitsi/jitsi,tuijldert/jitsi,cobratbq/jitsi,procandi/jitsi,martin7890/jitsi,laborautonomo/jitsi,ringdna/jitsi,procandi/jitsi,dkcreinoso/jitsi,ibauersachs/jitsi,dkcreinoso/jitsi,ibauersachs/jitsi,gpolitis/jitsi,mckayclarey/jitsi,bebo/jitsi,ringdna/jitsi,mckayclarey/jitsi,dkcreinoso/jitsi,cobratbq/jitsi,tuijldert/jitsi,cobratbq/jitsi,jitsi/jitsi,procandi/jitsi,mckayclarey/jitsi,mckayclarey/jitsi,pplatek/jitsi,pplatek/jitsi,level7systems/jitsi,bebo/jitsi,procandi/jitsi,ibauersachs/jitsi,jibaro/jitsi,laborautonomo/jitsi,ringdna/jitsi,martin7890/jitsi,ringdna/jitsi,gpolitis/jitsi,pplatek/jitsi,mckayclarey/jitsi,bebo/jitsi,gpolitis/jitsi,tuijldert/jitsi,Metaswitch/jitsi,tuijldert/jitsi,bhatvv/jitsi,459below/jitsi,level7systems/jitsi,damencho/jitsi,HelioGuilherme66/jitsi,ringdna/jitsi,jibaro/jitsi,tuijldert/jitsi,HelioGuilherme66/jitsi,459below/jitsi,level7systems/jitsi,procandi/jitsi,iant-gmbh/jitsi,bhatvv/jitsi,bebo/jitsi,459below/jitsi,ibauersachs/jitsi,jitsi/jitsi,laborautonomo/jitsi,jibaro/jitsi,HelioGuilherme66/jitsi,gpolitis/jitsi,marclaporte/jitsi,jitsi/jitsi,damencho/jitsi,HelioGuilherme66/jitsi,level7systems/jitsi,jibaro/jitsi,damencho/jitsi,bhatvv/jitsi,iant-gmbh/jitsi,marclaporte/jitsi,jitsi/jitsi,gpolitis/jitsi,459below/jitsi,damencho/jitsi,marclaporte/jitsi,Metaswitch/jitsi
/* * SIP Communicator, the OpenSource Java VoIP and Instant Messaging client. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package net.java.sip.communicator.impl.media; import java.io.*; import java.net.*; import java.text.*; import java.util.*; import javax.media.*; import javax.media.format.*; import javax.media.protocol.*; import javax.media.rtp.*; import javax.media.rtp.event.*; import javax.sdp.*; import net.java.sip.communicator.service.media.*; import net.java.sip.communicator.service.media.MediaException; import net.java.sip.communicator.service.netaddr.*; import net.java.sip.communicator.service.protocol.*; import net.java.sip.communicator.service.protocol.event.*; import net.java.sip.communicator.util.*; import net.java.sip.communicator.impl.media.codec.*; import javax.media.control.*; /** * Contains parameters associated with a particular Call such as media (audio * video), a reference to the call itself, RTPManagers and others. * <p> * Currently the class works the following way:<p> * We create 2 rtp managers (one for video and one for audio) upon * initialization of this call session and initialize/bind them on local * addresses. * <p> * When we are asked to create an SDP offer we ask the <tt>MediaControl</tt> * for the Formats/Encodings that we support and create a media description that * would advertise these formats as well as the ports that our RTP managers are * bound upon. * <p> * When we need to process an incoming offer we ask the <tt>MediaControl</tt> * for the Formats/Encodings that we support, intersect them with those that * were sent by the offerer and make <tt>MediaControl</tt> configure our source * processor so that it would transmit in the format that it is expected to * according to the format set that resulted from the intersection. We also * prepare our <tt>RTPManager</tt>-s to send streams for every media type * requested in the offer. (Note that these streams are not started until * the associated call enters the CONNECTED state). * <p> * Processing an SDP answer is quite similar to processing an offer with the * exception that the intersection of all supported formats has been performed * bye the remote party and we only need to configure our processor and * <tt>RTPManager</tt>s. * * @todo implement SendStreamListener. * @todo implement ReceiveStreamListener. * * @author Emil Ivov * @author Ryan Ricard */ public class CallSessionImpl implements CallSession , CallParticipantListener , CallChangeListener , ReceiveStreamListener , SendStreamListener , SessionListener , ControllerListener { private static final Logger logger = Logger.getLogger(CallSessionImpl.class); /** * The call associated with this session. */ private Call call = null; /** * The session address that is used for audio communication in this call. */ private SessionAddress audioSessionAddress = null; /** * The public address returned by the net address manager for the audio * session address. */ private InetSocketAddress audioPublicAddress = null; /** * The session address that is used for video communication in this call. */ private SessionAddress videoSessionAddress = null; /** * The public address returned by the net address manager for the video * session address. */ private InetSocketAddress videoPublicAddress = null; /** * The rtpManager that handles audio streams in this session. */ private RTPManager audioRtpManager = RTPManager.newInstance(); /** * The rtpManager that handles video streams in this session. 
*/ private RTPManager videoRtpManager = RTPManager.newInstance(); /** * The media service instance that created us. */ private MediaServiceImpl mediaServCallback = null; /** * The minimum port number that we'd like our rtp managers to bind upon. */ private static int minPortNumber = 5000; /** * The maximum port number that we'd like our rtp managers to bind upon. */ private static int maxPortNumber = 6000; /** * The name of the property indicating the length of our receive buffer. */ private static final String PROPERTY_NAME_RECEIVE_BUFFER_LENGTH = "net.java.sip.communicator.impl.media.RECEIVE_BUFFER_LENGTH"; /** * The list of currently active players that we have created during this * session. */ private List players = new ArrayList(); /** * The list of currently open Video frames that we have created during this * session. */ private List videoFrames = new ArrayList(); /** * The Custom Data Destination used for this call session. */ private URL dataSink = null; /** * List of RTP format strings which are supported by SIP Communicator in addition * to the JMF standard formats. * * @see #registerCustomCodecFormats(RTPManager) * @see MediaControl#registerCustomCodecs() */ private static final javax.media.Format[] CUSTOM_CODEC_FORMATS = new javax.media.Format[] { new AudioFormat(Constants.ILBC_RTP), new AudioFormat(Constants.ALAW_RTP), new AudioFormat(Constants.SPEEX_RTP) }; /** * Creates a new session for the specified <tt>call</tt> with a custom * destination for incoming data. * * @param call The call associated with this session. * @param mediaServCallback the media service instance that created us. * @param dataSink the place to send incoming data. */ public CallSessionImpl(Call call, MediaServiceImpl mediaServCallback, URL dataSink ) { this.call = call; this.mediaServCallback = mediaServCallback; this.dataSink = dataSink; call.addCallChangeListener(this); initializePortNumbers(); } /** * Creates a new session for the specified <tt>call</tt>. * * @param call The call associated with this session. * @param mediaServCallback the media service instance that created us. */ public CallSessionImpl(Call call, MediaServiceImpl mediaServCallback) { this(call, mediaServCallback, null); } /** * Returns the call associated with this Session. * * @return the Call associated with this session. */ public Call getCall() { return call; } /** * Returns the port that we are using for receiving video data in this * <tt>CallSession</tt>. * <p> * @return the port number we are using for receiving video data in this * <tt>CallSession</tt>. */ public int getVideoPort() { return videoSessionAddress.getDataPort(); } /** * Returns the port that we are using for receiving audio data in this * <tt>CallSession</tt>. * <p> * @return the port number we are using for receiving audio data in this * <tt>CallSession</tt>. */ public int getAudioPort() { return audioSessionAddress.getDataPort(); } /** * Returns the rtp manager that we are using for audio streams. * @return the RTPManager instance that we are using for audio streams. */ public RTPManager getAudioRtpManager() { return this.audioRtpManager; } /** * Returns the rtp manager that we are using for video streams. * @return the RTPManager instance that we are using for audio streams. */ public RTPManager getVideoRtpManager() { return this.videoRtpManager; } /** * Opens all streams that have been initialized for local RTP managers. * * @throws MediaException if start() fails for all send streams. 
*/ private void startStreaming() throws MediaException { //start all audio streams boolean startedAtLeastOneStream = false; RTPManager rtpManager = getAudioRtpManager(); Vector sendStreams = rtpManager.getSendStreams(); if(sendStreams != null && sendStreams.size() > 0) { logger.trace("Will be starting " + sendStreams.size() + " audio send streams."); Iterator ssIter = sendStreams.iterator(); while (ssIter.hasNext()) { SendStream stream = (SendStream) ssIter.next(); try { /** @todo are we sure we want to connect here? */ stream.getDataSource().connect(); stream.start(); startedAtLeastOneStream = true; } catch (IOException ex) { logger.warn("Failed to start stream.", ex); } } } else { logger.trace("No audio send streams will be started."); } //start video streams rtpManager = getVideoRtpManager(); sendStreams = rtpManager.getSendStreams(); if(sendStreams != null && sendStreams.size() > 0) { logger.trace("Will be starting " + sendStreams.size() + " video send streams."); Iterator ssIter = sendStreams.iterator(); while (ssIter.hasNext()) { SendStream stream = (SendStream) ssIter.next(); try { stream.start(); startedAtLeastOneStream = true; } catch (IOException ex) { logger.warn("Failed to start stream.", ex); } } } else { logger.trace("No video send streams will be started."); } if(!startedAtLeastOneStream && sendStreams.size() > 0) { stopStreaming(); throw new MediaException("Failed to start streaming" , MediaException.INTERNAL_ERROR); } } /** * Stops and closes all streams that have been initialized for local * RTP managers. */ private void stopStreaming() { stopStreaming(getAudioRtpManager(), "audio"); this.audioRtpManager = null; stopStreaming(getVideoRtpManager(), "video"); this.videoRtpManager = null; } /** * Stops and closes all streams currently handled by <tt>rtpManager</tt>. * * @param rtpManager the rtpManager whose streams we'll be stopping. */ private void stopStreaming(RTPManager rtpManager, String rtpManagerDescription) { Vector sendStreams = rtpManager.getSendStreams(); Iterator ssIter = sendStreams.iterator(); while(ssIter.hasNext()) { SendStream stream = (SendStream) ssIter.next(); try { stream.getDataSource().stop(); stream.stop(); stream.close(); } catch (IOException ex) { logger.warn("Failed to stop stream.", ex); } } Vector receiveStreams = rtpManager.getReceiveStreams(); Iterator rsIter = receiveStreams.iterator(); while(rsIter.hasNext()) { ReceiveStream stream = (ReceiveStream) rsIter.next(); try { stream.getDataSource().stop(); } catch (IOException ex) { logger.warn("Failed to stop stream.", ex); } } //remove targets rtpManager.removeTargets("Session ended."); printFlowStatistics(rtpManager); //stop listening rtpManager.removeReceiveStreamListener(this); rtpManager.removeSendStreamListener(this); rtpManager.removeSessionListener(this); rtpManager.dispose(); } /** * Prints all statistics available for rtpManager. (Method contributed by * Michael Koch). * * @param rtpManager the RTP manager that we'd like to print statistics for. */ private void printFlowStatistics(RTPManager rtpManager) { String rtpManagerDescription = (rtpManager == getAudioRtpManager()) ? "(for audio flows)" : "(for video flows)"; //print flow statistics. 
GlobalTransmissionStats s = rtpManager.getGlobalTransmissionStats(); logger.info( "global transmission stats (" + rtpManagerDescription + "): \n" + "bytes sent: " + s.getBytesSent() + "\n" + "local colls: " + s.getLocalColls() + "\n" + "remote colls: " + s.getRemoteColls() + "\n" + "RTCP sent: " + s.getRTCPSent() + "\n" + "RTP sent: " + s.getRTPSent() + "\n" + "transmit failed: " + s.getTransmitFailed() ); GlobalReceptionStats rs = rtpManager.getGlobalReceptionStats(); logger.info( "global reception stats (" + rtpManagerDescription + "): \n" + "bad RTCP packets: " + rs.getBadRTCPPkts() + "\n" + "bad RTP packets: " + rs.getBadRTPkts() + "\n" + "bytes received: " + rs.getBytesRecd() + "\n" + "local collisions: " + rs.getLocalColls() + "\n" + "malformed BYEs: " + rs.getMalformedBye() + "\n" + "malformed RRs: " + rs.getMalformedRR() + "\n" + "malformed SDESs: " + rs.getMalformedSDES() + "\n" + "malformed SRs: " + rs.getMalformedSR() + "\n" + "packets looped: " + rs.getPacketsLooped() + "\n" + "packets received: " + rs.getPacketsRecd() + "\n" + "remote collisions: " + rs.getRemoteColls() + "\n" + "RTCPs received: " + rs.getRTCPRecd() + "\n" + "SRRs received: " + rs.getSRRecd() + "\n" + "transmit failed: " + rs.getTransmitFailed() + "\n" + "unknown types: " + rs.getUnknownTypes() ); } /** * The method is meant for use by protocol service implementations when * willing to send an invitation to a remote callee. The * resources (address and port) allocated for the <tt>callParticipant</tt> * should be kept by the media service implementation until the originating * <tt>callParticipant</tt> enters the DISCONNECTED state. Subsequent sdp * offers/answers requested for the <tt>Call</tt> that the original * <tt>callParticipant</tt> belonged to MUST receive the same IP/port couple * as the first one in order to allow for conferencing. The associated port * will be released once the call has ended. * * @todo implement ice. * * @return a new SDP description String advertising all params of * <tt>callSession</tt>. * * @throws MediaException code SERVICE_NOT_STARTED if this method is called * before the service was started. */ public String createSdpOffer() throws net.java.sip.communicator.service.media.MediaException { return createSessionDescription(null, null).toString(); } /** * The method is meant for use by protocol service implementations when * willing to send an invitation to a remote callee. The intendedDestination * parameter, may contain the address that the offer is to be sent to. In * case it is null we'll try our best to determine a default local address. * * @param intendedDestination the address of the call participant that the * descriptions is to be sent to. * @return a new SDP description String advertising all params of * <tt>callSession</tt>. * * @throws MediaException code SERVICE_NOT_STARTED if this method is called * before the service was started. */ public String createSdpOffer(InetAddress intendedDestination) throws net.java.sip.communicator.service.media.MediaException { return createSessionDescription(null, intendedDestination).toString(); } /** * The method is meant for use by protocol service implementations upon * reception of an SDP answer in response to an offer sent by us earlier. * * @param sdpAnswerStr the SDP answer that we'd like to handle. * @param responder the participant that has sent the answer. * * @throws MediaException code SERVICE_NOT_STARTED if this method is called * before the service was started. 
* @throws ParseException if sdpAnswerStr does not contain a valid sdp * String. */ public synchronized void processSdpAnswer(CallParticipant responder, String sdpAnswerStr) throws MediaException, ParseException { logger.trace("Parsing sdp answer: " + sdpAnswerStr); //first parse the answer SessionDescription sdpAnswer = null; try { sdpAnswer = mediaServCallback.getSdpFactory() .createSessionDescription(sdpAnswerStr); } catch (SdpParseException ex) { throw new ParseException("Failed to parse SDPOffer: " + ex.getMessage() , ex.getCharOffset()); } //extract media descriptions Vector mediaDescriptions = null; try { mediaDescriptions = sdpAnswer.getMediaDescriptions(true); } catch (SdpException exc) { logger.error("failed to extract media descriptions", exc); throw new MediaException("failed to extract media descriptions" , MediaException.INTERNAL_ERROR , exc); } //add the RTP targets this.initStreamTargets(sdpAnswer.getConnection(), mediaDescriptions); //create and init the streams (don't start streaming just yet but wait //for the call to enter the connected state). createSendStreams(mediaDescriptions); } /** * The method is meant for use by protocol service implementations when * willing to respond to an invitation received from a remote caller. Apart * from simply generating an SDP response description, the method records * details * * @param sdpOfferStr the SDP offer that we'd like to create an answer for. * @param offerer the participant that has sent the offer. * * @return a String containing an SDP answer descibing parameters of the * <tt>Call</tt> associated with this session and matching those advertised * by the caller in their <tt>sdpOffer</tt>. * * @throws MediaException code INTERNAL_ERROR if processing the offer and/or * generating the anser fail for some reason. * @throws ParseException if <tt>sdpOfferStr</tt> does not contain a valid * sdp string. */ public String processSdpOffer(CallParticipant offerer, String sdpOfferStr) throws MediaException, ParseException { //first parse the offer SessionDescription sdpOffer = null; try { sdpOffer = mediaServCallback.getSdpFactory() .createSessionDescription(sdpOfferStr); } catch (SdpParseException ex) { throw new ParseException("Failed to parse SDPOffer: " + ex.getMessage() , ex.getCharOffset()); } //create an sdp answer. SessionDescription sdpAnswer = createSessionDescription(sdpOffer, null); //extract the remote addresses. Vector mediaDescriptions = null; try { mediaDescriptions = sdpOffer.getMediaDescriptions(true); } catch (SdpException exc) { logger.error("failed to extract media descriptions", exc); throw new MediaException("failed to extract media descriptions" , MediaException.INTERNAL_ERROR , exc); } //add the RTP targets this.initStreamTargets(sdpOffer.getConnection(), mediaDescriptions); //create and init the streams (don't start streaming just yet but wait //for the call to enter the connected state). createSendStreams(mediaDescriptions); return sdpAnswer.toString(); } /** * Creates a DataSource for all encodings in the mediaDescriptions vector * and initializes send streams in our rtp managers for every stream in the * data source. * @param mediaDescriptions a <tt>Vector</tt> containing * <tt>MediaDescription</tt> instances as sent by the remote side with their * SDP description. 
     * @throws MediaException if we fail to create our data source with the
     * proper encodings and/or fail to initialize the RTP managers with the
     * necessary streams and/or don't find encodings supported by both the
     * remote participant and the local controller.
     */
    private void createSendStreams(Vector mediaDescriptions)
        throws MediaException
    {
        //extract the encodings these media descriptions specify
        Hashtable mediaEncodings
            = extractMediaEncodings(mediaDescriptions);

        //make our processor output in these encodings.
        DataSource dataSource = mediaServCallback.getMediaControl(getCall())
            .createDataSourceForEncodings(mediaEncodings);

        //get all the streams that our processor creates as output.
        PushBufferStream[] streams
            = ((PushBufferDataSource)dataSource).getStreams();

        //for each stream - determine whether it is a video or an audio
        //stream and assign it to the corresponding rtpmanager
        for (int i = 0; i < streams.length; i++)
        {
            RTPManager rtpManager = null;
            if(streams[i].getFormat() instanceof VideoFormat)
            {
                rtpManager = getVideoRtpManager();
            }
            else if (streams[i].getFormat() instanceof AudioFormat)
            {
                rtpManager = getAudioRtpManager();
            }
            else
            {
                logger.warn("We are apparently capable of sending a format "
                            + " that is neither video nor audio. Is "
                            + "this really possible?:"
                            + streams[i].getFormat());
                continue;
            }

            try
            {
                SendStream stream = rtpManager.createSendStream(dataSource, i);

                logger.trace("Created a send stream for format "
                             + streams[i].getFormat());
            }
            catch (Exception exc)
            {
                throw new MediaException(
                    "Failed to create an RTP send stream for format "
                    + streams[i].getFormat()
                    , MediaException.IO_ERROR
                    , exc);
            }
        }
    }

    /**
     * Extracts the addresses that our interlocutor has sent for receiving media
     * and adds them as targets to our RTP manager.
     *
     * @param globalConnParam the global <tt>Connection</tt> (if there was one)
     * specified by our interlocutor outside any media description.
     * @param mediaDescriptions a Vector containing all media descriptions sent
     * by our interlocutor, that we'd use to verify whether connection level
     * parameters have been specified.
     *
     * @throws ParseException if there was a problem with the sdp
     * @throws MediaException if we simply fail to initialize the remote
     * addresses or set them as targets on our RTPManagers.
     */
    private void initStreamTargets(Connection globalConnParam,
                                   Vector mediaDescriptions)
        throws MediaException, ParseException
    {
        try
        {
            String globalConnectionAddress = null;
            if (globalConnParam != null)
                globalConnectionAddress = globalConnParam.getAddress();

            Iterator mediaDescsIter = mediaDescriptions.iterator();
            while (mediaDescsIter.hasNext())
            {
                SessionAddress target = null;
                MediaDescription mediaDescription
                    = (MediaDescription) mediaDescsIter.next();

                int port = mediaDescription.getMedia().getMediaPort();
                String type = mediaDescription.getMedia().getMediaType();

                // If there's a global address, we use it.
                // If there isn't a global address, we get the address from
                // the media description
                // Fix by Pablo L. - Telefonica
                String address;
                if (globalConnectionAddress != null)
                {
                    address = globalConnectionAddress;
                }
                else
                {
                    address = mediaDescription.getConnection().getAddress();
                }

                //check if we have a media level address
                Connection mediaLevelConnection = mediaDescription.
                    getConnection();
                if (mediaLevelConnection != null)
                {
                    address = mediaLevelConnection.getAddress();
                }

                InetAddress inetAddress = null;
                try
                {
                    inetAddress = InetAddress.getByName(address);
                }
                catch (UnknownHostException exc)
                {
                    throw new MediaException(
                        "Failed to resolve address " + address
                        , MediaException.NETWORK_ERROR
                        , exc);
                }

                //create the session address for this media type and add it to
                //the RTPManager.
                target = new SessionAddress(inetAddress, port);

                /** @todo the following line assumes that we have a single rtp
                 * manager per media type which is not necessarily true (e.g. we
                 * may have two distinct video streams: one for a webcam video
                 * and another one for a desktop capture stream) */
                RTPManager rtpManager = type.equals("video")
                    ? getVideoRtpManager()
                    : getAudioRtpManager();

                try
                {
                    rtpManager.addTarget(target);
                    logger.trace("added target " + target
                                 + " for type " + type);
                }
                catch (Exception exc)
                {
                    throw new MediaException("Failed to add RTPManager target."
                        , MediaException.INTERNAL_ERROR
                        , exc);
                }
            }
        }
        catch(SdpParseException exc)
        {
            throw new ParseException("Failed to parse SDP data. Error on line "
                                     + exc.getLineNumber() + " "
                                     + exc.getMessage()
                                     , exc.getCharOffset());
        }
    }

    /**
     * Creates an SDP description of this session using the offer description
     * (if not null) for limiting. The intendedDestination parameter, which may
     * contain the address that the offer is to be sent to, will only be used if
     * the <tt>offer</tt> or its connection parameter are <tt>null</tt>. In the
     * opposite case we use the address provided in the connection param as an
     * intended destination.
     *
     * @param offer the SDP offer that our description should be based upon, or
     * null if we are to construct our own offer.
     * @param intendedDestination the address of the call participant that the
     * description is to be sent to.
     * @return a SessionDescription of this CallSession.
     *
     * @throws MediaException code INTERNAL_ERROR if we get an SDP exception
     * while creating and/or parsing the sdp description.
     */
    private SessionDescription createSessionDescription(
                                            SessionDescription offer,
                                            InetAddress intendedDestination)
        throws MediaException
    {
        try
        {
            SessionDescription sessDescr
                = mediaServCallback.getSdpFactory().createSessionDescription();

            //"v=0"
            Version v = mediaServCallback.getSdpFactory().createVersion(0);

            sessDescr.setVersion(v);

            //we don't yet implement ice so just try to choose a local address
            //that corresponds to the address provided by the offer or as an
            //intended destination.
            NetworkAddressManagerService netAddressManager
                = MediaActivator.getNetworkAddressManagerService();

            if(offer != null)
            {
                Connection c = offer.getConnection();
                if(c != null)
                {
                    try
                    {
                        intendedDestination = InetAddress.getByName(c.
                            getAddress());
                    }
                    catch (SdpParseException ex)
                    {
                        logger.warn("error reading remote sdp. "
                                    + c.toString()
                                    + " is not a valid connection parameter.",
                                    ex);
                    }
                    catch (UnknownHostException ex)
                    {
                        logger.warn("error reading remote sdp. "
                                    + c.toString()
                                    + " does not contain a valid address.",
                                    ex);
                    }
                }
            }
            allocateMediaPorts(intendedDestination);

            InetAddress publicIpAddress = audioPublicAddress.getAddress();

            String addrType
                = publicIpAddress instanceof Inet6Address
                ? Connection.IP6
                : Connection.IP4;

            //spaces in the user name mess everything up.
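            // (hence the raw account user ID is used as-is as the o= username)
            //
            // For illustration only: with a hypothetical user "alice" and a
            // public address of 192.0.2.10, the session level lines generated
            // below would look roughly like:
            //
            //   v=0
            //   o=alice 0 0 IN IP4 192.0.2.10
            //   s=-
            //   c=IN IP4 192.0.2.10
            //   t=0 0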
            //bug report - Alessandro Melzi
            Origin o = mediaServCallback.getSdpFactory().createOrigin(
                call.getProtocolProvider().getAccountID().getUserID()
                , 0
                , 0
                , "IN"
                , addrType
                , publicIpAddress.getHostAddress());

            sessDescr.setOrigin(o);

            //c=
            Connection c = mediaServCallback.getSdpFactory().createConnection(
                "IN"
                , addrType
                , publicIpAddress.getHostAddress());

            sessDescr.setConnection(c);

            //"s=-"
            SessionName s
                = mediaServCallback.getSdpFactory().createSessionName("-");
            sessDescr.setSessionName(s);

            //"t=0 0"
            TimeDescription t
                = mediaServCallback.getSdpFactory().createTimeDescription();
            Vector timeDescs = new Vector();
            timeDescs.add(t);

            sessDescr.setTimeDescriptions(timeDescs);

            //media descriptions.
            Vector offeredMediaDescriptions = null;
            if(offer != null)
                offeredMediaDescriptions = offer.getMediaDescriptions(false);

            logger.debug("Will create media descs with: audio public address="
                         + audioPublicAddress + " and video public address="
                         + videoPublicAddress);

            Vector mediaDescs
                = createMediaDescriptions(offeredMediaDescriptions
                                          , audioPublicAddress
                                          , videoPublicAddress);

            sessDescr.setMediaDescriptions(mediaDescs);

            if (logger.isTraceEnabled())
            {
                logger.trace("Generated SDP - " + sessDescr.toString());
            }

            return sessDescr;
        }
        catch (SdpException exc)
        {
            throw new MediaException(
                "An SDP exception occurred while generating local "
                + "sdp description"
                , MediaException.INTERNAL_ERROR
                , exc);
        }
    }

    /**
     * Creates a vector containing SDP descriptions of media types and formats
     * that we support. If the <tt>offerMediaDescs</tt> vector is non null, only
     * those formats that both we and the offerer support will be advertised.
     *
     * @param offerMediaDescs the media descriptions sent by the offerer (could
     * be null).
     *
     * @param publicAudioAddress the <tt>InetSocketAddress</tt> that we should
     * be using for sending audio.
     * @param publicVideoAddress the <tt>InetSocketAddress</tt> that we should
     * be using for sending video.
     *
     * @return a <tt>Vector</tt> containing media descriptions that we support
     * and (if this is an answer to an offer) that the offering
     * <tt>CallParticipant</tt> supports as well.
     *
     * @throws SdpException if we fail creating the media descriptions
     * @throws MediaException with code UNSUPPORTED_FORMAT_SET_ERROR if we don't
     * support any of the offered media formats.
     */
    private Vector createMediaDescriptions(
                                        Vector offerMediaDescs,
                                        InetSocketAddress publicAudioAddress,
                                        InetSocketAddress publicVideoAddress)
        throws SdpException
               ,MediaException
    {
        //supported audio formats.
        String[] supportedAudioEncodings = mediaServCallback
            .getMediaControl(getCall())
            .getSupportedAudioEncodings();

        //supported video formats
        String[] supportedVideoEncodings = mediaServCallback
            .getMediaControl(getCall())
            .getSupportedVideoEncodings();

        //if there was an offer extract the offered media formats and use
        //the intersection between the formats we support and those in the
        //offer.
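        // For illustration (payload type numbers here are hypothetical): if
        // the offer advertised audio formats [0, 4, 18] and our media control
        // supports [0, 8, 18], the answer built below will only list the
        // common subset [0, 18].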
if (offerMediaDescs != null && offerMediaDescs.size() > 0) { Vector offeredVideoEncodings = new Vector(); Vector offeredAudioEncodings = new Vector(); Iterator offerDescsIter = offerMediaDescs.iterator(); while (offerDescsIter.hasNext()) { MediaDescription desc = (MediaDescription) offerDescsIter.next(); Media media = desc.getMedia(); String mediaType = media.getMediaType(); if (mediaType.equalsIgnoreCase("video")) { offeredVideoEncodings = media.getMediaFormats(true); continue; } if (mediaType.equalsIgnoreCase("audio")) { offeredAudioEncodings = media.getMediaFormats(true); continue; } } //now intersect the offered encodings with what we support Hashtable encodings = new Hashtable(2); encodings.put("audio", offeredAudioEncodings); encodings.put("video", offeredVideoEncodings); encodings = intersectMediaEncodings(encodings); List intersectedAudioEncsList = (List)encodings.get("audio"); List intersectedVideoEncsList = (List)encodings.get("video"); //now replace the encodings arrays with the intersection supportedAudioEncodings = new String[intersectedAudioEncsList.size()]; supportedVideoEncodings = new String[intersectedVideoEncsList.size()]; for (int i = 0; i < supportedAudioEncodings.length; i++) supportedAudioEncodings[i] = (String)intersectedAudioEncsList.get(i); for (int i = 0; i < supportedVideoEncodings.length; i++) supportedVideoEncodings[i] = (String)intersectedVideoEncsList.get(i); } Vector mediaDescs = new Vector(); if(supportedAudioEncodings.length > 0) { //--------Audio media description //make sure preferred formats come first MediaDescription am = mediaServCallback.getSdpFactory().createMediaDescription( "audio" , publicAudioAddress.getPort() , 1 , "RTP/AVP" , supportedAudioEncodings); if (!mediaServCallback.getDeviceConfiguration() .isAudioCaptureSupported()) { am.setAttribute("recvonly", null); } mediaDescs.add(am); } //--------Video media description if(supportedVideoEncodings.length> 0) { //"m=video 22222 RTP/AVP 34"; MediaDescription vm = mediaServCallback.getSdpFactory().createMediaDescription( "video" , publicVideoAddress.getPort() , 1 , "RTP/AVP" , supportedVideoEncodings); if (!mediaServCallback.getDeviceConfiguration() .isVideoCaptureSupported()) { vm.setAttribute("recvonly", null); } mediaDescs.add(vm); } /** @todo record formats for participant. */ return mediaDescs; } /** * Compares audio/video encodings in the <tt>offeredEncodings</tt> * hashtable with those supported by the currently valid media controller * and returns the set of those that were present in both. The hashtable * a maps "audio"/"video" specifier to a list of encodings present in both * the source <tt>offeredEncodings</tt> hashtable and the list of supported * encodings. * * @param offeredEncodings a Hashtable containing sets of encodings that an * interlocutor has sent to us. * @return a <tt>Hashtable</tt> mapping an "audio"/"video" specifier to a * list of encodings present in both the source <tt>offeredEncodings</tt> * hashtable and the list of encodings supported by the local media * controller. * @throws MediaException code UNSUPPORTED_FORMAT_SET_ERROR if the * intersection of both encoding sets does not contain any elements. 
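     * (For instance, an offer that only lists video formats we cannot decode,
     * and no audio formats at all, would result in this exception being
     * thrown.)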
*/ private Hashtable intersectMediaEncodings(Hashtable offeredEncodings) throws MediaException { //audio encodings supported by the media controller String[] supportedAudioEncodings = mediaServCallback .getMediaControl(getCall()) .getSupportedAudioEncodings(); //video encodings supported by the media controller String[] supportedVideoEncodings = mediaServCallback .getMediaControl(getCall()) .getSupportedVideoEncodings(); //audio encodings offered by the remote party List offeredAudioEncodings = (List)offeredEncodings.get("audio"); //video encodings offered by the remote party List offeredVideoEncodings = (List)offeredEncodings.get("video"); //recreate the formats we create according to what the other party //offered. List supportedAudioEncsList = Arrays.asList(supportedAudioEncodings); List intersectedAudioEncsList = new LinkedList(); List supportedVideoEncsList = Arrays.asList(supportedVideoEncodings); List intersectedVideoEncsList = new LinkedList(); //intersect supported audio formats with offered audio formats if (offeredAudioEncodings != null && offeredAudioEncodings.size() > 0) { Iterator offeredAudioEncsIter = offeredAudioEncodings.iterator(); while (offeredAudioEncsIter.hasNext()) { String format = (String) offeredAudioEncsIter.next(); if (supportedAudioEncsList.contains(format)) intersectedAudioEncsList.add(format); } } if (offeredVideoEncodings != null && offeredVideoEncodings.size() > 0) { //intersect supported video formats with offered video formats Iterator offeredVideoEncsIter = offeredVideoEncodings.iterator(); while (offeredVideoEncsIter.hasNext()) { String format = (String) offeredVideoEncsIter.next(); if (supportedVideoEncsList.contains(format)) intersectedVideoEncsList.add(format); } } //if the intersection contains no common formats then we need to //bail. if (intersectedAudioEncsList.size() == 0 && intersectedVideoEncsList.size() == 0) { throw new MediaException( "None of the offered formats was supported by this " + "media implementation" , MediaException.UNSUPPORTED_FORMAT_SET_ERROR); } Hashtable intersection = new Hashtable(2); intersection.put("audio", intersectedAudioEncsList); intersection.put("video", intersectedVideoEncsList); return intersection; } /** * Returns a <tt>Hashtable</tt> mapping media types (e.g. audio or video) * to lists of JMF encoding strings corresponding to the SDP formats * specified in the <tt>mediaDescriptions</tt> vector. * @param mediaDescriptions a <tt>Vector</tt> containing * <tt>MediaDescription</tt> instances extracted from an SDP offer or * answer. * @return a <tt>Hashtable</tt> mapping media types (e.g. audio or video) * to lists of JMF encoding strings corresponding to the SDP formats * specified in the <tt>mediaDescriptions</tt> vector. 
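     * (For example, the standard SDP audio format "3" would typically be
     * translated to JMF's GSM/RTP encoding, assuming MediaUtils knows how to
     * map it.)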
     */
    private Hashtable extractMediaEncodings(Vector mediaDescriptions)
    {
        Hashtable mediaEncodings = new Hashtable(2);

        Iterator descriptionsIter = mediaDescriptions.iterator();

        while(descriptionsIter.hasNext())
        {
            MediaDescription mediaDescription
                = (MediaDescription)descriptionsIter.next();
            Media media = mediaDescription.getMedia();
            Vector mediaFormats = null;
            String mediaType = null;
            try
            {
                mediaFormats = media.getMediaFormats(true);
                mediaType = media.getMediaType();
            }
            catch (SdpParseException ex)
            {
                //this shouldn't happen since nist-sdp is not doing
                //lazy parsing but log anyway
                logger.warn("Error parsing sdp.",ex);
                continue;
            }

            if(mediaFormats.size() > 0)
            {
                List jmfEncodings = MediaUtils.sdpToJmfEncodings(mediaFormats);
                if(jmfEncodings.size() > 0)
                    mediaEncodings.put(mediaType, jmfEncodings);
            }
        }

        logger.trace("Possible media encodings="+mediaEncodings);

        return mediaEncodings;
    }

    /**
     * Initializes the minimum and maximum port numbers that our RTP managers
     * should bind upon, using the values specified in the configuration
     * service or falling back to the defaults (5000-6000) if none are set.
     */
    private void initializePortNumbers()
    {
        //first reset to default values
        minPortNumber = 5000;
        maxPortNumber = 6000;

        //then set to anything the user might have specified.
        String minPortNumberStr = MediaActivator.getConfigurationService()
            .getString(MediaService.MIN_PORT_NUMBER_PROPERTY_NAME);

        if (minPortNumberStr != null)
        {
            try{
                minPortNumber = Integer.parseInt(minPortNumberStr);
            }catch (NumberFormatException ex){
                logger.warn(minPortNumberStr
                            + " is not a valid min port number value. "
                            + "using min port " + minPortNumber);
            }
        }

        String maxPortNumberStr = MediaActivator.getConfigurationService()
            .getString(MediaService.MAX_PORT_NUMBER_PROPERTY_NAME);

        if (maxPortNumberStr != null)
        {
            try{
                maxPortNumber = Integer.parseInt(maxPortNumberStr);
            }catch (NumberFormatException ex){
                logger.warn(maxPortNumberStr
                            + " is not a valid max port number value. "
                            + "using max port " + maxPortNumber,
                            ex);
            }
        }
    }

    /**
     * Allocates a local port for the RTP manager, tries to obtain a public
     * address for it and after succeeding makes the network address manager
     * protect the address until we are ready to bind on it.
     *
     * @param intendedDestination a destination that the rtp manager would be
     * communicating with.
     * @param sessionAddress the sessionAddress that we're locally bound on.
     * @param bindRetries the number of times that we need to retry a bind.
     *
     * @return the public <tt>InetSocketAddress</tt> that the network address
     * manager returned for the session address that we're bound on.
     *
     * @throws MediaException if we fail to initialize the rtp manager.
     */
    private InetSocketAddress allocatePort(InetAddress intendedDestination,
                                           SessionAddress sessionAddress,
                                           int bindRetries)
        throws MediaException
    {
        InetSocketAddress publicAddress = null;
        boolean initialized = false;

        NetworkAddressManagerService netAddressManager
            = MediaActivator.getNetworkAddressManagerService();

        //try to initialize a public address for the rtp manager.
        for (int i = bindRetries; i > 0; i--)
        {
            //first try to obtain a binding for the address.
            try
            {
                publicAddress = netAddressManager
                    .getPublicAddressFor(intendedDestination,
                                         sessionAddress.getDataPort());
                initialized = true;
                break;
            }
            catch (IOException ex)
            {
                logger.warn("Retrying a bind because of a failure. "
                            + "Failed Address is: "
                            + sessionAddress.toString(), ex);

                //reinit the session address we tried with and prepare to retry.
                sessionAddress
                    .setDataPort(sessionAddress.getDataPort()+2);
                sessionAddress
                    .setControlPort(sessionAddress.getControlPort()+2);
            }
        }

        if(!initialized)
            throw new MediaException("Failed to bind to a local port in "
                                     + Integer.toString(bindRetries)
                                     + " tries."
                                     , MediaException.INTERNAL_ERROR);

        return publicAddress;
    }

    /**
     * Looks for free ports and initializes the RTP managers according to the
     * specified <tt>intendedDestination</tt>.
     *
     * @param intendedDestination the InetAddress that we will be transmitting
     * to.
     * @throws MediaException if we fail initializing the RTP managers.
     */
    private void allocateMediaPorts(InetAddress intendedDestination)
        throws MediaException
    {
        InetAddress inAddrAny = null;

        try
        {
            //create an ipv4 any address since it also works when accepting
            //ipv6 connections.
            inAddrAny = InetAddress.getByName(NetworkUtils.IN_ADDR_ANY);
        }
        catch (UnknownHostException ex)
        {
            //this shouldn't happen.
            throw new MediaException("Failed to create the ANY inet address."
                                     , MediaException.INTERNAL_ERROR
                                     , ex);
        }

        //check the number of times that we'd have to retry binding to local
        //ports before giving up.
        String bindRetriesStr
            = MediaActivator.getConfigurationService().getString(
                MediaService.BIND_RETRIES_PROPERTY_NAME);

        int bindRetries = MediaService.BIND_RETRIES_DEFAULT_VALUE;
        try
        {
            if(bindRetriesStr != null && bindRetriesStr.length() > 0)
                bindRetries = Integer.parseInt(bindRetriesStr);
        }
        catch (NumberFormatException ex)
        {
            logger.warn(bindRetriesStr
                        + " is not a valid value for number of bind retries."
                        , ex);
        }

        //initialize audio rtp manager.
        audioSessionAddress = new SessionAddress(inAddrAny, minPortNumber);
        audioPublicAddress = allocatePort(intendedDestination,
                                          audioSessionAddress,
                                          bindRetries);

        logger.debug("AudioSessionAddress="+audioSessionAddress);
        logger.debug("AudioPublicAddress="+audioPublicAddress);

        //augment min port number so that no one else tries to bind here.
        minPortNumber = audioSessionAddress.getDataPort() + 2;

        //initialize video rtp manager.
        videoSessionAddress = new SessionAddress(inAddrAny, minPortNumber);
        videoPublicAddress = allocatePort(intendedDestination,
                                          videoSessionAddress,
                                          bindRetries);

        //augment min port number so that no one else tries to bind here.
        minPortNumber = videoSessionAddress.getDataPort() + 2;

        //if we have reached the max port number - reinit.
        if(minPortNumber > maxPortNumber - 2)
            initializePortNumbers();

        //now init the rtp managers and make them bind
        initializeRtpManager(audioRtpManager, audioSessionAddress);
        initializeRtpManager(videoRtpManager, videoSessionAddress);
    }

    /**
     * Initializes the RTP manager so that it would start listening on the
     * <tt>bindAddress</tt> session address. The method also initializes the
     * RTP manager buffer control.
     *
     * @param rtpManager the <tt>RTPManager</tt> to initialize.
     * @param bindAddress the <tt>SessionAddress</tt> to use when initializing
     * the RTPManager.
     *
     * @throws MediaException if we fail to initialize the RTP manager.
     */
    private void initializeRtpManager(RTPManager rtpManager,
                                      SessionAddress bindAddress)
        throws MediaException
    {
        try
        {
            rtpManager.initialize(bindAddress);
        }
        catch (Exception exc)
        {
            logger.error("Failed to init an RTP manager.", exc);
            throw new MediaException("Failed to init an RTP manager."
                                     , MediaException.IO_ERROR
                                     , exc);
        }

        //it appears that if we don't do this the managers don't play
        // You can try out some other buffer size to see
        // if you can get better smoothness.
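        // The receive buffer length defaults to 100 below (interpreted by
        // JMF's BufferControl in milliseconds) and may be overridden through
        // the configuration property referenced by
        // PROPERTY_NAME_RECEIVE_BUFFER_LENGTH.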
BufferControl bc = (BufferControl)rtpManager .getControl(BufferControl.class.getName()); if (bc != null) { long buff = 100; String buffStr = MediaActivator.getConfigurationService() .getString(PROPERTY_NAME_RECEIVE_BUFFER_LENGTH); try { if(buffStr != null && buffStr.length() > 0) buff = Long.parseLong(buffStr); } catch (NumberFormatException exc) { logger.warn(buffStr + " is not a valid receive buffer value (integer)." , exc); } buff = bc.setBufferLength(buff); logger.trace("set receiver buffer len to=" + buff); bc.setEnabledThreshold(true); bc.setMinimumThreshold(100); } //add listeners rtpManager.addReceiveStreamListener(this); rtpManager.addSendStreamListener(this); rtpManager.addSessionListener(this); } /** * Registers the RTP formats which are supported by SIP Communicator in addition * to the JMF standard formats. This has to be done for every RTP Manager instance. * * @param rtpManager The manager with which to register the formats. * @see MediaControl#registerCustomCodecs() */ static void registerCustomCodecFormats(RTPManager rtpManager) { for (int i=0; i<CUSTOM_CODEC_FORMATS.length; i++) { javax.media.Format format = CUSTOM_CODEC_FORMATS[i]; logger.debug("registering format " + format + " with RTP manager"); /* * NOTE ([email protected]): com.sun.media.rtp.RtpSessionMgr.addFormat * leaks memory, since it stores the Format in a static Vector. * AFAIK there is no easy way around it, but the memory impact * should not be too bad. */ rtpManager.addFormat( format, MediaUtils.jmfToSdpEncoding(format.getEncoding())); } } /** * Indicates that a change has occurred in the state of the source call. * @param evt the <tt>CallChangeEvent</tt> instance containing the source * calls and its old and new state. */ public void callStateChanged(CallChangeEvent evt) { if( evt.getNewValue() == CallState.CALL_IN_PROGRESS && evt.getNewValue() != evt.getOldValue()) { try { startStreaming(); mediaServCallback.getMediaControl(getCall()) .startProcessingMedia(this); } catch (MediaException ex) { /** @todo need to notify someone */ logger.error("Failed to start streaming.", ex); } } else if( evt.getNewValue() == CallState.CALL_ENDED && evt.getNewValue() != evt.getOldValue()) { stopStreaming(); mediaServCallback.getMediaControl(getCall()) .stopProcessingMedia(this); //close all players that we have created in this session Iterator playersIter = players.iterator(); while(playersIter.hasNext()) { Player player = ( Player )playersIter.next(); player.stop(); player.deallocate(); player.close(); playersIter.remove(); } //close all video frames that we have created in this session Iterator videoFramesIter = videoFrames.iterator(); while(videoFramesIter.hasNext()) { javax.swing.JFrame frame = ( javax.swing.JFrame )videoFramesIter.next(); frame.setVisible(false); frame.dispose(); videoFramesIter.remove(); } //remove ourselves as listeners from the call evt.getSourceCall().removeCallChangeListener(this); RTPManager audioRtpMan = getAudioRtpManager(); if(audioRtpMan != null) audioRtpMan.dispose(); RTPManager videoRtpMan = getVideoRtpManager(); if(videoRtpMan != null) videoRtpMan.dispose(); } } /** * Indicates that a change has occurred in the status of the source * CallParticipant. * * @param evt The <tt>CallParticipantChangeEvent</tt> instance containing * the source event as well as its previous and its new status. */ public void participantStateChanged(CallParticipantChangeEvent evt) { /** @todo implement participantStateChanged() */ /** @todo remove target for participant. 
*/ } /** * Indicates that a new call participant has joined the source call. * @param evt the <tt>CallParticipantEvent</tt> containing the source call * and call participant. */ public synchronized void callParticipantAdded(CallParticipantEvent evt) { CallParticipant sourceParticipant = evt.getSourceCallParticipant(); sourceParticipant.addCallParticipantListener(this); } /** * Indicates that a call participant has left the source call. * @param evt the <tt>CallParticipantEvent</tt> containing the source call * and call participant. */ public void callParticipantRemoved(CallParticipantEvent evt) { } //-------- dummy implementations of listener methods that we don't need /** * Ignore - we're not concerned by this event inside a call session. * * @param evt ignore. */ public void participantImageChanged(CallParticipantChangeEvent evt) { } /** * Ignore - we're not concerned by this event inside a call session. * * @param evt ignore. */ public void participantDisplayNameChanged(CallParticipantChangeEvent evt) { } /** * Ignore - we're not concerned by this event inside a call session. * * @param evt ignore. */ public void participantTransportAddressChanged( CallParticipantChangeEvent evt) { /** @todo i am not sure we should be ignoring this one ... */ } /** * Ignore - we're not concerned by this event inside a call session. * * @param evt ignore. */ public void participantAddressChanged(CallParticipantChangeEvent evt) { } //implementation of jmf listener methods /** * Method called back in the SessionListener to notify * listener of all Session Events.SessionEvents could be one * of NewParticipantEvent or LocalCollisionEvent. * * @param event the newly received SessionEvent */ public synchronized void update(SessionEvent event) { if (event instanceof NewParticipantEvent) { Participant participant = ( (NewParticipantEvent) event).getParticipant(); if (logger.isDebugEnabled()) { logger.debug("A new participant had just joined: " + participant.getCNAME()); } } else { if (logger.isDebugEnabled()) { logger.debug( "Received the following JMF Session event - " + event.getClass().getName() + "=" + event); } } } /** * Method called back in the RTPSessionListener to notify * listener of all SendStream Events. * * @param event the newly received SendStreamEvent */ public synchronized void update(SendStreamEvent event) { logger.debug( "received the following JMF SendStreamEvent - " + event.getClass().getName() + "="+ event); } /** * Method called back in the RTPSessionListener to notify * listener of all ReceiveStream Events. * * @param evt the newly received ReceiveStreamEvent */ public synchronized void update(ReceiveStreamEvent evt) { RTPManager mgr = (RTPManager) evt.getSource(); Participant participant = evt.getParticipant(); // could be null. ReceiveStream stream = evt.getReceiveStream(); // could be null. if (evt instanceof NewReceiveStreamEvent) { try { logger.debug("received a new incoming stream. " + evt); stream = ( (NewReceiveStreamEvent) evt).getReceiveStream(); DataSource ds = stream.getDataSource(); // Find out the formats. 
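                // (the RTPControl exposed by the stream's DataSource reports
                // the RTP payload format of the incoming data; it is only
                // used for logging below)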
RTPControl ctl = (RTPControl) ds.getControl( "javax.media.rtp.RTPControl"); if (logger.isDebugEnabled()) { if (ctl != null) { logger.debug("Received new RTP stream: " + ctl.getFormat()); } else { logger.debug("Received new RTP stream"); } } Player player = null; //if we are using a custom destination, create a processor //if not, a player will suffice if (dataSink != null) { player = Manager.createProcessor(ds); } else { player = Manager.createPlayer(ds); } player.addControllerListener(this); //a processor needs to be configured then realized. if (dataSink != null) { ((Processor)player).configure(); } else { player.realize(); } players.add(player); } catch (Exception e) { logger.error("NewReceiveStreamEvent exception ", e); return; } } else if (evt instanceof StreamMappedEvent) { if (stream != null && stream.getDataSource() != null) { DataSource ds = stream.getDataSource(); // Find out the formats. RTPControl ctl = (RTPControl) ds.getControl( "javax.media.rtp.RTPControl"); if (logger.isDebugEnabled()) { String msg = "The previously unidentified stream "; if (ctl != null) { msg += ctl.getFormat(); } msg += " had now been identified as sent by: " + participant.getCNAME(); logger.debug(msg); } } } else if (evt instanceof ByeEvent) { logger.debug("Got \"bye\" from: " + participant.getCNAME()); } } /** * This method is called when an event is generated by a * <code>Controller</code> that this listener is registered with. * @param ce The event generated. */ public synchronized void controllerUpdate(ControllerEvent ce) { logger.debug("Received a ControllerEvent: " + ce); Player player = (Player) ce.getSourceController(); if (player == null) { return; } //if configuration is completed and this is a processor //we need to set file format and explicitly call realize(). if (ce instanceof ConfigureCompleteEvent) { try { ((Processor)player).setContentDescriptor( new FileTypeDescriptor(FileTypeDescriptor.WAVE)); player.realize(); } catch (Exception exc) { logger.error("failed to record to file", exc); } } // Get this when the internal players are realized. if (ce instanceof RealizeCompleteEvent) { //set the volume as it is not on max by default. //XXX: I am commenting this since apparently it is causing some //problems on windows. 
//GainControl gc // = (GainControl)player.getControl(GainControl.class.getName()); //if (gc != null) //{ // logger.debug("Setting volume to max"); // gc.setLevel(1); //} //else // logger.debug("Player does not have gain control."); logger.debug("A player was realized and will be started."); player.start(); if (dataSink != null) { try { logger.info("starting recording to file: "+dataSink); MediaLocator dest = new MediaLocator(dataSink); DataSource ds = ((Processor)player).getDataOutput(); DataSink sink = Manager.createDataSink( ((Processor)player).getDataOutput(), dest); player.start(); //do we know the output file's duration RecordInitiator record = new RecordInitiator(sink); record.start(); } catch(Exception e) { logger.error("failed while trying to record to file",e); } } else { player.start(); } /** @todo video frame is currently handled with very ugly test code * please don't forget to remove */ //------------ ugly video test code starts here -------------------- java.awt.Component vc = player.getVisualComponent(); if(vc != null) { javax.swing.JFrame frame = new javax.swing.JFrame(); frame.setTitle("SIP Communicator - Video Call"); frame.getContentPane().add(vc); frame.pack(); //center java.awt.Dimension frameSize = frame.getSize(); //ugly resize if too tiny if(frameSize.width < 300) { frame.setSize(frameSize.width * 2, frameSize.height * 2); frameSize = frame.getSize(); } java.awt.Dimension screenSize = java.awt.Toolkit.getDefaultToolkit().getScreenSize(); frame.setLocation((screenSize.width - frameSize.width)/2 ,(screenSize.height - frameSize.height)/2); frame.setVisible(true); videoFrames.add(frame); } //------------- ugly video test code ends here --------------------- } if (ce instanceof StartEvent) { logger.debug("Received a StartEvent"); } if (ce instanceof ControllerErrorEvent) { logger.error( "The following error was reported while starting a player" + ce); } if (ce instanceof ControllerClosedEvent) { logger.debug("Received a ControllerClosedEvent"); } } /** * The record initiator is started after taking a call that is supposed to * be answered by a mailbox plug-in. It waits for the outgoing message to * stop transmitting and starts recording whatever comes after that. */ private class RecordInitiator extends Thread { private DataSink sink; public RecordInitiator(DataSink sink) { this.sink = sink; } public void run() { //determine how long to wait for the outgoing //message to stop playing javax.media.Time timeToWait = mediaServCallback .getMediaControl(call) .getOutputDuration(); //if the time is unknown, we will start recording immediately if (timeToWait != javax.media.Time.TIME_UNKNOWN) { double millisToWait = timeToWait.getSeconds() * 1000; long timeStartedPlaying = System.currentTimeMillis(); while (System.currentTimeMillis() < timeStartedPlaying + millisToWait) { try { Thread.sleep(100); } catch (InterruptedException e) { logger.error("Interrupted while waiting to start " + "recording incoming message",e); } } } //open the dataSink and start recording try { sink.open(); sink.start(); } catch (IOException e) { logger.error("IO Exception while attempting to start " + "recording incoming message",e); } } } }
src/net/java/sip/communicator/impl/media/CallSessionImpl.java
/* * SIP Communicator, the OpenSource Java VoIP and Instant Messaging client. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package net.java.sip.communicator.impl.media; import java.io.*; import java.net.*; import java.text.*; import java.util.*; import javax.media.*; import javax.media.format.*; import javax.media.protocol.*; import javax.media.rtp.*; import javax.media.rtp.event.*; import javax.sdp.*; import net.java.sip.communicator.service.media.*; import net.java.sip.communicator.service.media.MediaException; import net.java.sip.communicator.service.netaddr.*; import net.java.sip.communicator.service.protocol.*; import net.java.sip.communicator.service.protocol.event.*; import net.java.sip.communicator.util.*; import net.java.sip.communicator.impl.media.codec.*; import javax.media.control.*; /** * Contains parameters associated with a particular Call such as media (audio * video), a reference to the call itself, RTPManagers and others. * <p> * Currently the class works the following way:<p> * We create 2 rtp managers (one for video and one for audio) upon * initialization of this call session and initialize/bind them on local * addresses. * <p> * When we are asked to create an SDP offer we ask the <tt>MediaControl</tt> * for the Formats/Encodings that we support and create a media description that * would advertise these formats as well as the ports that our RTP managers are * bound upon. * <p> * When we need to process an incoming offer we ask the <tt>MediaControl</tt> * for the Formats/Encodings that we support, intersect them with those that * were sent by the offerer and make <tt>MediaControl</tt> configure our source * processor so that it would transmit in the format that it is expected to * according to the format set that resulted from the intersection. We also * prepare our <tt>RTPManager</tt>-s to send streams for every media type * requested in the offer. (Note that these streams are not started until * the associated call enters the CONNECTED state). * <p> * Processing an SDP answer is quite similar to processing an offer with the * exception that the intersection of all supported formats has been performed * bye the remote party and we only need to configure our processor and * <tt>RTPManager</tt>s. * * @todo implement SendStreamListener. * @todo implement ReceiveStreamListener. * * @author Emil Ivov * @author Ryan Ricard */ public class CallSessionImpl implements CallSession , CallParticipantListener , CallChangeListener , ReceiveStreamListener , SendStreamListener , SessionListener , ControllerListener { private static final Logger logger = Logger.getLogger(CallSessionImpl.class); /** * The call associated with this session. */ private Call call = null; /** * The session address that is used for audio communication in this call. */ private SessionAddress audioSessionAddress = null; /** * The public address returned by the net address manager for the audio * session address. */ private InetSocketAddress audioPublicAddress = null; /** * The session address that is used for video communication in this call. */ private SessionAddress videoSessionAddress = null; /** * The public address returned by the net address manager for the video * session address. */ private InetSocketAddress videoPublicAddress = null; /** * The rtpManager that handles audio streams in this session. */ private RTPManager audioRtpManager = RTPManager.newInstance(); /** * The rtpManager that handles video streams in this session. 
*/ private RTPManager videoRtpManager = RTPManager.newInstance(); /** * The media service instance that created us. */ private MediaServiceImpl mediaServCallback = null; /** * The minimum port number that we'd like our rtp managers to bind upon. */ private static int minPortNumber = 5000; /** * The maximum port number that we'd like our rtp managers to bind upon. */ private static int maxPortNumber = 6000; /** * The name of the property indicating the length of our receive buffer. */ private static final String PROPERTY_NAME_RECEIVE_BUFFER_LENGTH = "net.java.sip.communicator.impl.media.RECEIVE_BUFFER_LENGTH"; /** * The list of currently active players that we have created during this * session. */ private List players = new ArrayList(); /** * The list of currently open Video frames that we have created during this * session. */ private List videoFrames = new ArrayList(); /** * The Custom Data Destination used for this call session. */ private URL dataSink = null; /** * List of RTP format strings which are supported by SIP Communicator in addition * to the JMF standard formats. * * @see #registerCustomCodecFormats(RTPManager) * @see MediaControl#registerCustomCodecs() */ private static final javax.media.Format[] CUSTOM_CODEC_FORMATS = new javax.media.Format[] { new AudioFormat(Constants.ILBC_RTP), new AudioFormat(Constants.ALAW_RTP), new AudioFormat(Constants.SPEEX_RTP) }; /** * Creates a new session for the specified <tt>call</tt> with a custom * destination for incoming data. * * @param call The call associated with this session. * @param mediaServCallback the media service instance that created us. * @param dataSink the place to send incoming data. */ public CallSessionImpl(Call call, MediaServiceImpl mediaServCallback, URL dataSink ) { this.call = call; this.mediaServCallback = mediaServCallback; this.dataSink = dataSink; call.addCallChangeListener(this); initializePortNumbers(); } /** * Creates a new session for the specified <tt>call</tt>. * * @param call The call associated with this session. * @param mediaServCallback the media service instance that created us. */ public CallSessionImpl(Call call, MediaServiceImpl mediaServCallback) { this(call, mediaServCallback, null); } /** * Returns the call associated with this Session. * * @return the Call associated with this session. */ public Call getCall() { return call; } /** * Returns the port that we are using for receiving video data in this * <tt>CallSession</tt>. * <p> * @return the port number we are using for receiving video data in this * <tt>CallSession</tt>. */ public int getVideoPort() { return videoSessionAddress.getDataPort(); } /** * Returns the port that we are using for receiving audio data in this * <tt>CallSession</tt>. * <p> * @return the port number we are using for receiving audio data in this * <tt>CallSession</tt>. */ public int getAudioPort() { return audioSessionAddress.getDataPort(); } /** * Returns the rtp manager that we are using for audio streams. * @return the RTPManager instance that we are using for audio streams. */ public RTPManager getAudioRtpManager() { return this.audioRtpManager; } /** * Returns the rtp manager that we are using for video streams. * @return the RTPManager instance that we are using for audio streams. */ public RTPManager getVideoRtpManager() { return this.videoRtpManager; } /** * Opens all streams that have been initialized for local RTP managers. * * @throws MediaException if start() fails for all send streams. 
*/ private void startStreaming() throws MediaException { //start all audio streams boolean startedAtLeastOneStream = false; RTPManager rtpManager = getAudioRtpManager(); Vector sendStreams = rtpManager.getSendStreams(); if(sendStreams != null && sendStreams.size() > 0) { logger.trace("Will be starting " + sendStreams.size() + " audio send streams."); Iterator ssIter = sendStreams.iterator(); while (ssIter.hasNext()) { SendStream stream = (SendStream) ssIter.next(); try { /** @todo are we sure we want to connect here? */ stream.getDataSource().connect(); stream.start(); startedAtLeastOneStream = true; } catch (IOException ex) { logger.warn("Failed to start stream.", ex); } } } else { logger.trace("No audio send streams will be started."); } //start video streams rtpManager = getVideoRtpManager(); sendStreams = rtpManager.getSendStreams(); if(sendStreams != null && sendStreams.size() > 0) { logger.trace("Will be starting " + sendStreams.size() + " video send streams."); Iterator ssIter = sendStreams.iterator(); while (ssIter.hasNext()) { SendStream stream = (SendStream) ssIter.next(); try { stream.start(); startedAtLeastOneStream = true; } catch (IOException ex) { logger.warn("Failed to start stream.", ex); } } } else { logger.trace("No video send streams will be started."); } if(!startedAtLeastOneStream && sendStreams.size() > 0) { stopStreaming(); throw new MediaException("Failed to start streaming" , MediaException.INTERNAL_ERROR); } } /** * Stops and closes all streams that have been initialized for local * RTP managers. */ private void stopStreaming() { stopStreaming(getAudioRtpManager(), "audio"); this.audioRtpManager = null; stopStreaming(getVideoRtpManager(), "video"); this.videoRtpManager = null; } /** * Stops and closes all streams currently handled by <tt>rtpManager</tt>. * * @param rtpManager the rtpManager whose streams we'll be stopping. */ private void stopStreaming(RTPManager rtpManager, String rtpManagerDescription) { Vector sendStreams = rtpManager.getSendStreams(); Iterator ssIter = sendStreams.iterator(); while(ssIter.hasNext()) { SendStream stream = (SendStream) ssIter.next(); try { stream.getDataSource().stop(); stream.stop(); stream.close(); } catch (IOException ex) { logger.warn("Failed to stop stream.", ex); } } Vector receiveStreams = rtpManager.getReceiveStreams(); Iterator rsIter = receiveStreams.iterator(); while(rsIter.hasNext()) { ReceiveStream stream = (ReceiveStream) rsIter.next(); try { stream.getDataSource().stop(); } catch (IOException ex) { logger.warn("Failed to stop stream.", ex); } } //remove targets rtpManager.removeTargets("Session ended."); printFlowStatistics(rtpManager); //stop listening rtpManager.removeReceiveStreamListener(this); rtpManager.removeSendStreamListener(this); rtpManager.removeSessionListener(this); rtpManager.dispose(); } /** * Prints all statistics available for rtpManager. (Method contributed by * Michael Koch). * * @param rtpManager the RTP manager that we'd like to print statistics for. */ private void printFlowStatistics(RTPManager rtpManager) { String rtpManagerDescription = (rtpManager == getAudioRtpManager()) ? "(for audio flows)" : "(for video flows)"; //print flow statistics. 
GlobalTransmissionStats s = rtpManager.getGlobalTransmissionStats(); logger.info( "global transmission stats (" + rtpManagerDescription + "): \n" + "bytes sent: " + s.getBytesSent() + "\n" + "local colls: " + s.getLocalColls() + "\n" + "remote colls: " + s.getRemoteColls() + "\n" + "RTCP sent: " + s.getRTCPSent() + "\n" + "RTP sent: " + s.getRTPSent() + "\n" + "transmit failed: " + s.getTransmitFailed() ); GlobalReceptionStats rs = rtpManager.getGlobalReceptionStats(); logger.info( "global reception stats (" + rtpManagerDescription + "): \n" + "bad RTCP packets: " + rs.getBadRTCPPkts() + "\n" + "bad RTP packets: " + rs.getBadRTPkts() + "\n" + "bytes received: " + rs.getBytesRecd() + "\n" + "local collisions: " + rs.getLocalColls() + "\n" + "malformed BYEs: " + rs.getMalformedBye() + "\n" + "malformed RRs: " + rs.getMalformedRR() + "\n" + "malformed SDESs: " + rs.getMalformedSDES() + "\n" + "malformed SRs: " + rs.getMalformedSR() + "\n" + "packets looped: " + rs.getPacketsLooped() + "\n" + "packets received: " + rs.getPacketsRecd() + "\n" + "remote collisions: " + rs.getRemoteColls() + "\n" + "RTCPs received: " + rs.getRTCPRecd() + "\n" + "SRRs received: " + rs.getSRRecd() + "\n" + "transmit failed: " + rs.getTransmitFailed() + "\n" + "unknown types: " + rs.getUnknownTypes() ); } /** * The method is meant for use by protocol service implementations when * willing to send an invitation to a remote callee. The * resources (address and port) allocated for the <tt>callParticipant</tt> * should be kept by the media service implementation until the originating * <tt>callParticipant</tt> enters the DISCONNECTED state. Subsequent sdp * offers/answers requested for the <tt>Call</tt> that the original * <tt>callParticipant</tt> belonged to MUST receive the same IP/port couple * as the first one in order to allow for conferencing. The associated port * will be released once the call has ended. * * @todo implement ice. * * @return a new SDP description String advertising all params of * <tt>callSession</tt>. * * @throws MediaException code SERVICE_NOT_STARTED if this method is called * before the service was started. */ public String createSdpOffer() throws net.java.sip.communicator.service.media.MediaException { return createSessionDescription(null, null).toString(); } /** * The method is meant for use by protocol service implementations when * willing to send an invitation to a remote callee. The intendedDestination * parameter, may contain the address that the offer is to be sent to. In * case it is null we'll try our best to determine a default local address. * * @param intendedDestination the address of the call participant that the * descriptions is to be sent to. * @return a new SDP description String advertising all params of * <tt>callSession</tt>. * * @throws MediaException code SERVICE_NOT_STARTED if this method is called * before the service was started. */ public String createSdpOffer(InetAddress intendedDestination) throws net.java.sip.communicator.service.media.MediaException { return createSessionDescription(null, intendedDestination).toString(); } /** * The method is meant for use by protocol service implementations upon * reception of an SDP answer in response to an offer sent by us earlier. * * @param sdpAnswerStr the SDP answer that we'd like to handle. * @param responder the participant that has sent the answer. * * @throws MediaException code SERVICE_NOT_STARTED if this method is called * before the service was started. 
* @throws ParseException if sdpAnswerStr does not contain a valid sdp * String. */ public synchronized void processSdpAnswer(CallParticipant responder, String sdpAnswerStr) throws MediaException, ParseException { logger.trace("Parsing sdp answer: " + sdpAnswerStr); //first parse the answer SessionDescription sdpAnswer = null; try { sdpAnswer = mediaServCallback.getSdpFactory() .createSessionDescription(sdpAnswerStr); } catch (SdpParseException ex) { throw new ParseException("Failed to parse SDPOffer: " + ex.getMessage() , ex.getCharOffset()); } //extract media descriptions Vector mediaDescriptions = null; try { mediaDescriptions = sdpAnswer.getMediaDescriptions(true); } catch (SdpException exc) { logger.error("failed to extract media descriptions", exc); throw new MediaException("failed to extract media descriptions" , MediaException.INTERNAL_ERROR , exc); } //add the RTP targets this.initStreamTargets(sdpAnswer.getConnection(), mediaDescriptions); //create and init the streams (don't start streaming just yet but wait //for the call to enter the connected state). createSendStreams(mediaDescriptions); } /** * The method is meant for use by protocol service implementations when * willing to respond to an invitation received from a remote caller. Apart * from simply generating an SDP response description, the method records * details * * @param sdpOfferStr the SDP offer that we'd like to create an answer for. * @param offerer the participant that has sent the offer. * * @return a String containing an SDP answer descibing parameters of the * <tt>Call</tt> associated with this session and matching those advertised * by the caller in their <tt>sdpOffer</tt>. * * @throws MediaException code INTERNAL_ERROR if processing the offer and/or * generating the anser fail for some reason. * @throws ParseException if <tt>sdpOfferStr</tt> does not contain a valid * sdp string. */ public String processSdpOffer(CallParticipant offerer, String sdpOfferStr) throws MediaException, ParseException { //first parse the offer SessionDescription sdpOffer = null; try { sdpOffer = mediaServCallback.getSdpFactory() .createSessionDescription(sdpOfferStr); } catch (SdpParseException ex) { throw new ParseException("Failed to parse SDPOffer: " + ex.getMessage() , ex.getCharOffset()); } //create an sdp answer. SessionDescription sdpAnswer = createSessionDescription(sdpOffer, null); //extract the remote addresses. Vector mediaDescriptions = null; try { mediaDescriptions = sdpOffer.getMediaDescriptions(true); } catch (SdpException exc) { logger.error("failed to extract media descriptions", exc); throw new MediaException("failed to extract media descriptions" , MediaException.INTERNAL_ERROR , exc); } //add the RTP targets this.initStreamTargets(sdpOffer.getConnection(), mediaDescriptions); //create and init the streams (don't start streaming just yet but wait //for the call to enter the connected state). createSendStreams(mediaDescriptions); return sdpAnswer.toString(); } /** * Creates a DataSource for all encodings in the mediaDescriptions vector * and initializes send streams in our rtp managers for every stream in the * data source. * @param mediaDescriptions a <tt>Vector</tt> containing * <tt>MediaDescription</tt> instances as sent by the remote side with their * SDP description. 
* @throws MediaException if we fail to create our data source with the * proper encodings and/or fail to initialize the RTP managers with the * necessary streams and/or don't find encodings supported by both the * remote participant and the local controller. */ private void createSendStreams(Vector mediaDescriptions) throws MediaException { //extract the encodings these media descriptions specify Hashtable mediaEncodings = extractMediaEncodings(mediaDescriptions); //make our processor output in these encodings. DataSource dataSource = mediaServCallback.getMediaControl(getCall()) .createDataSourceForEncodings(mediaEncodings); //get all the steams that our processor creates as output. PushBufferStream[] streams = ((PushBufferDataSource)dataSource).getStreams(); //for each stream - determine whether it is a video or an audio //stream and assign it to the corresponding rtpmanager for (int i = 0; i < streams.length; i++) { RTPManager rtpManager = null; if(streams[i].getFormat() instanceof VideoFormat) { rtpManager = getVideoRtpManager(); } else if (streams[i].getFormat() instanceof AudioFormat) { rtpManager = getAudioRtpManager(); } else { logger.warn("We are apparently capable of sending a format " +" that is neither videro nor audio. Is " +"this really possible?:" +streams[i].getFormat()); continue; } try { SendStream stream = rtpManager.createSendStream(dataSource, i); logger.trace("Created a send stream for format " + streams[i].getFormat()); } catch (Exception exc) { throw new MediaException( "Failed to create an RTP send stream for format " + streams[i].getFormat() , MediaException.IO_ERROR , exc); } } } /** * Extracts the addresses that our interlocutor has sent for receiving media * and adds them as targets to our RTP manager. * * @param globalConnParam the global <tt>Connection</tt> (if there was one) * specified by our interlocutor outside any media description. * @param mediaDescriptions a Vector containing all media descriptions sent * by our interlocutor, that we'd use to verify whether connection level * parameters have been specified. * * @throws ParseException if there was a problem with the sdp * @throws MediaException if we simply fail to initialize the remote * addresses or set them as targets on our RTPManagers. */ private void initStreamTargets(Connection globalConnParam, Vector mediaDescriptions) throws MediaException, ParseException { try { String globalConnectionAddress = null; if (globalConnParam != null) globalConnectionAddress = globalConnParam.getAddress(); Iterator mediaDescsIter = mediaDescriptions.iterator(); while (mediaDescsIter.hasNext()) { SessionAddress target = null; MediaDescription mediaDescription = (MediaDescription) mediaDescsIter.next(); int port = mediaDescription.getMedia().getMediaPort(); String type = mediaDescription.getMedia().getMediaType(); // If there\u2019s a global address, we use it. // If there isn\u2019t a global address, we get the address from // the media Description // Fix by Pablo L. - Telefonica String address; if (globalConnectionAddress != null) { address = globalConnectionAddress; } else { address = mediaDescription.getConnection().getAddress(); } //check if we have a media level address Connection mediaLevelConnection = mediaDescription. 
getConnection(); if (mediaLevelConnection != null) { address = mediaLevelConnection.getAddress(); } InetAddress inetAddress = null; try { inetAddress = InetAddress.getByName(address); } catch (UnknownHostException exc) { throw new MediaException( "Failed to resolve address " + address , MediaException.NETWORK_ERROR , exc); } //create the session address for this media type and add it to //the RTPManager. target = new SessionAddress(inetAddress, port); /** @todo the following line assumes that we have a single rtp * manager per media type which is not necessarily true (e.g. we * may two distinct video streams: 1 for a webcam video and another * one desktop capture stream) */ RTPManager rtpManager = type.equals("video") ? getVideoRtpManager() : getAudioRtpManager(); try { rtpManager.addTarget(target); logger.trace("added target " + target + " for type " + type); } catch (Exception exc) { throw new MediaException("Failed to add RTPManager target." , MediaException.INTERNAL_ERROR , exc); } } } catch(SdpParseException exc) { throw new ParseException("Failed to parse SDP data. Error on line " + exc.getLineNumber() + " " + exc.getMessage() , exc.getCharOffset()); } } /** * Creates an SDP description of this session using the offer descirption * (if not null) for limiting. The intendedDestination parameter, which may * contain the address that the offer is to be sent to, will only be used if * the <tt>offer</tt> or its connection parameter are <tt>null</tt>. In the * oposite case we are using the address provided in the connection param as * an intended destination. * * @param offer the call participant meant to receive the offer or null if * we are to construct our own offer. * @param intendedDestination the address of the call participant that the * descriptions is to be sent to. * @return a SessionDescription of this CallSession. * * @throws MediaException code INTERNAL_ERROR if we get an SDP exception * while creating and/or parsing the sdp description. */ private SessionDescription createSessionDescription( SessionDescription offer, InetAddress intendedDestination) throws MediaException { try { SessionDescription sessDescr = mediaServCallback.getSdpFactory().createSessionDescription(); //"v=0" Version v = mediaServCallback.getSdpFactory().createVersion(0); sessDescr.setVersion(v); //we don't yet implement ice so just try to choose a local address //that corresponds to the address provided by the offer or as an //intended destination. NetworkAddressManagerService netAddressManager = MediaActivator.getNetworkAddressManagerService(); if(offer != null) { Connection c = offer.getConnection(); if(c != null) { try { intendedDestination = InetAddress.getByName(c. getAddress()); } catch (SdpParseException ex) { logger.warn("error reading remote sdp. " + c.toString() + " is not a valid connection parameter.", ex); } catch (UnknownHostException ex) { logger.warn("error reading remote sdp. " + c.toString() + " does not contain a valid address.", ex); } } } allocateMediaPorts(intendedDestination); InetAddress publicIpAddress = audioPublicAddress.getAddress(); String addrType = publicIpAddress instanceof Inet6Address ? Connection.IP6 : Connection.IP4; //spaces in the user name mess everything up. 
//bug report - Alessandro Melzi Origin o = mediaServCallback.getSdpFactory().createOrigin( call.getProtocolProvider().getAccountID().getUserID() , 0 , 0 , "IN" , addrType , publicIpAddress.getHostAddress()); sessDescr.setOrigin(o); //c= Connection c = mediaServCallback.getSdpFactory().createConnection( "IN" , addrType , publicIpAddress.getHostAddress()); sessDescr.setConnection(c); //"s=-" SessionName s = mediaServCallback.getSdpFactory().createSessionName("-"); sessDescr.setSessionName(s); //"t=0 0" TimeDescription t = mediaServCallback.getSdpFactory().createTimeDescription(); Vector timeDescs = new Vector(); timeDescs.add(t); sessDescr.setTimeDescriptions(timeDescs); //media descriptions. Vector offeredMediaDescriptions = null; if(offer != null) offeredMediaDescriptions = offer.getMediaDescriptions(false); logger.debug("Will create media descs with: audio public address=" + audioPublicAddress + " and video public address=" + videoPublicAddress); Vector mediaDescs = createMediaDescriptions(offeredMediaDescriptions , audioPublicAddress , videoPublicAddress); sessDescr.setMediaDescriptions(mediaDescs); if (logger.isTraceEnabled()) { logger.trace("Generated SDP - " + sessDescr.toString()); } return sessDescr; } catch (SdpException exc) { throw new MediaException( "An SDP exception occurred while generating local " + "sdp description" , MediaException.INTERNAL_ERROR , exc); } } /** * Creates a vector containing SDP descriptions of media types and formats * that we support. If the offerVector is non null * @param offerMediaDescs the media descriptions sent by the offerer (could * be null). * * @param publicAudioAddress the <tt>InetSocketAddress</tt> that we should * be using for sending audio. * @param publicVideoAddress the <tt>InetSocketAddress</tt> that we should * be using for sending video. * * @return a <tt>Vector</tt> containing media descriptions that we support * and (if this is an answer to an offer) that the offering * <tt>CallParticipant</tt> supports as well. * * @throws SdpException we fail creating the media descriptions * @throws MediaException with code UNSUPPORTED_FORMAT_SET_ERROR if we don't * support any of the offered media formats. */ private Vector createMediaDescriptions( Vector offerMediaDescs, InetSocketAddress publicAudioAddress, InetSocketAddress publicVideoAddress) throws SdpException ,MediaException { //supported audio formats. String[] supportedAudioEncodings = mediaServCallback .getMediaControl(getCall()) .getSupportedAudioEncodings(); //supported video formats String[] supportedVideoEncodings = mediaServCallback .getMediaControl(getCall()) .getSupportedVideoEncodings(); //if there was an offer extract the offered media formats and use //the intersection between the formats we support and those in the //offer. 
if (offerMediaDescs != null && offerMediaDescs.size() > 0) { Vector offeredVideoEncodings = new Vector(); Vector offeredAudioEncodings = new Vector(); Iterator offerDescsIter = offerMediaDescs.iterator(); while (offerDescsIter.hasNext()) { MediaDescription desc = (MediaDescription) offerDescsIter.next(); Media media = desc.getMedia(); String mediaType = media.getMediaType(); if (mediaType.equalsIgnoreCase("video")) { offeredVideoEncodings = media.getMediaFormats(true); continue; } if (mediaType.equalsIgnoreCase("audio")) { offeredAudioEncodings = media.getMediaFormats(true); continue; } } //now intersect the offered encodings with what we support Hashtable encodings = new Hashtable(2); encodings.put("audio", offeredAudioEncodings); encodings.put("video", offeredVideoEncodings); encodings = intersectMediaEncodings(encodings); List intersectedAudioEncsList = (List)encodings.get("audio"); List intersectedVideoEncsList = (List)encodings.get("video"); //now replace the encodings arrays with the intersection supportedAudioEncodings = new String[intersectedAudioEncsList.size()]; supportedVideoEncodings = new String[intersectedVideoEncsList.size()]; for (int i = 0; i < supportedAudioEncodings.length; i++) supportedAudioEncodings[i] = (String)intersectedAudioEncsList.get(i); for (int i = 0; i < supportedVideoEncodings.length; i++) supportedVideoEncodings[i] = (String)intersectedVideoEncsList.get(i); } Vector mediaDescs = new Vector(); if(supportedAudioEncodings.length > 0) { //--------Audio media description //make sure preferred formats come first MediaDescription am = mediaServCallback.getSdpFactory().createMediaDescription( "audio" , publicAudioAddress.getPort() , 1 , "RTP/AVP" , supportedAudioEncodings); if (!mediaServCallback.getDeviceConfiguration() .isAudioCaptureSupported()) { am.setAttribute("recvonly", null); } mediaDescs.add(am); } //--------Video media description if(supportedVideoEncodings.length> 0) { //"m=video 22222 RTP/AVP 34"; MediaDescription vm = mediaServCallback.getSdpFactory().createMediaDescription( "video" , publicVideoAddress.getPort() , 1 , "RTP/AVP" , supportedVideoEncodings); if (!mediaServCallback.getDeviceConfiguration() .isVideoCaptureSupported()) { vm.setAttribute("recvonly", null); } mediaDescs.add(vm); } /** @todo record formats for participant. */ return mediaDescs; } /** * Compares audio/video encodings in the <tt>offeredEncodings</tt> * hashtable with those supported by the currently valid media controller * and returns the set of those that were present in both. The hashtable * a maps "audio"/"video" specifier to a list of encodings present in both * the source <tt>offeredEncodings</tt> hashtable and the list of supported * encodings. * * @param offeredEncodings a Hashtable containing sets of encodings that an * interlocutor has sent to us. * @return a <tt>Hashtable</tt> mapping an "audio"/"video" specifier to a * list of encodings present in both the source <tt>offeredEncodings</tt> * hashtable and the list of encodings supported by the local media * controller. * @throws MediaException code UNSUPPORTED_FORMAT_SET_ERROR if the * intersection of both encoding sets does not contain any elements. 
*/ private Hashtable intersectMediaEncodings(Hashtable offeredEncodings) throws MediaException { //audio encodings supported by the media controller String[] supportedAudioEncodings = mediaServCallback .getMediaControl(getCall()) .getSupportedAudioEncodings(); //video encodings supported by the media controller String[] supportedVideoEncodings = mediaServCallback .getMediaControl(getCall()) .getSupportedVideoEncodings(); //audio encodings offered by the remote party List offeredAudioEncodings = (List)offeredEncodings.get("audio"); //video encodings offered by the remote party List offeredVideoEncodings = (List)offeredEncodings.get("video"); //recreate the formats we create according to what the other party //offered. List supportedAudioEncsList = Arrays.asList(supportedAudioEncodings); List intersectedAudioEncsList = new LinkedList(); List supportedVideoEncsList = Arrays.asList(supportedVideoEncodings); List intersectedVideoEncsList = new LinkedList(); //intersect supported audio formats with offered audio formats if (offeredAudioEncodings != null && offeredAudioEncodings.size() > 0) { Iterator offeredAudioEncsIter = offeredAudioEncodings.iterator(); while (offeredAudioEncsIter.hasNext()) { String format = (String) offeredAudioEncsIter.next(); if (supportedAudioEncsList.contains(format)) intersectedAudioEncsList.add(format); } } if (offeredVideoEncodings != null && offeredVideoEncodings.size() > 0) { //intersect supported video formats with offered video formats Iterator offeredVideoEncsIter = offeredVideoEncodings.iterator(); while (offeredVideoEncsIter.hasNext()) { String format = (String) offeredVideoEncsIter.next(); if (supportedVideoEncsList.contains(format)) intersectedVideoEncsList.add(format); } } //if the intersection contains no common formats then we need to //bail. if (intersectedAudioEncsList.size() == 0 && intersectedVideoEncsList.size() == 0) { throw new MediaException( "None of the offered formats was supported by this " + "media implementation" , MediaException.UNSUPPORTED_FORMAT_SET_ERROR); } Hashtable intersection = new Hashtable(2); intersection.put("audio", intersectedAudioEncsList); intersection.put("video", intersectedVideoEncsList); return intersection; } /** * Returns a <tt>Hashtable</tt> mapping media types (e.g. audio or video) * to lists of JMF encoding strings corresponding to the SDP formats * specified in the <tt>mediaDescriptions</tt> vector. * @param mediaDescriptions a <tt>Vector</tt> containing * <tt>MediaDescription</tt> instances extracted from an SDP offer or * answer. * @return a <tt>Hashtable</tt> mapping media types (e.g. audio or video) * to lists of JMF encoding strings corresponding to the SDP formats * specified in the <tt>mediaDescriptions</tt> vector. 
*/ private Hashtable extractMediaEncodings(Vector mediaDescriptions) { Hashtable mediaEncodings = new Hashtable(2); Iterator descriptionsIter = mediaDescriptions.iterator(); while(descriptionsIter.hasNext()) { MediaDescription mediaDescription = (MediaDescription)descriptionsIter.next(); Media media = mediaDescription.getMedia(); Vector mediaFormats = null; String mediaType = null; try { mediaFormats = media.getMediaFormats(true); mediaType = media.getMediaType(); } catch (SdpParseException ex) { //this shouldn't happen since nist-sdp is not doing //lasy parsing but log anyway logger.warn("Error parsing sdp.",ex); continue; } if(mediaFormats.size() > 0) { List jmfEncodings = MediaUtils.sdpToJmfEncodings(mediaFormats); if(jmfEncodings.size() > 0) mediaEncodings.put(mediaType, jmfEncodings); } } logger.trace("Possible media encodings="+mediaEncodings); return mediaEncodings; } /** * Create the RTP managers and bind them on some ports. */ private void initializePortNumbers() { //first reset to default values minPortNumber = 5000; maxPortNumber = 6000; //then set to anything the user might have specified. String minPortNumberStr = MediaActivator.getConfigurationService() .getString(MediaService.MIN_PORT_NUMBER_PROPERTY_NAME); if (minPortNumberStr != null) { try{ minPortNumber = Integer.parseInt(minPortNumberStr); }catch (NumberFormatException ex){ logger.warn(minPortNumberStr + " is not a valid min port number value. " +"using min port " + minPortNumber); } } String maxPortNumberStr = MediaActivator.getConfigurationService() .getString(MediaService.MAX_PORT_NUMBER_PROPERTY_NAME); if (maxPortNumberStr != null) { try{ maxPortNumber = Integer.parseInt(maxPortNumberStr); }catch (NumberFormatException ex){ logger.warn(maxPortNumberStr + " is not a valid max port number value. " +"using max port " + maxPortNumber, ex); } } } /** * Allocates a local port for the RTP manager, tries to obtain a public * address for it and after succeeding makes the network address manager * protect the address until we are ready to bind on it. * * @param intendedDestination a destination that the rtp manager would be * communicating with. * @param sessionAddress the sessionAddress that we're locally bound on. * @param bindRetries the number of times that we need to retry a bind. * * @return the SocketAddress the public address that the network address * manager returned for the session address that we're bound on. * * @throws MediaException if we fail to initialize rtp manager. */ private InetSocketAddress allocatePort(InetAddress intendedDestination, SessionAddress sessionAddress, int bindRetries) throws MediaException { InetSocketAddress publicAddress = null; boolean initialized = false; NetworkAddressManagerService netAddressManager = MediaActivator.getNetworkAddressManagerService(); //try to initialize a public address for the rtp manager. for (int i = bindRetries; i > 0; i--) { //first try to obtain a binding for the address. try { publicAddress = netAddressManager .getPublicAddressFor(intendedDestination, sessionAddress.getDataPort()); initialized =true; break; } catch (IOException ex) { logger.warn("Retrying a bind because of a failure. " + "Failed Address is: " + sessionAddress.toString(), ex); //reinit the session address we tried with and prepare to retry. sessionAddress .setDataPort(sessionAddress.getDataPort()+2); sessionAddress .setControlPort(sessionAddress.getControlPort()+2); } } if(!initialized) throw new MediaException("Failed to bind to a local port in " + Integer.toString(bindRetries) + " tries." 
, MediaException.INTERNAL_ERROR); return publicAddress; } /** * Looks for free ports and initializes the RTP manager according toe the * specified <tt>intendedDestination</tt>. * * @param intendedDestination the InetAddress that we will be transmitting * to. * @throws MediaException if we fail initializing the RTP managers. */ private void allocateMediaPorts(InetAddress intendedDestination) throws MediaException { InetAddress inAddrAny = null; try { //create an ipv4 any address since it also works when accepting //ipv6 connections. inAddrAny = InetAddress.getByName(NetworkUtils.IN_ADDR_ANY); } catch (UnknownHostException ex) { //this shouldn't happen. throw new MediaException("Failed to create the ANY inet address." , MediaException.INTERNAL_ERROR , ex); } //check the number of times that we'd have to rety binding to local //ports before giving up. String bindRetriesStr = MediaActivator.getConfigurationService().getString( MediaService.BIND_RETRIES_PROPERTY_NAME); int bindRetries = MediaService.BIND_RETRIES_DEFAULT_VALUE; try { if(bindRetriesStr != null && bindRetriesStr.length() > 0) bindRetries = Integer.parseInt(bindRetriesStr); } catch (NumberFormatException ex) { logger.warn(bindRetriesStr + " is not a valid value for number of bind retries." , ex); } //initialize audio rtp manager. audioSessionAddress = new SessionAddress(inAddrAny, minPortNumber); audioPublicAddress = allocatePort(intendedDestination, audioSessionAddress, bindRetries); logger.debug("AudioSessionAddress="+audioSessionAddress); logger.debug("AudioPublicAddress="+audioPublicAddress); //augment min port number so that no one else tries to bind here. minPortNumber = audioSessionAddress.getDataPort() + 2; //initialize video rtp manager. videoSessionAddress = new SessionAddress(inAddrAny, minPortNumber); videoPublicAddress = allocatePort(intendedDestination, videoSessionAddress, bindRetries); //augment min port number so that no one else tries to bind here. minPortNumber = videoSessionAddress.getDataPort() + 2; //if we have reached the max port number - reinit. if(minPortNumber > maxPortNumber -2) initializePortNumbers(); //now init the rtp managers and make them bind initializeRtpManager(audioRtpManager, audioSessionAddress); initializeRtpManager(videoRtpManager, videoSessionAddress); } /** * Initializes the RTP manager so that it would start listening on the * <tt>address</tt> session address. The method also initializes the RTP * manager buffer control. * * @param rtpManager the <tt>RTPManager</tt> to initialize. * @param bindAddress the <tt>SessionAddress</tt> to use when initializing the * RTPManager. * * @throws MediaException if we fail to initialize the RTP manager. */ private void initializeRtpManager(RTPManager rtpManager, SessionAddress bindAddress) throws MediaException { try { rtpManager.initialize(bindAddress); } catch (Exception exc) { logger.error("Failed to init an RTP manager.", exc); throw new MediaException("Failed to init an RTP manager." , MediaException.IO_ERROR , exc); } //it appears that if we don't do this managers don't play // You can try out some other buffer size to see // if you can get better smoothness. 
BufferControl bc = (BufferControl)rtpManager .getControl(BufferControl.class.getName()); if (bc != null) { long buff = 500; String buffStr = MediaActivator.getConfigurationService() .getString(PROPERTY_NAME_RECEIVE_BUFFER_LENGTH); try { if(buffStr != null && buffStr.length() > 0) buff = Long.parseLong(buffStr); } catch (NumberFormatException exc) { logger.warn(buffStr + " is not a valid receive buffer value (integer)." , exc); } buff = bc.setBufferLength(buff); logger.trace("set receiver buffer len to=" + buff); bc.setEnabledThreshold(true); bc.setMinimumThreshold(100); } //add listeners rtpManager.addReceiveStreamListener(this); rtpManager.addSendStreamListener(this); rtpManager.addSessionListener(this); } /** * Registers the RTP formats which are supported by SIP Communicator in addition * to the JMF standard formats. This has to be done for every RTP Manager instance. * * @param rtpManager The manager with which to register the formats. * @see MediaControl#registerCustomCodecs() */ static void registerCustomCodecFormats(RTPManager rtpManager) { for (int i=0; i<CUSTOM_CODEC_FORMATS.length; i++) { javax.media.Format format = CUSTOM_CODEC_FORMATS[i]; logger.debug("registering format " + format + " with RTP manager"); /* * NOTE ([email protected]): com.sun.media.rtp.RtpSessionMgr.addFormat * leaks memory, since it stores the Format in a static Vector. * AFAIK there is no easy way around it, but the memory impact * should not be too bad. */ rtpManager.addFormat( format, MediaUtils.jmfToSdpEncoding(format.getEncoding())); } } /** * Indicates that a change has occurred in the state of the source call. * @param evt the <tt>CallChangeEvent</tt> instance containing the source * calls and its old and new state. */ public void callStateChanged(CallChangeEvent evt) { if( evt.getNewValue() == CallState.CALL_IN_PROGRESS && evt.getNewValue() != evt.getOldValue()) { try { startStreaming(); mediaServCallback.getMediaControl(getCall()) .startProcessingMedia(this); } catch (MediaException ex) { /** @todo need to notify someone */ logger.error("Failed to start streaming.", ex); } } else if( evt.getNewValue() == CallState.CALL_ENDED && evt.getNewValue() != evt.getOldValue()) { stopStreaming(); mediaServCallback.getMediaControl(getCall()) .stopProcessingMedia(this); //close all players that we have created in this session Iterator playersIter = players.iterator(); while(playersIter.hasNext()) { Player player = ( Player )playersIter.next(); player.stop(); player.deallocate(); player.close(); playersIter.remove(); } //close all video frames that we have created in this session Iterator videoFramesIter = videoFrames.iterator(); while(videoFramesIter.hasNext()) { javax.swing.JFrame frame = ( javax.swing.JFrame )videoFramesIter.next(); frame.setVisible(false); frame.dispose(); videoFramesIter.remove(); } //remove ourselves as listeners from the call evt.getSourceCall().removeCallChangeListener(this); RTPManager audioRtpMan = getAudioRtpManager(); if(audioRtpMan != null) audioRtpMan.dispose(); RTPManager videoRtpMan = getVideoRtpManager(); if(videoRtpMan != null) videoRtpMan.dispose(); } } /** * Indicates that a change has occurred in the status of the source * CallParticipant. * * @param evt The <tt>CallParticipantChangeEvent</tt> instance containing * the source event as well as its previous and its new status. */ public void participantStateChanged(CallParticipantChangeEvent evt) { /** @todo implement participantStateChanged() */ /** @todo remove target for participant. 
*/ } /** * Indicates that a new call participant has joined the source call. * @param evt the <tt>CallParticipantEvent</tt> containing the source call * and call participant. */ public synchronized void callParticipantAdded(CallParticipantEvent evt) { CallParticipant sourceParticipant = evt.getSourceCallParticipant(); sourceParticipant.addCallParticipantListener(this); } /** * Indicates that a call participant has left the source call. * @param evt the <tt>CallParticipantEvent</tt> containing the source call * and call participant. */ public void callParticipantRemoved(CallParticipantEvent evt) { } //-------- dummy implementations of listener methods that we don't need /** * Ignore - we're not concerned by this event inside a call session. * * @param evt ignore. */ public void participantImageChanged(CallParticipantChangeEvent evt) { } /** * Ignore - we're not concerned by this event inside a call session. * * @param evt ignore. */ public void participantDisplayNameChanged(CallParticipantChangeEvent evt) { } /** * Ignore - we're not concerned by this event inside a call session. * * @param evt ignore. */ public void participantTransportAddressChanged( CallParticipantChangeEvent evt) { /** @todo i am not sure we should be ignoring this one ... */ } /** * Ignore - we're not concerned by this event inside a call session. * * @param evt ignore. */ public void participantAddressChanged(CallParticipantChangeEvent evt) { } //implementation of jmf listener methods /** * Method called back in the SessionListener to notify * listener of all Session Events.SessionEvents could be one * of NewParticipantEvent or LocalCollisionEvent. * * @param event the newly received SessionEvent */ public synchronized void update(SessionEvent event) { if (event instanceof NewParticipantEvent) { Participant participant = ( (NewParticipantEvent) event).getParticipant(); if (logger.isDebugEnabled()) { logger.debug("A new participant had just joined: " + participant.getCNAME()); } } else { if (logger.isDebugEnabled()) { logger.debug( "Received the following JMF Session event - " + event.getClass().getName() + "=" + event); } } } /** * Method called back in the RTPSessionListener to notify * listener of all SendStream Events. * * @param event the newly received SendStreamEvent */ public synchronized void update(SendStreamEvent event) { logger.debug( "received the following JMF SendStreamEvent - " + event.getClass().getName() + "="+ event); } /** * Method called back in the RTPSessionListener to notify * listener of all ReceiveStream Events. * * @param evt the newly received ReceiveStreamEvent */ public synchronized void update(ReceiveStreamEvent evt) { RTPManager mgr = (RTPManager) evt.getSource(); Participant participant = evt.getParticipant(); // could be null. ReceiveStream stream = evt.getReceiveStream(); // could be null. if (evt instanceof NewReceiveStreamEvent) { try { logger.debug("received a new incoming stream. " + evt); stream = ( (NewReceiveStreamEvent) evt).getReceiveStream(); DataSource ds = stream.getDataSource(); // Find out the formats. 
RTPControl ctl = (RTPControl) ds.getControl( "javax.media.rtp.RTPControl"); if (logger.isDebugEnabled()) { if (ctl != null) { logger.debug("Received new RTP stream: " + ctl.getFormat()); } else { logger.debug("Received new RTP stream"); } } Player player = null; //if we are using a custom destination, create a processor //if not, a player will suffice if (dataSink != null) { player = Manager.createProcessor(ds); } else { player = Manager.createPlayer(ds); } player.addControllerListener(this); //a processor needs to be configured then realized. if (dataSink != null) { ((Processor)player).configure(); } else { player.realize(); } players.add(player); } catch (Exception e) { logger.error("NewReceiveStreamEvent exception ", e); return; } } else if (evt instanceof StreamMappedEvent) { if (stream != null && stream.getDataSource() != null) { DataSource ds = stream.getDataSource(); // Find out the formats. RTPControl ctl = (RTPControl) ds.getControl( "javax.media.rtp.RTPControl"); if (logger.isDebugEnabled()) { String msg = "The previously unidentified stream "; if (ctl != null) { msg += ctl.getFormat(); } msg += " had now been identified as sent by: " + participant.getCNAME(); logger.debug(msg); } } } else if (evt instanceof ByeEvent) { logger.debug("Got \"bye\" from: " + participant.getCNAME()); } } /** * This method is called when an event is generated by a * <code>Controller</code> that this listener is registered with. * @param ce The event generated. */ public synchronized void controllerUpdate(ControllerEvent ce) { logger.debug("Received a ControllerEvent: " + ce); Player player = (Player) ce.getSourceController(); if (player == null) { return; } //if configuration is completed and this is a processor //we need to set file format and explicitly call realize(). if (ce instanceof ConfigureCompleteEvent) { try { ((Processor)player).setContentDescriptor( new FileTypeDescriptor(FileTypeDescriptor.WAVE)); player.realize(); } catch (Exception exc) { logger.error("failed to record to file", exc); } } // Get this when the internal players are realized. if (ce instanceof RealizeCompleteEvent) { //set the volume as it is not on max by default. //XXX: I am commenting this since apparently it is causing some //problems on windows. 
//GainControl gc // = (GainControl)player.getControl(GainControl.class.getName()); //if (gc != null) //{ // logger.debug("Setting volume to max"); // gc.setLevel(1); //} //else // logger.debug("Player does not have gain control."); logger.debug("A player was realized and will be started."); player.start(); if (dataSink != null) { try { logger.info("starting recording to file: "+dataSink); MediaLocator dest = new MediaLocator(dataSink); DataSource ds = ((Processor)player).getDataOutput(); DataSink sink = Manager.createDataSink( ((Processor)player).getDataOutput(), dest); player.start(); //do we know the output file's duration RecordInitiator record = new RecordInitiator(sink); record.start(); } catch(Exception e) { logger.error("failed while trying to record to file",e); } } else { player.start(); } /** @todo video frame is currently handled with very ugly test code * please don't forget to remove */ //------------ ugly video test code starts here -------------------- java.awt.Component vc = player.getVisualComponent(); if(vc != null) { javax.swing.JFrame frame = new javax.swing.JFrame(); frame.setTitle("SIP Communicator - Video Call"); frame.getContentPane().add(vc); frame.pack(); //center java.awt.Dimension frameSize = frame.getSize(); //ugly resize if too tiny if(frameSize.width < 300) { frame.setSize(frameSize.width * 2, frameSize.height * 2); frameSize = frame.getSize(); } java.awt.Dimension screenSize = java.awt.Toolkit.getDefaultToolkit().getScreenSize(); frame.setLocation((screenSize.width - frameSize.width)/2 ,(screenSize.height - frameSize.height)/2); frame.setVisible(true); videoFrames.add(frame); } //------------- ugly video test code ends here --------------------- } if (ce instanceof StartEvent) { logger.debug("Received a StartEvent"); } if (ce instanceof ControllerErrorEvent) { logger.error( "The following error was reported while starting a player" + ce); } if (ce instanceof ControllerClosedEvent) { logger.debug("Received a ControllerClosedEvent"); } } /** * The record initiator is started after taking a call that is supposed to * be answered by a mailbox plug-in. It waits for the outgoing message to * stop transmitting and starts recording whatever comes after that. */ private class RecordInitiator extends Thread { private DataSink sink; public RecordInitiator(DataSink sink) { this.sink = sink; } public void run() { //determine how long to wait for the outgoing //message to stop playing javax.media.Time timeToWait = mediaServCallback .getMediaControl(call) .getOutputDuration(); //if the time is unknown, we will start recording immediately if (timeToWait != javax.media.Time.TIME_UNKNOWN) { double millisToWait = timeToWait.getSeconds() * 1000; long timeStartedPlaying = System.currentTimeMillis(); while (System.currentTimeMillis() < timeStartedPlaying + millisToWait) { try { Thread.sleep(100); } catch (InterruptedException e) { logger.error("Interrupted while waiting to start " + "recording incoming message",e); } } } //open the dataSink and start recording try { sink.open(); sink.start(); } catch (IOException e) { logger.error("IO Exception while attempting to start " + "recording incoming message",e); } } } }
Setting the receive buffer to 100ms
src/net/java/sip/communicator/impl/media/CallSessionImpl.java
Setting the receive buffer to 100ms
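A minimal stand-alone sketch of the receive-buffer change this commit subject describes, assuming an already created and initialized JMF RTPManager. The 100 ms buffer length comes from the subject line; the 50 ms threshold is only an illustrative value, not taken from the patch itself.

import javax.media.control.BufferControl;
import javax.media.rtp.RTPManager;

public class ReceiveBufferSketch
{
    /**
     * Shrinks the jitter buffer of the given RTP manager to roughly 100 ms.
     * The manager is assumed to have been created and initialized elsewhere.
     */
    static void setReceiveBuffer(RTPManager rtpManager)
    {
        BufferControl bc = (BufferControl) rtpManager
            .getControl(BufferControl.class.getName());

        if (bc != null)
        {
            bc.setBufferLength(100);      // receive buffer length in milliseconds
            bc.setEnabledThreshold(true); // wait for the minimum threshold before playback
            bc.setMinimumThreshold(50);   // illustrative threshold, not from the patch
        }
    }
}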
Java
apache-2.0
325142dda849c7150f41b975114cccab90608da6
0
ashwinrayaprolu1984/ringojs,ringo/ringojs,Transcordia/ringojs,Transcordia/ringojs,ashwinrayaprolu1984/ringojs,Transcordia/ringojs,ashwinrayaprolu1984/ringojs,ashwinrayaprolu1984/ringojs,ringo/ringojs,oberhamsi/ringojs,oberhamsi/ringojs,Transcordia/ringojs,ringo/ringojs,oberhamsi/ringojs,ringo/ringojs
/* * Helma License Notice * * The contents of this file are subject to the Helma License * Version 2.0 (the "License"). You may not use this file except in * compliance with the License. A copy of the License is available at * http://adele.helma.org/download/helma/license.txt * * Copyright 1998-2003 Helma Software. All Rights Reserved. * * $RCSfile: ZipRepository.java,v $ * $Author: hannes $ * $Revision: 1.11 $ * $Date: 2006/04/07 14:37:11 $ */ package org.ringojs.repository; import org.ringojs.util.StringUtils; import java.io.File; import java.io.IOException; import java.util.*; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipException; import java.net.URL; import java.net.MalformedURLException; import java.lang.ref.WeakReference; public final class ZipRepository extends AbstractRepository { // zip file serving sub-repositories and zip file resources private File file; // weak reference to the zip file private WeakReference<ZipFile> zipFile; // the relative path of this repository within the zip file private final String entryPath; // the nested directory depth of this repository within the zip file private int depth; private long lastModified = -1; private boolean exists; /** * Constructs a ZipRespository using the given zip file. * @param path path to zip file * @throws ZipException a zip encoding related error occurred * @throws IOException an I/O error occurred */ public ZipRepository(String path) throws ZipException, IOException { this(new File(path)); } /** * Constructs a ZipRespository using the given zip file. * @param file zip file * @throws ZipException a zip encoding related error occurred * @throws IOException an I/O error occurred */ public ZipRepository(File file) throws ZipException, IOException { // make sure our file has an absolute path, // see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4117557 if (!file.isAbsolute()) { file = file.getAbsoluteFile(); } this.file = file; this.parent = null; name = file.getName(); path = file.getPath() + '/'; depth = 0; entryPath = ""; } /** * Constructs a ZipRepository using the zip entryName belonging to the given * zip file and top-level repository * @param file the zip file * @param parent repository * @param entryPath the entry path name * @throws ZipException a zip encoding related error occurred * @throws IOException an I/O error occurred */ private ZipRepository(File file, ZipRepository parent, String entryPath) throws ZipException, IOException { if (entryPath == null) { throw new NullPointerException("entryPath must not be null"); } this.file = file; this.parent = parent; this.entryPath = entryPath; String[] pathArray = StringUtils.split(entryPath, SEPARATOR); depth = pathArray.length; name = pathArray[depth - 1]; path = parent.getPath() + name + '/'; } /** * Returns a java.util.zip.ZipFile for this repository. * @return a ZipFile for reading * @throws IOException an I/O related error occurred */ protected synchronized ZipFile getZipFile() throws IOException { if (parent instanceof ZipRepository) { return ((ZipRepository) parent).getZipFile(); } ZipFile zip = zipFile == null ? 
null : zipFile.get(); if (zip == null || lastModified != file.lastModified()) { if (zip != null) { try { zip.close(); } catch (Exception ignore) {} } zip = new ZipFile(file); zipFile = new WeakReference<ZipFile>(zip); lastModified = file.lastModified(); } return zip; } private String getChildName(String name) { if (entryPath.length() == 0) { return name; } else if (entryPath.endsWith("/")) { return entryPath + name; } else { return entryPath + "/" + name; } } /** * Called to create a child resource for this repository */ @Override protected Resource lookupResource(String name) throws IOException { AbstractResource res = resources.get(name); if (res == null) { String childName = getChildName(name); ZipEntry entry = getZipFile().getEntry(childName); res = new ZipResource(childName, this, entry); resources.put(name, res); } return res; } /** * Checks wether this resource actually (still) exists * @return true if the resource exists */ public boolean exists() throws IOException { if (lastModified != file.lastModified()) { try { ZipFile zip = getZipFile(); exists = entryPath.length() == 0 ? zip != null : zip.getEntry(entryPath) != null; } catch (IOException ex) { exists = false; } } return exists; } protected AbstractRepository createChildRepository(String name) throws IOException { return new ZipRepository(file, this, getChildName(name)); } protected void getResources(List<Resource> list, boolean recursive) throws IOException { Map<String,ZipEntry> entries = getChildEntries(); for (Map.Entry<String, ZipEntry> entry : entries.entrySet()) { String entryName = entry.getKey(); if (!entry.getValue().isDirectory()) { AbstractResource res = resources.get(entryName); if (res == null) { ZipEntry zipEntry = entry.getValue(); res = new ZipResource(zipEntry.getName(), this, zipEntry); resources.put(entryName, res); } list.add(res); } else if (recursive) { lookupRepository(entryName).getResources(list, true); } } } public Repository[] getRepositories() throws IOException { List<Repository> list = new ArrayList<Repository>(); Map<String,ZipEntry> entries = getChildEntries(); for (Map.Entry<String, ZipEntry> entry : entries.entrySet()) { if (entry.getValue().isDirectory()) { list.add(lookupRepository(entry.getKey())); } } return list.toArray(new Repository[list.size()]); } public URL getUrl() throws MalformedURLException { // return a Jar URL as described in // http://java.sun.com/j2se/1.5.0/docs/api/java/net/JarURLConnection.html if (parent instanceof ZipRepository) { return new URL(parent.getUrl() + name + "/"); } else { String baseUrl = "jar:file:" + file + "!/"; return entryPath.length() == 0 ? 
new URL(baseUrl) : new URL(baseUrl + entryPath + "/"); } } public long lastModified() { return file.lastModified(); } public long getChecksum() { return file.lastModified(); } @Override public int hashCode() { return 17 + (37 * file.hashCode()) + (37 * path.hashCode()); } @Override public boolean equals(Object obj) { if (!(obj instanceof ZipRepository)) { return false; } ZipRepository rep = (ZipRepository) obj; return (file.equals(rep.file) && path.equals(rep.path)); } @Override public String toString() { return "ZipRepository[" + path + "]"; } private Map<String, ZipEntry> getChildEntries() throws IOException { ZipFile zipfile = getZipFile(); Map<String, ZipEntry> map = new TreeMap<String, ZipEntry>(); Enumeration en = zipfile.entries(); while (en.hasMoreElements()) { ZipEntry entry = (ZipEntry) en.nextElement(); String entryName = entry.getName(); if (!entryName.regionMatches(0, entryPath, 0, entryPath.length())) { // names don't match - not a child of ours continue; } String[] entrypath = StringUtils.split(entryName, SEPARATOR); if (depth > 0 && !name.equals(entrypath[depth-1])) { // catch case where our name is Foo and other's is FooBar continue; } if (entrypath.length == depth + 1) { map.put(entrypath[depth], entry); } } return map; } }
src/org/ringojs/repository/ZipRepository.java
/* * Helma License Notice * * The contents of this file are subject to the Helma License * Version 2.0 (the "License"). You may not use this file except in * compliance with the License. A copy of the License is available at * http://adele.helma.org/download/helma/license.txt * * Copyright 1998-2003 Helma Software. All Rights Reserved. * * $RCSfile: ZipRepository.java,v $ * $Author: hannes $ * $Revision: 1.11 $ * $Date: 2006/04/07 14:37:11 $ */ package org.ringojs.repository; import org.ringojs.util.StringUtils; import java.io.File; import java.io.IOException; import java.util.*; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipException; import java.net.URL; import java.net.MalformedURLException; import java.lang.ref.WeakReference; public final class ZipRepository extends AbstractRepository { // zip file serving sub-repositories and zip file resources private File file; // weak reference to the zip file private WeakReference<ZipFile> zipFile; // the relative path of this repository within the zip file private final String entryPath; // the nested directory depth of this repository within the zip file private int depth; private long lastModified = -1; private boolean exists; /** * Constructs a ZipRespository using the given zip file. * @param path path to zip file * @throws ZipException a zip encoding related error occurred * @throws IOException an I/O error occurred */ public ZipRepository(String path) throws ZipException, IOException { this(new File(path)); } /** * Constructs a ZipRespository using the given zip file. * @param file zip file * @throws ZipException a zip encoding related error occurred * @throws IOException an I/O error occurred */ public ZipRepository(File file) throws ZipException, IOException { // make sure our file has an absolute path, // see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4117557 this.file = file.isAbsolute() ? file : file.getAbsoluteFile(); this.parent = null; name = file.getName(); path = file.getAbsolutePath() + '/'; depth = 0; entryPath = ""; } /** * Constructs a ZipRepository using the zip entryName belonging to the given * zip file and top-level repository * @param file the zip file * @param parent repository * @param entryPath the entry path name * @throws ZipException a zip encoding related error occurred * @throws IOException an I/O error occurred */ private ZipRepository(File file, ZipRepository parent, String entryPath) throws ZipException, IOException { if (entryPath == null) { throw new NullPointerException("entryPath must not be null"); } this.file = file; this.parent = parent; this.entryPath = entryPath; String[] pathArray = StringUtils.split(entryPath, SEPARATOR); depth = pathArray.length; name = pathArray[depth - 1]; path = parent.getPath() + name + '/'; } /** * Returns a java.util.zip.ZipFile for this repository. * @return a ZipFile for reading * @throws IOException an I/O related error occurred */ protected synchronized ZipFile getZipFile() throws IOException { if (parent instanceof ZipRepository) { return ((ZipRepository) parent).getZipFile(); } ZipFile zip = zipFile == null ? 
null : zipFile.get(); if (zip == null || lastModified != file.lastModified()) { if (zip != null) { try { zip.close(); } catch (Exception ignore) {} } zip = new ZipFile(file); zipFile = new WeakReference<ZipFile>(zip); lastModified = file.lastModified(); } return zip; } private String getChildName(String name) { if (entryPath.length() == 0) { return name; } else if (entryPath.endsWith("/")) { return entryPath + name; } else { return entryPath + "/" + name; } } /** * Called to create a child resource for this repository */ @Override protected Resource lookupResource(String name) throws IOException { AbstractResource res = resources.get(name); if (res == null) { String childName = getChildName(name); ZipEntry entry = getZipFile().getEntry(childName); res = new ZipResource(childName, this, entry); resources.put(name, res); } return res; } /** * Checks wether this resource actually (still) exists * @return true if the resource exists */ public boolean exists() throws IOException { if (lastModified != file.lastModified()) { try { ZipFile zip = getZipFile(); exists = entryPath.length() == 0 ? zip != null : zip.getEntry(entryPath) != null; } catch (IOException ex) { exists = false; } } return exists; } protected AbstractRepository createChildRepository(String name) throws IOException { return new ZipRepository(file, this, getChildName(name)); } protected void getResources(List<Resource> list, boolean recursive) throws IOException { Map<String,ZipEntry> entries = getChildEntries(); for (Map.Entry<String, ZipEntry> entry : entries.entrySet()) { String entryName = entry.getKey(); if (!entry.getValue().isDirectory()) { AbstractResource res = resources.get(entryName); if (res == null) { ZipEntry zipEntry = entry.getValue(); res = new ZipResource(zipEntry.getName(), this, zipEntry); resources.put(entryName, res); } list.add(res); } else if (recursive) { lookupRepository(entryName).getResources(list, true); } } } public Repository[] getRepositories() throws IOException { List<Repository> list = new ArrayList<Repository>(); Map<String,ZipEntry> entries = getChildEntries(); for (Map.Entry<String, ZipEntry> entry : entries.entrySet()) { if (entry.getValue().isDirectory()) { list.add(lookupRepository(entry.getKey())); } } return list.toArray(new Repository[list.size()]); } public URL getUrl() throws MalformedURLException { // return a Jar URL as described in // http://java.sun.com/j2se/1.5.0/docs/api/java/net/JarURLConnection.html if (parent instanceof ZipRepository) { return new URL(parent.getUrl() + name + "/"); } else { String baseUrl = "jar:file:" + file + "!/"; return entryPath.length() == 0 ? 
new URL(baseUrl) : new URL(baseUrl + entryPath + "/"); } } public long lastModified() { return file.lastModified(); } public long getChecksum() { return file.lastModified(); } @Override public int hashCode() { return 17 + (37 * file.hashCode()) + (37 * path.hashCode()); } @Override public boolean equals(Object obj) { if (!(obj instanceof ZipRepository)) { return false; } ZipRepository rep = (ZipRepository) obj; return (file.equals(rep.file) && path.equals(rep.path)); } @Override public String toString() { return "ZipRepository[" + path + "]"; } private Map<String, ZipEntry> getChildEntries() throws IOException { ZipFile zipfile = getZipFile(); Map<String, ZipEntry> map = new TreeMap<String, ZipEntry>(); Enumeration en = zipfile.entries(); while (en.hasMoreElements()) { ZipEntry entry = (ZipEntry) en.nextElement(); String entryName = entry.getName(); if (!entryName.regionMatches(0, entryPath, 0, entryPath.length())) { // names don't match - not a child of ours continue; } String[] entrypath = StringUtils.split(entryName, SEPARATOR); if (depth > 0 && !name.equals(entrypath[depth-1])) { // catch case where our name is Foo and other's is FooBar continue; } if (entrypath.length == depth + 1) { map.put(entrypath[depth], entry); } } return map; } }
Minor cleanup in ZipRepository constructor
src/org/ringojs/repository/ZipRepository.java
Minor cleanup in ZipRepository constructor
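The only visible difference between the two ZipRepository constructors above is that the new version normalizes the File first and then uses getPath(), where the old one used getAbsolutePath(). A minimal sketch, with a hypothetical zip path, of why the two are equivalent once the file has been made absolute:

import java.io.File;

public class ZipPathSketch {
    public static void main(String[] args) {
        File file = new File("modules/core.zip"); // hypothetical relative path
        if (!file.isAbsolute()) {
            file = file.getAbsoluteFile();
        }
        // Once the file is absolute, getPath() and getAbsolutePath() return the
        // same string, so the constructor cleanup does not change behaviour.
        System.out.println(file.getPath().equals(file.getAbsolutePath())); // prints true
    }
}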
Java
apache-2.0
7344bf4f8ae4487a6b8e0a9f3609105382c3ae3c
0
javild/opencga,j-coll/opencga,opencb/opencga,javild/opencga,opencb/opencga,javild/opencga,opencb/opencga,j-coll/opencga,j-coll/opencga,opencb/opencga,javild/opencga,j-coll/opencga,javild/opencga,javild/opencga,opencb/opencga,opencb/opencga,j-coll/opencga,j-coll/opencga
package org.opencb.opencga.core.common; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.nio.file.Paths; /** * Created by hpccoll1 on 11/05/15. */ public class UriUtils { public static void checkUri(URI uri, String uriName, String schema) throws IOException { if(uri == null || uri.getScheme() != null && !uri.getScheme().equals(schema)) { throw new IOException("Expected file:// uri scheme for " + uriName); } } public static URI createUri(String input) throws URISyntaxException { URI sourceUri = new URI(null, input, null); if (sourceUri.getScheme() == null || sourceUri.getScheme().isEmpty()) { sourceUri = Paths.get(input).toUri(); } return sourceUri; } public static URI createDirectoryUri(String input) throws URISyntaxException { URI uri = createUri(input); // If path does not ends with / , create a new URI with path + "/" if(!uri.getPath().endsWith("/")) { uri = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), uri.getPath() + "/", uri.getQuery(), uri.getFragment()); } return uri; } }
opencga-core/src/main/java/org/opencb/opencga/core/common/UriUtils.java
package org.opencb.opencga.core.common; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.nio.file.Paths; /** * Created by hpccoll1 on 11/05/15. */ public class UriUtils { public static void checkUri(URI uri, String uriName, String schema) throws IOException { if(uri == null || uri.getScheme() != null && !uri.getScheme().equals(schema)) { throw new IOException("Expected file:// uri scheme for " + uriName); } } public static URI createUri(String input) throws URISyntaxException { URI sourceUri = new URI(null, input, null); if (sourceUri.getScheme() == null || sourceUri.getScheme().isEmpty()) { sourceUri = Paths.get(input).toUri(); } return sourceUri; } public static URI createDirectoryUri(String input) throws URISyntaxException { if(!input.endsWith("/")) { input += "/"; } return createUri(input); } }
core: Fix UriUtils createDirectoryUri. It did not create a directory URI (one ending with "/") when the given input had no scheme and the file did not exist in the file system.
opencga-core/src/main/java/org/opencb/opencga/core/common/UriUtils.java
core: Fix UriUtils createDirectoryUri
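The UriUtils fix above moves the trailing-slash handling from the raw input string to the resolved URI, so a scheme-less input whose path does not exist on disk still yields a directory URI. A minimal sketch of that behaviour, using a hypothetical path and the same resolution logic as createUri:

import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Paths;

public class CreateDirectoryUriSketch {
    // Same resolution logic as UriUtils.createUri above.
    static URI createUri(String input) throws URISyntaxException {
        URI uri = new URI(null, input, null);
        return (uri.getScheme() == null || uri.getScheme().isEmpty())
                ? Paths.get(input).toUri() : uri;
    }

    public static void main(String[] args) throws URISyntaxException {
        URI uri = createUri("data/output"); // hypothetical path that need not exist
        if (!uri.getPath().endsWith("/")) {
            // Append the slash to the resolved URI rather than to the raw input,
            // which is the essence of the fix.
            uri = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(),
                    uri.getPath() + "/", uri.getQuery(), uri.getFragment());
        }
        System.out.println(uri); // e.g. file:/home/user/data/output/
    }
}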
Java
apache-2.0
8a25dbc0da65395c75ccf8cdde2722cee7af5087
0
oriontribunal/CoffeeMud,MaxRau/CoffeeMud,Tycheo/coffeemud,bozimmerman/CoffeeMud,MaxRau/CoffeeMud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,sfunk1x/CoffeeMud,bozimmerman/CoffeeMud,MaxRau/CoffeeMud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,Tycheo/coffeemud,MaxRau/CoffeeMud,oriontribunal/CoffeeMud,oriontribunal/CoffeeMud,sfunk1x/CoffeeMud,sfunk1x/CoffeeMud,Tycheo/coffeemud,Tycheo/coffeemud
package com.planet_ink.coffee_mud.Abilities.Spells; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2009 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ @SuppressWarnings("unchecked") public class Spell_TimeStop extends Spell { public String ID() { return "Spell_TimeStop"; } public String name(){return "Time Stop";} public String displayText(){return "(Time is Stopped)";} protected int canAffectCode(){return CAN_ROOMS|CAN_MOBS;} protected int canTargetCode(){return 0;} protected int overrideMana(){return 100;} public int classificationCode(){return Ability.ACODE_SPELL|Ability.DOMAIN_ALTERATION;} public int abstractQuality(){ return Ability.QUALITY_MALICIOUS;} protected Vector fixed=new Vector(); public void setMiscText(String newText) { super.setMiscText(newText); fixed=new Vector(); } public void unInvoke() { // undo the affects of this spell if((affected!=null)&&(canBeUninvoked())) { if(affected instanceof Room) { Room room=(Room)affected; room.showHappens(CMMsg.MSG_OK_VISUAL, "Time starts moving again..."); if(invoker!=null) { Ability me=invoker.fetchEffect(ID()); if(me!=null) me.unInvoke(); } CMLib.threads().resumeTicking(room,-1); for(int i=0;i<fixed.size();i++) { MOB mob2=(MOB)fixed.elementAt(i); CMLib.threads().resumeTicking(mob2,-1); } fixed=new Vector(); } else if(affected instanceof MOB) { MOB mob=(MOB)affected; CMLib.threads().resumeTicking(mob,-1); if(mob.location()!=null) { mob.location().show(mob, null, CMMsg.MSG_OK_VISUAL, "Time starts moving again..."); Ability me=mob.location().fetchEffect(ID()); if(me!=null) me.unInvoke(); } } } super.unInvoke(); } public boolean okMessage(Environmental myHost, CMMsg msg) { if((affected!=null) &&(affected instanceof Room)) { switch(msg.targetMinor()) { case CMMsg.TYP_ENTER: case CMMsg.TYP_LEAVE: case CMMsg.TYP_FLEE: if(msg.source()==invoker) msg.source().tell("You cannot travel beyond the time stopped area."); else msg.source().tell("Nothing just happened. You didn't do that."); return false; default: if((msg.source()!=invoker) &&(!CMath.bset(msg.sourceCode(),CMMsg.MASK_ALWAYS)) &&(!CMath.bset(msg.targetCode(),CMMsg.MASK_ALWAYS))) { msg.source().tell("Time is stopped. Nothing just happened. 
You didn't do that."); return false; } } } return super.okMessage(myHost,msg); } public boolean invoke(MOB mob, Vector commands, Environmental givenTarget, boolean auto, int asLevel) { // the invoke method for spells receives as // parameters the invoker, and the REMAINING // command line parameters, divided into words, // and added as String objects to a vector. if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; Environmental target = mob.location(); if(target.fetchEffect(this.ID())!=null) { mob.tell(mob,null,null,"Time has already been stopped here!"); return false; } boolean success=proficiencyCheck(mob,0,auto); if(success) { // it worked, so build a copy of this ability, // and add it to the affects list of the // affected MOB. Then tell everyone else // what happened. CMMsg msg = CMClass.getMsg(mob, target, this,verbalCastCode(mob,target,auto),(auto?"T":"^S<S-NAME> speak(s) and gesture(s) and t")+"ime suddenly STOPS!^?"); if(mob.location().okMessage(mob,msg)) { mob.location().send(mob,msg); Room room=mob.location(); fixed=new Vector(); CMLib.threads().suspendTicking(room,-1); for(int m=0;m<mob.location().numInhabitants();m++) { MOB mob2=mob.location().fetchInhabitant(m); if(mob2!=mob) { fixed.addElement(mob2); CMLib.threads().suspendTicking(mob2,-1); if((mob.getVictim()==null)&&(mob.mayIFight(mob2))) mob.setVictim(mob2); } } beneficialAffect(mob,room,asLevel,3); } } else return beneficialWordsFizzle(mob,null,"<S-NAME> incant(s) for awhile, but the spell fizzles."); // return whether it worked return success; } }
com/planet_ink/coffee_mud/Abilities/Spells/Spell_TimeStop.java
package com.planet_ink.coffee_mud.Abilities.Spells; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2009 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ @SuppressWarnings("unchecked") public class Spell_TimeStop extends Spell { public String ID() { return "Spell_TimeStop"; } public String name(){return "Time Stop";} public String displayText(){return "(Time is Stopped)";} protected int canAffectCode(){return CAN_ROOMS|CAN_MOBS;} protected int canTargetCode(){return 0;} protected int overrideMana(){return 100;} public int classificationCode(){return Ability.ACODE_SPELL|Ability.DOMAIN_ALTERATION;} public int abstractQuality(){ return Ability.QUALITY_MALICIOUS;} protected Vector fixed=new Vector(); public void setMiscText(String newText) { super.setMiscText(newText); fixed=new Vector(); } public void unInvoke() { // undo the affects of this spell if((affected!=null)&&(canBeUninvoked())) { if(affected instanceof Room) { Room room=(Room)affected; room.showHappens(CMMsg.MSG_OK_VISUAL, "Time starts moving again..."); if(invoker!=null) { Ability me=invoker.fetchEffect(ID()); if(me!=null) me.unInvoke(); } CMLib.threads().resumeTicking(room,-1); for(int i=0;i<fixed.size();i++) { MOB mob2=(MOB)fixed.elementAt(i); CMLib.threads().resumeTicking(mob2,-1); } fixed=new Vector(); } else if(affected instanceof MOB) { MOB mob=(MOB)affected; CMLib.threads().resumeTicking(mob,-1); if(mob.location()!=null) { mob.location().show(mob, null, CMMsg.MSG_OK_VISUAL, "Time starts moving again..."); Ability me=mob.location().fetchEffect(ID()); if(me!=null) me.unInvoke(); } } } super.unInvoke(); } public boolean okMessage(Environmental myHost, CMMsg msg) { if((affected!=null) &&(affected instanceof Room)) { switch(msg.targetMinor()) { case CMMsg.TYP_ENTER: case CMMsg.TYP_LEAVE: case CMMsg.TYP_FLEE: if(msg.source()==invoker) msg.source().tell("You cannot travel beyond the time stopped area."); else msg.source().tell("Nothing just happened. You didn't do that."); return false; default: if((msg.source()!=invoker) &&(!CMath.bset(msg.sourceCode(),CMMsg.MASK_ALWAYS)) &&(!CMath.bset(msg.targetCode(),CMMsg.MASK_ALWAYS))) { msg.source().tell("Time is stopped. Nothing just happened. 
You didn't do that."); return false; } } } return super.okMessage(myHost,msg); } public boolean invoke(MOB mob, Vector commands, Environmental givenTarget, boolean auto, int asLevel) { // the invoke method for spells receives as // parameters the invoker, and the REMAINING // command line parameters, divided into words, // and added as String objects to a vector. if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; Environmental target = mob.location(); if(target.fetchEffect(this.ID())!=null) { mob.tell(mob,null,null,"Time has already been stopped here!"); return false; } boolean success=proficiencyCheck(mob,0,auto); if(success) { // it worked, so build a copy of this ability, // and add it to the affects list of the // affected MOB. Then tell everyone else // what happened. CMMsg msg = CMClass.getMsg(mob, target, this,verbalCastCode(mob,target,auto),(auto?"T":"^S<S-NAME> speak(s) and gesture(s) and t")+"ime suddenly STOPS!^?"); if(mob.location().okMessage(mob,msg)) { mob.location().send(mob,msg); Room room=mob.location(); fixed=new Vector(); CMLib.threads().suspendTicking(room,-1); for(int m=0;m<mob.location().numInhabitants();m++) { MOB mob2=mob.location().fetchInhabitant(m); if(mob2!=mob) { fixed.addElement(mob2); CMLib.threads().suspendTicking(mob2,-1); } } beneficialAffect(mob,room,asLevel,3); } } else return beneficialWordsFizzle(mob,null,"<S-NAME> incant(s) for awhile, but the spell fizzles."); // return whether it worked return success; } }
git-svn-id: svn://192.168.1.10/public/CoffeeMud@8031 0d6f1817-ed0e-0410-87c9-987e46238f29
com/planet_ink/coffee_mud/Abilities/Spells/Spell_TimeStop.java
Java
apache-2.0
ca653d0481d3b43b1a1111a508d1ed586ca31ebd
0
cbarrin/EAGERFloodlight,cbarrin/EAGERFloodlight,cbarrin/EAGERFloodlight
package net.floodlightcontroller.randomizer.web; import net.floodlightcontroller.randomizer.IRandomizerService; import net.floodlightcontroller.randomizer.Server; import org.restlet.resource.Get; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by geddingsbarrineau on 10/29/16. * */ public class InfoResource extends ServerResource { protected static Logger log = LoggerFactory.getLogger(InfoResource.class); public static class InfoJsonSerializerWrapper { private final String prefix; private final List<Server> servers; public InfoJsonSerializerWrapper(String prefix, List<Server> servers) { this.prefix = prefix; this.servers = servers; } } @Get public Object getEAGERInfo() { IRandomizerService randomizerService = (IRandomizerService) getContext().getAttributes().get(IRandomizerService.class.getCanonicalName()); Map<String, String> ret = new HashMap<>(); // FIXME: This is broken with the new prefixes implementation ret.put("current-prefix", randomizerService.getCurrentPrefix().toString()); return ret; } }
src/main/java/net/floodlightcontroller/randomizer/web/InfoResource.java
package net.floodlightcontroller.randomizer.web; import net.floodlightcontroller.randomizer.IRandomizerService; import net.floodlightcontroller.randomizer.Server; import org.restlet.resource.Get; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by geddingsbarrineau on 10/29/16. * */ public class InfoResource extends ServerResource { protected static Logger log = LoggerFactory.getLogger(InfoResource.class); public static class InfoJsonSerializerWrapper { private final String prefix; private final List<Server> servers; public InfoJsonSerializerWrapper(String prefix, List<Server> servers) { this.prefix = prefix; this.servers = servers; } } @Get public Map<String, String> getEAGERInfo() { IRandomizerService randomizerService = (IRandomizerService) getContext().getAttributes().get(IRandomizerService.class.getCanonicalName()); Map<String, String> ret = new HashMap<>(); ret.put("current-prefix", randomizerService.getCurrentPrefix().toString()); return ret; } }
Small fixes to the REST API after the server update.
src/main/java/net/floodlightcontroller/randomizer/web/InfoResource.java
Small fixes to the REST API after the server update.
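For context, a minimal sketch of how a client might consume this resource once the fix lands. The controller host, port, and the REST path under which the randomizer module registers InfoResource (shown here as /wm/randomizer/info/json) are assumptions for illustration, not taken from this commit.

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class InfoClientSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint: Floodlight's REST server commonly listens on 8080,
        // but the module path "/wm/randomizer/info/json" is an assumption.
        URL url = new URL("http://localhost:8080/wm/randomizer/info/json");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");

        // Read the JSON body, e.g. {"current-prefix":"10.0.0.0/24"}
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            StringBuilder body = new StringBuilder();
            String line;
            while ((line = in.readLine()) != null) {
                body.append(line);
            }
            System.out.println("current prefix response: " + body);
        } finally {
            conn.disconnect();
        }
    }
}
```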
Java
apache-2.0
b11369c73d012dc3eb5543cdb4ad83498e1ba0ca
0
MyersResearchGroup/iBioSim,MyersResearchGroup/iBioSim,MyersResearchGroup/iBioSim
/** * */ package biomodel.gui.schematic; import java.awt.Rectangle; import java.io.File; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.HashMap; import java.util.Hashtable; import java.util.Properties; import java.util.Vector; import javax.swing.JOptionPane; import org.sbml.libsbml.CompModelPlugin; import org.sbml.libsbml.CompartmentGlyph; import org.sbml.libsbml.Constraint; import org.sbml.libsbml.Event; import org.sbml.libsbml.EventAssignment; import org.sbml.libsbml.Layout; import org.sbml.libsbml.Model; import org.sbml.libsbml.ModifierSpeciesReference; import org.sbml.libsbml.Reaction; import org.sbml.libsbml.ReactionGlyph; import org.sbml.libsbml.Rule; import org.sbml.libsbml.Species; import org.sbml.libsbml.SpeciesGlyph; import org.sbml.libsbml.SpeciesReference; import org.sbml.libsbml.SpeciesReferenceGlyph; import org.sbml.libsbml.TextGlyph; import main.Gui; //import javax.xml.bind.JAXBElement.GlobalScope; import biomodel.gui.Grid; import biomodel.gui.movie.MovieAppearance; import biomodel.gui.textualeditor.SBMLutilities; import biomodel.parser.BioModel; import biomodel.util.GlobalConstants; import com.mxgraph.model.mxCell; import com.mxgraph.model.mxGeometry; import com.mxgraph.model.mxICell; import com.mxgraph.swing.mxGraphComponent; import com.mxgraph.util.mxConstants; import com.mxgraph.util.mxPoint; import com.mxgraph.view.mxGraph; import com.mxgraph.view.mxStylesheet; /** * @author Tyler [email protected] * */ public class BioGraph extends mxGraph { private double DIS_BETWEEN_NEIGHBORING_EDGES = 35.0; private double SECOND_SELF_INFLUENCE_DISTANCE = 20; private HashMap<String, mxCell> speciesToMxCellMap; private HashMap<String, mxCell> reactionsToMxCellMap; private HashMap<String, mxCell> rulesToMxCellMap; private HashMap<String, mxCell> constraintsToMxCellMap; private HashMap<String, mxCell> eventsToMxCellMap; private HashMap<String, mxCell> influencesToMxCellMap; private HashMap<String, mxCell> componentsToMxCellMap; private HashMap<String, mxCell> componentsConnectionsToMxCellMap; private HashMap<String, mxCell> drawnPromoterToMxCellMap; private HashMap<String, mxCell> gridRectangleToMxCellMap; mxCell cell = new mxCell(); private BioModel gcm; public final String CELL_NOT_FULLY_CONNECTED = "cell not fully connected"; private final String CELL_VALUE_NOT_FOUND = "cell value not found"; // only bother the user about bad promoters once. //This should be improved to happen once per GCM file if this will be a common error. public boolean isBuilding = false; public boolean dynamic = false; // Keep track of how many elements did not have positioning info. // This allows us to stack them in the topleft corner until they // are positioned by the user or a layout algorithm. int unpositionedSpeciesComponentCount = 0; /** * constructor * @param gcm */ public BioGraph(BioModel gcm) { super(); // Turn editing off to prevent mxGraph from letting the user change the // label on the cell. We want to do this using the property windows. 
this.setCellsEditable(false); this.gcm = gcm; this.initializeMaps(); createStyleSheets(); } /** * sets the hash maps to null */ private void initializeMaps(){ speciesToMxCellMap = new HashMap<String, mxCell>(); reactionsToMxCellMap = new HashMap<String, mxCell>(); rulesToMxCellMap = new HashMap<String, mxCell>(); constraintsToMxCellMap = new HashMap<String, mxCell>(); eventsToMxCellMap = new HashMap<String, mxCell>(); componentsToMxCellMap = new HashMap<String, mxCell>(); influencesToMxCellMap = new HashMap<String, mxCell>(); componentsConnectionsToMxCellMap = new HashMap<String, mxCell>(); drawnPromoterToMxCellMap = new HashMap<String, mxCell>(); gridRectangleToMxCellMap = new HashMap<String, mxCell>(); } //GRAPH BUILDING /** * appplies a layout to the graphComponent * * @param ident * @param graphComponent */ public void applyLayout(String ident, mxGraphComponent graphComponent){ Layouting.applyLayout(ident, this, graphComponent); } /** * Builds the graph based on the internal representation * @return */ public boolean buildGraph() { this.isBuilding = true; // remove all the cells from the graph (vertices and edges) this.removeCells(this.getChildCells(this.getDefaultParent(), true, true)); initializeMaps(); assert(this.gcm != null); // Start an undo transaction this.getModel().beginUpdate(); boolean needsPositioning = false; unpositionedSpeciesComponentCount = 0; //createGraphCompartmentFromModel("default"); //put the grid cells in first so that they're below the other cells addGridCells(); // add species for(String sp : gcm.getSpecies()){ if (gcm.getSBMLDocument().getModel().getSpecies(sp).getAnnotationString().contains("type=\"grid\"")) continue; if(createGraphSpeciesFromModel(sp)) needsPositioning = true; } Model m = gcm.getSBMLDocument().getModel(); int x = 225; int y = 50; // add reactions Layout layout = gcm.createLayout(); for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (BioModel.isDegradationReaction(r)) continue; if (BioModel.isDiffusionReaction(r)) continue; if (BioModel.isProductionReaction(r)) continue; if (r.getAnnotationString().contains("Complex")) continue; if (r.getAnnotationString().contains("Constitutive")) continue; if (r.getAnnotationString().contains("grid")) continue; if (layout.getReactionGlyph(r.getId()) != null || r.getId().startsWith("Production_")) { if(!r.getId().startsWith("Production_") && createGraphReactionFromModel(r.getId())) needsPositioning = true; } else { if (r.getNumModifiers() > 0 || (r.getNumReactants()>1 && r.getNumProducts()>1) || r.getNumReactants()==0 || r.getNumProducts()==0) { ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").createReactionGlyph(); reactionGlyph.setId(r.getId()); reactionGlyph.setReactionId(r.getId()); reactionGlyph.getBoundingBox().setX(x); reactionGlyph.getBoundingBox().setY(y); reactionGlyph.getBoundingBox().setWidth(GlobalConstants.DEFAULT_REACTION_WIDTH); reactionGlyph.getBoundingBox().setHeight(GlobalConstants.DEFAULT_REACTION_HEIGHT); TextGlyph textGlyph = null; if (layout.getTextGlyph(r.getId())!=null) { textGlyph = layout.getTextGlyph(r.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId(r.getId()); textGlyph.setGraphicalObjectId(r.getId()); textGlyph.setText(r.getId()); textGlyph.setBoundingBox(reactionGlyph.getBoundingBox()); x+=50; y+=25; if(createGraphReactionFromModel(r.getId())) needsPositioning = true; } } } // add rules for (long i = 0; i < m.getNumRules(); i++) { Rule rule = m.getRule(i); if 
(layout.getReactionGlyph(rule.getMetaId())!=null) { if(createGraphRuleFromModel(rule.getMetaId())) { needsPositioning = true; } } else { ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").createReactionGlyph(); reactionGlyph.setId(rule.getMetaId()); reactionGlyph.setReactionId(rule.getMetaId()); reactionGlyph.getBoundingBox().setX(x); reactionGlyph.getBoundingBox().setY(y); reactionGlyph.getBoundingBox().setWidth(GlobalConstants.DEFAULT_RULE_WIDTH); reactionGlyph.getBoundingBox().setHeight(GlobalConstants.DEFAULT_RULE_HEIGHT); TextGlyph textGlyph = null; if (layout.getTextGlyph(rule.getMetaId())!=null) { textGlyph = layout.getTextGlyph(rule.getMetaId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId(rule.getMetaId()); textGlyph.setGraphicalObjectId(rule.getMetaId()); textGlyph.setText(rule.getMetaId()); textGlyph.setBoundingBox(reactionGlyph.getBoundingBox()); x+=50; y+=25; if(createGraphRuleFromModel(rule.getMetaId())) needsPositioning = true; } } // add constraints for (long i = 0; i < m.getNumConstraints(); i++) { Constraint constraint = m.getConstraint(i); if (layout.getReactionGlyph(constraint.getMetaId())!=null) { if(createGraphConstraintFromModel(constraint.getMetaId())) { needsPositioning = true; } } else { ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").createReactionGlyph(); reactionGlyph.setId(constraint.getMetaId()); reactionGlyph.setReactionId(constraint.getMetaId()); reactionGlyph.getBoundingBox().setX(x); reactionGlyph.getBoundingBox().setY(y); reactionGlyph.getBoundingBox().setWidth(GlobalConstants.DEFAULT_CONSTRAINT_WIDTH); reactionGlyph.getBoundingBox().setHeight(GlobalConstants.DEFAULT_CONSTRAINT_HEIGHT); TextGlyph textGlyph = null; if (layout.getTextGlyph(constraint.getMetaId())!=null) { textGlyph = layout.getTextGlyph(constraint.getMetaId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId(constraint.getMetaId()); textGlyph.setGraphicalObjectId(constraint.getMetaId()); textGlyph.setText(constraint.getMetaId()); textGlyph.setBoundingBox(reactionGlyph.getBoundingBox()); x+=50; y+=25; if(createGraphConstraintFromModel(constraint.getMetaId())) needsPositioning = true; } } // add events for (long i = 0; i < m.getNumEvents(); i++) { Event event = m.getEvent(i); if (layout.getReactionGlyph(event.getId())!=null) { if(createGraphEventFromModel(event.getId())) { needsPositioning = true; } } else { ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").createReactionGlyph(); reactionGlyph.setId(event.getId()); reactionGlyph.setReactionId(event.getId()); reactionGlyph.getBoundingBox().setX(x); reactionGlyph.getBoundingBox().setY(y); reactionGlyph.getBoundingBox().setWidth(GlobalConstants.DEFAULT_EVENT_WIDTH); reactionGlyph.getBoundingBox().setHeight(GlobalConstants.DEFAULT_EVENT_HEIGHT); TextGlyph textGlyph = null; if (layout.getTextGlyph(event.getId())!=null) { textGlyph = layout.getTextGlyph(event.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId(event.getId()); textGlyph.setGraphicalObjectId(event.getId()); textGlyph.setText(event.getId()); textGlyph.setBoundingBox(reactionGlyph.getBoundingBox()); x+=50; y+=25; if(createGraphEventFromModel(event.getId())) needsPositioning = true; } } // add all components if (gcm.isGridEnabled()) { for (long i = 0; i < layout.getNumCompartmentGlyphs(); i++) { String comp = layout.getCompartmentGlyph(i).getId(); //these are not meant to be displayed //if (comp.contains("GRID__")) // continue; if 
(createGraphComponentFromModel(comp)) needsPositioning = true; } } else { CompModelPlugin sbmlCompModel = gcm.getSBMLCompModel(); for (long i = 0; i < sbmlCompModel.getNumSubmodels(); i++) { String comp = sbmlCompModel.getSubmodel(i).getId(); //String comp = gcm.getSBMLCompModel().getSubmodel(i).getId(); //these are not meant to be displayed //if (comp.contains("GRID__")) // continue; if (createGraphComponentFromModel(comp)) needsPositioning = true; } } // add all the drawn promoters for(String prom : gcm.getPromoters()){ if (gcm.isPromoterExplicit(prom)) { if(createGraphDrawnPromoterFromModel(prom)) needsPositioning = true; } } boolean needsRedrawn = false; // add all the edges. for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (r.getAnnotationString().contains("grid")) continue; if (r.getAnnotationString().contains("Complex")) { for (int j = 0; j < r.getNumReactants(); j++) { String id = r.getReactant(j).getSpecies() + "+>" + r.getProduct(0).getSpecies(); this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getReactant(j).getSpecies()), this.getSpeciesCell(r.getProduct(0).getSpecies())); String style = "COMPLEX"; mxCell cell = this.getInfluence(id); cell.setStyle(style); } } else if (BioModel.isProductionReaction(r)) { String promoterId = r.getId().replace("Production_",""); if (gcm.isPromoterExplicit(promoterId)) { for (int j = 0; j < r.getNumProducts(); j++) { if (r.getProduct(j).getSpecies().endsWith("_mRNA")) continue; String id = promoterId + "->" + r.getProduct(j).getSpecies(); mxCell production = (mxCell)this.insertEdge(this.getDefaultParent(), id, "", this.getDrawnPromoterCell(promoterId), this.getSpeciesCell(r.getProduct(j).getSpecies())); production.setStyle("PRODUCTION"); } for (int j = 0; j < r.getNumModifiers(); j++) { if (BioModel.isRepressor(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); String style = "REPRESSION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); } else if (BioModel.isActivator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); String style = "ACTIVATION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); } else if (r.getModifier(j).getAnnotationString().contains(GlobalConstants.NOINFLUENCE)) { String id = r.getModifier(j).getSpecies() + "x>" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); String style = "NOINFLUENCE"; mxCell cell = this.getInfluence(id); cell.setStyle(style); } else if (BioModel.isRegulator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); String style = "REPRESSION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); id = r.getModifier(j).getSpecies() + "->" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); style = "ACTIVATION"; cell = this.getInfluence(id); cell.setStyle(style); } } } else { for (int j = 0; j < r.getNumModifiers(); 
j++) { for (int k = 0; k < r.getNumProducts(); k++) { if (BioModel.isRepressor(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); String style = "REPRESSION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); } else if (BioModel.isActivator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); String style = "ACTIVATION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); } else if (r.getModifier(j).getAnnotationString().contains(GlobalConstants.NOINFLUENCE)) { String id = r.getModifier(j).getSpecies() + "x>" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); String style = "NOINFLUENCE"; mxCell cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); } else if (BioModel.isRegulator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); String style = "REPRESSION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); id = r.getModifier(j).getSpecies() + "->" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); style = "ACTIVATION"; cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); } } } } } } //add reactions for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (BioModel.isDegradationReaction(r)) continue; if (BioModel.isDiffusionReaction(r)) continue; if (BioModel.isProductionReaction(r)) continue; if (r.getAnnotationString().contains("Complex")) continue; if (r.getAnnotationString().contains("Constitutive")) continue; if (r.getAnnotationString().contains("grid")) continue; ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").getReactionGlyph(r.getId()); if (reactionGlyph != null) { while (reactionGlyph.getNumSpeciesReferenceGlyphs() > 0) reactionGlyph.removeSpeciesReferenceGlyph(0); for (int j = 0; j < r.getNumReactants(); j++) { SpeciesReference s = r.getReactant(j); mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), s.getSpecies() + "__" + r.getId(), "", this.getSpeciesCell(s.getSpecies()), this.getReactionsCell(r.getId())); if (r.getReversible()) { if (s.getStoichiometry() != 1.0) cell.setValue(s.getStoichiometry()+",r"); else cell.setValue("r"); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN + ";" + mxConstants.STYLE_STARTARROW + "=" + mxConstants.ARROW_OPEN); } else { if (s.getStoichiometry() != 1.0) cell.setValue(s.getStoichiometry()); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); } String reactant = s.getSpecies(); 
SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(reactant); speciesReferenceGlyph.setSpeciesGlyphId(reactant); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } for (int j = 0; j < r.getNumModifiers(); j++) { ModifierSpeciesReference s = r.getModifier(j); mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), s.getSpecies() + "__" + r.getId(), "", this.getSpeciesCell(s.getSpecies()), this.getReactionsCell(r.getId())); if (r.getReversible()) cell.setValue("m"); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.NONE); String modifier = s.getSpecies(); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(modifier); speciesReferenceGlyph.setSpeciesGlyphId(modifier); speciesReferenceGlyph.setRole("modifier"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } for (int k = 0; k < r.getNumProducts(); k++) { SpeciesReference s = r.getProduct(k); mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), r.getId() + "__" + s.getSpecies(), "", this.getReactionsCell(r.getId()), this.getSpeciesCell(s.getSpecies())); if (r.getReversible()) { if (s.getStoichiometry() != 1.0) cell.setValue(s.getStoichiometry()+",p"); else cell.setValue("p"); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN + ";" + mxConstants.STYLE_STARTARROW + "=" + mxConstants.ARROW_OPEN); } else { if (s.getStoichiometry() != 1.0) cell.setValue(s.getStoichiometry()); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); } String product = s.getSpecies(); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(product); speciesReferenceGlyph.setSpeciesGlyphId(product); speciesReferenceGlyph.setRole("product"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } else { for (int j = 0; j < r.getNumReactants(); j++) { SpeciesReference s1 = r.getReactant(j); for (int k = 0; k < r.getNumProducts(); k++) { SpeciesReference s2 = r.getProduct(k); mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), s1.getSpecies() + "_" + r.getId() + "_" + s2.getSpecies(), "", this.getSpeciesCell(s1.getSpecies()), this.getSpeciesCell(s2.getSpecies())); cell.setValue(r.getId()); if (r.getReversible()) { cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN + ";" + mxConstants.STYLE_STARTARROW + "=" + mxConstants.ARROW_OPEN); } else { cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); } } } } } //add rules for (int i = 0; i < m.getNumRules(); i++) { Rule r = m.getRule(i); ReactionGlyph 
reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").getReactionGlyph(r.getMetaId()); if (reactionGlyph != null) { while (reactionGlyph.getNumSpeciesReferenceGlyphs() > 0) reactionGlyph.removeSpeciesReferenceGlyph(0); String initStr = SBMLutilities.myFormulaToString(r.getMath()); String[] vars = initStr.split(" |\\(|\\)|\\,"); for (int j = 0; j < vars.length; j++) { Species species = m.getSpecies(vars[j]); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), species.getId() + "__" + r.getMetaId(), "", this.getSpeciesCell(species.getId()), this.getRulesCell(r.getMetaId())); cell.setStyle("RULE_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } // Add variable if (r.isAssignment() || r.isRate()) { Species species = m.getSpecies(r.getVariable()); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), r.getMetaId() + "__" + species.getId(), "", this.getRulesCell(r.getMetaId()), this.getSpeciesCell(species.getId())); cell.setStyle("RULE_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("product"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } } } // add constraints for (int i = 0; i < m.getNumConstraints(); i++) { Constraint c = m.getConstraint(i); ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").getReactionGlyph(c.getMetaId()); if (reactionGlyph != null) { while (reactionGlyph.getNumSpeciesReferenceGlyphs() > 0) reactionGlyph.removeSpeciesReferenceGlyph(0); String initStr = SBMLutilities.myFormulaToString(c.getMath()); String[] vars = initStr.split(" |\\(|\\)|\\,"); for (int j = 0; j < vars.length; j++) { Species species = m.getSpecies(vars[j]); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), species.getId() + "__" + c.getMetaId(), "", this.getSpeciesCell(species.getId()), this.getConstraintsCell(c.getMetaId())); cell.setStyle("CONSTRAINT_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } } 
} // add event edges for (int i = 0; i < m.getNumEvents(); i++) { Event e = m.getEvent(i); ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").getReactionGlyph(e.getId()); if (reactionGlyph != null) { while (reactionGlyph.getNumSpeciesReferenceGlyphs() > 0) reactionGlyph.removeSpeciesReferenceGlyph(0); if (e.isSetTrigger()) { String initStr = SBMLutilities.myFormulaToString(e.getTrigger().getMath()); String[] vars = initStr.split(" |\\(|\\)|\\,"); for (int j = 0; j < vars.length; j++) { Species species = m.getSpecies(vars[j]); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), species.getId() + "__" + e.getId(), "", this.getSpeciesCell(species.getId()), this.getEventsCell(e.getId())); cell.setStyle("EVENT_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } } if (e.isSetDelay()) { String initStr = SBMLutilities.myFormulaToString(e.getDelay().getMath()); String [] vars = initStr.split(" |\\(|\\)|\\,"); for (int j = 0; j < vars.length; j++) { Species species = m.getSpecies(vars[j]); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), species.getId() + "__" + e.getId(), "", this.getSpeciesCell(species.getId()), this.getEventsCell(e.getId())); cell.setStyle("EVENT_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } } if (e.isSetPriority()) { String initStr = SBMLutilities.myFormulaToString(e.getPriority().getMath()); String [] vars = initStr.split(" |\\(|\\)|\\,"); for (int j = 0; j < vars.length; j++) { Species species = m.getSpecies(vars[j]); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), species.getId() + "__" + e.getId(), "", this.getSpeciesCell(species.getId()), this.getEventsCell(e.getId())); cell.setStyle("EVENT_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } } // Add variable for (int k = 0; k < e.getNumEventAssignments(); 
k++) { EventAssignment ea = e.getEventAssignment(k); String initStr = SBMLutilities.myFormulaToString(ea.getMath()); String [] vars = initStr.split(" |\\(|\\)|\\,"); for (int j = 0; j < vars.length; j++) { Species species = m.getSpecies(vars[j]); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), species.getId() + "__" + e.getId(), "", this.getSpeciesCell(species.getId()), this.getEventsCell(e.getId())); cell.setStyle("EVENT_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } Species species = m.getSpecies(ea.getVariable()); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), e.getId() + "__" + species.getId(), "", this.getEventsCell(e.getId()), this.getSpeciesCell(species.getId())); cell.setStyle("EVENT_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("product"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } } } addEdgeOffsets(); this.getModel().endUpdate(); this.isBuilding = false; // if we found any incorrectly marked promoters we need to redraw. Do so now. // The promoters should all pass the second time. if(needsRedrawn){ return buildGraph(); } return needsPositioning; } /** * Loop through all the edges and add control points to reposition them * if they are laying over the top of any other edges. */ public void addEdgeOffsets(){ // Make a hash where the key is a string built from the ids of the source and destination // of all the edges. The source and destination will be sorted so that the same two // source-destination pair will always map to the same key. The value is a list // of edges. That way if there are ever more then one edge between pairs, // we can modify the geometry so they don't overlap. 
HashMap<String, Vector<mxCell>> edgeHash = new HashMap<String, Vector<mxCell>>(); // build a temporary structure mapping sets of edge endpoints to edges // map influences Model m = gcm.getSBMLDocument().getModel(); for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (r.getAnnotationString().contains("Complex")) { for (int j = 0; j < r.getNumReactants(); j++) { String endA = r.getReactant(j).getSpecies(); String endB = r.getProduct(0).getSpecies(); String id = r.getReactant(j).getSpecies() + "+>" + r.getProduct(0).getSpecies(); if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } else if (BioModel.isProductionReaction(r)) { String promoterId = r.getId().replace("Production_",""); if (!gcm.isPromoterExplicit(promoterId)) { for (int j = 0; j < r.getNumModifiers(); j++) { for (int k = 0; k < r.getNumProducts(); k++) { String endA = r.getModifier(j).getSpecies(); String endB = r.getProduct(k).getSpecies(); if (BioModel.isRepressor(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } else if (BioModel.isActivator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } else if (r.getModifier(j).getAnnotationString().contains(GlobalConstants.NOINFLUENCE)) { String id = r.getModifier(j).getSpecies() + "x>" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } else if (BioModel.isRegulator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); id = r.getModifier(j).getSpecies() + "-|" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } key = endA + " " + endB; cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } } } else { for (int j = 0; j < r.getNumModifiers(); j++) { String endA = r.getModifier(j).getSpecies(); String endB = promoterId; if (BioModel.isRegulator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = 
endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); id = r.getModifier(j).getSpecies() + "-|" + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } key = endA + " " + endB; cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } } } } for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (BioModel.isDegradationReaction(r)) continue; if (BioModel.isDiffusionReaction(r)) continue; if (BioModel.isProductionReaction(r)) continue; if (r.getAnnotationString().contains("Complex")) continue; if (r.getAnnotationString().contains("Constitutive")) continue; if (gcm.getSBMLLayout().getLayout("iBioSim").getReactionGlyph(r.getId()) != null) { for (int j = 0; j < r.getNumReactants(); j++) { SpeciesReference s = r.getReactant(j); String endA = s.getSpecies(); String endB = r.getId(); if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(s.getSpecies() + "__" + r.getId()); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } for (int j = 0; j < r.getNumModifiers(); j++) { ModifierSpeciesReference s = r.getModifier(j); String endA = s.getSpecies(); String endB = r.getId(); if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(s.getSpecies() + "__" + r.getId()); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } for (int k = 0; k < r.getNumProducts(); k++) { SpeciesReference s = r.getProduct(k); String endA = r.getId(); String endB = s.getSpecies(); if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(r.getId() + "__" + s.getSpecies()); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } else { for (int j = 0; j < r.getNumReactants(); j++) { SpeciesReference s1 = r.getReactant(j); for (int k = 0; k < r.getNumProducts(); k++) { SpeciesReference s2 = r.getProduct(k); String endA = s1.getSpecies(); String endB = s2.getSpecies(); // ignore anything connected directly to a drawn promoter //if(endA.equals(GlobalConstants.NONE) || endB.equals(GlobalConstants.NONE)) // continue; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(s1.getSpecies() + "_" + r.getId() + "_" + s2.getSpecies()); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } } } // map components edges for (long i = 0; i < gcm.getSBMLCompModel().getNumSubmodels(); i++) { String compName = gcm.getSBMLCompModel().getSubmodel(i).getId(); for (String propName : gcm.getInputs(compName).keySet()) { String targetName = gcm.getInputs(compName).get(propName); String type = "Input"; String key = compName + " "+type+" " + targetName; mxCell cell = componentsConnectionsToMxCellMap.get(key); String simpleKey = compName + " " + targetName; if(edgeHash.containsKey(simpleKey) == false) edgeHash.put(simpleKey, new 
Vector<mxCell>()); edgeHash.get(simpleKey).add(cell); } for (String propName : gcm.getOutputs(compName).keySet()) { String targetName = gcm.getOutputs(compName).get(propName); String type = "Output"; String key = compName + " "+type+" " + targetName; mxCell cell = componentsConnectionsToMxCellMap.get(key); String simpleKey = compName + " " + targetName; if(edgeHash.containsKey(simpleKey) == false) edgeHash.put(simpleKey, new Vector<mxCell>()); edgeHash.get(simpleKey).add(cell); } } // loop through every set of edge endpoints and then move them if needed. for(Vector<mxCell> vec:edgeHash.values()){ if(vec.size() > 1 && vec.get(0) != null){ mxCell source = (mxCell)vec.get(0).getSource(); mxCell target = (mxCell)vec.get(0).getTarget(); // find the end and center points mxGeometry t; t = source.getGeometry(); mxPoint sp = new mxPoint(t.getCenterX(), t.getCenterY()); t = target.getGeometry(); mxPoint tp = new mxPoint(t.getCenterX(), t.getCenterY()); mxPoint cp = new mxPoint((tp.getX()+sp.getX())/2.0, (tp.getY()+sp.getY())/2.0); // check for self-influence if(source == target){ mxCell c = vec.get(0); mxGeometry geom = c.getGeometry(); // set the self-influence's point to the left of the influence. // This causes the graph library to draw it rounded in that direction. mxPoint p = new mxPoint( cp.getX() - t.getWidth()/2-SECOND_SELF_INFLUENCE_DISTANCE, cp.getY() ); Vector<mxPoint> points = new Vector<mxPoint>(); points.add(p); geom.setPoints(points); c.setGeometry(geom); continue; } // make a unit vector that points in the direction perpendicular to the // direction from one endpoint to the other. 90 degrees rotated means flip // the x and y coordinates. mxPoint dVec = new mxPoint(-(sp.getY()-tp.getY()), sp.getX()-tp.getX()); double magnitude = Math.sqrt(dVec.getX()*dVec.getX() + dVec.getY()*dVec.getY()); // avoid divide-by-zero errors magnitude = Math.max(magnitude, .1); // normalize dVec.setX(dVec.getX()/magnitude); dVec.setY(dVec.getY()/magnitude); // loop through all the edges, create a new midpoint and apply it. // also move the edge center to the midpoint so that labels won't be // on top of each other. for(int i=0; i<vec.size(); i++){ double offset = i-(vec.size()-1.0)/2.0; mxCell edge = vec.get(i); //cell.setGeometry(new mxGeometry(0, 0, 100, 100)); mxGeometry geom = edge.getGeometry(); Vector<mxPoint> points = new Vector<mxPoint>(); mxPoint p = new mxPoint( cp.getX()+dVec.getX()*offset*DIS_BETWEEN_NEIGHBORING_EDGES, cp.getY()+dVec.getY()*offset*DIS_BETWEEN_NEIGHBORING_EDGES ); points.add(p); geom.setPoints(points); // geom.setX(p.getX()); // geom.setY(p.getY()); edge.setGeometry(geom); } } } // for(Object edgeo:this.getSelectionCell()){ // mxCell edge = (mxCell)edgeo; // int s = edge.getSource().getEdgeCount(); // int t = edge.getTarget().getEdgeCount() // // if(edge.getSource().getEdgeCount() > 1 && edge.getTarget().getEdgeCount() > 1){ // // the source and target have multiple edges, now loop through them all... 
// // // //cell.setGeometry(new mxGeometry(0, 0, 100, 100)); // mxGeometry geom = new mxGeometry(); // Vector<mxPoint> points = new Vector<mxPoint>(); // mxPoint p = new mxPoint(50.0, 50.0); // points.add(p); // geom.setPoints(points); // edge.setGeometry(geom); // } // } } /** * adds grid rectangles via cell vertices */ public void addGridCells() { if (gcm.getGrid().isEnabled()) { int gridRows = gcm.getGrid().getNumRows(); int gridCols = gcm.getGrid().getNumCols(); double gridWidth = gcm.getGrid().getGridGeomWidth(); double gridHeight = gcm.getGrid().getGridGeomHeight(); //creates an mxCell/vertex for each grid rectangle //these are later accessible via ID via the hash map for (int row = 0; row < gridRows; ++row) { for (int col = 0; col < gridCols; ++col) { String id = "ROW" + row + "_COL" + col; double currX = 15 + col*gridWidth; double currY = 15 + row*gridHeight; CellValueObject cvo = new CellValueObject(id, "Rectangle", null); mxGeometry geometry = new mxGeometry(currX, currY, gridWidth, gridHeight); mxCell vertex = new mxCell(cvo, geometry, null); vertex.setId(id); vertex.setVertex(true); vertex.setConnectable(false); vertex.setStyle("GRID_RECTANGLE"); addCell(vertex, this.defaultParent); gridRectangleToMxCellMap.put(id, vertex); } } } } //POSITION UPDATING /** * Called after a layout is chosen and applied. * Updates the gcm's postitioning using the * positioning on the graph. */ public void updateAllSpeciesPosition(){ for(mxCell cell:this.speciesToMxCellMap.values()){ updateInternalPosition(cell); } for(mxCell cell:this.reactionsToMxCellMap.values()){ updateInternalPosition(cell); } for(mxCell cell:this.componentsToMxCellMap.values()){ updateInternalPosition(cell); } for(mxCell cell:this.drawnPromoterToMxCellMap.values()){ updateInternalPosition(cell); } } /** * Given a cell that must be a species or component, * update the internal model to reflect it's coordinates. * Called when a cell is dragged with the GUI. 
*/ public void updateInternalPosition(mxCell cell){ mxGeometry geom = cell.getGeometry(); if (getCellType(cell).equals(GlobalConstants.SPECIES) || getCellType(cell).equals(GlobalConstants.PROMOTER)) { Layout layout = null; if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { layout = gcm.getSBMLLayout().getLayout("iBioSim"); } else { layout = gcm.getSBMLLayout().createLayout(); layout.setId("iBioSim"); } SpeciesGlyph speciesGlyph = null; if (layout.getSpeciesGlyph((String)cell.getId())!=null) { speciesGlyph = layout.getSpeciesGlyph((String)cell.getId()); } else { speciesGlyph = layout.createSpeciesGlyph(); speciesGlyph.setId((String)cell.getId()); speciesGlyph.setSpeciesId((String)cell.getId()); } speciesGlyph.getBoundingBox().setX(geom.getX()); speciesGlyph.getBoundingBox().setY(geom.getY()); speciesGlyph.getBoundingBox().setWidth(geom.getWidth()); speciesGlyph.getBoundingBox().setHeight(geom.getHeight()); TextGlyph textGlyph = null; if (layout.getTextGlyph((String)cell.getId())!=null) { textGlyph = layout.getTextGlyph((String)cell.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId((String)cell.getId()); textGlyph.setGraphicalObjectId((String)cell.getId()); textGlyph.setText((String)cell.getId()); textGlyph.setBoundingBox(speciesGlyph.getBoundingBox()); } else if (getCellType(cell).equals(GlobalConstants.REACTION)|| getCellType(cell).equals(GlobalConstants.RULE)|| getCellType(cell).equals(GlobalConstants.CONSTRAINT)|| getCellType(cell).equals(GlobalConstants.EVENT)) { Layout layout = null; if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { layout = gcm.getSBMLLayout().getLayout("iBioSim"); } else { layout = gcm.getSBMLLayout().createLayout(); layout.setId("iBioSim"); } ReactionGlyph reactionGlyph = null; if (layout.getReactionGlyph((String)cell.getId())!=null) { reactionGlyph = layout.getReactionGlyph((String)cell.getId()); } else { reactionGlyph = layout.createReactionGlyph(); reactionGlyph.setId((String)cell.getId()); reactionGlyph.setReactionId((String)cell.getId()); } reactionGlyph.getBoundingBox().setX(geom.getX()); reactionGlyph.getBoundingBox().setY(geom.getY()); reactionGlyph.getBoundingBox().setWidth(geom.getWidth()); reactionGlyph.getBoundingBox().setHeight(geom.getHeight()); TextGlyph textGlyph = null; if (layout.getTextGlyph((String)cell.getId())!=null) { textGlyph = layout.getTextGlyph((String)cell.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId((String)cell.getId()); textGlyph.setGraphicalObjectId((String)cell.getId()); textGlyph.setText((String)cell.getId()); textGlyph.setBoundingBox(reactionGlyph.getBoundingBox()); } else if (getCellType(cell).equals(GlobalConstants.COMPONENT)) { Layout layout = null; if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { layout = gcm.getSBMLLayout().getLayout("iBioSim"); } else { layout = gcm.getSBMLLayout().createLayout(); layout.setId("iBioSim"); } CompartmentGlyph compGlyph = null; if (layout.getCompartmentGlyph((String)cell.getId())!=null) { compGlyph = layout.getCompartmentGlyph((String)cell.getId()); } else { compGlyph = layout.createCompartmentGlyph(); compGlyph.setId((String)cell.getId()); compGlyph.setCompartmentId((String)cell.getId()); } compGlyph.getBoundingBox().setX(geom.getX()); compGlyph.getBoundingBox().setY(geom.getY()); compGlyph.getBoundingBox().setWidth(geom.getWidth()); compGlyph.getBoundingBox().setHeight(geom.getHeight()); TextGlyph textGlyph = null; if (layout.getTextGlyph((String)cell.getId())!=null) { textGlyph = 
layout.getTextGlyph((String)cell.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId((String)cell.getId()); textGlyph.setGraphicalObjectId((String)cell.getId()); textGlyph.setText((String)cell.getId()); textGlyph.setBoundingBox(compGlyph.getBoundingBox()); } } /** * Given a species, component, or drawn promoter cell, position it * using the properties. */ private boolean sizeAndPositionFromProperties(mxCell cell){ double x = 0; double y = 0; double width = 0; double height = 0; boolean needsPositioning = false; if (getCellType(cell).equals(GlobalConstants.SPECIES)|| getCellType(cell).equals(GlobalConstants.PROMOTER)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getSpeciesGlyph((String)cell.getId())!=null) { SpeciesGlyph speciesGlyph = layout.getSpeciesGlyph((String)cell.getId()); x = speciesGlyph.getBoundingBox().getPosition().getXOffset(); y = speciesGlyph.getBoundingBox().getPosition().getYOffset(); width = speciesGlyph.getBoundingBox().getDimensions().getWidth(); height = speciesGlyph.getBoundingBox().getDimensions().getHeight(); } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_SPECIES_WIDTH; height = GlobalConstants.DEFAULT_SPECIES_HEIGHT; gcm.placeSpecies((String)cell.getId(), x, y, height, width); } } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_SPECIES_WIDTH; height = GlobalConstants.DEFAULT_SPECIES_HEIGHT; } } else if (getCellType(cell).equals(GlobalConstants.REACTION) || getCellType(cell).equals(GlobalConstants.RULE) || getCellType(cell).equals(GlobalConstants.CONSTRAINT) || getCellType(cell).equals(GlobalConstants.EVENT)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getReactionGlyph((String)cell.getId())!=null) { ReactionGlyph reactionGlyph = layout.getReactionGlyph((String)cell.getId()); x = reactionGlyph.getBoundingBox().getPosition().getXOffset(); y = reactionGlyph.getBoundingBox().getPosition().getYOffset(); width = reactionGlyph.getBoundingBox().getDimensions().getWidth(); height = reactionGlyph.getBoundingBox().getDimensions().getHeight(); } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_REACTION_WIDTH; height = GlobalConstants.DEFAULT_REACTION_HEIGHT; gcm.placeReaction((String)cell.getId(), x, y, height, width); } } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_REACTION_WIDTH; height = GlobalConstants.DEFAULT_REACTION_HEIGHT; } } else if (getCellType(cell).equals(GlobalConstants.COMPONENT)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getCompartmentGlyph((String)cell.getId())!=null) { CompartmentGlyph compGlyph = 
layout.getCompartmentGlyph((String)cell.getId()); x = compGlyph.getBoundingBox().getPosition().getXOffset(); y = compGlyph.getBoundingBox().getPosition().getYOffset(); width = compGlyph.getBoundingBox().getDimensions().getWidth(); height = compGlyph.getBoundingBox().getDimensions().getHeight(); } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; gcm.placeCompartment((String)cell.getId(), x, y, height, width); } } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } } cell.setGeometry(new mxGeometry(x, y, width, height)); return needsPositioning; } /** * redraws the grid components */ public void updateGrid() { dynamic = true; this.removeCells(this.getChildCells(this.getDefaultParent(), true, true)); gridRectangleToMxCellMap.clear(); addGridCells(); componentsToMxCellMap.clear(); Grid grid = gcm.getGrid(); double gridWidth = grid.getGridGeomWidth(); double gridHeight = grid.getGridGeomHeight(); componentsToMxCellMap.clear(); //ADD COMPONENTS for (int row = 0; row < grid.getNumRows(); ++row) { for (int col = 0; col < grid.getNumCols(); ++col) { if (grid.getOccupancyFromLocation(row, col) == true) { double currX = 15 + col*gridWidth; double currY = 15 + row*gridHeight; String compID = grid.getCompIDFromLocation(row, col); grid.setNodeRectangle(compID, new Rectangle((int) currX, (int) currY, (int) gridWidth, (int) gridHeight)); Rectangle componentRectangle = grid.getSnapRectangleFromCompID(compID); String fullCompID = compID; if (compID.contains("_of_")) compID = compID.split("_")[0]; if (compID.length() > 10) { compID = compID.substring(0,9) + "..."; } CellValueObject compcvo = new CellValueObject(compID, "Component", null); mxCell compCell = (mxCell) this.insertVertex(this.getDefaultParent(), fullCompID, compcvo, componentRectangle.getX(), componentRectangle.getY(), componentRectangle.getWidth(), componentRectangle.getHeight()); compCell.setConnectable(false); compCell.setStyle("GRIDCOMPARTMENT"); componentsToMxCellMap.put(fullCompID, compCell); } } } } //GET METHODS /** * returns GlobalConstants.SPECIES, GlobalConstants.COMPONENT, GlobalConstants.INFLUENCE, or GlobalConstants.COMPONENT_CONNECTION. 
* @param cell */ public String getCellType(mxCell cell){ if(cell.isEdge()){ String sourceType = getCellType(cell.getSource()); String targetType = getCellType(cell.getTarget()); if(sourceType == CELL_VALUE_NOT_FOUND || targetType == CELL_VALUE_NOT_FOUND){ return CELL_NOT_FULLY_CONNECTED; } else if(sourceType == GlobalConstants.COMPONENT || targetType == GlobalConstants.COMPONENT){ return GlobalConstants.COMPONENT_CONNECTION; } else if(sourceType == GlobalConstants.PROMOTER && targetType == GlobalConstants.SPECIES){ return GlobalConstants.PRODUCTION; } else if (sourceType == GlobalConstants.SPECIES && targetType == GlobalConstants.SPECIES && (gcm.getSBMLDocument().getModel().getNumReactions() > 0) && cell.getValue() != null && (gcm.getSBMLDocument().getModel().getReaction((String)cell.getValue()) != null)) { return GlobalConstants.REACTION_EDGE; } else if (sourceType == GlobalConstants.REACTION || targetType == GlobalConstants.REACTION) { return GlobalConstants.REACTION_EDGE; } else if (sourceType == GlobalConstants.RULE || targetType == GlobalConstants.RULE) { return GlobalConstants.RULE_EDGE; } else if (sourceType == GlobalConstants.CONSTRAINT || targetType == GlobalConstants.CONSTRAINT) { return GlobalConstants.CONSTRAINT_EDGE; } else if (sourceType == GlobalConstants.EVENT || targetType == GlobalConstants.EVENT) { return GlobalConstants.EVENT_EDGE; } else { return GlobalConstants.INFLUENCE; } } //cell is a vertex else{ String type = ((CellValueObject)(cell.getValue())).type; if(type.equals("Component")) return GlobalConstants.COMPONENT; else if(type.equals("Species")) return GlobalConstants.SPECIES; else if(type.equals("Promoter")) return GlobalConstants.PROMOTER; else if(type.equals("Reaction")) return GlobalConstants.REACTION; else if(type.equals("Rule")) return GlobalConstants.RULE; else if(type.equals("Constraint")) return GlobalConstants.CONSTRAINT; else if(type.equals("Event")) return GlobalConstants.EVENT; else if (type.equals("Rectangle")) return GlobalConstants.GRID_RECTANGLE; else return CELL_VALUE_NOT_FOUND; } } /** * * @param cell * @return */ public String getCellType(mxICell cell) { return getCellType((mxCell)cell); } public String getModelFileName(String compId) { return gcm.getModelFileName(compId).replace(".xml", ".gcm"); } /** * * @param id * @return */ public mxCell getSpeciesCell(String id){ return speciesToMxCellMap.get(id); } /** * * @param id * @return */ public mxCell getReactionsCell(String id){ return reactionsToMxCellMap.get(id); } public mxCell getRulesCell(String id){ return rulesToMxCellMap.get(id); } public mxCell getConstraintsCell(String id){ return constraintsToMxCellMap.get(id); } public mxCell getEventsCell(String id){ return eventsToMxCellMap.get(id); } /** * * @param id * @return */ public mxCell getComponentCell(String id){ return componentsToMxCellMap.get(id); } /** * * @param id * @return */ public mxCell getDrawnPromoterCell(String id){ return drawnPromoterToMxCellMap.get(id); } /** * * @param id * @return */ public mxCell getInfluence(String id){ return (influencesToMxCellMap.get(id)); } /** * returns the mxCell corresponding to the id passed in * this mxCell is a grid rectangle on the grid * * @param id the id of the grid rectangle's cell * @return the corresponding cell */ public mxCell getGridRectangleCellFromID(String id) { return gridRectangleToMxCellMap.get(id); } /** * returns if the cell is selectable or not * * @param cell the cell that is or isn't selectable */ @Override public boolean isCellSelectable(Object cell) { mxCell tempCell = 
(mxCell)cell; //if it's a grid cell, it's not selectable //otherwise, do the default behavior if (tempCell.getStyle().equals("GRID_RECTANGLE")) return false; return isCellsSelectable(); } //GRAPH PART CREATION /** * * @param id * @return */ private boolean createGraphComponentFromModel(String id){ boolean needsPositioning = false; double x = 0; double y = 0; double width = 0; double height = 0; if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getCompartmentGlyph(id)!=null) { CompartmentGlyph compGlyph = layout.getCompartmentGlyph(id); x = compGlyph.getBoundingBox().getPosition().getXOffset(); y = compGlyph.getBoundingBox().getPosition().getYOffset(); width = compGlyph.getBoundingBox().getDimensions().getWidth(); height = compGlyph.getBoundingBox().getDimensions().getHeight(); } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } //set the correct compartment status BioModel compGCMFile = new BioModel(gcm.getPath()); boolean compart = false; //String modelFileName = gcm.getModelFileName(id).replace(".xml", ".gcm"); String modelFileName = gcm.getModelFileName(id); File compFile = new File(gcm.getPath() + File.separator + modelFileName); if (compGCMFile != null && compFile.exists()) { compGCMFile.load(gcm.getPath() + File.separator + modelFileName); compart = compGCMFile.IsWithinCompartment(); } else { JOptionPane.showMessageDialog(Gui.frame, "A model definition cannot be found for " + modelFileName + ".\nDropping component from the schematic.\n", "Warning", JOptionPane.WARNING_MESSAGE); return false; } if(x < -9998 || y < -9998){ unpositionedSpeciesComponentCount += 1; needsPositioning = true; // Line the unpositioned species up nicely. The mod is there as a rough // and dirty way to prevent // them going off the bottom or right hand side of the screen. 
x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); } String truncGCM = modelFileName.replace(".xml", ""); String truncID = ""; //if the id is too long, truncate it if (truncGCM.length() > 10) truncGCM = truncGCM.substring(0, 9) + "..."; if (id.length() > 10) truncID = id.substring(0, 9) + "..."; else truncID = id; String label = truncID + "\n" + truncGCM; CellValueObject cvo = new CellValueObject(label, "Component", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, x, y, width, height); this.componentsToMxCellMap.put(id, (mxCell)insertedVertex); //pass whether or not the component is a compartment, as the styles are different this.setComponentStyles(id, compart); // now draw the edges that connect the component for (String propName : gcm.getInputs(id).keySet()) { // input, the arrow should point in from the species String topSpecies = gcm.getInputs(id).get(propName); Object createdEdge = this.insertEdge(this.getDefaultParent(), "", "", this.getSpeciesCell(topSpecies),insertedVertex); String key = id + " Input " + topSpecies; componentsConnectionsToMxCellMap.put(key, (mxCell)createdEdge); this.updateComponentConnectionVisuals((mxCell)createdEdge, propName); } // now draw the edges that connect the component for (String propName : gcm.getOutputs(id).keySet()) { // output, the arrow should point out to the species String topSpecies = gcm.getOutputs(id).get(propName); Object createdEdge = this.insertEdge(this.getDefaultParent(), "", "", insertedVertex, this.getSpeciesCell(topSpecies)); String key = id + " Output " + topSpecies; componentsConnectionsToMxCellMap.put(key, (mxCell)createdEdge); this.updateComponentConnectionVisuals((mxCell)createdEdge, propName); } return needsPositioning; } /** * creates a vertex on the graph using the internal model. * @param id * * @return: A bool, true if the species had to be positioned. */ private boolean createGraphSpeciesFromModel(String sp){ String type = BioModel.getSpeciesType(gcm.getSBMLDocument(),sp); if (gcm.getDiffusionReaction(sp)!=null) type += " (D)"; if (gcm.isSpeciesConstitutive(sp)) type += " (C)"; if (type.equals(GlobalConstants.MRNA)) return false; String truncID = ""; if (sp.length() > 12) truncID = sp.substring(0, 11) + "..."; else truncID = sp; String label = truncID + '\n' + type; CellValueObject cvo = new CellValueObject(label, "Species", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), sp, cvo, 1, 1, 1, 1); this.speciesToMxCellMap.put(sp, (mxCell)insertedVertex); this.setSpeciesStyles(sp); return sizeAndPositionFromProperties((mxCell)insertedVertex); } /** * creates a vertex on the graph using the internal model. * @param id * * @return: A bool, true if the reaction had to be positioned. 
*/ private boolean createGraphReactionFromModel(String id){ CellValueObject cvo = new CellValueObject(id, "Reaction", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.reactionsToMxCellMap.put(id, (mxCell)insertedVertex); this.setReactionStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } private boolean createGraphRuleFromModel(String id){ CellValueObject cvo = new CellValueObject(id, "Rule", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.rulesToMxCellMap.put(id, (mxCell)insertedVertex); this.setRuleStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } private boolean createGraphConstraintFromModel(String id){ CellValueObject cvo = new CellValueObject(id, "Constraint", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.constraintsToMxCellMap.put(id, (mxCell)insertedVertex); this.setConstraintStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } private boolean createGraphEventFromModel(String id){ CellValueObject cvo = new CellValueObject(id, "Event", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.eventsToMxCellMap.put(id, (mxCell)insertedVertex); this.setEventStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } /** * Creates a drawn promoter using the internal model * @param pname * @return */ private boolean createGraphDrawnPromoterFromModel(String id){ String truncID; if (id.length() > 8) truncID = id.substring(0, 7) + "..."; else truncID = id; CellValueObject cvo = new CellValueObject(truncID, "Promoter", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.drawnPromoterToMxCellMap.put(id, (mxCell)insertedVertex); this.setDrawnPromoterStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } /** * creates an edge between two graph entities */ @Override public Object insertEdge(Object parent, String id, Object value, Object source, Object target, String style){ Object ret = super.insertEdge(parent, id, value, source, target, style); this.influencesToMxCellMap.put(id, (mxCell)ret); return ret; } //VISUALS /** * Given an id, update the style of the influence based on the internal model. */ /* private void updateInfluenceVisuals(String id){ Properties prop = gcm.getInfluences().get(id); //gcm.getSBMLDocument().getModel(); if(prop == null) throw new Error("Invalid id '"+id+"'. Valid ids were:" + String.valueOf(gcm.getInfluences().keySet())); // build the edge style String style = "defaultEdge;" + mxConstants.STYLE_ENDARROW + "="; if(prop.getProperty(GlobalConstants.TYPE).equals(GlobalConstants.ACTIVATION)) style = "ACTIVATION"; else if(prop.getProperty(GlobalConstants.TYPE).equals(GlobalConstants.REPRESSION)) style = "REPRESSION"; else if(prop.getProperty(GlobalConstants.TYPE).equals(GlobalConstants.NOINFLUENCE)) style = "NOINFLUENCE"; else if(prop.getProperty(GlobalConstants.TYPE).equals(GlobalConstants.COMPLEX)) style = "COMPLEX"; else style = "DEFAULT"; // apply the style mxCell cell = this.getInfluence(id); cell.setStyle(style); // apply the promoter name as a label, only if the promoter isn't drawn. 
if(gcm.influenceHasExplicitPromoter(id) == false) cell.setValue(prop.getProperty(GlobalConstants.PROMOTER)); }; */ /** * * @param cell * @param label */ public void updateComponentConnectionVisuals(mxCell cell, String label){ //cell.setStyle(mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); cell.setStyle("COMPONENT_EDGE"); cell.setValue("Port " + label); // position the label as intelligently as possible mxGeometry geom = cell.getGeometry(); if(this.getCellType(cell.getSource()) == GlobalConstants.COMPONENT){ geom.setX(-.6); } else{ geom.setX(.6); } cell.setGeometry(geom); } /** * Builds the style sheets that will be used by the graph. */ public void createStyleSheets(){ mxStylesheet stylesheet = this.getStylesheet(); //species Hashtable<String, Object> style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FILLCOLOR, "#5CB4F2"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, true); stylesheet.putCellStyle("SPECIES", style); //reactions style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_ELLIPSE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FILLCOLOR, "#C7007B"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); stylesheet.putCellStyle("REACTION", style); //rules style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_SWIMLANE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#FFFF00"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("RULE", style); //constraints style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_HEXAGON); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#FF0000"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("CONSTRAINT", style); //events style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#00FF00"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("EVENT", style); //components style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#87F274"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("COMPONENT", style); //grid components style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#87F274"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("GRIDCOMPONENT", style); 
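// Note on the style registry built in this method: every key registered here
// ("SPECIES", "REACTION", "RULE", "CONSTRAINT", "EVENT", "COMPONENT", the edge styles
// such as "ACTIVATION", "REPRESSION", "COMPLEX", "PRODUCTION", "REACTION_EDGE", etc.)
// is looked up again by name elsewhere in this class via cell.setStyle(...), both in the
// set*Styles helpers and in buildGraph/updateComponentConnectionVisuals. Renaming a key
// here therefore requires renaming it at those call sites as well.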
//compartments style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, true); style.put(mxConstants.STYLE_FILLCOLOR, "#87F274"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("COMPARTMENT", style); //grid compartments style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, true); style.put(mxConstants.STYLE_FILLCOLOR, "#87F274"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("GRIDCOMPARTMENT", style); //grid rectangle style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "none"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_MOVABLE, false); style.put(mxConstants.STYLE_RESIZABLE, false); style.put(mxConstants.STYLE_NOLABEL, true); stylesheet.putCellStyle("GRID_RECTANGLE", style); //component edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#FFAA00"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); stylesheet.putCellStyle("COMPONENT_EDGE", style); //production edge (promoter to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#34BA04"); style.put(mxConstants.STYLE_STROKECOLOR, "#34BA04"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_EDGE, mxConstants.EDGESTYLE_ENTITY_RELATION); stylesheet.putCellStyle("PRODUCTION", style); //activation edge (species to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#34BA04"); style.put(mxConstants.STYLE_STROKECOLOR, "#34BA04"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_BLOCK); stylesheet.putCellStyle("ACTIVATION", style); //repression edge (species to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#FA2A2A"); style.put(mxConstants.STYLE_STROKECOLOR, "#FA2A2A"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OVAL); stylesheet.putCellStyle("REPRESSION", style); //no influence (species to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#000000"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_DIAMOND); style.put(mxConstants.STYLE_DASHED, "true"); stylesheet.putCellStyle("NOINFLUENCE", style); //complex formation edge (species to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); 
style.put(mxConstants.STYLE_FILLCOLOR, "#4E5D9C"); style.put(mxConstants.STYLE_STROKECOLOR, "#4E5D9C"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_DASHED, "true"); stylesheet.putCellStyle("COMPLEX", style); //reaction edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#F2861B"); style.put(mxConstants.STYLE_STROKECOLOR, "#F2861B"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_DASHED, "false"); stylesheet.putCellStyle("REACTION_EDGE", style); //rule edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#F2861B"); style.put(mxConstants.STYLE_STROKECOLOR, "#F2861B"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_DASHED, "false"); stylesheet.putCellStyle("RULE_EDGE", style); //constraint edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#F2861B"); style.put(mxConstants.STYLE_STROKECOLOR, "#F2861B"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_DASHED, "false"); stylesheet.putCellStyle("CONSTRAINT_EDGE", style); //event edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#F2861B"); style.put(mxConstants.STYLE_STROKECOLOR, "#F2861B"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_DASHED, "false"); stylesheet.putCellStyle("EVENT_EDGE", style); //default edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#000000"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_CLASSIC); style.put(mxConstants.STYLE_DASHED, "false"); stylesheet.putCellStyle("DEFAULT", style); //explicit promoter style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RHOMBUS); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#F00E0E"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("EXPLICIT_PROMOTER", style); } //STYLE SETTING /** * * @param id */ private void setSpeciesStyles(String id){ String style="SPECIES;"; mxCell cell = this.getSpeciesCell(id); cell.setStyle(style); } /** * * @param id */ private void setReactionStyles(String id){ String style="REACTION;"; mxCell cell = this.getReactionsCell(id); cell.setStyle(style); } private void setRuleStyles(String id){ String style="RULE;"; mxCell cell = this.getRulesCell(id); cell.setStyle(style); } private void setConstraintStyles(String id){ String style="CONSTRAINT;"; mxCell cell = this.getConstraintsCell(id); cell.setStyle(style); } private void setEventStyles(String id){ String style="EVENT;"; mxCell cell = this.getEventsCell(id); cell.setStyle(style); } /** * * @param id * @param compart */ private void setComponentStyles(String id, boolean compart){ String style = ""; if (gcm.getGrid().isEnabled()) { if (compart) 
style = "GRIDCOMPARTMENT;"; else style = "GRIDCOMPONENT;"; } else { if (compart) style = "COMPARTMENT;"; else style = "COMPONENT;"; } mxCell cell = this.getComponentCell(id); cell.setStyle(style); } /** * * @param id */ private void setDrawnPromoterStyles(String id){ String style="EXPLICIT_PROMOTER"; mxCell cell = this.getDrawnPromoterCell(id); cell.setStyle(style); } //ANIMATION /** * */ public void setSpeciesAnimationValue(String species, MovieAppearance appearance) { mxCell cell = this.speciesToMxCellMap.get(species); if (cell != null) { setCellAnimationValue(cell, appearance); } } /** * */ public void setComponentAnimationValue(String component, MovieAppearance appearance) { mxCell cell = this.componentsToMxCellMap.get(component); setCellAnimationValue(cell, appearance); } public void setGridRectangleAnimationValue(String gridLocation, MovieAppearance appearance) { mxCell cell = this.gridRectangleToMxCellMap.get(gridLocation); setCellAnimationValue(cell, appearance); } /** * Applies the MovieAppearance to the cell * @param cell * @param appearance * @param properties */ private void setCellAnimationValue(mxCell cell, MovieAppearance appearance) { if (appearance == null) return; // color String newStyle = cell.getStyle() + ";"; if (appearance.color != null) { newStyle += mxConstants.STYLE_FILLCOLOR + "=" + Integer.toHexString(appearance.color.getRGB()) + ";"; newStyle += mxConstants.STYLE_OPACITY + "=" + 75; } // opacity if (appearance.opacity != null) { newStyle += ";"; double op = (appearance.opacity) * 100.0; newStyle += mxConstants.STYLE_OPACITY + "=" + String.valueOf(op); } if (newStyle != null) cell.setStyle(newStyle); // size if (appearance.size != null) { double x = 0; double y = 0; double width = 0; double height = 0; if (getCellType(cell).equals(GlobalConstants.SPECIES)|| getCellType(cell).equals(GlobalConstants.PROMOTER)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getSpeciesGlyph((String)cell.getId())!=null) { SpeciesGlyph speciesGlyph = layout.getSpeciesGlyph((String)cell.getId()); x = speciesGlyph.getBoundingBox().getPosition().getXOffset(); y = speciesGlyph.getBoundingBox().getPosition().getYOffset(); width = speciesGlyph.getBoundingBox().getDimensions().getWidth(); height = speciesGlyph.getBoundingBox().getDimensions().getHeight(); } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_SPECIES_WIDTH; height = GlobalConstants.DEFAULT_SPECIES_HEIGHT; } } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_SPECIES_WIDTH; height = GlobalConstants.DEFAULT_SPECIES_HEIGHT; } } else if (getCellType(cell).equals(GlobalConstants.REACTION)|| getCellType(cell).equals(GlobalConstants.RULE)|| getCellType(cell).equals(GlobalConstants.CONSTRAINT)|| getCellType(cell).equals(GlobalConstants.EVENT)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getReactionGlyph((String)cell.getId())!=null) { ReactionGlyph reactionGlyph = layout.getReactionGlyph((String)cell.getId()); x = reactionGlyph.getBoundingBox().getPosition().getXOffset(); y = reactionGlyph.getBoundingBox().getPosition().getYOffset(); width = reactionGlyph.getBoundingBox().getDimensions().getWidth(); height = reactionGlyph.getBoundingBox().getDimensions().getHeight(); } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_REACTION_WIDTH; height = GlobalConstants.DEFAULT_REACTION_HEIGHT; } } else { x = -9999; y = -9999; width = 
GlobalConstants.DEFAULT_REACTION_WIDTH; height = GlobalConstants.DEFAULT_REACTION_HEIGHT; } } else if (getCellType(cell).equals(GlobalConstants.COMPONENT)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getCompartmentGlyph((String)cell.getId())!=null) { CompartmentGlyph compGlyph = layout.getCompartmentGlyph((String)cell.getId()); x = compGlyph.getBoundingBox().getPosition().getXOffset(); y = compGlyph.getBoundingBox().getPosition().getYOffset(); width = compGlyph.getBoundingBox().getDimensions().getWidth(); height = compGlyph.getBoundingBox().getDimensions().getHeight(); } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } } double aspect_ratio = height / width; double centerX = x + width/2.0; double centerY = y + height/2.0; mxGeometry startG = cell.getGeometry(); startG.setWidth(appearance.size); startG.setHeight(appearance.size * aspect_ratio); startG.setX(centerX - startG.getWidth()/2.0); startG.setY(centerY - startG.getHeight()/2.0); } } //CELL VALUE OBJECT CLASS /** * The object that gets set as the mxCell value object. * It is basically a way to store a property and label. */ public class CellValueObject extends Object implements Serializable{ private static final long serialVersionUID = 918273645; public Properties prop; public String label; public String type; @Override /** * */ public String toString(){ return this.label; } /** * * @param oos * @throws IOException */ private void writeObject(ObjectOutputStream oos) throws IOException { oos.writeObject(label); oos.writeObject(prop); } /** * * @param ois * @throws ClassNotFoundException * @throws IOException */ private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException { label = ois.readObject().toString(); prop = (Properties)ois.readObject(); } /** * * * @param label * @param prop */ public CellValueObject(String label, String type, Properties prop){ if(label == null || type == null) throw new Error("Neither label nor type can be null!"); this.label = label; this.type = type; this.prop = prop; } } //FUNCTIONS THAT TYLER COMMENTED OUT THAT WILL PROBABLY NEVER BE USED // /** // * Overwrite the parent insertVertex and additionally put the vertex into our hashmap. // * @return // */ // public Object insertVertex(Object parent, String id, Object value, double x, double y, double width, double height){ // Object ret = super.insertVertex(parent, id, value, x, y, width, height); // this.speciesToMxCellMap.put(id, (mxCell)ret); // return ret; // } // // /** // * returns the name of the component-to-species connection // * @param compName // * @param speciesName // * @return // */ // /* // private String getComponentConnectionName(String compName, String speciesName){ // return compName + " (component connection) " + speciesName; // } // */ // // /** // * creates a vertex on the graph using the internal model. // * @param id // * // * @return: A bool, true if the species had to be positioned. 
// */ // /* // private boolean createGraphCompartmentFromModel(String sp){ // Properties prop = new Properties(); // String id = sp; // CellValueObject cvo = new CellValueObject(id, prop); // Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); // this.speciesToMxCellMap.put(id, (mxCell)insertedVertex); // // this.setSpeciesStyles(sp); // // return sizeAndPositionFromProperties((mxCell)insertedVertex, prop); // } // */ // // /** // * called after a species is deleted. Make sure to delete it from // * the internal model. // * @param id // */ // public void speciesRemoved(String id){ // this.speciesToMxCellMap.remove(id); // } // public void influenceRemoved(String id){ // this.influencesToMxCellMap.remove(id); // } }
gui/src/biomodel/gui/schematic/BioGraph.java
/** * */ package biomodel.gui.schematic; import java.awt.Rectangle; import java.io.File; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.HashMap; import java.util.Hashtable; import java.util.Properties; import java.util.Vector; import javax.swing.JOptionPane; import org.sbml.libsbml.CompModelPlugin; import org.sbml.libsbml.CompartmentGlyph; import org.sbml.libsbml.Constraint; import org.sbml.libsbml.Event; import org.sbml.libsbml.Layout; import org.sbml.libsbml.Model; import org.sbml.libsbml.ModifierSpeciesReference; import org.sbml.libsbml.Reaction; import org.sbml.libsbml.ReactionGlyph; import org.sbml.libsbml.Rule; import org.sbml.libsbml.Species; import org.sbml.libsbml.SpeciesGlyph; import org.sbml.libsbml.SpeciesReference; import org.sbml.libsbml.SpeciesReferenceGlyph; import org.sbml.libsbml.TextGlyph; import main.Gui; //import javax.xml.bind.JAXBElement.GlobalScope; import biomodel.gui.Grid; import biomodel.gui.movie.MovieAppearance; import biomodel.gui.textualeditor.SBMLutilities; import biomodel.parser.BioModel; import biomodel.util.GlobalConstants; import com.mxgraph.model.mxCell; import com.mxgraph.model.mxGeometry; import com.mxgraph.model.mxICell; import com.mxgraph.swing.mxGraphComponent; import com.mxgraph.util.mxConstants; import com.mxgraph.util.mxPoint; import com.mxgraph.view.mxGraph; import com.mxgraph.view.mxStylesheet; /** * @author Tyler [email protected] * */ public class BioGraph extends mxGraph { private double DIS_BETWEEN_NEIGHBORING_EDGES = 35.0; private double SECOND_SELF_INFLUENCE_DISTANCE = 20; private HashMap<String, mxCell> speciesToMxCellMap; private HashMap<String, mxCell> reactionsToMxCellMap; private HashMap<String, mxCell> rulesToMxCellMap; private HashMap<String, mxCell> constraintsToMxCellMap; private HashMap<String, mxCell> eventsToMxCellMap; private HashMap<String, mxCell> influencesToMxCellMap; private HashMap<String, mxCell> componentsToMxCellMap; private HashMap<String, mxCell> componentsConnectionsToMxCellMap; private HashMap<String, mxCell> drawnPromoterToMxCellMap; private HashMap<String, mxCell> gridRectangleToMxCellMap; mxCell cell = new mxCell(); private BioModel gcm; public final String CELL_NOT_FULLY_CONNECTED = "cell not fully connected"; private final String CELL_VALUE_NOT_FOUND = "cell value not found"; // only bother the user about bad promoters once. //This should be improved to happen once per GCM file if this will be a common error. public boolean isBuilding = false; public boolean dynamic = false; // Keep track of how many elements did not have positioning info. // This allows us to stack them in the topleft corner until they // are positioned by the user or a layout algorithm. int unpositionedSpeciesComponentCount = 0; /** * constructor * @param gcm */ public BioGraph(BioModel gcm) { super(); // Turn editing off to prevent mxGraph from letting the user change the // label on the cell. We want to do this using the property windows. 
this.setCellsEditable(false); this.gcm = gcm; this.initializeMaps(); createStyleSheets(); } /** * sets the hash maps to null */ private void initializeMaps(){ speciesToMxCellMap = new HashMap<String, mxCell>(); reactionsToMxCellMap = new HashMap<String, mxCell>(); rulesToMxCellMap = new HashMap<String, mxCell>(); constraintsToMxCellMap = new HashMap<String, mxCell>(); eventsToMxCellMap = new HashMap<String, mxCell>(); componentsToMxCellMap = new HashMap<String, mxCell>(); influencesToMxCellMap = new HashMap<String, mxCell>(); componentsConnectionsToMxCellMap = new HashMap<String, mxCell>(); drawnPromoterToMxCellMap = new HashMap<String, mxCell>(); gridRectangleToMxCellMap = new HashMap<String, mxCell>(); } //GRAPH BUILDING /** * appplies a layout to the graphComponent * * @param ident * @param graphComponent */ public void applyLayout(String ident, mxGraphComponent graphComponent){ Layouting.applyLayout(ident, this, graphComponent); } /** * Builds the graph based on the internal representation * @return */ public boolean buildGraph() { this.isBuilding = true; // remove all the cells from the graph (vertices and edges) this.removeCells(this.getChildCells(this.getDefaultParent(), true, true)); initializeMaps(); assert(this.gcm != null); // Start an undo transaction this.getModel().beginUpdate(); boolean needsPositioning = false; unpositionedSpeciesComponentCount = 0; //createGraphCompartmentFromModel("default"); //put the grid cells in first so that they're below the other cells addGridCells(); // add species for(String sp : gcm.getSpecies()){ if (gcm.getSBMLDocument().getModel().getSpecies(sp).getAnnotationString().contains("type=\"grid\"")) continue; if(createGraphSpeciesFromModel(sp)) needsPositioning = true; } Model m = gcm.getSBMLDocument().getModel(); int x = 225; int y = 50; // add reactions Layout layout = gcm.createLayout(); for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (BioModel.isDegradationReaction(r)) continue; if (BioModel.isDiffusionReaction(r)) continue; if (BioModel.isProductionReaction(r)) continue; if (r.getAnnotationString().contains("Complex")) continue; if (r.getAnnotationString().contains("Constitutive")) continue; if (r.getAnnotationString().contains("grid")) continue; if (layout.getReactionGlyph(r.getId()) != null || r.getId().startsWith("Production_")) { if(!r.getId().startsWith("Production_") && createGraphReactionFromModel(r.getId())) needsPositioning = true; } else { if (r.getNumModifiers() > 0 || (r.getNumReactants()>1 && r.getNumProducts()>1) || r.getNumReactants()==0 || r.getNumProducts()==0) { ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").createReactionGlyph(); reactionGlyph.setId(r.getId()); reactionGlyph.setReactionId(r.getId()); reactionGlyph.getBoundingBox().setX(x); reactionGlyph.getBoundingBox().setY(y); reactionGlyph.getBoundingBox().setWidth(GlobalConstants.DEFAULT_REACTION_WIDTH); reactionGlyph.getBoundingBox().setHeight(GlobalConstants.DEFAULT_REACTION_HEIGHT); TextGlyph textGlyph = null; if (layout.getTextGlyph(r.getId())!=null) { textGlyph = layout.getTextGlyph(r.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId(r.getId()); textGlyph.setGraphicalObjectId(r.getId()); textGlyph.setText(r.getId()); textGlyph.setBoundingBox(reactionGlyph.getBoundingBox()); x+=50; y+=25; if(createGraphReactionFromModel(r.getId())) needsPositioning = true; } } } // add rules for (long i = 0; i < m.getNumRules(); i++) { Rule rule = m.getRule(i); if 
(layout.getReactionGlyph(rule.getMetaId())!=null) { if(createGraphRuleFromModel(rule.getMetaId())) { needsPositioning = true; } } else { ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").createReactionGlyph(); reactionGlyph.setId(rule.getMetaId()); reactionGlyph.setReactionId(rule.getMetaId()); reactionGlyph.getBoundingBox().setX(x); reactionGlyph.getBoundingBox().setY(y); reactionGlyph.getBoundingBox().setWidth(GlobalConstants.DEFAULT_RULE_WIDTH); reactionGlyph.getBoundingBox().setHeight(GlobalConstants.DEFAULT_RULE_HEIGHT); TextGlyph textGlyph = null; if (layout.getTextGlyph(rule.getMetaId())!=null) { textGlyph = layout.getTextGlyph(rule.getMetaId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId(rule.getMetaId()); textGlyph.setGraphicalObjectId(rule.getMetaId()); textGlyph.setText(rule.getMetaId()); textGlyph.setBoundingBox(reactionGlyph.getBoundingBox()); x+=50; y+=25; if(createGraphRuleFromModel(rule.getMetaId())) needsPositioning = true; } } // add constraints for (long i = 0; i < m.getNumConstraints(); i++) { Constraint constraint = m.getConstraint(i); if (layout.getReactionGlyph(constraint.getMetaId())!=null) { if(createGraphConstraintFromModel(constraint.getMetaId())) { needsPositioning = true; } } else { // create layout for constraints } } // add events for (long i = 0; i < m.getNumEvents(); i++) { Event event = m.getEvent(i); if (layout.getReactionGlyph(event.getId())!=null) { if(createGraphEventFromModel(event.getId())) { needsPositioning = true; } } else { // create layout for events } } // add all components if (gcm.isGridEnabled()) { for (long i = 0; i < layout.getNumCompartmentGlyphs(); i++) { String comp = layout.getCompartmentGlyph(i).getId(); //these are not meant to be displayed //if (comp.contains("GRID__")) // continue; if (createGraphComponentFromModel(comp)) needsPositioning = true; } } else { CompModelPlugin sbmlCompModel = gcm.getSBMLCompModel(); for (long i = 0; i < sbmlCompModel.getNumSubmodels(); i++) { String comp = sbmlCompModel.getSubmodel(i).getId(); //String comp = gcm.getSBMLCompModel().getSubmodel(i).getId(); //these are not meant to be displayed //if (comp.contains("GRID__")) // continue; if (createGraphComponentFromModel(comp)) needsPositioning = true; } } // add all the drawn promoters for(String prom : gcm.getPromoters()){ if (gcm.isPromoterExplicit(prom)) { if(createGraphDrawnPromoterFromModel(prom)) needsPositioning = true; } } boolean needsRedrawn = false; // add all the edges. 
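// The influence edge ids built in the loop below follow a small naming convention that
// other parts of this class (e.g. addEdgeOffsets) reconstruct verbatim:
//   "A+>B"  complex formation from reactant A to complex B
//   "A->B"  activation, "A-|B" repression, "Ax>B" no influence
// and, when the promoter is not drawn explicitly, ",promoterId" is appended to the id
// so the same species pair can carry influences attributed to different promoters.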
for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (r.getAnnotationString().contains("grid")) continue; if (r.getAnnotationString().contains("Complex")) { for (int j = 0; j < r.getNumReactants(); j++) { String id = r.getReactant(j).getSpecies() + "+>" + r.getProduct(0).getSpecies(); this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getReactant(j).getSpecies()), this.getSpeciesCell(r.getProduct(0).getSpecies())); String style = "COMPLEX"; mxCell cell = this.getInfluence(id); cell.setStyle(style); } } else if (BioModel.isProductionReaction(r)) { String promoterId = r.getId().replace("Production_",""); if (gcm.isPromoterExplicit(promoterId)) { for (int j = 0; j < r.getNumProducts(); j++) { if (r.getProduct(j).getSpecies().endsWith("_mRNA")) continue; String id = promoterId + "->" + r.getProduct(j).getSpecies(); mxCell production = (mxCell)this.insertEdge(this.getDefaultParent(), id, "", this.getDrawnPromoterCell(promoterId), this.getSpeciesCell(r.getProduct(j).getSpecies())); production.setStyle("PRODUCTION"); } for (int j = 0; j < r.getNumModifiers(); j++) { if (BioModel.isRepressor(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); String style = "REPRESSION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); } else if (BioModel.isActivator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); String style = "ACTIVATION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); } else if (r.getModifier(j).getAnnotationString().contains(GlobalConstants.NOINFLUENCE)) { String id = r.getModifier(j).getSpecies() + "x>" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); String style = "NOINFLUENCE"; mxCell cell = this.getInfluence(id); cell.setStyle(style); } else if (BioModel.isRegulator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); String style = "REPRESSION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); id = r.getModifier(j).getSpecies() + "->" + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getDrawnPromoterCell(promoterId)); style = "ACTIVATION"; cell = this.getInfluence(id); cell.setStyle(style); } } } else { for (int j = 0; j < r.getNumModifiers(); j++) { for (int k = 0; k < r.getNumProducts(); k++) { if (BioModel.isRepressor(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); String style = "REPRESSION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); } else if (BioModel.isActivator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", 
this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); String style = "ACTIVATION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); } else if (r.getModifier(j).getAnnotationString().contains(GlobalConstants.NOINFLUENCE)) { String id = r.getModifier(j).getSpecies() + "x>" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); String style = "NOINFLUENCE"; mxCell cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); } else if (BioModel.isRegulator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); String style = "REPRESSION"; mxCell cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); id = r.getModifier(j).getSpecies() + "->" + r.getProduct(k).getSpecies() + "," + promoterId; this.insertEdge(this.getDefaultParent(), id, "", this.getSpeciesCell(r.getModifier(j).getSpecies()), this.getSpeciesCell(r.getProduct(k).getSpecies())); style = "ACTIVATION"; cell = this.getInfluence(id); cell.setStyle(style); cell.setValue(promoterId); } } } } } } //add reactions for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (BioModel.isDegradationReaction(r)) continue; if (BioModel.isDiffusionReaction(r)) continue; if (BioModel.isProductionReaction(r)) continue; if (r.getAnnotationString().contains("Complex")) continue; if (r.getAnnotationString().contains("Constitutive")) continue; if (r.getAnnotationString().contains("grid")) continue; ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").getReactionGlyph(r.getId()); if (reactionGlyph != null) { while (reactionGlyph.getNumSpeciesReferenceGlyphs() > 0) reactionGlyph.removeSpeciesReferenceGlyph(0); for (int j = 0; j < r.getNumReactants(); j++) { SpeciesReference s = r.getReactant(j); mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), s.getSpecies() + "__" + r.getId(), "", this.getSpeciesCell(s.getSpecies()), this.getReactionsCell(r.getId())); if (r.getReversible()) { if (s.getStoichiometry() != 1.0) cell.setValue(s.getStoichiometry()+",r"); else cell.setValue("r"); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN + ";" + mxConstants.STYLE_STARTARROW + "=" + mxConstants.ARROW_OPEN); } else { if (s.getStoichiometry() != 1.0) cell.setValue(s.getStoichiometry()); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); } String reactant = s.getSpecies(); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(reactant); speciesReferenceGlyph.setSpeciesGlyphId(reactant); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } for (int j = 0; j < r.getNumModifiers(); j++) { ModifierSpeciesReference s = r.getModifier(j); mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), 
s.getSpecies() + "__" + r.getId(), "", this.getSpeciesCell(s.getSpecies()), this.getReactionsCell(r.getId())); if (r.getReversible()) cell.setValue("m"); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.NONE); String modifier = s.getSpecies(); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(modifier); speciesReferenceGlyph.setSpeciesGlyphId(modifier); speciesReferenceGlyph.setRole("modifier"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } for (int k = 0; k < r.getNumProducts(); k++) { SpeciesReference s = r.getProduct(k); mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), r.getId() + "__" + s.getSpecies(), "", this.getReactionsCell(r.getId()), this.getSpeciesCell(s.getSpecies())); if (r.getReversible()) { if (s.getStoichiometry() != 1.0) cell.setValue(s.getStoichiometry()+",p"); else cell.setValue("p"); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN + ";" + mxConstants.STYLE_STARTARROW + "=" + mxConstants.ARROW_OPEN); } else { if (s.getStoichiometry() != 1.0) cell.setValue(s.getStoichiometry()); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); } String product = s.getSpecies(); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(product); speciesReferenceGlyph.setSpeciesGlyphId(product); speciesReferenceGlyph.setRole("product"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } else { for (int j = 0; j < r.getNumReactants(); j++) { SpeciesReference s1 = r.getReactant(j); for (int k = 0; k < r.getNumProducts(); k++) { SpeciesReference s2 = r.getProduct(k); mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), s1.getSpecies() + "_" + r.getId() + "_" + s2.getSpecies(), "", this.getSpeciesCell(s1.getSpecies()), this.getSpeciesCell(s2.getSpecies())); cell.setValue(r.getId()); if (r.getReversible()) { cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN + ";" + mxConstants.STYLE_STARTARROW + "=" + mxConstants.ARROW_OPEN); } else { cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); } } } } } //add rules for (int i = 0; i < m.getNumRules(); i++) { Rule r = m.getRule(i); ReactionGlyph reactionGlyph = gcm.getSBMLLayout().getLayout("iBioSim").getReactionGlyph(r.getMetaId()); if (reactionGlyph != null) { while (reactionGlyph.getNumSpeciesReferenceGlyphs() > 0) reactionGlyph.removeSpeciesReferenceGlyph(0); // Add support String initStr = SBMLutilities.myFormulaToString(r.getMath()); String[] vars = initStr.split(" |\\(|\\)|\\,"); for (int j = 0; j < vars.length; j++) { Species species = m.getSpecies(vars[j]); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), species.getId() + "__" + r.getMetaId(), "", this.getSpeciesCell(species.getId()), this.getRulesCell(r.getMetaId())); cell.setStyle("RULE_EDGE;" + 
mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("substrate"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } // Add variable if (r.isAssignment() || r.isRate()) { Species species = m.getSpecies(r.getVariable()); if (species != null) { mxCell cell = (mxCell)this.insertEdge(this.getDefaultParent(), r.getMetaId() + "__" + species.getId(), "", this.getRulesCell(r.getMetaId()), this.getSpeciesCell(species.getId())); cell.setStyle("REACTION_EDGE;" + mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); SpeciesReferenceGlyph speciesReferenceGlyph = reactionGlyph.createSpeciesReferenceGlyph(); speciesReferenceGlyph.setId(species.getId()); speciesReferenceGlyph.setSpeciesGlyphId(species.getId()); speciesReferenceGlyph.setRole("product"); /* LineSegment lineSegment = speciesReferenceGlyph.createLineSegment(); lineSegment.setStart(cell.getSource().getGeometry().getCenterX(),cell.getSource().getGeometry().getCenterY()); lineSegment.setEnd(cell.getTarget().getGeometry().getCenterX(),cell.getTarget().getGeometry().getCenterY()); */ } } } } // TODO: ADD CONSTRAINT EDGES // TODO: ADD EVENT EDGES addEdgeOffsets(); this.getModel().endUpdate(); this.isBuilding = false; // if we found any incorrectly marked promoters we need to redraw. Do so now. // The promoters should all pass the second time. if(needsRedrawn){ return buildGraph(); } return needsPositioning; } /** * Loop through all the edges and add control points to reposition them * if they are laying over the top of any other edges. */ public void addEdgeOffsets(){ // Make a hash where the key is a string built from the ids of the source and destination // of all the edges. The source and destination will be sorted so that the same two // source-destination pair will always map to the same key. The value is a list // of edges. That way if there are ever more then one edge between pairs, // we can modify the geometry so they don't overlap. 
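// Key format used for the hash below: the two endpoint ids are sorted lexicographically
// and joined with a single space, e.g. an edge S1->S2 and an edge S2->S1 both map to the
// key "S1 S2", so all parallel/antiparallel edges between the same pair are offset as
// one group.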
HashMap<String, Vector<mxCell>> edgeHash = new HashMap<String, Vector<mxCell>>(); // build a temporary structure mapping sets of edge endpoints to edges // map influences Model m = gcm.getSBMLDocument().getModel(); for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (r.getAnnotationString().contains("Complex")) { for (int j = 0; j < r.getNumReactants(); j++) { String endA = r.getReactant(j).getSpecies(); String endB = r.getProduct(0).getSpecies(); String id = r.getReactant(j).getSpecies() + "+>" + r.getProduct(0).getSpecies(); if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } else if (BioModel.isProductionReaction(r)) { String promoterId = r.getId().replace("Production_",""); if (!gcm.isPromoterExplicit(promoterId)) { for (int j = 0; j < r.getNumModifiers(); j++) { for (int k = 0; k < r.getNumProducts(); k++) { String endA = r.getModifier(j).getSpecies(); String endB = r.getProduct(k).getSpecies(); if (BioModel.isRepressor(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "-|" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } else if (BioModel.isActivator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } else if (r.getModifier(j).getAnnotationString().contains(GlobalConstants.NOINFLUENCE)) { String id = r.getModifier(j).getSpecies() + "x>" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } else if (BioModel.isRegulator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); id = r.getModifier(j).getSpecies() + "-|" + r.getProduct(k).getSpecies() + "," + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } key = endA + " " + endB; cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } } } else { for (int j = 0; j < r.getNumModifiers(); j++) { String endA = r.getModifier(j).getSpecies(); String endB = promoterId; if (BioModel.isRegulator(r.getModifier(j))) { String id = r.getModifier(j).getSpecies() + "->" + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = 
endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); id = r.getModifier(j).getSpecies() + "-|" + promoterId; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } key = endA + " " + endB; cell = this.getInfluence(id); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } } } } for (int i = 0; i < m.getNumReactions(); i++) { Reaction r = m.getReaction(i); if (BioModel.isDegradationReaction(r)) continue; if (BioModel.isDiffusionReaction(r)) continue; if (BioModel.isProductionReaction(r)) continue; if (r.getAnnotationString().contains("Complex")) continue; if (r.getAnnotationString().contains("Constitutive")) continue; if (gcm.getSBMLLayout().getLayout("iBioSim").getReactionGlyph(r.getId()) != null) { for (int j = 0; j < r.getNumReactants(); j++) { SpeciesReference s = r.getReactant(j); String endA = s.getSpecies(); String endB = r.getId(); if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(s.getSpecies() + "__" + r.getId()); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } for (int j = 0; j < r.getNumModifiers(); j++) { ModifierSpeciesReference s = r.getModifier(j); String endA = s.getSpecies(); String endB = r.getId(); if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(s.getSpecies() + "__" + r.getId()); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } for (int k = 0; k < r.getNumProducts(); k++) { SpeciesReference s = r.getProduct(k); String endA = r.getId(); String endB = s.getSpecies(); if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(r.getId() + "__" + s.getSpecies()); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } else { for (int j = 0; j < r.getNumReactants(); j++) { SpeciesReference s1 = r.getReactant(j); for (int k = 0; k < r.getNumProducts(); k++) { SpeciesReference s2 = r.getProduct(k); String endA = s1.getSpecies(); String endB = s2.getSpecies(); // ignore anything connected directly to a drawn promoter //if(endA.equals(GlobalConstants.NONE) || endB.equals(GlobalConstants.NONE)) // continue; if(endA.compareTo(endB) > 0){ // swap the strings String t = endA; endA = endB; endB = t; } String key = endA + " " + endB; mxCell cell = this.getInfluence(s1.getSpecies() + "_" + r.getId() + "_" + s2.getSpecies()); if(edgeHash.containsKey(key) == false) edgeHash.put(key, new Vector<mxCell>()); edgeHash.get(key).add(cell); } } } } // map components edges for (long i = 0; i < gcm.getSBMLCompModel().getNumSubmodels(); i++) { String compName = gcm.getSBMLCompModel().getSubmodel(i).getId(); for (String propName : gcm.getInputs(compName).keySet()) { String targetName = gcm.getInputs(compName).get(propName); String type = "Input"; String key = compName + " "+type+" " + targetName; mxCell cell = componentsConnectionsToMxCellMap.get(key); String simpleKey = compName + " " + targetName; if(edgeHash.containsKey(simpleKey) == false) edgeHash.put(simpleKey, new 
Vector<mxCell>()); edgeHash.get(simpleKey).add(cell); } for (String propName : gcm.getOutputs(compName).keySet()) { String targetName = gcm.getOutputs(compName).get(propName); String type = "Output"; String key = compName + " "+type+" " + targetName; mxCell cell = componentsConnectionsToMxCellMap.get(key); String simpleKey = compName + " " + targetName; if(edgeHash.containsKey(simpleKey) == false) edgeHash.put(simpleKey, new Vector<mxCell>()); edgeHash.get(simpleKey).add(cell); } } // loop through every set of edge endpoints and then move them if needed. for(Vector<mxCell> vec:edgeHash.values()){ if(vec.size() > 1 && vec.get(0) != null){ mxCell source = (mxCell)vec.get(0).getSource(); mxCell target = (mxCell)vec.get(0).getTarget(); // find the end and center points mxGeometry t; t = source.getGeometry(); mxPoint sp = new mxPoint(t.getCenterX(), t.getCenterY()); t = target.getGeometry(); mxPoint tp = new mxPoint(t.getCenterX(), t.getCenterY()); mxPoint cp = new mxPoint((tp.getX()+sp.getX())/2.0, (tp.getY()+sp.getY())/2.0); // check for self-influence if(source == target){ mxCell c = vec.get(0); mxGeometry geom = c.getGeometry(); // set the self-influence's point to the left of the influence. // This causes the graph library to draw it rounded in that direction. mxPoint p = new mxPoint( cp.getX() - t.getWidth()/2-SECOND_SELF_INFLUENCE_DISTANCE, cp.getY() ); Vector<mxPoint> points = new Vector<mxPoint>(); points.add(p); geom.setPoints(points); c.setGeometry(geom); continue; } // make a unit vector that points in the direction perpendicular to the // direction from one endpoint to the other. 90 degrees rotated means flip // the x and y coordinates. mxPoint dVec = new mxPoint(-(sp.getY()-tp.getY()), sp.getX()-tp.getX()); double magnitude = Math.sqrt(dVec.getX()*dVec.getX() + dVec.getY()*dVec.getY()); // avoid divide-by-zero errors magnitude = Math.max(magnitude, .1); // normalize dVec.setX(dVec.getX()/magnitude); dVec.setY(dVec.getY()/magnitude); // loop through all the edges, create a new midpoint and apply it. // also move the edge center to the midpoint so that labels won't be // on top of each other. for(int i=0; i<vec.size(); i++){ double offset = i-(vec.size()-1.0)/2.0; mxCell edge = vec.get(i); //cell.setGeometry(new mxGeometry(0, 0, 100, 100)); mxGeometry geom = edge.getGeometry(); Vector<mxPoint> points = new Vector<mxPoint>(); mxPoint p = new mxPoint( cp.getX()+dVec.getX()*offset*DIS_BETWEEN_NEIGHBORING_EDGES, cp.getY()+dVec.getY()*offset*DIS_BETWEEN_NEIGHBORING_EDGES ); points.add(p); geom.setPoints(points); // geom.setX(p.getX()); // geom.setY(p.getY()); edge.setGeometry(geom); } } } // for(Object edgeo:this.getSelectionCell()){ // mxCell edge = (mxCell)edgeo; // int s = edge.getSource().getEdgeCount(); // int t = edge.getTarget().getEdgeCount() // // if(edge.getSource().getEdgeCount() > 1 && edge.getTarget().getEdgeCount() > 1){ // // the source and target have multiple edges, now loop through them all... 
// // // //cell.setGeometry(new mxGeometry(0, 0, 100, 100)); // mxGeometry geom = new mxGeometry(); // Vector<mxPoint> points = new Vector<mxPoint>(); // mxPoint p = new mxPoint(50.0, 50.0); // points.add(p); // geom.setPoints(points); // edge.setGeometry(geom); // } // } } /** * adds grid rectangles via cell vertices */ public void addGridCells() { if (gcm.getGrid().isEnabled()) { int gridRows = gcm.getGrid().getNumRows(); int gridCols = gcm.getGrid().getNumCols(); double gridWidth = gcm.getGrid().getGridGeomWidth(); double gridHeight = gcm.getGrid().getGridGeomHeight(); //creates an mxCell/vertex for each grid rectangle //these are later accessible via ID via the hash map for (int row = 0; row < gridRows; ++row) { for (int col = 0; col < gridCols; ++col) { String id = "ROW" + row + "_COL" + col; double currX = 15 + col*gridWidth; double currY = 15 + row*gridHeight; CellValueObject cvo = new CellValueObject(id, "Rectangle", null); mxGeometry geometry = new mxGeometry(currX, currY, gridWidth, gridHeight); mxCell vertex = new mxCell(cvo, geometry, null); vertex.setId(id); vertex.setVertex(true); vertex.setConnectable(false); vertex.setStyle("GRID_RECTANGLE"); addCell(vertex, this.defaultParent); gridRectangleToMxCellMap.put(id, vertex); } } } } //POSITION UPDATING /** * Called after a layout is chosen and applied. * Updates the gcm's postitioning using the * positioning on the graph. */ public void updateAllSpeciesPosition(){ for(mxCell cell:this.speciesToMxCellMap.values()){ updateInternalPosition(cell); } for(mxCell cell:this.reactionsToMxCellMap.values()){ updateInternalPosition(cell); } for(mxCell cell:this.componentsToMxCellMap.values()){ updateInternalPosition(cell); } for(mxCell cell:this.drawnPromoterToMxCellMap.values()){ updateInternalPosition(cell); } } /** * Given a cell that must be a species or component, * update the internal model to reflect it's coordinates. * Called when a cell is dragged with the GUI. 
*/ public void updateInternalPosition(mxCell cell){ mxGeometry geom = cell.getGeometry(); if (getCellType(cell).equals(GlobalConstants.SPECIES) || getCellType(cell).equals(GlobalConstants.PROMOTER)) { Layout layout = null; if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { layout = gcm.getSBMLLayout().getLayout("iBioSim"); } else { layout = gcm.getSBMLLayout().createLayout(); layout.setId("iBioSim"); } SpeciesGlyph speciesGlyph = null; if (layout.getSpeciesGlyph((String)cell.getId())!=null) { speciesGlyph = layout.getSpeciesGlyph((String)cell.getId()); } else { speciesGlyph = layout.createSpeciesGlyph(); speciesGlyph.setId((String)cell.getId()); speciesGlyph.setSpeciesId((String)cell.getId()); } speciesGlyph.getBoundingBox().setX(geom.getX()); speciesGlyph.getBoundingBox().setY(geom.getY()); speciesGlyph.getBoundingBox().setWidth(geom.getWidth()); speciesGlyph.getBoundingBox().setHeight(geom.getHeight()); TextGlyph textGlyph = null; if (layout.getTextGlyph((String)cell.getId())!=null) { textGlyph = layout.getTextGlyph((String)cell.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId((String)cell.getId()); textGlyph.setGraphicalObjectId((String)cell.getId()); textGlyph.setText((String)cell.getId()); textGlyph.setBoundingBox(speciesGlyph.getBoundingBox()); } else if (getCellType(cell).equals(GlobalConstants.REACTION)|| getCellType(cell).equals(GlobalConstants.RULE)|| getCellType(cell).equals(GlobalConstants.CONSTRAINT)|| getCellType(cell).equals(GlobalConstants.EVENT)) { Layout layout = null; if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { layout = gcm.getSBMLLayout().getLayout("iBioSim"); } else { layout = gcm.getSBMLLayout().createLayout(); layout.setId("iBioSim"); } ReactionGlyph reactionGlyph = null; if (layout.getReactionGlyph((String)cell.getId())!=null) { reactionGlyph = layout.getReactionGlyph((String)cell.getId()); } else { reactionGlyph = layout.createReactionGlyph(); reactionGlyph.setId((String)cell.getId()); reactionGlyph.setReactionId((String)cell.getId()); } reactionGlyph.getBoundingBox().setX(geom.getX()); reactionGlyph.getBoundingBox().setY(geom.getY()); reactionGlyph.getBoundingBox().setWidth(geom.getWidth()); reactionGlyph.getBoundingBox().setHeight(geom.getHeight()); TextGlyph textGlyph = null; if (layout.getTextGlyph((String)cell.getId())!=null) { textGlyph = layout.getTextGlyph((String)cell.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId((String)cell.getId()); textGlyph.setGraphicalObjectId((String)cell.getId()); textGlyph.setText((String)cell.getId()); textGlyph.setBoundingBox(reactionGlyph.getBoundingBox()); } else if (getCellType(cell).equals(GlobalConstants.COMPONENT)) { Layout layout = null; if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { layout = gcm.getSBMLLayout().getLayout("iBioSim"); } else { layout = gcm.getSBMLLayout().createLayout(); layout.setId("iBioSim"); } CompartmentGlyph compGlyph = null; if (layout.getCompartmentGlyph((String)cell.getId())!=null) { compGlyph = layout.getCompartmentGlyph((String)cell.getId()); } else { compGlyph = layout.createCompartmentGlyph(); compGlyph.setId((String)cell.getId()); compGlyph.setCompartmentId((String)cell.getId()); } compGlyph.getBoundingBox().setX(geom.getX()); compGlyph.getBoundingBox().setY(geom.getY()); compGlyph.getBoundingBox().setWidth(geom.getWidth()); compGlyph.getBoundingBox().setHeight(geom.getHeight()); TextGlyph textGlyph = null; if (layout.getTextGlyph((String)cell.getId())!=null) { textGlyph = 
layout.getTextGlyph((String)cell.getId()); } else { textGlyph = layout.createTextGlyph(); } textGlyph.setId((String)cell.getId()); textGlyph.setGraphicalObjectId((String)cell.getId()); textGlyph.setText((String)cell.getId()); textGlyph.setBoundingBox(compGlyph.getBoundingBox()); } } /** * Given a species, component, or drawn promoter cell, position it * using the properties. */ private boolean sizeAndPositionFromProperties(mxCell cell){ double x = 0; double y = 0; double width = 0; double height = 0; boolean needsPositioning = false; if (getCellType(cell).equals(GlobalConstants.SPECIES)|| getCellType(cell).equals(GlobalConstants.PROMOTER)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getSpeciesGlyph((String)cell.getId())!=null) { SpeciesGlyph speciesGlyph = layout.getSpeciesGlyph((String)cell.getId()); x = speciesGlyph.getBoundingBox().getPosition().getXOffset(); y = speciesGlyph.getBoundingBox().getPosition().getYOffset(); width = speciesGlyph.getBoundingBox().getDimensions().getWidth(); height = speciesGlyph.getBoundingBox().getDimensions().getHeight(); } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_SPECIES_WIDTH; height = GlobalConstants.DEFAULT_SPECIES_HEIGHT; gcm.placeSpecies((String)cell.getId(), x, y, height, width); } } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_SPECIES_WIDTH; height = GlobalConstants.DEFAULT_SPECIES_HEIGHT; } } else if (getCellType(cell).equals(GlobalConstants.REACTION) || getCellType(cell).equals(GlobalConstants.RULE) || getCellType(cell).equals(GlobalConstants.CONSTRAINT) || getCellType(cell).equals(GlobalConstants.EVENT)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getReactionGlyph((String)cell.getId())!=null) { ReactionGlyph reactionGlyph = layout.getReactionGlyph((String)cell.getId()); x = reactionGlyph.getBoundingBox().getPosition().getXOffset(); y = reactionGlyph.getBoundingBox().getPosition().getYOffset(); width = reactionGlyph.getBoundingBox().getDimensions().getWidth(); height = reactionGlyph.getBoundingBox().getDimensions().getHeight(); } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_REACTION_WIDTH; height = GlobalConstants.DEFAULT_REACTION_HEIGHT; gcm.placeReaction((String)cell.getId(), x, y, height, width); } } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_REACTION_WIDTH; height = GlobalConstants.DEFAULT_REACTION_HEIGHT; } } else if (getCellType(cell).equals(GlobalConstants.COMPONENT)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getCompartmentGlyph((String)cell.getId())!=null) { CompartmentGlyph compGlyph = 
layout.getCompartmentGlyph((String)cell.getId()); x = compGlyph.getBoundingBox().getPosition().getXOffset(); y = compGlyph.getBoundingBox().getPosition().getYOffset(); width = compGlyph.getBoundingBox().getDimensions().getWidth(); height = compGlyph.getBoundingBox().getDimensions().getHeight(); } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; gcm.placeCompartment((String)cell.getId(), x, y, height, width); } } else { unpositionedSpeciesComponentCount += 1; needsPositioning = true; x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } } cell.setGeometry(new mxGeometry(x, y, width, height)); return needsPositioning; } /** * redraws the grid components */ public void updateGrid() { dynamic = true; this.removeCells(this.getChildCells(this.getDefaultParent(), true, true)); gridRectangleToMxCellMap.clear(); addGridCells(); componentsToMxCellMap.clear(); Grid grid = gcm.getGrid(); double gridWidth = grid.getGridGeomWidth(); double gridHeight = grid.getGridGeomHeight(); componentsToMxCellMap.clear(); //ADD COMPONENTS for (int row = 0; row < grid.getNumRows(); ++row) { for (int col = 0; col < grid.getNumCols(); ++col) { if (grid.getOccupancyFromLocation(row, col) == true) { double currX = 15 + col*gridWidth; double currY = 15 + row*gridHeight; String compID = grid.getCompIDFromLocation(row, col); grid.setNodeRectangle(compID, new Rectangle((int) currX, (int) currY, (int) gridWidth, (int) gridHeight)); Rectangle componentRectangle = grid.getSnapRectangleFromCompID(compID); String fullCompID = compID; if (compID.contains("_of_")) compID = compID.split("_")[0]; if (compID.length() > 10) { compID = compID.substring(0,9) + "..."; } CellValueObject compcvo = new CellValueObject(compID, "Component", null); mxCell compCell = (mxCell) this.insertVertex(this.getDefaultParent(), fullCompID, compcvo, componentRectangle.getX(), componentRectangle.getY(), componentRectangle.getWidth(), componentRectangle.getHeight()); compCell.setConnectable(false); compCell.setStyle("GRIDCOMPARTMENT"); componentsToMxCellMap.put(fullCompID, compCell); } } } } //GET METHODS /** * returns GlobalConstants.SPECIES, GlobalConstants.COMPONENT, GlobalConstants.INFLUENCE, or GlobalConstants.COMPONENT_CONNECTION. 
* @param cell */ public String getCellType(mxCell cell){ if(cell.isEdge()){ String sourceType = getCellType(cell.getSource()); String targetType = getCellType(cell.getTarget()); if(sourceType == CELL_VALUE_NOT_FOUND || targetType == CELL_VALUE_NOT_FOUND){ return CELL_NOT_FULLY_CONNECTED; } else if(sourceType == GlobalConstants.COMPONENT || targetType == GlobalConstants.COMPONENT){ return GlobalConstants.COMPONENT_CONNECTION; } else if(sourceType == GlobalConstants.PROMOTER && targetType == GlobalConstants.SPECIES){ return GlobalConstants.PRODUCTION; } else if (sourceType == GlobalConstants.SPECIES && targetType == GlobalConstants.SPECIES && (gcm.getSBMLDocument().getModel().getNumReactions() > 0) && cell.getValue() != null && (gcm.getSBMLDocument().getModel().getReaction((String)cell.getValue()) != null)) { return GlobalConstants.REACTION_EDGE; } else if (sourceType == GlobalConstants.REACTION || targetType == GlobalConstants.REACTION) { return GlobalConstants.REACTION_EDGE; } else if (sourceType == GlobalConstants.RULE || targetType == GlobalConstants.RULE) { return GlobalConstants.RULE_EDGE; } else if (sourceType == GlobalConstants.CONSTRAINT || targetType == GlobalConstants.CONSTRAINT) { return GlobalConstants.CONSTRAINT_EDGE; } else if (sourceType == GlobalConstants.EVENT || targetType == GlobalConstants.EVENT) { return GlobalConstants.EVENT_EDGE; } else { return GlobalConstants.INFLUENCE; } } //cell is a vertex else{ String type = ((CellValueObject)(cell.getValue())).type; if(type.equals("Component")) return GlobalConstants.COMPONENT; else if(type.equals("Species")) return GlobalConstants.SPECIES; else if(type.equals("Promoter")) return GlobalConstants.PROMOTER; else if(type.equals("Reaction")) return GlobalConstants.REACTION; else if(type.equals("Rule")) return GlobalConstants.RULE; else if(type.equals("Constraint")) return GlobalConstants.CONSTRAINT; else if(type.equals("Event")) return GlobalConstants.EVENT; else if (type.equals("Rectangle")) return GlobalConstants.GRID_RECTANGLE; else return CELL_VALUE_NOT_FOUND; } } /** * * @param cell * @return */ public String getCellType(mxICell cell) { return getCellType((mxCell)cell); } public String getModelFileName(String compId) { return gcm.getModelFileName(compId).replace(".xml", ".gcm"); } /** * * @param id * @return */ public mxCell getSpeciesCell(String id){ return speciesToMxCellMap.get(id); } /** * * @param id * @return */ public mxCell getReactionsCell(String id){ return reactionsToMxCellMap.get(id); } public mxCell getRulesCell(String id){ return rulesToMxCellMap.get(id); } public mxCell getConstraintsCell(String id){ return constraintsToMxCellMap.get(id); } public mxCell getEventsCell(String id){ return eventsToMxCellMap.get(id); } /** * * @param id * @return */ public mxCell getComponentCell(String id){ return componentsToMxCellMap.get(id); } /** * * @param id * @return */ public mxCell getDrawnPromoterCell(String id){ return drawnPromoterToMxCellMap.get(id); } /** * * @param id * @return */ public mxCell getInfluence(String id){ return (influencesToMxCellMap.get(id)); } /** * returns the mxCell corresponding to the id passed in * this mxCell is a grid rectangle on the grid * * @param id the id of the grid rectangle's cell * @return the corresponding cell */ public mxCell getGridRectangleCellFromID(String id) { return gridRectangleToMxCellMap.get(id); } /** * returns if the cell is selectable or not * * @param cell the cell that is or isn't selectable */ @Override public boolean isCellSelectable(Object cell) { mxCell tempCell = 
(mxCell)cell; //if it's a grid cell, it's not selectable //otherwise, do the default behavior if (tempCell.getStyle().equals("GRID_RECTANGLE")) return false; return isCellsSelectable(); } //GRAPH PART CREATION /** * * @param id * @return */ private boolean createGraphComponentFromModel(String id){ boolean needsPositioning = false; double x = 0; double y = 0; double width = 0; double height = 0; if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getCompartmentGlyph(id)!=null) { CompartmentGlyph compGlyph = layout.getCompartmentGlyph(id); x = compGlyph.getBoundingBox().getPosition().getXOffset(); y = compGlyph.getBoundingBox().getPosition().getYOffset(); width = compGlyph.getBoundingBox().getDimensions().getWidth(); height = compGlyph.getBoundingBox().getDimensions().getHeight(); } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } //set the correct compartment status BioModel compGCMFile = new BioModel(gcm.getPath()); boolean compart = false; //String modelFileName = gcm.getModelFileName(id).replace(".xml", ".gcm"); String modelFileName = gcm.getModelFileName(id); File compFile = new File(gcm.getPath() + File.separator + modelFileName); if (compGCMFile != null && compFile.exists()) { compGCMFile.load(gcm.getPath() + File.separator + modelFileName); compart = compGCMFile.IsWithinCompartment(); } else { JOptionPane.showMessageDialog(Gui.frame, "A model definition cannot be found for " + modelFileName + ".\nDropping component from the schematic.\n", "Warning", JOptionPane.WARNING_MESSAGE); return false; } if(x < -9998 || y < -9998){ unpositionedSpeciesComponentCount += 1; needsPositioning = true; // Line the unpositioned species up nicely. The mod is there as a rough // and dirty way to prevent // them going off the bottom or right hand side of the screen. 
x = (unpositionedSpeciesComponentCount%50) * 20; y = (unpositionedSpeciesComponentCount%10) * (GlobalConstants.DEFAULT_SPECIES_HEIGHT + 10); } String truncGCM = modelFileName.replace(".xml", ""); String truncID = ""; //if the id is too long, truncate it if (truncGCM.length() > 10) truncGCM = truncGCM.substring(0, 9) + "..."; if (id.length() > 10) truncID = id.substring(0, 9) + "..."; else truncID = id; String label = truncID + "\n" + truncGCM; CellValueObject cvo = new CellValueObject(label, "Component", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, x, y, width, height); this.componentsToMxCellMap.put(id, (mxCell)insertedVertex); //pass whether or not the component is a compartment, as the styles are different this.setComponentStyles(id, compart); // now draw the edges that connect the component for (String propName : gcm.getInputs(id).keySet()) { // input, the arrow should point in from the species String topSpecies = gcm.getInputs(id).get(propName); Object createdEdge = this.insertEdge(this.getDefaultParent(), "", "", this.getSpeciesCell(topSpecies),insertedVertex); String key = id + " Input " + topSpecies; componentsConnectionsToMxCellMap.put(key, (mxCell)createdEdge); this.updateComponentConnectionVisuals((mxCell)createdEdge, propName); } // now draw the edges that connect the component for (String propName : gcm.getOutputs(id).keySet()) { // output, the arrow should point out to the species String topSpecies = gcm.getOutputs(id).get(propName); Object createdEdge = this.insertEdge(this.getDefaultParent(), "", "", insertedVertex, this.getSpeciesCell(topSpecies)); String key = id + " Output " + topSpecies; componentsConnectionsToMxCellMap.put(key, (mxCell)createdEdge); this.updateComponentConnectionVisuals((mxCell)createdEdge, propName); } return needsPositioning; } /** * creates a vertex on the graph using the internal model. * @param id * * @return: A bool, true if the species had to be positioned. */ private boolean createGraphSpeciesFromModel(String sp){ String type = BioModel.getSpeciesType(gcm.getSBMLDocument(),sp); if (gcm.getDiffusionReaction(sp)!=null) type += " (D)"; if (gcm.isSpeciesConstitutive(sp)) type += " (C)"; if (type.equals(GlobalConstants.MRNA)) return false; String truncID = ""; if (sp.length() > 12) truncID = sp.substring(0, 11) + "..."; else truncID = sp; String label = truncID + '\n' + type; CellValueObject cvo = new CellValueObject(label, "Species", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), sp, cvo, 1, 1, 1, 1); this.speciesToMxCellMap.put(sp, (mxCell)insertedVertex); this.setSpeciesStyles(sp); return sizeAndPositionFromProperties((mxCell)insertedVertex); } /** * creates a vertex on the graph using the internal model. * @param id * * @return: A bool, true if the reaction had to be positioned. 
*/ private boolean createGraphReactionFromModel(String id){ CellValueObject cvo = new CellValueObject(id, "Reaction", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.reactionsToMxCellMap.put(id, (mxCell)insertedVertex); this.setReactionStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } private boolean createGraphRuleFromModel(String id){ CellValueObject cvo = new CellValueObject(id, "Rule", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.rulesToMxCellMap.put(id, (mxCell)insertedVertex); this.setRuleStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } private boolean createGraphConstraintFromModel(String id){ CellValueObject cvo = new CellValueObject(id, "Constraint", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.constraintsToMxCellMap.put(id, (mxCell)insertedVertex); this.setConstraintStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } private boolean createGraphEventFromModel(String id){ CellValueObject cvo = new CellValueObject(id, "Event", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.eventsToMxCellMap.put(id, (mxCell)insertedVertex); this.setEventStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } /** * Creates a drawn promoter using the internal model * @param pname * @return */ private boolean createGraphDrawnPromoterFromModel(String id){ String truncID; if (id.length() > 8) truncID = id.substring(0, 7) + "..."; else truncID = id; CellValueObject cvo = new CellValueObject(truncID, "Promoter", null); Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); this.drawnPromoterToMxCellMap.put(id, (mxCell)insertedVertex); this.setDrawnPromoterStyles(id); return sizeAndPositionFromProperties((mxCell)insertedVertex); } /** * creates an edge between two graph entities */ @Override public Object insertEdge(Object parent, String id, Object value, Object source, Object target, String style){ Object ret = super.insertEdge(parent, id, value, source, target, style); this.influencesToMxCellMap.put(id, (mxCell)ret); return ret; } //VISUALS /** * Given an id, update the style of the influence based on the internal model. */ /* private void updateInfluenceVisuals(String id){ Properties prop = gcm.getInfluences().get(id); //gcm.getSBMLDocument().getModel(); if(prop == null) throw new Error("Invalid id '"+id+"'. Valid ids were:" + String.valueOf(gcm.getInfluences().keySet())); // build the edge style String style = "defaultEdge;" + mxConstants.STYLE_ENDARROW + "="; if(prop.getProperty(GlobalConstants.TYPE).equals(GlobalConstants.ACTIVATION)) style = "ACTIVATION"; else if(prop.getProperty(GlobalConstants.TYPE).equals(GlobalConstants.REPRESSION)) style = "REPRESSION"; else if(prop.getProperty(GlobalConstants.TYPE).equals(GlobalConstants.NOINFLUENCE)) style = "NOINFLUENCE"; else if(prop.getProperty(GlobalConstants.TYPE).equals(GlobalConstants.COMPLEX)) style = "COMPLEX"; else style = "DEFAULT"; // apply the style mxCell cell = this.getInfluence(id); cell.setStyle(style); // apply the promoter name as a label, only if the promoter isn't drawn. 
if(gcm.influenceHasExplicitPromoter(id) == false) cell.setValue(prop.getProperty(GlobalConstants.PROMOTER)); }; */ /** * * @param cell * @param label */ public void updateComponentConnectionVisuals(mxCell cell, String label){ //cell.setStyle(mxConstants.STYLE_ENDARROW + "=" + mxConstants.ARROW_OPEN); cell.setStyle("COMPONENT_EDGE"); cell.setValue("Port " + label); // position the label as intelligently as possible mxGeometry geom = cell.getGeometry(); if(this.getCellType(cell.getSource()) == GlobalConstants.COMPONENT){ geom.setX(-.6); } else{ geom.setX(.6); } cell.setGeometry(geom); } /** * Builds the style sheets that will be used by the graph. */ public void createStyleSheets(){ mxStylesheet stylesheet = this.getStylesheet(); //species Hashtable<String, Object> style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FILLCOLOR, "#5CB4F2"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, true); stylesheet.putCellStyle("SPECIES", style); //reactions style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_ELLIPSE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FILLCOLOR, "#C7007B"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); stylesheet.putCellStyle("REACTION", style); //rules style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_SWIMLANE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#FFFF00"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("RULE", style); //constraints style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_HEXAGON); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#FF0000"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("CONSTRAINT", style); //events style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#00FF00"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("EVENT", style); //components style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#87F274"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("COMPONENT", style); //grid components style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "#87F274"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("GRIDCOMPONENT", style); 
//compartments style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, true); style.put(mxConstants.STYLE_FILLCOLOR, "#87F274"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("COMPARTMENT", style); //grid compartments style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_ROUNDED, true); style.put(mxConstants.STYLE_FILLCOLOR, "#87F274"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("GRIDCOMPARTMENT", style); //grid rectangle style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RECTANGLE); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_ROUNDED, false); style.put(mxConstants.STYLE_FILLCOLOR, "none"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_MOVABLE, false); style.put(mxConstants.STYLE_RESIZABLE, false); style.put(mxConstants.STYLE_NOLABEL, true); stylesheet.putCellStyle("GRID_RECTANGLE", style); //component edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#FFAA00"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); stylesheet.putCellStyle("COMPONENT_EDGE", style); //production edge (promoter to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#34BA04"); style.put(mxConstants.STYLE_STROKECOLOR, "#34BA04"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_EDGE, mxConstants.EDGESTYLE_ENTITY_RELATION); stylesheet.putCellStyle("PRODUCTION", style); //activation edge (species to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#34BA04"); style.put(mxConstants.STYLE_STROKECOLOR, "#34BA04"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_BLOCK); stylesheet.putCellStyle("ACTIVATION", style); //repression edge (species to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#FA2A2A"); style.put(mxConstants.STYLE_STROKECOLOR, "#FA2A2A"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OVAL); stylesheet.putCellStyle("REPRESSION", style); //no influence (species to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#000000"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_DIAMOND); style.put(mxConstants.STYLE_DASHED, "true"); stylesheet.putCellStyle("NOINFLUENCE", style); //complex formation edge (species to species) style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); 
style.put(mxConstants.STYLE_FILLCOLOR, "#4E5D9C"); style.put(mxConstants.STYLE_STROKECOLOR, "#4E5D9C"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_DASHED, "true"); stylesheet.putCellStyle("COMPLEX", style); //reaction edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#F2861B"); style.put(mxConstants.STYLE_STROKECOLOR, "#F2861B"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_OPEN); style.put(mxConstants.STYLE_DASHED, "false"); stylesheet.putCellStyle("REACTION_EDGE", style); //default edge style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_OPACITY, 100); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#000000"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); style.put(mxConstants.STYLE_ENDARROW, mxConstants.ARROW_CLASSIC); style.put(mxConstants.STYLE_DASHED, "false"); stylesheet.putCellStyle("DEFAULT", style); //explicit promoter style = new Hashtable<String, Object>(); style.put(mxConstants.STYLE_SHAPE, mxConstants.SHAPE_RHOMBUS); style.put(mxConstants.STYLE_OPACITY, 50); style.put(mxConstants.STYLE_FONTCOLOR, "#000000"); style.put(mxConstants.STYLE_FILLCOLOR, "#F00E0E"); style.put(mxConstants.STYLE_STROKECOLOR, "#000000"); stylesheet.putCellStyle("EXPLICIT_PROMOTER", style); } //STYLE SETTING /** * * @param id */ private void setSpeciesStyles(String id){ String style="SPECIES;"; mxCell cell = this.getSpeciesCell(id); cell.setStyle(style); } /** * * @param id */ private void setReactionStyles(String id){ String style="REACTION;"; mxCell cell = this.getReactionsCell(id); cell.setStyle(style); } private void setRuleStyles(String id){ String style="RULE;"; mxCell cell = this.getRulesCell(id); cell.setStyle(style); } private void setConstraintStyles(String id){ String style="CONSTRAINT;"; mxCell cell = this.getConstraintsCell(id); cell.setStyle(style); } private void setEventStyles(String id){ String style="EVENT;"; mxCell cell = this.getEventsCell(id); cell.setStyle(style); } /** * * @param id * @param compart */ private void setComponentStyles(String id, boolean compart){ String style = ""; if (gcm.getGrid().isEnabled()) { if (compart) style = "GRIDCOMPARTMENT;"; else style = "GRIDCOMPONENT;"; } else { if (compart) style = "COMPARTMENT;"; else style = "COMPONENT;"; } mxCell cell = this.getComponentCell(id); cell.setStyle(style); } /** * * @param id */ private void setDrawnPromoterStyles(String id){ String style="EXPLICIT_PROMOTER"; mxCell cell = this.getDrawnPromoterCell(id); cell.setStyle(style); } //ANIMATION /** * */ public void setSpeciesAnimationValue(String species, MovieAppearance appearance) { mxCell cell = this.speciesToMxCellMap.get(species); if (cell != null) { setCellAnimationValue(cell, appearance); } } /** * */ public void setComponentAnimationValue(String component, MovieAppearance appearance) { mxCell cell = this.componentsToMxCellMap.get(component); setCellAnimationValue(cell, appearance); } public void setGridRectangleAnimationValue(String gridLocation, MovieAppearance appearance) { mxCell cell = this.gridRectangleToMxCellMap.get(gridLocation); setCellAnimationValue(cell, appearance); } /** * Applies the MovieAppearance to the cell * @param cell * @param appearance * @param properties */ private void setCellAnimationValue(mxCell cell, MovieAppearance appearance) { if (appearance == null) return; // color String newStyle = 
cell.getStyle() + ";"; if (appearance.color != null) { newStyle += mxConstants.STYLE_FILLCOLOR + "=" + Integer.toHexString(appearance.color.getRGB()) + ";"; newStyle += mxConstants.STYLE_OPACITY + "=" + 75; } // opacity if (appearance.opacity != null) { newStyle += ";"; double op = (appearance.opacity) * 100.0; newStyle += mxConstants.STYLE_OPACITY + "=" + String.valueOf(op); } if (newStyle != null) cell.setStyle(newStyle); // size if (appearance.size != null) { double x = 0; double y = 0; double width = 0; double height = 0; if (getCellType(cell).equals(GlobalConstants.SPECIES)|| getCellType(cell).equals(GlobalConstants.PROMOTER)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getSpeciesGlyph((String)cell.getId())!=null) { SpeciesGlyph speciesGlyph = layout.getSpeciesGlyph((String)cell.getId()); x = speciesGlyph.getBoundingBox().getPosition().getXOffset(); y = speciesGlyph.getBoundingBox().getPosition().getYOffset(); width = speciesGlyph.getBoundingBox().getDimensions().getWidth(); height = speciesGlyph.getBoundingBox().getDimensions().getHeight(); } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_SPECIES_WIDTH; height = GlobalConstants.DEFAULT_SPECIES_HEIGHT; } } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_SPECIES_WIDTH; height = GlobalConstants.DEFAULT_SPECIES_HEIGHT; } } else if (getCellType(cell).equals(GlobalConstants.REACTION)|| getCellType(cell).equals(GlobalConstants.RULE)|| getCellType(cell).equals(GlobalConstants.CONSTRAINT)|| getCellType(cell).equals(GlobalConstants.EVENT)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getReactionGlyph((String)cell.getId())!=null) { ReactionGlyph reactionGlyph = layout.getReactionGlyph((String)cell.getId()); x = reactionGlyph.getBoundingBox().getPosition().getXOffset(); y = reactionGlyph.getBoundingBox().getPosition().getYOffset(); width = reactionGlyph.getBoundingBox().getDimensions().getWidth(); height = reactionGlyph.getBoundingBox().getDimensions().getHeight(); } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_REACTION_WIDTH; height = GlobalConstants.DEFAULT_REACTION_HEIGHT; } } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_REACTION_WIDTH; height = GlobalConstants.DEFAULT_REACTION_HEIGHT; } } else if (getCellType(cell).equals(GlobalConstants.COMPONENT)) { if (gcm.getSBMLLayout().getLayout("iBioSim") != null) { Layout layout = gcm.getSBMLLayout().getLayout("iBioSim"); if (layout.getCompartmentGlyph((String)cell.getId())!=null) { CompartmentGlyph compGlyph = layout.getCompartmentGlyph((String)cell.getId()); x = compGlyph.getBoundingBox().getPosition().getXOffset(); y = compGlyph.getBoundingBox().getPosition().getYOffset(); width = compGlyph.getBoundingBox().getDimensions().getWidth(); height = compGlyph.getBoundingBox().getDimensions().getHeight(); } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } } else { x = -9999; y = -9999; width = GlobalConstants.DEFAULT_COMPONENT_WIDTH; height = GlobalConstants.DEFAULT_COMPONENT_HEIGHT; } } double aspect_ratio = height / width; double centerX = x + width/2.0; double centerY = y + height/2.0; mxGeometry startG = cell.getGeometry(); startG.setWidth(appearance.size); startG.setHeight(appearance.size * aspect_ratio); startG.setX(centerX - startG.getWidth()/2.0); startG.setY(centerY - startG.getHeight()/2.0); } 
} //CELL VALUE OBJECT CLASS /** * The object that gets set as the mxCell value object. * It is basically a way to store a property and label. */ public class CellValueObject extends Object implements Serializable{ private static final long serialVersionUID = 918273645; public Properties prop; public String label; public String type; @Override /** * */ public String toString(){ return this.label; } /** * * @param oos * @throws IOException */ private void writeObject(ObjectOutputStream oos) throws IOException { oos.writeObject(label); oos.writeObject(prop); } /** * * @param ois * @throws ClassNotFoundException * @throws IOException */ private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException { label = ois.readObject().toString(); prop = (Properties)ois.readObject(); } /** * * * @param label * @param prop */ public CellValueObject(String label, String type, Properties prop){ if(label == null || type == null) throw new Error("Neither label nor type can be null!"); this.label = label; this.type = type; this.prop = prop; } } //FUNCTIONS THAT TYLER COMMENTED OUT THAT WILL PROBABLY NEVER BE USED // /** // * Overwrite the parent insertVertex and additionally put the vertex into our hashmap. // * @return // */ // public Object insertVertex(Object parent, String id, Object value, double x, double y, double width, double height){ // Object ret = super.insertVertex(parent, id, value, x, y, width, height); // this.speciesToMxCellMap.put(id, (mxCell)ret); // return ret; // } // // /** // * returns the name of the component-to-species connection // * @param compName // * @param speciesName // * @return // */ // /* // private String getComponentConnectionName(String compName, String speciesName){ // return compName + " (component connection) " + speciesName; // } // */ // // /** // * creates a vertex on the graph using the internal model. // * @param id // * // * @return: A bool, true if the species had to be positioned. // */ // /* // private boolean createGraphCompartmentFromModel(String sp){ // Properties prop = new Properties(); // String id = sp; // CellValueObject cvo = new CellValueObject(id, prop); // Object insertedVertex = this.insertVertex(this.getDefaultParent(), id, cvo, 1, 1, 1, 1); // this.speciesToMxCellMap.put(id, (mxCell)insertedVertex); // // this.setSpeciesStyles(sp); // // return sizeAndPositionFromProperties((mxCell)insertedVertex, prop); // } // */ // // /** // * called after a species is deleted. Make sure to delete it from // * the internal model. // * @param id // */ // public void speciesRemoved(String id){ // this.speciesToMxCellMap.remove(id); // } // public void influenceRemoved(String id){ // this.influencesToMxCellMap.remove(id); // } }
Add edges for constraints and events.
gui/src/biomodel/gui/schematic/BioGraph.java
Add edges for constraints and events.
Java
apache-2.0
330b2ad1ce493f608152358a50a0ea6158cb1946
0
452/USBHIDTerminal,452/USBHIDTerminal,452/USBHIDTerminal
package com.appspot.usbhidterminal.core; import java.nio.ByteBuffer; import java.util.LinkedList; import java.util.List; import android.app.PendingIntent; import android.app.Service; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.hardware.usb.UsbConstants; import android.hardware.usb.UsbDevice; import android.hardware.usb.UsbDeviceConnection; import android.hardware.usb.UsbEndpoint; import android.hardware.usb.UsbInterface; import android.hardware.usb.UsbManager; import android.hardware.usb.UsbRequest; import android.os.Bundle; import android.os.Handler; import android.os.IBinder; import android.os.ResultReceiver; import android.util.Log; public abstract class AbstractUSBHIDService extends Service { private USBThreadDataReceiver usbThreadDataReceiver; private ResultReceiver resultReceiver; private final Handler uiHandler = new Handler(); private UsbManager mUsbManager; private UsbInterface intf; private UsbEndpoint endPointRead; private UsbEndpoint endPointWrite; private UsbDeviceConnection connection; private UsbDevice device; private IntentFilter filter; private PendingIntent mPermissionIntent; private int packetSize; private boolean sendedDataType; @Override public IBinder onBind(Intent intent) { return null; } @Override public void onCreate() { super.onCreate(); mPermissionIntent = PendingIntent.getBroadcast(this, 0, new Intent(Consts.ACTION_USB_PERMISSION), 0); filter = new IntentFilter(Consts.ACTION_USB_PERMISSION); filter.addAction(UsbManager.ACTION_USB_DEVICE_ATTACHED); filter.addAction(UsbManager.ACTION_USB_DEVICE_DETACHED); filter.addAction(Consts.ACTION_USB_SHOW_DEVICES_LIST); filter.addAction(Consts.ACTION_USB_SELECT_DEVICE); filter.addAction(Consts.ACTION_USB_SEND_DATA); filter.addAction(Consts.ACTION_USB_DATA_TYPE); registerReceiver(mUsbReceiver, filter); } @Override public int onStartCommand(Intent intent, int flags, int startId) { String action = intent.getAction(); if (resultReceiver == null) { resultReceiver = intent.getParcelableExtra("receiver"); } if (Consts.ACTION_USB_DATA_TYPE.equals(action)) { sendedDataType = intent.getBooleanExtra(Consts.ACTION_USB_DATA_TYPE, false); } else if (Consts.ACTION_USB_SEND_DATA.equals(action)) { sendData(intent.getStringExtra(Consts.ACTION_USB_SEND_DATA), sendedDataType); } else if (Consts.ACTION_USB_SHOW_DEVICES_LIST.equals(action)) { mUsbManager = (UsbManager) getSystemService(Context.USB_SERVICE); List<CharSequence> list = new LinkedList<CharSequence>(); for (UsbDevice usbDevice : mUsbManager.getDeviceList().values()) { list.add(onBuildingDevicesList(usbDevice)); } final CharSequence devicesName[] = new CharSequence[mUsbManager.getDeviceList().size()]; list.toArray(devicesName); Bundle bundle = new Bundle(); bundle.putCharSequenceArray(Consts.ACTION_USB_SHOW_DEVICES_LIST, devicesName); sendResultToUI(Consts.ACTION_USB_SHOW_DEVICES_LIST_RESULT, bundle); } else if (Consts.ACTION_USB_SELECT_DEVICE.equals(action)) { device = (UsbDevice) mUsbManager.getDeviceList().values().toArray()[intent.getIntExtra(Consts.ACTION_USB_SELECT_DEVICE, 0)]; mUsbManager.requestPermission(device, mPermissionIntent); } onCommand(intent, action, flags, startId); return START_REDELIVER_INTENT; } @Override public void onDestroy() { super.onDestroy(); if (usbThreadDataReceiver != null) { usbThreadDataReceiver.stopThis(); } unregisterReceiver(mUsbReceiver); } private class USBThreadDataReceiver extends Thread { private volatile boolean isStopped; public 
USBThreadDataReceiver() { } @Override public void run() { if (connection != null && endPointRead != null) { UsbRequest request = new UsbRequest(); UsbRequest requestQueued = null; request.initialize(connection, endPointRead); final ByteBuffer buff = ByteBuffer.allocate(packetSize + 1); while (!isStopped) { request.queue(buff, packetSize); requestQueued = connection.requestWait(); if (request.equals(requestQueued)){ uiHandler.post(new Runnable() { @Override public void run() { onUSBDataReceive(buff.array()); } }); } } } } public void stopThis() { isStopped = true; } } private void sendData(String data, boolean sendAsString) { if (device != null && endPointWrite != null && mUsbManager.hasPermission(device) && !data.isEmpty()) { // mLog(connection +"\n"+ device +"\n"+ request +"\n"+ // packetSize); byte[] out = data.getBytes();// UTF-16LE // Charset.forName("UTF-16") onUSBDataSending(data); if (sendAsString) { try { String str[] = data.split("[\\s]"); out = new byte[str.length]; for (int i = 0; i < str.length; i++) { out[i] = USBUtils.toByte(Integer.decode(str[i])); } } catch (Exception e) { onSendingError(e); } } int status = connection.bulkTransfer(endPointWrite, out, out.length, 250); onUSBDataSended(status, out); } } /** * receives the permission request to connect usb devices */ private final BroadcastReceiver mUsbReceiver = new BroadcastReceiver() { public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (Consts.ACTION_USB_PERMISSION.equals(action)) { setDevice(intent); } if (UsbManager.ACTION_USB_DEVICE_ATTACHED.equals(action)) { setDevice(intent); if (device == null) { onDeviceConnected(device); } } if (UsbManager.ACTION_USB_DEVICE_DETACHED.equals(action)) { if (device != null) { device = null; usbThreadDataReceiver.stopThis(); sendResultToUI(Consts.ACTION_USB_DEVICE_DETACHED, null); onDeviceDisconnected(device); } } } private void setDevice(Intent intent) { device = (UsbDevice) intent.getParcelableExtra(UsbManager.EXTRA_DEVICE); if (device != null && intent.getBooleanExtra(UsbManager.EXTRA_PERMISSION_GRANTED, false)) { onDeviceSelected(device); connection = mUsbManager.openDevice(device); intf = device.getInterface(0); if (null == connection) { // mLog("(unable to establish connection)\n"); } else { connection.claimInterface(intf, true); } try { if (UsbConstants.USB_DIR_OUT == intf.getEndpoint(1).getDirection()) { endPointWrite = intf.getEndpoint(1); } } catch (Exception e) { Log.e("endPointWrite", "Device have no endPointWrite", e); } try { if (UsbConstants.USB_DIR_IN == intf.getEndpoint(0).getDirection()) { endPointRead = intf.getEndpoint(0); packetSize = endPointRead.getMaxPacketSize(); } } catch (Exception e) { Log.e("endPointWrite", "Device have no endPointRead", e); } usbThreadDataReceiver = new USBThreadDataReceiver(); usbThreadDataReceiver.start(); sendResultToUI(Consts.ACTION_USB_DEVICE_ATTACHED, null); } } }; public void sendResultToUI(int resultCode, Bundle resultData) { resultReceiver.send(resultCode, resultData); } public void onCommand(Intent intent, String action, int flags, int startId) { } public void onUSBDataReceive(byte[] buffer) { } public void onDeviceConnected(UsbDevice device) { } public void onDeviceDisconnected(UsbDevice device) { } public void onDeviceSelected(UsbDevice device) { } public CharSequence onBuildingDevicesList(UsbDevice usbDevice) { return null; } public void onUSBDataSending(String data) { } public void onUSBDataSended(int status, byte[] out) { } public void onSendingError(Exception e) { } }
src/com/appspot/usbhidterminal/core/AbstractUSBHIDService.java
package com.appspot.usbhidterminal.core; import java.util.LinkedList; import java.util.List; import android.app.PendingIntent; import android.app.Service; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.hardware.usb.UsbConstants; import android.hardware.usb.UsbDevice; import android.hardware.usb.UsbDeviceConnection; import android.hardware.usb.UsbEndpoint; import android.hardware.usb.UsbInterface; import android.hardware.usb.UsbManager; import android.os.Bundle; import android.os.Handler; import android.os.IBinder; import android.os.ResultReceiver; import android.util.Log; public abstract class AbstractUSBHIDService extends Service { private USBThreadDataReceiver usbThreadDataReceiver; private ResultReceiver resultReceiver; private final Handler uiHandler = new Handler(); private UsbManager mUsbManager; private UsbInterface intf; private UsbEndpoint endPointRead; private UsbEndpoint endPointWrite; private UsbDeviceConnection connection; private UsbDevice device; private IntentFilter filter; private PendingIntent mPermissionIntent; private int packetSize; private boolean sendedDataType; @Override public IBinder onBind(Intent intent) { return null; } @Override public void onCreate() { super.onCreate(); mPermissionIntent = PendingIntent.getBroadcast(this, 0, new Intent(Consts.ACTION_USB_PERMISSION), 0); filter = new IntentFilter(Consts.ACTION_USB_PERMISSION); filter.addAction(UsbManager.ACTION_USB_DEVICE_ATTACHED); filter.addAction(UsbManager.ACTION_USB_DEVICE_DETACHED); filter.addAction(Consts.ACTION_USB_SHOW_DEVICES_LIST); filter.addAction(Consts.ACTION_USB_SELECT_DEVICE); filter.addAction(Consts.ACTION_USB_SEND_DATA); filter.addAction(Consts.ACTION_USB_DATA_TYPE); registerReceiver(mUsbReceiver, filter); } @Override public int onStartCommand(Intent intent, int flags, int startId) { String action = intent.getAction(); if (resultReceiver == null) { resultReceiver = intent.getParcelableExtra("receiver"); } if (Consts.ACTION_USB_DATA_TYPE.equals(action)) { sendedDataType = intent.getBooleanExtra(Consts.ACTION_USB_DATA_TYPE, false); } else if (Consts.ACTION_USB_SEND_DATA.equals(action)) { sendData(intent.getStringExtra(Consts.ACTION_USB_SEND_DATA), sendedDataType); } else if (Consts.ACTION_USB_SHOW_DEVICES_LIST.equals(action)) { mUsbManager = (UsbManager) getSystemService(Context.USB_SERVICE); List<CharSequence> list = new LinkedList<CharSequence>(); for (UsbDevice usbDevice : mUsbManager.getDeviceList().values()) { list.add(onBuildingDevicesList(usbDevice)); } final CharSequence devicesName[] = new CharSequence[mUsbManager.getDeviceList().size()]; list.toArray(devicesName); Bundle bundle = new Bundle(); bundle.putCharSequenceArray(Consts.ACTION_USB_SHOW_DEVICES_LIST, devicesName); sendResultToUI(Consts.ACTION_USB_SHOW_DEVICES_LIST_RESULT, bundle); } else if (Consts.ACTION_USB_SELECT_DEVICE.equals(action)) { device = (UsbDevice) mUsbManager.getDeviceList().values().toArray()[intent.getIntExtra(Consts.ACTION_USB_SELECT_DEVICE, 0)]; mUsbManager.requestPermission(device, mPermissionIntent); } onCommand(intent, action, flags, startId); return START_REDELIVER_INTENT; } @Override public void onDestroy() { super.onDestroy(); if (usbThreadDataReceiver != null) { usbThreadDataReceiver.stopThis(); } unregisterReceiver(mUsbReceiver); } private class USBThreadDataReceiver extends Thread { private volatile boolean isStopped; public USBThreadDataReceiver() { } @Override public void run() { if 
(connection != null && endPointRead != null) { while (!isStopped) { final byte[] buffer = new byte[packetSize]; final int status = connection.bulkTransfer(endPointRead, buffer, packetSize, 300); if (status >= 0) { uiHandler.post(new Runnable() { @Override public void run() { onUSBDataReceive(buffer); } }); } } } } public void stopThis() { isStopped = true; } } private void sendData(String data, boolean sendAsString) { if (device != null && endPointWrite != null && mUsbManager.hasPermission(device) && !data.isEmpty()) { // mLog(connection +"\n"+ device +"\n"+ request +"\n"+ // packetSize); byte[] out = data.getBytes();// UTF-16LE // Charset.forName("UTF-16") onUSBDataSending(data); if (sendAsString) { try { String str[] = data.split("[\\s]"); out = new byte[str.length]; for (int i = 0; i < str.length; i++) { out[i] = USBUtils.toByte(Integer.decode(str[i])); } } catch (Exception e) { onSendingError(e); } } int status = connection.bulkTransfer(endPointWrite, out, out.length, 250); onUSBDataSended(status, out); } } /** * receives the permission request to connect usb devices */ private final BroadcastReceiver mUsbReceiver = new BroadcastReceiver() { public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (Consts.ACTION_USB_PERMISSION.equals(action)) { setDevice(intent); } if (UsbManager.ACTION_USB_DEVICE_ATTACHED.equals(action)) { setDevice(intent); if (device == null) { onDeviceConnected(device); } } if (UsbManager.ACTION_USB_DEVICE_DETACHED.equals(action)) { if (device != null) { device = null; usbThreadDataReceiver.stopThis(); sendResultToUI(Consts.ACTION_USB_DEVICE_DETACHED, null); onDeviceDisconnected(device); } } } private void setDevice(Intent intent) { device = (UsbDevice) intent.getParcelableExtra(UsbManager.EXTRA_DEVICE); if (device != null && intent.getBooleanExtra(UsbManager.EXTRA_PERMISSION_GRANTED, false)) { onDeviceSelected(device); connection = mUsbManager.openDevice(device); intf = device.getInterface(0); if (null == connection) { // mLog("(unable to establish connection)\n"); } else { connection.claimInterface(intf, true); } try { if (UsbConstants.USB_DIR_OUT == intf.getEndpoint(1).getDirection()) { endPointWrite = intf.getEndpoint(1); } } catch (Exception e) { Log.e("endPointWrite", "Device have no endPointWrite", e); } try { if (UsbConstants.USB_DIR_IN == intf.getEndpoint(0).getDirection()) { endPointRead = intf.getEndpoint(0); packetSize = endPointRead.getMaxPacketSize(); } } catch (Exception e) { Log.e("endPointWrite", "Device have no endPointRead", e); } usbThreadDataReceiver = new USBThreadDataReceiver(); usbThreadDataReceiver.start(); sendResultToUI(Consts.ACTION_USB_DEVICE_ATTACHED, null); } } }; public void sendResultToUI(int resultCode, Bundle resultData) { resultReceiver.send(resultCode, resultData); } public void onCommand(Intent intent, String action, int flags, int startId) { } public void onUSBDataReceive(byte[] buffer) { } public void onDeviceConnected(UsbDevice device) { } public void onDeviceDisconnected(UsbDevice device) { } public void onDeviceSelected(UsbDevice device) { } public CharSequence onBuildingDevicesList(UsbDevice usbDevice) { return null; } public void onUSBDataSending(String data) { } public void onUSBDataSended(int status, byte[] out) { } public void onSendingError(Exception e) { } }
Updated USB HID read API, using UsbRequest queue
src/com/appspot/usbhidterminal/core/AbstractUSBHIDService.java
Updated USB HID read API, using UsbRequest queue
Java
apache-2.0
584d3cd8f5c8a04b463681ebcf49b18cca41541d
0
java110/MicroCommunity,java110/MicroCommunity,java110/MicroCommunity,java110/MicroCommunity
/* * Copyright 2017-2020 吴学文 and java110 team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.java110.user.cmd.menu; import com.alibaba.fastjson.JSONObject; import com.java110.core.annotation.Java110Cmd; import com.java110.core.context.ICmdDataFlowContext; import com.java110.core.event.cmd.AbstractServiceCmdListener; import com.java110.core.event.cmd.CmdEvent; import com.java110.dto.menuCatalog.MenuCatalogDto; import com.java110.dto.store.StoreDto; import com.java110.intf.store.IStoreInnerServiceSMO; import com.java110.intf.user.IMenuCatalogV1InnerServiceSMO; import com.java110.utils.exception.CmdException; import com.java110.utils.util.Assert; import com.java110.utils.util.BeanConvertUtil; import com.java110.vo.ResultVo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import java.util.ArrayList; import java.util.List; /** * 类表述:查询 * 服务编码:menuCatalog.listMenuCatalog * 请求路劲:/app/menuCatalog.ListMenuCatalog * add by 吴学文 at 2022-02-26 10:12:36 mail: [email protected] * open source address: https://gitee.com/wuxw7/MicroCommunity * 官网:http://www.homecommunity.cn * 温馨提示:如果您对此文件进行修改 请不要删除原有作者及注释信息,请补充您的 修改的原因以及联系邮箱如下 * // modify by 张三 at 2021-09-12 第10行在某种场景下存在某种bug 需要修复,注释10至20行 加入 20行至30行 */ @Java110Cmd(serviceCode = "menu.listCatalog") public class ListCatalogCmd extends AbstractServiceCmdListener { private static Logger logger = LoggerFactory.getLogger(ListCatalogCmd.class); @Autowired private IMenuCatalogV1InnerServiceSMO menuCatalogV1InnerServiceSMOImpl; @Autowired private IStoreInnerServiceSMO storeInnerServiceSMOImpl; @Override public void validate(CmdEvent event, ICmdDataFlowContext cmdDataFlowContext, JSONObject reqJson) { super.validatePageInfo(reqJson); } @Override public void doCmd(CmdEvent event, ICmdDataFlowContext cmdDataFlowContext, JSONObject reqJson) throws CmdException { MenuCatalogDto menuCatalogDto = BeanConvertUtil.covertBean(reqJson, MenuCatalogDto.class); //查询store 信息 StoreDto storeDto = new StoreDto(); storeDto.setStoreId(reqJson.getString("storeId")); List<StoreDto> storeDtos = storeInnerServiceSMOImpl.getStores(storeDto); Assert.listOnlyOne(storeDtos, "商户不存在"); menuCatalogDto.setStoreType(storeDtos.get(0).getStoreTypeCd()); int count = menuCatalogV1InnerServiceSMOImpl.queryMenuCatalogsCount(menuCatalogDto); List<MenuCatalogDto> menuCatalogDtos = null; if (count > 0) { menuCatalogDtos = menuCatalogV1InnerServiceSMOImpl.queryMenuCatalogs(menuCatalogDto); } else { menuCatalogDtos = new ArrayList<>(); } ResultVo resultVo = new ResultVo((int) Math.ceil((double) count / (double) reqJson.getInteger("row")), count, menuCatalogDtos); ResponseEntity<String> responseEntity = new ResponseEntity<String>(resultVo.toString(), HttpStatus.OK); cmdDataFlowContext.setResponseEntity(responseEntity); } }
service-user/src/main/java/com/java110/user/cmd/menu/ListCatalogCmd.java
/* * Copyright 2017-2020 吴学文 and java110 team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.java110.user.cmd.menu; import com.alibaba.fastjson.JSONObject; import com.java110.core.annotation.Java110Cmd; import com.java110.core.context.ICmdDataFlowContext; import com.java110.core.event.cmd.AbstractServiceCmdListener; import com.java110.core.event.cmd.CmdEvent; import com.java110.dto.menuCatalog.MenuCatalogDto; import com.java110.intf.user.IMenuCatalogV1InnerServiceSMO; import com.java110.utils.exception.CmdException; import com.java110.utils.util.BeanConvertUtil; import com.java110.vo.ResultVo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import java.util.ArrayList; import java.util.List; /** * 类表述:查询 * 服务编码:menuCatalog.listMenuCatalog * 请求路劲:/app/menuCatalog.ListMenuCatalog * add by 吴学文 at 2022-02-26 10:12:36 mail: [email protected] * open source address: https://gitee.com/wuxw7/MicroCommunity * 官网:http://www.homecommunity.cn * 温馨提示:如果您对此文件进行修改 请不要删除原有作者及注释信息,请补充您的 修改的原因以及联系邮箱如下 * // modify by 张三 at 2021-09-12 第10行在某种场景下存在某种bug 需要修复,注释10至20行 加入 20行至30行 */ @Java110Cmd(serviceCode = "menu.listCatalog") public class ListCatalogCmd extends AbstractServiceCmdListener { private static Logger logger = LoggerFactory.getLogger(ListCatalogCmd.class); @Autowired private IMenuCatalogV1InnerServiceSMO menuCatalogV1InnerServiceSMOImpl; @Override public void validate(CmdEvent event, ICmdDataFlowContext cmdDataFlowContext, JSONObject reqJson) { super.validatePageInfo(reqJson); } @Override public void doCmd(CmdEvent event, ICmdDataFlowContext cmdDataFlowContext, JSONObject reqJson) throws CmdException { MenuCatalogDto menuCatalogDto = BeanConvertUtil.covertBean(reqJson, MenuCatalogDto.class); int count = menuCatalogV1InnerServiceSMOImpl.queryMenuCatalogsCount(menuCatalogDto); List<MenuCatalogDto> menuCatalogDtos = null; if (count > 0) { menuCatalogDtos = menuCatalogV1InnerServiceSMOImpl.queryMenuCatalogs(menuCatalogDto); } else { menuCatalogDtos = new ArrayList<>(); } ResultVo resultVo = new ResultVo((int) Math.ceil((double) count / (double) reqJson.getInteger("row")), count, menuCatalogDtos); ResponseEntity<String> responseEntity = new ResponseEntity<String>(resultVo.toString(), HttpStatus.OK); cmdDataFlowContext.setResponseEntity(responseEntity); } }
Optimize the menu query feature
service-user/src/main/java/com/java110/user/cmd/menu/ListCatalogCmd.java
Optimize the menu query feature
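For readers skimming this record: the diff above adds a store lookup before the catalog query so that results are scoped to the calling store's type. Below is a minimal, self-contained Java sketch of that pattern; the types, the error message, and the sample type code are hypothetical stand-ins, not the java110 API.

```java
import java.util.List;

// Hypothetical stand-ins for the DTO types; only the shape of the pattern matters here.
class Store {
    String storeId;
    String storeTypeCd;
}

class CatalogQuery {
    String storeType;
}

public class ScopedCatalogQuerySketch {

    // Mirrors the role of Assert.listOnlyOne: exactly one store must match, otherwise fail fast.
    static Store onlyOne(List<Store> stores, String message) {
        if (stores == null || stores.size() != 1) {
            throw new IllegalStateException(message);
        }
        return stores.get(0);
    }

    // The commit's key change: resolve the store first, then narrow the catalog query by its type.
    static CatalogQuery scopeToStore(List<Store> matches) {
        Store store = onlyOne(matches, "store does not exist");
        CatalogQuery query = new CatalogQuery();
        query.storeType = store.storeTypeCd;
        return query;
    }

    public static void main(String[] args) {
        Store store = new Store();
        store.storeId = "store-1";           // hypothetical id
        store.storeTypeCd = "TYPE_MERCHANT"; // hypothetical type code
        CatalogQuery query = scopeToStore(List.of(store));
        System.out.println("query scoped to store type: " + query.storeType);
    }
}
```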
Java
apache-2.0
bd4433ca01f7cf7e57a57c98de528096188914b2
0
ham1/jmeter,etnetera/jmeter,ham1/jmeter,etnetera/jmeter,ham1/jmeter,apache/jmeter,benbenw/jmeter,benbenw/jmeter,apache/jmeter,etnetera/jmeter,apache/jmeter,benbenw/jmeter,benbenw/jmeter,etnetera/jmeter,ham1/jmeter,apache/jmeter,ham1/jmeter,etnetera/jmeter,apache/jmeter
// $Header$ /* * Copyright 2001-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * */ package org.apache.jmeter.visualizers; import java.awt.Color; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Rectangle; import java.util.Iterator; import java.util.List; import javax.swing.JComponent; import javax.swing.Scrollable; import javax.swing.SwingUtilities; import org.apache.jmeter.gui.util.JMeterColor; import org.apache.jmeter.samplers.Clearable; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; /** * Implements a simple graph for displaying performance results. * * @author Michael Stover Created March 21, 2002 * @version $Revision$ Last updated: $Date$ */ public class Graph extends JComponent implements Scrollable, Clearable { private static Logger log = LoggingManager.getLoggerForClass(); private boolean wantData = true; private boolean wantAverage = true; private boolean wantDeviation = true; private boolean wantThroughput = true; private boolean wantMedian = true; private SamplingStatCalculator model; private static int width = 2000; private long graphMax = 1; private double throughputMax = 1; /** * Constructor for the Graph object. */ public Graph() { this.setPreferredSize(new Dimension(width, 100)); } /** * Constructor for the Graph object. */ public Graph(SamplingStatCalculator model) { this(); setModel(model); } /** * Sets the Model attribute of the Graph object. */ private void setModel(Object model) { this.model = (SamplingStatCalculator) model; repaint(); } /** * Gets the PreferredScrollableViewportSize attribute of the Graph object. * * @return the PreferredScrollableViewportSize value */ public Dimension getPreferredScrollableViewportSize() { return this.getPreferredSize(); // return new Dimension(width, 400); } /** * Gets the ScrollableUnitIncrement attribute of the Graph object. * * @return the ScrollableUnitIncrement value */ public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) { return 5; } /** * Gets the ScrollableBlockIncrement attribute of the Graph object. * * @return the ScrollableBlockIncrement value */ public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) { return (int) (visibleRect.width * .9); } /** * Gets the ScrollableTracksViewportWidth attribute of the Graph object. * * @return the ScrollableTracksViewportWidth value */ public boolean getScrollableTracksViewportWidth() { return false; } /** * Gets the ScrollableTracksViewportHeight attribute of the Graph object. * * @return the ScrollableTracksViewportHeight value */ public boolean getScrollableTracksViewportHeight() { return true; } /** * Clears this graph. 
*/ public void clear() { graphMax = 1; throughputMax = 1; } public void enableData(boolean value) { this.wantData = value; } public void enableAverage(boolean value) { this.wantAverage = value; } public void enableMedian(boolean value) { this.wantMedian = value; } public void enableDeviation(boolean value) { this.wantDeviation = value; } public void enableThroughput(boolean value) { this.wantThroughput = value; } public void updateGui(final Sample oneSample) { long h = model.getPercentPoint((float) 0.90).longValue(); boolean repaint = false; if ((oneSample.count % 20 == 0 || oneSample.count < 20) && h > (graphMax * 1.2) || graphMax > (h * 1.2)) { graphMax = h; repaint = true; } if (model.getMaxThroughput() > throughputMax) { throughputMax = model.getMaxThroughput() * 1.3; repaint = true; } if (repaint) { repaint(); return; } final int xPos = model.getCount(); SwingUtilities.invokeLater(new Runnable() { public void run() { Graphics g = getGraphics(); if (g != null) { drawSample(xPos, oneSample, g); } } }); } public void paintComponent(Graphics g) { super.paintComponent(g); List samples = model.getSamples(); synchronized (samples ) { Iterator e = samples.iterator(); for (int i = 0; e.hasNext(); i++) { Sample s = (Sample) e.next(); drawSample(i, s, g); } } } private void drawSample(int x, Sample oneSample, Graphics g) { // int width = getWidth(); int height = getHeight(); log.debug("Drawing a sample at " + x); if (wantData) { int data = (int) (oneSample.data * height / graphMax); if (oneSample.success) { g.setColor(Color.black); } else { g.setColor(JMeterColor.YELLOW); } g.drawLine(x % width, height - data, x % width, height - data - 1); log.debug("Drawing coords = " + (x % width) + "," + (height - data)); } if (wantAverage) { int average = (int) (oneSample.average * height / graphMax); g.setColor(Color.blue); g.drawLine(x % width, height - average, x % width, (height - average - 1)); } if (wantMedian) { int median = (int) (oneSample.median * height / graphMax); g.setColor(JMeterColor.purple); g.drawLine(x % width, height - median, x % width, (height - median - 1)); } if (wantDeviation) { int deviation = (int) (oneSample.deviation * height / graphMax); g.setColor(Color.red); g.drawLine(x % width, height - deviation, x % width, (height - deviation - 1)); } if (wantThroughput) { int throughput = (int) (oneSample.throughput * height / throughputMax); g.setColor(JMeterColor.dark_green); g.drawLine(x % width, height - throughput, x % width, (height - throughput - 1)); } } /** * @return Returns the graphMax. */ public long getGraphMax() { return graphMax; } }
src/components/org/apache/jmeter/visualizers/Graph.java
// $Header$ /* * Copyright 2001-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * */ package org.apache.jmeter.visualizers; import java.awt.Color; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Rectangle; import java.util.Iterator; import javax.swing.JComponent; import javax.swing.Scrollable; import javax.swing.SwingUtilities; import org.apache.jmeter.gui.util.JMeterColor; import org.apache.jmeter.samplers.Clearable; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; /** * Implements a simple graph for displaying performance results. * * @author Michael Stover Created March 21, 2002 * @version $Revision$ Last updated: $Date$ */ public class Graph extends JComponent implements Scrollable, Clearable { private static Logger log = LoggingManager.getLoggerForClass(); private boolean wantData = true; private boolean wantAverage = true; private boolean wantDeviation = true; private boolean wantThroughput = true; private boolean wantMedian = true; private SamplingStatCalculator model; private static int width = 2000; private long graphMax = 1; private double throughputMax = 1; /** * Constructor for the Graph object. */ public Graph() { this.setPreferredSize(new Dimension(width, 100)); } /** * Constructor for the Graph object. */ public Graph(SamplingStatCalculator model) { this(); setModel(model); } /** * Sets the Model attribute of the Graph object. */ private void setModel(Object model) { this.model = (SamplingStatCalculator) model; repaint(); } /** * Gets the PreferredScrollableViewportSize attribute of the Graph object. * * @return the PreferredScrollableViewportSize value */ public Dimension getPreferredScrollableViewportSize() { return this.getPreferredSize(); // return new Dimension(width, 400); } /** * Gets the ScrollableUnitIncrement attribute of the Graph object. * * @return the ScrollableUnitIncrement value */ public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) { return 5; } /** * Gets the ScrollableBlockIncrement attribute of the Graph object. * * @return the ScrollableBlockIncrement value */ public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) { return (int) (visibleRect.width * .9); } /** * Gets the ScrollableTracksViewportWidth attribute of the Graph object. * * @return the ScrollableTracksViewportWidth value */ public boolean getScrollableTracksViewportWidth() { return false; } /** * Gets the ScrollableTracksViewportHeight attribute of the Graph object. * * @return the ScrollableTracksViewportHeight value */ public boolean getScrollableTracksViewportHeight() { return true; } /** * Clears this graph. 
*/ public void clear() { graphMax = 1; throughputMax = 1; } public void enableData(boolean value) { this.wantData = value; } public void enableAverage(boolean value) { this.wantAverage = value; } public void enableMedian(boolean value) { this.wantMedian = value; } public void enableDeviation(boolean value) { this.wantDeviation = value; } public void enableThroughput(boolean value) { this.wantThroughput = value; } public void updateGui(final Sample oneSample) { long h = model.getPercentPoint((float) 0.90).longValue(); boolean repaint = false; if ((oneSample.count % 20 == 0 || oneSample.count < 20) && h > (graphMax * 1.2) || graphMax > (h * 1.2)) { graphMax = h; repaint = true; } if (model.getMaxThroughput() > throughputMax) { throughputMax = model.getMaxThroughput() * 1.3; repaint = true; } if (repaint) { repaint(); return; } final int xPos = model.getCount(); SwingUtilities.invokeLater(new Runnable() { public void run() { Graphics g = getGraphics(); if (g != null) { drawSample(xPos, oneSample, g); } } }); } public void paintComponent(Graphics g) { super.paintComponent(g); synchronized (model.getSamples()) { Iterator e = model.getSamples().iterator(); for (int i = 0; e.hasNext(); i++) { Sample s = (Sample) e.next(); drawSample(i, s, g); } } } private void drawSample(int x, Sample oneSample, Graphics g) { // int width = getWidth(); int height = getHeight(); log.debug("Drawing a sample at " + x); if (wantData) { int data = (int) (oneSample.data * height / graphMax); if (oneSample.success) { g.setColor(Color.black); } else { g.setColor(JMeterColor.YELLOW); } g.drawLine(x % width, height - data, x % width, height - data - 1); log.debug("Drawing coords = " + (x % width) + "," + (height - data)); } if (wantAverage) { int average = (int) (oneSample.average * height / graphMax); g.setColor(Color.blue); g.drawLine(x % width, height - average, x % width, (height - average - 1)); } if (wantMedian) { int median = (int) (oneSample.median * height / graphMax); g.setColor(JMeterColor.purple); g.drawLine(x % width, height - median, x % width, (height - median - 1)); } if (wantDeviation) { int deviation = (int) (oneSample.deviation * height / graphMax); g.setColor(Color.red); g.drawLine(x % width, height - deviation, x % width, (height - deviation - 1)); } if (wantThroughput) { int throughput = (int) (oneSample.throughput * height / throughputMax); g.setColor(JMeterColor.dark_green); g.drawLine(x % width, height - throughput, x % width, (height - throughput - 1)); } } /** * @return Returns the graphMax. */ public long getGraphMax() { return graphMax; } }
Bug 33403 - thread safety problems git-svn-id: https://svn.apache.org/repos/asf/jakarta/jmeter/branches/rel-2-1@325754 13f79535-47bb-0310-9956-ffa450edef68 Former-commit-id: 27da3bf8a1e53d6653efb4ebce601ef3ff94e895
src/components/org/apache/jmeter/visualizers/Graph.java
Bug 33403 - thread safety problems
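As a side note on the record above: the Bug 33403 fix captures the sample list once and then both locks and iterates on that single reference, instead of calling the accessor around the synchronized block and again inside it. A small, self-contained sketch of that pattern follows, using a hypothetical Model class rather than JMeter's SamplingStatCalculator.

```java
import java.util.ArrayList;
import java.util.List;

// Minimal sketch of the "capture once, lock and iterate the same reference" pattern.
public class SnapshotIterationSketch {

    static class Model {
        // Another thread may replace this list (e.g. on a reset), which is why readers
        // must not fetch it twice around a synchronized block.
        private volatile List<Integer> samples = new ArrayList<>();

        List<Integer> getSamples() {
            return samples;
        }

        void add(int value) {
            List<Integer> current = samples;
            synchronized (current) {
                current.add(value);
            }
        }
    }

    static long sum(Model model) {
        // One call, one reference: the object we lock on is the object we iterate.
        List<Integer> samples = model.getSamples();
        long total = 0;
        synchronized (samples) {
            for (int value : samples) {
                total += value;
            }
        }
        return total;
    }

    public static void main(String[] args) {
        Model model = new Model();
        model.add(3);
        model.add(4);
        System.out.println(sum(model)); // prints 7
    }
}
```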
Java
apache-2.0
3d8fca22e070d211a1133ddeed16071c8b40056a
0
facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.litho; import javax.annotation.Nullable; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Color; import android.graphics.drawable.Drawable; import android.graphics.Rect; import android.support.annotation.AttrRes; import android.support.annotation.ColorInt; import android.support.annotation.DimenRes; import android.support.annotation.Dimension; import android.support.annotation.DrawableRes; import android.support.annotation.Px; import android.support.annotation.StringRes; import android.support.v4.view.ViewCompat; import android.text.TextUtils; import android.util.SparseArray; import com.facebook.R; import com.facebook.litho.config.ComponentsConfiguration; import com.facebook.litho.reference.ColorDrawableReference; import com.facebook.litho.reference.Reference; import com.facebook.litho.reference.ResourceDrawableReference; import com.facebook.infer.annotation.ThreadConfined; import com.facebook.yoga.YogaAlign; import com.facebook.yoga.YogaBaselineFunction; import com.facebook.yoga.YogaFlexDirection; import com.facebook.yoga.YogaJustify; import com.facebook.yoga.YogaDirection; import com.facebook.yoga.YogaPositionType; import com.facebook.yoga.YogaWrap; import com.facebook.yoga.YogaEdge; import com.facebook.yoga.YogaConstants; import com.facebook.yoga.YogaMeasureFunction; import com.facebook.yoga.YogaNode; import com.facebook.yoga.YogaNodeAPI; import com.facebook.yoga.YogaOverflow; import com.facebook.yoga.Spacing; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH; import static android.os.Build.VERSION_CODES.JELLY_BEAN; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR1; import static android.support.annotation.Dimension.DP; import static com.facebook.litho.ComponentContext.NULL_LAYOUT; import static com.facebook.yoga.YogaEdge.ALL; import static com.facebook.yoga.YogaEdge.BOTTOM; import static com.facebook.yoga.YogaEdge.END; import static com.facebook.yoga.YogaEdge.HORIZONTAL; import static com.facebook.yoga.YogaEdge.LEFT; import static com.facebook.yoga.YogaEdge.RIGHT; import static com.facebook.yoga.YogaEdge.START; import static com.facebook.yoga.YogaEdge.TOP; import static com.facebook.yoga.YogaEdge.VERTICAL; /** * Internal class representing both a {@link ComponentLayout} and a * {@link com.facebook.litho.ComponentLayout.ContainerBuilder}. */ @ThreadConfined(ThreadConfined.ANY) class InternalNode implements ComponentLayout, ComponentLayout.ContainerBuilder { // Used to check whether or not the framework can use style IDs for // paddingStart/paddingEnd due to a bug in some Android devices. private static final boolean SUPPORTS_RTL = (SDK_INT >= JELLY_BEAN_MR1); // When this flag is set, layoutDirection style was explicitly set on this node. private static final long PFLAG_LAYOUT_DIRECTION_IS_SET = 1L << 0; // When this flag is set, alignSelf was explicitly set on this node. private static final long PFLAG_ALIGN_SELF_IS_SET = 1L << 1; // When this flag is set, position type was explicitly set on this node. 
private static final long PFLAG_POSITION_TYPE_IS_SET = 1L << 2; // When this flag is set, flex was explicitly set on this node. private static final long PFLAG_FLEX_IS_SET = 1L << 3; // When this flag is set, flex grow was explicitly set on this node. private static final long PFLAG_FLEX_GROW_IS_SET = 1L << 4; // When this flag is set, flex shrink was explicitly set on this node. private static final long PFLAG_FLEX_SHRINK_IS_SET = 1L << 5; // When this flag is set, flex basis was explicitly set on this node. private static final long PFLAG_FLEX_BASIS_IS_SET = 1L << 6; // When this flag is set, importantForAccessibility was explicitly set on this node. private static final long PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET = 1L << 7; // When this flag is set, duplicateParentState was explicitly set on this node. private static final long PFLAG_DUPLICATE_PARENT_STATE_IS_SET = 1L << 8; // When this flag is set, margin was explicitly set on this node. private static final long PFLAG_MARGIN_IS_SET = 1L << 9; // When this flag is set, padding was explicitly set on this node. private static final long PFLAG_PADDING_IS_SET = 1L << 10; // When this flag is set, position was explicitly set on this node. private static final long PFLAG_POSITION_IS_SET = 1L << 11; // When this flag is set, width was explicitly set on this node. private static final long PFLAG_WIDTH_IS_SET = 1L << 12; // When this flag is set, minWidth was explicitly set on this node. private static final long PFLAG_MIN_WIDTH_IS_SET = 1L << 13; // When this flag is set, maxWidth was explicitly set on this node. private static final long PFLAG_MAX_WIDTH_IS_SET = 1L << 14; // When this flag is set, height was explicitly set on this node. private static final long PFLAG_HEIGHT_IS_SET = 1L << 15; // When this flag is set, minHeight was explicitly set on this node. private static final long PFLAG_MIN_HEIGHT_IS_SET = 1L << 16; // When this flag is set, maxHeight was explicitly set on this node. private static final long PFLAG_MAX_HEIGHT_IS_SET = 1L << 17; // When this flag is set, background was explicitly set on this node. private static final long PFLAG_BACKGROUND_IS_SET = 1L << 18; // When this flag is set, foreground was explicitly set on this node. private static final long PFLAG_FOREGROUND_IS_SET = 1L << 19; // When this flag is set, visibleHandler was explicitly set on this node. private static final long PFLAG_VISIBLE_HANDLER_IS_SET = 1L << 20; // When this flag is set, focusedHandler was explicitly set on this node. private static final long PFLAG_FOCUSED_HANDLER_IS_SET = 1L << 21; // When this flag is set, fullImpressionHandler was explicitly set on this node. private static final long PFLAG_FULL_IMPRESSION_HANDLER_IS_SET = 1L << 22; // When this flag is set, invisibleHandler was explicitly set on this node. private static final long PFLAG_INVISIBLE_HANDLER_IS_SET = 1L << 23; // When this flag is set, touch expansion was explicitly set on this node. private static final long PFLAG_TOUCH_EXPANSION_IS_SET = 1L << 24; // When this flag is set, border width was explicitly set on this node. private static final long PFLAG_BORDER_WIDTH_IS_SET = 1L << 25; // When this flag is set, aspectRatio was explicitly set on this node. private static final long PFLAG_ASPECT_RATIO_IS_SET = 1L << 26; // When this flag is set, transitionKey was explicitly set on this node. private static final long PFLAG_TRANSITION_KEY_IS_SET = 1L << 27; // When this flag is set, border color was explicitly set on this node. 
private static final long PFLAG_BORDER_COLOR_IS_SET = 1L << 28; private final ResourceResolver mResourceResolver = new ResourceResolver(); YogaNodeAPI mYogaNode; private ComponentContext mComponentContext; private Resources mResources; private Component mComponent; private int mImportantForAccessibility = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO; private boolean mDuplicateParentState; private boolean mIsNestedTreeHolder; private InternalNode mNestedTree; private InternalNode mNestedTreeHolder; private long mPrivateFlags; private Reference<? extends Drawable> mBackground; private Reference<? extends Drawable> mForeground; private int mBorderColor = Color.TRANSPARENT; private NodeInfo mNodeInfo; private boolean mForceViewWrapping; private String mTransitionKey; private EventHandler mVisibleHandler; private EventHandler mFocusedHandler; private EventHandler mFullImpressionHandler; private EventHandler mInvisibleHandler; private String mTestKey; private Spacing mTouchExpansion; private Spacing mNestedTreePadding; private Spacing mNestedTreeBorderWidth; private boolean[] mIsPaddingPercent; private float mResolvedTouchExpansionLeft = YogaConstants.UNDEFINED; private float mResolvedTouchExpansionRight = YogaConstants.UNDEFINED; private float mResolvedX = YogaConstants.UNDEFINED; private float mResolvedY = YogaConstants.UNDEFINED; private float mResolvedWidth = YogaConstants.UNDEFINED; private float mResolvedHeight = YogaConstants.UNDEFINED; private int mLastWidthSpec = DiffNode.UNSPECIFIED; private int mLastHeightSpec = DiffNode.UNSPECIFIED; private float mLastMeasuredWidth = DiffNode.UNSPECIFIED; private float mLastMeasuredHeight = DiffNode.UNSPECIFIED; private DiffNode mDiffNode; private boolean mCachedMeasuresValid; private TreeProps mPendingTreeProps; void init(YogaNodeAPI yogaNode, ComponentContext componentContext, Resources resources) { yogaNode.setData(this); yogaNode.setOverflow(YogaOverflow.HIDDEN); yogaNode.setMeasureFunction(null); // YogaNode is the only version of YogaNodeAPI with this support; if (yogaNode instanceof YogaNode) { yogaNode.setBaselineFunction(null); } mYogaNode = yogaNode; mComponentContext = componentContext; mResources = resources; mResourceResolver.init( mComponentContext, componentContext.getResourceCache()); } @Px @Override public int getX() { if (YogaConstants.isUndefined(mResolvedX)) { mResolvedX = mYogaNode.getLayoutX(); } return (int) mResolvedX; } @Px @Override public int getY() { if (YogaConstants.isUndefined(mResolvedY)) { mResolvedY = mYogaNode.getLayoutY(); } return (int) mResolvedY; } @Px @Override public int getWidth() { if (YogaConstants.isUndefined(mResolvedWidth)) { mResolvedWidth = mYogaNode.getLayoutWidth(); } return (int) mResolvedWidth; } @Px @Override public int getHeight() { if (YogaConstants.isUndefined(mResolvedHeight)) { mResolvedHeight = mYogaNode.getLayoutHeight(); } return (int) mResolvedHeight; } @Px @Override public int getPaddingLeft() { return FastMath.round(mYogaNode.getLayoutPadding(LEFT)); } @Px @Override public int getPaddingTop() { return FastMath.round(mYogaNode.getLayoutPadding(TOP)); } @Px @Override public int getPaddingRight() { return FastMath.round(mYogaNode.getLayoutPadding(RIGHT)); } @Px @Override public int getPaddingBottom() { return FastMath.round(mYogaNode.getLayoutPadding(BOTTOM)); } public Reference<? extends Drawable> getBackground() { return mBackground; } public Reference<? 
extends Drawable> getForeground() { return mForeground; } public void setCachedMeasuresValid(boolean valid) { mCachedMeasuresValid = valid; } public int getLastWidthSpec() { return mLastWidthSpec; } public void setLastWidthSpec(int widthSpec) { mLastWidthSpec = widthSpec; } public int getLastHeightSpec() { return mLastHeightSpec; } public void setLastHeightSpec(int heightSpec) { mLastHeightSpec = heightSpec; } public boolean hasVisibilityHandlers() { return mVisibleHandler != null || mFocusedHandler != null || mFullImpressionHandler != null || mInvisibleHandler != null; } /** * The last value the measure function associated with this node {@link Component} returned * for the width. This is used together with {@link InternalNode#getLastWidthSpec()} * to implement measure caching. */ float getLastMeasuredWidth() { return mLastMeasuredWidth; } /** * Sets the last value the measure function associated with this node {@link Component} returned * for the width. */ void setLastMeasuredWidth(float lastMeasuredWidth) { mLastMeasuredWidth = lastMeasuredWidth; } /** * The last value the measure function associated with this node {@link Component} returned * for the height. This is used together with {@link InternalNode#getLastHeightSpec()} * to implement measure caching. */ float getLastMeasuredHeight() { return mLastMeasuredHeight; } /** * Sets the last value the measure function associated with this node {@link Component} returned * for the height. */ void setLastMeasuredHeight(float lastMeasuredHeight) { mLastMeasuredHeight = lastMeasuredHeight; } DiffNode getDiffNode() { return mDiffNode; } boolean areCachedMeasuresValid() { return mCachedMeasuresValid; } void setDiffNode(DiffNode diffNode) { mDiffNode = diffNode; } /** * Mark this node as a nested tree root holder. */ void markIsNestedTreeHolder(TreeProps currentTreeProps) { mIsNestedTreeHolder = true; mPendingTreeProps = TreeProps.copy(currentTreeProps); } /** * @return Whether this node is holding a nested tree or not. The decision was made during * tree creation {@link ComponentLifecycle#createLayout(ComponentContext, Component, boolean)}.
*/ boolean isNestedTreeHolder() { return mIsNestedTreeHolder; } @Override public YogaDirection getResolvedLayoutDirection() { return mYogaNode.getLayoutDirection(); } @Override public InternalNode layoutDirection(YogaDirection direction) { mPrivateFlags |= PFLAG_LAYOUT_DIRECTION_IS_SET; mYogaNode.setDirection(direction); return this; } @Override public InternalNode flexDirection(YogaFlexDirection direction) { mYogaNode.setFlexDirection(direction); return this; } @Override public InternalNode wrap(YogaWrap wrap) { mYogaNode.setWrap(wrap); return this; } @Override public InternalNode justifyContent(YogaJustify justifyContent) { mYogaNode.setJustifyContent(justifyContent); return this; } @Override public InternalNode alignItems(YogaAlign alignItems) { mYogaNode.setAlignItems(alignItems); return this; } @Override public InternalNode alignContent(YogaAlign alignContent) { mYogaNode.setAlignContent(alignContent); return this; } @Override public InternalNode alignSelf(YogaAlign alignSelf) { mPrivateFlags |= PFLAG_ALIGN_SELF_IS_SET; mYogaNode.setAlignSelf(alignSelf); return this; } @Override public InternalNode positionType(YogaPositionType positionType) { mPrivateFlags |= PFLAG_POSITION_TYPE_IS_SET; mYogaNode.setPositionType(positionType); return this; } @Override public InternalNode flex(float flex) { mPrivateFlags |= PFLAG_FLEX_IS_SET; mYogaNode.setFlex(flex); return this; } @Override public InternalNode flexGrow(float flexGrow) { mPrivateFlags |= PFLAG_FLEX_GROW_IS_SET; mYogaNode.setFlexGrow(flexGrow); return this; } @Override public InternalNode flexShrink(float flexShrink) { mPrivateFlags |= PFLAG_FLEX_SHRINK_IS_SET; mYogaNode.setFlexShrink(flexShrink); return this; } @Override public InternalNode flexBasisPx(@Px int flexBasis) { mPrivateFlags |= PFLAG_FLEX_BASIS_IS_SET; mYogaNode.setFlexBasis(flexBasis); return this; } @Override public InternalNode flexBasisPercent(float percent) { mPrivateFlags |= PFLAG_FLEX_BASIS_IS_SET; mYogaNode.setFlexBasisPercent(percent); return this; } @Override public InternalNode flexBasisAttr(@AttrRes int resId, @DimenRes int defaultResId) { return flexBasisPx(mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode flexBasisAttr(@AttrRes int resId) { return flexBasisAttr(resId, 0); } @Override public InternalNode flexBasisRes(@DimenRes int resId) { return flexBasisPx(mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode flexBasisDip(@Dimension(unit = DP) int flexBasis) { return flexBasisPx(mResourceResolver.dipsToPixels(flexBasis)); } @Override public InternalNode importantForAccessibility(int importantForAccessibility) { mPrivateFlags |= PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET; mImportantForAccessibility = importantForAccessibility; return this; } @Override public InternalNode duplicateParentState(boolean duplicateParentState) { mPrivateFlags |= PFLAG_DUPLICATE_PARENT_STATE_IS_SET; mDuplicateParentState = duplicateParentState; return this; } @Override public InternalNode marginPx(YogaEdge edge, @Px int margin) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMargin(edge, margin); return this; } @Override public InternalNode marginPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMarginPercent(edge, percent); return this; } @Override public InternalNode marginAuto(YogaEdge edge) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMarginAuto(edge); return this; } @Override public InternalNode marginAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int 
defaultResId) { return marginPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode marginAttr( YogaEdge edge, @AttrRes int resId) { return marginAttr(edge, resId, 0); } @Override public InternalNode marginRes(YogaEdge edge, @DimenRes int resId) { return marginPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode marginDip(YogaEdge edge, @Dimension(unit = DP) int margin) { return marginPx(edge, mResourceResolver.dipsToPixels(margin)); } @Override public InternalNode paddingPx(YogaEdge edge, @Px int padding) { mPrivateFlags |= PFLAG_PADDING_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreePadding == null) { mNestedTreePadding = ComponentsPools.acquireSpacing(); } mNestedTreePadding.set(edge.intValue(), padding); setIsPaddingPercent(edge, false); } else { mYogaNode.setPadding(edge, padding); } return this; } @Override public InternalNode paddingPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_PADDING_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreePadding == null) { mNestedTreePadding = ComponentsPools.acquireSpacing(); } mNestedTreePadding.set(edge.intValue(), percent); setIsPaddingPercent(edge, true); } else { mYogaNode.setPaddingPercent(edge, percent); } return this; } @Override public InternalNode paddingAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return paddingPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode paddingAttr( YogaEdge edge, @AttrRes int resId) { return paddingAttr(edge, resId, 0); } @Override public InternalNode paddingRes(YogaEdge edge, @DimenRes int resId) { return paddingPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode paddingDip(YogaEdge edge, @Dimension(unit = DP) int padding) { return paddingPx(edge, mResourceResolver.dipsToPixels(padding)); } @Override public InternalNode borderWidthPx(YogaEdge edge, @Px int borderWidth) { mPrivateFlags |= PFLAG_BORDER_WIDTH_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreeBorderWidth == null) { mNestedTreeBorderWidth = ComponentsPools.acquireSpacing(); } mNestedTreeBorderWidth.set(edge.intValue(), borderWidth); } else { mYogaNode.setBorder(edge, borderWidth); } return this; } @Override public InternalNode borderWidthAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return borderWidthPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode borderWidthAttr( YogaEdge edge, @AttrRes int resId) { return borderWidthAttr(edge, resId, 0); } @Override public InternalNode borderWidthRes(YogaEdge edge, @DimenRes int resId) { return borderWidthPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode borderWidthDip( YogaEdge edge, @Dimension(unit = DP) int borderWidth) { return borderWidthPx(edge, mResourceResolver.dipsToPixels(borderWidth)); } @Override public Builder borderColor(@ColorInt int borderColor) { mPrivateFlags |= PFLAG_BORDER_COLOR_IS_SET; mBorderColor = borderColor; return this; } @Override public InternalNode positionPx(YogaEdge edge, @Px int position) { mPrivateFlags |= PFLAG_POSITION_IS_SET; mYogaNode.setPosition(edge, position); return this; } @Override public InternalNode positionPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_POSITION_IS_SET; mYogaNode.setPositionPercent(edge, percent); return this; } @Override public InternalNode positionAttr( YogaEdge edge, @AttrRes int resId, @DimenRes 
int defaultResId) { return positionPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode positionAttr(YogaEdge edge, @AttrRes int resId) { return positionAttr(edge, resId, 0); } @Override public InternalNode positionRes(YogaEdge edge, @DimenRes int resId) { return positionPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode positionDip( YogaEdge edge, @Dimension(unit = DP) int position) { return positionPx(edge, mResourceResolver.dipsToPixels(position)); } @Override public InternalNode widthPx(@Px int width) { mPrivateFlags |= PFLAG_WIDTH_IS_SET; mYogaNode.setWidth(width); return this; } @Override public InternalNode widthPercent(float percent) { mPrivateFlags |= PFLAG_WIDTH_IS_SET; mYogaNode.setWidthPercent(percent); return this; } @Override public InternalNode widthRes(@DimenRes int resId) { return widthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode widthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return widthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode widthAttr(@AttrRes int resId) { return widthAttr(resId, 0); } @Override public InternalNode widthDip(@Dimension(unit = DP) int width) { return widthPx(mResourceResolver.dipsToPixels(width)); } @Override public InternalNode minWidthPx(@Px int minWidth) { mPrivateFlags |= PFLAG_MIN_WIDTH_IS_SET; mYogaNode.setMinWidth(minWidth); return this; } @Override public InternalNode minWidthPercent(float percent) { mPrivateFlags |= PFLAG_MIN_WIDTH_IS_SET; mYogaNode.setMinWidthPercent(percent); return this; } @Override public InternalNode minWidthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return minWidthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode minWidthAttr(@AttrRes int resId) { return minWidthAttr(resId, 0); } @Override public InternalNode minWidthRes(@DimenRes int resId) { return minWidthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode minWidthDip(@Dimension(unit = DP) int minWidth) { return minWidthPx(mResourceResolver.dipsToPixels(minWidth)); } @Override public InternalNode maxWidthPx(@Px int maxWidth) { mPrivateFlags |= PFLAG_MAX_WIDTH_IS_SET; mYogaNode.setMaxWidth(maxWidth); return this; } @Override public InternalNode maxWidthPercent(float percent) { mPrivateFlags |= PFLAG_MAX_WIDTH_IS_SET; mYogaNode.setMaxWidthPercent(percent); return this; } @Override public InternalNode maxWidthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return maxWidthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode maxWidthAttr(@AttrRes int resId) { return maxWidthAttr(resId, 0); } @Override public InternalNode maxWidthRes(@DimenRes int resId) { return maxWidthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode maxWidthDip(@Dimension(unit = DP) int maxWidth) { return maxWidthPx(mResourceResolver.dipsToPixels(maxWidth)); } @Override public InternalNode heightPx(@Px int height) { mPrivateFlags |= PFLAG_HEIGHT_IS_SET; mYogaNode.setHeight(height); return this; } @Override public InternalNode heightPercent(float percent) { mPrivateFlags |= PFLAG_HEIGHT_IS_SET; mYogaNode.setHeightPercent(percent); return this; } @Override public InternalNode heightRes(@DimenRes int resId) { return heightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode heightAttr(@AttrRes int resId, @DimenRes int 
defaultResId) { return heightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode heightAttr(@AttrRes int resId) { return heightAttr(resId, 0); } @Override public InternalNode heightDip(@Dimension(unit = DP) int height) { return heightPx(mResourceResolver.dipsToPixels(height)); } @Override public InternalNode minHeightPx(@Px int minHeight) { mPrivateFlags |= PFLAG_MIN_HEIGHT_IS_SET; mYogaNode.setMinHeight(minHeight); return this; } @Override public InternalNode minHeightPercent(float percent) { mPrivateFlags |= PFLAG_MIN_HEIGHT_IS_SET; mYogaNode.setMinHeightPercent(percent); return this; } @Override public InternalNode minHeightAttr(@AttrRes int resId, @DimenRes int defaultResId) { return minHeightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode minHeightAttr(@AttrRes int resId) { return minHeightAttr(resId, 0); } @Override public InternalNode minHeightRes(@DimenRes int resId) { return minHeightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode minHeightDip(@Dimension(unit = DP) int minHeight) { return minHeightPx(mResourceResolver.dipsToPixels(minHeight)); } @Override public InternalNode maxHeightPx(@Px int maxHeight) { mPrivateFlags |= PFLAG_MAX_HEIGHT_IS_SET; mYogaNode.setMaxHeight(maxHeight); return this; } @Override public InternalNode maxHeightPercent(float percent) { mPrivateFlags |= PFLAG_MAX_HEIGHT_IS_SET; mYogaNode.setMaxHeightPercent(percent); return this; } @Override public InternalNode maxHeightAttr(@AttrRes int resId, @DimenRes int defaultResId) { return maxHeightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode maxHeightAttr(@AttrRes int resId) { return maxHeightAttr(resId, 0); } @Override public InternalNode maxHeightRes(@DimenRes int resId) { return maxHeightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode maxHeightDip(@Dimension(unit = DP) int maxHeight) { return maxHeightPx(mResourceResolver.dipsToPixels(maxHeight)); } @Override public InternalNode aspectRatio(float aspectRatio) { mPrivateFlags |= PFLAG_ASPECT_RATIO_IS_SET; if (mYogaNode instanceof YogaNode) { ((YogaNode) mYogaNode).setAspectRatio(aspectRatio); return this; } else { throw new IllegalStateException("Aspect ration requires using YogaNode not YogaNodeDEPRECATED"); } } private boolean shouldApplyTouchExpansion() { return mTouchExpansion != null && mNodeInfo != null && mNodeInfo.hasTouchEventHandlers(); } boolean hasTouchExpansion() { return ((mPrivateFlags & PFLAG_TOUCH_EXPANSION_IS_SET) != 0L); } Spacing getTouchExpansion() { return mTouchExpansion; } int getTouchExpansionLeft() { if (!shouldApplyTouchExpansion()) { return 0; } if (YogaConstants.isUndefined(mResolvedTouchExpansionLeft)) { mResolvedTouchExpansionLeft = resolveHorizontalSpacing(mTouchExpansion, Spacing.LEFT); } return FastMath.round(mResolvedTouchExpansionLeft); } int getTouchExpansionTop() { if (!shouldApplyTouchExpansion()) { return 0; } return FastMath.round(mTouchExpansion.get(Spacing.TOP)); } int getTouchExpansionRight() { if (!shouldApplyTouchExpansion()) { return 0; } if (YogaConstants.isUndefined(mResolvedTouchExpansionRight)) { mResolvedTouchExpansionRight = resolveHorizontalSpacing(mTouchExpansion, Spacing.RIGHT); } return FastMath.round(mResolvedTouchExpansionRight); } int getTouchExpansionBottom() { if (!shouldApplyTouchExpansion()) { return 0; } return FastMath.round(mTouchExpansion.get(Spacing.BOTTOM)); } @Override public 
InternalNode touchExpansionPx(YogaEdge edge, @Px int touchExpansion) { if (mTouchExpansion == null) { mTouchExpansion = ComponentsPools.acquireSpacing(); } mPrivateFlags |= PFLAG_TOUCH_EXPANSION_IS_SET; mTouchExpansion.set(edge.intValue(), touchExpansion); return this; } @Override public InternalNode touchExpansionAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return touchExpansionPx( edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode touchExpansionAttr( YogaEdge edge, @AttrRes int resId) { return touchExpansionAttr(edge, resId, 0); } @Override public InternalNode touchExpansionRes(YogaEdge edge, @DimenRes int resId) { return touchExpansionPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode touchExpansionDip( YogaEdge edge, @Dimension(unit = DP) int touchExpansion) { return touchExpansionPx(edge, mResourceResolver.dipsToPixels(touchExpansion)); } @Override public InternalNode child(ComponentLayout child) { if (child != null && child != NULL_LAYOUT) { addChildAt((InternalNode) child, mYogaNode.getChildCount()); } return this; } @Override public InternalNode child(ComponentLayout.Builder child) { if (child != null && child != NULL_LAYOUT) { child(child.build()); } return this; } @Override public InternalNode child(Component<?> child) { if (child != null) { child(Layout.create(mComponentContext, child).flexShrink(0).flexShrink(0).flexShrink(0)); } return this; } @Override public InternalNode child(Component.Builder<?> child) { if (child != null) { child(child.build()); } return this; } @Override public InternalNode background(Reference<? extends Drawable> background) { mPrivateFlags |= PFLAG_BACKGROUND_IS_SET; mBackground = background; setPaddingFromDrawableReference(background); return this; } @Override public InternalNode background(Reference.Builder<? extends Drawable> builder) { return background(builder.build()); } @Override public InternalNode backgroundAttr(@AttrRes int resId, @DrawableRes int defaultResId) { return backgroundRes(mResourceResolver.resolveResIdAttr(resId, defaultResId)); } @Override public InternalNode backgroundAttr(@AttrRes int resId) { return backgroundAttr(resId, 0); } @Override public InternalNode backgroundRes(@DrawableRes int resId) { if (resId == 0) { return background((Reference<Drawable>) null); } return background( ResourceDrawableReference.create(mComponentContext) .resId(resId) .build()); } @Override public InternalNode backgroundColor(@ColorInt int backgroundColor) { return background( ColorDrawableReference.create(mComponentContext) .color(backgroundColor) .build()); } @Override public InternalNode foreground(Reference<? extends Drawable> foreground) { mPrivateFlags |= PFLAG_FOREGROUND_IS_SET; mForeground = foreground; return this; } @Override public InternalNode foreground(Reference.Builder<? 
extends Drawable> builder) { return foreground(builder.build()); } @Override public InternalNode foregroundAttr(@AttrRes int resId, @DrawableRes int defaultResId) { return foregroundRes(mResourceResolver.resolveResIdAttr(resId, defaultResId)); } @Override public InternalNode foregroundAttr(@AttrRes int resId) { return foregroundAttr(resId, 0); } @Override public InternalNode foregroundRes(@DrawableRes int resId) { if (resId == 0) { return foreground((Reference<Drawable>) null); } return foreground( ResourceDrawableReference.create(mComponentContext) .resId(resId) .build()); } @Override public InternalNode foregroundColor(@ColorInt int foregroundColor) { return foreground( ColorDrawableReference.create(mComponentContext) .color(foregroundColor) .build()); } @Override public InternalNode wrapInView() { mForceViewWrapping = true; return this; } boolean isForceViewWrapping() { return mForceViewWrapping; } @Override public InternalNode clickHandler(EventHandler clickHandler) { getOrCreateNodeInfo().setClickHandler(clickHandler); return this; } @Override public InternalNode longClickHandler(EventHandler longClickHandler) { getOrCreateNodeInfo().setLongClickHandler(longClickHandler); return this; } @Override public InternalNode touchHandler(EventHandler touchHandler) { getOrCreateNodeInfo().setTouchHandler(touchHandler); return this; } @Override public ContainerBuilder focusable(boolean isFocusable) { getOrCreateNodeInfo().setFocusable(isFocusable); return this; } @Override public InternalNode visibleHandler(EventHandler visibleHandler) { mPrivateFlags |= PFLAG_VISIBLE_HANDLER_IS_SET; mVisibleHandler = visibleHandler; return this; } EventHandler getVisibleHandler() { return mVisibleHandler; } @Override public InternalNode focusedHandler(EventHandler focusedHandler) { mPrivateFlags |= PFLAG_FOCUSED_HANDLER_IS_SET; mFocusedHandler = focusedHandler; return this; } EventHandler getFocusedHandler() { return mFocusedHandler; } @Override public InternalNode fullImpressionHandler(EventHandler fullImpressionHandler) { mPrivateFlags |= PFLAG_FULL_IMPRESSION_HANDLER_IS_SET; mFullImpressionHandler = fullImpressionHandler; return this; } EventHandler getFullImpressionHandler() { return mFullImpressionHandler; } @Override public InternalNode invisibleHandler(EventHandler invisibleHandler) { mPrivateFlags |= PFLAG_INVISIBLE_HANDLER_IS_SET; mInvisibleHandler = invisibleHandler; return this; } EventHandler getInvisibleHandler() { return mInvisibleHandler; } @Override public InternalNode contentDescription(CharSequence contentDescription) { getOrCreateNodeInfo().setContentDescription(contentDescription); return this; } @Override public InternalNode contentDescription(@StringRes int stringId) { return contentDescription(mResources.getString(stringId)); } @Override public InternalNode contentDescription(@StringRes int stringId, Object... 
formatArgs) { return contentDescription(mResources.getString(stringId, formatArgs)); } @Override public InternalNode viewTag(Object viewTag) { getOrCreateNodeInfo().setViewTag(viewTag); return this; } @Override public InternalNode viewTags(SparseArray<Object> viewTags) { getOrCreateNodeInfo().setViewTags(viewTags); return this; } @Override public InternalNode testKey(String testKey) { mTestKey = testKey; return this; } @Override public InternalNode dispatchPopulateAccessibilityEventHandler( EventHandler<DispatchPopulateAccessibilityEventEvent> dispatchPopulateAccessibilityEventHandler) { getOrCreateNodeInfo().setDispatchPopulateAccessibilityEventHandler( dispatchPopulateAccessibilityEventHandler); return this; } @Override public InternalNode onInitializeAccessibilityEventHandler( EventHandler<OnInitializeAccessibilityEventEvent> onInitializeAccessibilityEventHandler) { getOrCreateNodeInfo().setOnInitializeAccessibilityEventHandler( onInitializeAccessibilityEventHandler); return this; } @Override public InternalNode onInitializeAccessibilityNodeInfoHandler( EventHandler<OnInitializeAccessibilityNodeInfoEvent> onInitializeAccessibilityNodeInfoHandler) { getOrCreateNodeInfo().setOnInitializeAccessibilityNodeInfoHandler( onInitializeAccessibilityNodeInfoHandler); return this; } @Override public InternalNode onPopulateAccessibilityEventHandler( EventHandler<OnPopulateAccessibilityEventEvent> onPopulateAccessibilityEventHandler) { getOrCreateNodeInfo().setOnPopulateAccessibilityEventHandler( onPopulateAccessibilityEventHandler); return this; } @Override public InternalNode onRequestSendAccessibilityEventHandler( EventHandler<OnRequestSendAccessibilityEventEvent> onRequestSendAccessibilityEventHandler) { getOrCreateNodeInfo().setOnRequestSendAccessibilityEventHandler( onRequestSendAccessibilityEventHandler); return this; } @Override public InternalNode performAccessibilityActionHandler( EventHandler<PerformAccessibilityActionEvent> performAccessibilityActionHandler) { getOrCreateNodeInfo().setPerformAccessibilityActionHandler(performAccessibilityActionHandler); return this; } @Override public InternalNode sendAccessibilityEventHandler( EventHandler<SendAccessibilityEventEvent> sendAccessibilityEventHandler) { getOrCreateNodeInfo().setSendAccessibilityEventHandler(sendAccessibilityEventHandler); return this; } @Override public InternalNode sendAccessibilityEventUncheckedHandler( EventHandler<SendAccessibilityEventUncheckedEvent> sendAccessibilityEventUncheckedHandler) { getOrCreateNodeInfo().setSendAccessibilityEventUncheckedHandler( sendAccessibilityEventUncheckedHandler); return this; } @Override public ContainerBuilder transitionKey(String key) { if (SDK_INT >= ICE_CREAM_SANDWICH) { mPrivateFlags |= PFLAG_TRANSITION_KEY_IS_SET; mTransitionKey = key; wrapInView(); } return this; } String getTransitionKey() { return mTransitionKey; } /** * A unique identifier which may be set for retrieving a component and its bounds when testing. 
*/ String getTestKey() { return mTestKey; } void setMeasureFunction(YogaMeasureFunction measureFunction) { mYogaNode.setMeasureFunction(measureFunction); } void setBaselineFunction(YogaBaselineFunction baselineFunction) { // YogaNode is the only version of YogaNodeAPI with this support; if (mYogaNode instanceof YogaNode) { mYogaNode.setBaselineFunction(baselineFunction); } } boolean hasNewLayout() { return mYogaNode.hasNewLayout(); } void markLayoutSeen() { mYogaNode.markLayoutSeen(); } float getStyleWidth() { return mYogaNode.getWidth().value; } float getMinWidth() { return mYogaNode.getMinWidth().value; } float getMaxWidth() { return mYogaNode.getMaxWidth().value; } float getStyleHeight() { return mYogaNode.getHeight().value; } float getMinHeight() { return mYogaNode.getMinHeight().value; } float getMaxHeight() { return mYogaNode.getMaxHeight().value; } void calculateLayout(float width, float height) { final ComponentTree tree = mComponentContext == null ? null : mComponentContext.getComponentTree(); final ComponentsStethoManager stethoManager = tree == null ? null : tree.getStethoManager(); if (stethoManager != null) { applyOverridesRecursive(stethoManager, this); } mYogaNode.calculateLayout(width, height); } private static void applyOverridesRecursive( ComponentsStethoManager stethoManager, InternalNode node) { stethoManager.applyOverrides(node); for (int i = 0, count = node.getChildCount(); i < count; i++) { applyOverridesRecursive(stethoManager, node.getChildAt(i)); } if (node.hasNestedTree()) { applyOverridesRecursive(stethoManager, node.getNestedTree()); } } void calculateLayout() { calculateLayout(YogaConstants.UNDEFINED, YogaConstants.UNDEFINED); } int getChildCount() { return mYogaNode.getChildCount(); } com.facebook.yoga.YogaDirection getStyleDirection() { return mYogaNode.getStyleDirection(); } InternalNode getChildAt(int index) { if (mYogaNode.getChildAt(index) == null) { return null; } return (InternalNode) mYogaNode.getChildAt(index).getData(); } int getChildIndex(InternalNode child) { for (int i = 0, count = mYogaNode.getChildCount(); i < count; i++) { if (mYogaNode.getChildAt(i) == child.mYogaNode) { return i; } } return -1; } InternalNode getParent() { if (mYogaNode == null || mYogaNode.getParent() == null) { return null; } return (InternalNode) mYogaNode.getParent().getData(); } void addChildAt(InternalNode child, int index) { mYogaNode.addChildAt(child.mYogaNode, index); } InternalNode removeChildAt(int index) { return (InternalNode) mYogaNode.removeChildAt(index).getData(); } @Override public ComponentLayout build() { return this; } private float resolveHorizontalSpacing(Spacing spacing, int index) { final boolean isRtl = (mYogaNode.getLayoutDirection() == YogaDirection.RTL); final int resolvedIndex; switch (index) { case Spacing.LEFT: resolvedIndex = (isRtl ? Spacing.END : Spacing.START); break; case Spacing.RIGHT: resolvedIndex = (isRtl ? 
Spacing.START : Spacing.END); break; default: throw new IllegalArgumentException("Not an horizontal padding index: " + index); } float result = spacing.getRaw(resolvedIndex); if (YogaConstants.isUndefined(result)) { result = spacing.get(index); } return result; } ComponentContext getContext() { return mComponentContext; } Component getComponent() { return mComponent; } int getBorderColor() { return mBorderColor; } boolean shouldDrawBorders() { return mBorderColor != Color.TRANSPARENT && (mYogaNode.getLayoutBorder(LEFT) != 0 || mYogaNode.getLayoutBorder(TOP) != 0 || mYogaNode.getLayoutBorder(RIGHT) != 0 || mYogaNode.getLayoutBorder(BOTTOM) != 0); } void setComponent(Component component) { mComponent = component; } boolean hasNestedTree() { return mNestedTree != null; } @Nullable InternalNode getNestedTree() { return mNestedTree; } InternalNode getNestedTreeHolder() { return mNestedTreeHolder; } /** * Set the nested tree before measuring it in order to transfer over important information * such as layout direction needed during measurement. */ void setNestedTree(InternalNode nestedTree) { nestedTree.mNestedTreeHolder = this; mNestedTree = nestedTree; } NodeInfo getNodeInfo() { return mNodeInfo; } void copyInto(InternalNode node) { if (mNodeInfo != null) { if (node.mNodeInfo == null) { node.mNodeInfo = mNodeInfo.acquireRef(); } else { node.mNodeInfo.updateWith(mNodeInfo); } } if ((node.mPrivateFlags & PFLAG_LAYOUT_DIRECTION_IS_SET) == 0L || node.getResolvedLayoutDirection() == YogaDirection.INHERIT) { node.layoutDirection(getResolvedLayoutDirection()); } if ((node.mPrivateFlags & PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET) == 0L || node.mImportantForAccessibility == ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO) { node.mImportantForAccessibility = mImportantForAccessibility; } if ((mPrivateFlags & PFLAG_DUPLICATE_PARENT_STATE_IS_SET) != 0L) { node.mDuplicateParentState = mDuplicateParentState; } if ((mPrivateFlags & PFLAG_BACKGROUND_IS_SET) != 0L) { node.mBackground = mBackground; } if ((mPrivateFlags & PFLAG_FOREGROUND_IS_SET) != 0L) { node.mForeground = mForeground; } if (mForceViewWrapping) { node.mForceViewWrapping = true; } if ((mPrivateFlags & PFLAG_VISIBLE_HANDLER_IS_SET) != 0L) { node.mVisibleHandler = mVisibleHandler; } if ((mPrivateFlags & PFLAG_FOCUSED_HANDLER_IS_SET) != 0L) { node.mFocusedHandler = mFocusedHandler; } if ((mPrivateFlags & PFLAG_FULL_IMPRESSION_HANDLER_IS_SET) != 0L) { node.mFullImpressionHandler = mFullImpressionHandler; } if ((mPrivateFlags & PFLAG_INVISIBLE_HANDLER_IS_SET) != 0L) { node.mInvisibleHandler = mInvisibleHandler; } if (mTestKey != null) { node.mTestKey = mTestKey; } if ((mPrivateFlags & PFLAG_PADDING_IS_SET) != 0L) { if (mNestedTreePadding == null) { throw new IllegalStateException("copyInto() must be used when resolving a nestedTree. 
" + "If padding was set on the holder node, we must have a mNestedTreePadding instance"); } final YogaNodeAPI yogaNode = node.mYogaNode; node.mPrivateFlags |= PFLAG_PADDING_IS_SET; if (isPaddingPercent(LEFT)) { yogaNode.setPaddingPercent(LEFT, mNestedTreePadding.getRaw(Spacing.LEFT)); } else { yogaNode.setPadding(LEFT, mNestedTreePadding.getRaw(Spacing.LEFT)); } if (isPaddingPercent(TOP)) { yogaNode.setPaddingPercent(TOP, mNestedTreePadding.getRaw(Spacing.TOP)); } else { yogaNode.setPadding(TOP, mNestedTreePadding.getRaw(Spacing.TOP)); } if (isPaddingPercent(RIGHT)) { yogaNode.setPaddingPercent(RIGHT, mNestedTreePadding.getRaw(Spacing.RIGHT)); } else { yogaNode.setPadding(RIGHT, mNestedTreePadding.getRaw(Spacing.RIGHT)); } if (isPaddingPercent(BOTTOM)) { yogaNode.setPaddingPercent(BOTTOM, mNestedTreePadding.getRaw(Spacing.BOTTOM)); } else { yogaNode.setPadding(BOTTOM, mNestedTreePadding.getRaw(Spacing.BOTTOM)); } if (isPaddingPercent(VERTICAL)) { yogaNode.setPaddingPercent(VERTICAL, mNestedTreePadding.getRaw(Spacing.VERTICAL)); } else { yogaNode.setPadding(VERTICAL, mNestedTreePadding.getRaw(Spacing.VERTICAL)); } if (isPaddingPercent(HORIZONTAL)) { yogaNode.setPaddingPercent(HORIZONTAL, mNestedTreePadding.getRaw(Spacing.HORIZONTAL)); } else { yogaNode.setPadding(HORIZONTAL, mNestedTreePadding.getRaw(Spacing.HORIZONTAL)); } if (isPaddingPercent(START)) { yogaNode.setPaddingPercent(START, mNestedTreePadding.getRaw(Spacing.START)); } else { yogaNode.setPadding(START, mNestedTreePadding.getRaw(Spacing.START)); } if (isPaddingPercent(END)) { yogaNode.setPaddingPercent(END, mNestedTreePadding.getRaw(Spacing.END)); } else { yogaNode.setPadding(END, mNestedTreePadding.getRaw(Spacing.END)); } if (isPaddingPercent(ALL)) { yogaNode.setPaddingPercent(ALL, mNestedTreePadding.getRaw(Spacing.ALL)); } else { yogaNode.setPadding(ALL, mNestedTreePadding.getRaw(Spacing.ALL)); } } if ((mPrivateFlags & PFLAG_BORDER_WIDTH_IS_SET) != 0L) { if (mNestedTreeBorderWidth == null) { throw new IllegalStateException("copyInto() must be used when resolving a nestedTree. 
" + "If border width was set on the holder node, we must have a mNestedTreeBorderWidth " + "instance"); } final YogaNodeAPI yogaNode = node.mYogaNode; node.mPrivateFlags |= PFLAG_BORDER_WIDTH_IS_SET; yogaNode.setBorder(LEFT, mNestedTreeBorderWidth.getRaw(Spacing.LEFT)); yogaNode.setBorder(TOP, mNestedTreeBorderWidth.getRaw(Spacing.TOP)); yogaNode.setBorder(RIGHT, mNestedTreeBorderWidth.getRaw(Spacing.RIGHT)); yogaNode.setBorder(BOTTOM, mNestedTreeBorderWidth.getRaw(Spacing.BOTTOM)); yogaNode.setBorder(VERTICAL, mNestedTreeBorderWidth.getRaw(Spacing.VERTICAL)); yogaNode.setBorder(HORIZONTAL, mNestedTreeBorderWidth.getRaw(Spacing.HORIZONTAL)); yogaNode.setBorder(START, mNestedTreeBorderWidth.getRaw(Spacing.START)); yogaNode.setBorder(END, mNestedTreeBorderWidth.getRaw(Spacing.END)); yogaNode.setBorder(ALL, mNestedTreeBorderWidth.getRaw(Spacing.ALL)); } if ((mPrivateFlags & PFLAG_TRANSITION_KEY_IS_SET) != 0L) { node.mTransitionKey = mTransitionKey; } if ((mPrivateFlags & PFLAG_BORDER_COLOR_IS_SET) != 0L) { node.mBorderColor = mBorderColor; } } void setStyleWidthFromSpec(int widthSpec) { switch (SizeSpec.getMode(widthSpec)) { case SizeSpec.UNSPECIFIED: mYogaNode.setWidth(YogaConstants.UNDEFINED); break; case SizeSpec.AT_MOST: mYogaNode.setMaxWidth(SizeSpec.getSize(widthSpec)); break; case SizeSpec.EXACTLY: mYogaNode.setWidth(SizeSpec.getSize(widthSpec)); break; } } void setStyleHeightFromSpec(int heightSpec) { switch (SizeSpec.getMode(heightSpec)) { case SizeSpec.UNSPECIFIED: mYogaNode.setHeight(YogaConstants.UNDEFINED); break; case SizeSpec.AT_MOST: mYogaNode.setMaxHeight(SizeSpec.getSize(heightSpec)); break; case SizeSpec.EXACTLY: mYogaNode.setHeight(SizeSpec.getSize(heightSpec)); break; } } int getImportantForAccessibility() { return mImportantForAccessibility; } boolean isDuplicateParentStateEnabled() { return mDuplicateParentState; } void applyAttributes(TypedArray a) { for (int i = 0, size = a.getIndexCount(); i < size; i++) { final int attr = a.getIndex(i); if (attr == R.styleable.ComponentLayout_android_layout_width) { int width = a.getLayoutDimension(attr, -1); // We don't support WRAP_CONTENT or MATCH_PARENT so no-op for them if (width >= 0) { widthPx(width); } } else if (attr == R.styleable.ComponentLayout_android_layout_height) { int height = a.getLayoutDimension(attr, -1); // We don't support WRAP_CONTENT or MATCH_PARENT so no-op for them if (height >= 0) { heightPx(height); } } else if (attr == R.styleable.ComponentLayout_android_paddingLeft) { paddingPx(LEFT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingTop) { paddingPx(TOP, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingRight) { paddingPx(RIGHT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingBottom) { paddingPx(BOTTOM, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingStart && SUPPORTS_RTL) { paddingPx(START, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingEnd && SUPPORTS_RTL) { paddingPx(END, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_padding) { paddingPx(ALL, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginLeft) { marginPx(LEFT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginTop) { marginPx(TOP, 
a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginRight) { marginPx(RIGHT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginBottom) { marginPx(BOTTOM, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginStart && SUPPORTS_RTL) { marginPx(START, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginEnd && SUPPORTS_RTL) { marginPx(END, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_margin) { marginPx(ALL, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_importantForAccessibility && SDK_INT >= JELLY_BEAN) { importantForAccessibility(a.getInt(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_duplicateParentState) { duplicateParentState(a.getBoolean(attr, false)); } else if (attr == R.styleable.ComponentLayout_android_background) { if (TypedArrayUtils.isColorAttribute(a, R.styleable.ComponentLayout_android_background)) { backgroundColor(a.getColor(attr, 0)); } else { backgroundRes(a.getResourceId(attr, -1)); } } else if (attr == R.styleable.ComponentLayout_android_foreground) { if (TypedArrayUtils.isColorAttribute(a, R.styleable.ComponentLayout_android_foreground)) { foregroundColor(a.getColor(attr, 0)); } else { foregroundRes(a.getResourceId(attr, -1)); } } else if (attr == R.styleable.ComponentLayout_android_contentDescription) { contentDescription(a.getString(attr)); } else if (attr == R.styleable.ComponentLayout_flex_direction) { flexDirection(YogaFlexDirection.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_wrap) { wrap(YogaWrap.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_justifyContent) { justifyContent(YogaJustify.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_alignItems) { alignItems(YogaAlign.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_alignSelf) { alignSelf(YogaAlign.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_positionType) { positionType(YogaPositionType.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex) { final float flex = a.getFloat(attr, -1); if (flex >= 0f) { flex(flex); } } else if (attr == R.styleable.ComponentLayout_flex_left) { positionPx(LEFT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_flex_top) { positionPx(TOP, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_flex_right) { positionPx(RIGHT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_flex_bottom) { positionPx(BOTTOM, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_flex_layoutDirection) { final int layoutDirection = a.getInteger(attr, -1); layoutDirection(YogaDirection.fromInt(layoutDirection)); } } } /** * Reset all attributes to default values. Intended to facilitate recycling. 
   */
  void release() {
    if (mYogaNode.getParent() != null || mYogaNode.getChildCount() > 0) {
      throw new IllegalStateException("You should not free an attached InternalNode");
    }
    ComponentsPools.release(mYogaNode);
    mYogaNode = null;

    mResourceResolver.internalRelease();

    mResolvedTouchExpansionLeft = YogaConstants.UNDEFINED;
    mResolvedTouchExpansionRight = YogaConstants.UNDEFINED;
    mResolvedX = YogaConstants.UNDEFINED;
    mResolvedY = YogaConstants.UNDEFINED;
    mResolvedWidth = YogaConstants.UNDEFINED;
    mResolvedHeight = YogaConstants.UNDEFINED;

    mComponentContext = null;
    mResources = null;
    mComponent = null;
    mNestedTree = null;
    mNestedTreeHolder = null;

    if (mNodeInfo != null) {
      mNodeInfo.release();
      mNodeInfo = null;
    }
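The copyInto() logic earlier in this file keys every transfer on the PFLAG_* bits stored in mPrivateFlags, so only properties that were explicitly set on the nested-tree holder are copied onto the resolved node. The stand-alone class below is an illustrative sketch only — it is not part of the committed file, and its names (FlagTrackingStyle, PFLAG_WIDTH_IS_SET, and so on) are invented for the example — but it shows that bit-flag "explicitly set" pattern in miniature.

final class FlagTrackingStyle {
  // One bit per property, mirroring the PFLAG_* constants in InternalNode.
  private static final long PFLAG_WIDTH_IS_SET = 1L << 0;
  private static final long PFLAG_PADDING_IS_SET = 1L << 1;

  private long mPrivateFlags;
  private int mWidthPx;
  private int mPaddingPx;

  FlagTrackingStyle widthPx(int widthPx) {
    mPrivateFlags |= PFLAG_WIDTH_IS_SET;   // remember that width was set explicitly
    mWidthPx = widthPx;
    return this;
  }

  FlagTrackingStyle paddingPx(int paddingPx) {
    mPrivateFlags |= PFLAG_PADDING_IS_SET; // remember that padding was set explicitly
    mPaddingPx = paddingPx;
    return this;
  }

  // Copies only the values whose flag bit is set, the same check copyInto() performs.
  void copyInto(FlagTrackingStyle target) {
    if ((mPrivateFlags & PFLAG_WIDTH_IS_SET) != 0L) {
      target.widthPx(mWidthPx);
    }
    if ((mPrivateFlags & PFLAG_PADDING_IS_SET) != 0L) {
      target.paddingPx(mPaddingPx);
    }
  }

  public static void main(String[] args) {
    FlagTrackingStyle holder = new FlagTrackingStyle().paddingPx(16); // width left unset
    FlagTrackingStyle resolved = new FlagTrackingStyle();
    holder.copyInto(resolved); // only padding is transferred
    System.out.println("padding copied: " + resolved.mPaddingPx);
  }
}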
src/main/java/com/facebook/components/InternalNode.java
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.litho; import javax.annotation.Nullable; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Color; import android.graphics.drawable.Drawable; import android.graphics.Rect; import android.support.annotation.AttrRes; import android.support.annotation.ColorInt; import android.support.annotation.DimenRes; import android.support.annotation.Dimension; import android.support.annotation.DrawableRes; import android.support.annotation.Px; import android.support.annotation.StringRes; import android.support.v4.view.ViewCompat; import android.text.TextUtils; import android.util.SparseArray; import com.facebook.R; import com.facebook.litho.config.ComponentsConfiguration; import com.facebook.litho.reference.ColorDrawableReference; import com.facebook.litho.reference.Reference; import com.facebook.litho.reference.ResourceDrawableReference; import com.facebook.infer.annotation.ThreadConfined; import com.facebook.yoga.YogaAlign; import com.facebook.yoga.YogaBaselineFunction; import com.facebook.yoga.YogaFlexDirection; import com.facebook.yoga.YogaJustify; import com.facebook.yoga.YogaDirection; import com.facebook.yoga.YogaPositionType; import com.facebook.yoga.YogaWrap; import com.facebook.yoga.YogaEdge; import com.facebook.yoga.YogaConstants; import com.facebook.yoga.YogaMeasureFunction; import com.facebook.yoga.YogaNode; import com.facebook.yoga.YogaNodeAPI; import com.facebook.yoga.YogaOverflow; import com.facebook.yoga.Spacing; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH; import static android.os.Build.VERSION_CODES.JELLY_BEAN; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR1; import static android.support.annotation.Dimension.DP; import static com.facebook.litho.ComponentContext.NULL_LAYOUT; import static com.facebook.yoga.YogaEdge.ALL; import static com.facebook.yoga.YogaEdge.BOTTOM; import static com.facebook.yoga.YogaEdge.END; import static com.facebook.yoga.YogaEdge.HORIZONTAL; import static com.facebook.yoga.YogaEdge.LEFT; import static com.facebook.yoga.YogaEdge.RIGHT; import static com.facebook.yoga.YogaEdge.START; import static com.facebook.yoga.YogaEdge.TOP; import static com.facebook.yoga.YogaEdge.VERTICAL; /** * Internal class representing both a {@link ComponentLayout} and a * {@link com.facebook.litho.ComponentLayout.ContainerBuilder}. */ @ThreadConfined(ThreadConfined.ANY) class InternalNode implements ComponentLayout, ComponentLayout.ContainerBuilder { // Used to check whether or not the framework can use style IDs for // paddingStart/paddingEnd due to a bug in some Android devices. private static final boolean SUPPORTS_RTL = (SDK_INT >= JELLY_BEAN_MR1); // When this flag is set, layoutDirection style was explicitly set on this node. private static final long PFLAG_LAYOUT_DIRECTION_IS_SET = 1L << 0; // When this flag is set, alignSelf was explicitly set on this node. private static final long PFLAG_ALIGN_SELF_IS_SET = 1L << 1; // When this flag is set, position type was explicitly set on this node. 
private static final long PFLAG_POSITION_TYPE_IS_SET = 1L << 2; // When this flag is set, flex was explicitly set on this node. private static final long PFLAG_FLEX_IS_SET = 1L << 3; // When this flag is set, flex grow was explicitly set on this node. private static final long PFLAG_FLEX_GROW_IS_SET = 1L << 4; // When this flag is set, flex shrink was explicitly set on this node. private static final long PFLAG_FLEX_SHRINK_IS_SET = 1L << 5; // When this flag is set, flex basis was explicitly set on this node. private static final long PFLAG_FLEX_BASIS_IS_SET = 1L << 6; // When this flag is set, importantForAccessibility was explicitly set on this node. private static final long PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET = 1L << 7; // When this flag is set, duplicateParentState was explicitly set on this node. private static final long PFLAG_DUPLICATE_PARENT_STATE_IS_SET = 1L << 8; // When this flag is set, margin was explicitly set on this node. private static final long PFLAG_MARGIN_IS_SET = 1L << 9; // When this flag is set, padding was explicitly set on this node. private static final long PFLAG_PADDING_IS_SET = 1L << 10; // When this flag is set, position was explicitly set on this node. private static final long PFLAG_POSITION_IS_SET = 1L << 11; // When this flag is set, width was explicitly set on this node. private static final long PFLAG_WIDTH_IS_SET = 1L << 12; // When this flag is set, minWidth was explicitly set on this node. private static final long PFLAG_MIN_WIDTH_IS_SET = 1L << 13; // When this flag is set, maxWidth was explicitly set on this node. private static final long PFLAG_MAX_WIDTH_IS_SET = 1L << 14; // When this flag is set, height was explicitly set on this node. private static final long PFLAG_HEIGHT_IS_SET = 1L << 15; // When this flag is set, minHeight was explicitly set on this node. private static final long PFLAG_MIN_HEIGHT_IS_SET = 1L << 16; // When this flag is set, maxHeight was explicitly set on this node. private static final long PFLAG_MAX_HEIGHT_IS_SET = 1L << 17; // When this flag is set, background was explicitly set on this node. private static final long PFLAG_BACKGROUND_IS_SET = 1L << 18; // When this flag is set, foreground was explicitly set on this node. private static final long PFLAG_FOREGROUND_IS_SET = 1L << 19; // When this flag is set, visibleHandler was explicitly set on this node. private static final long PFLAG_VISIBLE_HANDLER_IS_SET = 1L << 20; // When this flag is set, focusedHandler was explicitly set on this node. private static final long PFLAG_FOCUSED_HANDLER_IS_SET = 1L << 21; // When this flag is set, fullImpressionHandler was explicitly set on this node. private static final long PFLAG_FULL_IMPRESSION_HANDLER_IS_SET = 1L << 22; // When this flag is set, invisibleHandler was explicitly set on this node. private static final long PFLAG_INVISIBLE_HANDLER_IS_SET = 1L << 23; // When this flag is set, touch expansion was explicitly set on this node. private static final long PFLAG_TOUCH_EXPANSION_IS_SET = 1L << 24; // When this flag is set, border width was explicitly set on this node. private static final long PFLAG_BORDER_WIDTH_IS_SET = 1L << 25; // When this flag is set, aspectRatio was explicitly set on this node. private static final long PFLAG_ASPECT_RATIO_IS_SET = 1L << 26; // When this flag is set, transitionKey was explicitly set on this node. private static final long PFLAG_TRANSITION_KEY_IS_SET = 1L << 27; // When this flag is set, border color was explicitly set on this node. 
private static final long PFLAG_BORDER_COLOR_IS_SET = 1L << 28; private final ResourceResolver mResourceResolver = new ResourceResolver(); YogaNodeAPI mYogaNode; private ComponentContext mComponentContext; private Resources mResources; private Component mComponent; private int mImportantForAccessibility = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO; private boolean mDuplicateParentState; private boolean mIsNestedTreeHolder; private InternalNode mNestedTree; private InternalNode mNestedTreeHolder; private long mPrivateFlags; private Reference<? extends Drawable> mBackground; private Reference<? extends Drawable> mForeground; private int mBorderColor = Color.TRANSPARENT; private NodeInfo mNodeInfo; private boolean mForceViewWrapping; private String mTransitionKey; private EventHandler mVisibleHandler; private EventHandler mFocusedHandler; private EventHandler mFullImpressionHandler; private EventHandler mInvisibleHandler; private String mTestKey; private Spacing mTouchExpansion; private Spacing mNestedTreePadding; private Spacing mNestedTreeBorderWidth; private boolean[] mIsPaddingPercent; private float mResolvedTouchExpansionLeft = YogaConstants.UNDEFINED; private float mResolvedTouchExpansionRight = YogaConstants.UNDEFINED; private float mResolvedX = YogaConstants.UNDEFINED; private float mResolvedY = YogaConstants.UNDEFINED; private float mResolvedWidth = YogaConstants.UNDEFINED; private float mResolvedHeight = YogaConstants.UNDEFINED; private int mLastWidthSpec = DiffNode.UNSPECIFIED; private int mLastHeightSpec = DiffNode.UNSPECIFIED; private float mLastMeasuredWidth = DiffNode.UNSPECIFIED; private float mLastMeasuredHeight = DiffNode.UNSPECIFIED; private DiffNode mDiffNode; private boolean mCachedMeasuresValid; private TreeProps mPendingTreeProps; void init(YogaNodeAPI yogaNode, ComponentContext componentContext, Resources resources) { yogaNode.setData(this); yogaNode.setOverflow(YogaOverflow.HIDDEN); yogaNode.setMeasureFunction(null); // YogaNode is the only version of YogaNodeAPI with this support; if (yogaNode instanceof YogaNode) { yogaNode.setBaselineFunction(null); } mYogaNode = yogaNode; mComponentContext = componentContext; mResources = resources; mResourceResolver.init( mComponentContext, componentContext.getResourceCache()); } @Px @Override public int getX() { if (YogaConstants.isUndefined(mResolvedX)) { mResolvedX = mYogaNode.getLayoutX(); } return (int) mResolvedX; } @Px @Override public int getY() { if (YogaConstants.isUndefined(mResolvedY)) { mResolvedY = mYogaNode.getLayoutY(); } return (int) mResolvedY; } @Px @Override public int getWidth() { if (YogaConstants.isUndefined(mResolvedWidth)) { mResolvedWidth = mYogaNode.getLayoutWidth(); } return (int) mResolvedWidth; } @Px @Override public int getHeight() { if (YogaConstants.isUndefined(mResolvedHeight)) { mResolvedHeight = mYogaNode.getLayoutHeight(); } return (int) mResolvedHeight; } @Px @Override public int getPaddingLeft() { return FastMath.round(mYogaNode.getLayoutPadding(LEFT)); } @Px @Override public int getPaddingTop() { return FastMath.round(mYogaNode.getLayoutPadding(TOP)); } @Px @Override public int getPaddingRight() { return FastMath.round(mYogaNode.getLayoutPadding(RIGHT)); } @Px @Override public int getPaddingBottom() { return FastMath.round(mYogaNode.getLayoutPadding(BOTTOM)); } public Reference<? extends Drawable> getBackground() { return mBackground; } public Reference<? 
extends Drawable> getForeground() { return mForeground; } public void setCachedMeasuresValid(boolean valid) { mCachedMeasuresValid = valid; } public int getLastWidthSpec() { return mLastWidthSpec; } public void setLastWidthSpec(int widthSpec) { mLastWidthSpec = widthSpec; } public int getLastHeightSpec() { return mLastHeightSpec; } public void setLastHeightSpec(int heightSpec) { mLastHeightSpec = heightSpec; } public boolean hasVisibilityHandlers() { return mVisibleHandler != null || mFocusedHandler != null || mFullImpressionHandler != null || mInvisibleHandler != null; } /** * The last value the measure funcion associated with this node {@link Component} returned * for the width. This is used together with {@link InternalNode#getLastWidthSpec()} * to implement measure caching. */ float getLastMeasuredWidth() { return mLastMeasuredWidth; } /** * Sets the last value the measure funcion associated with this node {@link Component} returned * for the width. */ void setLastMeasuredWidth(float lastMeasuredWidth) { mLastMeasuredWidth = lastMeasuredWidth; } /** * The last value the measure funcion associated with this node {@link Component} returned * for the height. This is used together with {@link InternalNode#getLastHeightSpec()} * to implement measure caching. */ float getLastMeasuredHeight() { return mLastMeasuredHeight; } /** * Sets the last value the measure funcion associated with this node {@link Component} returned * for the height. */ void setLastMeasuredHeight(float lastMeasuredHeight) { mLastMeasuredHeight = lastMeasuredHeight; } DiffNode getDiffNode() { return mDiffNode; } boolean areCachedMeasuresValid() { return mCachedMeasuresValid; } void setDiffNode(DiffNode diffNode) { mDiffNode = diffNode; } /** * Mark this node as a nested tree root holder. */ void markIsNestedTreeHolder(TreeProps currentTreeProps) { mIsNestedTreeHolder = true; mPendingTreeProps = TreeProps.copy(currentTreeProps); } /** * @return Whether this node is holding a nested tree or not. The decision was made during * tree creation {@link ComponentLifecycle#createLayout(ComponentContext, Component, boolean)}. 
*/ boolean isNestedTreeHolder() { return mIsNestedTreeHolder; } @Override public YogaDirection getResolvedLayoutDirection() { return mYogaNode.getLayoutDirection(); } @Override public InternalNode layoutDirection(YogaDirection direction) { mPrivateFlags |= PFLAG_LAYOUT_DIRECTION_IS_SET; mYogaNode.setDirection(direction); return this; } @Override public InternalNode flexDirection(YogaFlexDirection direction) { mYogaNode.setFlexDirection(direction); return this; } @Override public InternalNode wrap(YogaWrap wrap) { mYogaNode.setWrap(wrap); return this; } @Override public InternalNode justifyContent(YogaJustify justifyContent) { mYogaNode.setJustifyContent(justifyContent); return this; } @Override public InternalNode alignItems(YogaAlign alignItems) { mYogaNode.setAlignItems(alignItems); return this; } @Override public InternalNode alignContent(YogaAlign alignContent) { mYogaNode.setAlignContent(alignContent); return this; } @Override public InternalNode alignSelf(YogaAlign alignSelf) { mPrivateFlags |= PFLAG_ALIGN_SELF_IS_SET; mYogaNode.setAlignSelf(alignSelf); return this; } @Override public InternalNode positionType(YogaPositionType positionType) { mPrivateFlags |= PFLAG_POSITION_TYPE_IS_SET; mYogaNode.setPositionType(positionType); return this; } @Override public InternalNode flex(float flex) { mPrivateFlags |= PFLAG_FLEX_IS_SET; mYogaNode.setFlex(flex); return this; } @Override public InternalNode flexGrow(float flexGrow) { mPrivateFlags |= PFLAG_FLEX_GROW_IS_SET; mYogaNode.setFlexGrow(flexGrow); return this; } @Override public InternalNode flexShrink(float flexShrink) { mPrivateFlags |= PFLAG_FLEX_SHRINK_IS_SET; mYogaNode.setFlexShrink(flexShrink); return this; } @Override public InternalNode flexBasisPx(@Px int flexBasis) { mPrivateFlags |= PFLAG_FLEX_BASIS_IS_SET; mYogaNode.setFlexBasis(flexBasis); return this; } @Override public InternalNode flexBasisPercent(float percent) { mPrivateFlags |= PFLAG_FLEX_BASIS_IS_SET; mYogaNode.setFlexBasisPercent(percent); return this; } @Override public InternalNode flexBasisAttr(@AttrRes int resId, @DimenRes int defaultResId) { return flexBasisPx(mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode flexBasisAttr(@AttrRes int resId) { return flexBasisAttr(resId, 0); } @Override public InternalNode flexBasisRes(@DimenRes int resId) { return flexBasisPx(mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode flexBasisDip(@Dimension(unit = DP) int flexBasis) { return flexBasisPx(mResourceResolver.dipsToPixels(flexBasis)); } @Override public InternalNode importantForAccessibility(int importantForAccessibility) { mPrivateFlags |= PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET; mImportantForAccessibility = importantForAccessibility; return this; } @Override public InternalNode duplicateParentState(boolean duplicateParentState) { mPrivateFlags |= PFLAG_DUPLICATE_PARENT_STATE_IS_SET; mDuplicateParentState = duplicateParentState; return this; } @Override public InternalNode marginPx(YogaEdge edge, @Px int margin) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMargin(edge, margin); return this; } @Override public InternalNode marginPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMarginPercent(edge, percent); return this; } @Override public InternalNode marginAuto(YogaEdge edge) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMarginAuto(edge); return this; } @Override public InternalNode marginAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int 
defaultResId) { return marginPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode marginAttr( YogaEdge edge, @AttrRes int resId) { return marginAttr(edge, resId, 0); } @Override public InternalNode marginRes(YogaEdge edge, @DimenRes int resId) { return marginPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode marginDip(YogaEdge edge, @Dimension(unit = DP) int margin) { return marginPx(edge, mResourceResolver.dipsToPixels(margin)); } @Override public InternalNode paddingPx(YogaEdge edge, @Px int padding) { mPrivateFlags |= PFLAG_PADDING_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreePadding == null) { mNestedTreePadding = ComponentsPools.acquireSpacing(); } mNestedTreePadding.set(edge.intValue(), padding); setIsPaddingPercent(edge, false); } else { mYogaNode.setPadding(edge, padding); } return this; } @Override public InternalNode paddingPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_PADDING_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreePadding == null) { mNestedTreePadding = ComponentsPools.acquireSpacing(); } mNestedTreePadding.set(edge.intValue(), percent); setIsPaddingPercent(edge, true); } else { mYogaNode.setPaddingPercent(edge, percent); } return this; } @Override public InternalNode paddingAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return paddingPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode paddingAttr( YogaEdge edge, @AttrRes int resId) { return paddingAttr(edge, resId, 0); } @Override public InternalNode paddingRes(YogaEdge edge, @DimenRes int resId) { return paddingPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode paddingDip(YogaEdge edge, @Dimension(unit = DP) int padding) { return paddingPx(edge, mResourceResolver.dipsToPixels(padding)); } @Override public InternalNode borderWidthPx(YogaEdge edge, @Px int borderWidth) { mPrivateFlags |= PFLAG_BORDER_WIDTH_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreeBorderWidth == null) { mNestedTreeBorderWidth = ComponentsPools.acquireSpacing(); } mNestedTreeBorderWidth.set(edge.intValue(), borderWidth); } else { mYogaNode.setBorder(edge, borderWidth); } return this; } @Override public InternalNode borderWidthAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return borderWidthPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode borderWidthAttr( YogaEdge edge, @AttrRes int resId) { return borderWidthAttr(edge, resId, 0); } @Override public InternalNode borderWidthRes(YogaEdge edge, @DimenRes int resId) { return borderWidthPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode borderWidthDip( YogaEdge edge, @Dimension(unit = DP) int borderWidth) { return borderWidthPx(edge, mResourceResolver.dipsToPixels(borderWidth)); } @Override public Builder borderColor(@ColorInt int borderColor) { mPrivateFlags |= PFLAG_BORDER_COLOR_IS_SET; mBorderColor = borderColor; return this; } @Override public InternalNode positionPx(YogaEdge edge, @Px int position) { mPrivateFlags |= PFLAG_POSITION_IS_SET; mYogaNode.setPosition(edge, position); return this; } @Override public InternalNode positionPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_POSITION_IS_SET; mYogaNode.setPositionPercent(edge, percent); return this; } @Override public InternalNode positionAttr( YogaEdge edge, @AttrRes int resId, @DimenRes 
int defaultResId) { return positionPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode positionAttr(YogaEdge edge, @AttrRes int resId) { return positionAttr(edge, resId, 0); } @Override public InternalNode positionRes(YogaEdge edge, @DimenRes int resId) { return positionPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode positionDip( YogaEdge edge, @Dimension(unit = DP) int position) { return positionPx(edge, mResourceResolver.dipsToPixels(position)); } @Override public InternalNode widthPx(@Px int width) { mPrivateFlags |= PFLAG_WIDTH_IS_SET; mYogaNode.setWidth(width); return this; } @Override public InternalNode widthPercent(float percent) { mPrivateFlags |= PFLAG_WIDTH_IS_SET; mYogaNode.setWidthPercent(percent); return this; } @Override public InternalNode widthRes(@DimenRes int resId) { return widthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode widthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return widthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode widthAttr(@AttrRes int resId) { return widthAttr(resId, 0); } @Override public InternalNode widthDip(@Dimension(unit = DP) int width) { return widthPx(mResourceResolver.dipsToPixels(width)); } @Override public InternalNode minWidthPx(@Px int minWidth) { mPrivateFlags |= PFLAG_MIN_WIDTH_IS_SET; mYogaNode.setMinWidth(minWidth); return this; } @Override public InternalNode minWidthPercent(float percent) { mPrivateFlags |= PFLAG_MIN_WIDTH_IS_SET; mYogaNode.setMinWidthPercent(percent); return this; } @Override public InternalNode minWidthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return minWidthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode minWidthAttr(@AttrRes int resId) { return minWidthAttr(resId, 0); } @Override public InternalNode minWidthRes(@DimenRes int resId) { return minWidthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode minWidthDip(@Dimension(unit = DP) int minWidth) { return minWidthPx(mResourceResolver.dipsToPixels(minWidth)); } @Override public InternalNode maxWidthPx(@Px int maxWidth) { mPrivateFlags |= PFLAG_MAX_WIDTH_IS_SET; mYogaNode.setMaxWidth(maxWidth); return this; } @Override public InternalNode maxWidthPercent(float percent) { mPrivateFlags |= PFLAG_MAX_WIDTH_IS_SET; mYogaNode.setMaxWidthPercent(percent); return this; } @Override public InternalNode maxWidthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return maxWidthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode maxWidthAttr(@AttrRes int resId) { return maxWidthAttr(resId, 0); } @Override public InternalNode maxWidthRes(@DimenRes int resId) { return maxWidthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode maxWidthDip(@Dimension(unit = DP) int maxWidth) { return maxWidthPx(mResourceResolver.dipsToPixels(maxWidth)); } @Override public InternalNode heightPx(@Px int height) { mPrivateFlags |= PFLAG_HEIGHT_IS_SET; mYogaNode.setHeight(height); return this; } @Override public InternalNode heightPercent(float percent) { mPrivateFlags |= PFLAG_HEIGHT_IS_SET; mYogaNode.setHeightPercent(percent); return this; } @Override public InternalNode heightRes(@DimenRes int resId) { return heightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode heightAttr(@AttrRes int resId, @DimenRes int 
defaultResId) { return heightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode heightAttr(@AttrRes int resId) { return heightAttr(resId, 0); } @Override public InternalNode heightDip(@Dimension(unit = DP) int height) { return heightPx(mResourceResolver.dipsToPixels(height)); } @Override public InternalNode minHeightPx(@Px int minHeight) { mPrivateFlags |= PFLAG_MIN_HEIGHT_IS_SET; mYogaNode.setMinHeight(minHeight); return this; } @Override public InternalNode minHeightPercent(float percent) { mPrivateFlags |= PFLAG_MIN_HEIGHT_IS_SET; mYogaNode.setMinHeightPercent(percent); return this; } @Override public InternalNode minHeightAttr(@AttrRes int resId, @DimenRes int defaultResId) { return minHeightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode minHeightAttr(@AttrRes int resId) { return minHeightAttr(resId, 0); } @Override public InternalNode minHeightRes(@DimenRes int resId) { return minHeightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode minHeightDip(@Dimension(unit = DP) int minHeight) { return minHeightPx(mResourceResolver.dipsToPixels(minHeight)); } @Override public InternalNode maxHeightPx(@Px int maxHeight) { mPrivateFlags |= PFLAG_MAX_HEIGHT_IS_SET; mYogaNode.setMaxHeight(maxHeight); return this; } @Override public InternalNode maxHeightPercent(float percent) { mPrivateFlags |= PFLAG_MAX_HEIGHT_IS_SET; mYogaNode.setMaxHeightPercent(percent); return this; } @Override public InternalNode maxHeightAttr(@AttrRes int resId, @DimenRes int defaultResId) { return maxHeightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode maxHeightAttr(@AttrRes int resId) { return maxHeightAttr(resId, 0); } @Override public InternalNode maxHeightRes(@DimenRes int resId) { return maxHeightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode maxHeightDip(@Dimension(unit = DP) int maxHeight) { return maxHeightPx(mResourceResolver.dipsToPixels(maxHeight)); } @Override public InternalNode aspectRatio(float aspectRatio) { mPrivateFlags |= PFLAG_ASPECT_RATIO_IS_SET; if (mYogaNode instanceof YogaNode) { ((YogaNode) mYogaNode).setAspectRatio(aspectRatio); return this; } else { throw new IllegalStateException("Aspect ration requires using YogaNode not YogaNodeDEPRECATED"); } } private boolean shouldApplyTouchExpansion() { return mTouchExpansion != null && mNodeInfo != null && mNodeInfo.hasTouchEventHandlers(); } boolean hasTouchExpansion() { return ((mPrivateFlags & PFLAG_TOUCH_EXPANSION_IS_SET) != 0L); } Spacing getTouchExpansion() { return mTouchExpansion; } int getTouchExpansionLeft() { if (!shouldApplyTouchExpansion()) { return 0; } if (YogaConstants.isUndefined(mResolvedTouchExpansionLeft)) { mResolvedTouchExpansionLeft = resolveHorizontalSpacing(mTouchExpansion, Spacing.LEFT); } return FastMath.round(mResolvedTouchExpansionLeft); } int getTouchExpansionTop() { if (!shouldApplyTouchExpansion()) { return 0; } return FastMath.round(mTouchExpansion.get(Spacing.TOP)); } int getTouchExpansionRight() { if (!shouldApplyTouchExpansion()) { return 0; } if (YogaConstants.isUndefined(mResolvedTouchExpansionRight)) { mResolvedTouchExpansionRight = resolveHorizontalSpacing(mTouchExpansion, Spacing.RIGHT); } return FastMath.round(mResolvedTouchExpansionRight); } int getTouchExpansionBottom() { if (!shouldApplyTouchExpansion()) { return 0; } return FastMath.round(mTouchExpansion.get(Spacing.BOTTOM)); } @Override public 
InternalNode touchExpansionPx(YogaEdge edge, @Px int touchExpansion) { if (mTouchExpansion == null) { mTouchExpansion = ComponentsPools.acquireSpacing(); } mPrivateFlags |= PFLAG_TOUCH_EXPANSION_IS_SET; mTouchExpansion.set(edge.intValue(), touchExpansion); return this; } @Override public InternalNode touchExpansionAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return touchExpansionPx( edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode touchExpansionAttr( YogaEdge edge, @AttrRes int resId) { return touchExpansionAttr(edge, resId, 0); } @Override public InternalNode touchExpansionRes(YogaEdge edge, @DimenRes int resId) { return touchExpansionPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode touchExpansionDip( YogaEdge edge, @Dimension(unit = DP) int touchExpansion) { return touchExpansionPx(edge, mResourceResolver.dipsToPixels(touchExpansion)); } @Override public InternalNode child(ComponentLayout child) { if (child != null && child != NULL_LAYOUT) { addChildAt((InternalNode) child, mYogaNode.getChildCount()); } return this; } @Override public InternalNode child(ComponentLayout.Builder child) { if (child != null && child != NULL_LAYOUT) { child(child.build()); } return this; } @Override public InternalNode child(Component<?> child) { if (child != null) { child(Layout.create(mComponentContext, child).flexShrink(0).flexShrink(0).flexShrink(0)); } return this; } @Override public InternalNode child(Component.Builder<?> child) { if (child != null) { child(child.build()); } return this; } @Override public InternalNode background(Reference<? extends Drawable> background) { mPrivateFlags |= PFLAG_BACKGROUND_IS_SET; mBackground = background; setPaddingFromDrawableReference(background); return this; } @Override public InternalNode background(Reference.Builder<? extends Drawable> builder) { return background(builder.build()); } @Override public InternalNode backgroundAttr(@AttrRes int resId, @DrawableRes int defaultResId) { return backgroundRes(mResourceResolver.resolveResIdAttr(resId, defaultResId)); } @Override public InternalNode backgroundAttr(@AttrRes int resId) { return backgroundAttr(resId, 0); } @Override public InternalNode backgroundRes(@DrawableRes int resId) { if (resId == 0) { return background((Reference<Drawable>) null); } return background( ResourceDrawableReference.create(mComponentContext) .resId(resId) .build()); } @Override public InternalNode backgroundColor(@ColorInt int backgroundColor) { return background( ColorDrawableReference.create(mComponentContext) .color(backgroundColor) .build()); } @Override public InternalNode foreground(Reference<? extends Drawable> foreground) { mPrivateFlags |= PFLAG_FOREGROUND_IS_SET; mForeground = foreground; return this; } @Override public InternalNode foreground(Reference.Builder<? 
extends Drawable> builder) { return foreground(builder.build()); } @Override public InternalNode foregroundAttr(@AttrRes int resId, @DrawableRes int defaultResId) { return foregroundRes(mResourceResolver.resolveResIdAttr(resId, defaultResId)); } @Override public InternalNode foregroundAttr(@AttrRes int resId) { return foregroundAttr(resId, 0); } @Override public InternalNode foregroundRes(@DrawableRes int resId) { if (resId == 0) { return foreground((Reference<Drawable>) null); } return foreground( ResourceDrawableReference.create(mComponentContext) .resId(resId) .build()); } @Override public InternalNode foregroundColor(@ColorInt int foregroundColor) { return foreground( ColorDrawableReference.create(mComponentContext) .color(foregroundColor) .build()); } @Override public InternalNode wrapInView() { mForceViewWrapping = true; return this; } boolean isForceViewWrapping() { return mForceViewWrapping; } @Override public InternalNode clickHandler(EventHandler clickHandler) { getOrCreateNodeInfo().setClickHandler(clickHandler); return this; } @Override public InternalNode longClickHandler(EventHandler longClickHandler) { getOrCreateNodeInfo().setLongClickHandler(longClickHandler); return this; } @Override public InternalNode touchHandler(EventHandler touchHandler) { getOrCreateNodeInfo().setTouchHandler(touchHandler); return this; } @Override public ContainerBuilder focusable(boolean isFocusable) { getOrCreateNodeInfo().setFocusable(isFocusable); return this; } @Override public InternalNode visibleHandler(EventHandler visibleHandler) { mPrivateFlags |= PFLAG_VISIBLE_HANDLER_IS_SET; mVisibleHandler = visibleHandler; return this; } EventHandler getVisibleHandler() { return mVisibleHandler; } @Override public InternalNode focusedHandler(EventHandler focusedHandler) { mPrivateFlags |= PFLAG_FOCUSED_HANDLER_IS_SET; mFocusedHandler = focusedHandler; return this; } EventHandler getFocusedHandler() { return mFocusedHandler; } @Override public InternalNode fullImpressionHandler(EventHandler fullImpressionHandler) { mPrivateFlags |= PFLAG_FULL_IMPRESSION_HANDLER_IS_SET; mFullImpressionHandler = fullImpressionHandler; return this; } EventHandler getFullImpressionHandler() { return mFullImpressionHandler; } @Override public InternalNode invisibleHandler(EventHandler invisibleHandler) { mPrivateFlags |= PFLAG_INVISIBLE_HANDLER_IS_SET; mInvisibleHandler = invisibleHandler; return this; } EventHandler getInvisibleHandler() { return mInvisibleHandler; } @Override public InternalNode contentDescription(CharSequence contentDescription) { getOrCreateNodeInfo().setContentDescription(contentDescription); return this; } @Override public InternalNode contentDescription(@StringRes int stringId) { return contentDescription(mResources.getString(stringId)); } @Override public InternalNode contentDescription(@StringRes int stringId, Object... 
formatArgs) { return contentDescription(mResources.getString(stringId, formatArgs)); } @Override public InternalNode viewTag(Object viewTag) { getOrCreateNodeInfo().setViewTag(viewTag); return this; } @Override public InternalNode viewTags(SparseArray<Object> viewTags) { getOrCreateNodeInfo().setViewTags(viewTags); return this; } @Override public InternalNode testKey(String testKey) { mTestKey = testKey; return this; } @Override public InternalNode dispatchPopulateAccessibilityEventHandler( EventHandler<DispatchPopulateAccessibilityEventEvent> dispatchPopulateAccessibilityEventHandler) { getOrCreateNodeInfo().setDispatchPopulateAccessibilityEventHandler( dispatchPopulateAccessibilityEventHandler); return this; } @Override public InternalNode onInitializeAccessibilityEventHandler( EventHandler<OnInitializeAccessibilityEventEvent> onInitializeAccessibilityEventHandler) { getOrCreateNodeInfo().setOnInitializeAccessibilityEventHandler( onInitializeAccessibilityEventHandler); return this; } @Override public InternalNode onInitializeAccessibilityNodeInfoHandler( EventHandler<OnInitializeAccessibilityNodeInfoEvent> onInitializeAccessibilityNodeInfoHandler) { getOrCreateNodeInfo().setOnInitializeAccessibilityNodeInfoHandler( onInitializeAccessibilityNodeInfoHandler); return this; } @Override public InternalNode onPopulateAccessibilityEventHandler( EventHandler<OnPopulateAccessibilityEventEvent> onPopulateAccessibilityEventHandler) { getOrCreateNodeInfo().setOnPopulateAccessibilityEventHandler( onPopulateAccessibilityEventHandler); return this; } @Override public InternalNode onRequestSendAccessibilityEventHandler( EventHandler<OnRequestSendAccessibilityEventEvent> onRequestSendAccessibilityEventHandler) { getOrCreateNodeInfo().setOnRequestSendAccessibilityEventHandler( onRequestSendAccessibilityEventHandler); return this; } @Override public InternalNode performAccessibilityActionHandler( EventHandler<PerformAccessibilityActionEvent> performAccessibilityActionHandler) { getOrCreateNodeInfo().setPerformAccessibilityActionHandler(performAccessibilityActionHandler); return this; } @Override public InternalNode sendAccessibilityEventHandler( EventHandler<SendAccessibilityEventEvent> sendAccessibilityEventHandler) { getOrCreateNodeInfo().setSendAccessibilityEventHandler(sendAccessibilityEventHandler); return this; } @Override public InternalNode sendAccessibilityEventUncheckedHandler( EventHandler<SendAccessibilityEventUncheckedEvent> sendAccessibilityEventUncheckedHandler) { getOrCreateNodeInfo().setSendAccessibilityEventUncheckedHandler( sendAccessibilityEventUncheckedHandler); return this; } @Override public ContainerBuilder transitionKey(String key) { if (SDK_INT >= ICE_CREAM_SANDWICH) { mPrivateFlags |= PFLAG_TRANSITION_KEY_IS_SET; mTransitionKey = key; wrapInView(); } return this; } String getTransitionKey() { return mTransitionKey; } /** * A unique identifier which may be set for retrieving a component and its bounds when testing. 
*/ String getTestKey() { return mTestKey; } void setMeasureFunction(YogaMeasureFunction measureFunction) { mYogaNode.setMeasureFunction(measureFunction); } void setBaselineFunction(YogaBaselineFunction baselineFunction) { // YogaNode is the only version of YogaNodeAPI with this support; if (mYogaNode instanceof YogaNode) { mYogaNode.setBaselineFunction(baselineFunction); } } boolean hasNewLayout() { return mYogaNode.hasNewLayout(); } void markLayoutSeen() { mYogaNode.markLayoutSeen(); } float getStyleWidth() { return mYogaNode.getWidth().value; } float getMinWidth() { return mYogaNode.getMinWidth().value; } float getMaxWidth() { return mYogaNode.getMaxWidth().value; } float getStyleHeight() { return mYogaNode.getHeight().value; } float getMinHeight() { return mYogaNode.getMinHeight().value; } float getMaxHeight() { return mYogaNode.getMaxHeight().value; } void calculateLayout(float width, float height) { final ComponentTree tree = mComponentContext == null ? null : mComponentContext.getComponentTree(); final ComponentsStethoManager stethoManager = tree == null ? null : tree.getStethoManager(); if (stethoManager != null) { applyOverridesRecursive(stethoManager, this); } mYogaNode.calculateLayout(width, height); } private static void applyOverridesRecursive( ComponentsStethoManager stethoManager, InternalNode node) { stethoManager.applyOverrides(node); for (int i = 0, count = node.getChildCount(); i < count; i++) { applyOverridesRecursive(stethoManager, node.getChildAt(i)); } if (node.hasNestedTree()) { applyOverridesRecursive(stethoManager, node.getNestedTree()); } } void calculateLayout() { calculateLayout(YogaConstants.UNDEFINED, YogaConstants.UNDEFINED); } int getChildCount() { return mYogaNode.getChildCount(); } com.facebook.yoga.YogaDirection getStyleDirection() { return mYogaNode.getStyleDirection(); } InternalNode getChildAt(int index) { if (mYogaNode.getChildAt(index) == null) { return null; } return (InternalNode) mYogaNode.getChildAt(index).getData(); } int getChildIndex(InternalNode child) { for (int i = 0, count = mYogaNode.getChildCount(); i < count; i++) { if (mYogaNode.getChildAt(i) == child.mYogaNode) { return i; } } return -1; } InternalNode getParent() { if (mYogaNode == null || mYogaNode.getParent() == null) { return null; } return (InternalNode) mYogaNode.getParent().getData(); } void addChildAt(InternalNode child, int index) { mYogaNode.addChildAt(child.mYogaNode, index); } InternalNode removeChildAt(int index) { return (InternalNode) mYogaNode.removeChildAt(index).getData(); } @Override public ComponentLayout build() { return this; } private float resolveHorizontalSpacing(Spacing spacing, int index) { final boolean isRtl = (mYogaNode.getLayoutDirection() == YogaDirection.RTL); final int resolvedIndex; switch (index) { case Spacing.LEFT: resolvedIndex = (isRtl ? Spacing.END : Spacing.START); break; case Spacing.RIGHT: resolvedIndex = (isRtl ? 
Spacing.START : Spacing.END); break; default: throw new IllegalArgumentException("Not an horizontal padding index: " + index); } float result = spacing.getRaw(resolvedIndex); if (YogaConstants.isUndefined(result)) { result = spacing.get(index); } return result; } ComponentContext getContext() { return mComponentContext; } Component getComponent() { return mComponent; } int getBorderColor() { return mBorderColor; } boolean shouldDrawBorders() { return mBorderColor != Color.TRANSPARENT && (mYogaNode.getLayoutBorder(LEFT) != 0 || mYogaNode.getLayoutBorder(TOP) != 0 || mYogaNode.getLayoutBorder(RIGHT) != 0 || mYogaNode.getLayoutBorder(BOTTOM) != 0); } void setComponent(Component component) { mComponent = component; } boolean hasNestedTree() { return mNestedTree != null; } @Nullable InternalNode getNestedTree() { return mNestedTree; } InternalNode getNestedTreeHolder() { return mNestedTreeHolder; } /** * Set the nested tree before measuring it in order to transfer over important information * such as layout direction needed during measurement. */ void setNestedTree(InternalNode nestedTree) { nestedTree.mNestedTreeHolder = this; mNestedTree = nestedTree; } NodeInfo getNodeInfo() { return mNodeInfo; } void copyInto(InternalNode node) { if (mNodeInfo != null) { if (node.mNodeInfo == null) { node.mNodeInfo = mNodeInfo.acquireRef(); } else { node.mNodeInfo.updateWith(mNodeInfo); } } if ((node.mPrivateFlags & PFLAG_LAYOUT_DIRECTION_IS_SET) == 0L || node.getResolvedLayoutDirection() == YogaDirection.INHERIT) { node.layoutDirection(getResolvedLayoutDirection()); } if ((node.mPrivateFlags & PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET) == 0L || node.mImportantForAccessibility == ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO) { node.mImportantForAccessibility = mImportantForAccessibility; } if ((mPrivateFlags & PFLAG_DUPLICATE_PARENT_STATE_IS_SET) != 0L) { node.mDuplicateParentState = mDuplicateParentState; } if ((mPrivateFlags & PFLAG_BACKGROUND_IS_SET) != 0L) { node.mBackground = mBackground; } if ((mPrivateFlags & PFLAG_FOREGROUND_IS_SET) != 0L) { node.mForeground = mForeground; } if (mForceViewWrapping) { node.mForceViewWrapping = true; } if ((mPrivateFlags & PFLAG_VISIBLE_HANDLER_IS_SET) != 0L) { node.mVisibleHandler = mVisibleHandler; } if ((mPrivateFlags & PFLAG_FOCUSED_HANDLER_IS_SET) != 0L) { node.mFocusedHandler = mFocusedHandler; } if ((mPrivateFlags & PFLAG_FULL_IMPRESSION_HANDLER_IS_SET) != 0L) { node.mFullImpressionHandler = mFullImpressionHandler; } if ((mPrivateFlags & PFLAG_INVISIBLE_HANDLER_IS_SET) != 0L) { node.mInvisibleHandler = mInvisibleHandler; } if (mTestKey != null) { node.mTestKey = mTestKey; } if ((mPrivateFlags & PFLAG_PADDING_IS_SET) != 0L) { if (mNestedTreePadding == null) { throw new IllegalStateException("copyInto() must be used when resolving a nestedTree. 
" + "If padding was set on the holder node, we must have a mNestedTreePadding instance"); } final YogaNodeAPI yogaNode = node.mYogaNode; node.mPrivateFlags |= PFLAG_PADDING_IS_SET; if (isPaddingPercent(LEFT)) { yogaNode.setPaddingPercent(LEFT, mNestedTreePadding.getRaw(Spacing.LEFT)); } else { yogaNode.setPadding(LEFT, mNestedTreePadding.getRaw(Spacing.LEFT)); } if (isPaddingPercent(TOP)) { yogaNode.setPaddingPercent(TOP, mNestedTreePadding.getRaw(Spacing.TOP)); } else { yogaNode.setPadding(TOP, mNestedTreePadding.getRaw(Spacing.TOP)); } if (isPaddingPercent(RIGHT)) { yogaNode.setPaddingPercent(RIGHT, mNestedTreePadding.getRaw(Spacing.RIGHT)); } else { yogaNode.setPadding(RIGHT, mNestedTreePadding.getRaw(Spacing.RIGHT)); } if (isPaddingPercent(BOTTOM)) { yogaNode.setPaddingPercent(BOTTOM, mNestedTreePadding.getRaw(Spacing.BOTTOM)); } else { yogaNode.setPadding(BOTTOM, mNestedTreePadding.getRaw(Spacing.BOTTOM)); } if (isPaddingPercent(VERTICAL)) { yogaNode.setPaddingPercent(VERTICAL, mNestedTreePadding.getRaw(Spacing.VERTICAL)); } else { yogaNode.setPadding(VERTICAL, mNestedTreePadding.getRaw(Spacing.VERTICAL)); } if (isPaddingPercent(HORIZONTAL)) { yogaNode.setPaddingPercent(HORIZONTAL, mNestedTreePadding.getRaw(Spacing.HORIZONTAL)); } else { yogaNode.setPadding(HORIZONTAL, mNestedTreePadding.getRaw(Spacing.HORIZONTAL)); } if (isPaddingPercent(START)) { yogaNode.setPaddingPercent(START, mNestedTreePadding.getRaw(Spacing.START)); } else { yogaNode.setPadding(START, mNestedTreePadding.getRaw(Spacing.START)); } if (isPaddingPercent(END)) { yogaNode.setPaddingPercent(END, mNestedTreePadding.getRaw(Spacing.END)); } else { yogaNode.setPadding(END, mNestedTreePadding.getRaw(Spacing.END)); } if (isPaddingPercent(ALL)) { yogaNode.setPaddingPercent(ALL, mNestedTreePadding.getRaw(Spacing.ALL)); } else { yogaNode.setPadding(ALL, mNestedTreePadding.getRaw(Spacing.ALL)); } } if ((mPrivateFlags & PFLAG_BORDER_WIDTH_IS_SET) != 0L) { if (mNestedTreeBorderWidth == null) { throw new IllegalStateException("copyInto() must be used when resolving a nestedTree. 
" + "If border width was set on the holder node, we must have a mNestedTreeBorderWidth " + "instance"); } final YogaNodeAPI yogaNode = node.mYogaNode; node.mPrivateFlags |= PFLAG_BORDER_WIDTH_IS_SET; yogaNode.setBorder(LEFT, mNestedTreeBorderWidth.getRaw(Spacing.LEFT)); yogaNode.setBorder(TOP, mNestedTreeBorderWidth.getRaw(Spacing.TOP)); yogaNode.setBorder(RIGHT, mNestedTreeBorderWidth.getRaw(Spacing.RIGHT)); yogaNode.setBorder(BOTTOM, mNestedTreeBorderWidth.getRaw(Spacing.BOTTOM)); yogaNode.setBorder(VERTICAL, mNestedTreeBorderWidth.getRaw(Spacing.VERTICAL)); yogaNode.setBorder(HORIZONTAL, mNestedTreeBorderWidth.getRaw(Spacing.HORIZONTAL)); yogaNode.setBorder(START, mNestedTreeBorderWidth.getRaw(Spacing.START)); yogaNode.setBorder(END, mNestedTreeBorderWidth.getRaw(Spacing.END)); yogaNode.setBorder(ALL, mNestedTreeBorderWidth.getRaw(Spacing.ALL)); } if ((mPrivateFlags & PFLAG_TRANSITION_KEY_IS_SET) != 0L) { node.mTransitionKey = mTransitionKey; } if ((mPrivateFlags & PFLAG_BORDER_COLOR_IS_SET) != 0L) { node.mBorderColor = mBorderColor; } } void setStyleWidthFromSpec(int widthSpec) { switch (SizeSpec.getMode(widthSpec)) { case SizeSpec.UNSPECIFIED: mYogaNode.setWidth(YogaConstants.UNDEFINED); break; case SizeSpec.AT_MOST: mYogaNode.setMaxWidth(SizeSpec.getSize(widthSpec)); break; case SizeSpec.EXACTLY: mYogaNode.setWidth(SizeSpec.getSize(widthSpec)); break; } } void setStyleHeightFromSpec(int heightSpec) { switch (SizeSpec.getMode(heightSpec)) { case SizeSpec.UNSPECIFIED: mYogaNode.setHeight(YogaConstants.UNDEFINED); break; case SizeSpec.AT_MOST: mYogaNode.setMaxHeight(SizeSpec.getSize(heightSpec)); break; case SizeSpec.EXACTLY: mYogaNode.setHeight(SizeSpec.getSize(heightSpec)); break; } } int getImportantForAccessibility() { return mImportantForAccessibility; } boolean isDuplicateParentStateEnabled() { return mDuplicateParentState; } void applyAttributes(TypedArray a) { for (int i = 0, size = a.getIndexCount(); i < size; i++) { final int attr = a.getIndex(i); if (attr == R.styleable.ComponentLayout_android_layout_width) { int width = a.getLayoutDimension(attr, -1); // We don't support WRAP_CONTENT or MATCH_PARENT so no-op for them if (width >= 0) { widthPx(width); } } else if (attr == R.styleable.ComponentLayout_android_layout_height) { int height = a.getLayoutDimension(attr, -1); // We don't support WRAP_CONTENT or MATCH_PARENT so no-op for them if (height >= 0) { heightPx(height); } } else if (attr == R.styleable.ComponentLayout_android_paddingLeft) { paddingPx(LEFT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingTop) { paddingPx(TOP, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingRight) { paddingPx(RIGHT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingBottom) { paddingPx(BOTTOM, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingStart && SUPPORTS_RTL) { paddingPx(START, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_paddingEnd && SUPPORTS_RTL) { paddingPx(END, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_padding) { paddingPx(ALL, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginLeft) { marginPx(LEFT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginTop) { marginPx(TOP, 
a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginRight) { marginPx(RIGHT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginBottom) { marginPx(BOTTOM, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginStart && SUPPORTS_RTL) { marginPx(START, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_marginEnd && SUPPORTS_RTL) { marginPx(END, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_layout_margin) { marginPx(ALL, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_importantForAccessibility && SDK_INT >= JELLY_BEAN) { importantForAccessibility(a.getInt(attr, 0)); } else if (attr == R.styleable.ComponentLayout_android_duplicateParentState) { duplicateParentState(a.getBoolean(attr, false)); } else if (attr == R.styleable.ComponentLayout_android_background) { if (TypedArrayUtils.isColorAttribute(a, R.styleable.ComponentLayout_android_background)) { backgroundColor(a.getColor(attr, 0)); } else { backgroundRes(a.getResourceId(attr, -1)); } } else if (attr == R.styleable.ComponentLayout_android_foreground) { if (TypedArrayUtils.isColorAttribute(a, R.styleable.ComponentLayout_android_foreground)) { foregroundColor(a.getColor(attr, 0)); } else { foregroundRes(a.getResourceId(attr, -1)); } } else if (attr == R.styleable.ComponentLayout_android_contentDescription) { contentDescription(a.getString(attr)); } else if (attr == R.styleable.ComponentLayout_flex_direction) { flexDirection(YogaFlexDirection.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_wrap) { wrap(YogaWrap.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_justifyContent) { justifyContent(YogaJustify.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_alignItems) { alignItems(YogaAlign.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_alignSelf) { alignSelf(YogaAlign.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex_positionType) { positionType(YogaPositionType.fromInt(a.getInteger(attr, 0))); } else if (attr == R.styleable.ComponentLayout_flex) { final float flex = a.getFloat(attr, -1); if (flex >= 0f) { flex(flex); } } else if (attr == R.styleable.ComponentLayout_flex_left) { positionPx(LEFT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_flex_top) { positionPx(TOP, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_flex_right) { positionPx(RIGHT, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_flex_bottom) { positionPx(BOTTOM, a.getDimensionPixelOffset(attr, 0)); } else if (attr == R.styleable.ComponentLayout_flex_layoutDirection) { final int layoutDirection = a.getInteger(attr, -1); layoutDirection(YogaDirection.fromInt(layoutDirection)); } } } /** * Reset all attributes to default values. Intended to facilitate recycling. 
   */
  void release() {
    if (mYogaNode.getParent() != null || mYogaNode.getChildCount() > 0) {
      throw new IllegalStateException("You should not free an attached InternalNode");
    }
    ComponentsPools.release(mYogaNode);
    mYogaNode = null;

    mResourceResolver.internalRelease();

    mResolvedTouchExpansionLeft = YogaConstants.UNDEFINED;
    mResolvedTouchExpansionRight = YogaConstants.UNDEFINED;
    mResolvedX = YogaConstants.UNDEFINED;
    mResolvedY = YogaConstants.UNDEFINED;
    mResolvedWidth = YogaConstants.UNDEFINED;
    mResolvedHeight = YogaConstants.UNDEFINED;

    mComponentContext = null;
    mResources = null;
    mComponent = null;
    mNestedTree = null;
    mNestedTreeHolder = null;
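setStyleWidthFromSpec() and setStyleHeightFromSpec() in the file above translate a measure-spec mode into either an exact size, an upper bound, or an undefined dimension on the Yoga node. The sketch below is a hedged illustration of that mapping, not code from the commit; Mode, SizeTarget, and the other names are invented for the example.

final class SpecToSizeSketch {
  enum Mode { UNSPECIFIED, AT_MOST, EXACTLY }

  static final float UNDEFINED = Float.NaN; // stand-in for YogaConstants.UNDEFINED

  interface SizeTarget {
    void setWidth(float width);
    void setMaxWidth(float maxWidth);
  }

  static void applyWidthSpec(Mode mode, int size, SizeTarget target) {
    switch (mode) {
      case UNSPECIFIED:
        target.setWidth(UNDEFINED); // let the layout engine pick the width
        break;
      case AT_MOST:
        target.setMaxWidth(size);   // constrain only the upper bound
        break;
      case EXACTLY:
        target.setWidth(size);      // pin the width to the requested size
        break;
    }
  }

  public static void main(String[] args) {
    applyWidthSpec(Mode.AT_MOST, 480, new SizeTarget() {
      @Override public void setWidth(float width) { System.out.println("width=" + width); }
      @Override public void setMaxWidth(float maxWidth) { System.out.println("maxWidth=" + maxWidth); }
    });
  }
}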
Lines authored by marcocova. This commit forms part of the blame-preserving initial commit suite.
src/main/java/com/facebook/components/InternalNode.java
Lines authored by marcocova
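The InternalNode code in this record also caches measurement results keyed on the last width/height specs (getLastWidthSpec(), getLastMeasuredWidth(), areCachedMeasuresValid()). The class below is a minimal, self-contained sketch of that idea with hypothetical names and placeholder measurement math; it is not taken from the commit.

final class MeasureCacheSketch {
  static final int UNSPECIFIED = -1; // stand-in for DiffNode.UNSPECIFIED

  private int mLastWidthSpec = UNSPECIFIED;
  private int mLastHeightSpec = UNSPECIFIED;
  private float mLastMeasuredWidth = UNSPECIFIED;
  private float mLastMeasuredHeight = UNSPECIFIED;

  // Returns the cached size when the specs match the previous measure pass.
  float[] measure(int widthSpec, int heightSpec) {
    if (widthSpec == mLastWidthSpec && heightSpec == mLastHeightSpec
        && mLastMeasuredWidth != UNSPECIFIED) {
      return new float[] {mLastMeasuredWidth, mLastMeasuredHeight}; // cache hit
    }
    // Cache miss: run the (expensive) real measurement, then remember the result.
    float measuredWidth = expensiveMeasureWidth(widthSpec);
    float measuredHeight = expensiveMeasureHeight(heightSpec);
    mLastWidthSpec = widthSpec;
    mLastHeightSpec = heightSpec;
    mLastMeasuredWidth = measuredWidth;
    mLastMeasuredHeight = measuredHeight;
    return new float[] {measuredWidth, measuredHeight};
  }

  // Placeholder measurement functions for the example.
  private float expensiveMeasureWidth(int widthSpec) { return widthSpec * 0.5f; }
  private float expensiveMeasureHeight(int heightSpec) { return heightSpec * 0.5f; }

  public static void main(String[] args) {
    MeasureCacheSketch node = new MeasureCacheSketch();
    float[] first = node.measure(200, 100);  // miss: measures and stores
    float[] second = node.measure(200, 100); // hit: returns stored values
    System.out.println(first[0] + " == " + second[0]);
  }
}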
Java
apache-2.0
73d9067226671adb6410ccfb4d5ca2f00283c82b
0
1haodian/spark,dbtsai/spark,nlalevee/spark,minixalpha/spark,goldmedal/spark,ueshin/apache-spark,icexelloss/spark,hhbyyh/spark,cin/spark,cloud-fan/spark,shuangshuangwang/spark,nchammas/spark,markhamstra/spark,mdespriee/spark,maropu/spark,1haodian/spark,eyalfa/spark,JoshRosen/spark,wangyum/spark,actuaryzhang/spark,ConeyLiu/spark,ericvandenbergfb/spark,zhouyejoe/spark,zhouyejoe/spark,hhbyyh/spark,debugger87/spark,chuckchen/spark,esi-mineset/spark,lvdongr/spark,szhem/spark,saltstar/spark,lxsmnv/spark,icexelloss/spark,spark-test/spark,apache/spark,caneGuy/spark,xuanyuanking/spark,vinodkc/spark,ericvandenbergfb/spark,rikima/spark,zuotingbing/spark,zero323/spark,zuotingbing/spark,yanboliang/spark,jkbradley/spark,skonto/spark,apache/spark,gengliangwang/spark,ioana-delaney/spark,HyukjinKwon/spark,guoxiaolongzte/spark,zzcclp/spark,ddna1021/spark,apache/spark,sureshthalamati/spark,LantaoJin/spark,spark-test/spark,rikima/spark,adrian-ionescu/apache-spark,sahilTakiar/spark,markhamstra/spark,bravo-zhang/spark,adrian-ionescu/apache-spark,ioana-delaney/spark,taroplus/spark,pgandhi999/spark,dongjoon-hyun/spark,witgo/spark,brad-kaiser/spark,hvanhovell/spark,highfei2011/spark,wangmiao1981/spark,witgo/spark,LantaoJin/spark,goldmedal/spark,facaiy/spark,milliman/spark,apache/spark,bdrillard/spark,tengpeng/spark,wangyum/spark,Aegeaner/spark,cloud-fan/spark,cloud-fan/spark,minixalpha/spark,adrian-ionescu/apache-spark,nlalevee/spark,saltstar/spark,taroplus/spark,zuotingbing/spark,kevinyu98/spark,jiangxb1987/spark,mdespriee/spark,JoshRosen/spark,lvdongr/spark,maropu/spark,ron8hu/spark,nlalevee/spark,spark-test/spark,nchammas/spark,hvanhovell/spark,nlalevee/spark,markhamstra/spark,highfei2011/spark,debugger87/spark,ConeyLiu/spark,ConeyLiu/spark,caneGuy/spark,facaiy/spark,sahilTakiar/spark,hvanhovell/spark,kiszk/spark,guoxiaolongzte/spark,saltstar/spark,BryanCutler/spark,rednaxelafx/apache-spark,aray/spark,wangmiao1981/spark,skonto/spark,hhbyyh/spark,icexelloss/spark,shaneknapp/spark,hvanhovell/spark,JoshRosen/spark,brad-kaiser/spark,rednaxelafx/apache-spark,ddna1021/spark,chuckchen/spark,zhouyejoe/spark,BryanCutler/spark,alunarbeach/spark,spark-test/spark,nchammas/spark,taroplus/spark,bdrillard/spark,cloud-fan/spark,szhem/spark,milliman/spark,kevinyu98/spark,tejasapatil/spark,sahilTakiar/spark,cin/spark,guoxiaolongzte/spark,hvanhovell/spark,dongjoon-hyun/spark,zuotingbing/spark,1haodian/spark,facaiy/spark,aosagie/spark,shuangshuangwang/spark,mahak/spark,apache/spark,akopich/spark,xuanyuanking/spark,wangmiao1981/spark,HyukjinKwon/spark,xuanyuanking/spark,icexelloss/spark,ptkool/spark,goldmedal/spark,srowen/spark,ericvandenbergfb/spark,rikima/spark,Aegeaner/spark,joseph-torres/spark,Aegeaner/spark,JoshRosen/spark,vinodkc/spark,eyalfa/spark,Aegeaner/spark,srowen/spark,witgo/spark,minixalpha/spark,yanboliang/spark,facaiy/spark,saltstar/spark,ptkool/spark,szhem/spark,darionyaphet/spark,sureshthalamati/spark,zero323/spark,wangmiao1981/spark,wangyum/spark,ioana-delaney/spark,joseph-torres/spark,aray/spark,jiangxb1987/spark,skonto/spark,kevinyu98/spark,LantaoJin/spark,brad-kaiser/spark,bdrillard/spark,dongjoon-hyun/spark,ddna1021/spark,mahak/spark,apache/spark,hhbyyh/spark,dbtsai/spark,ahnqirage/spark,cin/spark,1haodian/spark,aray/spark,tengpeng/spark,michalsenkyr/spark,tengpeng/spark,zhouyejoe/spark,apache/spark,chuckchen/spark,Aegeaner/spark,srowen/spark,adrian-ionescu/apache-spark,lvdongr/spark,rezasafi/spark,icexelloss/spark,alunarbeach/spark,WindCanDie/spark,kevinyu98/spark,rednaxelafx/apache-spark,shubhamchopra/spark,a
ray/spark,Aegeaner/spark,actuaryzhang/spark,ron8hu/spark,tengpeng/spark,gengliangwang/spark,darionyaphet/spark,HyukjinKwon/spark,actuaryzhang/spark,ahnqirage/spark,techaddict/spark,BryanCutler/spark,markhamstra/spark,highfei2011/spark,lxsmnv/spark,1haodian/spark,jiangxb1987/spark,aosagie/spark,nlalevee/spark,milliman/spark,dongjoon-hyun/spark,mahak/spark,facaiy/spark,zzcclp/spark,ueshin/apache-spark,rezasafi/spark,rednaxelafx/apache-spark,esi-mineset/spark,xuanyuanking/spark,zero323/spark,wzhfy/spark,ueshin/apache-spark,akopich/spark,hvanhovell/spark,zuotingbing/spark,rezasafi/spark,bdrillard/spark,HyukjinKwon/spark,sahilTakiar/spark,vinodkc/spark,caneGuy/spark,chuckchen/spark,gengliangwang/spark,sahilTakiar/spark,sahilTakiar/spark,zhouyejoe/spark,highfei2011/spark,alunarbeach/spark,debugger87/spark,gengliangwang/spark,kevinyu98/spark,nchammas/spark,aray/spark,ueshin/apache-spark,techaddict/spark,cloud-fan/spark,guoxiaolongzte/spark,WindCanDie/spark,JoshRosen/spark,gengliangwang/spark,wzhfy/spark,vinodkc/spark,shuangshuangwang/spark,ahnqirage/spark,darionyaphet/spark,goldmedal/spark,BryanCutler/spark,shubhamchopra/spark,alunarbeach/spark,zero323/spark,techaddict/spark,milliman/spark,yanboliang/spark,esi-mineset/spark,LantaoJin/spark,rikima/spark,wangyum/spark,1haodian/spark,aosagie/spark,cloud-fan/spark,ron8hu/spark,jkbradley/spark,Aegeaner/spark,sureshthalamati/spark,HyukjinKwon/spark,rednaxelafx/apache-spark,lxsmnv/spark,wzhfy/spark,WeichenXu123/spark,joseph-torres/spark,brad-kaiser/spark,ptkool/spark,pgandhi999/spark,ueshin/apache-spark,tejasapatil/spark,witgo/spark,1haodian/spark,WeichenXu123/spark,techaddict/spark,dbtsai/spark,alunarbeach/spark,akopich/spark,esi-mineset/spark,bravo-zhang/spark,skonto/spark,icexelloss/spark,lxsmnv/spark,tejasapatil/spark,shuangshuangwang/spark,ddna1021/spark,WindCanDie/spark,dbtsai/spark,sureshthalamati/spark,ioana-delaney/spark,mdespriee/spark,ron8hu/spark,WeichenXu123/spark,LantaoJin/spark,wangyum/spark,techaddict/spark,rekhajoshm/spark,tengpeng/spark,LantaoJin/spark,kiszk/spark,srowen/spark,michalsenkyr/spark,pgandhi999/spark,akopich/spark,joseph-torres/spark,milliman/spark,tejasapatil/spark,vinodkc/spark,hhbyyh/spark,ddna1021/spark,eyalfa/spark,rekhajoshm/spark,zuotingbing/spark,sureshthalamati/spark,holdenk/spark,HyukjinKwon/spark,taroplus/spark,hvanhovell/spark,rekhajoshm/spark,BryanCutler/spark,wzhfy/spark,ioana-delaney/spark,nlalevee/spark,darionyaphet/spark,zhouyejoe/spark,actuaryzhang/spark,xuanyuanking/spark,jiangxb1987/spark,adrian-ionescu/apache-spark,shaneknapp/spark,WindCanDie/spark,maropu/spark,icexelloss/spark,markhamstra/spark,akopich/spark,milliman/spark,wangyum/spark,ConeyLiu/spark,saltstar/spark,joseph-torres/spark,wangmiao1981/spark,jkbradley/spark,rikima/spark,WeichenXu123/spark,zzcclp/spark,minixalpha/spark,xuanyuanking/spark,gengliangwang/spark,tengpeng/spark,BryanCutler/spark,tejasapatil/spark,aray/spark,rekhajoshm/spark,bdrillard/spark,mahak/spark,akopich/spark,facaiy/spark,highfei2011/spark,ahnqirage/spark,lvdongr/spark,holdenk/spark,caneGuy/spark,wangmiao1981/spark,mahak/spark,WindCanDie/spark,vinodkc/spark,nchammas/spark,guoxiaolongzte/spark,zero323/spark,kiszk/spark,shubhamchopra/spark,ericvandenbergfb/spark,chuckchen/spark,dbtsai/spark,kiszk/spark,bravo-zhang/spark,mdespriee/spark,rezasafi/spark,sahilTakiar/spark,WindCanDie/spark,vinodkc/spark,mahak/spark,caneGuy/spark,wzhfy/spark,goldmedal/spark,rezasafi/spark,joseph-torres/spark,ddna1021/spark,jkbradley/spark,zzcclp/spark,chuckchen/spark,zuotingbing/spark,spark-test/spa
rk,eyalfa/spark,lvdongr/spark,JoshRosen/spark,rekhajoshm/spark,holdenk/spark,darionyaphet/spark,HyukjinKwon/spark,taroplus/spark,dbtsai/spark,srowen/spark,facaiy/spark,BryanCutler/spark,mdespriee/spark,jiangxb1987/spark,ron8hu/spark,ioana-delaney/spark,yanboliang/spark,techaddict/spark,markhamstra/spark,michalsenkyr/spark,debugger87/spark,shubhamchopra/spark,shuangshuangwang/spark,lxsmnv/spark,actuaryzhang/spark,zero323/spark,srowen/spark,ConeyLiu/spark,alunarbeach/spark,holdenk/spark,xuanyuanking/spark,ConeyLiu/spark,dongjoon-hyun/spark,yanboliang/spark,mdespriee/spark,ahnqirage/spark,shaneknapp/spark,minixalpha/spark,aosagie/spark,hhbyyh/spark,shaneknapp/spark,kiszk/spark,wzhfy/spark,zzcclp/spark,ioana-delaney/spark,hhbyyh/spark,joseph-torres/spark,darionyaphet/spark,markhamstra/spark,bdrillard/spark,witgo/spark,lvdongr/spark,pgandhi999/spark,dongjoon-hyun/spark,srowen/spark,rikima/spark,holdenk/spark,cin/spark,holdenk/spark,rekhajoshm/spark,shaneknapp/spark,aosagie/spark,goldmedal/spark,shubhamchopra/spark,adrian-ionescu/apache-spark,debugger87/spark,zzcclp/spark,jiangxb1987/spark,bravo-zhang/spark,bdrillard/spark,ahnqirage/spark,techaddict/spark,maropu/spark,witgo/spark,wangmiao1981/spark,eyalfa/spark,skonto/spark,nchammas/spark,shaneknapp/spark,kevinyu98/spark,mdespriee/spark,tejasapatil/spark,WindCanDie/spark,ericvandenbergfb/spark,michalsenkyr/spark,caneGuy/spark,shuangshuangwang/spark,WeichenXu123/spark,pgandhi999/spark,aosagie/spark,rikima/spark,rednaxelafx/apache-spark,maropu/spark,chuckchen/spark,bravo-zhang/spark,bravo-zhang/spark,ron8hu/spark,esi-mineset/spark,dbtsai/spark,maropu/spark,adrian-ionescu/apache-spark,milliman/spark,eyalfa/spark,rezasafi/spark,ptkool/spark,tengpeng/spark,actuaryzhang/spark,WeichenXu123/spark,saltstar/spark,shuangshuangwang/spark,aosagie/spark,ptkool/spark,maropu/spark,esi-mineset/spark,ron8hu/spark,michalsenkyr/spark,pgandhi999/spark,taroplus/spark,cin/spark,skonto/spark,gengliangwang/spark,szhem/spark,holdenk/spark,brad-kaiser/spark,eyalfa/spark,zhouyejoe/spark,michalsenkyr/spark,zzcclp/spark,sureshthalamati/spark,spark-test/spark,lxsmnv/spark,szhem/spark,nlalevee/spark,debugger87/spark,taroplus/spark,ahnqirage/spark,zero323/spark,bravo-zhang/spark,alunarbeach/spark,esi-mineset/spark,guoxiaolongzte/spark,highfei2011/spark,yanboliang/spark,sureshthalamati/spark,ptkool/spark,shaneknapp/spark,shubhamchopra/spark,cloud-fan/spark,ptkool/spark,highfei2011/spark,minixalpha/spark,michalsenkyr/spark,nchammas/spark,ericvandenbergfb/spark,lvdongr/spark,actuaryzhang/spark,dongjoon-hyun/spark,pgandhi999/spark,guoxiaolongzte/spark,JoshRosen/spark,ddna1021/spark,brad-kaiser/spark,saltstar/spark,cin/spark,szhem/spark,LantaoJin/spark,kevinyu98/spark,ericvandenbergfb/spark,yanboliang/spark,shubhamchopra/spark,brad-kaiser/spark,ConeyLiu/spark,spark-test/spark,witgo/spark,ueshin/apache-spark,goldmedal/spark,lxsmnv/spark,rednaxelafx/apache-spark,minixalpha/spark,akopich/spark,cin/spark,mahak/spark,ueshin/apache-spark,matthewfranglen/spark,caneGuy/spark,wzhfy/spark,WeichenXu123/spark,rezasafi/spark,kiszk/spark,jkbradley/spark,skonto/spark,debugger87/spark,rekhajoshm/spark,jkbradley/spark,szhem/spark,jiangxb1987/spark,tejasapatil/spark,darionyaphet/spark,jkbradley/spark,wangyum/spark,kiszk/spark,aray/spark
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.unsafe.types; import javax.annotation.Nonnull; import java.io.*; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Map; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.KryoSerializable; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import org.apache.spark.unsafe.Platform; import org.apache.spark.unsafe.array.ByteArrayMethods; import org.apache.spark.unsafe.hash.Murmur3_x86_32; import static org.apache.spark.unsafe.Platform.*; /** * A UTF-8 String for internal Spark use. * <p> * A String encoded in UTF-8 as an Array[Byte], which can be used for comparison, * search, see http://en.wikipedia.org/wiki/UTF-8 for details. * <p> * Note: This is not designed for general use cases, should not be used outside SQL. */ public final class UTF8String implements Comparable<UTF8String>, Externalizable, KryoSerializable, Cloneable { // These are only updated by readExternal() or read() @Nonnull private Object base; private long offset; private int numBytes; public Object getBaseObject() { return base; } public long getBaseOffset() { return offset; } private static int[] bytesOfCodePointInUTF8 = {2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6}; private static final boolean IS_LITTLE_ENDIAN = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN; private static final UTF8String COMMA_UTF8 = UTF8String.fromString(","); public static final UTF8String EMPTY_UTF8 = UTF8String.fromString(""); /** * Creates an UTF8String from byte array, which should be encoded in UTF-8. * * Note: `bytes` will be hold by returned UTF8String. */ public static UTF8String fromBytes(byte[] bytes) { if (bytes != null) { return new UTF8String(bytes, BYTE_ARRAY_OFFSET, bytes.length); } else { return null; } } /** * Creates an UTF8String from byte array, which should be encoded in UTF-8. * * Note: `bytes` will be hold by returned UTF8String. */ public static UTF8String fromBytes(byte[] bytes, int offset, int numBytes) { if (bytes != null) { return new UTF8String(bytes, BYTE_ARRAY_OFFSET + offset, numBytes); } else { return null; } } /** * Creates an UTF8String from given address (base and offset) and length. */ public static UTF8String fromAddress(Object base, long offset, int numBytes) { return new UTF8String(base, offset, numBytes); } /** * Creates an UTF8String from String. */ public static UTF8String fromString(String str) { return str == null ? null : fromBytes(str.getBytes(StandardCharsets.UTF_8)); } /** * Creates an UTF8String that contains `length` spaces. 
*/ public static UTF8String blankString(int length) { byte[] spaces = new byte[length]; Arrays.fill(spaces, (byte) ' '); return fromBytes(spaces); } protected UTF8String(Object base, long offset, int numBytes) { this.base = base; this.offset = offset; this.numBytes = numBytes; } // for serialization public UTF8String() { this(null, 0, 0); } /** * Writes the content of this string into a memory address, identified by an object and an offset. * The target memory address must already been allocated, and have enough space to hold all the * bytes in this string. */ public void writeToMemory(Object target, long targetOffset) { Platform.copyMemory(base, offset, target, targetOffset, numBytes); } public void writeTo(ByteBuffer buffer) { assert(buffer.hasArray()); byte[] target = buffer.array(); int offset = buffer.arrayOffset(); int pos = buffer.position(); writeToMemory(target, Platform.BYTE_ARRAY_OFFSET + offset + pos); buffer.position(pos + numBytes); } /** * Returns a {@link ByteBuffer} wrapping the base object if it is a byte array * or a copy of the data if the base object is not a byte array. * * Unlike getBytes this will not create a copy the array if this is a slice. */ @Nonnull public ByteBuffer getByteBuffer() { if (base instanceof byte[] && offset >= BYTE_ARRAY_OFFSET) { final byte[] bytes = (byte[]) base; // the offset includes an object header... this is only needed for unsafe copies final long arrayOffset = offset - BYTE_ARRAY_OFFSET; // verify that the offset and length points somewhere inside the byte array // and that the offset can safely be truncated to a 32-bit integer if ((long) bytes.length < arrayOffset + numBytes) { throw new ArrayIndexOutOfBoundsException(); } return ByteBuffer.wrap(bytes, (int) arrayOffset, numBytes); } else { return ByteBuffer.wrap(getBytes()); } } public void writeTo(OutputStream out) throws IOException { final ByteBuffer bb = this.getByteBuffer(); assert(bb.hasArray()); // similar to Utils.writeByteBuffer but without the spark-core dependency out.write(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()); } /** * Returns the number of bytes for a code point with the first byte as `b` * @param b The first byte of a code point */ private static int numBytesForFirstByte(final byte b) { final int offset = (b & 0xFF) - 192; return (offset >= 0) ? bytesOfCodePointInUTF8[offset] : 1; } /** * Returns the number of bytes */ public int numBytes() { return numBytes; } /** * Returns the number of code points in it. */ public int numChars() { int len = 0; for (int i = 0; i < numBytes; i += numBytesForFirstByte(getByte(i))) { len += 1; } return len; } /** * Returns a 64-bit integer that can be used as the prefix used in sorting. */ public long getPrefix() { // Since JVMs are either 4-byte aligned or 8-byte aligned, we check the size of the string. // If size is 0, just return 0. // If size is between 0 and 4 (inclusive), assume data is 4-byte aligned under the hood and // use a getInt to fetch the prefix. // If size is greater than 4, assume we have at least 8 bytes of data to fetch. // After getting the data, we use a mask to mask out data that is not part of the string. 
long p; long mask = 0; if (IS_LITTLE_ENDIAN) { if (numBytes >= 8) { p = Platform.getLong(base, offset); } else if (numBytes > 4) { p = Platform.getLong(base, offset); mask = (1L << (8 - numBytes) * 8) - 1; } else if (numBytes > 0) { p = (long) Platform.getInt(base, offset); mask = (1L << (8 - numBytes) * 8) - 1; } else { p = 0; } p = java.lang.Long.reverseBytes(p); } else { // byteOrder == ByteOrder.BIG_ENDIAN if (numBytes >= 8) { p = Platform.getLong(base, offset); } else if (numBytes > 4) { p = Platform.getLong(base, offset); mask = (1L << (8 - numBytes) * 8) - 1; } else if (numBytes > 0) { p = ((long) Platform.getInt(base, offset)) << 32; mask = (1L << (8 - numBytes) * 8) - 1; } else { p = 0; } } p &= ~mask; return p; } /** * Returns the underline bytes, will be a copy of it if it's part of another array. */ public byte[] getBytes() { // avoid copy if `base` is `byte[]` if (offset == BYTE_ARRAY_OFFSET && base instanceof byte[] && ((byte[]) base).length == numBytes) { return (byte[]) base; } else { byte[] bytes = new byte[numBytes]; copyMemory(base, offset, bytes, BYTE_ARRAY_OFFSET, numBytes); return bytes; } } /** * Returns a substring of this. * @param start the position of first code point * @param until the position after last code point, exclusive. */ public UTF8String substring(final int start, final int until) { if (until <= start || start >= numBytes) { return EMPTY_UTF8; } int i = 0; int c = 0; while (i < numBytes && c < start) { i += numBytesForFirstByte(getByte(i)); c += 1; } int j = i; while (i < numBytes && c < until) { i += numBytesForFirstByte(getByte(i)); c += 1; } if (i > j) { byte[] bytes = new byte[i - j]; copyMemory(base, offset + j, bytes, BYTE_ARRAY_OFFSET, i - j); return fromBytes(bytes); } else { return EMPTY_UTF8; } } public UTF8String substringSQL(int pos, int length) { // Information regarding the pos calculation: // Hive and SQL use one-based indexing for SUBSTR arguments but also accept zero and // negative indices for start positions. If a start index i is greater than 0, it // refers to element i-1 in the sequence. If a start index i is less than 0, it refers // to the -ith element before the end of the sequence. If a start index i is 0, it // refers to the first element. int len = numChars(); int start = (pos > 0) ? pos -1 : ((pos < 0) ? len + pos : 0); int end = (length == Integer.MAX_VALUE) ? len : start + length; return substring(start, end); } /** * Returns whether this contains `substring` or not. */ public boolean contains(final UTF8String substring) { if (substring.numBytes == 0) { return true; } byte first = substring.getByte(0); for (int i = 0; i <= numBytes - substring.numBytes; i++) { if (getByte(i) == first && matchAt(substring, i)) { return true; } } return false; } /** * Returns the byte at position `i`. 
*/ private byte getByte(int i) { return Platform.getByte(base, offset + i); } private boolean matchAt(final UTF8String s, int pos) { if (s.numBytes + pos > numBytes || pos < 0) { return false; } return ByteArrayMethods.arrayEquals(base, offset + pos, s.base, s.offset, s.numBytes); } public boolean startsWith(final UTF8String prefix) { return matchAt(prefix, 0); } public boolean endsWith(final UTF8String suffix) { return matchAt(suffix, numBytes - suffix.numBytes); } /** * Returns the upper case of this string */ public UTF8String toUpperCase() { if (numBytes == 0) { return EMPTY_UTF8; } byte[] bytes = new byte[numBytes]; bytes[0] = (byte) Character.toTitleCase(getByte(0)); for (int i = 0; i < numBytes; i++) { byte b = getByte(i); if (numBytesForFirstByte(b) != 1) { // fallback return toUpperCaseSlow(); } int upper = Character.toUpperCase((int) b); if (upper > 127) { // fallback return toUpperCaseSlow(); } bytes[i] = (byte) upper; } return fromBytes(bytes); } private UTF8String toUpperCaseSlow() { return fromString(toString().toUpperCase()); } /** * Returns the lower case of this string */ public UTF8String toLowerCase() { if (numBytes == 0) { return EMPTY_UTF8; } byte[] bytes = new byte[numBytes]; bytes[0] = (byte) Character.toTitleCase(getByte(0)); for (int i = 0; i < numBytes; i++) { byte b = getByte(i); if (numBytesForFirstByte(b) != 1) { // fallback return toLowerCaseSlow(); } int lower = Character.toLowerCase((int) b); if (lower > 127) { // fallback return toLowerCaseSlow(); } bytes[i] = (byte) lower; } return fromBytes(bytes); } private UTF8String toLowerCaseSlow() { return fromString(toString().toLowerCase()); } /** * Returns the title case of this string, that could be used as title. */ public UTF8String toTitleCase() { if (numBytes == 0) { return EMPTY_UTF8; } byte[] bytes = new byte[numBytes]; for (int i = 0; i < numBytes; i++) { byte b = getByte(i); if (i == 0 || getByte(i - 1) == ' ') { if (numBytesForFirstByte(b) != 1) { // fallback return toTitleCaseSlow(); } int upper = Character.toTitleCase(b); if (upper > 127) { // fallback return toTitleCaseSlow(); } bytes[i] = (byte) upper; } else { bytes[i] = b; } } return fromBytes(bytes); } private UTF8String toTitleCaseSlow() { StringBuffer sb = new StringBuffer(); String s = toString(); sb.append(s); sb.setCharAt(0, Character.toTitleCase(sb.charAt(0))); for (int i = 1; i < s.length(); i++) { if (sb.charAt(i - 1) == ' ') { sb.setCharAt(i, Character.toTitleCase(sb.charAt(i))); } } return fromString(sb.toString()); } /* * Returns the index of the string `match` in this String. This string has to be a comma separated * list. If `match` contains a comma 0 will be returned. If the `match` isn't part of this String, * 0 will be returned, else the index of match (1-based index) */ public int findInSet(UTF8String match) { if (match.contains(COMMA_UTF8)) { return 0; } int n = 1, lastComma = -1; for (int i = 0; i < numBytes; i++) { if (getByte(i) == (byte) ',') { if (i - (lastComma + 1) == match.numBytes && ByteArrayMethods.arrayEquals(base, offset + (lastComma + 1), match.base, match.offset, match.numBytes)) { return n; } lastComma = i; n++; } } if (numBytes - (lastComma + 1) == match.numBytes && ByteArrayMethods.arrayEquals(base, offset + (lastComma + 1), match.base, match.offset, match.numBytes)) { return n; } return 0; } /** * Copy the bytes from the current UTF8String, and make a new UTF8String. * @param start the start position of the current UTF8String in bytes. * @param end the end position of the current UTF8String in bytes. 
* @return a new UTF8String in the position of [start, end] of current UTF8String bytes. */ private UTF8String copyUTF8String(int start, int end) { int len = end - start + 1; byte[] newBytes = new byte[len]; copyMemory(base, offset + start, newBytes, BYTE_ARRAY_OFFSET, len); return UTF8String.fromBytes(newBytes); } public UTF8String trim() { int s = 0; int e = this.numBytes - 1; // skip all of the space (0x20) in the left side while (s < this.numBytes && getByte(s) == 0x20) s++; // skip all of the space (0x20) in the right side while (e >= 0 && getByte(e) == 0x20) e--; if (s > e) { // empty string return EMPTY_UTF8; } else { return copyUTF8String(s, e); } } public UTF8String trimLeft() { int s = 0; // skip all of the space (0x20) in the left side while (s < this.numBytes && getByte(s) == 0x20) s++; if (s == this.numBytes) { // empty string return EMPTY_UTF8; } else { return copyUTF8String(s, this.numBytes - 1); } } public UTF8String trimRight() { int e = numBytes - 1; // skip all of the space (0x20) in the right side while (e >= 0 && getByte(e) == 0x20) e--; if (e < 0) { // empty string return EMPTY_UTF8; } else { return copyUTF8String(0, e); } } public UTF8String reverse() { byte[] result = new byte[this.numBytes]; int i = 0; // position in byte while (i < numBytes) { int len = numBytesForFirstByte(getByte(i)); copyMemory(this.base, this.offset + i, result, BYTE_ARRAY_OFFSET + result.length - i - len, len); i += len; } return UTF8String.fromBytes(result); } public UTF8String repeat(int times) { if (times <= 0) { return EMPTY_UTF8; } byte[] newBytes = new byte[numBytes * times]; copyMemory(this.base, this.offset, newBytes, BYTE_ARRAY_OFFSET, numBytes); int copied = 1; while (copied < times) { int toCopy = Math.min(copied, times - copied); System.arraycopy(newBytes, 0, newBytes, copied * numBytes, numBytes * toCopy); copied += toCopy; } return UTF8String.fromBytes(newBytes); } /** * Returns the position of the first occurrence of substr in * current string from the specified position (0-based index). * * @param v the string to be searched * @param start the start position of the current string for searching * @return the position of the first occurrence of substr, if not found, -1 returned. */ public int indexOf(UTF8String v, int start) { if (v.numBytes() == 0) { return 0; } // locate to the start position. int i = 0; // position in byte int c = 0; // position in character while (i < numBytes && c < start) { i += numBytesForFirstByte(getByte(i)); c += 1; } do { if (i + v.numBytes > numBytes) { return -1; } if (ByteArrayMethods.arrayEquals(base, offset + i, v.base, v.offset, v.numBytes)) { return c; } i += numBytesForFirstByte(getByte(i)); c += 1; } while (i < numBytes); return -1; } /** * Find the `str` from left to right. */ private int find(UTF8String str, int start) { assert (str.numBytes > 0); while (start <= numBytes - str.numBytes) { if (ByteArrayMethods.arrayEquals(base, offset + start, str.base, str.offset, str.numBytes)) { return start; } start += 1; } return -1; } /** * Find the `str` from right to left. */ private int rfind(UTF8String str, int start) { assert (str.numBytes > 0); while (start >= 0) { if (ByteArrayMethods.arrayEquals(base, offset + start, str.base, str.offset, str.numBytes)) { return start; } start -= 1; } return -1; } /** * Returns the substring from string str before count occurrences of the delimiter delim. * If count is positive, everything the left of the final delimiter (counting from left) is * returned. 
If count is negative, every to the right of the final delimiter (counting from the * right) is returned. subStringIndex performs a case-sensitive match when searching for delim. */ public UTF8String subStringIndex(UTF8String delim, int count) { if (delim.numBytes == 0 || count == 0) { return EMPTY_UTF8; } if (count > 0) { int idx = -1; while (count > 0) { idx = find(delim, idx + 1); if (idx >= 0) { count --; } else { // can not find enough delim return this; } } if (idx == 0) { return EMPTY_UTF8; } byte[] bytes = new byte[idx]; copyMemory(base, offset, bytes, BYTE_ARRAY_OFFSET, idx); return fromBytes(bytes); } else { int idx = numBytes - delim.numBytes + 1; count = -count; while (count > 0) { idx = rfind(delim, idx - 1); if (idx >= 0) { count --; } else { // can not find enough delim return this; } } if (idx + delim.numBytes == numBytes) { return EMPTY_UTF8; } int size = numBytes - delim.numBytes - idx; byte[] bytes = new byte[size]; copyMemory(base, offset + idx + delim.numBytes, bytes, BYTE_ARRAY_OFFSET, size); return fromBytes(bytes); } } /** * Returns str, right-padded with pad to a length of len * For example: * ('hi', 5, '??') =&gt; 'hi???' * ('hi', 1, '??') =&gt; 'h' */ public UTF8String rpad(int len, UTF8String pad) { int spaces = len - this.numChars(); // number of char need to pad if (spaces <= 0 || pad.numBytes() == 0) { // no padding at all, return the substring of the current string return substring(0, len); } else { int padChars = pad.numChars(); int count = spaces / padChars; // how many padding string needed // the partial string of the padding UTF8String remain = pad.substring(0, spaces - padChars * count); byte[] data = new byte[this.numBytes + pad.numBytes * count + remain.numBytes]; copyMemory(this.base, this.offset, data, BYTE_ARRAY_OFFSET, this.numBytes); int offset = this.numBytes; int idx = 0; while (idx < count) { copyMemory(pad.base, pad.offset, data, BYTE_ARRAY_OFFSET + offset, pad.numBytes); ++ idx; offset += pad.numBytes; } copyMemory(remain.base, remain.offset, data, BYTE_ARRAY_OFFSET + offset, remain.numBytes); return UTF8String.fromBytes(data); } } /** * Returns str, left-padded with pad to a length of len. * For example: * ('hi', 5, '??') =&gt; '???hi' * ('hi', 1, '??') =&gt; 'h' */ public UTF8String lpad(int len, UTF8String pad) { int spaces = len - this.numChars(); // number of char need to pad if (spaces <= 0 || pad.numBytes() == 0) { // no padding at all, return the substring of the current string return substring(0, len); } else { int padChars = pad.numChars(); int count = spaces / padChars; // how many padding string needed // the partial string of the padding UTF8String remain = pad.substring(0, spaces - padChars * count); byte[] data = new byte[this.numBytes + pad.numBytes * count + remain.numBytes]; int offset = 0; int idx = 0; while (idx < count) { copyMemory(pad.base, pad.offset, data, BYTE_ARRAY_OFFSET + offset, pad.numBytes); ++ idx; offset += pad.numBytes; } copyMemory(remain.base, remain.offset, data, BYTE_ARRAY_OFFSET + offset, remain.numBytes); offset += remain.numBytes; copyMemory(this.base, this.offset, data, BYTE_ARRAY_OFFSET + offset, numBytes()); return UTF8String.fromBytes(data); } } /** * Concatenates input strings together into a single string. Returns null if any input is null. */ public static UTF8String concat(UTF8String... inputs) { // Compute the total length of the result. 
int totalLength = 0; for (int i = 0; i < inputs.length; i++) { if (inputs[i] != null) { totalLength += inputs[i].numBytes; } else { return null; } } // Allocate a new byte array, and copy the inputs one by one into it. final byte[] result = new byte[totalLength]; int offset = 0; for (int i = 0; i < inputs.length; i++) { int len = inputs[i].numBytes; copyMemory( inputs[i].base, inputs[i].offset, result, BYTE_ARRAY_OFFSET + offset, len); offset += len; } return fromBytes(result); } /** * Concatenates input strings together into a single string using the separator. * A null input is skipped. For example, concat(",", "a", null, "c") would yield "a,c". */ public static UTF8String concatWs(UTF8String separator, UTF8String... inputs) { if (separator == null) { return null; } int numInputBytes = 0; // total number of bytes from the inputs int numInputs = 0; // number of non-null inputs for (int i = 0; i < inputs.length; i++) { if (inputs[i] != null) { numInputBytes += inputs[i].numBytes; numInputs++; } } if (numInputs == 0) { // Return an empty string if there is no input, or all the inputs are null. return EMPTY_UTF8; } // Allocate a new byte array, and copy the inputs one by one into it. // The size of the new array is the size of all inputs, plus the separators. final byte[] result = new byte[numInputBytes + (numInputs - 1) * separator.numBytes]; int offset = 0; for (int i = 0, j = 0; i < inputs.length; i++) { if (inputs[i] != null) { int len = inputs[i].numBytes; copyMemory( inputs[i].base, inputs[i].offset, result, BYTE_ARRAY_OFFSET + offset, len); offset += len; j++; // Add separator if this is not the last input. if (j < numInputs) { copyMemory( separator.base, separator.offset, result, BYTE_ARRAY_OFFSET + offset, separator.numBytes); offset += separator.numBytes; } } } return fromBytes(result); } public UTF8String[] split(UTF8String pattern, int limit) { String[] splits = toString().split(pattern.toString(), limit); UTF8String[] res = new UTF8String[splits.length]; for (int i = 0; i < res.length; i++) { res[i] = fromString(splits[i]); } return res; } public UTF8String replace(UTF8String search, UTF8String replace) { if (EMPTY_UTF8.equals(search)) { return this; } String replaced = toString().replace( search.toString(), replace.toString()); return fromString(replaced); } // TODO: Need to use `Code Point` here instead of Char in case the character longer than 2 bytes public UTF8String translate(Map<Character, Character> dict) { String srcStr = this.toString(); StringBuilder sb = new StringBuilder(); for(int k = 0; k< srcStr.length(); k++) { if (null == dict.get(srcStr.charAt(k))) { sb.append(srcStr.charAt(k)); } else if ('\0' != dict.get(srcStr.charAt(k))){ sb.append(dict.get(srcStr.charAt(k))); } } return fromString(sb.toString()); } /** * Wrapper over `long` to allow result of parsing long from string to be accessed via reference. * This is done solely for better performance and is not expected to be used by end users. */ public static class LongWrapper implements Serializable { public transient long value = 0; } /** * Wrapper over `int` to allow result of parsing integer from string to be accessed via reference. * This is done solely for better performance and is not expected to be used by end users. * * {@link LongWrapper} could have been used here but using `int` directly save the extra cost of * conversion from `long` to `int` */ public static class IntWrapper implements Serializable { public transient int value = 0; } /** * Parses this UTF8String to long. 
* * Note that, in this method we accumulate the result in negative format, and convert it to * positive format at the end, if this string is not started with '-'. This is because min value * is bigger than max value in digits, e.g. Long.MAX_VALUE is '9223372036854775807' and * Long.MIN_VALUE is '-9223372036854775808'. * * This code is mostly copied from LazyLong.parseLong in Hive. * * @param toLongResult If a valid `long` was parsed from this UTF8String, then its value would * be set in `toLongResult` * @return true if the parsing was successful else false */ public boolean toLong(LongWrapper toLongResult) { if (numBytes == 0) { return false; } byte b = getByte(0); final boolean negative = b == '-'; int offset = 0; if (negative || b == '+') { offset++; if (numBytes == 1) { return false; } } final byte separator = '.'; final int radix = 10; final long stopValue = Long.MIN_VALUE / radix; long result = 0; while (offset < numBytes) { b = getByte(offset); offset++; if (b == separator) { // We allow decimals and will return a truncated integral in that case. // Therefore we won't throw an exception here (checking the fractional // part happens below.) break; } int digit; if (b >= '0' && b <= '9') { digit = b - '0'; } else { return false; } // We are going to process the new digit and accumulate the result. However, before doing // this, if the result is already smaller than the stopValue(Long.MIN_VALUE / radix), then // result * 10 will definitely be smaller than minValue, and we can stop. if (result < stopValue) { return false; } result = result * radix - digit; // Since the previous result is less than or equal to stopValue(Long.MIN_VALUE / radix), we // can just use `result > 0` to check overflow. If result overflows, we should stop. if (result > 0) { return false; } } // This is the case when we've encountered a decimal separator. The fractional // part will not change the number, but we will verify that the fractional part // is well formed. while (offset < numBytes) { byte currentByte = getByte(offset); if (currentByte < '0' || currentByte > '9') { return false; } offset++; } if (!negative) { result = -result; if (result < 0) { return false; } } toLongResult.value = result; return true; } /** * Parses this UTF8String to int. * * Note that, in this method we accumulate the result in negative format, and convert it to * positive format at the end, if this string is not started with '-'. This is because min value * is bigger than max value in digits, e.g. Integer.MAX_VALUE is '2147483647' and * Integer.MIN_VALUE is '-2147483648'. * * This code is mostly copied from LazyInt.parseInt in Hive. * * Note that, this method is almost same as `toLong`, but we leave it duplicated for performance * reasons, like Hive does. * * @param intWrapper If a valid `int` was parsed from this UTF8String, then its value would * be set in `intWrapper` * @return true if the parsing was successful else false */ public boolean toInt(IntWrapper intWrapper) { if (numBytes == 0) { return false; } byte b = getByte(0); final boolean negative = b == '-'; int offset = 0; if (negative || b == '+') { offset++; if (numBytes == 1) { return false; } } final byte separator = '.'; final int radix = 10; final int stopValue = Integer.MIN_VALUE / radix; int result = 0; while (offset < numBytes) { b = getByte(offset); offset++; if (b == separator) { // We allow decimals and will return a truncated integral in that case. // Therefore we won't throw an exception here (checking the fractional // part happens below.) 
break; } int digit; if (b >= '0' && b <= '9') { digit = b - '0'; } else { return false; } // We are going to process the new digit and accumulate the result. However, before doing // this, if the result is already smaller than the stopValue(Integer.MIN_VALUE / radix), then // result * 10 will definitely be smaller than minValue, and we can stop if (result < stopValue) { return false; } result = result * radix - digit; // Since the previous result is less than or equal to stopValue(Integer.MIN_VALUE / radix), // we can just use `result > 0` to check overflow. If result overflows, we should stop if (result > 0) { return false; } } // This is the case when we've encountered a decimal separator. The fractional // part will not change the number, but we will verify that the fractional part // is well formed. while (offset < numBytes) { byte currentByte = getByte(offset); if (currentByte < '0' || currentByte > '9') { return false; } offset++; } if (!negative) { result = -result; if (result < 0) { return false; } } intWrapper.value = result; return true; } public boolean toShort(IntWrapper intWrapper) { if (toInt(intWrapper)) { int intValue = intWrapper.value; short result = (short) intValue; if (result == intValue) { return true; } } return false; } public boolean toByte(IntWrapper intWrapper) { if (toInt(intWrapper)) { int intValue = intWrapper.value; byte result = (byte) intValue; if (result == intValue) { return true; } } return false; } @Override public String toString() { return new String(getBytes(), StandardCharsets.UTF_8); } @Override public UTF8String clone() { return fromBytes(getBytes()); } public UTF8String copy() { byte[] bytes = new byte[numBytes]; copyMemory(base, offset, bytes, BYTE_ARRAY_OFFSET, numBytes); return fromBytes(bytes); } @Override public int compareTo(@Nonnull final UTF8String other) { int len = Math.min(numBytes, other.numBytes); int wordMax = (len / 8) * 8; long roffset = other.offset; Object rbase = other.base; for (int i = 0; i < wordMax; i += 8) { long left = getLong(base, offset + i); long right = getLong(rbase, roffset + i); if (left != right) { if (IS_LITTLE_ENDIAN) { return Long.compareUnsigned(Long.reverseBytes(left), Long.reverseBytes(right)); } else { return Long.compareUnsigned(left, right); } } } for (int i = wordMax; i < len; i++) { // In UTF-8, the byte should be unsigned, so we should compare them as unsigned int. int res = (getByte(i) & 0xFF) - (Platform.getByte(rbase, roffset + i) & 0xFF); if (res != 0) { return res; } } return numBytes - other.numBytes; } public int compare(final UTF8String other) { return compareTo(other); } @Override public boolean equals(final Object other) { if (other instanceof UTF8String) { UTF8String o = (UTF8String) other; if (numBytes != o.numBytes) { return false; } return ByteArrayMethods.arrayEquals(base, offset, o.base, o.offset, numBytes); } else { return false; } } /** * Levenshtein distance is a metric for measuring the distance of two strings. The distance is * defined by the minimum number of single-character edits (i.e. insertions, deletions or * substitutions) that are required to change one of the strings into the other. 
*/ public int levenshteinDistance(UTF8String other) { // Implementation adopted from org.apache.common.lang3.StringUtils.getLevenshteinDistance int n = numChars(); int m = other.numChars(); if (n == 0) { return m; } else if (m == 0) { return n; } UTF8String s, t; if (n <= m) { s = this; t = other; } else { s = other; t = this; int swap; swap = n; n = m; m = swap; } int[] p = new int[n + 1]; int[] d = new int[n + 1]; int[] swap; int i, i_bytes, j, j_bytes, num_bytes_j, cost; for (i = 0; i <= n; i++) { p[i] = i; } for (j = 0, j_bytes = 0; j < m; j_bytes += num_bytes_j, j++) { num_bytes_j = numBytesForFirstByte(t.getByte(j_bytes)); d[0] = j + 1; for (i = 0, i_bytes = 0; i < n; i_bytes += numBytesForFirstByte(s.getByte(i_bytes)), i++) { if (s.getByte(i_bytes) != t.getByte(j_bytes) || num_bytes_j != numBytesForFirstByte(s.getByte(i_bytes))) { cost = 1; } else { cost = (ByteArrayMethods.arrayEquals(t.base, t.offset + j_bytes, s.base, s.offset + i_bytes, num_bytes_j)) ? 0 : 1; } d[i + 1] = Math.min(Math.min(d[i] + 1, p[i + 1] + 1), p[i] + cost); } swap = p; p = d; d = swap; } return p[n]; } @Override public int hashCode() { return Murmur3_x86_32.hashUnsafeBytes(base, offset, numBytes, 42); } /** * Soundex mapping table */ private static final byte[] US_ENGLISH_MAPPING = {'0', '1', '2', '3', '0', '1', '2', '7', '0', '2', '2', '4', '5', '5', '0', '1', '2', '6', '2', '3', '0', '1', '7', '2', '0', '2'}; /** * Encodes a string into a Soundex value. Soundex is an encoding used to relate similar names, * but can also be used as a general purpose scheme to find word with similar phonemes. * https://en.wikipedia.org/wiki/Soundex */ public UTF8String soundex() { if (numBytes == 0) { return EMPTY_UTF8; } byte b = getByte(0); if ('a' <= b && b <= 'z') { b -= 32; } else if (b < 'A' || 'Z' < b) { // first character must be a letter return this; } byte[] sx = {'0', '0', '0', '0'}; sx[0] = b; int sxi = 1; int idx = b - 'A'; byte lastCode = US_ENGLISH_MAPPING[idx]; for (int i = 1; i < numBytes; i++) { b = getByte(i); if ('a' <= b && b <= 'z') { b -= 32; } else if (b < 'A' || 'Z' < b) { // not a letter, skip it lastCode = '0'; continue; } idx = b - 'A'; byte code = US_ENGLISH_MAPPING[idx]; if (code == '7') { // ignore it } else { if (code != '0' && code != lastCode) { sx[sxi++] = code; if (sxi > 3) break; } lastCode = code; } } return UTF8String.fromBytes(sx); } public void writeExternal(ObjectOutput out) throws IOException { byte[] bytes = getBytes(); out.writeInt(bytes.length); out.write(bytes); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { offset = BYTE_ARRAY_OFFSET; numBytes = in.readInt(); base = new byte[numBytes]; in.readFully((byte[]) base); } @Override public void write(Kryo kryo, Output out) { byte[] bytes = getBytes(); out.writeInt(bytes.length); out.write(bytes); } @Override public void read(Kryo kryo, Input in) { this.offset = BYTE_ARRAY_OFFSET; this.numBytes = in.readInt(); this.base = new byte[numBytes]; in.read((byte[]) base); } }
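A minimal, hypothetical usage sketch of the UTF8String class shown in the new file contents above. The class name, the sample strings, and the assumption that the spark-unsafe module (org.apache.spark.unsafe.types.UTF8String) is on the classpath are illustrative only and are not part of this record. It checks that compareTo orders strings by unsigned byte value, which is the contract the word-at-a-time comparison and the per-byte tail comparison above are meant to preserve.

import java.nio.charset.StandardCharsets;
import org.apache.spark.unsafe.types.UTF8String;

public class UTF8StringCompareDemo {
  public static void main(String[] args) {
    // Non-ASCII UTF-8 bytes have the high bit set; a signed byte comparison would
    // order them before ASCII, so compareTo must treat bytes as unsigned.
    UTF8String ascii = UTF8String.fromString("abc");      // illustrative value
    UTF8String nonAscii = UTF8String.fromString("héllo"); // illustrative value

    // Expected: "abc" sorts before "héllo" under unsigned byte order.
    System.out.println(ascii.compareTo(nonAscii) < 0);

    // Reference result: compare the raw UTF-8 bytes unsigned, byte by byte.
    byte[] a = "abc".getBytes(StandardCharsets.UTF_8);
    byte[] b = "héllo".getBytes(StandardCharsets.UTF_8);
    int len = Math.min(a.length, b.length);
    int cmp = 0;
    for (int i = 0; i < len && cmp == 0; i++) {
      cmp = (a[i] & 0xFF) - (b[i] & 0xFF);
    }
    if (cmp == 0) {
      cmp = a.length - b.length;
    }
    System.out.println(cmp < 0); // should agree with compareTo above
  }
}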
common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.unsafe.types; import javax.annotation.Nonnull; import java.io.*; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Map; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.KryoSerializable; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import org.apache.spark.unsafe.Platform; import org.apache.spark.unsafe.array.ByteArrayMethods; import org.apache.spark.unsafe.hash.Murmur3_x86_32; import static org.apache.spark.unsafe.Platform.*; /** * A UTF-8 String for internal Spark use. * <p> * A String encoded in UTF-8 as an Array[Byte], which can be used for comparison, * search, see http://en.wikipedia.org/wiki/UTF-8 for details. * <p> * Note: This is not designed for general use cases, should not be used outside SQL. */ public final class UTF8String implements Comparable<UTF8String>, Externalizable, KryoSerializable, Cloneable { // These are only updated by readExternal() or read() @Nonnull private Object base; private long offset; private int numBytes; public Object getBaseObject() { return base; } public long getBaseOffset() { return offset; } private static int[] bytesOfCodePointInUTF8 = {2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6}; private static boolean isLittleEndian = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN; private static final UTF8String COMMA_UTF8 = UTF8String.fromString(","); public static final UTF8String EMPTY_UTF8 = UTF8String.fromString(""); /** * Creates an UTF8String from byte array, which should be encoded in UTF-8. * * Note: `bytes` will be hold by returned UTF8String. */ public static UTF8String fromBytes(byte[] bytes) { if (bytes != null) { return new UTF8String(bytes, BYTE_ARRAY_OFFSET, bytes.length); } else { return null; } } /** * Creates an UTF8String from byte array, which should be encoded in UTF-8. * * Note: `bytes` will be hold by returned UTF8String. */ public static UTF8String fromBytes(byte[] bytes, int offset, int numBytes) { if (bytes != null) { return new UTF8String(bytes, BYTE_ARRAY_OFFSET + offset, numBytes); } else { return null; } } /** * Creates an UTF8String from given address (base and offset) and length. */ public static UTF8String fromAddress(Object base, long offset, int numBytes) { return new UTF8String(base, offset, numBytes); } /** * Creates an UTF8String from String. */ public static UTF8String fromString(String str) { return str == null ? null : fromBytes(str.getBytes(StandardCharsets.UTF_8)); } /** * Creates an UTF8String that contains `length` spaces. 
*/ public static UTF8String blankString(int length) { byte[] spaces = new byte[length]; Arrays.fill(spaces, (byte) ' '); return fromBytes(spaces); } protected UTF8String(Object base, long offset, int numBytes) { this.base = base; this.offset = offset; this.numBytes = numBytes; } // for serialization public UTF8String() { this(null, 0, 0); } /** * Writes the content of this string into a memory address, identified by an object and an offset. * The target memory address must already been allocated, and have enough space to hold all the * bytes in this string. */ public void writeToMemory(Object target, long targetOffset) { Platform.copyMemory(base, offset, target, targetOffset, numBytes); } public void writeTo(ByteBuffer buffer) { assert(buffer.hasArray()); byte[] target = buffer.array(); int offset = buffer.arrayOffset(); int pos = buffer.position(); writeToMemory(target, Platform.BYTE_ARRAY_OFFSET + offset + pos); buffer.position(pos + numBytes); } /** * Returns a {@link ByteBuffer} wrapping the base object if it is a byte array * or a copy of the data if the base object is not a byte array. * * Unlike getBytes this will not create a copy the array if this is a slice. */ @Nonnull public ByteBuffer getByteBuffer() { if (base instanceof byte[] && offset >= BYTE_ARRAY_OFFSET) { final byte[] bytes = (byte[]) base; // the offset includes an object header... this is only needed for unsafe copies final long arrayOffset = offset - BYTE_ARRAY_OFFSET; // verify that the offset and length points somewhere inside the byte array // and that the offset can safely be truncated to a 32-bit integer if ((long) bytes.length < arrayOffset + numBytes) { throw new ArrayIndexOutOfBoundsException(); } return ByteBuffer.wrap(bytes, (int) arrayOffset, numBytes); } else { return ByteBuffer.wrap(getBytes()); } } public void writeTo(OutputStream out) throws IOException { final ByteBuffer bb = this.getByteBuffer(); assert(bb.hasArray()); // similar to Utils.writeByteBuffer but without the spark-core dependency out.write(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()); } /** * Returns the number of bytes for a code point with the first byte as `b` * @param b The first byte of a code point */ private static int numBytesForFirstByte(final byte b) { final int offset = (b & 0xFF) - 192; return (offset >= 0) ? bytesOfCodePointInUTF8[offset] : 1; } /** * Returns the number of bytes */ public int numBytes() { return numBytes; } /** * Returns the number of code points in it. */ public int numChars() { int len = 0; for (int i = 0; i < numBytes; i += numBytesForFirstByte(getByte(i))) { len += 1; } return len; } /** * Returns a 64-bit integer that can be used as the prefix used in sorting. */ public long getPrefix() { // Since JVMs are either 4-byte aligned or 8-byte aligned, we check the size of the string. // If size is 0, just return 0. // If size is between 0 and 4 (inclusive), assume data is 4-byte aligned under the hood and // use a getInt to fetch the prefix. // If size is greater than 4, assume we have at least 8 bytes of data to fetch. // After getting the data, we use a mask to mask out data that is not part of the string. 
long p; long mask = 0; if (isLittleEndian) { if (numBytes >= 8) { p = Platform.getLong(base, offset); } else if (numBytes > 4) { p = Platform.getLong(base, offset); mask = (1L << (8 - numBytes) * 8) - 1; } else if (numBytes > 0) { p = (long) Platform.getInt(base, offset); mask = (1L << (8 - numBytes) * 8) - 1; } else { p = 0; } p = java.lang.Long.reverseBytes(p); } else { // byteOrder == ByteOrder.BIG_ENDIAN if (numBytes >= 8) { p = Platform.getLong(base, offset); } else if (numBytes > 4) { p = Platform.getLong(base, offset); mask = (1L << (8 - numBytes) * 8) - 1; } else if (numBytes > 0) { p = ((long) Platform.getInt(base, offset)) << 32; mask = (1L << (8 - numBytes) * 8) - 1; } else { p = 0; } } p &= ~mask; return p; } /** * Returns the underline bytes, will be a copy of it if it's part of another array. */ public byte[] getBytes() { // avoid copy if `base` is `byte[]` if (offset == BYTE_ARRAY_OFFSET && base instanceof byte[] && ((byte[]) base).length == numBytes) { return (byte[]) base; } else { byte[] bytes = new byte[numBytes]; copyMemory(base, offset, bytes, BYTE_ARRAY_OFFSET, numBytes); return bytes; } } /** * Returns a substring of this. * @param start the position of first code point * @param until the position after last code point, exclusive. */ public UTF8String substring(final int start, final int until) { if (until <= start || start >= numBytes) { return EMPTY_UTF8; } int i = 0; int c = 0; while (i < numBytes && c < start) { i += numBytesForFirstByte(getByte(i)); c += 1; } int j = i; while (i < numBytes && c < until) { i += numBytesForFirstByte(getByte(i)); c += 1; } if (i > j) { byte[] bytes = new byte[i - j]; copyMemory(base, offset + j, bytes, BYTE_ARRAY_OFFSET, i - j); return fromBytes(bytes); } else { return EMPTY_UTF8; } } public UTF8String substringSQL(int pos, int length) { // Information regarding the pos calculation: // Hive and SQL use one-based indexing for SUBSTR arguments but also accept zero and // negative indices for start positions. If a start index i is greater than 0, it // refers to element i-1 in the sequence. If a start index i is less than 0, it refers // to the -ith element before the end of the sequence. If a start index i is 0, it // refers to the first element. int len = numChars(); int start = (pos > 0) ? pos -1 : ((pos < 0) ? len + pos : 0); int end = (length == Integer.MAX_VALUE) ? len : start + length; return substring(start, end); } /** * Returns whether this contains `substring` or not. */ public boolean contains(final UTF8String substring) { if (substring.numBytes == 0) { return true; } byte first = substring.getByte(0); for (int i = 0; i <= numBytes - substring.numBytes; i++) { if (getByte(i) == first && matchAt(substring, i)) { return true; } } return false; } /** * Returns the byte at position `i`. 
*/ private byte getByte(int i) { return Platform.getByte(base, offset + i); } private boolean matchAt(final UTF8String s, int pos) { if (s.numBytes + pos > numBytes || pos < 0) { return false; } return ByteArrayMethods.arrayEquals(base, offset + pos, s.base, s.offset, s.numBytes); } public boolean startsWith(final UTF8String prefix) { return matchAt(prefix, 0); } public boolean endsWith(final UTF8String suffix) { return matchAt(suffix, numBytes - suffix.numBytes); } /** * Returns the upper case of this string */ public UTF8String toUpperCase() { if (numBytes == 0) { return EMPTY_UTF8; } byte[] bytes = new byte[numBytes]; bytes[0] = (byte) Character.toTitleCase(getByte(0)); for (int i = 0; i < numBytes; i++) { byte b = getByte(i); if (numBytesForFirstByte(b) != 1) { // fallback return toUpperCaseSlow(); } int upper = Character.toUpperCase((int) b); if (upper > 127) { // fallback return toUpperCaseSlow(); } bytes[i] = (byte) upper; } return fromBytes(bytes); } private UTF8String toUpperCaseSlow() { return fromString(toString().toUpperCase()); } /** * Returns the lower case of this string */ public UTF8String toLowerCase() { if (numBytes == 0) { return EMPTY_UTF8; } byte[] bytes = new byte[numBytes]; bytes[0] = (byte) Character.toTitleCase(getByte(0)); for (int i = 0; i < numBytes; i++) { byte b = getByte(i); if (numBytesForFirstByte(b) != 1) { // fallback return toLowerCaseSlow(); } int lower = Character.toLowerCase((int) b); if (lower > 127) { // fallback return toLowerCaseSlow(); } bytes[i] = (byte) lower; } return fromBytes(bytes); } private UTF8String toLowerCaseSlow() { return fromString(toString().toLowerCase()); } /** * Returns the title case of this string, that could be used as title. */ public UTF8String toTitleCase() { if (numBytes == 0) { return EMPTY_UTF8; } byte[] bytes = new byte[numBytes]; for (int i = 0; i < numBytes; i++) { byte b = getByte(i); if (i == 0 || getByte(i - 1) == ' ') { if (numBytesForFirstByte(b) != 1) { // fallback return toTitleCaseSlow(); } int upper = Character.toTitleCase(b); if (upper > 127) { // fallback return toTitleCaseSlow(); } bytes[i] = (byte) upper; } else { bytes[i] = b; } } return fromBytes(bytes); } private UTF8String toTitleCaseSlow() { StringBuffer sb = new StringBuffer(); String s = toString(); sb.append(s); sb.setCharAt(0, Character.toTitleCase(sb.charAt(0))); for (int i = 1; i < s.length(); i++) { if (sb.charAt(i - 1) == ' ') { sb.setCharAt(i, Character.toTitleCase(sb.charAt(i))); } } return fromString(sb.toString()); } /* * Returns the index of the string `match` in this String. This string has to be a comma separated * list. If `match` contains a comma 0 will be returned. If the `match` isn't part of this String, * 0 will be returned, else the index of match (1-based index) */ public int findInSet(UTF8String match) { if (match.contains(COMMA_UTF8)) { return 0; } int n = 1, lastComma = -1; for (int i = 0; i < numBytes; i++) { if (getByte(i) == (byte) ',') { if (i - (lastComma + 1) == match.numBytes && ByteArrayMethods.arrayEquals(base, offset + (lastComma + 1), match.base, match.offset, match.numBytes)) { return n; } lastComma = i; n++; } } if (numBytes - (lastComma + 1) == match.numBytes && ByteArrayMethods.arrayEquals(base, offset + (lastComma + 1), match.base, match.offset, match.numBytes)) { return n; } return 0; } /** * Copy the bytes from the current UTF8String, and make a new UTF8String. * @param start the start position of the current UTF8String in bytes. * @param end the end position of the current UTF8String in bytes. 
* @return a new UTF8String in the position of [start, end] of current UTF8String bytes. */ private UTF8String copyUTF8String(int start, int end) { int len = end - start + 1; byte[] newBytes = new byte[len]; copyMemory(base, offset + start, newBytes, BYTE_ARRAY_OFFSET, len); return UTF8String.fromBytes(newBytes); } public UTF8String trim() { int s = 0; int e = this.numBytes - 1; // skip all of the space (0x20) in the left side while (s < this.numBytes && getByte(s) == 0x20) s++; // skip all of the space (0x20) in the right side while (e >= 0 && getByte(e) == 0x20) e--; if (s > e) { // empty string return EMPTY_UTF8; } else { return copyUTF8String(s, e); } } public UTF8String trimLeft() { int s = 0; // skip all of the space (0x20) in the left side while (s < this.numBytes && getByte(s) == 0x20) s++; if (s == this.numBytes) { // empty string return EMPTY_UTF8; } else { return copyUTF8String(s, this.numBytes - 1); } } public UTF8String trimRight() { int e = numBytes - 1; // skip all of the space (0x20) in the right side while (e >= 0 && getByte(e) == 0x20) e--; if (e < 0) { // empty string return EMPTY_UTF8; } else { return copyUTF8String(0, e); } } public UTF8String reverse() { byte[] result = new byte[this.numBytes]; int i = 0; // position in byte while (i < numBytes) { int len = numBytesForFirstByte(getByte(i)); copyMemory(this.base, this.offset + i, result, BYTE_ARRAY_OFFSET + result.length - i - len, len); i += len; } return UTF8String.fromBytes(result); } public UTF8String repeat(int times) { if (times <= 0) { return EMPTY_UTF8; } byte[] newBytes = new byte[numBytes * times]; copyMemory(this.base, this.offset, newBytes, BYTE_ARRAY_OFFSET, numBytes); int copied = 1; while (copied < times) { int toCopy = Math.min(copied, times - copied); System.arraycopy(newBytes, 0, newBytes, copied * numBytes, numBytes * toCopy); copied += toCopy; } return UTF8String.fromBytes(newBytes); } /** * Returns the position of the first occurrence of substr in * current string from the specified position (0-based index). * * @param v the string to be searched * @param start the start position of the current string for searching * @return the position of the first occurrence of substr, if not found, -1 returned. */ public int indexOf(UTF8String v, int start) { if (v.numBytes() == 0) { return 0; } // locate to the start position. int i = 0; // position in byte int c = 0; // position in character while (i < numBytes && c < start) { i += numBytesForFirstByte(getByte(i)); c += 1; } do { if (i + v.numBytes > numBytes) { return -1; } if (ByteArrayMethods.arrayEquals(base, offset + i, v.base, v.offset, v.numBytes)) { return c; } i += numBytesForFirstByte(getByte(i)); c += 1; } while (i < numBytes); return -1; } /** * Find the `str` from left to right. */ private int find(UTF8String str, int start) { assert (str.numBytes > 0); while (start <= numBytes - str.numBytes) { if (ByteArrayMethods.arrayEquals(base, offset + start, str.base, str.offset, str.numBytes)) { return start; } start += 1; } return -1; } /** * Find the `str` from right to left. */ private int rfind(UTF8String str, int start) { assert (str.numBytes > 0); while (start >= 0) { if (ByteArrayMethods.arrayEquals(base, offset + start, str.base, str.offset, str.numBytes)) { return start; } start -= 1; } return -1; } /** * Returns the substring from string str before count occurrences of the delimiter delim. * If count is positive, everything the left of the final delimiter (counting from left) is * returned. 
If count is negative, every to the right of the final delimiter (counting from the * right) is returned. subStringIndex performs a case-sensitive match when searching for delim. */ public UTF8String subStringIndex(UTF8String delim, int count) { if (delim.numBytes == 0 || count == 0) { return EMPTY_UTF8; } if (count > 0) { int idx = -1; while (count > 0) { idx = find(delim, idx + 1); if (idx >= 0) { count --; } else { // can not find enough delim return this; } } if (idx == 0) { return EMPTY_UTF8; } byte[] bytes = new byte[idx]; copyMemory(base, offset, bytes, BYTE_ARRAY_OFFSET, idx); return fromBytes(bytes); } else { int idx = numBytes - delim.numBytes + 1; count = -count; while (count > 0) { idx = rfind(delim, idx - 1); if (idx >= 0) { count --; } else { // can not find enough delim return this; } } if (idx + delim.numBytes == numBytes) { return EMPTY_UTF8; } int size = numBytes - delim.numBytes - idx; byte[] bytes = new byte[size]; copyMemory(base, offset + idx + delim.numBytes, bytes, BYTE_ARRAY_OFFSET, size); return fromBytes(bytes); } } /** * Returns str, right-padded with pad to a length of len * For example: * ('hi', 5, '??') =&gt; 'hi???' * ('hi', 1, '??') =&gt; 'h' */ public UTF8String rpad(int len, UTF8String pad) { int spaces = len - this.numChars(); // number of char need to pad if (spaces <= 0 || pad.numBytes() == 0) { // no padding at all, return the substring of the current string return substring(0, len); } else { int padChars = pad.numChars(); int count = spaces / padChars; // how many padding string needed // the partial string of the padding UTF8String remain = pad.substring(0, spaces - padChars * count); byte[] data = new byte[this.numBytes + pad.numBytes * count + remain.numBytes]; copyMemory(this.base, this.offset, data, BYTE_ARRAY_OFFSET, this.numBytes); int offset = this.numBytes; int idx = 0; while (idx < count) { copyMemory(pad.base, pad.offset, data, BYTE_ARRAY_OFFSET + offset, pad.numBytes); ++ idx; offset += pad.numBytes; } copyMemory(remain.base, remain.offset, data, BYTE_ARRAY_OFFSET + offset, remain.numBytes); return UTF8String.fromBytes(data); } } /** * Returns str, left-padded with pad to a length of len. * For example: * ('hi', 5, '??') =&gt; '???hi' * ('hi', 1, '??') =&gt; 'h' */ public UTF8String lpad(int len, UTF8String pad) { int spaces = len - this.numChars(); // number of char need to pad if (spaces <= 0 || pad.numBytes() == 0) { // no padding at all, return the substring of the current string return substring(0, len); } else { int padChars = pad.numChars(); int count = spaces / padChars; // how many padding string needed // the partial string of the padding UTF8String remain = pad.substring(0, spaces - padChars * count); byte[] data = new byte[this.numBytes + pad.numBytes * count + remain.numBytes]; int offset = 0; int idx = 0; while (idx < count) { copyMemory(pad.base, pad.offset, data, BYTE_ARRAY_OFFSET + offset, pad.numBytes); ++ idx; offset += pad.numBytes; } copyMemory(remain.base, remain.offset, data, BYTE_ARRAY_OFFSET + offset, remain.numBytes); offset += remain.numBytes; copyMemory(this.base, this.offset, data, BYTE_ARRAY_OFFSET + offset, numBytes()); return UTF8String.fromBytes(data); } } /** * Concatenates input strings together into a single string. Returns null if any input is null. */ public static UTF8String concat(UTF8String... inputs) { // Compute the total length of the result. 
int totalLength = 0; for (int i = 0; i < inputs.length; i++) { if (inputs[i] != null) { totalLength += inputs[i].numBytes; } else { return null; } } // Allocate a new byte array, and copy the inputs one by one into it. final byte[] result = new byte[totalLength]; int offset = 0; for (int i = 0; i < inputs.length; i++) { int len = inputs[i].numBytes; copyMemory( inputs[i].base, inputs[i].offset, result, BYTE_ARRAY_OFFSET + offset, len); offset += len; } return fromBytes(result); } /** * Concatenates input strings together into a single string using the separator. * A null input is skipped. For example, concat(",", "a", null, "c") would yield "a,c". */ public static UTF8String concatWs(UTF8String separator, UTF8String... inputs) { if (separator == null) { return null; } int numInputBytes = 0; // total number of bytes from the inputs int numInputs = 0; // number of non-null inputs for (int i = 0; i < inputs.length; i++) { if (inputs[i] != null) { numInputBytes += inputs[i].numBytes; numInputs++; } } if (numInputs == 0) { // Return an empty string if there is no input, or all the inputs are null. return EMPTY_UTF8; } // Allocate a new byte array, and copy the inputs one by one into it. // The size of the new array is the size of all inputs, plus the separators. final byte[] result = new byte[numInputBytes + (numInputs - 1) * separator.numBytes]; int offset = 0; for (int i = 0, j = 0; i < inputs.length; i++) { if (inputs[i] != null) { int len = inputs[i].numBytes; copyMemory( inputs[i].base, inputs[i].offset, result, BYTE_ARRAY_OFFSET + offset, len); offset += len; j++; // Add separator if this is not the last input. if (j < numInputs) { copyMemory( separator.base, separator.offset, result, BYTE_ARRAY_OFFSET + offset, separator.numBytes); offset += separator.numBytes; } } } return fromBytes(result); } public UTF8String[] split(UTF8String pattern, int limit) { String[] splits = toString().split(pattern.toString(), limit); UTF8String[] res = new UTF8String[splits.length]; for (int i = 0; i < res.length; i++) { res[i] = fromString(splits[i]); } return res; } public UTF8String replace(UTF8String search, UTF8String replace) { if (EMPTY_UTF8.equals(search)) { return this; } String replaced = toString().replace( search.toString(), replace.toString()); return fromString(replaced); } // TODO: Need to use `Code Point` here instead of Char in case the character longer than 2 bytes public UTF8String translate(Map<Character, Character> dict) { String srcStr = this.toString(); StringBuilder sb = new StringBuilder(); for(int k = 0; k< srcStr.length(); k++) { if (null == dict.get(srcStr.charAt(k))) { sb.append(srcStr.charAt(k)); } else if ('\0' != dict.get(srcStr.charAt(k))){ sb.append(dict.get(srcStr.charAt(k))); } } return fromString(sb.toString()); } /** * Wrapper over `long` to allow result of parsing long from string to be accessed via reference. * This is done solely for better performance and is not expected to be used by end users. */ public static class LongWrapper implements Serializable { public transient long value = 0; } /** * Wrapper over `int` to allow result of parsing integer from string to be accessed via reference. * This is done solely for better performance and is not expected to be used by end users. * * {@link LongWrapper} could have been used here but using `int` directly save the extra cost of * conversion from `long` to `int` */ public static class IntWrapper implements Serializable { public transient int value = 0; } /** * Parses this UTF8String to long. 
* * Note that, in this method we accumulate the result in negative format, and convert it to * positive format at the end, if this string is not started with '-'. This is because min value * is bigger than max value in digits, e.g. Long.MAX_VALUE is '9223372036854775807' and * Long.MIN_VALUE is '-9223372036854775808'. * * This code is mostly copied from LazyLong.parseLong in Hive. * * @param toLongResult If a valid `long` was parsed from this UTF8String, then its value would * be set in `toLongResult` * @return true if the parsing was successful else false */ public boolean toLong(LongWrapper toLongResult) { if (numBytes == 0) { return false; } byte b = getByte(0); final boolean negative = b == '-'; int offset = 0; if (negative || b == '+') { offset++; if (numBytes == 1) { return false; } } final byte separator = '.'; final int radix = 10; final long stopValue = Long.MIN_VALUE / radix; long result = 0; while (offset < numBytes) { b = getByte(offset); offset++; if (b == separator) { // We allow decimals and will return a truncated integral in that case. // Therefore we won't throw an exception here (checking the fractional // part happens below.) break; } int digit; if (b >= '0' && b <= '9') { digit = b - '0'; } else { return false; } // We are going to process the new digit and accumulate the result. However, before doing // this, if the result is already smaller than the stopValue(Long.MIN_VALUE / radix), then // result * 10 will definitely be smaller than minValue, and we can stop. if (result < stopValue) { return false; } result = result * radix - digit; // Since the previous result is less than or equal to stopValue(Long.MIN_VALUE / radix), we // can just use `result > 0` to check overflow. If result overflows, we should stop. if (result > 0) { return false; } } // This is the case when we've encountered a decimal separator. The fractional // part will not change the number, but we will verify that the fractional part // is well formed. while (offset < numBytes) { byte currentByte = getByte(offset); if (currentByte < '0' || currentByte > '9') { return false; } offset++; } if (!negative) { result = -result; if (result < 0) { return false; } } toLongResult.value = result; return true; } /** * Parses this UTF8String to int. * * Note that, in this method we accumulate the result in negative format, and convert it to * positive format at the end, if this string is not started with '-'. This is because min value * is bigger than max value in digits, e.g. Integer.MAX_VALUE is '2147483647' and * Integer.MIN_VALUE is '-2147483648'. * * This code is mostly copied from LazyInt.parseInt in Hive. * * Note that, this method is almost same as `toLong`, but we leave it duplicated for performance * reasons, like Hive does. * * @param intWrapper If a valid `int` was parsed from this UTF8String, then its value would * be set in `intWrapper` * @return true if the parsing was successful else false */ public boolean toInt(IntWrapper intWrapper) { if (numBytes == 0) { return false; } byte b = getByte(0); final boolean negative = b == '-'; int offset = 0; if (negative || b == '+') { offset++; if (numBytes == 1) { return false; } } final byte separator = '.'; final int radix = 10; final int stopValue = Integer.MIN_VALUE / radix; int result = 0; while (offset < numBytes) { b = getByte(offset); offset++; if (b == separator) { // We allow decimals and will return a truncated integral in that case. // Therefore we won't throw an exception here (checking the fractional // part happens below.) 
break; } int digit; if (b >= '0' && b <= '9') { digit = b - '0'; } else { return false; } // We are going to process the new digit and accumulate the result. However, before doing // this, if the result is already smaller than the stopValue(Integer.MIN_VALUE / radix), then // result * 10 will definitely be smaller than minValue, and we can stop if (result < stopValue) { return false; } result = result * radix - digit; // Since the previous result is less than or equal to stopValue(Integer.MIN_VALUE / radix), // we can just use `result > 0` to check overflow. If result overflows, we should stop if (result > 0) { return false; } } // This is the case when we've encountered a decimal separator. The fractional // part will not change the number, but we will verify that the fractional part // is well formed. while (offset < numBytes) { byte currentByte = getByte(offset); if (currentByte < '0' || currentByte > '9') { return false; } offset++; } if (!negative) { result = -result; if (result < 0) { return false; } } intWrapper.value = result; return true; } public boolean toShort(IntWrapper intWrapper) { if (toInt(intWrapper)) { int intValue = intWrapper.value; short result = (short) intValue; if (result == intValue) { return true; } } return false; } public boolean toByte(IntWrapper intWrapper) { if (toInt(intWrapper)) { int intValue = intWrapper.value; byte result = (byte) intValue; if (result == intValue) { return true; } } return false; } @Override public String toString() { return new String(getBytes(), StandardCharsets.UTF_8); } @Override public UTF8String clone() { return fromBytes(getBytes()); } public UTF8String copy() { byte[] bytes = new byte[numBytes]; copyMemory(base, offset, bytes, BYTE_ARRAY_OFFSET, numBytes); return fromBytes(bytes); } @Override public int compareTo(@Nonnull final UTF8String other) { int len = Math.min(numBytes, other.numBytes); // TODO: compare 8 bytes as unsigned long for (int i = 0; i < len; i ++) { // In UTF-8, the byte should be unsigned, so we should compare them as unsigned int. int res = (getByte(i) & 0xFF) - (other.getByte(i) & 0xFF); if (res != 0) { return res; } } return numBytes - other.numBytes; } public int compare(final UTF8String other) { return compareTo(other); } @Override public boolean equals(final Object other) { if (other instanceof UTF8String) { UTF8String o = (UTF8String) other; if (numBytes != o.numBytes) { return false; } return ByteArrayMethods.arrayEquals(base, offset, o.base, o.offset, numBytes); } else { return false; } } /** * Levenshtein distance is a metric for measuring the distance of two strings. The distance is * defined by the minimum number of single-character edits (i.e. insertions, deletions or * substitutions) that are required to change one of the strings into the other. 
*/ public int levenshteinDistance(UTF8String other) { // Implementation adopted from org.apache.common.lang3.StringUtils.getLevenshteinDistance int n = numChars(); int m = other.numChars(); if (n == 0) { return m; } else if (m == 0) { return n; } UTF8String s, t; if (n <= m) { s = this; t = other; } else { s = other; t = this; int swap; swap = n; n = m; m = swap; } int[] p = new int[n + 1]; int[] d = new int[n + 1]; int[] swap; int i, i_bytes, j, j_bytes, num_bytes_j, cost; for (i = 0; i <= n; i++) { p[i] = i; } for (j = 0, j_bytes = 0; j < m; j_bytes += num_bytes_j, j++) { num_bytes_j = numBytesForFirstByte(t.getByte(j_bytes)); d[0] = j + 1; for (i = 0, i_bytes = 0; i < n; i_bytes += numBytesForFirstByte(s.getByte(i_bytes)), i++) { if (s.getByte(i_bytes) != t.getByte(j_bytes) || num_bytes_j != numBytesForFirstByte(s.getByte(i_bytes))) { cost = 1; } else { cost = (ByteArrayMethods.arrayEquals(t.base, t.offset + j_bytes, s.base, s.offset + i_bytes, num_bytes_j)) ? 0 : 1; } d[i + 1] = Math.min(Math.min(d[i] + 1, p[i + 1] + 1), p[i] + cost); } swap = p; p = d; d = swap; } return p[n]; } @Override public int hashCode() { return Murmur3_x86_32.hashUnsafeBytes(base, offset, numBytes, 42); } /** * Soundex mapping table */ private static final byte[] US_ENGLISH_MAPPING = {'0', '1', '2', '3', '0', '1', '2', '7', '0', '2', '2', '4', '5', '5', '0', '1', '2', '6', '2', '3', '0', '1', '7', '2', '0', '2'}; /** * Encodes a string into a Soundex value. Soundex is an encoding used to relate similar names, * but can also be used as a general purpose scheme to find word with similar phonemes. * https://en.wikipedia.org/wiki/Soundex */ public UTF8String soundex() { if (numBytes == 0) { return EMPTY_UTF8; } byte b = getByte(0); if ('a' <= b && b <= 'z') { b -= 32; } else if (b < 'A' || 'Z' < b) { // first character must be a letter return this; } byte[] sx = {'0', '0', '0', '0'}; sx[0] = b; int sxi = 1; int idx = b - 'A'; byte lastCode = US_ENGLISH_MAPPING[idx]; for (int i = 1; i < numBytes; i++) { b = getByte(i); if ('a' <= b && b <= 'z') { b -= 32; } else if (b < 'A' || 'Z' < b) { // not a letter, skip it lastCode = '0'; continue; } idx = b - 'A'; byte code = US_ENGLISH_MAPPING[idx]; if (code == '7') { // ignore it } else { if (code != '0' && code != lastCode) { sx[sxi++] = code; if (sxi > 3) break; } lastCode = code; } } return UTF8String.fromBytes(sx); } public void writeExternal(ObjectOutput out) throws IOException { byte[] bytes = getBytes(); out.writeInt(bytes.length); out.write(bytes); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { offset = BYTE_ARRAY_OFFSET; numBytes = in.readInt(); base = new byte[numBytes]; in.readFully((byte[]) base); } @Override public void write(Kryo kryo, Output out) { byte[] bytes = getBytes(); out.writeInt(bytes.length); out.write(bytes); } @Override public void read(Kryo kryo, Input in) { this.offset = BYTE_ARRAY_OFFSET; this.numBytes = in.readInt(); this.base = new byte[numBytes]; in.read((byte[]) base); } }
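A brief usage sketch of a few of the operations defined in the file above, with expected results following the javadoc semantics shown there (e.g. the rpad examples); it assumes the class is available on the classpath as org.apache.spark.unsafe.types.UTF8String and is illustrative only, not part of the dataset record.

```java
import org.apache.spark.unsafe.types.UTF8String;

public class UTF8StringUsageSketch {
    public static void main(String[] args) {
        UTF8String s = UTF8String.fromString("www.apache.org");
        // Everything before the 2nd '.' counting from the left: "www.apache"
        System.out.println(s.subStringIndex(UTF8String.fromString("."), 2));
        // Right-pad "hi" to length 5 with "?": "hi???"
        System.out.println(UTF8String.fromString("hi").rpad(5, UTF8String.fromString("?")));
        // Levenshtein distance between "kitten" and "sitting": 3
        System.out.println(UTF8String.fromString("kitten")
            .levenshteinDistance(UTF8String.fromString("sitting")));
    }
}
```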
[SPARK-21967][CORE] org.apache.spark.unsafe.types.UTF8String#compareTo Should Compare 8 Bytes at a Time for Better Performance

## What changes were proposed in this pull request?

* Using 64 bit unsigned long comparison instead of unsigned int comparison in `org.apache.spark.unsafe.types.UTF8String#compareTo` for better performance.
* Making `IS_LITTLE_ENDIAN` a constant for correctness reasons (a non-constant should not be used in `compareTo` implementations, and it is definitely a constant per JVM).

## How was this patch tested?

Build passes and the functionality is widely covered by existing tests as far as I can see.

Author: Armin <[email protected]>

Closes #19180 from original-brownbear/SPARK-21967.
common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
[SPARK-21967][CORE] org.apache.spark.unsafe.types.UTF8String#compareTo Should Compare 8 Bytes at a Time for Better Performance
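The commit above replaces the byte-at-a-time loop in `compareTo` with word-wise comparison. As a rough illustration of the idea only (not the actual Spark patch, which reads words through `Platform.getLong` and byte-reverses them on little-endian JVMs via the `IS_LITTLE_ENDIAN` constant), a minimal standalone sketch using `java.nio.ByteBuffer` might look like this; the class and method names are hypothetical:

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Hypothetical helper, not part of Spark: 8-bytes-at-a-time unsigned
// lexicographic comparison of two byte arrays.
public class WordWiseCompareSketch {

    static int compareUnsignedBytes(byte[] a, byte[] b) {
        int len = Math.min(a.length, b.length);
        int wordLimit = len - (len % 8);

        // Bulk path: read big-endian longs; for big-endian words, unsigned
        // long order matches byte-by-byte unsigned lexicographic order.
        ByteBuffer bufA = ByteBuffer.wrap(a).order(ByteOrder.BIG_ENDIAN);
        ByteBuffer bufB = ByteBuffer.wrap(b).order(ByteOrder.BIG_ENDIAN);
        int i = 0;
        while (i < wordLimit) {
            long wa = bufA.getLong(i);
            long wb = bufB.getLong(i);
            if (wa != wb) {
                return Long.compareUnsigned(wa, wb);
            }
            i += 8;
        }

        // Tail path: compare the remaining (< 8) bytes as unsigned ints,
        // exactly like the original byte-wise loop in compareTo.
        for (; i < len; i++) {
            int res = (a[i] & 0xFF) - (b[i] & 0xFF);
            if (res != 0) {
                return res;
            }
        }
        return a.length - b.length;
    }

    public static void main(String[] args) {
        byte[] x = "abcdefghij".getBytes();
        byte[] y = "abcdefghiz".getBytes();
        System.out.println(compareUnsignedBytes(x, y) < 0); // true
    }
}
```

Reading the words in big-endian order is what keeps `Long.compareUnsigned` consistent with the byte-wise tail comparison, so both paths agree on the ordering.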
Java
bsd-3-clause
d9516a9d7c02ee1ec2d1148a71b49f954a7d0cf2
0
PedeLa/CAE-Model-Persistence-Service,PedeLa/CAE-Model-Persistence-Service
package i5.las2peer.services.modelPersistenceService; import java.io.Serializable; import java.net.HttpURLConnection; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import javax.ws.rs.BadRequestException; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.InternalServerErrorException; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.JSONValue; import org.json.simple.parser.ParseException; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import i5.cae.semanticCheck.SemanticCheckResponse; import i5.cae.simpleModel.SimpleEntityAttribute; import i5.cae.simpleModel.SimpleModel; import i5.cae.simpleModel.node.SimpleNode; import i5.las2peer.api.Context; import i5.las2peer.api.ServiceException; import i5.las2peer.api.execution.InternalServiceException; import i5.las2peer.api.execution.ServiceAccessDeniedException; import i5.las2peer.api.execution.ServiceInvocationFailedException; import i5.las2peer.api.execution.ServiceMethodNotFoundException; import i5.las2peer.api.execution.ServiceNotAuthorizedException; import i5.las2peer.api.execution.ServiceNotAvailableException; import i5.las2peer.api.execution.ServiceNotFoundException; import i5.las2peer.api.logging.MonitoringEvent; import i5.las2peer.logging.L2pLogger; import i5.las2peer.services.modelPersistenceService.database.DatabaseManager; import i5.las2peer.services.modelPersistenceService.exception.CGSInvocationException; import i5.las2peer.services.modelPersistenceService.exception.ModelNotFoundException; import i5.las2peer.services.modelPersistenceService.exception.VersionedModelNotFoundException; import i5.las2peer.services.modelPersistenceService.model.EntityAttribute; import i5.las2peer.services.modelPersistenceService.model.Model; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import io.swagger.jaxrs.Reader; import io.swagger.models.Swagger; import io.swagger.util.Json; import i5.las2peer.services.modelPersistenceService.model.metadata.MetadataDoc; import i5.las2peer.services.modelPersistenceService.modelServices.*; import i5.las2peer.services.modelPersistenceService.versionedModel.Commit; import i5.las2peer.services.modelPersistenceService.versionedModel.VersionedModel; @Path("/") public class RESTResources { private static final String PROJECT_MANAGEMENT_SERVICE = "i5.las2peer.services.projectManagementService.ProjectManagementService@0.1.0"; private final ModelPersistenceService service = (ModelPersistenceService) Context.getCurrent().getService(); private L2pLogger logger; private String semanticCheckService; private String codeGenerationService; private String deploymentUrl; private DatabaseManager dbm; private MetadataDocService metadataDocService; public RESTResources() throws ServiceException { this.logger = (L2pLogger) service.getLogger(); this.semanticCheckService = service.getSemanticCheckService(); this.codeGenerationService = service.getCodeGenerationService(); this.deploymentUrl = service.getDeploymentUrl(); this.dbm = service.getDbm(); this.metadataDocService = 
service.getMetadataService(); } /** * * Searches for a model in the database by name. * * @param modelId * the id of the model * * @return HttpResponse containing the status code of the request and (if * successful) the model as a JSON string * */ @GET @Path("/models/{modelId}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Searches for a model in the database. Takes the modelName as search parameter.", notes = "Searches for a model in the database.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model could not be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getModel(@PathParam("modelId") int modelId) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModel: searching for model with id " + modelId); Model model = null; Connection connection = null; try { connection = dbm.getConnection(); model = new Model(modelId, connection); } catch (ModelNotFoundException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModel: did not find model with id " + modelId); return Response.status(404).entity("Model not found!").build(); } catch (SQLException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModel: exception fetching model: " + e); logger.printStackTrace(e); return Response.serverError().entity("Database error!").build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModel: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModel: found model " + modelId + ", now converting to JSONObject and returning"); JSONObject jsonModel = model.toJSONObject(); return Response.ok(jsonModel.toJSONString(), MediaType.APPLICATION_JSON).build(); } /** * * Retrieves all model names from the database. * * * @return HttpResponse containing the status code of the request and (if * the database is not empty) the model-list as a JSON array * */ @SuppressWarnings("unchecked") @GET @Path("/models/") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Retrieves a list of models from the database.", notes = "Retrieves a list of all models stored in the database. 
Returns a list of model names.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model list is returned"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No models in the database"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getModels() { ArrayList<Integer> modelIds = new ArrayList<>(); Connection connection = null; try { connection = dbm.getConnection(); // search for all models PreparedStatement statement = connection.prepareStatement("SELECT modelId FROM Model;"); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: retrieving all models.."); ResultSet queryResult = statement.executeQuery(); while (queryResult.next()) { modelIds.add(queryResult.getInt(1)); } if (modelIds.isEmpty()) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: database is empty!"); return Response.status(404).entity("Database is empty!").build(); } connection.close(); } catch (SQLException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: exception fetching model: " + e); logger.printStackTrace(e); return Response.serverError().entity("Database error!").build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: created list of models, now converting to JSONObject and returning"); JSONArray jsonModelList = new JSONArray(); jsonModelList.addAll(modelIds); return Response.ok(jsonModelList.toJSONString(), MediaType.APPLICATION_JSON).build(); } @SuppressWarnings("unchecked") @GET @Path("/models/type/{modelType}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Retrieves a list of models from the database.", notes = "Retrieves a list of all models stored in the database. 
Returns a list of model names.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model list is returned"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No models in the database"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getModelsByType(@PathParam("modelType") String modelType) { ArrayList<String> modelNames = new ArrayList<String>(); Connection connection = null; try { connection = dbm.getConnection(); String sql = "select `ModelAttributes`.`modelName` from `AttributeToModelAttributes`, `Attribute`, `ModelAttributes`\n" + "where `AttributeToModelAttributes`.`attributeId` = `Attribute`.`attributeId`\n" + "and `AttributeToModelAttributes`.`modelAttributesName` = `ModelAttributes`.`modelName`\n" + "and `Attribute`.`name` = 'type'\n" + "and `Attribute`.`value` = '" + modelType + "';"; // search for all models PreparedStatement statement = connection.prepareStatement(sql); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: retrieving all models..!"); ResultSet queryResult = statement.executeQuery(); while (queryResult.next()) { modelNames.add(queryResult.getString(1)); } if (modelNames.isEmpty()) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: retrieving all models.."); return Response.ok(new JSONArray().toJSONString(), MediaType.APPLICATION_JSON).build(); } connection.close(); } catch (SQLException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: exception fetching model: " + e); logger.printStackTrace(e); return Response.serverError().entity("Database error!").build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: created list of models, now converting to JSONObject and returning"); JSONArray jsonModelList = new JSONArray(); jsonModelList.addAll(modelNames); return Response.ok(jsonModelList.toJSONString(), MediaType.APPLICATION_JSON).build(); } /** * * Deletes a model. 
* * @param modelId * id of the model * * @return HttpResponse containing the status code of the request * */ @DELETE @Path("/models/{modelId}") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Deletes a model given by its id.", notes = "Deletes a model given by its id.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model is deleted"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response deleteModel(@PathParam("modelId") int modelId) { Connection connection = null; Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deleteModel: trying to delete model with id: " + modelId); try { connection = dbm.getConnection(); Model model = new Model(modelId, connection); // call code generation service if (!codeGenerationService.isEmpty()) { /*try { // TODO: reactivate usage of code generation service //model = callCodeGenerationService("deleteRepositoryOfModel", model, "", null); } catch (CGSInvocationException e) { return Response.serverError().entity("Model not valid: " + e.getMessage()).build(); }*/ } model.deleteFromDatabase(connection); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deleteModel: deleted model " + modelId); return Response.ok("Model deleted!").build(); } catch (ModelNotFoundException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deleteModel: did not find model with id " + modelId); return Response.status(404).entity("Model not found!").build(); } catch (SQLException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "deleteModel: exception deleting model: " + e); logger.printStackTrace(e); return Response.serverError().entity("Internal server error...").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } } /** * Searches for a versioned model with the given id. * @param versionedModelId Id of the versioned model to search for. * @return Response with status code (and possibly error message). */ @GET @Path("/versionedModels/{id}") @ApiOperation(value = "Searches for a versioned model in the database.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message="OK, found versioned model with the given id.
Return it."), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message="Versioned model with the given id could not be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error.") }) public Response getVersionedModelById(@PathParam("id") int versionedModelId) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getVersionedModelById: searching for versionedModel with id " + versionedModelId); Connection connection = null; try { connection = dbm.getConnection(); // load versioned model by id VersionedModel versionedModel = new VersionedModel(versionedModelId, connection); // if no VersionedModelNotFoundException was thrown, then the model exists // return it return Response.ok(versionedModel.toJSONObject().toJSONString()).build(); } catch (VersionedModelNotFoundException e) { logger.printStackTrace(e); return Response.status(HttpURLConnection.HTTP_NOT_FOUND) .entity("Versioned model with the given id could not be found.").build(); } catch (SQLException e) { logger.printStackTrace(e); return Response.serverError().entity("Internal server error.").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } } /** * Posts a commit to the versioned model. * @param versionedModelId Id of the versioned model, where the commit should be added to. * @param inputCommit Input commit as JSON, also containing the model that should be connected to the commit. * @return Response with status code (and possibly error message). */ @POST @Path("/versionedModels/{id}/commits") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Posts a commit to the versioned model.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, added commit to versioned model."), @ApiResponse(code = HttpURLConnection.HTTP_UNAUTHORIZED, message = "User is not authorized."), @ApiResponse(code = HttpURLConnection.HTTP_BAD_REQUEST, message = "Parse error."), @ApiResponse(code = HttpURLConnection.HTTP_FORBIDDEN, message = "USer is not allowed to commit to the versioned model."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error.") }) public Response postCommitToVersionedModel(@PathParam("id") int versionedModelId, String inputCommit) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "postCommitToVersionedModel: posting commit to versioned model with id " + versionedModelId); Connection connection = null; try { connection = dbm.getConnection(); boolean isAnonymous = (boolean) Context.getCurrent().invoke(PROJECT_MANAGEMENT_SERVICE, "isAnonymous"); if(isAnonymous) { return Response.status(HttpURLConnection.HTTP_UNAUTHORIZED).entity("User not authorized.").build(); } else { boolean hasCommitPermission = (boolean) Context.getCurrent() .invoke(PROJECT_MANAGEMENT_SERVICE, "hasCommitPermission", versionedModelId); if(hasCommitPermission) { // user has the permission to commit to the versioned model // there always exists a commit for "uncommited changes" // that one needs to be removed first connection.setAutoCommit(false); VersionedModel versionedModel = new VersionedModel(versionedModelId, connection); Commit uncommitedChanges = versionedModel.getCommitForUncommitedChanges(); uncommitedChanges.delete(connection); // now create a new commit Commit commit = new Commit(inputCommit, false); commit.persist(versionedModelId, connection, false); // now create new commit for uncommited changes Commit uncommitedChangesNew = new Commit(inputCommit, true); 
uncommitedChangesNew.persist(versionedModelId, connection, false); // reload versionedModel from database versionedModel = new VersionedModel(versionedModelId, connection); // get model Model model = commit.getModel(); // do the semantic check if (!semanticCheckService.isEmpty()) { this.checkModel(model); } // The codegen service and metadatadocservice already require the model to have a "type" attribute // this "type" attribute is included in the request body JSONObject commitJson = (JSONObject) JSONValue.parse(inputCommit); String type = (String) commitJson.get("componentType"); // given type "frontend" needs to be converted to "frontend-component" if(type.equals("frontend")) type = "frontend-component"; // the other types "microservice" and "application" do not need to be converted // these model attributes are not persisted to the database, since model.persist already got called // when the commit got persisted model.getAttributes().add(new EntityAttribute(new SimpleEntityAttribute("syncmetaid", "type", type))); model.getAttributes().add(new EntityAttribute("syncmetaid", "versionedModelId", String.valueOf(versionedModelId))); // call code generation service String commitSha = ""; if (!codeGenerationService.isEmpty()) { try { // get user input metadata doc if available String metadataDocString = model.getMetadataDoc(); if (metadataDocString == null) metadataDocString = ""; Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "postModel: invoking code generation service.."); // TODO: reactivate usage of code generation service // TODO: EDIT: is reactivated now, check if everything works, then this TODO can be removed // check if it is the first commit or not if(versionedModel.getCommits().size() == 2) { // this is the first commit (there are 2 in total, because of the "uncommited changes" commit) commitSha = callCodeGenerationService("createFromModel", metadataDocString, versionedModel, commit); } else { // not the first commit commitSha = callCodeGenerationService("updateRepositoryOfModel", metadataDocString, versionedModel, commit); } } catch (CGSInvocationException e) { try { connection.rollback(); } catch (SQLException e1) {} return Response.serverError().entity("Model not valid: " + e.getMessage()).build(); } } // generate metadata swagger doc after model valid in code generation metadataDocService.modelToSwagger(versionedModel.getId(), model); // now persist the sha given by code generation service commit.persistSha(commitSha, connection); // everything went well -> commit database changes connection.commit(); return Response.ok(commitSha).build(); } else { // user does not have the permission to commit to the versioned model, or an error occurred return Response.status(HttpURLConnection.HTTP_FORBIDDEN) .entity("User is not allowed to commit to the versioned model (or an error occurred).").build(); } } } catch (SQLException e) { try { connection.rollback(); } catch (SQLException e1) {} logger.printStackTrace(e); return Response.serverError().entity("Internal server error.").build(); } catch (ParseException e) { try { connection.rollback(); } catch (SQLException e1) {} logger.printStackTrace(e); return Response.status(HttpURLConnection.HTTP_BAD_REQUEST).entity("Parse error.").build(); } catch (Exception e) { try { connection.rollback(); } catch (SQLException e1) {} logger.printStackTrace(e); return Response.serverError().entity("Internal server error: " + e.getMessage()).build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } } 
/** * Get the status / console text of a build. The build is identified by * using the queue item that is returned when a job is created. * * @param queueItem * The queue item of the job * @return The console text of the job */ @GET @Path("/deployStatus/") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Get the console text of the build from Jenkins", notes = "Get the console text of the build.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model will be deployed"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response deployStatus(@QueryParam("queueItem") String queueItem) { // delegate the request to the code generation service as it is the // service responsible for // Jenkins try { String answer = (String) Context.getCurrent().invoke( "i5.las2peer.services.codeGenerationService.CodeGenerationService@0.1", "deployStatus", queueItem); return Response.ok(answer).build(); } catch (Exception e) { logger.printStackTrace(e); return Response.serverError().entity(e.getMessage()).build(); } } /** * * Requests the code generation service to start a Jenkins job for an * application model. * * @param versionedModelId * id of the versioned model * @param jobAlias * the name/alias of the job to run, i.e. either "Build" or * "Docker" * * @return HttpResponse containing the status code of the request * */ @GET @Path("/deploy/{versionedModelId}/{jobAlias}") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Deploys an application model.", notes = "Deploys an application model.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model will be deployed"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response deployModel(@PathParam("versionedModelId") int versionedModelId, @PathParam("jobAlias") String jobAlias) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deployModel: trying to deploy versioned model with id: " + versionedModelId); Model model; Connection connection = null; // first parse the updated model and check for correctness of format try { connection = dbm.getConnection(); // get versioned model first VersionedModel versionedModel = new VersionedModel(versionedModelId, connection); ArrayList<Commit> commits = versionedModel.getCommits(); if(commits.size() < 2) { return Response.serverError().entity("There does not exist a commit to the versioned model with the given id.").build(); } // get the commit at index 1, because the commit at index 0 is the one for uncommited changes Commit latestCommit = commits.get(1); // use the model of the latest commit for the deployment model = latestCommit.getModel(); // add type attribute "application" model.getAttributes().add(new EntityAttribute(new SimpleEntityAttribute("syncmetaid", "type", "application"))); // add attribute for versionedModelId model.getAttributes().add(new EntityAttribute(new SimpleEntityAttribute("syncmetaid", "versionedModelId", String.valueOf(versionedModelId)))); try { // only create temp repository once, i.e. 
before the "Build" // job is started in Jenkins if (jobAlias.equals("Build")) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deployModel: invoking code generation service.."); // TODO: reactivate usage of code generation service // TODO: EDIT: is reactivated now, check if everything works, then TODO can be removed callCodeGenerationService("prepareDeploymentApplicationModel", "", null, latestCommit); } // start the jenkins job by the code generation service String answer = (String) Context.getCurrent().invoke( "i5.las2peer.services.codeGenerationService.CodeGenerationService@0.1", "startJenkinsJob", jobAlias); // safe deployment time and url if(!deploymentUrl.isEmpty()) metadataDocService.updateDeploymentDetails(model, deploymentUrl); return Response.ok(answer).build(); } catch (CGSInvocationException e) { return Response.serverError().entity("Model not valid: " + e.getMessage()).build(); } } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "updateModel: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Internal server error!").build(); } // always close connections finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } } //////////////////////////////////////////////////////////////////////////////////////// // Methods special to the CAE. Feel free to ignore them:-) /////////////////////////////////////////////////////////////////////////////////////// /** * * Loads a model from the database (by calling the respective resource) and * sends it to the code generation service, requesting a Communication Model * view to be displayed in SyncMeta's application editor view. * * TODO: Not tested.. * * @param modelId * the id of the model to be loaded. 
* * @return HttpResponse containing the status code of the request and the * communication view model as a JSON string */ @GET @Path("/models/commView/{modelId}") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Gets a CAE communication view model.", notes = "Gets a CAE communication view model.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getCAECommunicationModel(@PathParam("modelId") int modelId) { // load the application model from the database SimpleModel appModel; Connection connection = null; try { connection = dbm.getConnection(); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getCAECommunicationModel: Loading model " + modelId + " from the database"); appModel = (SimpleModel) new Model(modelId, connection).getMinifiedRepresentation(); } catch (SQLException e) { // model might not exist logger.printStackTrace(e); Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getCAECommunicationModel: model " + modelId + " not found"); return Response.status(404).entity("Model " + modelId + " does not exist!").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } // load submodules of application model from the database Serializable[] modelsToSend = null; for (SimpleEntityAttribute attribute : appModel.getAttributes()) { if (attribute.getName().equals("type") && attribute.getValue().equals("application")) { modelsToSend = new SimpleModel[appModel.getNodes().size() + 1]; modelsToSend[0] = appModel; // first is always "application" // model itself int modelsToSendIndex = 1; // iterate through the nodes and add corresponding models to // array for (SimpleNode node : appModel.getNodes()) { // send application models only have one attribute with // its label // TODO: here subModelName got changed to subModelId -> check if it works int subModelId = Integer.valueOf(node.getAttributes().get(0).getValue()); try { connection = dbm.getConnection(); modelsToSend[modelsToSendIndex] = new Model(subModelId, connection) .getMinifiedRepresentation(); } catch (SQLException e) { // model might not exist logger.printStackTrace(e); Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getCAECommunicationModel: Error loading application component: " + subModelId); return Response.serverError().entity("Internal server error...").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } modelsToSendIndex++; } // invoke code generation service try { Serializable[] payload = { modelsToSend }; Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getCAECommunicationModel: Invoking code generation service now.."); SimpleModel communicationModel = (SimpleModel) Context.getCurrent().invoke(codeGenerationService, "getCommunicationViewOfApplicationModel", payload); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getCAECommunicationModel: Got communication model from code generation service.."); Model returnModel = new Model(communicationModel); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getCAECommunicationModel: Created model " + modelId + "from simple model, now converting to JSONObject and returning"); JSONObject jsonModel = returnModel.toJSONObject(); return 
Response.ok(jsonModel.toJSONString()).build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getCAECommunicationModel: Internal error " + e.getMessage()); logger.printStackTrace(e); return Response.serverError().entity("Internal server error...").build(); } } } Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getCAECommunicationModel: model " + modelId + " is not an application"); return Response.serverError().entity("Internal server error...").build(); } /** * * Calls the code generation service to see if the model is a valid CAE * model. Also implements a bit of CAE logic by checking if the code * generation service needs additional models (in case of an application * model) and serves them automatically, such that the rest of this service * does not have to deal with this "special case". * * @param methodName * the method name of the code generation service * @param metadataDoc * @param versionedModel The versioned model where the given model belongs to. * @param commit Commit where the code generation should be called with. * @return Commit sha identifier * * @throws CGSInvocationException * if something went wrong invoking the service * */ private String callCodeGenerationService(String methodName, String metadataDoc, VersionedModel versionedModel, Commit commit) throws CGSInvocationException { Model model = commit.getModel(); if (metadataDoc == null) metadataDoc = ""; Connection connection = null; Serializable[] modelsToSend = null; SimpleModel simpleModel = (SimpleModel) model.getMinifiedRepresentation(); boolean isApplication = false; String modelType = null; for(EntityAttribute a : model.getAttributes()) { if(a.getName().equals("type")) { modelType = a.getValue(); break; } } if (modelType.equals("application")) { isApplication = true; } if (isApplication) { modelsToSend = new SimpleModel[simpleModel.getNodes().size() + 1]; modelsToSend[0] = simpleModel; // first is always "application" // model itself int modelsToSendIndex = 1; // iterate through the nodes and add corresponding models to // array for (SimpleNode node : simpleModel.getNodes()) { String versionedModelIdStr = "-1"; for(SimpleEntityAttribute a : node.getAttributes()) { if(a.getName().equals("versionedModelId")) { versionedModelIdStr = a.getValue(); break; } } int versionedModelId = Integer.parseInt(versionedModelIdStr); // get latest commit VersionedModel v; try { connection = dbm.getConnection(); v = new VersionedModel(versionedModelId, connection); } catch (SQLException e1) { throw new CGSInvocationException(e1.getMessage()); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } ArrayList<Commit> commits = v.getCommits(); if(commits.size() < 2) throw new CGSInvocationException("Application contains versioned model without commit."); // get first "model-commit" Model m = null; for(int i = 1; i < commits.size(); i++) { if(commits.get(i).getCommitType() == Commit.COMMIT_TYPE_MODEL) { m = commits.get(i).getModel(); } } String type = ""; if(node.getType().equals("Frontend Component")) type = "frontend-component"; else if(node.getType().equals("Microservice")) type = "microservice"; m.getAttributes().add(new EntityAttribute("syncmetaid", "type", type)); logger.info("Attributes: " + node.getAttributes().toString()); try { connection = dbm.getConnection(); logger.info("Modelname: " + m.getId()); SimpleModel s = (SimpleModel) m.getMinifiedRepresentation(); // s now has the id of the model as id, not the versioned model id // thus we 
create a new SimpleModel and use the versioned model id as the model id SimpleModel s2 = new SimpleModel(String.valueOf(versionedModelId), s.getNodes(), s.getEdges(), s.getAttributes()); modelsToSend[modelsToSendIndex] = (Serializable) s2; } catch (SQLException e) { // model might not exist logger.printStackTrace(e); throw new CGSInvocationException("Error loading application component: " + m.getId()); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } modelsToSendIndex++; } } else { SimpleModel oldModel = null; // check if there exists an old model int commitCount = versionedModel.getCommits().size(); if(commitCount == 2) { // there only exists one commit and the "uncommited changes" commit modelsToSend = new SimpleModel[1]; modelsToSend[0] = simpleModel; } else { // there exists an old commit modelsToSend = new SimpleModel[2]; modelsToSend[0] = simpleModel; Model old = null; for(int i = 2; i < versionedModel.getCommits().size(); i++) { if(versionedModel.getCommits().get(i).getCommitType() == Commit.COMMIT_TYPE_MODEL) { old = versionedModel.getCommits().get(i).getModel(); break; } } // the old model does not contain attributes for type and versionedModelId old.getAttributes().add(new EntityAttribute("syncmetaid", "versionedModelId", String.valueOf(versionedModel.getId()))); oldModel = (SimpleModel) old.getMinifiedRepresentation(); modelsToSend[1] = oldModel; } } // actual invocation try { String answer = ""; if (!methodName.equals("updateRepositoryOfModel") && !methodName.equals("createFromModel")) { Serializable[] payload = { modelsToSend }; answer = (String) Context.getCurrent().invoke(codeGenerationService, methodName, payload); } else { // method is either updateRepositoryOfModel or createFromModel String versionTag = commit.getVersionTag(); if(versionTag == null) versionTag = ""; Serializable[] payload = { commit.getMessage(), versionTag, metadataDoc, modelsToSend }; answer = (String) Context.getCurrent().invoke(codeGenerationService, methodName, payload); } if (!answer.startsWith("done")) { throw new CGSInvocationException(answer); } if(answer.startsWith("done:")) return answer.split("done:")[1]; return ""; } catch (Exception e) { logger.printStackTrace(e); throw new CGSInvocationException(e.getMessage()); } } //////////////////////////////////////////////////////////////////////////////////////// // Methods for Semantic Check //////////////////////////////////////////////////////////////////////////////////////// /** * * Performs the semantic check (if specified) on the model, without storing * it * * @param inputModel * the model as a JSON string * * @return HttpResponse status of the check * */ @PUT @Path("/semantics") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Performs the semantic check", notes = "Performs the semantic check") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_NOT_MODIFIED, message = "Semantic Check successful"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response checkModel(String inputModel) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "checkModel: performing semantic check"); Model model; // first parse the updated model and check for correctness of format try { model = new Model(inputModel); } catch (ParseException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "semantic check: exception parsing JSON input: " + e); return Response.serverError().entity("JSON parsing exception, file not 
valid!").build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "semantic check: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Internal server error!").build(); } // do the semantic story check if (!semanticCheckService.isEmpty()) { this.doSemanticCheck(model); } else { return Response.status(400).entity("No semantic check service available").build(); } return Response.ok(SemanticCheckResponse.success().toJSONResultString()).build(); } private void checkModel(Model model) { SemanticCheckResponse result; EntityAttribute semcheckAttr = findSemcheckAttribute(model); try { result = (SemanticCheckResponse) Context.getCurrent().invoke(semanticCheckService, "doSemanticCheck", model.getMinifiedRepresentation()); } catch (Exception e) { System.out.println(e); throw new InternalServerErrorException("could not execute semantic check service"); } if (result == null) { throw new InternalServerErrorException("an error orrcured within the semantic check"); } else if (result.getError() != 0) { if (semcheckAttr == null) { throw new BadRequestException(result.toJSONResultString()); } else if (!semcheckAttr.getValue().equals("false")) { throw new BadRequestException("This model was supposed to be incorrect"); } } else if (result.getError() == 0) { if (semcheckAttr != null && !semcheckAttr.getValue().equals("true")) { throw new BadRequestException("This model was supposed to be correct"); } } } private void doSemanticCheck(Model model) { SemanticCheckResponse result; try { result = (SemanticCheckResponse) Context.getCurrent().invoke(semanticCheckService, "doSemanticCheck", model.getMinifiedRepresentation()); } catch (Exception e) { System.out.println(e); throw new InternalServerErrorException("could not execute semantic check service"); } if (result == null) { throw new InternalServerErrorException("an error orrcured within the semantic check"); } else if (result.getError() != 0) { throw new InternalServerErrorException(Response.ok(result.toJSONResultString()).build()); } } private EntityAttribute findSemcheckAttribute(Model model) { EntityAttribute res = null; for (EntityAttribute a : model.getAttributes()) { if (a.getName().equals("_semcheck")) { return a; } } return res; } //////////////////////////////////////////////////////////////////////////////////////// // Methods providing a Swagger documentation of the service API. //////////////////////////////////////////////////////////////////////////////////////// /** * * Returns the API documentation for a specific annotated top level resource * for purposes of the Swagger documentation. * * Note: If you do not intend to use Swagger for the documentation of your * Service API, this method may be removed. * * Trouble shooting: Please make sure that the endpoint URL below is correct * with respect to your service. 
* * @return the resource's documentation * */ @GET @Path("/models/swagger.json") @Produces(MediaType.APPLICATION_JSON) public Response getSwaggerJSON() { Swagger swagger = new Reader(new Swagger()).read(this.getClass()); if (swagger == null) { return Response.status(404).entity("Swagger API declaration not available!").build(); } try { return Response.ok(Json.mapper().writeValueAsString(swagger), MediaType.APPLICATION_JSON).build(); } catch (JsonProcessingException e) { logger.printStackTrace(e); return Response.serverError().entity(e.getMessage()).build(); } } /***********METADATA DOCS*************** */ /** * Get all element to element connections in the database * * @return JSON data of the list of all element to element connections */ @GET @Path("/docs/") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Searches for all metadata docs in the database. Takes no parameter.", notes = "Searches for all metadata docs in the database.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, Metadata doc found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No metadata doc could be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getDocs() { ArrayList<MetadataDoc> docs = null; ObjectMapper mapper = new ObjectMapper(); try { docs = this.metadataDocService.getAll(); String jsonString = mapper.writeValueAsString(docs); return Response.ok(jsonString, MediaType.APPLICATION_JSON).build(); } catch (Exception e) { this.logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } } /** * Get metadata docs in the database by component id * * @return JSON data of the list of all metadata docs */ @GET @Path("/docs/component/{id}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Searches for all metadata doc in the database by component id.", notes = "Searches for all metadata doc in the database by component id.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, metadata doc found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No metadata doc could be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getDocByComponentId(@PathParam("id") int id) { MetadataDoc doc = null; ObjectMapper mapper = new ObjectMapper(); try { doc = this.metadataDocService.getByComponentId(id); String jsonString = mapper.writeValueAsString(doc); return Response.ok(jsonString, MediaType.APPLICATION_JSON).build(); } catch (SQLException e) { this.logger.printStackTrace(e); return Response.ok("{}", MediaType.APPLICATION_JSON).build(); } catch (Exception e) { this.logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } } /** * Get metadata docs in the database by component id * * @return JSON data of the list of all metadata docs */ @GET @Path("/docs/component/{id}/{version}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Searches for all metadata doc in the database by component id.", notes = "Searches for all metadata doc in the database by component id.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, metadata doc found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No metadata doc could be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response 
getDocByComponentIdVersion(@PathParam("id") String id, @PathParam("version") int version) { MetadataDoc doc = null; ObjectMapper mapper = new ObjectMapper(); try { doc = this.metadataDocService.getByComponentIdVersion(id, version); String jsonString = mapper.writeValueAsString(doc); return Response.ok(jsonString, MediaType.APPLICATION_JSON).build(); } catch (SQLException e) { this.logger.printStackTrace(e); return Response.ok("{}", MediaType.APPLICATION_JSON).build(); } catch (Exception e) { this.logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } } /** * Creates or updates a user input metadata doc. * * @param inputJsonString json of the new metadata doc. * @return HttpResponse with the status */ @POST @Path("/docs/{id}/{version}") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Create or update metadata doc.", notes = "Create or update metadata doc.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_CREATED, message = "OK, model stored"), @ApiResponse(code = HttpURLConnection.HTTP_BAD_REQUEST, message = "Input model was not valid"), @ApiResponse(code = HttpURLConnection.HTTP_CONFLICT, message = "Tried to save a model that already had a name and thus was not new"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response postDoc(String inputJsonString, @PathParam("version") int version, @PathParam("id") int id) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "postDoc called with version " + version + " and id " + id); ObjectMapper mapper = new ObjectMapper(); try { this.metadataDocService.createUpdateUserGeneratedMetadata(id, inputJsonString, version); return Response.ok().entity("Doc updated or created").build(); } catch (SQLException e) { this.logger.printStackTrace(e); return Response.serverError().entity("Could not create new metadata doc, SQL exception").build(); } } /** * * Deletes a metadata doc. * * @param id * the id of the metadata doc * * @return HttpResponse containing the status code of the request * */ @DELETE @Path("/docs/{id}") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Deletes a metadata doc by id.", notes = "Deletes a metadata doc by id.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, metadata doc is deleted"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Metadata doc does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response deleteDoc(@PathParam("id") String id) { try { this.metadataDocService.delete(id); return Response.ok().entity("metadata doc deleted").build(); } catch (SQLException e) { this.logger.printStackTrace(e); return Response.serverError().entity("Could not delete metadata doc, SQL exception").build(); } } }
src/main/i5/las2peer/services/modelPersistenceService/RESTResources.java
package i5.las2peer.services.modelPersistenceService; import java.io.Serializable; import java.net.HttpURLConnection; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import javax.ws.rs.BadRequestException; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.InternalServerErrorException; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.JSONValue; import org.json.simple.parser.ParseException; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import i5.cae.semanticCheck.SemanticCheckResponse; import i5.cae.simpleModel.SimpleEntityAttribute; import i5.cae.simpleModel.SimpleModel; import i5.cae.simpleModel.node.SimpleNode; import i5.las2peer.api.Context; import i5.las2peer.api.ServiceException; import i5.las2peer.api.execution.InternalServiceException; import i5.las2peer.api.execution.ServiceAccessDeniedException; import i5.las2peer.api.execution.ServiceInvocationFailedException; import i5.las2peer.api.execution.ServiceMethodNotFoundException; import i5.las2peer.api.execution.ServiceNotAuthorizedException; import i5.las2peer.api.execution.ServiceNotAvailableException; import i5.las2peer.api.execution.ServiceNotFoundException; import i5.las2peer.api.logging.MonitoringEvent; import i5.las2peer.logging.L2pLogger; import i5.las2peer.services.modelPersistenceService.database.DatabaseManager; import i5.las2peer.services.modelPersistenceService.exception.CGSInvocationException; import i5.las2peer.services.modelPersistenceService.exception.ModelNotFoundException; import i5.las2peer.services.modelPersistenceService.exception.VersionedModelNotFoundException; import i5.las2peer.services.modelPersistenceService.model.EntityAttribute; import i5.las2peer.services.modelPersistenceService.model.Model; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import io.swagger.jaxrs.Reader; import io.swagger.models.Swagger; import io.swagger.util.Json; import i5.las2peer.services.modelPersistenceService.model.metadata.MetadataDoc; import i5.las2peer.services.modelPersistenceService.modelServices.*; import i5.las2peer.services.modelPersistenceService.versionedModel.Commit; import i5.las2peer.services.modelPersistenceService.versionedModel.VersionedModel; @Path("/") public class RESTResources { private static final String PROJECT_MANAGEMENT_SERVICE = "i5.las2peer.services.projectManagementService.ProjectManagementService@0.1.0"; private final ModelPersistenceService service = (ModelPersistenceService) Context.getCurrent().getService(); private L2pLogger logger; private String semanticCheckService; private String codeGenerationService; private String deploymentUrl; private DatabaseManager dbm; private MetadataDocService metadataDocService; public RESTResources() throws ServiceException { this.logger = (L2pLogger) service.getLogger(); this.semanticCheckService = service.getSemanticCheckService(); this.codeGenerationService = service.getCodeGenerationService(); this.deploymentUrl = service.getDeploymentUrl(); this.dbm = service.getDbm(); this.metadataDocService = 
service.getMetadataService(); } /** * * Searches for a model in the database by name. * * @param modelId * the id of the model * * @return HttpResponse containing the status code of the request and (if * successful) the model as a JSON string * */ @GET @Path("/models/{modelId}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Searches for a model in the database. Takes the modelName as search parameter.", notes = "Searches for a model in the database.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model could not be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getModel(@PathParam("modelId") int modelId) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModel: searching for model with id " + modelId); Model model = null; Connection connection = null; try { connection = dbm.getConnection(); model = new Model(modelId, connection); } catch (ModelNotFoundException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModel: did not find model with id " + modelId); return Response.status(404).entity("Model not found!").build(); } catch (SQLException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModel: exception fetching model: " + e); logger.printStackTrace(e); return Response.serverError().entity("Database error!").build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModel: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModel: found model " + modelId + ", now converting to JSONObject and returning"); JSONObject jsonModel = model.toJSONObject(); return Response.ok(jsonModel.toJSONString(), MediaType.APPLICATION_JSON).build(); } /** * * Retrieves all model names from the database. * * * @return HttpResponse containing the status code of the request and (if * the database is not empty) the model-list as a JSON array * */ @SuppressWarnings("unchecked") @GET @Path("/models/") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Retrieves a list of models from the database.", notes = "Retrieves a list of all models stored in the database. 
Returns a list of model names.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model list is returned"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No models in the database"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getModels() { ArrayList<Integer> modelIds = new ArrayList<>(); Connection connection = null; try { connection = dbm.getConnection(); // search for all models PreparedStatement statement = connection.prepareStatement("SELECT modelId FROM Model;"); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: retrieving all models.."); ResultSet queryResult = statement.executeQuery(); while (queryResult.next()) { modelIds.add(queryResult.getInt(1)); } if (modelIds.isEmpty()) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: database is empty!"); return Response.status(404).entity("Database is empty!").build(); } connection.close(); } catch (SQLException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: exception fetching model: " + e); logger.printStackTrace(e); return Response.serverError().entity("Database error!").build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: created list of models, now converting to JSONObject and returning"); JSONArray jsonModelList = new JSONArray(); jsonModelList.addAll(modelIds); return Response.ok(jsonModelList.toJSONString(), MediaType.APPLICATION_JSON).build(); } @SuppressWarnings("unchecked") @GET @Path("/models/type/{modelType}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Retrieves a list of models from the database.", notes = "Retrieves a list of all models stored in the database. 
Returns a list of model names.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model list is returned"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No models in the database"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getModelsByType(@PathParam("modelType") String modelType) { ArrayList<String> modelNames = new ArrayList<String>(); Connection connection = null; try { connection = dbm.getConnection(); String sql = "select `ModelAttributes`.`modelName` from `AttributeToModelAttributes`, `Attribute`, `ModelAttributes`\n" + "where `AttributeToModelAttributes`.`attributeId` = `Attribute`.`attributeId`\n" + "and `AttributeToModelAttributes`.`modelAttributesName` = `ModelAttributes`.`modelName`\n" + "and `Attribute`.`name` = 'type'\n" + "and `Attribute`.`value` = '" + modelType + "';"; // search for all models PreparedStatement statement = connection.prepareStatement(sql); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: retrieving all models..!"); ResultSet queryResult = statement.executeQuery(); while (queryResult.next()) { modelNames.add(queryResult.getString(1)); } if (modelNames.isEmpty()) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: retrieving all models.."); return Response.ok(new JSONArray().toJSONString(), MediaType.APPLICATION_JSON).build(); } connection.close(); } catch (SQLException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: exception fetching model: " + e); logger.printStackTrace(e); return Response.serverError().entity("Database error!").build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getModels: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getModels: created list of models, now converting to JSONObject and returning"); JSONArray jsonModelList = new JSONArray(); jsonModelList.addAll(modelNames); return Response.ok(jsonModelList.toJSONString(), MediaType.APPLICATION_JSON).build(); } /** * * Deletes a model. 
* * @param modelId * id of the model * * @return HttpResponse containing the status code of the request * */ @DELETE @Path("/models/{modelId}") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Deletes a model given by its name.", notes = "Deletes a model given by its name.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model is deleted"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response deleteModel(@PathParam("modelId") int modelId) { Connection connection = null; Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deleteModel: trying to delete model with id: " + modelId); try { connection = dbm.getConnection(); Model model = new Model(modelId, connection); // call code generation service if (!codeGenerationService.isEmpty()) { /*try { // TODO: reactivate usage of code generation service //model = callCodeGenerationService("deleteRepositoryOfModel", model, "", null); } catch (CGSInvocationException e) { return Response.serverError().entity("Model not valid: " + e.getMessage()).build(); }*/ } model.deleteFromDatabase(connection); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deleteModel: deleted model " + modelId); return Response.ok("Model deleted!").build(); } catch (ModelNotFoundException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deleteModel: did not find model with id " + modelId); return Response.status(404).entity("Model not found!").build(); } catch (SQLException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "deleteModel: exception deleting model: " + e); logger.printStackTrace(e); return Response.serverError().entity("Internal server error...").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } } /** * Searches for a versioned model with the given id. * @param versionedModelId Id of the versioned model to search for. * @return Response with status code (and possibly error message). */ @GET @Path("/versionedModels/{id}") @ApiOperation(value = "Searches for a versioned model in the database.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message="OK, found versioned model with the given it. 
Return it."), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message="Versioned model with the given id could not be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error.") }) public Response getVersionedModelById(@PathParam("id") int versionedModelId) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getVersionedModelById: searching for versionedModel with id " + versionedModelId); Connection connection = null; try { connection = dbm.getConnection(); // load versioned model by id VersionedModel versionedModel = new VersionedModel(versionedModelId, connection); // if no VersionedModelNotFoundException was thrown, then the model exists // return it return Response.ok(versionedModel.toJSONObject().toJSONString()).build(); } catch (VersionedModelNotFoundException e) { logger.printStackTrace(e); return Response.status(HttpURLConnection.HTTP_NOT_FOUND) .entity("Versioned model with the given id could not be found.").build(); } catch (SQLException e) { logger.printStackTrace(e); return Response.serverError().entity("Internal server error.").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } } /** * Posts a commit to the versioned model. * @param versionedModelId Id of the versioned model, where the commit should be added to. * @param inputCommit Input commit as JSON, also containing the model that should be connected to the commit. * @return Response with status code (and possibly error message). */ @POST @Path("/versionedModels/{id}/commits") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Posts a commit to the versioned model.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, added commit to versioned model."), @ApiResponse(code = HttpURLConnection.HTTP_UNAUTHORIZED, message = "User is not authorized."), @ApiResponse(code = HttpURLConnection.HTTP_BAD_REQUEST, message = "Parse error."), @ApiResponse(code = HttpURLConnection.HTTP_FORBIDDEN, message = "USer is not allowed to commit to the versioned model."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error.") }) public Response postCommitToVersionedModel(@PathParam("id") int versionedModelId, String inputCommit) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "postCommitToVersionedModel: posting commit to versioned model with id " + versionedModelId); Connection connection = null; try { connection = dbm.getConnection(); boolean isAnonymous = (boolean) Context.getCurrent().invoke(PROJECT_MANAGEMENT_SERVICE, "isAnonymous"); if(isAnonymous) { return Response.status(HttpURLConnection.HTTP_UNAUTHORIZED).entity("User not authorized.").build(); } else { boolean hasCommitPermission = (boolean) Context.getCurrent() .invoke(PROJECT_MANAGEMENT_SERVICE, "hasCommitPermission", versionedModelId); if(hasCommitPermission) { // user has the permission to commit to the versioned model // there always exists a commit for "uncommited changes" // that one needs to be removed first connection.setAutoCommit(false); VersionedModel versionedModel = new VersionedModel(versionedModelId, connection); Commit uncommitedChanges = versionedModel.getCommitForUncommitedChanges(); uncommitedChanges.delete(connection); // now create a new commit Commit commit = new Commit(inputCommit, false); commit.persist(versionedModelId, connection, false); // now create new commit for uncommited changes Commit uncommitedChangesNew = new Commit(inputCommit, true); 
uncommitedChangesNew.persist(versionedModelId, connection, false); // reload versionedModel from database versionedModel = new VersionedModel(versionedModelId, connection); // get model Model model = commit.getModel(); // do the semantic check if (!semanticCheckService.isEmpty()) { this.checkModel(model); } // The codegen service and metadatadocservice already require the model to have a "type" attribute // this "type" attribute is included in the request body JSONObject commitJson = (JSONObject) JSONValue.parse(inputCommit); String type = (String) commitJson.get("componentType"); // given type "frontend" needs to be converted to "frontend-component" if(type.equals("frontend")) type = "frontend-component"; // the other types "microservice" and "application" do not need to be converted // these model attributes are not persisted to the database, since model.persist already got called // when the commit got persisted model.getAttributes().add(new EntityAttribute(new SimpleEntityAttribute("syncmetaid", "type", type))); model.getAttributes().add(new EntityAttribute("syncmetaid", "versionedModelId", String.valueOf(versionedModelId))); // call code generation service String commitSha = ""; if (!codeGenerationService.isEmpty()) { try { // get user input metadata doc if available String metadataDocString = model.getMetadataDoc(); if (metadataDocString == null) metadataDocString = ""; Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "postModel: invoking code generation service.."); // TODO: reactivate usage of code generation service // TODO: EDIT: is reactivated now, check if everything works, then this TODO can be removed // check if it is the first commit or not if(versionedModel.getCommits().size() == 2) { // this is the first commit (there are 2 in total, because of the "uncommited changes" commit) commitSha = callCodeGenerationService("createFromModel", metadataDocString, versionedModel, commit); } else { // not the first commit commitSha = callCodeGenerationService("updateRepositoryOfModel", metadataDocString, versionedModel, commit); } } catch (CGSInvocationException e) { try { connection.rollback(); } catch (SQLException e1) {} return Response.serverError().entity("Model not valid: " + e.getMessage()).build(); } } // generate metadata swagger doc after model valid in code generation metadataDocService.modelToSwagger(versionedModel.getId(), model); // now persist the sha given by code generation service commit.persistSha(commitSha, connection); // everything went well -> commit database changes connection.commit(); return Response.ok(commitSha).build(); } else { // user does not have the permission to commit to the versioned model, or an error occurred return Response.status(HttpURLConnection.HTTP_FORBIDDEN) .entity("User is not allowed to commit to the versioned model (or an error occurred).").build(); } } } catch (SQLException e) { try { connection.rollback(); } catch (SQLException e1) {} logger.printStackTrace(e); return Response.serverError().entity("Internal server error.").build(); } catch (ParseException e) { try { connection.rollback(); } catch (SQLException e1) {} logger.printStackTrace(e); return Response.status(HttpURLConnection.HTTP_BAD_REQUEST).entity("Parse error.").build(); } catch (Exception e) { try { connection.rollback(); } catch (SQLException e1) {} logger.printStackTrace(e); return Response.serverError().entity("Internal server error: " + e.getMessage()).build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } } 
/** * Get the status / console text of a build. The build is identified by * using the queue item that is returned when a job is created. * * @param queueItem * The queue item of the job * @return The console text of the job */ @GET @Path("/deployStatus/") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Get the console text of the build from Jenkins", notes = "Get the console text of the build.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model will be deployed"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response deployStatus(@QueryParam("queueItem") String queueItem) { // delegate the request to the code generation service as it is the // service responsible for // Jenkins try { String answer = (String) Context.getCurrent().invoke( "i5.las2peer.services.codeGenerationService.CodeGenerationService@0.1", "deployStatus", queueItem); return Response.ok(answer).build(); } catch (Exception e) { logger.printStackTrace(e); return Response.serverError().entity(e.getMessage()).build(); } } /** * * Requests the code generation service to start a Jenkins job for an * application model. * * @param versionedModelId * id of the versioned model * @param jobAlias * the name/alias of the job to run, i.e. either "Build" or * "Docker" * * @return HttpResponse containing the status code of the request * */ @GET @Path("/deploy/{versionedModelId}/{jobAlias}") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Deploys an application model.", notes = "Deploys an application model.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model will be deployed"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response deployModel(@PathParam("versionedModelId") int versionedModelId, @PathParam("jobAlias") String jobAlias) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deployModel: trying to deploy versioned model with id: " + versionedModelId); Model model; Connection connection = null; // first parse the updated model and check for correctness of format try { connection = dbm.getConnection(); // get versioned model first VersionedModel versionedModel = new VersionedModel(versionedModelId, connection); ArrayList<Commit> commits = versionedModel.getCommits(); if(commits.size() < 2) { return Response.serverError().entity("There does not exist a commit to the versioned model with the given id.").build(); } // get the commit at index 1, because the commit at index 0 is the one for uncommited changes Commit latestCommit = commits.get(1); // use the model of the latest commit for the deployment model = latestCommit.getModel(); // add type attribute "application" model.getAttributes().add(new EntityAttribute(new SimpleEntityAttribute("syncmetaid", "type", "application"))); // add attribute for versionedModelId model.getAttributes().add(new EntityAttribute(new SimpleEntityAttribute("syncmetaid", "versionedModelId", String.valueOf(versionedModelId)))); try { // only create temp repository once, i.e. 
before the "Build" // job is started in Jenkins if (jobAlias.equals("Build")) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "deployModel: invoking code generation service.."); // TODO: reactivate usage of code generation service // TODO: EDIT: is reactivated now, check if everything works, then TODO can be removed callCodeGenerationService("prepareDeploymentApplicationModel", "", null, latestCommit); } // start the jenkins job by the code generation service String answer = (String) Context.getCurrent().invoke( "i5.las2peer.services.codeGenerationService.CodeGenerationService@0.1", "startJenkinsJob", jobAlias); // safe deployment time and url if(!deploymentUrl.isEmpty()) metadataDocService.updateDeploymentDetails(model, deploymentUrl); return Response.ok(answer).build(); } catch (CGSInvocationException e) { return Response.serverError().entity("Model not valid: " + e.getMessage()).build(); } } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "updateModel: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Internal server error!").build(); } // always close connections finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } } //////////////////////////////////////////////////////////////////////////////////////// // Methods special to the CAE. Feel free to ignore them:-) /////////////////////////////////////////////////////////////////////////////////////// /** * * Loads a model from the database (by calling the respective resource) and * sends it to the code generation service, requesting a Communication Model * view to be displayed in SyncMeta's application editor view. * * TODO: Not tested.. * * @param modelId * the id of the model to be loaded. 
* * @return HttpResponse containing the status code of the request and the * communication view model as a JSON string */ @GET @Path("/models/commView/{modelId}") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Gets a CAE communication view model.", notes = "Gets a CAE communication view model.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getCAECommunicationModel(@PathParam("modelId") int modelId) { // load the application model from the database SimpleModel appModel; Connection connection = null; try { connection = dbm.getConnection(); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getCAECommunicationModel: Loading model " + modelId + " from the database"); appModel = (SimpleModel) new Model(modelId, connection).getMinifiedRepresentation(); } catch (SQLException e) { // model might not exist logger.printStackTrace(e); Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getCAECommunicationModel: model " + modelId + " not found"); return Response.status(404).entity("Model " + modelId + " does not exist!").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } // load submodules of application model from the database Serializable[] modelsToSend = null; for (SimpleEntityAttribute attribute : appModel.getAttributes()) { if (attribute.getName().equals("type") && attribute.getValue().equals("application")) { modelsToSend = new SimpleModel[appModel.getNodes().size() + 1]; modelsToSend[0] = appModel; // first is always "application" // model itself int modelsToSendIndex = 1; // iterate through the nodes and add corresponding models to // array for (SimpleNode node : appModel.getNodes()) { // send application models only have one attribute with // its label // TODO: here subModelName got changed to subModelId -> check if it works int subModelId = Integer.valueOf(node.getAttributes().get(0).getValue()); try { connection = dbm.getConnection(); modelsToSend[modelsToSendIndex] = new Model(subModelId, connection) .getMinifiedRepresentation(); } catch (SQLException e) { // model might not exist logger.printStackTrace(e); Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getCAECommunicationModel: Error loading application component: " + subModelId); return Response.serverError().entity("Internal server error...").build(); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } modelsToSendIndex++; } // invoke code generation service try { Serializable[] payload = { modelsToSend }; Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getCAECommunicationModel: Invoking code generation service now.."); SimpleModel communicationModel = (SimpleModel) Context.getCurrent().invoke(codeGenerationService, "getCommunicationViewOfApplicationModel", payload); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getCAECommunicationModel: Got communication model from code generation service.."); Model returnModel = new Model(communicationModel); Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "getCAECommunicationModel: Created model " + modelId + "from simple model, now converting to JSONObject and returning"); JSONObject jsonModel = returnModel.toJSONObject(); return 
Response.ok(jsonModel.toJSONString()).build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getCAECommunicationModel: Internal error " + e.getMessage()); logger.printStackTrace(e); return Response.serverError().entity("Internal server error...").build(); } } } Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "getCAECommunicationModel: model " + modelId + " is not an application"); return Response.serverError().entity("Internal server error...").build(); } /** * * Calls the code generation service to see if the model is a valid CAE * model. Also implements a bit of CAE logic by checking if the code * generation service needs additional models (in case of an application * model) and serves them automatically, such that the rest of this service * does not have to deal with this "special case". * * @param methodName * the method name of the code generation service * @param metadataDoc * @param versionedModel The versioned model where the given model belongs to. * @param commit Commit where the code generation should be called with. * @return Commit sha identifier * * @throws CGSInvocationException * if something went wrong invoking the service * */ private String callCodeGenerationService(String methodName, String metadataDoc, VersionedModel versionedModel, Commit commit) throws CGSInvocationException { Model model = commit.getModel(); if (metadataDoc == null) metadataDoc = ""; Connection connection = null; Serializable[] modelsToSend = null; SimpleModel simpleModel = (SimpleModel) model.getMinifiedRepresentation(); boolean isApplication = false; String modelType = null; for(EntityAttribute a : model.getAttributes()) { if(a.getName().equals("type")) { modelType = a.getValue(); break; } } if (modelType.equals("application")) { isApplication = true; } if (isApplication) { modelsToSend = new SimpleModel[simpleModel.getNodes().size() + 1]; modelsToSend[0] = simpleModel; // first is always "application" // model itself int modelsToSendIndex = 1; // iterate through the nodes and add corresponding models to // array for (SimpleNode node : simpleModel.getNodes()) { String versionedModelIdStr = "-1"; for(SimpleEntityAttribute a : node.getAttributes()) { if(a.getName().equals("versionedModelId")) { versionedModelIdStr = a.getValue(); break; } } int versionedModelId = Integer.parseInt(versionedModelIdStr); // get latest commit VersionedModel v; try { connection = dbm.getConnection(); v = new VersionedModel(versionedModelId, connection); } catch (SQLException e1) { throw new CGSInvocationException(e1.getMessage()); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } ArrayList<Commit> commits = v.getCommits(); if(commits.size() < 2) throw new CGSInvocationException("Application contains versioned model without commit."); // get first "model-commit" Model m = null; for(int i = 1; i < commits.size(); i++) { if(commits.get(i).getCommitType() == Commit.COMMIT_TYPE_MODEL) { m = commits.get(i).getModel(); } } String type = ""; if(node.getType().equals("Frontend Component")) type = "frontend-component"; else if(node.getType().equals("Microservice")) type = "microservice"; m.getAttributes().add(new EntityAttribute("syncmetaid", "type", type)); logger.info("Attributes: " + node.getAttributes().toString()); try { connection = dbm.getConnection(); logger.info("Modelname: " + m.getId()); SimpleModel s = (SimpleModel) m.getMinifiedRepresentation(); // s now has the id of the model as id, not the versioned model id // thus we 
create a new SimpleModel and use the versioned model id as the model id SimpleModel s2 = new SimpleModel(String.valueOf(versionedModelId), s.getNodes(), s.getEdges(), s.getAttributes()); modelsToSend[modelsToSendIndex] = (Serializable) s2; } catch (SQLException e) { // model might not exist logger.printStackTrace(e); throw new CGSInvocationException("Error loading application component: " + m.getId()); } finally { try { connection.close(); } catch (SQLException e) { logger.printStackTrace(e); } } modelsToSendIndex++; } } else { SimpleModel oldModel = null; // check if there exists an old model int commitCount = versionedModel.getCommits().size(); if(commitCount == 2) { // there only exists one commit and the "uncommited changes" commit modelsToSend = new SimpleModel[1]; modelsToSend[0] = simpleModel; } else { // there exists an old commit modelsToSend = new SimpleModel[2]; modelsToSend[0] = simpleModel; Model old = null; for(int i = 2; i < versionedModel.getCommits().size(); i++) { if(versionedModel.getCommits().get(i).getCommitType() == Commit.COMMIT_TYPE_MODEL) { old = versionedModel.getCommits().get(i).getModel(); break; } } // the old model does not contain attributes for type and versionedModelId old.getAttributes().add(new EntityAttribute("syncmetaid", "versionedModelId", String.valueOf(versionedModel.getId()))); oldModel = (SimpleModel) old.getMinifiedRepresentation(); modelsToSend[1] = oldModel; } } // actual invocation try { String answer = ""; if (!methodName.equals("updateRepositoryOfModel") && !methodName.equals("createFromModel")) { Serializable[] payload = { modelsToSend }; answer = (String) Context.getCurrent().invoke(codeGenerationService, methodName, payload); } else { // method is either updateRepositoryOfModel or createFromModel String versionTag = commit.getVersionTag(); if(versionTag == null) versionTag = ""; Serializable[] payload = { commit.getMessage(), versionTag, metadataDoc, modelsToSend }; answer = (String) Context.getCurrent().invoke(codeGenerationService, methodName, payload); } if (!answer.startsWith("done")) { throw new CGSInvocationException(answer); } if(answer.startsWith("done:")) return answer.split("done:")[1]; return ""; } catch (Exception e) { logger.printStackTrace(e); throw new CGSInvocationException(e.getMessage()); } } //////////////////////////////////////////////////////////////////////////////////////// // Methods for Semantic Check //////////////////////////////////////////////////////////////////////////////////////// /** * * Performs the semantic check (if specified) on the model, without storing * it * * @param inputModel * the model as a JSON string * * @return HttpResponse status of the check * */ @PUT @Path("/semantics") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Performs the semantic check", notes = "Performs the semantic check") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_NOT_MODIFIED, message = "Semantic Check successful"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response checkModel(String inputModel) { Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE, "checkModel: performing semantic check"); Model model; // first parse the updated model and check for correctness of format try { model = new Model(inputModel); } catch (ParseException e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "semantic check: exception parsing JSON input: " + e); return Response.serverError().entity("JSON parsing exception, file not 
valid!").build(); } catch (Exception e) { Context.get().monitorEvent(MonitoringEvent.SERVICE_ERROR, "semantic check: something went seriously wrong: " + e); logger.printStackTrace(e); return Response.serverError().entity("Internal server error!").build(); } // do the semantic story check if (!semanticCheckService.isEmpty()) { this.doSemanticCheck(model); } else { return Response.status(400).entity("No semantic check service available").build(); } return Response.ok(SemanticCheckResponse.success().toJSONResultString()).build(); } private void checkModel(Model model) { SemanticCheckResponse result; EntityAttribute semcheckAttr = findSemcheckAttribute(model); try { result = (SemanticCheckResponse) Context.getCurrent().invoke(semanticCheckService, "doSemanticCheck", model.getMinifiedRepresentation()); } catch (Exception e) { System.out.println(e); throw new InternalServerErrorException("could not execute semantic check service"); } if (result == null) { throw new InternalServerErrorException("an error orrcured within the semantic check"); } else if (result.getError() != 0) { if (semcheckAttr == null) { throw new BadRequestException(result.toJSONResultString()); } else if (!semcheckAttr.getValue().equals("false")) { throw new BadRequestException("This model was supposed to be incorrect"); } } else if (result.getError() == 0) { if (semcheckAttr != null && !semcheckAttr.getValue().equals("true")) { throw new BadRequestException("This model was supposed to be correct"); } } } private void doSemanticCheck(Model model) { SemanticCheckResponse result; try { result = (SemanticCheckResponse) Context.getCurrent().invoke(semanticCheckService, "doSemanticCheck", model.getMinifiedRepresentation()); } catch (Exception e) { System.out.println(e); throw new InternalServerErrorException("could not execute semantic check service"); } if (result == null) { throw new InternalServerErrorException("an error orrcured within the semantic check"); } else if (result.getError() != 0) { throw new InternalServerErrorException(Response.ok(result.toJSONResultString()).build()); } } private EntityAttribute findSemcheckAttribute(Model model) { EntityAttribute res = null; for (EntityAttribute a : model.getAttributes()) { if (a.getName().equals("_semcheck")) { return a; } } return res; } //////////////////////////////////////////////////////////////////////////////////////// // Methods providing a Swagger documentation of the service API. //////////////////////////////////////////////////////////////////////////////////////// /** * * Returns the API documentation for a specific annotated top level resource * for purposes of the Swagger documentation. * * Note: If you do not intend to use Swagger for the documentation of your * Service API, this method may be removed. * * Trouble shooting: Please make sure that the endpoint URL below is correct * with respect to your service. 
* * @return the resource's documentation * */ @GET @Path("/models/swagger.json") @Produces(MediaType.APPLICATION_JSON) public Response getSwaggerJSON() { Swagger swagger = new Reader(new Swagger()).read(this.getClass()); if (swagger == null) { return Response.status(404).entity("Swagger API declaration not available!").build(); } try { return Response.ok(Json.mapper().writeValueAsString(swagger), MediaType.APPLICATION_JSON).build(); } catch (JsonProcessingException e) { logger.printStackTrace(e); return Response.serverError().entity(e.getMessage()).build(); } } /***********METADATA DOCS*************** */ /** * Get all element to element connections in the database * * @return JSON data of the list of all element to element connections */ @GET @Path("/docs/") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Searches for all metadata docs in the database. Takes no parameter.", notes = "Searches for all metadata docs in the database.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, Metadata doc found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No metadata doc could be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getDocs() { ArrayList<MetadataDoc> docs = null; ObjectMapper mapper = new ObjectMapper(); try { docs = this.metadataDocService.getAll(); String jsonString = mapper.writeValueAsString(docs); return Response.ok(jsonString, MediaType.APPLICATION_JSON).build(); } catch (Exception e) { this.logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } } /** * Get metadata docs in the database by component id * * @return JSON data of the list of all metadata docs */ @GET @Path("/docs/component/{id}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Searches for all metadata doc in the database by component id.", notes = "Searches for all metadata doc in the database by component id.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, metadata doc found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No metadata doc could be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response getDocByComponentId(@PathParam("id") int id) { MetadataDoc doc = null; ObjectMapper mapper = new ObjectMapper(); try { doc = this.metadataDocService.getByComponentId(id); String jsonString = mapper.writeValueAsString(doc); return Response.ok(jsonString, MediaType.APPLICATION_JSON).build(); } catch (SQLException e) { this.logger.printStackTrace(e); return Response.ok("{}", MediaType.APPLICATION_JSON).build(); } catch (Exception e) { this.logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } } /** * Get metadata docs in the database by component id * * @return JSON data of the list of all metadata docs */ @GET @Path("/docs/component/{id}/{version}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Searches for all metadata doc in the database by component id.", notes = "Searches for all metadata doc in the database by component id.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, metadata doc found"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "No metadata doc could be found."), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response 
getDocByComponentIdVersion(@PathParam("id") String id, @PathParam("version") int version) { MetadataDoc doc = null; ObjectMapper mapper = new ObjectMapper(); try { doc = this.metadataDocService.getByComponentIdVersion(id, version); String jsonString = mapper.writeValueAsString(doc); return Response.ok(jsonString, MediaType.APPLICATION_JSON).build(); } catch (SQLException e) { this.logger.printStackTrace(e); return Response.ok("{}", MediaType.APPLICATION_JSON).build(); } catch (Exception e) { this.logger.printStackTrace(e); return Response.serverError().entity("Server error!").build(); } } /** * Creates or update user input metadata doc. * * @param inputJsonString json of the new model. * @return HttpResponse with the status */ @POST @Path("/docs/{id}/{version}") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Create or update metadata doc.", notes = "Create or update metadata doc.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_CREATED, message = "OK, model stored"), @ApiResponse(code = HttpURLConnection.HTTP_BAD_REQUEST, message = "Input model was not valid"), @ApiResponse(code = HttpURLConnection.HTTP_CONFLICT, message = "Tried to save a model that already had a name and thus was not new"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response postDoc(String inputJsonString, @PathParam("version") int version, @PathParam("id") int id) { ObjectMapper mapper = new ObjectMapper(); try { this.metadataDocService.createUpdateUserGeneratedMetadata(id, inputJsonString, version); return Response.ok().entity("Doc updated or created").build(); } catch (SQLException e) { this.logger.printStackTrace(e); return Response.serverError().entity("Could not create new metadata doc, SQL exception").build(); } } /** * * Deletes a model. * * @param modelName * a string containing the model name * * @return HttpResponse containing the status code of the request * */ @DELETE @Path("/docs/{id}") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Deletes a metadata doc by id.", notes = "Deletes a metadata doc by id.") @ApiResponses(value = { @ApiResponse(code = HttpURLConnection.HTTP_OK, message = "OK, model is deleted"), @ApiResponse(code = HttpURLConnection.HTTP_NOT_FOUND, message = "Model does not exist"), @ApiResponse(code = HttpURLConnection.HTTP_INTERNAL_ERROR, message = "Internal server error") }) public Response deleteDoc(@PathParam("id") String id) { try { this.metadataDocService.delete(id); return Response.ok().entity("element to element deleted").build(); } catch (SQLException e) { this.logger.printStackTrace(e); return Response.serverError().entity("Could not delete metadata doc, SQL exception").build(); } } }
Added debug message for postDoc method
src/main/i5/las2peer/services/modelPersistenceService/RESTResources.java
Added debug message for postDoc method
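The commit above amounts to one extra line at the top of postDoc in RESTResources.java: a las2peer monitoring call that records which version and component id the endpoint was invoked with. A condensed sketch of the resulting method, taken from the new_contents row above with the method-level JAX-RS/Swagger annotations and the unused ObjectMapper left out for readability:

public Response postDoc(String inputJsonString, @PathParam("version") int version, @PathParam("id") int id) {
    // new in this commit: report the incoming parameters through the las2peer monitoring pipeline
    Context.get().monitorEvent(MonitoringEvent.SERVICE_MESSAGE,
            "postDoc called with version " + version + " and id " + id);
    try {
        // unchanged: create or update the user-generated metadata doc for this component/version
        this.metadataDocService.createUpdateUserGeneratedMetadata(id, inputJsonString, version);
        return Response.ok().entity("Doc updated or created").build();
    } catch (SQLException e) {
        this.logger.printStackTrace(e);
        return Response.serverError().entity("Could not create new metadata doc, SQL exception").build();
    }
}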
Java
mit
8c45eb9825e1db38d29cd9cf25106cfb5cfd2c3c
0
Pozo/bkk-info
package com.github.pozo.bkkinfo.activities; import com.github.pozo.bkkinfo.R; import com.github.pozo.bkkinfo.services.NotificationService; import com.github.pozo.bkkinfo.shared.Constants; import com.github.pozo.bkkinfo.shared.NetworkConnectionHelper; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.Preference; import android.preference.PreferenceActivity; import android.preference.PreferenceManager; import android.util.Log; public class BasicPreferenceActivity extends PreferenceActivity { public static final String PREFERENCES_NOTIFICATION_SOUND = "notificationSound"; public static final String PREFERENCES_SYNC_DATE = "prefLastSyncDate"; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); addPreferencesFromResource(R.xml.preferences); SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(this); Preference prefLastSyncDate = findPreference(PREFERENCES_SYNC_DATE); prefLastSyncDate.setSummary(sharedPrefs.getString(PREFERENCES_SYNC_DATE, getResources().getString(R.string.notification_settings_last_refresh_default_value))); } @Override protected void onPause() { super.onPause(); Log.i(Constants.LOG_TAG, "BasicPreferenceActivity:onPause"); if(NetworkConnectionHelper.isNetworkConnected(this)) { stopService(new Intent(this, NotificationService.class)); Intent intent = new Intent(this, NotificationService.class); intent.putExtra(NotificationService.KEY_NEED_REFRESH, false); startService(intent); } } }
src/com/github/pozo/bkkinfo/activities/BasicPreferenceActivity.java
package com.github.pozo.bkkinfo.activities; import com.github.pozo.bkkinfo.R; import com.github.pozo.bkkinfo.services.NotificationService; import com.github.pozo.bkkinfo.shared.Constants; import com.github.pozo.bkkinfo.shared.NetworkConnectionHelper; import android.content.Intent; import android.os.Bundle; import android.preference.PreferenceActivity; import android.util.Log; public class BasicPreferenceActivity extends PreferenceActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); addPreferencesFromResource(R.xml.preferences); } @Override protected void onPause() { super.onPause(); Log.i(Constants.LOG_TAG, "BasicPreferenceActivity:onPause"); if(NetworkConnectionHelper.isNetworkConnected(this)) { stopService(new Intent(this, NotificationService.class)); startService(new Intent(this, NotificationService.class)); } } }
Added preference reading in onCreate and refactored service startup
src/com/github/pozo/bkkinfo/activities/BasicPreferenceActivity.java
Added preference reading in onCreate and refactored service startup
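The bkk-info commit touches two spots in BasicPreferenceActivity, as visible by comparing the new and old contents above: onCreate now reads the stored last-sync date from the default SharedPreferences and shows it as the summary of the PREFERENCES_SYNC_DATE ("prefLastSyncDate") preference, and onPause restarts NotificationService with an explicit KEY_NEED_REFRESH extra instead of a bare intent. A condensed sketch of the two new method bodies, using only names that appear in the file above:

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    addPreferencesFromResource(R.xml.preferences);
    // new: surface the persisted last-sync timestamp as the preference's summary line
    SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(this);
    Preference prefLastSyncDate = findPreference(PREFERENCES_SYNC_DATE);
    prefLastSyncDate.setSummary(sharedPrefs.getString(PREFERENCES_SYNC_DATE,
            getResources().getString(R.string.notification_settings_last_refresh_default_value)));
}

@Override
protected void onPause() {
    super.onPause();
    Log.i(Constants.LOG_TAG, "BasicPreferenceActivity:onPause");
    if (NetworkConnectionHelper.isNetworkConnected(this)) {
        // refactored: restart the service and explicitly signal that no data refresh is needed
        stopService(new Intent(this, NotificationService.class));
        Intent intent = new Intent(this, NotificationService.class);
        intent.putExtra(NotificationService.KEY_NEED_REFRESH, false);
        startService(intent);
    }
}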
Java
mit
4cc40a2ac60d1d519cd21c35dfafbd78a7fbdb87
0
jt19056/Notey,jt19056/Notey
package thomas.jonathan.notey; import android.annotation.TargetApi; import android.app.AlarmManager; import android.app.IntentService; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Intent; import android.content.SharedPreferences; import android.database.sqlite.SQLiteException; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.Build; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.v4.app.NotificationCompat; import java.util.Date; import java.util.List; /* This service is used to restore all the notifications stored in the database*/ public class NotificationBootService extends IntentService { final int CURRENT_ANDROID_VERSION = Build.VERSION.SDK_INT; int priority, repeatTime = 0; boolean pref_expand, pref_swipe, pref_share_action; String clickNotif, intentType, pref_priority, noteString; PendingIntent alarmPendingIntent; public NotificationBootService() { super("NotificationService"); } @TargetApi(Build.VERSION_CODES.JELLY_BEAN) @Override public void onStart(Intent intent, int startId) { MySQLiteHelper db = new MySQLiteHelper(this); List<NoteyNote> allNoteys = db.getAllNoteys(); NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); intentType = intent.getExtras().getString("action"); if (intentType == null) return; //loop through noteys and re-create the alarms if (intentType.equals("boot_alarm")) { for (NoteyNote n : allNoteys) { if (n.getAlarm() != null) { // intent for service to launch info screen when alarm goes off Intent myIntent = new Intent(this, AlarmService.class); Bundle bundle = new Bundle(); bundle.putInt("alarmID", n.getId()); myIntent.putExtras(bundle); alarmPendingIntent = PendingIntent.getService(this, n.getId(), myIntent, PendingIntent.FLAG_UPDATE_CURRENT); //set alarm AlarmManager alarmManager = (AlarmManager) getSystemService(ALARM_SERVICE); SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this); repeatTime = sp.getInt("repeat" + n.getId(), 0); //set repeating alarm or set regular alarm if (repeatTime != 0) { // check the sharedPrefs for the check box to wake up the device if (PreferenceManager.getDefaultSharedPreferences(getBaseContext()).getBoolean("wake" + Integer.toString(n.getId()), true)) alarmManager.setRepeating(AlarmManager.RTC_WAKEUP, Long.valueOf(n.getAlarm()), repeatTime * 1000 * 60, alarmPendingIntent); else alarmManager.setRepeating(AlarmManager.RTC, Long.valueOf(n.getAlarm()), repeatTime * 1000 * 60, alarmPendingIntent); } else { //set regualar alarm //if the alarm hasn't past then continue if(Long.valueOf(n.getAlarm()) > System.currentTimeMillis()) { // check the sharedPrefs for the check box to wake up the device if (PreferenceManager.getDefaultSharedPreferences(getBaseContext()).getBoolean("wake" + Integer.toString(n.getId()), true)) alarmManager.set(AlarmManager.RTC_WAKEUP, Long.valueOf(n.getAlarm()), alarmPendingIntent); else alarmManager.set(AlarmManager.RTC, Long.valueOf(n.getAlarm()), alarmPendingIntent); } } } } } else if (intentType.equals("boot")) { //Settings stuff initializeSettings(); //show shortcut notification if settings say so if (PreferenceManager.getDefaultSharedPreferences(getBaseContext()).getBoolean("pref_shortcut", false)) { Notification n = new NotificationCompat.Builder(this) .setContentTitle(getString(R.string.app_name)) .setContentText(getString(R.string.quick_note)) 
.setSmallIcon(R.drawable.ic_launcher_dashclock) .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_new_note)) .setOngoing(true) .setContentIntent(PendingIntent.getActivity(getApplicationContext(), MainActivity.SHORTCUT_NOTIF_ID, new Intent(this, MainActivity.class), PendingIntent.FLAG_UPDATE_CURRENT)) .setAutoCancel(false) .setPriority(Notification.PRIORITY_MIN) .build(); nm.notify(MainActivity.SHORTCUT_NOTIF_ID, n); } for (NoteyNote n : allNoteys) { PendingIntent piDismiss = createOnDismissedIntent(n.getId()); PendingIntent piEdit = createEditIntent(n); PendingIntent piShare = createShareIntent(n); //for users on < v2.0.3, they didn't have 'title' column in db if (n.getTitle() == null) { n.setTitle(getString(R.string.app_name)); db.updateNotey(n); } noteString = n.getNote(); //temp note text to display alarm if (n.getAlarm() != null) { Date date = new Date(Long.valueOf(n.getAlarm())); // add the alarm info to the notification noteString += "\n" + getString(R.string.alarm) + ": " + MainActivity.format_short_date.format(date) + ", " + MainActivity.format_short_time.format(date); } String tickerText; //if title is there, set ticker to title. otherwise set it to the note if (n.getTitle().equals("") || n.getTitle().equals(getString(R.string.app_name))) tickerText = n.getNote(); else tickerText = n.getTitle(); //converts the iconName to int. if there isn't an iconName, default to check white int ico; try { ico = getResources().getIdentifier(n.getIconName(), "drawable", getPackageName()); } catch (SQLiteException e) { ico = R.drawable.ic_check_white_36dp; e.printStackTrace(); } catch (Exception e) { ico = R.drawable.ic_check_white_36dp; e.printStackTrace(); } Bitmap bm; //big white icons are un-seeable on lollipop if (CURRENT_ANDROID_VERSION >= 21 && n.getIconName().contains("white_36dp")) { bm = null; } else bm = BitmapFactory.decodeResource(getResources(), ico); Notification notif; if (pref_expand && pref_share_action && CURRENT_ANDROID_VERSION >= 16) { //expandable, with share button, and on jelly bean or greater notif = new NotificationCompat.Builder(this) .setContentTitle(n.getTitle()) .setContentText(n.getNote()) .setTicker(tickerText) .setSmallIcon(ico) .setLargeIcon(bm) .setStyle(new NotificationCompat.BigTextStyle().bigText(noteString)) .setDeleteIntent(piDismiss) .setContentIntent(onNotifClickPI(clickNotif, n)) .setOngoing(!pref_swipe) .setAutoCancel(false) .setPriority(priority) .addAction(R.drawable.ic_edit_white_24dp, getString(R.string.edit), piEdit) .addAction(R.drawable.ic_share_white_24dp, getString(R.string.share), piShare) .addAction(R.drawable.ic_delete_white_24dp, getString(R.string.remove), piDismiss) .build(); } // same as above, except no share action button else if (pref_expand && !pref_share_action && CURRENT_ANDROID_VERSION >= 16) { notif = new NotificationCompat.Builder(this) .setContentTitle(n.getTitle()) .setContentText(n.getNote()) .setTicker(tickerText) .setSmallIcon(ico) .setLargeIcon(bm) .setStyle(new NotificationCompat.BigTextStyle().bigText(noteString)) .setDeleteIntent(piDismiss) .setContentIntent(onNotifClickPI(clickNotif, n)) .setOngoing(!pref_swipe) .setAutoCancel(false) .setPriority(priority) .addAction(R.drawable.ic_edit_white_24dp, getString(R.string.edit), piEdit) .addAction(R.drawable.ic_delete_white_24dp, getString(R.string.remove), piDismiss) .build(); } //not expandable, but still able to set priority else if (!pref_expand && CURRENT_ANDROID_VERSION >= 16) { notif = new NotificationCompat.Builder(this) 
.setContentTitle(n.getTitle()) .setContentText(n.getNote()) .setTicker(tickerText) .setSmallIcon(ico) .setDeleteIntent(piDismiss) .setContentIntent(onNotifClickPI(clickNotif, n)) .setOngoing(!pref_swipe) .setAutoCancel(false) .setPriority(priority) .build(); } //if api < 16 else { notif = new NotificationCompat.Builder(this) .setContentTitle(n.getTitle()) .setContentText(n.getNote()) .setTicker(tickerText) .setSmallIcon(ico) .setContentIntent(onNotifClickPI(clickNotif, n)) .setDeleteIntent(piDismiss) .setOngoing(!pref_swipe) .setAutoCancel(false) .build(); } nm.notify(n.getId(), notif); } } stopSelf(); } @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void initializeSettings() { SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getBaseContext()); if (CURRENT_ANDROID_VERSION >= 16) { pref_expand = sharedPreferences.getBoolean("pref_expand", true); pref_swipe = sharedPreferences.getBoolean("pref_swipe", false); pref_priority = sharedPreferences.getString("pref_priority", "normal"); switch (pref_priority) { case "high": priority = Notification.PRIORITY_MAX; break; case "low": priority = Notification.PRIORITY_LOW; break; case "minimum": priority = Notification.PRIORITY_MIN; break; default: priority = Notification.PRIORITY_DEFAULT; break; } } else { pref_expand = false; pref_swipe = sharedPreferences.getBoolean("pref_swipe", true); } clickNotif = sharedPreferences.getString("clickNotif", "edit"); pref_share_action = sharedPreferences.getBoolean("pref_share_action", true); } private PendingIntent createOnDismissedIntent(int notificationId) { Intent intent = new Intent(this, NotificationDismiss.class); intent.putExtra("NotificationID", notificationId); return PendingIntent.getBroadcast(getApplicationContext(), notificationId, intent, 0); } private PendingIntent createEditIntent(NoteyNote n) { Intent editIntent = new Intent(this, MainActivity.class); editIntent.putExtra("editNotificationID", n.getId()); editIntent.putExtra("editNote", n.getNote()); editIntent.putExtra("editLoc", n.getSpinnerLoc()); editIntent.putExtra("editButton", n.getImgBtnNum()); editIntent.putExtra("editTitle", n.getTitle()); editIntent.putExtra("editAlarm", n.getAlarm()); editIntent.putExtra("editRepeat", repeatTime); editIntent.putExtra("editAlarmPendingIntent", alarmPendingIntent); return PendingIntent.getActivity(getApplicationContext(), n.getId(), editIntent, PendingIntent.FLAG_UPDATE_CURRENT); } private PendingIntent createShareIntent(NoteyNote n) { Intent sendIntent = new Intent(); sendIntent.setAction(Intent.ACTION_SEND); sendIntent.putExtra(Intent.EXTRA_TEXT, n.getNote()); sendIntent.setType("text/plain"); return PendingIntent.getActivity(this, n.getId(), sendIntent, 0); } private PendingIntent createInfoScreenIntent(NoteyNote n) { Intent infoIntent = new Intent(this, InfoScreenActivity.class); infoIntent.putExtra("infoNotificationID", n.getId()); infoIntent.putExtra("infoNote", n.getNote()); infoIntent.putExtra("infoLoc", n.getSpinnerLoc()); infoIntent.putExtra("infoButton", n.getImgBtnNum()); infoIntent.putExtra("infoTitle", n.getTitle()); infoIntent.putExtra("infoAlarm", n.getAlarm()); infoIntent.putExtra("infoRepeat", repeatTime); infoIntent.putExtra("infoAlarmPendingIntent", alarmPendingIntent); return PendingIntent.getActivity(getApplicationContext(), n.getId(), infoIntent, PendingIntent.FLAG_UPDATE_CURRENT); } private PendingIntent onNotifClickPI(String clickNotif, NoteyNote n) { switch (clickNotif) { case "info": return createInfoScreenIntent(n); case "edit": return 
createEditIntent(n); case "remove": return createOnDismissedIntent(n.getId()); default: return null; } } @Override protected void onHandleIntent(Intent intent) { /*empty*/ } }
app/src/main/java/thomas/jonathan/notey/NotificationBootService.java
package thomas.jonathan.notey; import android.annotation.TargetApi; import android.app.AlarmManager; import android.app.IntentService; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Intent; import android.content.SharedPreferences; import android.database.sqlite.SQLiteException; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.Build; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.v4.app.NotificationCompat; import java.util.Date; import java.util.List; /* This service is used to restore all the notifications stored in the database*/ public class NotificationBootService extends IntentService { final int CURRENT_ANDROID_VERSION = Build.VERSION.SDK_INT; int priority, repeatTime = 0; boolean pref_expand, pref_swipe, pref_share_action; String clickNotif, intentType, pref_priority, noteString; PendingIntent alarmPendingIntent; public NotificationBootService() { super("NotificationService"); } @TargetApi(Build.VERSION_CODES.JELLY_BEAN) @Override public void onStart(Intent intent, int startId) { MySQLiteHelper db = new MySQLiteHelper(this); List<NoteyNote> allNoteys = db.getAllNoteys(); NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); intentType = intent.getExtras().getString("action"); if (intentType == null) return; //loop through noteys and re-create the alarms if (intentType.equals("boot_alarm")) { for (NoteyNote n : allNoteys) { if (n.getAlarm() != null) { // intent for service to launch info screen when alarm goes off Intent myIntent = new Intent(this, AlarmService.class); Bundle bundle = new Bundle(); bundle.putInt("alarmID", n.getId()); myIntent.putExtras(bundle); alarmPendingIntent = PendingIntent.getService(this, n.getId(), myIntent, PendingIntent.FLAG_UPDATE_CURRENT); //set alarm AlarmManager alarmManager = (AlarmManager) getSystemService(ALARM_SERVICE); SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this); repeatTime = sp.getInt("repeat" + n.getId(), 0); //set repeating alarm or set regular alarm if (repeatTime != 0) { // check the sharedPrefs for the check box to wake up the device if (PreferenceManager.getDefaultSharedPreferences(getBaseContext()).getBoolean("wake" + Integer.toString(n.getId()), true)) alarmManager.setRepeating(AlarmManager.RTC_WAKEUP, Long.valueOf(n.getAlarm()), repeatTime * 1000 * 60, alarmPendingIntent); else alarmManager.setRepeating(AlarmManager.RTC, Long.valueOf(n.getAlarm()), repeatTime * 1000 * 60, alarmPendingIntent); } else { //set regualar alarm //if the alarm hasn't past then continue if(Long.valueOf(n.getAlarm()) > System.currentTimeMillis()) { // check the sharedPrefs for the check box to wake up the device if (PreferenceManager.getDefaultSharedPreferences(getBaseContext()).getBoolean("wake" + Integer.toString(n.getId()), true)) alarmManager.set(AlarmManager.RTC_WAKEUP, Long.valueOf(n.getAlarm()), alarmPendingIntent); else alarmManager.set(AlarmManager.RTC, Long.valueOf(n.getAlarm()), alarmPendingIntent); } } } } } else if (intentType.equals("boot")) { //Settings stuff initializeSettings(); //show shortcut notification if settings say so if (PreferenceManager.getDefaultSharedPreferences(getBaseContext()).getBoolean("pref_shortcut", false)) { Notification n = new NotificationCompat.Builder(this) .setContentTitle(getString(R.string.app_name)) .setContentText(getString(R.string.quick_note)) 
.setSmallIcon(R.drawable.ic_launcher_dashclock) .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_new_note)) .setOngoing(true) .setContentIntent(PendingIntent.getActivity(getApplicationContext(), MainActivity.SHORTCUT_NOTIF_ID, new Intent(this, MainActivity.class), PendingIntent.FLAG_UPDATE_CURRENT)) .setAutoCancel(false) .setPriority(Notification.PRIORITY_MIN) .build(); nm.notify(MainActivity.SHORTCUT_NOTIF_ID, n); } for (NoteyNote n : allNoteys) { PendingIntent piDismiss = createOnDismissedIntent(n.getId()); PendingIntent piEdit = createEditIntent(n); PendingIntent piShare = createShareIntent(n); //for users on < v2.0.3, they didn't have 'title' column in db if (n.getTitle() == null) { n.setTitle(getString(R.string.app_name)); db.updateNotey(n); } noteString = n.getNote(); //temp note text to display alarm if (n.getAlarm() != null) { Date date = new Date(Long.valueOf(n.getAlarm())); // add the alarm info to the notification noteString += "\n" + getString(R.string.alarm) + ": " + MainActivity.format_short_date.format(date) + ", " + MainActivity.format_short_time.format(date); } String tickerText; //if title is there, set ticker to title. otherwise set it to the note if (n.getTitle().equals("") || n.getTitle().equals(getString(R.string.app_name))) tickerText = n.getNote(); else tickerText = n.getTitle(); //converts the iconName to int. if there isn't an iconName, default to check white int ico; try { ico = getResources().getIdentifier(n.getIconName(), "drawable", getPackageName()); } catch (SQLiteException e) { ico = R.drawable.ic_check_white_36dp; e.printStackTrace(); } catch (Exception e) { ico = R.drawable.ic_check_white_36dp; e.printStackTrace(); } Bitmap bm; //big white icons are un-seeable on lollipop if (CURRENT_ANDROID_VERSION >= 21 && n.getIconName().contains("white_36dp")) { bm = null; } else bm = BitmapFactory.decodeResource(getResources(), ico); Notification notif; if (pref_expand && pref_share_action && CURRENT_ANDROID_VERSION >= 16) { //expandable, with share button, and on jelly bean or greater notif = new NotificationCompat.Builder(this) .setContentTitle(n.getTitle()) .setContentText(n.getNote()) .setTicker(tickerText) .setSmallIcon(ico) .setLargeIcon(bm) .setStyle(new NotificationCompat.BigTextStyle().bigText(noteString)) .setDeleteIntent(piDismiss) .setContentIntent(onNotifClickPI(clickNotif, n)) .setOngoing(!pref_swipe) .setAutoCancel(false) .setPriority(priority) .addAction(R.drawable.ic_edit_white_24dp, getString(R.string.edit), piEdit) .addAction(R.drawable.ic_share_white_24dp, getString(R.string.share), piShare) .addAction(R.drawable.ic_delete_white_24dp, getString(R.string.remove), piDismiss) .build(); } // same as above, except no share action button else if (pref_expand && !pref_share_action && CURRENT_ANDROID_VERSION >= 16) { notif = new NotificationCompat.Builder(this) .setContentTitle(n.getTitle()) .setContentText(n.getNote()) .setTicker(tickerText) .setSmallIcon(ico) .setLargeIcon(bm) .setStyle(new NotificationCompat.BigTextStyle().bigText(noteString)) .setDeleteIntent(piDismiss) .setContentIntent(onNotifClickPI(clickNotif, n)) .setOngoing(!pref_swipe) .setAutoCancel(false) .setPriority(priority) .addAction(R.drawable.ic_edit_white_24dp, getString(R.string.edit), piEdit) .addAction(R.drawable.ic_delete_white_24dp, getString(R.string.remove), piDismiss) .build(); } //not expandable, but still able to set priority else if (!pref_expand && CURRENT_ANDROID_VERSION >= 16) { notif = new NotificationCompat.Builder(this) 
.setContentTitle(n.getTitle()) .setContentText(tickerText) .setTicker(n.getNote()) .setSmallIcon(ico) .setDeleteIntent(piDismiss) .setContentIntent(onNotifClickPI(clickNotif, n)) .setOngoing(!pref_swipe) .setAutoCancel(false) .setPriority(priority) .build(); } //if api < 16 else { notif = new NotificationCompat.Builder(this) .setContentTitle(n.getTitle()) .setContentText(n.getNote()) .setTicker(tickerText) .setSmallIcon(ico) .setContentIntent(onNotifClickPI(clickNotif, n)) .setDeleteIntent(piDismiss) .setOngoing(!pref_swipe) .setAutoCancel(false) .build(); } nm.notify(n.getId(), notif); } } stopSelf(); } @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void initializeSettings() { SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getBaseContext()); if (CURRENT_ANDROID_VERSION >= 16) { pref_expand = sharedPreferences.getBoolean("pref_expand", true); pref_swipe = sharedPreferences.getBoolean("pref_swipe", false); pref_priority = sharedPreferences.getString("pref_priority", "normal"); switch (pref_priority) { case "high": priority = Notification.PRIORITY_MAX; break; case "low": priority = Notification.PRIORITY_LOW; break; case "minimum": priority = Notification.PRIORITY_MIN; break; default: priority = Notification.PRIORITY_DEFAULT; break; } } else { pref_expand = false; pref_swipe = sharedPreferences.getBoolean("pref_swipe", true); } clickNotif = sharedPreferences.getString("clickNotif", "edit"); pref_share_action = sharedPreferences.getBoolean("pref_share_action", true); } private PendingIntent createOnDismissedIntent(int notificationId) { Intent intent = new Intent(this, NotificationDismiss.class); intent.putExtra("NotificationID", notificationId); return PendingIntent.getBroadcast(getApplicationContext(), notificationId, intent, 0); } private PendingIntent createEditIntent(NoteyNote n) { Intent editIntent = new Intent(this, MainActivity.class); editIntent.putExtra("editNotificationID", n.getId()); editIntent.putExtra("editNote", n.getNote()); editIntent.putExtra("editLoc", n.getSpinnerLoc()); editIntent.putExtra("editButton", n.getImgBtnNum()); editIntent.putExtra("editTitle", n.getTitle()); editIntent.putExtra("editAlarm", n.getAlarm()); editIntent.putExtra("editRepeat", repeatTime); editIntent.putExtra("editAlarmPendingIntent", alarmPendingIntent); return PendingIntent.getActivity(getApplicationContext(), n.getId(), editIntent, PendingIntent.FLAG_UPDATE_CURRENT); } private PendingIntent createShareIntent(NoteyNote n) { Intent sendIntent = new Intent(); sendIntent.setAction(Intent.ACTION_SEND); sendIntent.putExtra(Intent.EXTRA_TEXT, n.getNote()); sendIntent.setType("text/plain"); return PendingIntent.getActivity(this, n.getId(), sendIntent, 0); } private PendingIntent createInfoScreenIntent(NoteyNote n) { Intent infoIntent = new Intent(this, InfoScreenActivity.class); infoIntent.putExtra("infoNotificationID", n.getId()); infoIntent.putExtra("infoNote", n.getNote()); infoIntent.putExtra("infoLoc", n.getSpinnerLoc()); infoIntent.putExtra("infoButton", n.getImgBtnNum()); infoIntent.putExtra("infoTitle", n.getTitle()); infoIntent.putExtra("infoAlarm", n.getAlarm()); infoIntent.putExtra("infoRepeat", repeatTime); infoIntent.putExtra("infoAlarmPendingIntent", alarmPendingIntent); return PendingIntent.getActivity(getApplicationContext(), n.getId(), infoIntent, PendingIntent.FLAG_UPDATE_CURRENT); } private PendingIntent onNotifClickPI(String clickNotif, NoteyNote n) { switch (clickNotif) { case "info": return createInfoScreenIntent(n); case "edit": return 
createEditIntent(n); case "remove": return createOnDismissedIntent(n.getId()); default: return null; } } @Override protected void onHandleIntent(Intent intent) { /*empty*/ } }
Case 34 Closes #34
app/src/main/java/thomas/jonathan/notey/NotificationBootService.java
Case 34
Java
mit
6bc70a87dcfd178606f2a605db680672cad3cfae
0
rjust/defects4j,jose/defects4j
package edu.washington.cs.mut.testrunner; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.OutputStream; import java.io.PrintStream; import java.util.regex.Matcher; import java.util.regex.Pattern; import junit.framework.AssertionFailedError; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.taskdefs.optional.junit.JUnitResultFormatter; import org.apache.tools.ant.taskdefs.optional.junit.JUnitTest; public class Formatter implements JUnitResultFormatter { private PrintStream ps; private PrintStream allTests; { try { this.ps = new PrintStream(new FileOutputStream(System.getProperty("OUTFILE", "failing-tests.txt"), true), true); this.allTests = new PrintStream(new FileOutputStream(System.getProperty("ALLTESTS", "all_tests"), true), true); } catch (FileNotFoundException e) { throw new RuntimeException(e); } } @Override public void endTestSuite(JUnitTest arg0) throws BuildException { } @Override public void setOutput(OutputStream arg0) { } @Override public void setSystemError(String arg0) { } @Override public void setSystemOutput(String arg0) { } String className ; boolean alreadyPrinted = true; @Override public void startTestSuite(JUnitTest junitTest) throws BuildException { className = junitTest.getName(); alreadyPrinted = false; } @Override public void addError(Test test, Throwable t) { handle(test, t); } @Override public void addFailure(Test test, AssertionFailedError t) { handle(test,t); } private void handle(Test test, Throwable t) { String prefix = "--- " ; String className = null; String methodName = null; if (test == null) { // if test is null it indicates an initialization error for the class failClass(t, prefix); return; } { Pattern regexp = Pattern.compile("(.*)\\((.*)\\)"); Matcher match = regexp.matcher(test.toString()); if (match.matches()) { className = match.group(2); methodName = match.group(1); } } { Pattern regexp = Pattern.compile("(.*):(.*)"); // for some weird reson this format is used for Timeout in Junit4 Matcher match = regexp.matcher(test.toString()); if (match.matches()) { className = match.group(1); methodName = match.group(2); } } if ("warning".equals(methodName) || "initializationError".equals(methodName)) { failClass(t, prefix); // there is an issue with the class, not the method. } else if (null != methodName && null != className) { if (isJunit4InitFail(t)) { failClass(t, prefix); } else { ps.println(prefix + className + "::" + methodName); // normal case t.printStackTrace(ps); } } else { ps.print(prefix + "broken test input " + test.toString()); t.printStackTrace(ps); } } private void failClass(Throwable t, String prefix) { if (!this.alreadyPrinted) { ps.println(prefix + this.className); t.printStackTrace(ps); this.alreadyPrinted = true; } } private boolean isJunit4InitFail(Throwable t) { for (StackTraceElement ste: t.getStackTrace()) { if ("createTest".equals(ste.getMethodName())) { return true; } } return false; } @Override public void endTest(Test test) { } @Override public void startTest(Test test) { allTests.println(test.toString()); } }
framework/lib/formatter/src/edu/washington/cs/mut/testrunner/Formatter.java
package edu.washington.cs.mut.testrunner; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.OutputStream; import java.io.PrintStream; import java.util.regex.Matcher; import java.util.regex.Pattern; import junit.framework.AssertionFailedError; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.taskdefs.optional.junit.JUnitResultFormatter; import org.apache.tools.ant.taskdefs.optional.junit.JUnitTest; public class Formatter implements JUnitResultFormatter { private PrintStream ps; { try { this.ps = new PrintStream(new FileOutputStream(System.getProperty("OUTFILE", "failing-tests.txt"), true), true); } catch (FileNotFoundException e) { throw new RuntimeException(e); } } @Override public void endTestSuite(JUnitTest arg0) throws BuildException { } @Override public void setOutput(OutputStream arg0) { } @Override public void setSystemError(String arg0) { } @Override public void setSystemOutput(String arg0) { } String className ; boolean alreadyPrinted = true; @Override public void startTestSuite(JUnitTest junitTest) throws BuildException { className = junitTest.getName(); alreadyPrinted = false; } @Override public void addError(Test test, Throwable t) { handle(test, t); } @Override public void addFailure(Test test, AssertionFailedError t) { handle(test,t); } private void handle(Test test, Throwable t) { String prefix = "--- " ; String className = null; String methodName = null; if (test == null) { // if test is null it indicates an initialization error for the class failClass(t, prefix); return; } { Pattern regexp = Pattern.compile("(.*)\\((.*)\\)"); Matcher match = regexp.matcher(test.toString()); if (match.matches()) { className = match.group(2); methodName = match.group(1); } } { Pattern regexp = Pattern.compile("(.*):(.*)"); // for some weird reson this format is used for Timeout in Junit4 Matcher match = regexp.matcher(test.toString()); if (match.matches()) { className = match.group(1); methodName = match.group(2); } } if ("warning".equals(methodName) || "initializationError".equals(methodName)) { failClass(t, prefix); // there is an issue with the class, not the method. } else if (null != methodName && null != className) { if (isJunit4InitFail(t)) { failClass(t, prefix); } else { ps.println(prefix + className + "::" + methodName); // normal case t.printStackTrace(ps); } } else { ps.print(prefix + "broken test input " + test.toString()); t.printStackTrace(ps); } } private void failClass(Throwable t, String prefix) { if (!this.alreadyPrinted) { ps.println(prefix + this.className); t.printStackTrace(ps); this.alreadyPrinted = true; } } private boolean isJunit4InitFail(Throwable t) { for (StackTraceElement ste: t.getStackTrace()) { if ("createTest".equals(ste.getMethodName())) { return true; } } return false; } @Override public void endTest(Test test) { } @Override public void startTest(Test test) { } }
Log the name of all executed tests, in addition to the failing ones.
framework/lib/formatter/src/edu/washington/cs/mut/testrunner/Formatter.java
Log the name of all executed tests, in addition to the failing ones.
Java
mit
0a50448b08ca5ecfa3a8d62c6c8bdd3d5784c6f8
0
anotheria/moskito
package net.anotheria.moskito.aop.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Include the whole class or its method into monitoring. * * @author Vitaliy Zhovtiuk. */ @Target ( {ElementType.TYPE, ElementType.METHOD, ElementType.ANNOTATION_TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface Monitor { /** * Id of the producer for producer registry. If null/unset the class name is extracted. * @return producer id */ String producerId() default ""; /** * Subsystem name. If null/unset 'default' will be used. * @return sub-system string */ String subsystem() default ""; /** * Category name. If null/unset 'annotated' will be used. * @return category string */ String category() default ""; }
moskito-aop/src/main/java/net/anotheria/moskito/aop/annotation/Monitor.java
package net.anotheria.moskito.aop.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Include the whole class or its method into monitoring. * * @author <a href="mailto:[email protected]">Vitaliy Zhovtiuk</a> */ @Target ( {ElementType.TYPE, ElementType.METHOD, ElementType.ANNOTATION_TYPE}) @Retention(RetentionPolicy.RUNTIME) public @interface Monitor { /** * Id of the producer for producer registry. If null/unset the class name is extracted. * @return producer id */ String producerId() default ""; /** * Subsystem name. If null/unset default will be used. * @return sub-system string */ String subsystem() default ""; /** * Category name. If null/unset annotated will be used. * @return category string */ String category() default ""; }
fixed author tag
moskito-aop/src/main/java/net/anotheria/moskito/aop/annotation/Monitor.java
fixed author tag
Java
mit
205f22398d196bbeb0d8fbbeec59059de4518f3d
0
carrot/cream
package com.carrotcreative.cream.cache; import android.content.Context; import com.carrotcreative.cream.tasks.ReadSerializableTask; import com.carrotcreative.cream.tasks.WriteSerializableTask; import java.io.File; import java.io.FileFilter; import java.io.Serializable; import java.util.regex.Pattern; public class CacheManager { public static final String PREFIX_EXPIRATION_DELIMITER = "-"; //======================================= //============== Singleton ============== //======================================= private static CacheManager sInstance; public static CacheManager getInstance(Context context){ if(sInstance == null){ sInstance = new CacheManager(context); } return sInstance; } //=================================== //============== Class ============== //=================================== private final File mRootDir; private final Context mContext; private CacheManager(Context context) { mContext = context; mRootDir = context.getCacheDir(); } //============================================ //================== Cache =================== //============================================ public void readSerializable(String directoryString, String fileExtension, String prefix, boolean regardForExpiration, ReadSerializableTask.ReadSerializableCallback cb) { File directory = new File(mRootDir, directoryString); //Finding the file final File[] matchingFiles = getMatchingFiles(directory, prefix, fileExtension); for(File f : matchingFiles) { long expiration = getFileExpiration(f, fileExtension); //If it's not expired, or we have no regard for expiration if(!(System.currentTimeMillis() > expiration) || !regardForExpiration) { readSerializable(f, cb); return; } } cb.failure(null); } public void writeSerializable(String directoryString, long expirationMinutes, String fileExtension, String prefix, Serializable content, WriteSerializableTask.WriteSerializableCallback cb) { File directory = new File(mRootDir, directoryString); long expiration = getExpirationEpochMinutes(expirationMinutes); String fileString = prefix + PREFIX_EXPIRATION_DELIMITER + expiration + "." + fileExtension; File file = new File(directory, fileString); deleteAllByPrefix(prefix, directory, fileExtension); writeSerializable(content, file, cb); } /** * This goes and deletes files that are expired by trashDays */ public void runTrashCleanup(String directoryString, String fileExtension, long trashMinutes) { File cleanupDir = new File(mRootDir, directoryString); File[] allFiles = cleanupDir.listFiles(); //http://docs.oracle.com/javase/1.5.0/docs/api/java/io/File.html#listFiles%28%29 if(allFiles != null){ for(File f : allFiles) { if(f.toString().endsWith(fileExtension)) { long trashDate = getFileTrashDate(f, fileExtension, trashMinutes); if(f.isFile() && (System.currentTimeMillis() > trashDate)) { f.delete(); } } } } } //============================================== //============== Helper Functions ============== //============================================== /** * Only to be ran by write functions. * * We're writing a fresh object, so obviously * we want to delete all of the old ones. 
*/ private void deleteAllByPrefix(String prefix, File directory, String fileExtension) { final File[] matchingFiles = getMatchingFiles(directory, prefix, fileExtension); for(File f : matchingFiles) { f.delete(); } } private long getFileTrashDate(File f, String extension, long trashMinutes) { long fileExpiration = getFileExpiration(f, extension); long diff = 1000 * 60 * trashMinutes; return fileExpiration + diff; } private long getFileExpiration(File f, String extension) { String expirationString = f.getName() .replaceFirst(".*" + PREFIX_EXPIRATION_DELIMITER, "") .replace("." + extension, ""); return Long.parseLong(expirationString); } private long getExpirationEpochMinutes(long minutes) { long diff = 1000 * 60 * minutes; return System.currentTimeMillis() + diff; } private static File[] getMatchingFiles(File root, String prefix, String fileExtension) { String regex = prefix + PREFIX_EXPIRATION_DELIMITER + ".*" + "\\." + fileExtension; if(!root.isDirectory()) { root.mkdir(); return new File[0]; } final Pattern p = Pattern.compile(regex); // careful: could also throw an exception! return root.listFiles(new FileFilter(){ @Override public boolean accept(File file) { return p.matcher(file.getName()).matches(); } }); } private static void writeSerializable(Serializable obj, File file, WriteSerializableTask.WriteSerializableCallback cb) { WriteSerializableTask task = new WriteSerializableTask(obj, file, cb); Void[] voidArray = new Void[0]; task.execute(voidArray); } private void readSerializable(File file, ReadSerializableTask.ReadSerializableCallback cb) { ReadSerializableTask task = new ReadSerializableTask(cb, file); Void[] voidArray = new Void[0]; task.execute(voidArray); } }
src/main/java/com/carrotcreative/cream/cache/CacheManager.java
package com.carrotcreative.cream.cache; import android.content.Context; import com.carrotcreative.cream.tasks.ReadSerializableTask; import com.carrotcreative.cream.tasks.WriteSerializableTask; import java.io.File; import java.io.FileFilter; import java.io.Serializable; import java.util.regex.Pattern; public class CacheManager { public static final String PREFIX_EXPIRATION_DELIMITER = "-CR-"; //======================================= //============== Singleton ============== //======================================= private static CacheManager sInstance; public static CacheManager getInstance(Context context){ if(sInstance == null){ sInstance = new CacheManager(context); } return sInstance; } //=================================== //============== Class ============== //=================================== private final File mRootDir; private final Context mContext; private CacheManager(Context context) { mContext = context; mRootDir = context.getCacheDir(); } //============================================ //================== Cache =================== //============================================ public void readSerializable(String directoryString, String fileExtension, String prefix, boolean regardForExpiration, ReadSerializableTask.ReadSerializableCallback cb) { File directory = new File(mRootDir, directoryString); //Finding the file final File[] matchingFiles = getMatchingFiles(directory, prefix, fileExtension); for(File f : matchingFiles) { long expiration = getFileExpiration(f, fileExtension); //If it's not expired, or we have no regard for expiration if(!(System.currentTimeMillis() > expiration) || !regardForExpiration) { readSerializable(f, cb); return; } } cb.failure(null); } public void writeSerializable(String directoryString, long expirationMinutes, String fileExtension, String prefix, Serializable content, WriteSerializableTask.WriteSerializableCallback cb) { File directory = new File(mRootDir, directoryString); long expiration = getExpirationEpochMinutes(expirationMinutes); String fileString = prefix + PREFIX_EXPIRATION_DELIMITER + expiration + "." + fileExtension; File file = new File(directory, fileString); deleteAllByPrefix(prefix, directory, fileExtension); writeSerializable(content, file, cb); } /** * This goes and deletes files that are expired by trashDays */ public void runTrashCleanup(String directoryString, String fileExtension, long trashMinutes) { File cleanupDir = new File(mRootDir, directoryString); File[] allFiles = cleanupDir.listFiles(); //http://docs.oracle.com/javase/1.5.0/docs/api/java/io/File.html#listFiles%28%29 if(allFiles != null){ for(File f : allFiles) { if(f.toString().endsWith(fileExtension)) { long trashDate = getFileTrashDate(f, fileExtension, trashMinutes); if(f.isFile() && (System.currentTimeMillis() > trashDate)) { f.delete(); } } } } } //============================================== //============== Helper Functions ============== //============================================== /** * Only to be ran by write functions. * * We're writing a fresh object, so obviously * we want to delete all of the old ones. 
*/ private void deleteAllByPrefix(String prefix, File directory, String fileExtension) { final File[] matchingFiles = getMatchingFiles(directory, prefix, fileExtension); for(File f : matchingFiles) { f.delete(); } } private long getFileTrashDate(File f, String extension, long trashMinutes) { long fileExpiration = getFileExpiration(f, extension); long diff = 1000 * 60 * trashMinutes; return fileExpiration + diff; } private long getFileExpiration(File f, String extension) { String expirationString = f.getName() .replaceFirst(".*" + PREFIX_EXPIRATION_DELIMITER, "") .replace("." + extension, ""); return Long.parseLong(expirationString); } private long getExpirationEpochMinutes(long minutes) { long diff = 1000 * 60 * minutes; return System.currentTimeMillis() + diff; } private static File[] getMatchingFiles(File root, String prefix, String fileExtension) { String regex = prefix + PREFIX_EXPIRATION_DELIMITER + ".*" + "\\." + fileExtension; if(!root.isDirectory()) { root.mkdir(); return new File[0]; } final Pattern p = Pattern.compile(regex); // careful: could also throw an exception! return root.listFiles(new FileFilter(){ @Override public boolean accept(File file) { return p.matcher(file.getName()).matches(); } }); } private static void writeSerializable(Serializable obj, File file, WriteSerializableTask.WriteSerializableCallback cb) { WriteSerializableTask task = new WriteSerializableTask(obj, file, cb); Void[] voidArray = new Void[0]; task.execute(voidArray); } private void readSerializable(File file, ReadSerializableTask.ReadSerializableCallback cb) { ReadSerializableTask task = new ReadSerializableTask(cb, file); Void[] voidArray = new Void[0]; task.execute(voidArray); } }
change delimiter to - from -CR-
src/main/java/com/carrotcreative/cream/cache/CacheManager.java
change delimiter to - from -CR-
Java
mit
374668052f148982de868a4ff40e6cec281bba3a
0
onessimofalconi/bc-java,FAU-Inf2/spongycastle,sergeypayu/bc-java,Skywalker-11/spongycastle,isghe/bc-java,bcgit/bc-java,open-keychain/spongycastle,iseki-masaya/spongycastle,partheinstein/bc-java,sonork/spongycastle,savichris/spongycastle,lesstif/spongycastle
package org.bouncycastle.crypto.tls; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.security.SecureRandom; import java.util.Enumeration; import java.util.Hashtable; import java.util.Vector; import org.bouncycastle.asn1.ASN1Primitive; import org.bouncycastle.asn1.x500.X500Name; import org.bouncycastle.crypto.prng.ThreadedSeedGenerator; import org.bouncycastle.util.Arrays; import org.bouncycastle.util.Integers; /** * An implementation of all high level protocols in TLS 1.0. */ public class TlsProtocolHandler { private static final Integer EXT_RenegotiationInfo = Integers.valueOf(ExtensionType.renegotiation_info); /* * Our Connection states */ private static final short CS_CLIENT_HELLO_SEND = 1; private static final short CS_SERVER_HELLO_RECEIVED = 2; private static final short CS_SERVER_CERTIFICATE_RECEIVED = 3; private static final short CS_SERVER_KEY_EXCHANGE_RECEIVED = 4; private static final short CS_CERTIFICATE_REQUEST_RECEIVED = 5; private static final short CS_SERVER_HELLO_DONE_RECEIVED = 6; private static final short CS_CLIENT_KEY_EXCHANGE_SEND = 7; private static final short CS_CERTIFICATE_VERIFY_SEND = 8; private static final short CS_CLIENT_CHANGE_CIPHER_SPEC_SEND = 9; private static final short CS_CLIENT_FINISHED_SEND = 10; private static final short CS_SERVER_CHANGE_CIPHER_SPEC_RECEIVED = 11; private static final short CS_DONE = 12; private static final byte[] emptybuf = new byte[0]; private static final String TLS_ERROR_MESSAGE = "Internal TLS error, this could be an attack"; /* * Queues for data from some protocols. */ private ByteQueue applicationDataQueue = new ByteQueue(); private ByteQueue changeCipherSpecQueue = new ByteQueue(); private ByteQueue alertQueue = new ByteQueue(); private ByteQueue handshakeQueue = new ByteQueue(); /* * The Record Stream we use */ private RecordStream rs; private SecureRandom random; private TlsInputStream tlsInputStream = null; private TlsOutputStream tlsOutputStream = null; private boolean closed = false; private boolean failedWithError = false; private boolean appDataReady = false; private Hashtable clientExtensions; private SecurityParameters securityParameters = null; private TlsClientContextImpl tlsClientContext = null; private TlsClient tlsClient = null; private int[] offeredCipherSuites = null; private short[] offeredCompressionMethods = null; private TlsKeyExchange keyExchange = null; private TlsAuthentication authentication = null; private CertificateRequest certificateRequest = null; private short connection_state = 0; private static SecureRandom createSecureRandom() { /* * We use our threaded seed generator to generate a good random seed. If the user * has a better random seed, he should use the constructor with a SecureRandom. */ ThreadedSeedGenerator tsg = new ThreadedSeedGenerator(); SecureRandom random = new SecureRandom(); /* * Hopefully, 20 bytes in fast mode are good enough. */ random.setSeed(tsg.generateSeed(20, true)); return random; } public TlsProtocolHandler(InputStream is, OutputStream os) { this(is, os, createSecureRandom()); } public TlsProtocolHandler(InputStream is, OutputStream os, SecureRandom sr) { this.rs = new RecordStream(this, is, os); this.random = sr; } protected void processData(short protocol, byte[] buf, int offset, int len) throws IOException { /* * Have a look at the protocol type, and add it to the correct queue. 
*/ switch (protocol) { case ContentType.change_cipher_spec: changeCipherSpecQueue.addData(buf, offset, len); processChangeCipherSpec(); break; case ContentType.alert: alertQueue.addData(buf, offset, len); processAlert(); break; case ContentType.handshake: handshakeQueue.addData(buf, offset, len); processHandshake(); break; case ContentType.application_data: if (!appDataReady) { this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } applicationDataQueue.addData(buf, offset, len); processApplicationData(); break; default: /* * Uh, we don't know this protocol. * * RFC2246 defines on page 13, that we should ignore this. */ } } private void processHandshake() throws IOException { boolean read; do { read = false; /* * We need the first 4 bytes, they contain type and length of the message. */ if (handshakeQueue.size() >= 4) { byte[] beginning = new byte[4]; handshakeQueue.read(beginning, 0, 4, 0); ByteArrayInputStream bis = new ByteArrayInputStream(beginning); short type = TlsUtils.readUint8(bis); int len = TlsUtils.readUint24(bis); /* * Check if we have enough bytes in the buffer to read the full message. */ if (handshakeQueue.size() >= (len + 4)) { /* * Read the message. */ byte[] buf = new byte[len]; handshakeQueue.read(buf, 0, len, 4); handshakeQueue.removeData(len + 4); /* * RFC 2246 7.4.9. The value handshake_messages includes all handshake * messages starting at client hello up to, but not including, this * finished message. [..] Note: [Also,] Hello Request messages are * omitted from handshake hashes. */ switch (type) { case HandshakeType.hello_request: case HandshakeType.finished: break; default: rs.updateHandshakeData(beginning, 0, 4); rs.updateHandshakeData(buf, 0, len); break; } /* * Now, parse the message. */ processHandshakeMessage(type, buf); read = true; } } } while (read); } private void processHandshakeMessage(short type, byte[] buf) throws IOException { ByteArrayInputStream is = new ByteArrayInputStream(buf); switch (type) { case HandshakeType.certificate: { switch (connection_state) { case CS_SERVER_HELLO_RECEIVED: { // Parse the Certificate message and send to cipher suite Certificate serverCertificate = Certificate.parse(is); assertEmpty(is); this.keyExchange.processServerCertificate(serverCertificate); this.authentication = tlsClient.getAuthentication(); this.authentication.notifyServerCertificate(serverCertificate); break; } default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } connection_state = CS_SERVER_CERTIFICATE_RECEIVED; break; } case HandshakeType.finished: switch (connection_state) { case CS_SERVER_CHANGE_CIPHER_SPEC_RECEIVED: /* * Read the checksum from the finished message, it has always 12 * bytes for TLS 1.0 and 36 for SSLv3. */ int checksumLength = tlsClientContext.getServerVersion().isSSL() ? 36 : 12; byte[] serverVerifyData = new byte[checksumLength]; TlsUtils.readFully(serverVerifyData, is); assertEmpty(is); /* * Calculate our own checksum. */ byte[] expectedServerVerifyData = TlsUtils.calculateVerifyData(tlsClientContext, "server finished", rs.getCurrentHash(TlsUtils.SSL_SERVER)); /* * Compare both checksums. */ if (!Arrays.constantTimeAreEqual(expectedServerVerifyData, serverVerifyData)) { /* * Wrong checksum in the finished message. */ this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } connection_state = CS_DONE; /* * We are now ready to receive application data. 
*/ this.appDataReady = true; break; default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } break; case HandshakeType.server_hello: switch (connection_state) { case CS_CLIENT_HELLO_SEND: /* * Read the server hello message */ ProtocolVersion server_version = TlsUtils.readVersion(is); // Check that this matches what the server is sending in the record layer if (!server_version.equals(rs.getDiscoveredServerVersion())) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } ProtocolVersion client_version = this.tlsClientContext.getClientVersion(); // TODO[DTLS] This comparison needs to allow for DTLS (with decreasing minor version numbers) if (server_version.getFullVersion() > client_version.getFullVersion()) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } this.tlsClientContext.setServerVersion(server_version); this.tlsClient.notifyServerVersion(server_version); /* * Read the server random */ securityParameters.serverRandom = new byte[32]; TlsUtils.readFully(securityParameters.serverRandom, is); byte[] sessionID = TlsUtils.readOpaque8(is); if (sessionID.length > 32) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } this.tlsClient.notifySessionID(sessionID); /* * Find out which CipherSuite the server has chosen and check that * it was one of the offered ones. */ int selectedCipherSuite = TlsUtils.readUint16(is); if (!arrayContains(offeredCipherSuites, selectedCipherSuite) || selectedCipherSuite == CipherSuite.TLS_EMPTY_RENEGOTIATION_INFO_SCSV) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } this.tlsClient.notifySelectedCipherSuite(selectedCipherSuite); /* * Find out which CompressionMethod the server has chosen and check that * it was one of the offered ones. */ short selectedCompressionMethod = TlsUtils.readUint8(is); if (!arrayContains(offeredCompressionMethods, selectedCompressionMethod)) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } this.tlsClient.notifySelectedCompressionMethod(selectedCompressionMethod); /* * RFC3546 2.2 The extended server hello message format MAY be * sent in place of the server hello message when the client has * requested extended functionality via the extended client hello * message specified in Section 2.1. ... Note that the extended * server hello message is only sent in response to an extended * client hello message. This prevents the possibility that the * extended server hello message could "break" existing TLS 1.0 * clients. */ /* * TODO RFC 3546 2.3 If [...] the older session is resumed, then * the server MUST ignore extensions appearing in the client * hello, and send a server hello containing no extensions. */ // Integer -> byte[] Hashtable serverExtensions = new Hashtable(); /* * RFC 3546 2.2 Note that the extended server hello message is only sent in response * to an extended client hello message. * However, see RFC 5746 exception below. We always include the SCSV, so an * Extended Server Hello is always allowed. 
*/ if (is.available() > 0) { // Process extensions from extended server hello byte[] extBytes = TlsUtils.readOpaque16(is); ByteArrayInputStream ext = new ByteArrayInputStream(extBytes); while (ext.available() > 0) { Integer extType = Integers.valueOf(TlsUtils.readUint16(ext)); byte[] extValue = TlsUtils.readOpaque16(ext); /* * RFC 5746 Note that sending a "renegotiation_info" * extension in response to a ClientHello containing only * the SCSV is an explicit exception to the prohibition in * RFC 5246, Section 7.4.1.4, on the server sending * unsolicited extensions and is only allowed because the * client is signaling its willingness to receive the * extension via the TLS_EMPTY_RENEGOTIATION_INFO_SCSV * SCSV. TLS implementations MUST continue to comply with * Section 7.4.1.4 for all other extensions. */ if (!extType.equals(EXT_RenegotiationInfo) && clientExtensions.get(extType) == null) { /* * RFC 3546 2.3 Note that for all extension types * (including those defined in future), the extension * type MUST NOT appear in the extended server hello * unless the same extension type appeared in the * corresponding client hello. Thus clients MUST abort * the handshake if they receive an extension type in * the extended server hello that they did not request * in the associated (extended) client hello. */ this.failWithError(AlertLevel.fatal, AlertDescription.unsupported_extension); } if (serverExtensions.containsKey(extType)) { /* * RFC 3546 2.3 Also note that when multiple * extensions of different types are present in the * extended client hello or the extended server hello, * the extensions may appear in any order. There MUST * NOT be more than one extension of the same type. */ this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } serverExtensions.put(extType, extValue); } } assertEmpty(is); /* * RFC 5746 3.4. When a ServerHello is received, the client MUST * check if it includes the "renegotiation_info" extension: */ { boolean secure_negotiation = serverExtensions.containsKey(EXT_RenegotiationInfo); /* * If the extension is present, set the secure_renegotiation * flag to TRUE. The client MUST then verify that the length * of the "renegotiated_connection" field is zero, and if it * is not, MUST abort the handshake (by sending a fatal * handshake_failure alert). 
*/ if (secure_negotiation) { byte[] renegExtValue = (byte[])serverExtensions.get(EXT_RenegotiationInfo); if (!Arrays.constantTimeAreEqual(renegExtValue, createRenegotiationInfo(emptybuf))) { this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } } tlsClient.notifySecureRenegotiation(secure_negotiation); } if (clientExtensions != null) { tlsClient.processServerExtensions(serverExtensions); } this.keyExchange = tlsClient.getKeyExchange(); connection_state = CS_SERVER_HELLO_RECEIVED; break; default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } break; case HandshakeType.server_hello_done: switch (connection_state) { case CS_SERVER_HELLO_RECEIVED: // There was no server certificate message; check it's OK this.keyExchange.skipServerCertificate(); this.authentication = null; // NB: Fall through to next case label case CS_SERVER_CERTIFICATE_RECEIVED: // There was no server key exchange message; check it's OK this.keyExchange.skipServerKeyExchange(); // NB: Fall through to next case label case CS_SERVER_KEY_EXCHANGE_RECEIVED: case CS_CERTIFICATE_REQUEST_RECEIVED: assertEmpty(is); connection_state = CS_SERVER_HELLO_DONE_RECEIVED; TlsCredentials clientCreds = null; if (certificateRequest == null) { this.keyExchange.skipClientCredentials(); } else { clientCreds = this.authentication.getClientCredentials(certificateRequest); if (clientCreds == null) { this.keyExchange.skipClientCredentials(); if (tlsClientContext.getServerVersion().isSSL()) { sendAlert(AlertLevel.warning, AlertDescription.no_certificate); } else { sendClientCertificate(Certificate.EMPTY_CHAIN); } } else { this.keyExchange.processClientCredentials(clientCreds); sendClientCertificate(clientCreds.getCertificate()); } } /* * Send the client key exchange message, depending on the key * exchange we are using in our CipherSuite. */ sendClientKeyExchange(); connection_state = CS_CLIENT_KEY_EXCHANGE_SEND; /* * Calculate the master_secret */ { byte[] pms = this.keyExchange.generatePremasterSecret(); try { securityParameters.masterSecret = TlsUtils.calculateMasterSecret( this.tlsClientContext, pms); } finally { // TODO Is there a way to ensure the data is really overwritten? /* * RFC 2246 8.1. The pre_master_secret should be deleted from * memory once the master_secret has been computed. */ if (pms != null) { Arrays.fill(pms, (byte)0); } } } if (clientCreds != null && clientCreds instanceof TlsSignerCredentials) { TlsSignerCredentials signerCreds = (TlsSignerCredentials)clientCreds; byte[] md5andsha1 = rs.getCurrentHash(null); byte[] clientCertificateSignature = signerCreds.generateCertificateSignature( md5andsha1); sendCertificateVerify(clientCertificateSignature); connection_state = CS_CERTIFICATE_VERIFY_SEND; } /* * Now, we send change cipher state */ byte[] cmessage = new byte[1]; cmessage[0] = 1; rs.writeMessage(ContentType.change_cipher_spec, cmessage, 0, cmessage.length); connection_state = CS_CLIENT_CHANGE_CIPHER_SPEC_SEND; /* * Initialize our cipher suite */ rs.clientCipherSpecDecided(tlsClient.getCompression(), tlsClient.getCipher()); /* * Send our finished message. 
*/ byte[] clientVerifyData = TlsUtils.calculateVerifyData(tlsClientContext, "client finished", rs.getCurrentHash(TlsUtils.SSL_CLIENT)); ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.finished, bos); TlsUtils.writeUint24(clientVerifyData.length, bos); bos.write(clientVerifyData); byte[] message = bos.toByteArray(); rs.writeMessage(ContentType.handshake, message, 0, message.length); this.connection_state = CS_CLIENT_FINISHED_SEND; break; default: this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } break; case HandshakeType.server_key_exchange: { switch (connection_state) { case CS_SERVER_HELLO_RECEIVED: // There was no server certificate message; check it's OK this.keyExchange.skipServerCertificate(); this.authentication = null; // NB: Fall through to next case label case CS_SERVER_CERTIFICATE_RECEIVED: this.keyExchange.processServerKeyExchange(is); assertEmpty(is); break; default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } this.connection_state = CS_SERVER_KEY_EXCHANGE_RECEIVED; break; } case HandshakeType.certificate_request: { switch (connection_state) { case CS_SERVER_CERTIFICATE_RECEIVED: // There was no server key exchange message; check it's OK this.keyExchange.skipServerKeyExchange(); // NB: Fall through to next case label case CS_SERVER_KEY_EXCHANGE_RECEIVED: { if (this.authentication == null) { /* * RFC 2246 7.4.4. It is a fatal handshake_failure alert * for an anonymous server to request client identification. */ this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } int numTypes = TlsUtils.readUint8(is); short[] certificateTypes = new short[numTypes]; for (int i = 0; i < numTypes; ++i) { certificateTypes[i] = TlsUtils.readUint8(is); } byte[] authorities = TlsUtils.readOpaque16(is); assertEmpty(is); Vector authorityDNs = new Vector(); ByteArrayInputStream bis = new ByteArrayInputStream(authorities); while (bis.available() > 0) { byte[] dnBytes = TlsUtils.readOpaque16(bis); authorityDNs.addElement(X500Name.getInstance(ASN1Primitive.fromByteArray(dnBytes))); } this.certificateRequest = new CertificateRequest(certificateTypes, authorityDNs); this.keyExchange.validateCertificateRequest(this.certificateRequest); break; } default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } this.connection_state = CS_CERTIFICATE_REQUEST_RECEIVED; break; } case HandshakeType.hello_request: /* * RFC 2246 7.4.1.1 Hello request This message will be ignored by the * client if the client is currently negotiating a session. This message * may be ignored by the client if it does not wish to renegotiate a * session, or the client may, if it wishes, respond with a * no_renegotiation alert. */ if (connection_state == CS_DONE) { // Renegotiation not supported yet sendAlert(AlertLevel.warning, AlertDescription.no_renegotiation); } break; case HandshakeType.client_key_exchange: case HandshakeType.certificate_verify: case HandshakeType.client_hello: case HandshakeType.hello_verify_request: default: // We do not support this! this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); break; } } private void processApplicationData() { /* * There is nothing we need to do here. * * This function could be used for callbacks when application data arrives in the * future. */ } private void processAlert() throws IOException { while (alertQueue.size() >= 2) { /* * An alert is always 2 bytes. Read the alert. 
*/ byte[] tmp = new byte[2]; alertQueue.read(tmp, 0, 2, 0); alertQueue.removeData(2); short level = tmp[0]; short description = tmp[1]; if (level == AlertLevel.fatal) { /* * This is a fatal error. */ this.failedWithError = true; this.closed = true; /* * Now try to close the stream, ignore errors. */ try { rs.close(); } catch (Exception e) { } throw new IOException(TLS_ERROR_MESSAGE); } else { /* * This is just a warning. */ if (description == AlertDescription.close_notify) { /* * Close notify */ this.failWithError(AlertLevel.warning, AlertDescription.close_notify); } /* * If it is just a warning, we continue. */ } } } /** * This method is called, when a change cipher spec message is received. * * @throws IOException If the message has an invalid content or the handshake is not * in the correct state. */ private void processChangeCipherSpec() throws IOException { while (changeCipherSpecQueue.size() > 0) { /* * A change cipher spec message is only one byte with the value 1. */ byte[] b = new byte[1]; changeCipherSpecQueue.read(b, 0, 1, 0); changeCipherSpecQueue.removeData(1); if (b[0] != 1) { /* * This should never happen. */ this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } /* * Check if we are in the correct connection state. */ if (this.connection_state != CS_CLIENT_FINISHED_SEND) { this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } rs.serverClientSpecReceived(); this.connection_state = CS_SERVER_CHANGE_CIPHER_SPEC_RECEIVED; } } private void sendClientCertificate(Certificate clientCert) throws IOException { ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.certificate, bos); // Reserve space for length TlsUtils.writeUint24(0, bos); clientCert.encode(bos); byte[] message = bos.toByteArray(); // Patch actual length back in TlsUtils.writeUint24(message.length - 4, message, 1); rs.writeMessage(ContentType.handshake, message, 0, message.length); } private void sendClientKeyExchange() throws IOException { ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.client_key_exchange, bos); // Reserve space for length TlsUtils.writeUint24(0, bos); this.keyExchange.generateClientKeyExchange(bos); byte[] message = bos.toByteArray(); // Patch actual length back in TlsUtils.writeUint24(message.length - 4, message, 1); rs.writeMessage(ContentType.handshake, message, 0, message.length); } private void sendCertificateVerify(byte[] data) throws IOException { /* * Send signature of handshake messages so far to prove we are the owner of the * cert See RFC 2246 sections 4.7, 7.4.3 and 7.4.8 */ ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.certificate_verify, bos); TlsUtils.writeUint24(data.length + 2, bos); TlsUtils.writeOpaque16(data, bos); byte[] message = bos.toByteArray(); rs.writeMessage(ContentType.handshake, message, 0, message.length); } /** * Connects to the remote system. * * @param verifyer Will be used when a certificate is received to verify that this * certificate is accepted by the client. * @throws IOException If handshake was not successful. * * @deprecated use version taking TlsClient */ public void connect(CertificateVerifyer verifyer) throws IOException { this.connect(new LegacyTlsClient(verifyer)); } /** * Connects to the remote system using client authentication * * @param tlsClient * @throws IOException If handshake was not successful. 
*/ public void connect(TlsClient tlsClient) throws IOException { if (tlsClient == null) { throw new IllegalArgumentException("'tlsClient' cannot be null"); } if (this.tlsClient != null) { throw new IllegalStateException("connect can only be called once"); } /* * Send Client hello * * First, generate some random data. */ this.securityParameters = new SecurityParameters(); this.securityParameters.clientRandom = new byte[32]; random.nextBytes(securityParameters.clientRandom); TlsUtils.writeGMTUnixTime(securityParameters.clientRandom, 0); this.tlsClientContext = new TlsClientContextImpl(random, securityParameters); this.rs.init(tlsClientContext); this.tlsClient = tlsClient; this.tlsClient.init(tlsClientContext); ByteArrayOutputStream os = new ByteArrayOutputStream(); ProtocolVersion client_version = this.tlsClient.getClientVersion(); this.tlsClientContext.setClientVersion(client_version); TlsUtils.writeVersion(client_version, os); os.write(securityParameters.clientRandom); /* * Length of Session id */ TlsUtils.writeUint8((short)0, os); /* * Cipher suites */ this.offeredCipherSuites = this.tlsClient.getCipherSuites(); // Integer -> byte[] this.clientExtensions = this.tlsClient.getClientExtensions(); // Cipher Suites (and SCSV) { /* * RFC 5746 3.4. The client MUST include either an empty "renegotiation_info" * extension, or the TLS_EMPTY_RENEGOTIATION_INFO_SCSV signaling cipher suite * value in the ClientHello. Including both is NOT RECOMMENDED. */ boolean noRenegExt = clientExtensions == null || clientExtensions.get(EXT_RenegotiationInfo) == null; int count = offeredCipherSuites.length; if (noRenegExt) { // Note: 1 extra slot for TLS_EMPTY_RENEGOTIATION_INFO_SCSV ++count; } TlsUtils.writeUint16(2 * count, os); TlsUtils.writeUint16Array(offeredCipherSuites, os); if (noRenegExt) { TlsUtils.writeUint16(CipherSuite.TLS_EMPTY_RENEGOTIATION_INFO_SCSV, os); } } // Compression methods this.offeredCompressionMethods = this.tlsClient.getCompressionMethods(); TlsUtils.writeUint8((short)offeredCompressionMethods.length, os); TlsUtils.writeUint8Array(offeredCompressionMethods, os); // Extensions if (clientExtensions != null) { ByteArrayOutputStream ext = new ByteArrayOutputStream(); Enumeration keys = clientExtensions.keys(); while (keys.hasMoreElements()) { Integer extType = (Integer)keys.nextElement(); writeExtension(ext, extType, (byte[])clientExtensions.get(extType)); } TlsUtils.writeOpaque16(ext.toByteArray(), os); } ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.client_hello, bos); TlsUtils.writeUint24(os.size(), bos); bos.write(os.toByteArray()); byte[] message = bos.toByteArray(); safeWriteMessage(ContentType.handshake, message, 0, message.length); connection_state = CS_CLIENT_HELLO_SEND; /* * We will now read data, until we have completed the handshake. */ while (connection_state != CS_DONE) { safeReadData(); } this.tlsInputStream = new TlsInputStream(this); this.tlsOutputStream = new TlsOutputStream(this); } /** * Read data from the network. The method will return immediately, if there is still * some data left in the buffer, or block until some application data has been read * from the network. * * @param buf The buffer where the data will be copied to. * @param offset The position where the data will be placed in the buffer. * @param len The maximum number of bytes to read. * @return The number of bytes read. * @throws IOException If something goes wrong during reading data. 
*/ protected int readApplicationData(byte[] buf, int offset, int len) throws IOException { while (applicationDataQueue.size() == 0) { /* * We need to read some data. */ if (this.closed) { if (this.failedWithError) { /* * Something went terribly wrong, we should throw an IOException */ throw new IOException(TLS_ERROR_MESSAGE); } /* * Connection has been closed, there is no more data to read. */ return -1; } safeReadData(); } len = Math.min(len, applicationDataQueue.size()); applicationDataQueue.read(buf, offset, len, 0); applicationDataQueue.removeData(len); return len; } private void safeReadData() throws IOException { try { rs.readData(); } catch (TlsFatalAlert e) { if (!this.closed) { this.failWithError(AlertLevel.fatal, e.getAlertDescription()); } throw e; } catch (IOException e) { if (!this.closed) { this.failWithError(AlertLevel.fatal, AlertDescription.internal_error); } throw e; } catch (RuntimeException e) { if (!this.closed) { this.failWithError(AlertLevel.fatal, AlertDescription.internal_error); } throw e; } } private void safeWriteMessage(short type, byte[] buf, int offset, int len) throws IOException { try { rs.writeMessage(type, buf, offset, len); } catch (TlsFatalAlert e) { if (!this.closed) { this.failWithError(AlertLevel.fatal, e.getAlertDescription()); } throw e; } catch (IOException e) { if (!closed) { this.failWithError(AlertLevel.fatal, AlertDescription.internal_error); } throw e; } catch (RuntimeException e) { if (!closed) { this.failWithError(AlertLevel.fatal, AlertDescription.internal_error); } throw e; } } /** * Send some application data to the remote system. * <p/> * The method will handle fragmentation internally. * * @param buf The buffer with the data. * @param offset The position in the buffer where the data is placed. * @param len The length of the data. * @throws IOException If something goes wrong during sending. */ protected void writeData(byte[] buf, int offset, int len) throws IOException { if (this.closed) { if (this.failedWithError) { throw new IOException(TLS_ERROR_MESSAGE); } throw new IOException("Sorry, connection has been closed, you cannot write more data"); } /* * Protect against known IV attack! * * DO NOT REMOVE THIS LINE, EXCEPT YOU KNOW EXACTLY WHAT YOU ARE DOING HERE. */ safeWriteMessage(ContentType.application_data, emptybuf, 0, 0); do { /* * We are only allowed to write fragments up to 2^14 bytes. */ int toWrite = Math.min(len, 1 << 14); safeWriteMessage(ContentType.application_data, buf, offset, toWrite); offset += toWrite; len -= toWrite; } while (len > 0); } /** * @return An OutputStream which can be used to send data. */ public OutputStream getOutputStream() { return this.tlsOutputStream; } /** * @return An InputStream which can be used to read data. */ public InputStream getInputStream() { return this.tlsInputStream; } /** * Terminate this connection with an alert. * <p/> * Can be used for normal closure too. * * @param alertLevel The level of the alert, an be AlertLevel.fatal or AL_warning. * @param alertDescription The exact alert message. * @throws IOException If alert was fatal. */ private void failWithError(short alertLevel, short alertDescription) throws IOException { /* * Check if the connection is still open. */ if (!closed) { /* * Prepare the message */ this.closed = true; if (alertLevel == AlertLevel.fatal) { /* * This is a fatal message. 
*/ this.failedWithError = true; } sendAlert(alertLevel, alertDescription); rs.close(); if (alertLevel == AlertLevel.fatal) { throw new IOException(TLS_ERROR_MESSAGE); } } else { throw new IOException(TLS_ERROR_MESSAGE); } } private void sendAlert(short alertLevel, short alertDescription) throws IOException { byte[] error = new byte[2]; error[0] = (byte)alertLevel; error[1] = (byte)alertDescription; rs.writeMessage(ContentType.alert, error, 0, 2); } /** * Closes this connection. * * @throws IOException If something goes wrong during closing. */ public void close() throws IOException { if (!closed) { this.failWithError(AlertLevel.warning, AlertDescription.close_notify); } } /** * Make sure the InputStream is now empty. Fail otherwise. * * @param is The InputStream to check. * @throws IOException If is is not empty. */ protected void assertEmpty(ByteArrayInputStream is) throws IOException { if (is.available() > 0) { throw new TlsFatalAlert(AlertDescription.decode_error); } } protected void flush() throws IOException { rs.flush(); } static boolean arrayContains(short[] a, short n) { for (int i = 0; i < a.length; ++i) { if (a[i] == n) { return true; } } return false; } static boolean arrayContains(int[] a, int n) { for (int i = 0; i < a.length; ++i) { if (a[i] == n) { return true; } } return false; } static byte[] createRenegotiationInfo(byte[] renegotiated_connection) throws IOException { ByteArrayOutputStream buf = new ByteArrayOutputStream(); TlsUtils.writeOpaque8(renegotiated_connection, buf); return buf.toByteArray(); } static void writeExtension(OutputStream output, Integer extType, byte[] extValue) throws IOException { TlsUtils.writeUint16(extType.intValue(), output); TlsUtils.writeOpaque16(extValue, output); } }
src/main/java/org/bouncycastle/crypto/tls/TlsProtocolHandler.java
package org.bouncycastle.crypto.tls; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.security.SecureRandom; import java.util.Enumeration; import java.util.Hashtable; import java.util.Vector; import org.bouncycastle.asn1.ASN1Primitive; import org.bouncycastle.asn1.x500.X500Name; import org.bouncycastle.crypto.prng.ThreadedSeedGenerator; import org.bouncycastle.util.Arrays; import org.bouncycastle.util.Integers; /** * An implementation of all high level protocols in TLS 1.0. */ public class TlsProtocolHandler { private static final Integer EXT_RenegotiationInfo = Integers.valueOf(ExtensionType.renegotiation_info); /* * Our Connection states */ private static final short CS_CLIENT_HELLO_SEND = 1; private static final short CS_SERVER_HELLO_RECEIVED = 2; private static final short CS_SERVER_CERTIFICATE_RECEIVED = 3; private static final short CS_SERVER_KEY_EXCHANGE_RECEIVED = 4; private static final short CS_CERTIFICATE_REQUEST_RECEIVED = 5; private static final short CS_SERVER_HELLO_DONE_RECEIVED = 6; private static final short CS_CLIENT_KEY_EXCHANGE_SEND = 7; private static final short CS_CERTIFICATE_VERIFY_SEND = 8; private static final short CS_CLIENT_CHANGE_CIPHER_SPEC_SEND = 9; private static final short CS_CLIENT_FINISHED_SEND = 10; private static final short CS_SERVER_CHANGE_CIPHER_SPEC_RECEIVED = 11; private static final short CS_DONE = 12; private static final byte[] emptybuf = new byte[0]; private static final String TLS_ERROR_MESSAGE = "Internal TLS error, this could be an attack"; /* * Queues for data from some protocols. */ private ByteQueue applicationDataQueue = new ByteQueue(); private ByteQueue changeCipherSpecQueue = new ByteQueue(); private ByteQueue alertQueue = new ByteQueue(); private ByteQueue handshakeQueue = new ByteQueue(); /* * The Record Stream we use */ private RecordStream rs; private SecureRandom random; private TlsInputStream tlsInputStream = null; private TlsOutputStream tlsOutputStream = null; private boolean closed = false; private boolean failedWithError = false; private boolean appDataReady = false; private Hashtable clientExtensions; private SecurityParameters securityParameters = null; private TlsClientContextImpl tlsClientContext = null; private TlsClient tlsClient = null; private int[] offeredCipherSuites = null; private short[] offeredCompressionMethods = null; private TlsKeyExchange keyExchange = null; private TlsAuthentication authentication = null; private CertificateRequest certificateRequest = null; private short connection_state = 0; private static SecureRandom createSecureRandom() { /* * We use our threaded seed generator to generate a good random seed. If the user * has a better random seed, he should use the constructor with a SecureRandom. */ ThreadedSeedGenerator tsg = new ThreadedSeedGenerator(); SecureRandom random = new SecureRandom(); /* * Hopefully, 20 bytes in fast mode are good enough. */ random.setSeed(tsg.generateSeed(20, true)); return random; } public TlsProtocolHandler(InputStream is, OutputStream os) { this(is, os, createSecureRandom()); } public TlsProtocolHandler(InputStream is, OutputStream os, SecureRandom sr) { this.rs = new RecordStream(this, is, os); this.random = sr; } protected void processData(short protocol, byte[] buf, int offset, int len) throws IOException { /* * Have a look at the protocol type, and add it to the correct queue. 
*/ switch (protocol) { case ContentType.change_cipher_spec: changeCipherSpecQueue.addData(buf, offset, len); processChangeCipherSpec(); break; case ContentType.alert: alertQueue.addData(buf, offset, len); processAlert(); break; case ContentType.handshake: handshakeQueue.addData(buf, offset, len); processHandshake(); break; case ContentType.application_data: if (!appDataReady) { this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } applicationDataQueue.addData(buf, offset, len); processApplicationData(); break; default: /* * Uh, we don't know this protocol. * * RFC2246 defines on page 13, that we should ignore this. */ } } private void processHandshake() throws IOException { boolean read; do { read = false; /* * We need the first 4 bytes, they contain type and length of the message. */ if (handshakeQueue.size() >= 4) { byte[] beginning = new byte[4]; handshakeQueue.read(beginning, 0, 4, 0); ByteArrayInputStream bis = new ByteArrayInputStream(beginning); short type = TlsUtils.readUint8(bis); int len = TlsUtils.readUint24(bis); /* * Check if we have enough bytes in the buffer to read the full message. */ if (handshakeQueue.size() >= (len + 4)) { /* * Read the message. */ byte[] buf = new byte[len]; handshakeQueue.read(buf, 0, len, 4); handshakeQueue.removeData(len + 4); /* * RFC 2246 7.4.9. The value handshake_messages includes all handshake * messages starting at client hello up to, but not including, this * finished message. [..] Note: [Also,] Hello Request messages are * omitted from handshake hashes. */ switch (type) { case HandshakeType.hello_request: case HandshakeType.finished: break; default: rs.updateHandshakeData(beginning, 0, 4); rs.updateHandshakeData(buf, 0, len); break; } /* * Now, parse the message. */ processHandshakeMessage(type, buf); read = true; } } } while (read); } private void processHandshakeMessage(short type, byte[] buf) throws IOException { ByteArrayInputStream is = new ByteArrayInputStream(buf); switch (type) { case HandshakeType.certificate: { switch (connection_state) { case CS_SERVER_HELLO_RECEIVED: { // Parse the Certificate message and send to cipher suite Certificate serverCertificate = Certificate.parse(is); assertEmpty(is); this.keyExchange.processServerCertificate(serverCertificate); this.authentication = tlsClient.getAuthentication(); this.authentication.notifyServerCertificate(serverCertificate); break; } default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } connection_state = CS_SERVER_CERTIFICATE_RECEIVED; break; } case HandshakeType.finished: switch (connection_state) { case CS_SERVER_CHANGE_CIPHER_SPEC_RECEIVED: /* * Read the checksum from the finished message, it has always 12 * bytes for TLS 1.0 and 36 for SSLv3. */ int checksumLength = tlsClientContext.getServerVersion().isSSL() ? 36 : 12; byte[] serverVerifyData = new byte[checksumLength]; TlsUtils.readFully(serverVerifyData, is); assertEmpty(is); /* * Calculate our own checksum. */ byte[] expectedServerVerifyData = TlsUtils.calculateVerifyData(tlsClientContext, "server finished", rs.getCurrentHash(TlsUtils.SSL_SERVER)); /* * Compare both checksums. */ if (!Arrays.constantTimeAreEqual(expectedServerVerifyData, serverVerifyData)) { /* * Wrong checksum in the finished message. */ this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } connection_state = CS_DONE; /* * We are now ready to receive application data. 
*/ this.appDataReady = true; break; default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } break; case HandshakeType.server_hello: switch (connection_state) { case CS_CLIENT_HELLO_SEND: /* * Read the server hello message */ ProtocolVersion server_version = TlsUtils.readVersion(is); // Check that this matches what the server is sending in the record layer if (!server_version.equals(rs.getDiscoveredServerVersion())) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } ProtocolVersion client_version = this.tlsClientContext.getClientVersion(); // TODO[DTLS] This comparison needs to allow for DTLS (with decreasing minor version numbers) if (server_version.getFullVersion() > client_version.getFullVersion()) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } this.tlsClientContext.setServerVersion(server_version); this.tlsClient.notifyServerVersion(server_version); /* * Read the server random */ securityParameters.serverRandom = new byte[32]; TlsUtils.readFully(securityParameters.serverRandom, is); byte[] sessionID = TlsUtils.readOpaque8(is); if (sessionID.length > 32) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } this.tlsClient.notifySessionID(sessionID); /* * Find out which CipherSuite the server has chosen and check that * it was one of the offered ones. */ int selectedCipherSuite = TlsUtils.readUint16(is); if (!arrayContains(offeredCipherSuites, selectedCipherSuite) || selectedCipherSuite == CipherSuite.TLS_EMPTY_RENEGOTIATION_INFO_SCSV) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } this.tlsClient.notifySelectedCipherSuite(selectedCipherSuite); /* * Find out which CompressionMethod the server has chosen and check that * it was one of the offered ones. */ short selectedCompressionMethod = TlsUtils.readUint8(is); if (!arrayContains(offeredCompressionMethods, selectedCompressionMethod)) { this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } this.tlsClient.notifySelectedCompressionMethod(selectedCompressionMethod); /* * RFC3546 2.2 The extended server hello message format MAY be * sent in place of the server hello message when the client has * requested extended functionality via the extended client hello * message specified in Section 2.1. ... Note that the extended * server hello message is only sent in response to an extended * client hello message. This prevents the possibility that the * extended server hello message could "break" existing TLS 1.0 * clients. */ /* * TODO RFC 3546 2.3 If [...] the older session is resumed, then * the server MUST ignore extensions appearing in the client * hello, and send a server hello containing no extensions. */ // Integer -> byte[] Hashtable serverExtensions = new Hashtable(); /* * RFC 3546 2.2 Note that the extended server hello message is only sent in response * to an extended client hello message. 
*/ if (clientExtensions != null && is.available() > 0) { // Process extensions from extended server hello byte[] extBytes = TlsUtils.readOpaque16(is); ByteArrayInputStream ext = new ByteArrayInputStream(extBytes); while (ext.available() > 0) { Integer extType = Integers.valueOf(TlsUtils.readUint16(ext)); byte[] extValue = TlsUtils.readOpaque16(ext); /* * RFC 5746 Note that sending a "renegotiation_info" * extension in response to a ClientHello containing only * the SCSV is an explicit exception to the prohibition in * RFC 5246, Section 7.4.1.4, on the server sending * unsolicited extensions and is only allowed because the * client is signaling its willingness to receive the * extension via the TLS_EMPTY_RENEGOTIATION_INFO_SCSV * SCSV. TLS implementations MUST continue to comply with * Section 7.4.1.4 for all other extensions. */ if (!extType.equals(EXT_RenegotiationInfo) && clientExtensions.get(extType) == null) { /* * RFC 3546 2.3 Note that for all extension types * (including those defined in future), the extension * type MUST NOT appear in the extended server hello * unless the same extension type appeared in the * corresponding client hello. Thus clients MUST abort * the handshake if they receive an extension type in * the extended server hello that they did not request * in the associated (extended) client hello. */ this.failWithError(AlertLevel.fatal, AlertDescription.unsupported_extension); } if (serverExtensions.containsKey(extType)) { /* * RFC 3546 2.3 Also note that when multiple * extensions of different types are present in the * extended client hello or the extended server hello, * the extensions may appear in any order. There MUST * NOT be more than one extension of the same type. */ this.failWithError(AlertLevel.fatal, AlertDescription.illegal_parameter); } serverExtensions.put(extType, extValue); } } assertEmpty(is); /* * RFC 5746 3.4. When a ServerHello is received, the client MUST * check if it includes the "renegotiation_info" extension: */ { boolean secure_negotiation = serverExtensions.containsKey(EXT_RenegotiationInfo); /* * If the extension is present, set the secure_renegotiation * flag to TRUE. The client MUST then verify that the length * of the "renegotiated_connection" field is zero, and if it * is not, MUST abort the handshake (by sending a fatal * handshake_failure alert). 
*/ if (secure_negotiation) { byte[] renegExtValue = (byte[])serverExtensions.get(EXT_RenegotiationInfo); if (!Arrays.constantTimeAreEqual(renegExtValue, createRenegotiationInfo(emptybuf))) { this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } } tlsClient.notifySecureRenegotiation(secure_negotiation); } if (clientExtensions != null) { tlsClient.processServerExtensions(serverExtensions); } this.keyExchange = tlsClient.getKeyExchange(); connection_state = CS_SERVER_HELLO_RECEIVED; break; default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } break; case HandshakeType.server_hello_done: switch (connection_state) { case CS_SERVER_HELLO_RECEIVED: // There was no server certificate message; check it's OK this.keyExchange.skipServerCertificate(); this.authentication = null; // NB: Fall through to next case label case CS_SERVER_CERTIFICATE_RECEIVED: // There was no server key exchange message; check it's OK this.keyExchange.skipServerKeyExchange(); // NB: Fall through to next case label case CS_SERVER_KEY_EXCHANGE_RECEIVED: case CS_CERTIFICATE_REQUEST_RECEIVED: assertEmpty(is); connection_state = CS_SERVER_HELLO_DONE_RECEIVED; TlsCredentials clientCreds = null; if (certificateRequest == null) { this.keyExchange.skipClientCredentials(); } else { clientCreds = this.authentication.getClientCredentials(certificateRequest); if (clientCreds == null) { this.keyExchange.skipClientCredentials(); if (tlsClientContext.getServerVersion().isSSL()) { sendAlert(AlertLevel.warning, AlertDescription.no_certificate); } else { sendClientCertificate(Certificate.EMPTY_CHAIN); } } else { this.keyExchange.processClientCredentials(clientCreds); sendClientCertificate(clientCreds.getCertificate()); } } /* * Send the client key exchange message, depending on the key * exchange we are using in our CipherSuite. */ sendClientKeyExchange(); connection_state = CS_CLIENT_KEY_EXCHANGE_SEND; /* * Calculate the master_secret */ { byte[] pms = this.keyExchange.generatePremasterSecret(); try { securityParameters.masterSecret = TlsUtils.calculateMasterSecret( this.tlsClientContext, pms); } finally { // TODO Is there a way to ensure the data is really overwritten? /* * RFC 2246 8.1. The pre_master_secret should be deleted from * memory once the master_secret has been computed. */ if (pms != null) { Arrays.fill(pms, (byte)0); } } } if (clientCreds != null && clientCreds instanceof TlsSignerCredentials) { TlsSignerCredentials signerCreds = (TlsSignerCredentials)clientCreds; byte[] md5andsha1 = rs.getCurrentHash(null); byte[] clientCertificateSignature = signerCreds.generateCertificateSignature( md5andsha1); sendCertificateVerify(clientCertificateSignature); connection_state = CS_CERTIFICATE_VERIFY_SEND; } /* * Now, we send change cipher state */ byte[] cmessage = new byte[1]; cmessage[0] = 1; rs.writeMessage(ContentType.change_cipher_spec, cmessage, 0, cmessage.length); connection_state = CS_CLIENT_CHANGE_CIPHER_SPEC_SEND; /* * Initialize our cipher suite */ rs.clientCipherSpecDecided(tlsClient.getCompression(), tlsClient.getCipher()); /* * Send our finished message. 
*/ byte[] clientVerifyData = TlsUtils.calculateVerifyData(tlsClientContext, "client finished", rs.getCurrentHash(TlsUtils.SSL_CLIENT)); ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.finished, bos); TlsUtils.writeUint24(clientVerifyData.length, bos); bos.write(clientVerifyData); byte[] message = bos.toByteArray(); rs.writeMessage(ContentType.handshake, message, 0, message.length); this.connection_state = CS_CLIENT_FINISHED_SEND; break; default: this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } break; case HandshakeType.server_key_exchange: { switch (connection_state) { case CS_SERVER_HELLO_RECEIVED: // There was no server certificate message; check it's OK this.keyExchange.skipServerCertificate(); this.authentication = null; // NB: Fall through to next case label case CS_SERVER_CERTIFICATE_RECEIVED: this.keyExchange.processServerKeyExchange(is); assertEmpty(is); break; default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } this.connection_state = CS_SERVER_KEY_EXCHANGE_RECEIVED; break; } case HandshakeType.certificate_request: { switch (connection_state) { case CS_SERVER_CERTIFICATE_RECEIVED: // There was no server key exchange message; check it's OK this.keyExchange.skipServerKeyExchange(); // NB: Fall through to next case label case CS_SERVER_KEY_EXCHANGE_RECEIVED: { if (this.authentication == null) { /* * RFC 2246 7.4.4. It is a fatal handshake_failure alert * for an anonymous server to request client identification. */ this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } int numTypes = TlsUtils.readUint8(is); short[] certificateTypes = new short[numTypes]; for (int i = 0; i < numTypes; ++i) { certificateTypes[i] = TlsUtils.readUint8(is); } byte[] authorities = TlsUtils.readOpaque16(is); assertEmpty(is); Vector authorityDNs = new Vector(); ByteArrayInputStream bis = new ByteArrayInputStream(authorities); while (bis.available() > 0) { byte[] dnBytes = TlsUtils.readOpaque16(bis); authorityDNs.addElement(X500Name.getInstance(ASN1Primitive.fromByteArray(dnBytes))); } this.certificateRequest = new CertificateRequest(certificateTypes, authorityDNs); this.keyExchange.validateCertificateRequest(this.certificateRequest); break; } default: this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } this.connection_state = CS_CERTIFICATE_REQUEST_RECEIVED; break; } case HandshakeType.hello_request: /* * RFC 2246 7.4.1.1 Hello request This message will be ignored by the * client if the client is currently negotiating a session. This message * may be ignored by the client if it does not wish to renegotiate a * session, or the client may, if it wishes, respond with a * no_renegotiation alert. */ if (connection_state == CS_DONE) { // Renegotiation not supported yet sendAlert(AlertLevel.warning, AlertDescription.no_renegotiation); } break; case HandshakeType.client_key_exchange: case HandshakeType.certificate_verify: case HandshakeType.client_hello: case HandshakeType.hello_verify_request: default: // We do not support this! this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); break; } } private void processApplicationData() { /* * There is nothing we need to do here. * * This function could be used for callbacks when application data arrives in the * future. */ } private void processAlert() throws IOException { while (alertQueue.size() >= 2) { /* * An alert is always 2 bytes. Read the alert. 
*/ byte[] tmp = new byte[2]; alertQueue.read(tmp, 0, 2, 0); alertQueue.removeData(2); short level = tmp[0]; short description = tmp[1]; if (level == AlertLevel.fatal) { /* * This is a fatal error. */ this.failedWithError = true; this.closed = true; /* * Now try to close the stream, ignore errors. */ try { rs.close(); } catch (Exception e) { } throw new IOException(TLS_ERROR_MESSAGE); } else { /* * This is just a warning. */ if (description == AlertDescription.close_notify) { /* * Close notify */ this.failWithError(AlertLevel.warning, AlertDescription.close_notify); } /* * If it is just a warning, we continue. */ } } } /** * This method is called, when a change cipher spec message is received. * * @throws IOException If the message has an invalid content or the handshake is not * in the correct state. */ private void processChangeCipherSpec() throws IOException { while (changeCipherSpecQueue.size() > 0) { /* * A change cipher spec message is only one byte with the value 1. */ byte[] b = new byte[1]; changeCipherSpecQueue.read(b, 0, 1, 0); changeCipherSpecQueue.removeData(1); if (b[0] != 1) { /* * This should never happen. */ this.failWithError(AlertLevel.fatal, AlertDescription.unexpected_message); } /* * Check if we are in the correct connection state. */ if (this.connection_state != CS_CLIENT_FINISHED_SEND) { this.failWithError(AlertLevel.fatal, AlertDescription.handshake_failure); } rs.serverClientSpecReceived(); this.connection_state = CS_SERVER_CHANGE_CIPHER_SPEC_RECEIVED; } } private void sendClientCertificate(Certificate clientCert) throws IOException { ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.certificate, bos); // Reserve space for length TlsUtils.writeUint24(0, bos); clientCert.encode(bos); byte[] message = bos.toByteArray(); // Patch actual length back in TlsUtils.writeUint24(message.length - 4, message, 1); rs.writeMessage(ContentType.handshake, message, 0, message.length); } private void sendClientKeyExchange() throws IOException { ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.client_key_exchange, bos); // Reserve space for length TlsUtils.writeUint24(0, bos); this.keyExchange.generateClientKeyExchange(bos); byte[] message = bos.toByteArray(); // Patch actual length back in TlsUtils.writeUint24(message.length - 4, message, 1); rs.writeMessage(ContentType.handshake, message, 0, message.length); } private void sendCertificateVerify(byte[] data) throws IOException { /* * Send signature of handshake messages so far to prove we are the owner of the * cert See RFC 2246 sections 4.7, 7.4.3 and 7.4.8 */ ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.certificate_verify, bos); TlsUtils.writeUint24(data.length + 2, bos); TlsUtils.writeOpaque16(data, bos); byte[] message = bos.toByteArray(); rs.writeMessage(ContentType.handshake, message, 0, message.length); } /** * Connects to the remote system. * * @param verifyer Will be used when a certificate is received to verify that this * certificate is accepted by the client. * @throws IOException If handshake was not successful. * * @deprecated use version taking TlsClient */ public void connect(CertificateVerifyer verifyer) throws IOException { this.connect(new LegacyTlsClient(verifyer)); } /** * Connects to the remote system using client authentication * * @param tlsClient * @throws IOException If handshake was not successful. 
*/ public void connect(TlsClient tlsClient) throws IOException { if (tlsClient == null) { throw new IllegalArgumentException("'tlsClient' cannot be null"); } if (this.tlsClient != null) { throw new IllegalStateException("connect can only be called once"); } /* * Send Client hello * * First, generate some random data. */ this.securityParameters = new SecurityParameters(); this.securityParameters.clientRandom = new byte[32]; random.nextBytes(securityParameters.clientRandom); TlsUtils.writeGMTUnixTime(securityParameters.clientRandom, 0); this.tlsClientContext = new TlsClientContextImpl(random, securityParameters); this.rs.init(tlsClientContext); this.tlsClient = tlsClient; this.tlsClient.init(tlsClientContext); ByteArrayOutputStream os = new ByteArrayOutputStream(); ProtocolVersion client_version = this.tlsClient.getClientVersion(); this.tlsClientContext.setClientVersion(client_version); TlsUtils.writeVersion(client_version, os); os.write(securityParameters.clientRandom); /* * Length of Session id */ TlsUtils.writeUint8((short)0, os); /* * Cipher suites */ this.offeredCipherSuites = this.tlsClient.getCipherSuites(); // Integer -> byte[] this.clientExtensions = this.tlsClient.getClientExtensions(); // Cipher Suites (and SCSV) { /* * RFC 5746 3.4. The client MUST include either an empty "renegotiation_info" * extension, or the TLS_EMPTY_RENEGOTIATION_INFO_SCSV signaling cipher suite * value in the ClientHello. Including both is NOT RECOMMENDED. */ boolean noRenegExt = clientExtensions == null || clientExtensions.get(EXT_RenegotiationInfo) == null; int count = offeredCipherSuites.length; if (noRenegExt) { // Note: 1 extra slot for TLS_EMPTY_RENEGOTIATION_INFO_SCSV ++count; } TlsUtils.writeUint16(2 * count, os); TlsUtils.writeUint16Array(offeredCipherSuites, os); if (noRenegExt) { TlsUtils.writeUint16(CipherSuite.TLS_EMPTY_RENEGOTIATION_INFO_SCSV, os); } } // Compression methods this.offeredCompressionMethods = this.tlsClient.getCompressionMethods(); TlsUtils.writeUint8((short)offeredCompressionMethods.length, os); TlsUtils.writeUint8Array(offeredCompressionMethods, os); // Extensions if (clientExtensions != null) { ByteArrayOutputStream ext = new ByteArrayOutputStream(); Enumeration keys = clientExtensions.keys(); while (keys.hasMoreElements()) { Integer extType = (Integer)keys.nextElement(); writeExtension(ext, extType, (byte[])clientExtensions.get(extType)); } TlsUtils.writeOpaque16(ext.toByteArray(), os); } ByteArrayOutputStream bos = new ByteArrayOutputStream(); TlsUtils.writeUint8(HandshakeType.client_hello, bos); TlsUtils.writeUint24(os.size(), bos); bos.write(os.toByteArray()); byte[] message = bos.toByteArray(); safeWriteMessage(ContentType.handshake, message, 0, message.length); connection_state = CS_CLIENT_HELLO_SEND; /* * We will now read data, until we have completed the handshake. */ while (connection_state != CS_DONE) { safeReadData(); } this.tlsInputStream = new TlsInputStream(this); this.tlsOutputStream = new TlsOutputStream(this); } /** * Read data from the network. The method will return immediately, if there is still * some data left in the buffer, or block until some application data has been read * from the network. * * @param buf The buffer where the data will be copied to. * @param offset The position where the data will be placed in the buffer. * @param len The maximum number of bytes to read. * @return The number of bytes read. * @throws IOException If something goes wrong during reading data. 
*/ protected int readApplicationData(byte[] buf, int offset, int len) throws IOException { while (applicationDataQueue.size() == 0) { /* * We need to read some data. */ if (this.closed) { if (this.failedWithError) { /* * Something went terribly wrong, we should throw an IOException */ throw new IOException(TLS_ERROR_MESSAGE); } /* * Connection has been closed, there is no more data to read. */ return -1; } safeReadData(); } len = Math.min(len, applicationDataQueue.size()); applicationDataQueue.read(buf, offset, len, 0); applicationDataQueue.removeData(len); return len; } private void safeReadData() throws IOException { try { rs.readData(); } catch (TlsFatalAlert e) { if (!this.closed) { this.failWithError(AlertLevel.fatal, e.getAlertDescription()); } throw e; } catch (IOException e) { if (!this.closed) { this.failWithError(AlertLevel.fatal, AlertDescription.internal_error); } throw e; } catch (RuntimeException e) { if (!this.closed) { this.failWithError(AlertLevel.fatal, AlertDescription.internal_error); } throw e; } } private void safeWriteMessage(short type, byte[] buf, int offset, int len) throws IOException { try { rs.writeMessage(type, buf, offset, len); } catch (TlsFatalAlert e) { if (!this.closed) { this.failWithError(AlertLevel.fatal, e.getAlertDescription()); } throw e; } catch (IOException e) { if (!closed) { this.failWithError(AlertLevel.fatal, AlertDescription.internal_error); } throw e; } catch (RuntimeException e) { if (!closed) { this.failWithError(AlertLevel.fatal, AlertDescription.internal_error); } throw e; } } /** * Send some application data to the remote system. * <p/> * The method will handle fragmentation internally. * * @param buf The buffer with the data. * @param offset The position in the buffer where the data is placed. * @param len The length of the data. * @throws IOException If something goes wrong during sending. */ protected void writeData(byte[] buf, int offset, int len) throws IOException { if (this.closed) { if (this.failedWithError) { throw new IOException(TLS_ERROR_MESSAGE); } throw new IOException("Sorry, connection has been closed, you cannot write more data"); } /* * Protect against known IV attack! * * DO NOT REMOVE THIS LINE, EXCEPT YOU KNOW EXACTLY WHAT YOU ARE DOING HERE. */ safeWriteMessage(ContentType.application_data, emptybuf, 0, 0); do { /* * We are only allowed to write fragments up to 2^14 bytes. */ int toWrite = Math.min(len, 1 << 14); safeWriteMessage(ContentType.application_data, buf, offset, toWrite); offset += toWrite; len -= toWrite; } while (len > 0); } /** * @return An OutputStream which can be used to send data. */ public OutputStream getOutputStream() { return this.tlsOutputStream; } /** * @return An InputStream which can be used to read data. */ public InputStream getInputStream() { return this.tlsInputStream; } /** * Terminate this connection with an alert. * <p/> * Can be used for normal closure too. * * @param alertLevel The level of the alert, an be AlertLevel.fatal or AL_warning. * @param alertDescription The exact alert message. * @throws IOException If alert was fatal. */ private void failWithError(short alertLevel, short alertDescription) throws IOException { /* * Check if the connection is still open. */ if (!closed) { /* * Prepare the message */ this.closed = true; if (alertLevel == AlertLevel.fatal) { /* * This is a fatal message. 
*/ this.failedWithError = true; } sendAlert(alertLevel, alertDescription); rs.close(); if (alertLevel == AlertLevel.fatal) { throw new IOException(TLS_ERROR_MESSAGE); } } else { throw new IOException(TLS_ERROR_MESSAGE); } } private void sendAlert(short alertLevel, short alertDescription) throws IOException { byte[] error = new byte[2]; error[0] = (byte)alertLevel; error[1] = (byte)alertDescription; rs.writeMessage(ContentType.alert, error, 0, 2); } /** * Closes this connection. * * @throws IOException If something goes wrong during closing. */ public void close() throws IOException { if (!closed) { this.failWithError(AlertLevel.warning, AlertDescription.close_notify); } } /** * Make sure the InputStream is now empty. Fail otherwise. * * @param is The InputStream to check. * @throws IOException If is is not empty. */ protected void assertEmpty(ByteArrayInputStream is) throws IOException { if (is.available() > 0) { throw new TlsFatalAlert(AlertDescription.decode_error); } } protected void flush() throws IOException { rs.flush(); } static boolean arrayContains(short[] a, short n) { for (int i = 0; i < a.length; ++i) { if (a[i] == n) { return true; } } return false; } static boolean arrayContains(int[] a, int n) { for (int i = 0; i < a.length; ++i) { if (a[i] == n) { return true; } } return false; } static byte[] createRenegotiationInfo(byte[] renegotiated_connection) throws IOException { ByteArrayOutputStream buf = new ByteArrayOutputStream(); TlsUtils.writeOpaque8(renegotiated_connection, buf); return buf.toByteArray(); } static void writeExtension(OutputStream output, Integer extType, byte[] extValue) throws IOException { TlsUtils.writeUint16(extType.intValue(), output); TlsUtils.writeOpaque16(extValue, output); } }
Revert last change, but add explanatory comments
src/main/java/org/bouncycastle/crypto/tls/TlsProtocolHandler.java
Revert last change, but add explanatory comments
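Note on the TlsProtocolHandler sources above: the class offers a small blocking API (construct it over a socket's streams, call connect(), then read and write through getInputStream()/getOutputStream()). The following sketch illustrates that flow; it is not part of the commit, and createTlsClient() is a placeholder for an application-specific org.bouncycastle.crypto.tls.TlsClient implementation (an assumption, since the record does not include one).

import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;

import org.bouncycastle.crypto.tls.TlsClient;
import org.bouncycastle.crypto.tls.TlsProtocolHandler;

public class TlsClientExample {
    public static void main(String[] args) throws Exception {
        Socket socket = new Socket("example.org", 443);
        TlsProtocolHandler handler =
            new TlsProtocolHandler(socket.getInputStream(), socket.getOutputStream());

        // Runs the handshake implemented above (ClientHello ... Finished) and blocks
        // until connection_state reaches CS_DONE or a fatal alert terminates it.
        handler.connect(createTlsClient());

        OutputStream out = handler.getOutputStream(); // fragments writes into <= 2^14 byte records
        out.write("GET / HTTP/1.0\r\n\r\n".getBytes("US-ASCII"));
        out.flush();

        InputStream in = handler.getInputStream();
        int b;
        while ((b = in.read()) >= 0) {
            System.out.write(b);
        }
        handler.close(); // sends a close_notify warning alert and closes the record stream
    }

    // Placeholder (assumption): supply a concrete TlsClient here, e.g. a DefaultTlsClient
    // subclass if that class is available in this BouncyCastle version.
    static TlsClient createTlsClient() {
        throw new UnsupportedOperationException("provide a TlsClient implementation");
    }
}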
Java
mit
8c188f794793ed8e4743971ff0bb2082b7a23c79
0
FastenYourSeatbelts/luggage-system
/** * The MIT License (MIT) * * Copyright (c) 2014-2015 ITopia IS102-5 * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * */ package luggage.controllers; import java.io.IOException; import java.net.URL; import java.time.LocalDate; import java.util.List; import java.util.ResourceBundle; import javafx.application.Platform; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.fxml.FXML; import javafx.fxml.Initializable; import javafx.scene.control.Button; import javafx.scene.control.ChoiceBox; import javafx.scene.control.DatePicker; import javafx.scene.control.Label; import javafx.scene.control.TableColumn; import javafx.scene.control.TableView; import javafx.scene.control.TextField; import javafx.scene.control.cell.PropertyValueFactory; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyEvent; import javafx.stage.Stage; import static jdk.nashorn.internal.runtime.Context.printStackTrace; import luggage.Debug; import luggage.MainActivity; import luggage.database.models.CustomerModel; import luggage.database.models.LocationModel; import luggage.database.models.LogModel; import luggage.database.models.LuggageModel; import luggage.database.models.Model; import luggage.helpers.StageHelper; import org.apache.pdfbox.exceptions.COSVisitorException; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDPage; import org.apache.pdfbox.pdmodel.edit.PDPageContentStream; import org.apache.pdfbox.pdmodel.font.PDFont; import org.apache.pdfbox.pdmodel.font.PDType1Font; import org.controlsfx.control.action.Action; import org.controlsfx.dialog.Dialog; import org.controlsfx.dialog.Dialogs; /** * LuggageController * * Controller for luggage/list.fxml, luggage/new, luggage/edit, luggage/view, * and luggage/help. 
* * @package luggage.controllers * @author ITopia IS102-5 */ public class LuggageController extends BaseController implements Initializable { /** * LIST ELEMENTS */ @FXML private TableView listTableView; @FXML private TableColumn listTableViewId; @FXML private TableColumn listTableViewStatus; @FXML private TableColumn listTableViewTags; @FXML private TableColumn listTableViewDate; @FXML private Button listEdit; @FXML private Button listExportToPdf; @FXML private Button listHelp; @FXML private Button listNew; @FXML private Button listRemove; @FXML private Button listView; @FXML private TextField listSearchField; /** * NEW ELEMENTS */ @FXML private Button newCancel; @FXML private Button newReset; @FXML private Button newSave; @FXML private DatePicker newDate; @FXML private ChoiceBox<CustomerModel> newCustomerId; @FXML private ChoiceBox<LocationModel> newLocationId; @FXML private ChoiceBox<String> newStatus; @FXML private TextField newNotes; @FXML private TextField newTags; /** * EDIT ELEMENTS */ @FXML private Button editCancel; @FXML private Button editReset; @FXML private Button editSave; @FXML private DatePicker editDate; @FXML private ChoiceBox<CustomerModel> editCustomerId; @FXML private ChoiceBox<LocationModel> editLocationId; @FXML private ChoiceBox<String> editStatus; @FXML private TextField editNotes; @FXML private TextField editTags; /** * VIEW ELEMENTS */ @FXML private Button viewClose; @FXML private ChoiceBox<String> viewStatus; @FXML private ChoiceBox<LocationModel> viewLocationId; @FXML private ChoiceBox<CustomerModel> viewCustomerId; @FXML private DatePicker viewDate; @FXML private Label printNotif; @FXML private TextField viewStatusAsText; @FXML private TextField viewLocationAsText; @FXML private TextField viewCustomerAsText; @FXML private TextField viewNotes; @FXML private TextField viewTags; public String mlg = "holy shit"; private ObservableList<LuggageModel> listData = FXCollections.observableArrayList(); private final ObservableList<LocationModel> locationData = FXCollections.observableArrayList(); private final ObservableList<CustomerModel> customerData = FXCollections.observableArrayList(); /** * * Called on controller start * * @param url * @param rb */ @Override public void initialize(URL url, ResourceBundle rb) { Platform.runLater(new Runnable() { @Override public void run() { Debug.print("LUGGAGE CONTROLLER-----------------------------------------------------------------"); // List if (listTableView != null) { listResetTableView("", new String[0]); listEdit.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listRemove.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listView.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listExportToPdf.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listTableView.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY); listKeyActions(); } // New if (newLocationId != null) { setNewChoiceBoxes(); newKeyActions(); } // Edit if (editLocationId != null) { setEditChoiceBoxes(); setEditFields(); editKeyActions(); } // View if (viewLocationId != null) { setViewChoiceBoxes(); setViewFields(); viewKeyActions(); } } }); } public LocationModel selectedLocation; public CustomerModel selectedCustomer; public void setNewChoiceBoxes() { // Locations LocationModel oLocationModel = new LocationModel(); List<Model> allLocations = oLocationModel.findAll(); for (Model allLocation : 
allLocations) { LocationModel location = (LocationModel) allLocation; locationData.add(location); } newLocationId.setItems(locationData); // Customers CustomerModel oCustomerModel = new CustomerModel(); List<Model> allCustomers = oCustomerModel.findAll(); for (Model allCustomer : allCustomers) { CustomerModel customer = (CustomerModel) allCustomer; customerData.add(customer); } newCustomerId.setItems(customerData); ObservableList<String> statuses = FXCollections.observableArrayList(); statuses.add("Missing"); statuses.add("Found"); statuses.add("Resolved"); newStatus.setItems(statuses); } public void setEditChoiceBoxes() { // Locations LocationModel oLocationModel = new LocationModel(); List<Model> allLocations = oLocationModel.findAll(); int selectedLocationId = new LuggageModel(MainActivity.editId).getLocationId(); for (Model allLocation : allLocations) { LocationModel location = (LocationModel) allLocation; if (location.getId() == selectedLocationId) { selectedLocation = location; } locationData.add(location); } editLocationId.setItems(locationData); // Customers CustomerModel oCustomerModel = new CustomerModel(); List<Model> allCustomers = oCustomerModel.findAll(); int selectedCustomerId = new LuggageModel(MainActivity.editId).getCustomerId(); for (Model allCustomer : allCustomers) { CustomerModel customer = (CustomerModel) allCustomer; if (customer.getId() == selectedCustomerId) { selectedCustomer = customer; } customerData.add(customer); } editCustomerId.setItems(customerData); ObservableList<String> statuses = FXCollections.observableArrayList(); statuses.add("Missing"); statuses.add("Found"); statuses.add("Resolved"); editStatus.setItems(statuses); } /** * Populates the view Location, Customer &amp; Status ChoiceBoxes. */ public void setViewChoiceBoxes() { // Locations LocationModel oLocationModel = new LocationModel(); List<Model> allLocations = oLocationModel.findAll(); int selectedLocationId = new LuggageModel(MainActivity.viewId).getLocationId(); for (Model allLocation : allLocations) { LocationModel location = (LocationModel) allLocation; if (location.getId() == selectedLocationId) { selectedLocation = location; } locationData.add(location); } viewLocationId.setItems(locationData); long startTime = System.nanoTime(); // Customers CustomerModel oCustomerModel = new CustomerModel(); List<Model> allCustomers = oCustomerModel.findAll(); int selectedCustomerId = new LuggageModel(MainActivity.viewId).getCustomerId(); for (Model allCustomer : allCustomers) { CustomerModel customer = (CustomerModel) allCustomer; if (customer.getId() == selectedCustomerId) { selectedCustomer = customer; } customerData.add(customer); } viewCustomerId.setItems(customerData); ObservableList<String> statuses = FXCollections.observableArrayList(); statuses.add("Missing"); statuses.add("Found"); statuses.add("Resolved"); viewStatus.setItems(statuses); long endTime = System.nanoTime(); long microseconds = ((endTime - startTime) / 1000); Debug.print("Luggage setViewChoiceBoxes()" + " took " + microseconds + " microseconds."); } /** * Handles the search field functionality. 
*/ @FXML protected void listOnSearch() { String[] keywords = listSearchField.getText().split("\\s+"); String[] params = new String[4 * keywords.length]; boolean firstColumn = true; String query = ""; for (int i = 0; i < keywords.length; i++) { if (firstColumn) { params[0 + i] = "%" + keywords[i] + "%"; query += "id LIKE ?"; } else { params[0 + i] = "%" + keywords[i] + "%"; query += " OR id LIKE ?"; } params[1 + i] = "%" + keywords[i] + "%"; query += " OR tags LIKE ?"; params[2 + i] = "%" + keywords[i] + "%"; query += " OR status LIKE ?"; params[3 + i] = "%" + keywords[i] + "%"; query += " OR datetime LIKE ?"; firstColumn = false; } listResetTableView(query, params); } /** * Opens the 'New Luggage' view. */ @FXML public void listNew() { StageHelper.addPopup("luggage/new", this, false, true); } /** * Opens the Luggage list's help view. */ @FXML public void listHelp() { StageHelper.addStage("luggage/listHelp", this, false, true); } /** * Opens the Luggage edit view for the selected customer. */ @FXML public void listEdit() { LuggageModel luggage = (LuggageModel) listTableView.getSelectionModel().getSelectedItem(); if (luggage == null) { return; } MainActivity.editId = luggage.getId(); StageHelper.addPopup("luggage/edit", this, false, true); } /** * Triggers a confirmation dialog for removing the selected luggage item. */ @FXML public void listRemove() { Stage removeStage = (Stage) listTableView.getScene().getWindow(); Action response = Dialogs.create().owner(removeStage) .title("Remove luggage") //.masthead("Are you sure you want to delete this item? 2") .message("Are you sure you want to delete this luggage item?") .actions(Dialog.ACTION_OK, Dialog.ACTION_CANCEL) .showWarning(); if (response == Dialog.ACTION_OK) { LuggageModel luggage = (LuggageModel) listTableView.getSelectionModel().getSelectedItem(); if (luggage == null) { return; } luggage.delete(); listOnSearch(); } } /** * Opens the Luggage list view. 
*/ @FXML public void listView() { LuggageModel luggage = (LuggageModel) listTableView.getSelectionModel().getSelectedItem(); if (luggage == null) { return; } MainActivity.viewId = luggage.getId(); StageHelper.addPopup("luggage/view", this, false, true); } @FXML public void listExportToPdf() throws IOException, COSVisitorException { // Create a document and add a page to it PDDocument document = new PDDocument(); PDPage page = new PDPage(); document.addPage(page); // Create a new font object selecting one of the PDF base fonts PDFont font = PDType1Font.HELVETICA_BOLD; // Start a new content stream which will "hold" the to be created content PDPageContentStream contentStream = new PDPageContentStream(document, page); // Define a text content stream using the selected font contentStream.beginText(); contentStream.setFont(font, 12); contentStream.moveTextPositionByAmount(100, 100); contentStream.drawString("Corendon"); contentStream.endText(); contentStream.beginText(); contentStream.setFont(font, 12); contentStream.moveTextPositionByAmount(0, 48); contentStream.drawString("Corendon"); contentStream.endText(); contentStream.beginText(); contentStream.setFont(font, 12); contentStream.moveTextPositionByAmount(0, 48); contentStream.drawString("Corendon"); contentStream.endText(); // Make sure that the content stream is closed: contentStream.close(); // Save the results and ensure that the document is properly closed: document.save("Bon.pdf"); document.close(); Stage exportPdf = (Stage) listTableView.getScene().getWindow(); //Show a warning that the data has been exported to a PDF Action response = Dialogs.create().owner(exportPdf) .title("Export to PDF") //.masthead("Are you sure you want to delete this item? 2") .message("The data has been exported to a PDF file") .actions(Dialog.ACTION_OK) .showWarning(); //Log the action so that it is viewable in the log Debug.logToDatabase(LogModel.TYPE_INFO, "/*eenIdentifier + */" + "exported as PDF file."); } /** * * @param where * @param params */ public void listResetTableView(String where, String... params) { LuggageModel luggage = new LuggageModel(); List<Model> allLuggage = luggage.findAll(where, params); listData = FXCollections.observableArrayList(); for (Model allLuggage1 : allLuggage) { LuggageModel luggage2 = (LuggageModel) allLuggage1; listData.add(luggage2); } listTableViewId.setCellValueFactory(new PropertyValueFactory("id")); listTableViewStatus.setCellValueFactory(new PropertyValueFactory("status")); listTableViewTags.setCellValueFactory(new PropertyValueFactory("tags")); listTableViewDate.setCellValueFactory(new PropertyValueFactory("datetime")); listTableView.setItems(listData); } /** * Handles canceling and closing the new view. */ public void newCancel() { Stage addStage = (Stage) newCancel.getScene().getWindow(); StageHelper.closeStage(addStage); } /** * Resets all fields in the new view. */ public void newReset() { newTags.setText(""); newNotes.setText(""); newLocationId.setValue(null); newCustomerId.setValue(null); newStatus.setValue(null); newDate.setValue(null); } /** * Handles saving a new Luggage item. Checks if all necessary fields are * given and if so, writes to database. 
*/ public void newSave() { if (newLocationId.getSelectionModel().getSelectedItem() == null) { Dialogs.create() .owner((Stage) newLocationId.getScene().getWindow()) .title("Warning") .masthead("Selection error") .message("Please select the current location of the luggage or where to ship it to.") .showWarning(); return; } else if (newStatus.getSelectionModel().getSelectedItem() == null) { Dialogs.create() .owner((Stage) newStatus.getScene().getWindow()) .title("Warning") .masthead("Selection error") .message("Please select the status for the luggage item.") .showWarning(); return; } else if (newDate.getValue() == null) { Dialogs.create() .owner((Stage) newDate.getScene().getWindow()) .title("Warning") .masthead("Date format error") .message("Please enter or select the correct date for the luggage item.") .showWarning(); return; } LuggageModel luggage = new LuggageModel(); try { luggage.setCustomerId(Integer.toString(newCustomerId.getSelectionModel().getSelectedItem().getId())); } catch (NullPointerException n) { printStackTrace(n); } luggage.setLocationId(Integer.toString(newLocationId.getSelectionModel().getSelectedItem().getId())); luggage.setDatetime(newDate.getValue() + " 00:00:00"); luggage.setTags(newTags.getText()); luggage.setNotes(newNotes.getText()); luggage.setStatus(newStatus.getValue()); luggage.save(); LuggageController luggageController = (LuggageController) StageHelper.callbackController; luggageController.listOnSearch(); newCancel(); } /** * Populates the edit fields with the selected Luggage item's data. */ public void setEditFields() { LuggageModel luggage = new LuggageModel(MainActivity.editId); editTags.setText(luggage.getTags()); editNotes.setText(luggage.getNotes()); editStatus.setValue(luggage.getStatus()); LocalDate date = LocalDate.parse(luggage.getDatetime()); editDate.setValue(date); editLocationId.getSelectionModel().select(selectedLocation); editCustomerId.getSelectionModel().select(selectedCustomer); } /** * Populates the view fields with the selected Luggage item's data. */ public void setViewFields() { LuggageModel luggage = new LuggageModel(MainActivity.viewId); viewLocationId.getSelectionModel().select(selectedLocation); viewStatus.setValue(luggage.getStatus()); viewLocationAsText.setText(selectedLocation.toString()); try { viewCustomerId.getSelectionModel().select(selectedCustomer); viewCustomerAsText.setText(selectedCustomer.toString()); MainActivity.searchTerm = selectedCustomer.getFullname(); Debug.print("LuggageController setting: " + MainActivity.searchTerm); } catch (NullPointerException n) { printStackTrace(n); } viewStatusAsText.setText(luggage.getStatus()); viewTags.setText(luggage.getTags()); viewNotes.setText(luggage.getNotes()); LocalDate date = LocalDate.parse(luggage.getDatetime()); viewDate.setValue(date); } /** * Cancels editing a Luggage item, does not change saved data. */ public void editCancel() { Stage addStage = (Stage) editCancel.getScene().getWindow(); StageHelper.closeStage(addStage); } /** * Resets all fields in the edit view. */ public void editReset() { editTags.setText(""); editNotes.setText(""); editLocationId.setValue(null); editCustomerId.setValue(null); editStatus.setValue(null); editDate.setValue(null); } /** * Handles saving changes to an existing Luggage item. Checks if all * necessary fields are filled and if so, writes to database, overwriting * existing data for selected Customer. 
*/ public void editSave() { if (editLocationId.getSelectionModel().getSelectedItem() == null) { Dialogs.create() .owner((Stage) editLocationId.getScene().getWindow()) .title("Warning") .masthead("Selection error") .message("Please select the location of the luggage or where to ship it to.") .showWarning(); return; } else if (editStatus.getSelectionModel().getSelectedItem() == null) { Dialogs.create() .owner((Stage) editStatus.getScene().getWindow()) .title("Warning") .masthead("Selection error") .message("Please select the status for the luggage item.") .showWarning(); return; } else if (editDate.getValue() == null) { Dialogs.create() .owner((Stage) editDate.getScene().getWindow()) .title("Warning") .masthead("Date format error") .message("Please enter or select the correct date for the luggage item.") .showWarning(); return; } LuggageModel luggage = new LuggageModel(MainActivity.editId); try { luggage.setCustomerId(Integer.toString(editCustomerId.getSelectionModel().getSelectedItem().getId())); } catch (NullPointerException n) { printStackTrace(n); } luggage.setLocationId(Integer.toString(editLocationId.getSelectionModel().getSelectedItem().getId())); luggage.setDatetime(editDate.getValue() + " 00:00:00"); luggage.setTags(editTags.getText()); luggage.setNotes(editNotes.getText()); luggage.setStatus(editStatus.getValue()); luggage.save(); LuggageController luggageController = (LuggageController) StageHelper.callbackController; luggageController.listOnSearch(); editCancel(); } /** * Creates the (mouse, keyboard, etc.) event filters for the list view. */ public void listKeyActions() { listTableView.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.E)) { listEdit(); } else if (b.getCode().equals(KeyCode.H) || b.getCode().equals(KeyCode.F1)) { listHelp(); } else if (b.getCode().equals(KeyCode.N)) { listNew(); } else if (b.getCode().equals(KeyCode.R)) { listRemove(); } else if (b.getCode().equals(KeyCode.V) || b.getCode().equals(KeyCode.ENTER)) { listView(); } else if (b.getCode().equals(KeyCode.ESCAPE)) { listOnSearch(); } }); listSearchField.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { listResetTableView("", new String[0]); listSearchField.setText(""); clearNotif(); } }); } /** * Creates the (mouse, keyboard, etc.) event filters for the new view. 
*/ public void newKeyActions() { newLocationId.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newCustomerId.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newStatus.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newDate.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newTags.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newNotes.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newSave.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newReset.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newReset(); } }); newCancel.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE) || b.getCode().equals(KeyCode.ENTER)) { newCancel(); } }); } /** * Creates the (mouse, keyboard, etc.) event filters for the edit view. */ public void editKeyActions() { editLocationId.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editCustomerId.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editStatus.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editDate.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editTags.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editNotes.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editSave.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editReset.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { editReset(); } }); editCancel.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE) || b.getCode().equals(KeyCode.ENTER)) { editCancel(); } }); } /** * Creates the (mouse, keyboard, etc.) 
event filters for the view page. */ public void viewKeyActions() { viewLocationAsText.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewCustomerAsText.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewStatusAsText.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewDate.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewTags.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewNotes.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewClose.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); } /** * Prints given parameter as notification label. * * @param notif */ @FXML private void printNotif(String notif) { printNotif.setText(notif); } /** * Clears the notification label. */ @FXML private void clearNotif() { printNotif.setText(""); } /** * Clears the notification label. */ @FXML private void clearSearch() { listOnSearch(); clearNotif(); } /** * Closes current view. */ public void viewClose() { Stage addStage = (Stage) viewClose.getScene().getWindow(); StageHelper.closeStage(addStage); } }
src/luggage/controllers/LuggageController.java
/** * The MIT License (MIT) * * Copyright (c) 2014-2015 ITopia IS102-5 * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * */ package luggage.controllers; import java.io.IOException; import java.net.URL; import java.time.LocalDate; import java.util.List; import java.util.ResourceBundle; import javafx.application.Platform; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.fxml.FXML; import javafx.fxml.Initializable; import javafx.scene.control.Button; import javafx.scene.control.ChoiceBox; import javafx.scene.control.DatePicker; import javafx.scene.control.Label; import javafx.scene.control.TableColumn; import javafx.scene.control.TableView; import javafx.scene.control.TextField; import javafx.scene.control.cell.PropertyValueFactory; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyEvent; import javafx.stage.Stage; import static jdk.nashorn.internal.runtime.Context.printStackTrace; import luggage.Debug; import luggage.MainActivity; import luggage.database.models.CustomerModel; import luggage.database.models.LocationModel; import luggage.database.models.LogModel; import luggage.database.models.LuggageModel; import luggage.database.models.Model; import luggage.helpers.StageHelper; import org.apache.pdfbox.exceptions.COSVisitorException; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.PDPage; import org.apache.pdfbox.pdmodel.edit.PDPageContentStream; import org.apache.pdfbox.pdmodel.font.PDFont; import org.apache.pdfbox.pdmodel.font.PDType1Font; import org.controlsfx.control.action.Action; import org.controlsfx.dialog.Dialog; import org.controlsfx.dialog.Dialogs; /** * LuggageController * * Controller for luggage/list.fxml, luggage/new, luggage/edit, luggage/view, * and luggage/help. 
* * @package luggage.controllers * @author ITopia IS102-5 */ public class LuggageController extends BaseController implements Initializable { /** * LIST ELEMENTS */ @FXML private TableView listTableView; @FXML private TableColumn listTableViewId; @FXML private TableColumn listTableViewStatus; @FXML private TableColumn listTableViewTags; @FXML private TableColumn listTableViewDate; @FXML private Button listEdit; @FXML private Button listExportToPdf; @FXML private Button listHelp; @FXML private Button listNew; @FXML private Button listRemove; @FXML private Button listView; @FXML private TextField listSearchField; /** * NEW ELEMENTS */ @FXML private Button newCancel; @FXML private Button newReset; @FXML private Button newSave; @FXML private DatePicker newDate; @FXML private ChoiceBox<CustomerModel> newCustomerId; @FXML private ChoiceBox<LocationModel> newLocationId; @FXML private ChoiceBox<String> newStatus; @FXML private TextField newNotes; @FXML private TextField newTags; /** * EDIT ELEMENTS */ @FXML private Button editCancel; @FXML private Button editReset; @FXML private Button editSave; @FXML private DatePicker editDate; @FXML private ChoiceBox<CustomerModel> editCustomerId; @FXML private ChoiceBox<LocationModel> editLocationId; @FXML private ChoiceBox<String> editStatus; @FXML private TextField editNotes; @FXML private TextField editTags; /** * VIEW ELEMENTS */ @FXML private Button viewClose; @FXML private ChoiceBox<String> viewStatus; @FXML private ChoiceBox<LocationModel> viewLocationId; @FXML private ChoiceBox<CustomerModel> viewCustomerId; @FXML private DatePicker viewDate; @FXML private Label printNotif; @FXML private TextField viewStatusAsText; @FXML private TextField viewLocationAsText; @FXML private TextField viewCustomerAsText; @FXML private TextField viewNotes; @FXML private TextField viewTags; public String mlg = "holy shit"; private ObservableList<LuggageModel> listData = FXCollections.observableArrayList(); private final ObservableList<LocationModel> locationData = FXCollections.observableArrayList(); private final ObservableList<CustomerModel> customerData = FXCollections.observableArrayList(); /** * * Called on controller start * * @param url * @param rb */ @Override public void initialize(URL url, ResourceBundle rb) { Platform.runLater(new Runnable() { @Override public void run() { Debug.print("LUGGAGE CONTROLLER-----------------------------------------------------------------"); // List if (listTableView != null) { listResetTableView("", new String[0]); listEdit.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listRemove.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listView.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listExportToPdf.disableProperty().bind(listTableView.getSelectionModel().selectedItemProperty().isNull()); listTableView.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY); listKeyActions(); } // New if (newLocationId != null) { setNewChoiceBoxes(); newKeyActions(); } // Edit if (editLocationId != null) { setEditChoiceBoxes(); setEditFields(); editKeyActions(); } // View if (viewLocationId != null) { setViewChoiceBoxes(); setViewFields(); viewKeyActions(); } } }); } public LocationModel selectedLocation; public CustomerModel selectedCustomer; public void setNewChoiceBoxes() { // Locations LocationModel oLocationModel = new LocationModel(); List<Model> allLocations = oLocationModel.findAll(); for (Model allLocation : 
allLocations) { LocationModel location = (LocationModel) allLocation; locationData.add(location); } newLocationId.setItems(locationData); // Customers CustomerModel oCustomerModel = new CustomerModel(); List<Model> allCustomers = oCustomerModel.findAll(); for (Model allCustomer : allCustomers) { CustomerModel customer = (CustomerModel) allCustomer; customerData.add(customer); } newCustomerId.setItems(customerData); ObservableList<String> statuses = FXCollections.observableArrayList(); statuses.add("Missing"); statuses.add("Found"); statuses.add("Resolved"); newStatus.setItems(statuses); } public void setEditChoiceBoxes() { // Locations LocationModel oLocationModel = new LocationModel(); List<Model> allLocations = oLocationModel.findAll(); int selectedLocationId = new LuggageModel(MainActivity.editId).getLocationId(); for (Model allLocation : allLocations) { LocationModel location = (LocationModel) allLocation; if (location.getId() == selectedLocationId) { selectedLocation = location; } locationData.add(location); } editLocationId.setItems(locationData); // Customers CustomerModel oCustomerModel = new CustomerModel(); List<Model> allCustomers = oCustomerModel.findAll(); int selectedCustomerId = new LuggageModel(MainActivity.editId).getCustomerId(); for (Model allCustomer : allCustomers) { CustomerModel customer = (CustomerModel) allCustomer; if (customer.getId() == selectedCustomerId) { selectedCustomer = customer; } customerData.add(customer); } editCustomerId.setItems(customerData); ObservableList<String> statuses = FXCollections.observableArrayList(); statuses.add("Missing"); statuses.add("Found"); statuses.add("Resolved"); editStatus.setItems(statuses); } /** * Populates the view Location, Customer &amp; Status ChoiceBoxes. */ public void setViewChoiceBoxes() { // Locations LocationModel oLocationModel = new LocationModel(); List<Model> allLocations = oLocationModel.findAll(); int selectedLocationId = new LuggageModel(MainActivity.viewId).getLocationId(); for (Model allLocation : allLocations) { LocationModel location = (LocationModel) allLocation; if (location.getId() == selectedLocationId) { selectedLocation = location; } locationData.add(location); } viewLocationId.setItems(locationData); long startTime = System.nanoTime(); // Customers CustomerModel oCustomerModel = new CustomerModel(); List<Model> allCustomers = oCustomerModel.findAll(); int selectedCustomerId = new LuggageModel(MainActivity.viewId).getCustomerId(); for (Model allCustomer : allCustomers) { CustomerModel customer = (CustomerModel) allCustomer; if (customer.getId() == selectedCustomerId) { selectedCustomer = customer; } customerData.add(customer); } viewCustomerId.setItems(customerData); ObservableList<String> statuses = FXCollections.observableArrayList(); statuses.add("Missing"); statuses.add("Found"); statuses.add("Resolved"); viewStatus.setItems(statuses); long endTime = System.nanoTime(); long microseconds = ((endTime - startTime) / 1000); Debug.print("Luggage setViewChoiceBoxes()" + " took " + microseconds + " microseconds."); } /** * Handles the search field functionality. 
*/ @FXML protected void listOnSearch() { String[] keywords = listSearchField.getText().split("\\s+"); String[] params = new String[4 * keywords.length]; boolean firstColumn = true; String query = ""; for (int i = 0; i < keywords.length; i++) { if (firstColumn) { params[0 + i] = "%" + keywords[i] + "%"; query += "id LIKE ?"; } else { params[0 + i] = "%" + keywords[i] + "%"; query += " OR id LIKE ?"; } params[1 + i] = "%" + keywords[i] + "%"; query += " OR tags LIKE ?"; params[2 + i] = "%" + keywords[i] + "%"; query += " OR status LIKE ?"; params[3 + i] = "%" + keywords[i] + "%"; query += " OR datetime LIKE ?"; firstColumn = false; } listResetTableView(query, params); } /** * Opens the 'New Luggage' view. */ @FXML public void listNew() { StageHelper.addPopup("luggage/new", this, false, true); } /** * Opens the Luggage list's help view. */ @FXML public void listHelp() { StageHelper.addStage("luggage/listHelp", this, false, true); } /** * Opens the Luggage edit view for the selected customer. */ @FXML public void listEdit() { LuggageModel luggage = (LuggageModel) listTableView.getSelectionModel().getSelectedItem(); if (luggage == null) { return; } MainActivity.editId = luggage.getId(); StageHelper.addPopup("luggage/edit", this, false, true); } /** * Triggers a confirmation dialog for removing the selected luggage item. */ @FXML public void listRemove() { Stage removeStage = (Stage) listTableView.getScene().getWindow(); Action response = Dialogs.create().owner(removeStage) .title("Remove luggage") //.masthead("Are you sure you want to delete this item? 2") .message("Are you sure you want to delete this luggage item?") .actions(Dialog.ACTION_OK, Dialog.ACTION_CANCEL) .showWarning(); if (response == Dialog.ACTION_OK) { LuggageModel luggage = (LuggageModel) listTableView.getSelectionModel().getSelectedItem(); if (luggage == null) { return; } luggage.delete(); listOnSearch(); } } /** * Opens the Luggage list view. 
*/ @FXML public void listView() { LuggageModel luggage = (LuggageModel) listTableView.getSelectionModel().getSelectedItem(); if (luggage == null) { return; } MainActivity.viewId = luggage.getId(); StageHelper.addPopup("luggage/view", this, false, true); } @FXML public void listExportToPdf() throws IOException, COSVisitorException { // Create a document and add a page to it PDDocument document = new PDDocument(); PDPage page = new PDPage(); document.addPage(page); // Create a new font object selecting one of the PDF base fonts PDFont font = PDType1Font.HELVETICA_BOLD; // Start a new content stream which will "hold" the to be created content PDPageContentStream contentStream = new PDPageContentStream(document, page); // Define a text content stream using the selected font contentStream.beginText(); contentStream.setFont(font, 12); contentStream.moveTextPositionByAmount(100, 100); contentStream.drawString("Corendon"); contentStream.endText(); contentStream.beginText(); contentStream.setFont(font, 12); contentStream.moveTextPositionByAmount(100, 200); contentStream.drawString("Customer name:"); contentStream.endText(); contentStream.beginText(); contentStream.setFont(font, 12); contentStream.moveTextPositionByAmount(100, 300); contentStream.drawString("Luggage Id"); contentStream.endText(); contentStream.beginText(); contentStream.setFont(font, 12); contentStream.moveTextPositionByAmount(100, 400); contentStream.drawString("Location"); contentStream.endText(); // Make sure that the content stream is closed: contentStream.close(); // Save the results and ensure that the document is properly closed: document.save("Bon.pdf"); document.close(); Debug.logToDatabase(LogModel.TYPE_INFO, "/*eenIdentifier + */" + "exported as PDF file."); } /** * * @param where * @param params */ public void listResetTableView(String where, String... params) { LuggageModel luggage = new LuggageModel(); List<Model> allLuggage = luggage.findAll(where, params); listData = FXCollections.observableArrayList(); for (Model allLuggage1 : allLuggage) { LuggageModel luggage2 = (LuggageModel) allLuggage1; listData.add(luggage2); } listTableViewId.setCellValueFactory(new PropertyValueFactory("id")); listTableViewStatus.setCellValueFactory(new PropertyValueFactory("status")); listTableViewTags.setCellValueFactory(new PropertyValueFactory("tags")); listTableViewDate.setCellValueFactory(new PropertyValueFactory("datetime")); listTableView.setItems(listData); } /** * Handles canceling and closing the new view. */ public void newCancel() { Stage addStage = (Stage) newCancel.getScene().getWindow(); StageHelper.closeStage(addStage); } /** * Resets all fields in the new view. */ public void newReset() { newTags.setText(""); newNotes.setText(""); newLocationId.setValue(null); newCustomerId.setValue(null); newStatus.setValue(null); newDate.setValue(null); } /** * Handles saving a new Luggage item. Checks if all necessary fields are * given and if so, writes to database. 
*/ public void newSave() { if (newLocationId.getSelectionModel().getSelectedItem() == null) { Dialogs.create() .owner((Stage) newLocationId.getScene().getWindow()) .title("Warning") .masthead("Selection error") .message("Please select the current location of the luggage or where to ship it to.") .showWarning(); return; } else if (newStatus.getSelectionModel().getSelectedItem() == null) { Dialogs.create() .owner((Stage) newStatus.getScene().getWindow()) .title("Warning") .masthead("Selection error") .message("Please select the status for the luggage item.") .showWarning(); return; } else if (newDate.getValue() == null) { Dialogs.create() .owner((Stage) newDate.getScene().getWindow()) .title("Warning") .masthead("Date format error") .message("Please enter or select the correct date for the luggage item.") .showWarning(); return; } LuggageModel luggage = new LuggageModel(); try { luggage.setCustomerId(Integer.toString(newCustomerId.getSelectionModel().getSelectedItem().getId())); } catch (NullPointerException n) { printStackTrace(n); } luggage.setLocationId(Integer.toString(newLocationId.getSelectionModel().getSelectedItem().getId())); luggage.setDatetime(newDate.getValue() + " 00:00:00"); luggage.setTags(newTags.getText()); luggage.setNotes(newNotes.getText()); luggage.setStatus(newStatus.getValue()); luggage.save(); LuggageController luggageController = (LuggageController) StageHelper.callbackController; luggageController.listOnSearch(); newCancel(); } /** * Populates the edit fields with the selected Luggage item's data. */ public void setEditFields() { LuggageModel luggage = new LuggageModel(MainActivity.editId); editTags.setText(luggage.getTags()); editNotes.setText(luggage.getNotes()); editStatus.setValue(luggage.getStatus()); LocalDate date = LocalDate.parse(luggage.getDatetime()); editDate.setValue(date); editLocationId.getSelectionModel().select(selectedLocation); editCustomerId.getSelectionModel().select(selectedCustomer); } /** * Populates the view fields with the selected Luggage item's data. */ public void setViewFields() { LuggageModel luggage = new LuggageModel(MainActivity.viewId); viewLocationId.getSelectionModel().select(selectedLocation); viewStatus.setValue(luggage.getStatus()); viewLocationAsText.setText(selectedLocation.toString()); try { viewCustomerId.getSelectionModel().select(selectedCustomer); viewCustomerAsText.setText(selectedCustomer.toString()); MainActivity.searchTerm = selectedCustomer.getFullname(); Debug.print("LuggageController setting: " + MainActivity.searchTerm); } catch (NullPointerException n) { printStackTrace(n); } viewStatusAsText.setText(luggage.getStatus()); viewTags.setText(luggage.getTags()); viewNotes.setText(luggage.getNotes()); LocalDate date = LocalDate.parse(luggage.getDatetime()); viewDate.setValue(date); } /** * Cancels editing a Luggage item, does not change saved data. */ public void editCancel() { Stage addStage = (Stage) editCancel.getScene().getWindow(); StageHelper.closeStage(addStage); } /** * Resets all fields in the edit view. */ public void editReset() { editTags.setText(""); editNotes.setText(""); editLocationId.setValue(null); editCustomerId.setValue(null); editStatus.setValue(null); editDate.setValue(null); } /** * Handles saving changes to an existing Luggage item. Checks if all * necessary fields are filled and if so, writes to database, overwriting * existing data for selected Customer. 
*/ public void editSave() { if (editLocationId.getSelectionModel().getSelectedItem() == null) { Dialogs.create() .owner((Stage) editLocationId.getScene().getWindow()) .title("Warning") .masthead("Selection error") .message("Please select the location of the luggage or where to ship it to.") .showWarning(); return; } else if (editStatus.getSelectionModel().getSelectedItem() == null) { Dialogs.create() .owner((Stage) editStatus.getScene().getWindow()) .title("Warning") .masthead("Selection error") .message("Please select the status for the luggage item.") .showWarning(); return; } else if (editDate.getValue() == null) { Dialogs.create() .owner((Stage) editDate.getScene().getWindow()) .title("Warning") .masthead("Date format error") .message("Please enter or select the correct date for the luggage item.") .showWarning(); return; } LuggageModel luggage = new LuggageModel(MainActivity.editId); try { luggage.setCustomerId(Integer.toString(editCustomerId.getSelectionModel().getSelectedItem().getId())); } catch (NullPointerException n) { printStackTrace(n); } luggage.setLocationId(Integer.toString(editLocationId.getSelectionModel().getSelectedItem().getId())); luggage.setDatetime(editDate.getValue() + " 00:00:00"); luggage.setTags(editTags.getText()); luggage.setNotes(editNotes.getText()); luggage.setStatus(editStatus.getValue()); luggage.save(); LuggageController luggageController = (LuggageController) StageHelper.callbackController; luggageController.listOnSearch(); editCancel(); } /** * Creates the (mouse, keyboard, etc.) event filters for the list view. */ public void listKeyActions() { listTableView.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.E)) { listEdit(); } else if (b.getCode().equals(KeyCode.H) || b.getCode().equals(KeyCode.F1)) { listHelp(); } else if (b.getCode().equals(KeyCode.N)) { listNew(); } else if (b.getCode().equals(KeyCode.R)) { listRemove(); } else if (b.getCode().equals(KeyCode.V) || b.getCode().equals(KeyCode.ENTER)) { listView(); } else if (b.getCode().equals(KeyCode.ESCAPE)) { listOnSearch(); } }); listSearchField.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { listResetTableView("", new String[0]); listSearchField.setText(""); clearNotif(); } }); } /** * Creates the (mouse, keyboard, etc.) event filters for the new view. 
*/ public void newKeyActions() { newLocationId.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newCustomerId.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newStatus.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newDate.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newTags.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newNotes.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newSave.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newSave(); } }); newReset.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { newReset(); } }); newCancel.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE) || b.getCode().equals(KeyCode.ENTER)) { newCancel(); } }); } /** * Creates the (mouse, keyboard, etc.) event filters for the edit view. */ public void editKeyActions() { editLocationId.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editCustomerId.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editStatus.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editDate.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editTags.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editNotes.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE)) { editCancel(); } else if (evt.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editSave.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { editSave(); } }); editReset.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE)) { newCancel(); } else if (b.getCode().equals(KeyCode.ENTER)) { editReset(); } }); editCancel.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent b) -> { if (b.getCode().equals(KeyCode.ESCAPE) || b.getCode().equals(KeyCode.ENTER)) { editCancel(); } }); } /** * Creates the (mouse, keyboard, etc.) 
event filters for the view page. */ public void viewKeyActions() { viewLocationAsText.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewCustomerAsText.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewStatusAsText.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewDate.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewTags.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewNotes.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); viewClose.addEventFilter(KeyEvent.KEY_PRESSED, (KeyEvent evt) -> { if (evt.getCode().equals(KeyCode.ESCAPE) || evt.getCode().equals(KeyCode.ENTER)) { viewClose(); } }); } /** * Prints given parameter as notification label. * * @param notif */ @FXML private void printNotif(String notif) { printNotif.setText(notif); } /** * Clears the notification label. */ @FXML private void clearNotif() { printNotif.setText(""); } /** * Clears the notification label. */ @FXML private void clearSearch() { listOnSearch(); clearNotif(); } /** * Closes current view. */ public void viewClose() { Stage addStage = (Stage) viewClose.getScene().getWindow(); StageHelper.closeStage(addStage); } }
Hi, Jasper here. I made sure a warning now appears when you click the ExportToPdf button. So you can say that this time I actually did something useful. Yay for Jasper. Sorry the message is so long. :D
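The message describes adding a warning when the ExportToPdf button is clicked; the actual change is not visible in this record, but a minimal sketch of how such a warning could be wired with the ControlsFX Dialogs API already used elsewhere in this controller (the dialog text and the no-selection trigger condition are assumptions for illustration, not the commit's contents) might look like this:

    // Illustrative sketch only, not the actual diff: warn before exporting when nothing is selected.
    @FXML
    public void listExportToPdf() throws IOException, COSVisitorException {
        LuggageModel luggage = (LuggageModel) listTableView.getSelectionModel().getSelectedItem();
        if (luggage == null) {
            Dialogs.create()
                    .owner((Stage) listTableView.getScene().getWindow())
                    .title("Warning")
                    .masthead("Export to PDF")
                    .message("Please select a luggage item to export.")
                    .showWarning();
            return;
        }
        // ... the existing PDF generation code would follow here ...
    }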
src/luggage/controllers/LuggageController.java
Hi, Jasper here.
Java
mit
137bfbfcaa199819f17477b9b031f23f8d671f4e
0
alanjds/python-for-android,gonboy/python-for-android,wexi/python-for-android,vaginessa/python-for-android,inclement/python-for-android,kivatu/python-for-android,pybee/Python-Android-support,alanjds/python-for-android,PKRoma/python-for-android,germn/python-for-android,ehealthafrica-ci/python-for-android,PKRoma/python-for-android,gonboy/python-for-android,ravsa/python-for-android,kivatu/python-for-android,EMATech/python-for-android,renpytom/python-for-android,inclement/python-for-android,kerr-huang/python-for-android,ibobalo/python-for-android,kived/python-for-android,olymk2/python-for-android,kivatu/python-for-android,kerr-huang/python-for-android,ckudzu/python-for-android,rnixx/python-for-android,vaginessa/python-for-android,renpytom/python-for-android,dl1ksv/python-for-android,kerr-huang/python-for-android,dongguangming/python-for-android,bob-the-hamster/python-for-android,niavlys/python-for-android,ASMfreaK/python-for-android,Cheaterman/python-for-android,ehealthafrica-ci/python-for-android,bob-the-hamster/python-for-android,Stocarson/python-for-android,kivy/python-for-android,alanjds/python-for-android,germn/python-for-android,dvenkatsagar/python-for-android,kived/python-for-android,dl1ksv/python-for-android,Stocarson/python-for-android,kivy/python-for-android,Cheaterman/python-for-android,eHealthAfrica/python-for-android,kived/python-for-android,bob-the-hamster/python-for-android,inclement/python-for-android,dl1ksv/python-for-android,pybee/Python-Android-support,Stocarson/python-for-android,ravsa/python-for-android,Stocarson/python-for-android,ASMfreaK/python-for-android,PKRoma/python-for-android,wexi/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,kivatu/python-for-android,kronenpj/python-for-android,kerr-huang/python-for-android,tsdl2013/python-for-android,tsdl2013/python-for-android,wexi/python-for-android,olymk2/python-for-android,manashmndl/python-for-android,ckudzu/python-for-android,vaginessa/python-for-android,niavlys/python-for-android,niavlys/python-for-android,rnixx/python-for-android,ibobalo/python-for-android,kivy/python-for-android,lc-soft/python-for-android,codingang/python-for-android,ehealthafrica-ci/python-for-android,ravsa/python-for-android,dvenkatsagar/python-for-android,Cheaterman/python-for-android,codingang/python-for-android,wexi/python-for-android,ckudzu/python-for-android,ehealthafrica-ci/python-for-android,joliet0l/python-for-android,lc-soft/python-for-android,kivatu/python-for-android,ravsa/python-for-android,alanjds/python-for-android,EMATech/python-for-android,dvenkatsagar/python-for-android,germn/python-for-android,dvenkatsagar/python-for-android,niavlys/python-for-android,Cheaterman/python-for-android,Stocarson/python-for-android,kronenpj/python-for-android,eHealthAfrica/python-for-android,bob-the-hamster/python-for-android,renpytom/python-for-android,dongguangming/python-for-android,joliet0l/python-for-android,niavlys/python-for-android,ehealthafrica-ci/python-for-android,olymk2/python-for-android,chozabu/p4a-ctypes,gonboy/python-for-android,kronenpj/python-for-android,EMATech/python-for-android,EMATech/python-for-android,ckudzu/python-for-android,lc-soft/python-for-android,joliet0l/python-for-android,PKRoma/python-for-android,joliet0l/python-for-android,kived/python-for-android,ravsa/python-for-android,manashmndl/python-for-android,niavlys/python-for-android,vaginessa/python-for-android,ASMfreaK/python-for-android,Stocarson/python-for-android,alanjds/python-for-android,manashmndl/python-for-android,cbenhagen/python-for-android,w
exi/python-for-android,gonboy/python-for-android,kivatu/python-for-android,germn/python-for-android,chozabu/p4a-ctypes,EMATech/python-for-android,chozabu/p4a-ctypes,ASMfreaK/python-for-android,chozabu/p4a-ctypes,dongguangming/python-for-android,codingang/python-for-android,lc-soft/python-for-android,bob-the-hamster/python-for-android,ibobalo/python-for-android,cbenhagen/python-for-android,kerr-huang/python-for-android,eHealthAfrica/python-for-android,tsdl2013/python-for-android,ASMfreaK/python-for-android,cbenhagen/python-for-android,ckudzu/python-for-android,ckudzu/python-for-android,inclement/python-for-android,eHealthAfrica/python-for-android,ravsa/python-for-android,cbenhagen/python-for-android,gonboy/python-for-android,manashmndl/python-for-android,ravsa/python-for-android,EMATech/python-for-android,ASMfreaK/python-for-android,codingang/python-for-android,Cheaterman/python-for-android,germn/python-for-android,chozabu/p4a-ctypes,joliet0l/python-for-android,kivy/python-for-android,ibobalo/python-for-android,rnixx/python-for-android,rnixx/python-for-android,kerr-huang/python-for-android,ehealthafrica-ci/python-for-android,kronenpj/python-for-android,lc-soft/python-for-android,ibobalo/python-for-android,EMATech/python-for-android,dl1ksv/python-for-android,renpytom/python-for-android,renpytom/python-for-android,cbenhagen/python-for-android,ehealthafrica-ci/python-for-android,tsdl2013/python-for-android,dvenkatsagar/python-for-android,olymk2/python-for-android,rnixx/python-for-android,germn/python-for-android,ASMfreaK/python-for-android,ckudzu/python-for-android,codingang/python-for-android,dl1ksv/python-for-android,chozabu/p4a-ctypes,ehealthafrica-ci/python-for-android,Stocarson/python-for-android,alanjds/python-for-android,joliet0l/python-for-android,eHealthAfrica/python-for-android,manashmndl/python-for-android,niavlys/python-for-android,kivy/python-for-android,gonboy/python-for-android,olymk2/python-for-android,kivatu/python-for-android,eHealthAfrica/python-for-android,vaginessa/python-for-android,Cheaterman/python-for-android,olymk2/python-for-android,gonboy/python-for-android,ibobalo/python-for-android,ASMfreaK/python-for-android,ckudzu/python-for-android,manashmndl/python-for-android,manashmndl/python-for-android,chozabu/p4a-ctypes,dongguangming/python-for-android,vaginessa/python-for-android,dongguangming/python-for-android,vaginessa/python-for-android,gonboy/python-for-android,inclement/python-for-android,niavlys/python-for-android,tsdl2013/python-for-android,PKRoma/python-for-android,joliet0l/python-for-android,codingang/python-for-android,manashmndl/python-for-android,alanjds/python-for-android,dvenkatsagar/python-for-android,tsdl2013/python-for-android,ravsa/python-for-android,dongguangming/python-for-android,dongguangming/python-for-android,dl1ksv/python-for-android,lc-soft/python-for-android,codingang/python-for-android,kerr-huang/python-for-android,tsdl2013/python-for-android,dl1ksv/python-for-android,dl1ksv/python-for-android,olymk2/python-for-android,Cheaterman/python-for-android,EMATech/python-for-android,eHealthAfrica/python-for-android,olymk2/python-for-android,kived/python-for-android,bob-the-hamster/python-for-android,tsdl2013/python-for-android,vaginessa/python-for-android,inclement/python-for-android,Cheaterman/python-for-android,codingang/python-for-android,dvenkatsagar/python-for-android,wexi/python-for-android,Stocarson/python-for-android,kived/python-for-android,dongguangming/python-for-android,cbenhagen/python-for-android,dvenkatsagar/python-for-android,renpytom/
python-for-android,joliet0l/python-for-android
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // This string is autogenerated by ChangeAppSettings.sh, do not change spaces amount package org.renpy.android; import javax.microedition.khronos.egl.EGL10; import javax.microedition.khronos.egl.EGL11; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.egl.EGLContext; import javax.microedition.khronos.egl.EGLDisplay; import javax.microedition.khronos.egl.EGLSurface; import javax.microedition.khronos.opengles.GL; import javax.microedition.khronos.opengles.GL10; import android.opengl.GLES20; import android.opengl.Matrix; import android.app.Activity; import android.content.Context; import android.content.pm.ActivityInfo; import android.util.AttributeSet; import android.util.Log; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.opengl.GLSurfaceView; import android.view.MotionEvent; import android.view.KeyEvent; import android.os.Build; import android.os.PowerManager; import java.io.IOException; import java.io.InputStream; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.opengl.GLUtils; import java.nio.FloatBuffer; import java.nio.ByteBuffer; import java.nio.ByteOrder; import android.graphics.Color; import android.content.res.Resources; public class SDLSurfaceView extends SurfaceView implements SurfaceHolder.Callback, Runnable { private static String TAG = "SDLSurface"; private final String mVertexShader = "uniform mat4 uMVPMatrix;\n" + "attribute vec4 aPosition;\n" + "attribute vec2 aTextureCoord;\n" + "varying vec2 vTextureCoord;\n" + "void main() {\n" + " gl_Position = uMVPMatrix * aPosition;\n" + " vTextureCoord = aTextureCoord;\n" + "}\n"; private final String mFragmentShader = "precision mediump float;\n" + "varying vec2 vTextureCoord;\n" + "uniform sampler2D sTexture;\n" + "void main() {\n" + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + "}\n"; private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser { public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) { mRedSize = r; mGreenSize = g; mBlueSize = b; mAlphaSize = a; mDepthSize = depth; mStencilSize = stencil; } /* This EGL config specification is used to specify 2.0 rendering. * We use a minimum size of 4 bits for red/green/blue, but will * perform actual matching in chooseConfig() below. 
*/ private static int EGL_OPENGL_ES2_BIT = 4; private static int[] s_configAttribs2 = { EGL10.EGL_RED_SIZE, 4, EGL10.EGL_GREEN_SIZE, 4, EGL10.EGL_BLUE_SIZE, 4, EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL10.EGL_NONE }; public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) { /* Get the number of minimally matching EGL configurations */ int[] num_config = new int[1]; egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config); int numConfigs = num_config[0]; if (numConfigs <= 0) { throw new IllegalArgumentException("No configs match configSpec"); } /* Allocate then read the array of minimally matching EGL configs */ EGLConfig[] configs = new EGLConfig[numConfigs]; egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config); /* Now return the "best" one */ //printConfigs(egl, display, configs); return chooseConfig(egl, display, configs); } public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) { for(EGLConfig config : configs) { int d = findConfigAttrib(egl, display, config, EGL10.EGL_DEPTH_SIZE, 0); int s = findConfigAttrib(egl, display, config, EGL10.EGL_STENCIL_SIZE, 0); // We need at least mDepthSize and mStencilSize bits if (d < mDepthSize || s < mStencilSize) continue; // We want an *exact* match for red/green/blue/alpha int r = findConfigAttrib(egl, display, config, EGL10.EGL_RED_SIZE, 0); int g = findConfigAttrib(egl, display, config, EGL10.EGL_GREEN_SIZE, 0); int b = findConfigAttrib(egl, display, config, EGL10.EGL_BLUE_SIZE, 0); int a = findConfigAttrib(egl, display, config, EGL10.EGL_ALPHA_SIZE, 0); if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize) return config; } return null; } private int findConfigAttrib(EGL10 egl, EGLDisplay display, EGLConfig config, int attribute, int defaultValue) { if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) { return mValue[0]; } return defaultValue; } private void printConfigs(EGL10 egl, EGLDisplay display, EGLConfig[] configs) { int numConfigs = configs.length; Log.w(TAG, String.format("%d configurations", numConfigs)); for (int i = 0; i < numConfigs; i++) { Log.w(TAG, String.format("Configuration %d:\n", i)); printConfig(egl, display, configs[i]); } } private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config) { int[] attributes = { EGL10.EGL_BUFFER_SIZE, EGL10.EGL_ALPHA_SIZE, EGL10.EGL_BLUE_SIZE, EGL10.EGL_GREEN_SIZE, EGL10.EGL_RED_SIZE, EGL10.EGL_DEPTH_SIZE, EGL10.EGL_STENCIL_SIZE, EGL10.EGL_CONFIG_CAVEAT, EGL10.EGL_CONFIG_ID, EGL10.EGL_LEVEL, EGL10.EGL_MAX_PBUFFER_HEIGHT, EGL10.EGL_MAX_PBUFFER_PIXELS, EGL10.EGL_MAX_PBUFFER_WIDTH, EGL10.EGL_NATIVE_RENDERABLE, EGL10.EGL_NATIVE_VISUAL_ID, EGL10.EGL_NATIVE_VISUAL_TYPE, 0x3030, // EGL10.EGL_PRESERVED_RESOURCES, EGL10.EGL_SAMPLES, EGL10.EGL_SAMPLE_BUFFERS, EGL10.EGL_SURFACE_TYPE, EGL10.EGL_TRANSPARENT_TYPE, EGL10.EGL_TRANSPARENT_RED_VALUE, EGL10.EGL_TRANSPARENT_GREEN_VALUE, EGL10.EGL_TRANSPARENT_BLUE_VALUE, 0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB, 0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA, 0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL, 0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL, EGL10.EGL_LUMINANCE_SIZE, EGL10.EGL_ALPHA_MASK_SIZE, EGL10.EGL_COLOR_BUFFER_TYPE, EGL10.EGL_RENDERABLE_TYPE, 0x3042 // EGL10.EGL_CONFORMANT }; String[] names = { "EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE", "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE", "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT", "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT", "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH", 
"EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID", "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES", "EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE", "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE", "EGL_TRANSPARENT_GREEN_VALUE", "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB", "EGL_BIND_TO_TEXTURE_RGBA", "EGL_MIN_SWAP_INTERVAL", "EGL_MAX_SWAP_INTERVAL", "EGL_LUMINANCE_SIZE", "EGL_ALPHA_MASK_SIZE", "EGL_COLOR_BUFFER_TYPE", "EGL_RENDERABLE_TYPE", "EGL_CONFORMANT" }; int[] value = new int[1]; for (int i = 0; i < attributes.length; i++) { int attribute = attributes[i]; String name = names[i]; if ( egl.eglGetConfigAttrib(display, config, attribute, value)) { Log.w(TAG, String.format(" %s: %d\n", name, value[0])); } else { // Log.w(TAG, String.format(" %s: failed\n", name)); while (egl.eglGetError() != EGL10.EGL_SUCCESS); } } } // Subclasses can adjust these values: protected int mRedSize; protected int mGreenSize; protected int mBlueSize; protected int mAlphaSize; protected int mDepthSize; protected int mStencilSize; private int[] mValue = new int[1]; } // The activity we're a part of. private Activity mActivity; // Have we started yet? public boolean mStarted = false; // Is Python ready to receive input events? static boolean mInputActivated = false; // The number of swaps we should skip. Experimentally derived from // watching SDL initialize. // XXX Kivy no swap skips, because kivy draw when needed. // XXX If we lost our first frame, we have a black screen. private int mSwapSkips = 0; // The number of times we should clear the screen after swap. private int mClears = 2; // Has the display been changed? private boolean mChanged = false; // Are we running yet? private boolean mRunning = false; // The EGL used by our thread. private EGL10 mEgl = null; // The EGL Display used. private EGLDisplay mEglDisplay = null; // The EGL Context used. private EGLContext mEglContext = null; // The EGL Surface used. private EGLSurface mEglSurface = null; // The EGL Config used. private EGLConfig mEglConfig = null; // The user program is not participating in the pause protocol. public final int PAUSE_NOT_PARTICIPATING = 0; // A pause has not been requested by the OS. public final int PAUSE_NONE = 1; // A pause has been requested by Android, but the user program has // not bothered responding yet. public final int PAUSE_REQUEST = 2; // The user program is waiting in waitForResume. public final int PAUSE_WAIT_FOR_RESUME = 3; // This stores the state of the pause system. private int mPause = PAUSE_NOT_PARTICIPATING; private PowerManager.WakeLock wakeLock; // The width and height. (This should be set at startup time - // these values just prevent segfaults and divide by zero, etc.) int mWidth = 100; int mHeight = 100; // The name of the directory where the context stores its files. String mFilesDirectory = null; // The value of the argument passed in. String mArgument = null; // The resource manager we use. 
ResourceManager mResourceManager; public SDLSurfaceView(Activity act, String argument) { super(act); mActivity = act; mResourceManager = new ResourceManager(act); SurfaceHolder holder = getHolder(); holder.addCallback(this); holder.setType(SurfaceHolder.SURFACE_TYPE_GPU); mFilesDirectory = mActivity.getFilesDir().getAbsolutePath(); mArgument = argument; PowerManager pm = (PowerManager) act.getSystemService(Context.POWER_SERVICE); wakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "Screen On"); } /** * The user program should call this frequently to check if a * pause has been requested by android. If this ever returns * true, the user program should clean up and call waitForResume. */ public int checkPause() { if (mPause == PAUSE_NOT_PARTICIPATING) { mPause = PAUSE_NONE; } if (mPause == PAUSE_REQUEST) { return 1; } else { return 0; } } /** * The user program should call this quickly after checkPause * returns true. This causes the android application to sleep, * waiting for resume. While sleeping, it should not have any * activity. (Notably, it should stop all timers.) * * While we're waiting in this method, android is allowed to * kill us to reclaim memory, without any further warning. */ public void waitForResume() { synchronized (this) { mPause = PAUSE_WAIT_FOR_RESUME; // Notify any threads waiting in onPause. this.notifyAll(); while (mPause == PAUSE_WAIT_FOR_RESUME) { try { this.wait(); } catch (InterruptedException e) { } } } } /** * Inform the view that the activity is paused. The owner of this view must * call this method when the activity is paused. Calling this method will * pause the rendering thread. * Must not be called before a renderer has been set. */ public void onPause() { synchronized (this) { if (mPause == PAUSE_NONE) { mPause = PAUSE_REQUEST; while (mPause == PAUSE_REQUEST) { try { this.wait(); } catch (InterruptedException e) { // pass } } } } wakeLock.release(); } /** * Inform the view that the activity is resumed. The owner of this view must * call this method when the activity is resumed. Calling this method will * recreate the OpenGL display and resume the rendering * thread. * Must not be called before a renderer has been set. */ public void onResume() { synchronized (this) { if (mPause == PAUSE_WAIT_FOR_RESUME) { mPause = PAUSE_NONE; this.notifyAll(); } } wakeLock.acquire(); } /** * This method is part of the SurfaceHolder.Callback interface, and is * not normally called or subclassed by clients of GLSurfaceView. */ public void surfaceCreated(SurfaceHolder holder) { } /** * This method is part of the SurfaceHolder.Callback interface, and is * not normally called or subclassed by clients of GLSurfaceView. */ public void surfaceDestroyed(SurfaceHolder holder) { } /** * This method is part of the SurfaceHolder.Callback interface, and is * not normally called or subclassed by clients of GLSurfaceView. */ public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { mWidth = w; mHeight = h; if (mActivity.getRequestedOrientation() == ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE && mWidth < mHeight) { return; } if (mActivity.getRequestedOrientation() == ActivityInfo.SCREEN_ORIENTATION_PORTRAIT && mWidth > mHeight) { return; } if (!mRunning) { mRunning = true; new Thread(this).start(); } else { mChanged = true; } } public void run() { mEgl = (EGL10) EGLContext.getEGL(); mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); int[] version = new int[2]; mEgl.eglInitialize(mEglDisplay, version); // Pick an appropriate config. 
We just take the first config // the system offers to us, because anything more complicated // than that stands a really good chance of not working. int[] configSpec = { // RENDERABLE_TYPE = OpenGL ES is the default. EGL10.EGL_NONE }; EGLConfig[] configs = new EGLConfig[1]; int EGL_CONTEXT_CLIENT_VERSION = 0x3098; int[] num_config = new int[1]; int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; // Create an opengl es 2.0 surface Log.w(TAG, "Choose egl configuration"); int configToTest = 0; boolean configFound = false; while (true) { try { if (configToTest == 0) { Log.i(TAG, "Try to use graphics config R8G8B8A8S8"); ConfigChooser chooser = new ConfigChooser(8, 8, 8, 8, 0, 8); mEglConfig = chooser.chooseConfig(mEgl, mEglDisplay); } else if (configToTest == 1) { Log.i(TAG, "Try to use graphics config R5G6B5S8"); ConfigChooser chooser = new ConfigChooser(5, 6, 5, 0, 0, 8); mEglConfig = chooser.chooseConfig(mEgl, mEglDisplay); } else { Log.e(TAG, "Unable to found a correct surface for this device !"); break; } } catch (IllegalArgumentException e) { configToTest++; continue; } Log.w(TAG, "Create egl context"); mEglContext = mEgl.eglCreateContext(mEglDisplay, mEglConfig, EGL10.EGL_NO_CONTEXT, attrib_list); if (mEglContext == null) { Log.w(TAG, "Unable to create egl context with this configuration, try the next one."); configToTest++; continue; } Log.w(TAG, "Create egl surface"); if (!createSurface()) { Log.w(TAG, "Unable to create egl surface with this configuration, try the next one."); configToTest++; continue; } configFound = true; break; } if (!configFound) { System.exit(0); return; } Log.w(TAG, "Done"); waitForStart(); nativeResize(mWidth, mHeight); nativeInitJavaCallbacks(); nativeSetEnv("ANDROID_PRIVATE", mFilesDirectory); nativeSetEnv("ANDROID_ARGUMENT", mArgument); nativeSetEnv("PYTHONOPTIMIZE", "2"); nativeSetEnv("PYTHONHOME", mFilesDirectory); nativeSetEnv("PYTHONPATH", mArgument + ":" + mFilesDirectory + "/lib"); //nativeSetMouseUsed(); nativeSetMultitouchUsed(); nativeInit(); System.exit(0); } private void glCheck(GL10 gl) { int gle = gl.glGetError(); if (gle != gl.GL_NO_ERROR) { throw new RuntimeException("GL Error: " + gle); } } private void waitForStart() { int presplashId = mResourceManager.getIdentifier("presplash", "drawable"); InputStream is = mActivity.getResources().openRawResource(presplashId); Bitmap bitmap = null; try { bitmap = BitmapFactory.decodeStream(is); } finally { try { is.close(); } catch (IOException e) { } } mTriangleVertices = ByteBuffer.allocateDirect(mTriangleVerticesData.length * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer(); mTriangleVertices.put(mTriangleVerticesData).position(0); mProgram = createProgram(mVertexShader, mFragmentShader); if (mProgram == 0) { synchronized (this) { while (!mStarted) { try { this.wait(250); } catch (InterruptedException e) { continue; } } } return; } maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition"); checkGlError("glGetAttribLocation aPosition"); if (maPositionHandle == -1) { throw new RuntimeException("Could not get attrib location for aPosition"); } maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord"); checkGlError("glGetAttribLocation aTextureCoord"); if (maTextureHandle == -1) { throw new RuntimeException("Could not get attrib location for aTextureCoord"); } muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix"); checkGlError("glGetUniformLocation uMVPMatrix"); if (muMVPMatrixHandle == -1) { throw new RuntimeException("Could not 
get attrib location for uMVPMatrix"); } // Create our texture. This has to be done each time the // surface is created. int[] textures = new int[1]; GLES20.glGenTextures(1, textures, 0); mTextureID = textures[0]; GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); Matrix.setLookAtM(mVMatrix, 0, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f); GLES20.glViewport(0, 0, mWidth, mHeight); if (bitmap != null) { float mx = ((float)mWidth / bitmap.getWidth()) / 2.0f; float my = ((float)mHeight / bitmap.getHeight()) / 2.0f; Matrix.orthoM(mProjMatrix, 0, -mx, mx, my, -my, 0, 10); int value = bitmap.getPixel(0, 0); Color color = new Color(); GLES20.glClearColor( (float)color.red(value) / 255.0f, (float)color.green(value) / 255.0f, (float)color.blue(value) / 255.0f, 0.0f); } else { Matrix.orthoM(mProjMatrix, 0, -1, 1, -1, 1, 0, 10); GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); } GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT); GLES20.glUseProgram(mProgram); checkGlError("glUseProgram"); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID); mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET); GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); checkGlError("glVertexAttribPointer maPosition"); mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET); GLES20.glEnableVertexAttribArray(maPositionHandle); checkGlError("glEnableVertexAttribArray maPositionHandle"); GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); checkGlError("glVertexAttribPointer maTextureHandle"); GLES20.glEnableVertexAttribArray(maTextureHandle); checkGlError("glEnableVertexAttribArray maTextureHandle"); Matrix.setRotateM(mMMatrix, 0, 0, 0, 0, 1.0f); Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0); Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0); GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0); GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6); checkGlError("glDrawArrays"); mEgl.eglSwapBuffers(mEglDisplay, mEglSurface); // Wait to be notified it's okay to start Python. synchronized (this) { while (!mStarted) { // Draw & Flip. GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT); GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6); mEgl.eglSwapBuffers(mEglDisplay, mEglSurface); try { this.wait(250); } catch (InterruptedException e) { continue; } } } GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); // Delete texture. GLES20.glDeleteTextures(1, textures, 0); if (bitmap != null) bitmap.recycle(); // Delete program GLES20.glDeleteProgram(mProgram); } public void start() { this.setFocusableInTouchMode(true); this.setFocusable(true); this.requestFocus(); synchronized (this) { mStarted = true; this.notify(); } } public boolean createSurface() { mChanged = false; // Destroy the old surface. if (mEglSurface != null) { /* * Unbind and destroy the old EGL surface, if * there is one. 
*/ mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT); mEgl.eglDestroySurface(mEglDisplay, mEglSurface); } // Create a new surface. mEglSurface = mEgl.eglCreateWindowSurface( mEglDisplay, mEglConfig, getHolder(), null); // Make the new surface current. boolean rv = mEgl.eglMakeCurrent( mEglDisplay, mEglSurface, mEglSurface, mEglContext); if (!rv) { mEglSurface = null; return false; } if (mStarted) { nativeResize(mWidth, mHeight); } return true; } public int swapBuffers() { // Prevent us from drawing too early, at startup. if (mSwapSkips-- > 0) { return 1; } // If the display has been changed, then disregard all the // rendering we've done to it, and make a new surface. // // Otherwise, swap the buffers. if (mChanged) { createSurface(); mClears = 2; return 0; } else { mEgl.eglSwapBuffers(mEglDisplay, mEglSurface); if (mClears-- != 0) { GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); } return 1; } } private static final int INVALID_POINTER_ID = -1; private int mActivePointerId = INVALID_POINTER_ID; @Override public boolean onTouchEvent(final MotionEvent event) { if (mInputActivated == false) return true; int action = event.getAction() & MotionEvent.ACTION_MASK; int sdlAction = -1; int pointerId = -1; int pointerIndex = -1; switch ( action ) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_POINTER_DOWN: sdlAction = 0; break; case MotionEvent.ACTION_MOVE: sdlAction = 2; break; case MotionEvent.ACTION_UP: case MotionEvent.ACTION_POINTER_UP: sdlAction = 1; break; } // http://android-developers.blogspot.com/2010/06/making-sense-of-multitouch.html switch ( action & MotionEvent.ACTION_MASK ) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_MOVE: case MotionEvent.ACTION_UP: pointerIndex = event.findPointerIndex(mActivePointerId); break; case MotionEvent.ACTION_POINTER_DOWN: case MotionEvent.ACTION_POINTER_UP: pointerIndex = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT; if ( action == MotionEvent.ACTION_POINTER_UP ) { pointerId = event.getPointerId(pointerIndex); if ( pointerId == mActivePointerId ) mActivePointerId = event.getPointerId(pointerIndex == 0 ? 
1 : 0); } break; } if ( sdlAction >= 0 ) { for ( int i = 0; i < event.getPointerCount(); i++ ) { if ( pointerIndex == -1 || pointerIndex == i ) { /** Log.i("python", String.format("mouse id=%d action=%d x=%f y=%f", event.getPointerId(i), sdlAction, event.getX(i), event.getY(i) )); **/ SDLSurfaceView.nativeMouse( (int)event.getX(i), (int)event.getY(i), sdlAction, event.getPointerId(i), (int)(event.getPressure(i) * 1000.0), (int)(event.getSize(i) * 1000.0)); } } } synchronized (this) { try { this.wait(1000 / 60); } catch (InterruptedException e) { } } return true; }; @Override public boolean onKeyDown(int keyCode, final KeyEvent event) { Log.i("python", String.format("key down %d", keyCode)); if (mInputActivated && nativeKey(keyCode, 1, event.getUnicodeChar())) { return true; } else { return super.onKeyDown(keyCode, event); } } @Override public boolean onKeyUp(int keyCode, final KeyEvent event) { Log.i("python", String.format("key up %d", keyCode)); if (mInputActivated && nativeKey(keyCode, 0, event.getUnicodeChar())) { return true; } else { return super.onKeyUp(keyCode, event); } } static void activateInput() { mInputActivated = true; } // Taken from the "GLES20TriangleRenderer" in Android SDK private int loadShader(int shaderType, String source) { int shader = GLES20.glCreateShader(shaderType); if (shader != 0) { GLES20.glShaderSource(shader, source); GLES20.glCompileShader(shader); int[] compiled = new int[1]; GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); if (compiled[0] == 0) { Log.e(TAG, "Could not compile shader " + shaderType + ":"); Log.e(TAG, GLES20.glGetShaderInfoLog(shader)); GLES20.glDeleteShader(shader); shader = 0; } } return shader; } private int createProgram(String vertexSource, String fragmentSource) { int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); if (vertexShader == 0) { return 0; } int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); if (pixelShader == 0) { return 0; } int program = GLES20.glCreateProgram(); if (program != 0) { GLES20.glAttachShader(program, vertexShader); checkGlError("glAttachShader"); GLES20.glAttachShader(program, pixelShader); checkGlError("glAttachShader"); GLES20.glLinkProgram(program); int[] linkStatus = new int[1]; GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); if (linkStatus[0] != GLES20.GL_TRUE) { Log.e(TAG, "Could not link program: "); Log.e(TAG, GLES20.glGetProgramInfoLog(program)); GLES20.glDeleteProgram(program); program = 0; } } return program; } private void checkGlError(String op) { int error; while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { Log.e(TAG, op + ": glError " + error); throw new RuntimeException(op + ": glError " + error); } } private static final int FLOAT_SIZE_BYTES = 4; private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES; private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3; private final float[] mTriangleVerticesData = { // X, Y, Z, U, V -0.5f, -0.5f, 0, 1.0f, 0.0f, 0.5f, -0.5f, 0, 0.0f, 0.0f, 0.5f, 0.5f, 0, 0.0f, 1.0f, -0.5f, -0.5f, 0, 1.0f, 0.0f, 0.5f, 0.5f, 0, 0.0f, 1.0f, -0.5f, 0.5f, 0, 1.0f, 1.0f, }; private FloatBuffer mTriangleVertices; private float[] mMVPMatrix = new float[16]; private float[] mProjMatrix = new float[16]; private float[] mMMatrix = new float[16]; private float[] mVMatrix = new float[16]; private int mProgram; private int mTextureID; private int muMVPMatrixHandle; private int maPositionHandle; 
private int maTextureHandle; // Native part public static native void nativeSetEnv(String name, String value); public static native void nativeInit(); public static native void nativeMouse( int x, int y, int action, int pointerId, int pressure, int radius ); public static native boolean nativeKey(int keyCode, int down, int unicode); public static native void nativeSetMouseUsed(); public static native void nativeSetMultitouchUsed(); public native void nativeResize(int width, int height); public native void nativeInitJavaCallbacks(); }
src/src/org/renpy/android/SDLSurfaceView.java
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // This string is autogenerated by ChangeAppSettings.sh, do not change spaces amount package org.renpy.android; import javax.microedition.khronos.egl.EGL10; import javax.microedition.khronos.egl.EGL11; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.egl.EGLContext; import javax.microedition.khronos.egl.EGLDisplay; import javax.microedition.khronos.egl.EGLSurface; import javax.microedition.khronos.opengles.GL; import javax.microedition.khronos.opengles.GL10; import android.opengl.GLES20; import android.opengl.Matrix; import android.app.Activity; import android.content.Context; import android.content.pm.ActivityInfo; import android.util.AttributeSet; import android.util.Log; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.opengl.GLSurfaceView; import android.view.MotionEvent; import android.view.KeyEvent; import android.os.Build; import android.os.PowerManager; import java.io.IOException; import java.io.InputStream; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.opengl.GLUtils; import java.nio.FloatBuffer; import java.nio.ByteBuffer; import java.nio.ByteOrder; import android.graphics.Color; import android.content.res.Resources; public class SDLSurfaceView extends SurfaceView implements SurfaceHolder.Callback, Runnable { private static String TAG = "SDLSurface"; private final String mVertexShader = "uniform mat4 uMVPMatrix;\n" + "attribute vec4 aPosition;\n" + "attribute vec2 aTextureCoord;\n" + "varying vec2 vTextureCoord;\n" + "void main() {\n" + " gl_Position = uMVPMatrix * aPosition;\n" + " vTextureCoord = aTextureCoord;\n" + "}\n"; private final String mFragmentShader = "precision mediump float;\n" + "varying vec2 vTextureCoord;\n" + "uniform sampler2D sTexture;\n" + "void main() {\n" + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + "}\n"; private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser { public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) { mRedSize = r; mGreenSize = g; mBlueSize = b; mAlphaSize = a; mDepthSize = depth; mStencilSize = stencil; } /* This EGL config specification is used to specify 2.0 rendering. * We use a minimum size of 4 bits for red/green/blue, but will * perform actual matching in chooseConfig() below. 
*/ private static int EGL_OPENGL_ES2_BIT = 4; private static int[] s_configAttribs2 = { EGL10.EGL_RED_SIZE, 4, EGL10.EGL_GREEN_SIZE, 4, EGL10.EGL_BLUE_SIZE, 4, EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL10.EGL_NONE }; public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) { /* Get the number of minimally matching EGL configurations */ int[] num_config = new int[1]; egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config); int numConfigs = num_config[0]; if (numConfigs <= 0) { throw new IllegalArgumentException("No configs match configSpec"); } /* Allocate then read the array of minimally matching EGL configs */ EGLConfig[] configs = new EGLConfig[numConfigs]; egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config); /* Now return the "best" one */ //printConfigs(egl, display, configs); return chooseConfig(egl, display, configs); } public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) { for(EGLConfig config : configs) { int d = findConfigAttrib(egl, display, config, EGL10.EGL_DEPTH_SIZE, 0); int s = findConfigAttrib(egl, display, config, EGL10.EGL_STENCIL_SIZE, 0); // We need at least mDepthSize and mStencilSize bits if (d < mDepthSize || s < mStencilSize) continue; // We want an *exact* match for red/green/blue/alpha int r = findConfigAttrib(egl, display, config, EGL10.EGL_RED_SIZE, 0); int g = findConfigAttrib(egl, display, config, EGL10.EGL_GREEN_SIZE, 0); int b = findConfigAttrib(egl, display, config, EGL10.EGL_BLUE_SIZE, 0); int a = findConfigAttrib(egl, display, config, EGL10.EGL_ALPHA_SIZE, 0); if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize) return config; } return null; } private int findConfigAttrib(EGL10 egl, EGLDisplay display, EGLConfig config, int attribute, int defaultValue) { if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) { return mValue[0]; } return defaultValue; } private void printConfigs(EGL10 egl, EGLDisplay display, EGLConfig[] configs) { int numConfigs = configs.length; Log.w(TAG, String.format("%d configurations", numConfigs)); for (int i = 0; i < numConfigs; i++) { Log.w(TAG, String.format("Configuration %d:\n", i)); printConfig(egl, display, configs[i]); } } private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config) { int[] attributes = { EGL10.EGL_BUFFER_SIZE, EGL10.EGL_ALPHA_SIZE, EGL10.EGL_BLUE_SIZE, EGL10.EGL_GREEN_SIZE, EGL10.EGL_RED_SIZE, EGL10.EGL_DEPTH_SIZE, EGL10.EGL_STENCIL_SIZE, EGL10.EGL_CONFIG_CAVEAT, EGL10.EGL_CONFIG_ID, EGL10.EGL_LEVEL, EGL10.EGL_MAX_PBUFFER_HEIGHT, EGL10.EGL_MAX_PBUFFER_PIXELS, EGL10.EGL_MAX_PBUFFER_WIDTH, EGL10.EGL_NATIVE_RENDERABLE, EGL10.EGL_NATIVE_VISUAL_ID, EGL10.EGL_NATIVE_VISUAL_TYPE, 0x3030, // EGL10.EGL_PRESERVED_RESOURCES, EGL10.EGL_SAMPLES, EGL10.EGL_SAMPLE_BUFFERS, EGL10.EGL_SURFACE_TYPE, EGL10.EGL_TRANSPARENT_TYPE, EGL10.EGL_TRANSPARENT_RED_VALUE, EGL10.EGL_TRANSPARENT_GREEN_VALUE, EGL10.EGL_TRANSPARENT_BLUE_VALUE, 0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB, 0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA, 0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL, 0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL, EGL10.EGL_LUMINANCE_SIZE, EGL10.EGL_ALPHA_MASK_SIZE, EGL10.EGL_COLOR_BUFFER_TYPE, EGL10.EGL_RENDERABLE_TYPE, 0x3042 // EGL10.EGL_CONFORMANT }; String[] names = { "EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE", "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE", "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT", "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT", "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH", 
"EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID", "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES", "EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE", "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE", "EGL_TRANSPARENT_GREEN_VALUE", "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB", "EGL_BIND_TO_TEXTURE_RGBA", "EGL_MIN_SWAP_INTERVAL", "EGL_MAX_SWAP_INTERVAL", "EGL_LUMINANCE_SIZE", "EGL_ALPHA_MASK_SIZE", "EGL_COLOR_BUFFER_TYPE", "EGL_RENDERABLE_TYPE", "EGL_CONFORMANT" }; int[] value = new int[1]; for (int i = 0; i < attributes.length; i++) { int attribute = attributes[i]; String name = names[i]; if ( egl.eglGetConfigAttrib(display, config, attribute, value)) { Log.w(TAG, String.format(" %s: %d\n", name, value[0])); } else { // Log.w(TAG, String.format(" %s: failed\n", name)); while (egl.eglGetError() != EGL10.EGL_SUCCESS); } } } // Subclasses can adjust these values: protected int mRedSize; protected int mGreenSize; protected int mBlueSize; protected int mAlphaSize; protected int mDepthSize; protected int mStencilSize; private int[] mValue = new int[1]; } // The activity we're a part of. private Activity mActivity; // Have we started yet? public boolean mStarted = false; // Is Python ready to receive input events? static boolean mInputActivated = false; // The number of swaps we should skip. Experimentally derived from // watching SDL initialize. // XXX Kivy no swap skips, because kivy draw when needed. // XXX If we lost our first frame, we have a black screen. private int mSwapSkips = 0; // The number of times we should clear the screen after swap. private int mClears = 2; // Has the display been changed? private boolean mChanged = false; // Are we running yet? private boolean mRunning = false; // The EGL used by our thread. private EGL10 mEgl = null; // The EGL Display used. private EGLDisplay mEglDisplay = null; // The EGL Context used. private EGLContext mEglContext = null; // The EGL Surface used. private EGLSurface mEglSurface = null; // The EGL Config used. private EGLConfig mEglConfig = null; // The user program is not participating in the pause protocol. public final int PAUSE_NOT_PARTICIPATING = 0; // A pause has not been requested by the OS. public final int PAUSE_NONE = 1; // A pause has been requested by Android, but the user program has // not bothered responding yet. public final int PAUSE_REQUEST = 2; // The user program is waiting in waitForResume. public final int PAUSE_WAIT_FOR_RESUME = 3; // This stores the state of the pause system. private int mPause = PAUSE_NOT_PARTICIPATING; private PowerManager.WakeLock wakeLock; // The width and height. (This should be set at startup time - // these values just prevent segfaults and divide by zero, etc.) int mWidth = 100; int mHeight = 100; // The name of the directory where the context stores its files. String mFilesDirectory = null; // The value of the argument passed in. String mArgument = null; // The resource manager we use. 
ResourceManager mResourceManager; public SDLSurfaceView(Activity act, String argument) { super(act); mActivity = act; mResourceManager = new ResourceManager(act); SurfaceHolder holder = getHolder(); holder.addCallback(this); holder.setType(SurfaceHolder.SURFACE_TYPE_GPU); mFilesDirectory = mActivity.getFilesDir().getAbsolutePath(); mArgument = argument; PowerManager pm = (PowerManager) act.getSystemService(Context.POWER_SERVICE); wakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "Screen On"); } /** * The user program should call this frequently to check if a * pause has been requested by android. If this ever returns * true, the user program should clean up and call waitForResume. */ public int checkPause() { if (mPause == PAUSE_NOT_PARTICIPATING) { mPause = PAUSE_NONE; } if (mPause == PAUSE_REQUEST) { return 1; } else { return 0; } } /** * The user program should call this quickly after checkPause * returns true. This causes the android application to sleep, * waiting for resume. While sleeping, it should not have any * activity. (Notably, it should stop all timers.) * * While we're waiting in this method, android is allowed to * kill us to reclaim memory, without any further warning. */ public void waitForResume() { synchronized (this) { mPause = PAUSE_WAIT_FOR_RESUME; // Notify any threads waiting in onPause. this.notifyAll(); while (mPause == PAUSE_WAIT_FOR_RESUME) { try { this.wait(); } catch (InterruptedException e) { } } } } /** * Inform the view that the activity is paused. The owner of this view must * call this method when the activity is paused. Calling this method will * pause the rendering thread. * Must not be called before a renderer has been set. */ public void onPause() { synchronized (this) { if (mPause == PAUSE_NONE) { mPause = PAUSE_REQUEST; while (mPause == PAUSE_REQUEST) { try { this.wait(); } catch (InterruptedException e) { // pass } } } } wakeLock.release(); } /** * Inform the view that the activity is resumed. The owner of this view must * call this method when the activity is resumed. Calling this method will * recreate the OpenGL display and resume the rendering * thread. * Must not be called before a renderer has been set. */ public void onResume() { synchronized (this) { if (mPause == PAUSE_WAIT_FOR_RESUME) { mPause = PAUSE_NONE; this.notifyAll(); } } wakeLock.acquire(); } /** * This method is part of the SurfaceHolder.Callback interface, and is * not normally called or subclassed by clients of GLSurfaceView. */ public void surfaceCreated(SurfaceHolder holder) { } /** * This method is part of the SurfaceHolder.Callback interface, and is * not normally called or subclassed by clients of GLSurfaceView. */ public void surfaceDestroyed(SurfaceHolder holder) { } /** * This method is part of the SurfaceHolder.Callback interface, and is * not normally called or subclassed by clients of GLSurfaceView. */ public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { mWidth = w; mHeight = h; if (mActivity.getRequestedOrientation() == ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE && mWidth < mHeight) { return; } if (mActivity.getRequestedOrientation() == ActivityInfo.SCREEN_ORIENTATION_PORTRAIT && mWidth > mHeight) { return; } if (!mRunning) { mRunning = true; new Thread(this).start(); } else { mChanged = true; } } public void run() { mEgl = (EGL10) EGLContext.getEGL(); mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); int[] version = new int[2]; mEgl.eglInitialize(mEglDisplay, version); // Pick an appropriate config. 
We just take the first config // the system offers to us, because anything more complicated // than that stands a really good chance of not working. int[] configSpec = { // RENDERABLE_TYPE = OpenGL ES is the default. EGL10.EGL_NONE }; EGLConfig[] configs = new EGLConfig[1]; int EGL_CONTEXT_CLIENT_VERSION = 0x3098; int[] num_config = new int[1]; int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; // Create an opengl es 2.0 surface Log.w(TAG, "Choose egl configuration"); int configToTest = 0; boolean configFound = false; while (true) { try { if (configToTest == 0) { Log.i(TAG, "Try to use graphics config R8G8B8A8S8"); ConfigChooser chooser = new ConfigChooser(8, 8, 8, 8, 0, 8); mEglConfig = chooser.chooseConfig(mEgl, mEglDisplay); } else if (configToTest == 1) { Log.i(TAG, "Try to use graphics config R5G6B5S8"); ConfigChooser chooser = new ConfigChooser(5, 6, 5, 0, 0, 8); mEglConfig = chooser.chooseConfig(mEgl, mEglDisplay); } else { Log.e(TAG, "Unable to found a correct surface for this device !"); break; } } catch (IllegalArgumentException e) { configToTest++; continue; } Log.w(TAG, "Create egl context"); mEglContext = mEgl.eglCreateContext(mEglDisplay, mEglConfig, EGL10.EGL_NO_CONTEXT, attrib_list); if (mEglContext == null) { Log.w(TAG, "Unable to create egl context with this configuration, try the next one."); configToTest++; continue; } Log.w(TAG, "Create egl surface"); if (!createSurface()) { Log.w(TAG, "Unable to create egl surface with this configuration, try the next one."); configToTest++; continue; } configFound = true; break; } if (!configFound) { System.exit(0); return; } Log.w(TAG, "Done"); waitForStart(); nativeResize(mWidth, mHeight); nativeInitJavaCallbacks(); nativeSetEnv("ANDROID_PRIVATE", mFilesDirectory); nativeSetEnv("ANDROID_ARGUMENT", mArgument); nativeSetEnv("PYTHONOPTIMIZE", "2"); nativeSetEnv("PYTHONHOME", mFilesDirectory); nativeSetEnv("PYTHONPATH", mArgument + ":" + mFilesDirectory + "/lib"); //nativeSetMouseUsed(); nativeSetMultitouchUsed(); nativeInit(); System.exit(0); } private void glCheck(GL10 gl) { int gle = gl.glGetError(); if (gle != gl.GL_NO_ERROR) { throw new RuntimeException("GL Error: " + gle); } } private void waitForStart() { int presplashId = mResourceManager.getIdentifier("presplash", "drawable"); InputStream is = mActivity.getResources().openRawResource(presplashId); Bitmap bitmap = null; try { bitmap = BitmapFactory.decodeStream(is); } finally { try { is.close(); } catch (IOException e) { } } mTriangleVertices = ByteBuffer.allocateDirect(mTriangleVerticesData.length * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer(); mTriangleVertices.put(mTriangleVerticesData).position(0); mProgram = createProgram(mVertexShader, mFragmentShader); if (mProgram == 0) { synchronized (this) { while (!mStarted) { try { this.wait(250); } catch (InterruptedException e) { continue; } } } return; } maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition"); checkGlError("glGetAttribLocation aPosition"); if (maPositionHandle == -1) { throw new RuntimeException("Could not get attrib location for aPosition"); } maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord"); checkGlError("glGetAttribLocation aTextureCoord"); if (maTextureHandle == -1) { throw new RuntimeException("Could not get attrib location for aTextureCoord"); } muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix"); checkGlError("glGetUniformLocation uMVPMatrix"); if (muMVPMatrixHandle == -1) { throw new RuntimeException("Could not 
get attrib location for uMVPMatrix"); } // Create our texture. This has to be done each time the // surface is created. int[] textures = new int[1]; GLES20.glGenTextures(1, textures, 0); mTextureID = textures[0]; GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); Matrix.setLookAtM(mVMatrix, 0, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f); GLES20.glViewport(0, 0, mWidth, mHeight); if (bitmap != null) { float mx = ((float)mWidth / bitmap.getWidth()); float my = ((float)mHeight / bitmap.getHeight()); Matrix.orthoM(mProjMatrix, 0, -mx, mx, my, -my, 0, 10); int value = bitmap.getPixel(0, 0); Color color = new Color(); GLES20.glClearColor( (float)color.red(value) / 255.0f, (float)color.green(value) / 255.0f, (float)color.blue(value) / 255.0f, 0.0f); } else { Matrix.orthoM(mProjMatrix, 0, -1, 1, -1, 1, 0, 10); GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); } GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT); GLES20.glUseProgram(mProgram); checkGlError("glUseProgram"); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID); mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET); GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); checkGlError("glVertexAttribPointer maPosition"); mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET); GLES20.glEnableVertexAttribArray(maPositionHandle); checkGlError("glEnableVertexAttribArray maPositionHandle"); GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); checkGlError("glVertexAttribPointer maTextureHandle"); GLES20.glEnableVertexAttribArray(maTextureHandle); checkGlError("glEnableVertexAttribArray maTextureHandle"); Matrix.setRotateM(mMMatrix, 0, 0, 0, 0, 1.0f); Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0); Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0); GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0); GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6); checkGlError("glDrawArrays"); mEgl.eglSwapBuffers(mEglDisplay, mEglSurface); // Wait to be notified it's okay to start Python. synchronized (this) { while (!mStarted) { // Draw & Flip. GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT); GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 6); mEgl.eglSwapBuffers(mEglDisplay, mEglSurface); try { this.wait(250); } catch (InterruptedException e) { continue; } } } GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); // Delete texture. GLES20.glDeleteTextures(1, textures, 0); if (bitmap != null) bitmap.recycle(); // Delete program GLES20.glDeleteProgram(mProgram); } public void start() { this.setFocusableInTouchMode(true); this.setFocusable(true); this.requestFocus(); synchronized (this) { mStarted = true; this.notify(); } } public boolean createSurface() { mChanged = false; // Destroy the old surface. if (mEglSurface != null) { /* * Unbind and destroy the old EGL surface, if * there is one. 
*/ mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT); mEgl.eglDestroySurface(mEglDisplay, mEglSurface); } // Create a new surface. mEglSurface = mEgl.eglCreateWindowSurface( mEglDisplay, mEglConfig, getHolder(), null); // Make the new surface current. boolean rv = mEgl.eglMakeCurrent( mEglDisplay, mEglSurface, mEglSurface, mEglContext); if (!rv) { mEglSurface = null; return false; } if (mStarted) { nativeResize(mWidth, mHeight); } return true; } public int swapBuffers() { // Prevent us from drawing too early, at startup. if (mSwapSkips-- > 0) { return 1; } // If the display has been changed, then disregard all the // rendering we've done to it, and make a new surface. // // Otherwise, swap the buffers. if (mChanged) { createSurface(); mClears = 2; return 0; } else { mEgl.eglSwapBuffers(mEglDisplay, mEglSurface); if (mClears-- != 0) { GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); } return 1; } } private static final int INVALID_POINTER_ID = -1; private int mActivePointerId = INVALID_POINTER_ID; @Override public boolean onTouchEvent(final MotionEvent event) { if (mInputActivated == false) return true; int action = event.getAction() & MotionEvent.ACTION_MASK; int sdlAction = -1; int pointerId = -1; int pointerIndex = -1; switch ( action ) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_POINTER_DOWN: sdlAction = 0; break; case MotionEvent.ACTION_MOVE: sdlAction = 2; break; case MotionEvent.ACTION_UP: case MotionEvent.ACTION_POINTER_UP: sdlAction = 1; break; } // http://android-developers.blogspot.com/2010/06/making-sense-of-multitouch.html switch ( action & MotionEvent.ACTION_MASK ) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_MOVE: case MotionEvent.ACTION_UP: pointerIndex = event.findPointerIndex(mActivePointerId); break; case MotionEvent.ACTION_POINTER_DOWN: case MotionEvent.ACTION_POINTER_UP: pointerIndex = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT; if ( action == MotionEvent.ACTION_POINTER_UP ) { pointerId = event.getPointerId(pointerIndex); if ( pointerId == mActivePointerId ) mActivePointerId = event.getPointerId(pointerIndex == 0 ? 
1 : 0); } break; } if ( sdlAction >= 0 ) { for ( int i = 0; i < event.getPointerCount(); i++ ) { if ( pointerIndex == -1 || pointerIndex == i ) { /** Log.i("python", String.format("mouse id=%d action=%d x=%f y=%f", event.getPointerId(i), sdlAction, event.getX(i), event.getY(i) )); **/ SDLSurfaceView.nativeMouse( (int)event.getX(i), (int)event.getY(i), sdlAction, event.getPointerId(i), (int)(event.getPressure(i) * 1000.0), (int)(event.getSize(i) * 1000.0)); } } } synchronized (this) { try { this.wait(1000 / 60); } catch (InterruptedException e) { } } return true; }; @Override public boolean onKeyDown(int keyCode, final KeyEvent event) { Log.i("python", String.format("key down %d", keyCode)); if (mInputActivated && nativeKey(keyCode, 1, event.getUnicodeChar())) { return true; } else { return super.onKeyDown(keyCode, event); } } @Override public boolean onKeyUp(int keyCode, final KeyEvent event) { Log.i("python", String.format("key up %d", keyCode)); if (mInputActivated && nativeKey(keyCode, 0, event.getUnicodeChar())) { return true; } else { return super.onKeyUp(keyCode, event); } } static void activateInput() { mInputActivated = true; } // Taken from the "GLES20TriangleRenderer" in Android SDK private int loadShader(int shaderType, String source) { int shader = GLES20.glCreateShader(shaderType); if (shader != 0) { GLES20.glShaderSource(shader, source); GLES20.glCompileShader(shader); int[] compiled = new int[1]; GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); if (compiled[0] == 0) { Log.e(TAG, "Could not compile shader " + shaderType + ":"); Log.e(TAG, GLES20.glGetShaderInfoLog(shader)); GLES20.glDeleteShader(shader); shader = 0; } } return shader; } private int createProgram(String vertexSource, String fragmentSource) { int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); if (vertexShader == 0) { return 0; } int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); if (pixelShader == 0) { return 0; } int program = GLES20.glCreateProgram(); if (program != 0) { GLES20.glAttachShader(program, vertexShader); checkGlError("glAttachShader"); GLES20.glAttachShader(program, pixelShader); checkGlError("glAttachShader"); GLES20.glLinkProgram(program); int[] linkStatus = new int[1]; GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); if (linkStatus[0] != GLES20.GL_TRUE) { Log.e(TAG, "Could not link program: "); Log.e(TAG, GLES20.glGetProgramInfoLog(program)); GLES20.glDeleteProgram(program); program = 0; } } return program; } private void checkGlError(String op) { int error; while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { Log.e(TAG, op + ": glError " + error); throw new RuntimeException(op + ": glError " + error); } } private static final int FLOAT_SIZE_BYTES = 4; private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES; private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3; private final float[] mTriangleVerticesData = { // X, Y, Z, U, V -0.5f, -0.5f, 0, 1.0f, 0.0f, 0.5f, -0.5f, 0, 0.0f, 0.0f, 0.5f, 0.5f, 0, 0.0f, 1.0f, -0.5f, -0.5f, 0, 1.0f, 0.0f, 0.5f, 0.5f, 0, 0.0f, 1.0f, -0.5f, 0.5f, 0, 1.0f, 1.0f, }; private FloatBuffer mTriangleVertices; private float[] mMVPMatrix = new float[16]; private float[] mProjMatrix = new float[16]; private float[] mMMatrix = new float[16]; private float[] mVMatrix = new float[16]; private int mProgram; private int mTextureID; private int muMVPMatrixHandle; private int maPositionHandle; 
private int maTextureHandle; // Native part public static native void nativeSetEnv(String name, String value); public static native void nativeInit(); public static native void nativeMouse( int x, int y, int action, int pointerId, int pressure, int radius ); public static native boolean nativeKey(int keyCode, int down, int unicode); public static native void nativeSetMouseUsed(); public static native void nativeSetMultitouchUsed(); public native void nativeResize(int width, int height); public native void nativeInitJavaCallbacks(); }
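The checkPause()/waitForResume() pair documented in the javadoc of the class above defines a cooperative pause protocol: the embedded program is expected to poll checkPause() from its main loop, stop its own activity when a pause is requested, and block in waitForResume() until Android resumes it. A minimal caller sketch follows; GameLoop and its drawFrame/stopTimers/restartTimers hooks are hypothetical names introduced only for illustration and are not part of the recorded sources.

import org.renpy.android.SDLSurfaceView;

// Illustrative consumer of the pause protocol described in the javadoc above.
// Only checkPause(), waitForResume() and swapBuffers() come from the recorded
// class; everything else here is a placeholder.
class GameLoop implements Runnable {
    private final SDLSurfaceView view;

    GameLoop(SDLSurfaceView view) {
        this.view = view;
    }

    @Override
    public void run() {
        while (true) {
            if (view.checkPause() == 1) {
                stopTimers();          // stop all periodic work before sleeping
                view.waitForResume();  // blocks until onResume() notifies the view
                restartTimers();       // re-arm periodic work after resuming
            }
            drawFrame();               // render one frame
            view.swapBuffers();
        }
    }

    private void drawFrame() { /* render one frame (placeholder) */ }
    private void stopTimers() { /* cancel timers (placeholder) */ }
    private void restartTimers() { /* restart timers (placeholder) */ }
}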
fix size of presplash, which was incorrectly displayed at 50% of its initial size.
src/src/org/renpy/android/SDLSurfaceView.java
fix size of presplash, which was incorrectly displayed at 50% of its initial size.
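The change this commit message refers to is confined to the presplash projection set up in waitForStart(): the new contents divide the orthographic half-extents by two. Since the textured quad's vertices span -0.5..0.5, halving the extents doubles the quad's on-screen size, so the presplash bitmap is drawn at its full size instead of 50%. The relevant lines, excerpted from the old and new contents recorded above (all identifiers belong to the surrounding class):

// old_contents (presplash rendered at half size):
//     float mx = ((float)mWidth / bitmap.getWidth());
//     float my = ((float)mHeight / bitmap.getHeight());
//     Matrix.orthoM(mProjMatrix, 0, -mx, mx, my, -my, 0, 10);
// new_contents (presplash rendered at full size):
float mx = ((float)mWidth / bitmap.getWidth()) / 2.0f;
float my = ((float)mHeight / bitmap.getHeight()) / 2.0f;
Matrix.orthoM(mProjMatrix, 0, -mx, mx, my, -my, 0, 10);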
Java
mit
061672223e92135c72f8a3dbe51a1d735c7e921c
0
mhogrefe/wheels
package mho.wheels.misc; import org.junit.Test; import java.math.BigDecimal; import java.math.BigInteger; import static mho.wheels.misc.Readers.*; import static org.junit.Assert.*; public class ReadersTest { @Test public void testConstants() { aeq(MAX_POSITIVE_BYTE_LENGTH, 3); aeq(MAX_POSITIVE_SHORT_LENGTH, 5); aeq(MAX_POSITIVE_INTEGER_LENGTH, 10); aeq(MAX_POSITIVE_LONG_LENGTH, 19); } @Test public void testReadBoolean() { aeq(readBoolean("false").get(), "false"); aeq(readBoolean("true").get(), "true"); assertFalse(readBoolean(" true").isPresent()); assertFalse(readBoolean("TRUE").isPresent()); assertFalse(readBoolean("true ").isPresent()); assertFalse(readBoolean("").isPresent()); assertFalse(readBoolean("dsfsdfgd").isPresent()); } @Test public void testFindBooleanIn() { aeq(findBooleanIn("true").get(), "(true, 0)"); aeq(findBooleanIn("false").get(), "(false, 0)"); aeq(findBooleanIn("xxtruefalsexx").get(), "(true, 2)"); aeq(findBooleanIn("xxfalsetruexx").get(), "(false, 2)"); assertFalse(findOrderingIn("hello").isPresent()); assertFalse(findOrderingIn("").isPresent()); } public void testReadOrdering() { aeq(readOrdering("LT").get(), "LT"); aeq(readOrdering("EQ").get(), "EQ"); aeq(readOrdering("GT").get(), "GT"); assertFalse(readOrdering(" LT").isPresent()); assertFalse(readOrdering("eq").isPresent()); assertFalse(readOrdering("gt ").isPresent()); assertFalse(readOrdering("").isPresent()); assertFalse(readOrdering("dsfsdfgd").isPresent()); } @Test public void testFindOrderingIn() { aeq(findOrderingIn("EQ").get(), "(EQ, 0)"); aeq(findOrderingIn("LT").get(), "(LT, 0)"); aeq(findOrderingIn("BELT").get(), "(LT, 2)"); aeq(findOrderingIn("EGGTOWER").get(), "(GT, 2)"); assertFalse(findOrderingIn("hello").isPresent()); assertFalse(findOrderingIn("").isPresent()); } public void testReadRoundingMode() { aeq(readRoundingMode("UP").get(), "UP"); aeq(readRoundingMode("UNNECESSARY").get(), "UNNECESSARY"); aeq(readRoundingMode("HALF_EVEN").get(), "HALF_EVEN"); assertFalse(readRoundingMode(" DOWN").isPresent()); assertFalse(readRoundingMode("HALF-EVEN").isPresent()); assertFalse(readRoundingMode("FLOOR ").isPresent()); assertFalse(readRoundingMode("").isPresent()); assertFalse(readRoundingMode("dsfsdfgd").isPresent()); } @Test public void testFindRoundingModeIn() { aeq(findRoundingModeIn("HALF_UP").get(), "(HALF_UP, 0)"); aeq(findRoundingModeIn("CEILING").get(), "(CEILING, 0)"); aeq(findRoundingModeIn("UPSIDE-DOWN").get(), "(UP, 0)"); aeq(findRoundingModeIn("JLNUIDOWNJLNILN").get(), "(DOWN, 5)"); assertFalse(findRoundingModeIn("hello").isPresent()); assertFalse(findRoundingModeIn("").isPresent()); } @Test public void testReadBigInteger() { aeq(readBigInteger("0").get(), "0"); aeq(readBigInteger("5").get(), "5"); aeq(readBigInteger("314159265358").get(), "314159265358"); aeq(readBigInteger("-314159265358").get(), "-314159265358"); assertFalse(readBigInteger(" 1").isPresent()); assertFalse(readBigInteger("00").isPresent()); assertFalse(readBigInteger("-0").isPresent()); assertFalse(readBigInteger("0xff").isPresent()); assertFalse(readBigInteger("0xff").isPresent()); assertFalse(readBigInteger("2 ").isPresent()); assertFalse(readBigInteger("--1").isPresent()); assertFalse(readBigInteger("1-2").isPresent()); assertFalse(readBigInteger("+4").isPresent()); } @Test public void testFindBigIntegerIn() { aeq(findBigIntegerIn("abcd1234xyz").get(), "(1234, 4)"); aeq(findBigIntegerIn("0123").get(), "(0, 0)"); aeq(findBigIntegerIn("a-23").get(), "(-23, 1)"); aeq(findBigIntegerIn("---34--4").get(), "(-34, 2)"); 
aeq(findBigIntegerIn(" 20.1 ").get(), "(20, 1)"); assertFalse(findBigIntegerIn("").isPresent()); assertFalse(findBigIntegerIn("hello").isPresent()); assertFalse(findBigIntegerIn("vdfsvfbf").isPresent()); } @Test public void testReadByte() { aeq(readByte("0").get(), "0"); aeq(readByte("5").get(), "5"); aeq(readByte("-100").get(), "-100"); aeq(readByte(Integer.toString(Byte.MAX_VALUE)).get(), "127"); aeq(readByte(Integer.toString(Byte.MIN_VALUE)).get(), "-128"); assertFalse(readByte(Integer.toString(Byte.MAX_VALUE + 1)).isPresent()); assertFalse(readByte(Integer.toString(Byte.MIN_VALUE - 1)).isPresent()); assertFalse(readByte(" 1").isPresent()); assertFalse(readByte("00").isPresent()); assertFalse(readByte("-0").isPresent()); assertFalse(readByte("0xff").isPresent()); assertFalse(readByte("0xff").isPresent()); assertFalse(readByte("2 ").isPresent()); assertFalse(readByte("--1").isPresent()); assertFalse(readByte("1-2").isPresent()); assertFalse(readByte("+4").isPresent()); } @Test public void testFindByteIn() { aeq(findByteIn("abcd1234xyz").get(), "(123, 4)"); aeq(findByteIn("abcd8234xyz").get(), "(82, 4)"); aeq(findByteIn("0123").get(), "(0, 0)"); aeq(findByteIn("a-23").get(), "(-23, 1)"); aeq(findByteIn("---34--4").get(), "(-34, 2)"); aeq(findByteIn(" 20.1 ").get(), "(20, 1)"); aeq(findByteIn("abcd" + Byte.MAX_VALUE + "xyz").get(), "(127, 4)"); aeq(findByteIn("abcd" + Byte.MIN_VALUE + "xyz").get(), "(-128, 4)"); aeq(findByteIn("abcd" + (Byte.MAX_VALUE + 1) + "xyz").get(), "(12, 4)"); aeq(findByteIn("abcd" + (Byte.MIN_VALUE - 1) + "xyz").get(), "(-12, 4)"); assertFalse(findByteIn("").isPresent()); assertFalse(findByteIn("hello").isPresent()); assertFalse(findByteIn("vdfsvfbf").isPresent()); } @Test public void testReadShort() { aeq(readShort("0").get(), "0"); aeq(readShort("5").get(), "5"); aeq(readShort("-100").get(), "-100"); aeq(readShort(Integer.toString(Short.MAX_VALUE)).get(), "32767"); aeq(readShort(Integer.toString(Short.MIN_VALUE)).get(), "-32768"); assertFalse(readShort(Integer.toString(Short.MAX_VALUE + 1)).isPresent()); assertFalse(readShort(Integer.toString(Short.MIN_VALUE - 1)).isPresent()); assertFalse(readShort(" 1").isPresent()); assertFalse(readShort("00").isPresent()); assertFalse(readShort("-0").isPresent()); assertFalse(readShort("0xff").isPresent()); assertFalse(readShort("0xff").isPresent()); assertFalse(readShort("2 ").isPresent()); assertFalse(readShort("--1").isPresent()); assertFalse(readShort("1-2").isPresent()); assertFalse(readShort("+4").isPresent()); } @Test public void testFindShortIn() { aeq(findShortIn("abcd1234xyz").get(), "(1234, 4)"); aeq(findShortIn("abcd8234xyz").get(), "(8234, 4)"); aeq(findShortIn("0123").get(), "(0, 0)"); aeq(findShortIn("a-23").get(), "(-23, 1)"); aeq(findShortIn("---34--4").get(), "(-34, 2)"); aeq(findShortIn(" 20.1 ").get(), "(20, 1)"); aeq(findShortIn("abcd" + Short.MAX_VALUE + "xyz").get(), "(32767, 4)"); aeq(findShortIn("abcd" + Short.MIN_VALUE + "xyz").get(), "(-32768, 4)"); aeq(findShortIn("abcd" + (Short.MAX_VALUE + 1) + "xyz").get(), "(3276, 4)"); aeq(findShortIn("abcd" + (Short.MIN_VALUE - 1) + "xyz").get(), "(-3276, 4)"); assertFalse(findShortIn("").isPresent()); assertFalse(findShortIn("hello").isPresent()); assertFalse(findShortIn("vdfsvfbf").isPresent()); } @Test public void testReadInteger() { aeq(readInteger("0").get(), "0"); aeq(readInteger("5").get(), "5"); aeq(readInteger("-100").get(), "-100"); aeq(readInteger(Integer.toString(Integer.MAX_VALUE)).get(), "2147483647"); 
aeq(readInteger(Integer.toString(Integer.MIN_VALUE)).get(), "-2147483648"); assertFalse(readInteger(Long.toString((long) Integer.MAX_VALUE + 1)).isPresent()); assertFalse(readInteger(Long.toString((long) Integer.MIN_VALUE - 1)).isPresent()); assertFalse(readInteger(" 1").isPresent()); assertFalse(readInteger("00").isPresent()); assertFalse(readInteger("-0").isPresent()); assertFalse(readInteger("0xff").isPresent()); assertFalse(readInteger("0xff").isPresent()); assertFalse(readInteger("2 ").isPresent()); assertFalse(readInteger("--1").isPresent()); assertFalse(readInteger("1-2").isPresent()); assertFalse(readInteger("+4").isPresent()); } @Test public void testFindIntegerIn() { aeq(findIntegerIn("abcd1234xyz").get(), "(1234, 4)"); aeq(findIntegerIn("abcd8234xyz").get(), "(8234, 4)"); aeq(findIntegerIn("0123").get(), "(0, 0)"); aeq(findIntegerIn("a-23").get(), "(-23, 1)"); aeq(findIntegerIn("---34--4").get(), "(-34, 2)"); aeq(findIntegerIn(" 20.1 ").get(), "(20, 1)"); aeq(findIntegerIn("abcd" + Integer.MAX_VALUE + "xyz").get(), "(2147483647, 4)"); aeq(findIntegerIn("abcd" + Integer.MIN_VALUE + "xyz").get(), "(-2147483648, 4)"); aeq(findIntegerIn("abcd" + ((long) Integer.MAX_VALUE + 1) + "xyz").get(), "(214748364, 4)"); aeq(findIntegerIn("abcd" + ((long) Integer.MIN_VALUE - 1) + "xyz").get(), "(-214748364, 4)"); assertFalse(findIntegerIn("").isPresent()); assertFalse(findIntegerIn("hello").isPresent()); assertFalse(findIntegerIn("vdfsvfbf").isPresent()); } @Test public void testReadLong() { aeq(readLong("0").get(), "0"); aeq(readLong("5").get(), "5"); aeq(readLong("-100").get(), "-100"); aeq(readLong(Long.toString(Long.MAX_VALUE)).get(), "9223372036854775807"); aeq(readLong(Long.toString(Long.MIN_VALUE)).get(), "-9223372036854775808"); assertFalse(readLong(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE).toString()).isPresent()); assertFalse(readLong(BigInteger.valueOf(Long.MIN_VALUE).subtract(BigInteger.ONE).toString()).isPresent()); assertFalse(readLong(" 1").isPresent()); assertFalse(readLong("00").isPresent()); assertFalse(readLong("-0").isPresent()); assertFalse(readLong("0xff").isPresent()); assertFalse(readLong("0xff").isPresent()); assertFalse(readLong("2 ").isPresent()); assertFalse(readLong("--1").isPresent()); assertFalse(readLong("1-2").isPresent()); assertFalse(readLong("+4").isPresent()); } @Test public void testFindLongIn() { aeq(findLongIn("abcd1234xyz").get(), "(1234, 4)"); aeq(findLongIn("abcd8234xyz").get(), "(8234, 4)"); aeq(findLongIn("0123").get(), "(0, 0)"); aeq(findLongIn("a-23").get(), "(-23, 1)"); aeq(findLongIn("---34--4").get(), "(-34, 2)"); aeq(findLongIn(" 20.1 ").get(), "(20, 1)"); aeq(findLongIn("abcd" + Long.MAX_VALUE + "xyz").get(), "(9223372036854775807, 4)"); aeq(findLongIn("abcd" + Long.MIN_VALUE + "xyz").get(), "(-9223372036854775808, 4)"); aeq( findLongIn("abcd" + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE) + "xyz").get(), "(922337203685477580, 4)" ); aeq( findLongIn("abcd" + BigInteger.valueOf(Long.MIN_VALUE).subtract(BigInteger.ONE) + "xyz").get(), "(-922337203685477580, 4)" ); assertFalse(findLongIn("").isPresent()); assertFalse(findLongIn("hello").isPresent()); assertFalse(findLongIn("vdfsvfbf").isPresent()); } @Test public void testReadFloat() { aeq(readFloat("0.0").get(), "0.0"); aeq(readFloat("-0.0").get(), "-0.0"); aeq(readFloat("5.0").get(), "5.0"); aeq(readFloat("-100.0").get(), "-100.0"); aeq(readFloat("1.0E10").get(), "1.0E10"); aeq(readFloat("1.0E-10").get(), "1.0E-10"); aeq(readFloat("1.234").get(), "1.234"); 
aeq(readFloat("1.111111").get(), "1.111111"); aeq(readFloat("NaN").get(), "NaN"); aeq(readFloat("Infinity").get(), "Infinity"); aeq(readFloat("-Infinity").get(), "-Infinity"); assertFalse(readFloat("1.1111111").isPresent()); assertFalse(readFloat("1.0e10").isPresent()); assertFalse(readFloat("1.0e-10").isPresent()); assertFalse(readFloat(".").isPresent()); assertFalse(readFloat("0.").isPresent()); assertFalse(readFloat(".0").isPresent()); assertFalse(readFloat(" 1.0").isPresent()); assertFalse(readFloat("--1.0").isPresent()); } @Test public void testFindFloatIn() { aeq(findFloatIn("abcd1234.0xyz").get(), "(1234.0, 4)"); aeq(findFloatIn("abcd823.4xyz").get(), "(823.4, 4)"); aeq(findFloatIn("0.0.123").get(), "(0.0, 0)"); aeq(findFloatIn("a-2.3E8z").get(), "(-2.3E8, 1)"); aeq(findFloatIn("a-2.3E10z").get(), "(-2.3, 1)"); aeq(findFloatIn("---34.4-").get(), "(-34.4, 2)"); aeq(findFloatIn(" 20.1 ").get(), "(20.1, 1)"); aeq(findFloatIn("AnAnANaNAN").get(), "(NaN, 5)"); aeq(findFloatIn("1.1111111111111111111").get(), "(1.111111, 0)"); assertFalse(findFloatIn("").isPresent()); assertFalse(findFloatIn("3").isPresent()); assertFalse(findFloatIn("hello").isPresent()); assertFalse(findFloatIn("vdfsvfbf").isPresent()); } @Test public void testReadDouble() { aeq(readDouble("0.0").get(), "0.0"); aeq(readDouble("-0.0").get(), "-0.0"); aeq(readDouble("5.0").get(), "5.0"); aeq(readDouble("-100.0").get(), "-100.0"); aeq(readDouble("1.0E10").get(), "1.0E10"); aeq(readDouble("1.0E-10").get(), "1.0E-10"); aeq(readDouble("1.234").get(), "1.234"); aeq(readDouble("1.111111111111111").get(), "1.111111111111111"); aeq(readDouble("NaN").get(), "NaN"); aeq(readDouble("Infinity").get(), "Infinity"); aeq(readDouble("-Infinity").get(), "-Infinity"); assertFalse(readDouble("1.1111111111111111").isPresent()); assertFalse(readDouble("1.0e10").isPresent()); assertFalse(readDouble("1.0e-10").isPresent()); assertFalse(readDouble(".").isPresent()); assertFalse(readDouble("0.").isPresent()); assertFalse(readDouble(".0").isPresent()); assertFalse(readDouble(" 1.0").isPresent()); assertFalse(readDouble("--1.0").isPresent()); } @Test public void testFindDoubleIn() { aeq(findDoubleIn("abcd1234.0xyz").get(), "(1234.0, 4)"); aeq(findDoubleIn("abcd823.4xyz").get(), "(823.4, 4)"); aeq(findDoubleIn("0.0.123").get(), "(0.0, 0)"); aeq(findDoubleIn("a-2.3E8z").get(), "(-2.3E8, 1)"); aeq(findDoubleIn("a-2.3E1000z").get(), "(-2.3E100, 1)"); aeq(findDoubleIn("---34.4-").get(), "(-34.4, 2)"); aeq(findDoubleIn(" 20.1 ").get(), "(20.1, 1)"); aeq(findDoubleIn("AnAnANaNAN").get(), "(NaN, 5)"); aeq(findDoubleIn("1.1111111111111111111").get(), "(1.111111111111111, 0)"); assertFalse(findDoubleIn("").isPresent()); assertFalse(findDoubleIn("3").isPresent()); assertFalse(findDoubleIn("hello").isPresent()); assertFalse(findDoubleIn("vdfsvfbf").isPresent()); } @Test public void testReadBigDecimal() { aeq(readBigDecimal("0.0").get(), "0.0"); aeq(readBigDecimal("5.0").get(), "5.0"); aeq(readBigDecimal("-100.0").get(), "-100.0"); aeq(readBigDecimal("1.0E+10").get(), "1.0E+10"); aeq(readBigDecimal("1.0E-10").get(), "1.0E-10"); aeq(readBigDecimal("1.234").get(), "1.234"); aeq(readBigDecimal("1.111111111111111").get(), "1.111111111111111"); assertFalse(readBigDecimal("1.0e10").isPresent()); assertFalse(readBigDecimal("1.0e-10").isPresent()); assertFalse(readBigDecimal(".").isPresent()); assertFalse(readBigDecimal("0.").isPresent()); assertFalse(readBigDecimal(".0").isPresent()); assertFalse(readBigDecimal(" 1.0").isPresent()); 
assertFalse(readBigDecimal("--1.0").isPresent()); assertFalse(readBigDecimal("-0.0").isPresent()); assertFalse(readBigDecimal("NaN").isPresent()); assertFalse(readBigDecimal("Infinity").isPresent()); } @Test public void testFindBigDecimalIn() { aeq(findBigDecimalIn("abcd1234.0xyz").get(), "(1234.0, 4)"); aeq(findBigDecimalIn("abcd823.4xyz").get(), "(823.4, 4)"); aeq(findBigDecimalIn("3").get(), "(3, 0)"); aeq(findBigDecimalIn("00").get(), "(0, 0)"); aeq(findBigDecimalIn("0.0.123").get(), "(0.0, 0)"); aeq(findBigDecimalIn("a-2.3E+8z").get(), "(-2.3E+8, 1)"); aeq(findBigDecimalIn("a-2.3E+1000z").get(), "(-2.3E+1000, 1)"); aeq(findBigDecimalIn("---34.4-").get(), "(-34.4, 2)"); aeq(findBigDecimalIn(" 20.1 ").get(), "(20.1, 1)"); aeq(findBigDecimalIn("1.1111111111111111111").get(), "(1.1111111111111111111, 0)"); assertFalse(findBigDecimalIn("").isPresent()); assertFalse(findBigDecimalIn("hello").isPresent()); assertFalse(findBigDecimalIn("vdfsvfbf").isPresent()); } @Test public void testReadCharacter() { aeq(readCharacter("a").get(), "a"); aeq(readCharacter("ø").get(), "ø"); assertFalse(readCharacter("hi").isPresent()); assertFalse(readCharacter("").isPresent()); } @Test public void testFindCharacterIn() { aeq(findCharacterIn("Hello").get(), "(H, 0)"); aeq(findCharacterIn("ø").get(), "(ø, 0)"); assertFalse(findCharacterIn("").isPresent()); } @Test public void testReadString() { aeq(readString("Hello").get(), "Hello"); aeq(readString("ø").get(), "ø"); aeq(readString("").get(), ""); } private static void aeq(Object a, Object b) { assertEquals(a.toString(), b.toString()); } }
src/test/java/mho/wheels/misc/ReadersTest.java
package mho.wheels.misc; import org.junit.Test; import java.math.BigDecimal; import java.math.BigInteger; import static mho.wheels.misc.Readers.*; import static org.junit.Assert.*; public class ReadersTest { @Test public void testConstants() { aeq(MAX_POSITIVE_BYTE_LENGTH, 3); aeq(MAX_POSITIVE_SHORT_LENGTH, 5); aeq(MAX_POSITIVE_INTEGER_LENGTH, 10); aeq(MAX_POSITIVE_LONG_LENGTH, 19); } @Test public void testReadBoolean() { aeq(readBoolean("false").get(), "false"); aeq(readBoolean("true").get(), "true"); assertFalse(readBoolean(" true").isPresent()); assertFalse(readBoolean("TRUE").isPresent()); assertFalse(readBoolean("true ").isPresent()); assertFalse(readBoolean("").isPresent()); assertFalse(readBoolean("dsfsdfgd").isPresent()); } @Test public void testFindBooleanIn() { aeq(findBooleanIn("true").get(), "(true, 0)"); aeq(findBooleanIn("false").get(), "(false, 0)"); aeq(findBooleanIn("xxtruefalsexx").get(), "(true, 2)"); aeq(findBooleanIn("xxfalsetruexx").get(), "(false, 2)"); assertFalse(findOrderingIn("hello").isPresent()); assertFalse(findOrderingIn("").isPresent()); } public void testReadOrdering() { aeq(readOrdering("LT").get(), "LT"); aeq(readOrdering("EQ").get(), "EQ"); aeq(readOrdering("GT").get(), "GT"); assertFalse(readOrdering(" LT").isPresent()); assertFalse(readOrdering("eq").isPresent()); assertFalse(readOrdering("gt ").isPresent()); assertFalse(readOrdering("").isPresent()); assertFalse(readOrdering("dsfsdfgd").isPresent()); } @Test public void testFindOrderingIn() { aeq(findOrderingIn("EQ").get(), "(EQ, 0)"); aeq(findOrderingIn("LT").get(), "(LT, 0)"); aeq(findOrderingIn("BELT").get(), "(LT, 2)"); aeq(findOrderingIn("EGGTOWER").get(), "(GT, 2)"); assertFalse(findOrderingIn("hello").isPresent()); assertFalse(findOrderingIn("").isPresent()); } public void testReadRoundingMode() { aeq(readRoundingMode("UP").get(), "UP"); aeq(readRoundingMode("UNNECESSARY").get(), "UNNECESSARY"); aeq(readRoundingMode("HALF_EVEN").get(), "HALF_EVEN"); assertFalse(readRoundingMode(" DOWN").isPresent()); assertFalse(readRoundingMode("HALF-EVEN").isPresent()); assertFalse(readRoundingMode("FLOOR ").isPresent()); assertFalse(readRoundingMode("").isPresent()); assertFalse(readRoundingMode("dsfsdfgd").isPresent()); } @Test public void testFindRoundingModeIn() { aeq(findRoundingModeIn("HALF_UP").get(), "(HALF_UP, 0)"); aeq(findRoundingModeIn("CEILING").get(), "(CEILING, 0)"); aeq(findRoundingModeIn("UPSIDE-DOWN").get(), "(UP, 0)"); aeq(findRoundingModeIn("JLNUIDOWNJLNILN").get(), "(DOWN, 5)"); assertFalse(findRoundingModeIn("hello").isPresent()); assertFalse(findRoundingModeIn("").isPresent()); } @Test public void testReadBigInteger() { aeq(readBigInteger("0").get(), "0"); aeq(readBigInteger("5").get(), "5"); aeq(readBigInteger("314159265358").get(), "314159265358"); aeq(readBigInteger("-314159265358").get(), "-314159265358"); assertFalse(readBigInteger(" 1").isPresent()); assertFalse(readBigInteger("00").isPresent()); assertFalse(readBigInteger("-0").isPresent()); assertFalse(readBigInteger("0xff").isPresent()); assertFalse(readBigInteger("0xff").isPresent()); assertFalse(readBigInteger("2 ").isPresent()); assertFalse(readBigInteger("--1").isPresent()); assertFalse(readBigInteger("1-2").isPresent()); assertFalse(readBigInteger("+4").isPresent()); } @Test public void testFindBigIntegerIn() { aeq(findBigIntegerIn("abcd1234xyz").get(), "(1234, 4)"); aeq(findBigIntegerIn("0123").get(), "(0, 0)"); aeq(findBigIntegerIn("a-23").get(), "(-23, 1)"); aeq(findBigIntegerIn("---34--4").get(), "(-34, 2)"); 
aeq(findBigIntegerIn(" 20.1 ").get(), "(20, 1)"); assertFalse(findBigIntegerIn("").isPresent()); assertFalse(findBigIntegerIn("hello").isPresent()); assertFalse(findBigIntegerIn("vdfsvfbf").isPresent()); } @Test public void testReadByte() { aeq(readByte("0").get(), "0"); aeq(readByte("5").get(), "5"); aeq(readByte("-100").get(), "-100"); aeq(readByte(Integer.toString(Byte.MAX_VALUE)).get(), "127"); aeq(readByte(Integer.toString(Byte.MIN_VALUE)).get(), "-128"); assertFalse(readByte(Integer.toString(Byte.MAX_VALUE + 1)).isPresent()); assertFalse(readByte(Integer.toString(Byte.MIN_VALUE - 1)).isPresent()); assertFalse(readByte(" 1").isPresent()); assertFalse(readByte("00").isPresent()); assertFalse(readByte("-0").isPresent()); assertFalse(readByte("0xff").isPresent()); assertFalse(readByte("0xff").isPresent()); assertFalse(readByte("2 ").isPresent()); assertFalse(readByte("--1").isPresent()); assertFalse(readByte("1-2").isPresent()); assertFalse(readByte("+4").isPresent()); } @Test public void testFindByteIn() { aeq(findByteIn("abcd1234xyz").get(), "(123, 4)"); aeq(findByteIn("abcd8234xyz").get(), "(82, 4)"); aeq(findByteIn("0123").get(), "(0, 0)"); aeq(findByteIn("a-23").get(), "(-23, 1)"); aeq(findByteIn("---34--4").get(), "(-34, 2)"); aeq(findByteIn(" 20.1 ").get(), "(20, 1)"); aeq(findByteIn("abcd" + Byte.MAX_VALUE + "xyz").get(), "(127, 4)"); aeq(findByteIn("abcd" + Byte.MIN_VALUE + "xyz").get(), "(-128, 4)"); aeq(findByteIn("abcd" + (Byte.MAX_VALUE + 1) + "xyz").get(), "(12, 4)"); aeq(findByteIn("abcd" + (Byte.MIN_VALUE - 1) + "xyz").get(), "(-12, 4)"); assertFalse(findByteIn("").isPresent()); assertFalse(findByteIn("hello").isPresent()); assertFalse(findByteIn("vdfsvfbf").isPresent()); } @Test public void testReadShort() { aeq(readShort("0").get(), "0"); aeq(readShort("5").get(), "5"); aeq(readShort("-100").get(), "-100"); aeq(readShort(Integer.toString(Short.MAX_VALUE)).get(), "32767"); aeq(readShort(Integer.toString(Short.MIN_VALUE)).get(), "-32768"); assertFalse(readShort(Integer.toString(Short.MAX_VALUE + 1)).isPresent()); assertFalse(readShort(Integer.toString(Short.MIN_VALUE - 1)).isPresent()); assertFalse(readShort(" 1").isPresent()); assertFalse(readShort("00").isPresent()); assertFalse(readShort("-0").isPresent()); assertFalse(readShort("0xff").isPresent()); assertFalse(readShort("0xff").isPresent()); assertFalse(readShort("2 ").isPresent()); assertFalse(readShort("--1").isPresent()); assertFalse(readShort("1-2").isPresent()); assertFalse(readShort("+4").isPresent()); } @Test public void testFindShortIn() { aeq(findShortIn("abcd1234xyz").get(), "(1234, 4)"); aeq(findShortIn("abcd8234xyz").get(), "(8234, 4)"); aeq(findShortIn("0123").get(), "(0, 0)"); aeq(findShortIn("a-23").get(), "(-23, 1)"); aeq(findShortIn("---34--4").get(), "(-34, 2)"); aeq(findShortIn(" 20.1 ").get(), "(20, 1)"); aeq(findShortIn("abcd" + Short.MAX_VALUE + "xyz").get(), "(32767, 4)"); aeq(findShortIn("abcd" + Short.MIN_VALUE + "xyz").get(), "(-32768, 4)"); aeq(findShortIn("abcd" + (Short.MAX_VALUE + 1) + "xyz").get(), "(3276, 4)"); aeq(findShortIn("abcd" + (Short.MIN_VALUE - 1) + "xyz").get(), "(-3276, 4)"); assertFalse(findShortIn("").isPresent()); assertFalse(findShortIn("hello").isPresent()); assertFalse(findShortIn("vdfsvfbf").isPresent()); } @Test public void testReadInteger() { aeq(readInteger("0").get(), "0"); aeq(readInteger("5").get(), "5"); aeq(readInteger("-100").get(), "-100"); aeq(readInteger(Integer.toString(Integer.MAX_VALUE)).get(), "2147483647"); 
aeq(readInteger(Integer.toString(Integer.MIN_VALUE)).get(), "-2147483648"); assertFalse(readInteger(Long.toString((long) Integer.MAX_VALUE + 1)).isPresent()); assertFalse(readInteger(Long.toString((long) Integer.MIN_VALUE - 1)).isPresent()); assertFalse(readInteger(" 1").isPresent()); assertFalse(readInteger("00").isPresent()); assertFalse(readInteger("-0").isPresent()); assertFalse(readInteger("0xff").isPresent()); assertFalse(readInteger("0xff").isPresent()); assertFalse(readInteger("2 ").isPresent()); assertFalse(readInteger("--1").isPresent()); assertFalse(readInteger("1-2").isPresent()); assertFalse(readInteger("+4").isPresent()); } @Test public void testFindIntegerIn() { aeq(findIntegerIn("abcd1234xyz").get(), "(1234, 4)"); aeq(findIntegerIn("abcd8234xyz").get(), "(8234, 4)"); aeq(findIntegerIn("0123").get(), "(0, 0)"); aeq(findIntegerIn("a-23").get(), "(-23, 1)"); aeq(findIntegerIn("---34--4").get(), "(-34, 2)"); aeq(findIntegerIn(" 20.1 ").get(), "(20, 1)"); aeq(findIntegerIn("abcd" + Integer.MAX_VALUE + "xyz").get(), "(2147483647, 4)"); aeq(findIntegerIn("abcd" + Integer.MIN_VALUE + "xyz").get(), "(-2147483648, 4)"); aeq(findIntegerIn("abcd" + ((long) Integer.MAX_VALUE + 1) + "xyz").get(), "(214748364, 4)"); aeq(findIntegerIn("abcd" + ((long) Integer.MIN_VALUE - 1) + "xyz").get(), "(-214748364, 4)"); assertFalse(findIntegerIn("").isPresent()); assertFalse(findIntegerIn("hello").isPresent()); assertFalse(findIntegerIn("vdfsvfbf").isPresent()); } @Test public void testReadLong() { aeq(readLong("0").get(), "0"); aeq(readLong("5").get(), "5"); aeq(readLong("-100").get(), "-100"); aeq(readLong(Long.toString(Long.MAX_VALUE)).get(), "9223372036854775807"); aeq(readLong(Long.toString(Long.MIN_VALUE)).get(), "-9223372036854775808"); assertFalse(readLong(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE).toString()).isPresent()); assertFalse(readLong(BigInteger.valueOf(Long.MIN_VALUE).subtract(BigInteger.ONE).toString()).isPresent()); assertFalse(readLong(" 1").isPresent()); assertFalse(readLong("00").isPresent()); assertFalse(readLong("-0").isPresent()); assertFalse(readLong("0xff").isPresent()); assertFalse(readLong("0xff").isPresent()); assertFalse(readLong("2 ").isPresent()); assertFalse(readLong("--1").isPresent()); assertFalse(readLong("1-2").isPresent()); assertFalse(readLong("+4").isPresent()); } @Test public void testFindLongIn() { aeq(findLongIn("abcd1234xyz").get(), "(1234, 4)"); aeq(findLongIn("abcd8234xyz").get(), "(8234, 4)"); aeq(findLongIn("0123").get(), "(0, 0)"); aeq(findLongIn("a-23").get(), "(-23, 1)"); aeq(findLongIn("---34--4").get(), "(-34, 2)"); aeq(findLongIn(" 20.1 ").get(), "(20, 1)"); aeq(findLongIn("abcd" + Long.MAX_VALUE + "xyz").get(), "(9223372036854775807, 4)"); aeq(findLongIn("abcd" + Long.MIN_VALUE + "xyz").get(), "(-9223372036854775808, 4)"); aeq( findLongIn("abcd" + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE) + "xyz").get(), "(922337203685477580, 4)" ); aeq( findLongIn("abcd" + BigInteger.valueOf(Long.MIN_VALUE).subtract(BigInteger.ONE) + "xyz").get(), "(-922337203685477580, 4)" ); assertFalse(findLongIn("").isPresent()); assertFalse(findLongIn("hello").isPresent()); assertFalse(findLongIn("vdfsvfbf").isPresent()); } @Test public void testReadFloat() { aeq(readFloat("0.0").get(), "0.0"); aeq(readFloat("-0.0").get(), "-0.0"); aeq(readFloat("5.0").get(), "5.0"); aeq(readFloat("-100.0").get(), "-100.0"); aeq(readFloat("1.0E10").get(), "1.0E10"); aeq(readFloat("1.0E-10").get(), "1.0E-10"); aeq(readFloat("1.234").get(), "1.234"); 
aeq(readFloat("1.111111").get(), "1.111111"); aeq(readFloat("NaN").get(), "NaN"); aeq(readFloat("Infinity").get(), "Infinity"); aeq(readFloat("-Infinity").get(), "-Infinity"); assertFalse(readFloat("1.1111111").isPresent()); assertFalse(readFloat("1.0e10").isPresent()); assertFalse(readFloat("1.0e-10").isPresent()); assertFalse(readFloat(".").isPresent()); assertFalse(readFloat("0.").isPresent()); assertFalse(readFloat(".0").isPresent()); assertFalse(readFloat(" 1.0").isPresent()); assertFalse(readFloat("--1.0").isPresent()); } @Test public void testFindFloatIn() { aeq(findFloatIn("abcd1234.0xyz").get(), "(1234.0, 4)"); aeq(findFloatIn("abcd823.4xyz").get(), "(823.4, 4)"); aeq(findFloatIn("0.0.123").get(), "(0.0, 0)"); aeq(findFloatIn("a-2.3E8z").get(), "(-2.3E8, 1)"); aeq(findFloatIn("a-2.3E10z").get(), "(-2.3, 1)"); aeq(findFloatIn("---34.4-").get(), "(-34.4, 2)"); aeq(findFloatIn(" 20.1 ").get(), "(20.1, 1)"); aeq(findFloatIn("AnAnANaNAN").get(), "(NaN, 5)"); aeq(findFloatIn("1.1111111111111111111").get(), "(1.111111, 0)"); assertFalse(findFloatIn("").isPresent()); assertFalse(findFloatIn("3").isPresent()); assertFalse(findFloatIn("hello").isPresent()); assertFalse(findFloatIn("vdfsvfbf").isPresent()); } @Test public void testReadDouble() { aeq(readDouble("0.0").get(), "0.0"); aeq(readDouble("-0.0").get(), "-0.0"); aeq(readDouble("5.0").get(), "5.0"); aeq(readDouble("-100.0").get(), "-100.0"); aeq(readDouble("1.0E10").get(), "1.0E10"); aeq(readDouble("1.0E-10").get(), "1.0E-10"); aeq(readDouble("1.234").get(), "1.234"); aeq(readDouble("1.111111111111111").get(), "1.111111111111111"); aeq(readDouble("NaN").get(), "NaN"); aeq(readDouble("Infinity").get(), "Infinity"); aeq(readDouble("-Infinity").get(), "-Infinity"); assertFalse(readDouble("1.1111111111111111").isPresent()); assertFalse(readDouble("1.0e10").isPresent()); assertFalse(readDouble("1.0e-10").isPresent()); assertFalse(readDouble(".").isPresent()); assertFalse(readDouble("0.").isPresent()); assertFalse(readDouble(".0").isPresent()); assertFalse(readDouble(" 1.0").isPresent()); assertFalse(readDouble("--1.0").isPresent()); } @Test public void testFindDoubleIn() { aeq(findDoubleIn("abcd1234.0xyz").get(), "(1234.0, 4)"); aeq(findDoubleIn("abcd823.4xyz").get(), "(823.4, 4)"); aeq(findDoubleIn("0.0.123").get(), "(0.0, 0)"); aeq(findDoubleIn("a-2.3E8z").get(), "(-2.3E8, 1)"); aeq(findDoubleIn("a-2.3E1000z").get(), "(-2.3E100, 1)"); aeq(findDoubleIn("---34.4-").get(), "(-34.4, 2)"); aeq(findDoubleIn(" 20.1 ").get(), "(20.1, 1)"); aeq(findDoubleIn("AnAnANaNAN").get(), "(NaN, 5)"); aeq(findDoubleIn("1.1111111111111111111").get(), "(1.111111111111111, 0)"); assertFalse(findDoubleIn("").isPresent()); assertFalse(findDoubleIn("3").isPresent()); assertFalse(findDoubleIn("hello").isPresent()); assertFalse(findDoubleIn("vdfsvfbf").isPresent()); } @Test public void testReadBigDecimal() { aeq(readBigDecimal("0.0").get(), "0.0"); aeq(readBigDecimal("5.0").get(), "5.0"); aeq(readBigDecimal("-100.0").get(), "-100.0"); aeq(readBigDecimal("1.0E+10").get(), "1.0E+10"); aeq(readBigDecimal("1.0E-10").get(), "1.0E-10"); aeq(readBigDecimal("1.234").get(), "1.234"); aeq(readBigDecimal("1.111111111111111").get(), "1.111111111111111"); assertFalse(readBigDecimal("1.0e10").isPresent()); assertFalse(readBigDecimal("1.0e-10").isPresent()); assertFalse(readBigDecimal(".").isPresent()); assertFalse(readBigDecimal("0.").isPresent()); assertFalse(readBigDecimal(".0").isPresent()); assertFalse(readBigDecimal(" 1.0").isPresent()); 
assertFalse(readBigDecimal("--1.0").isPresent()); assertFalse(readBigDecimal("-0.0").isPresent()); assertFalse(readBigDecimal("NaN").isPresent()); assertFalse(readBigDecimal("Infinity").isPresent()); } @Test public void testFindBigDecimalIn() { aeq(findBigDecimalIn("abcd1234.0xyz").get(), "(1234.0, 4)"); aeq(findBigDecimalIn("abcd823.4xyz").get(), "(823.4, 4)"); aeq(findBigDecimalIn("3").get(), "(3, 0)"); aeq(findBigDecimalIn("00").get(), "(0, 0)"); aeq(findBigDecimalIn("0.0.123").get(), "(0.0, 0)"); aeq(findBigDecimalIn("a-2.3E+8z").get(), "(-2.3E+8, 1)"); aeq(findBigDecimalIn("a-2.3E+1000z").get(), "(-2.3E+1000, 1)"); aeq(findBigDecimalIn("---34.4-").get(), "(-34.4, 2)"); aeq(findBigDecimalIn(" 20.1 ").get(), "(20.1, 1)"); aeq(findBigDecimalIn("1.1111111111111111111").get(), "(1.1111111111111111111, 0)"); assertFalse(findBigDecimalIn("").isPresent()); assertFalse(findBigDecimalIn("hello").isPresent()); assertFalse(findBigDecimalIn("vdfsvfbf").isPresent()); } private static void aeq(Object a, Object b) { assertEquals(a.toString(), b.toString()); } }
added String and Character reader tests
src/test/java/mho/wheels/misc/ReadersTest.java
added String and Character reader tests
Java
mit
5b82a80012128f74094be051fd9718ae1c49d320
0
bamless/chromium-swe-updater
package com.bamless.chromiumsweupdater; import android.Manifest; import android.app.AlarmManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.graphics.Color; import android.graphics.Paint; import android.os.Bundle; import android.os.Environment; import android.support.annotation.NonNull; import android.support.v4.app.ActivityCompat; import android.support.v4.content.ContextCompat; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.View; import android.widget.TextView; import android.widget.Toast; import com.bamless.chromiumsweupdater.models.BuildDate; import com.bamless.chromiumsweupdater.network.ChromiumUpdater; import com.bamless.chromiumsweupdater.receivers.AlarmReceiver; import com.bamless.chromiumsweupdater.utils.Constants; import com.bamless.chromiumsweupdater.views.AnimatedImageButton; import com.bamless.chromiumsweupdater.views.ProgressNotification; import java.util.Calendar; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.OnClick; public class MainActivity extends AppCompatActivity { public final static String TAG = MainActivity.class.getSimpleName(); /**Permission request code*/ public final static int REQUEST_EXTERNAL_WRITE = 1; /**Argument key. Boolean indicating whether to reset the {@link AlarmReceiver}*/ public final static String ARG_START_ALARM_ON_OPEN = "startAlarmOnOpen"; /**The progressNotification used to show download progress*/ private ProgressNotification progressNotification; /**The ChromiumUupdater used to check and update Chromium SWE*/ private ChromiumUpdater cu; /**The button that checks the update on click*/ @BindView(R.id.checkUpdateButton) protected AnimatedImageButton checkUpdateButton; @BindView(R.id.updateStatusIcon) protected AnimatedImageButton updateStatusIcon; /** * Creates the intent to start the {@link android.app.Activity} * @param context the current {@link Context} * @param restartTimer whether the {@link android.app.Activity} should reset the {@link AlarmReceiver} at startup * @return the {@link Intent} */ public static Intent createIntent(Context context, boolean restartTimer) { Bundle b = new Bundle(); b.putBoolean(ARG_START_ALARM_ON_OPEN, restartTimer); Intent i = new Intent(context, MainActivity.class); i.putExtras(b); return i; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); ButterKnife.bind(this); cu = new ChromiumUpdater(this); progressNotification = new ProgressNotification(this, getString(R.string.updateNotificationText)); //checks the external write permissions checkPermissions(); //check arguments checkArguments(); //init the status text updateStatusText(); //checks for update at application start checkUpdateButton.performClick(); } @OnClick(R.id.checkUpdateButton) protected void checkUpdateOnClick(final AnimatedImageButton b) { cu.checkForUpdate(new ChromiumUpdater.ReturnCallback<Boolean>() { public void onReturn(Boolean returnValue) { b.stopButtonAnimationSmooth(); if(returnValue == null) { Toast.makeText(MainActivity.this, R.string.updateFailed, Toast.LENGTH_SHORT).show(); return; } updateStatusText(); } }); } @OnClick(R.id.updateStatusIcon) protected void startUpdateOnClick(final AnimatedImageButton b) { b.setClickable(false); progressNotification.start(); //start the actual update 
cu.update(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS), progressNotification, new ChromiumUpdater.ReturnCallback<Boolean>() { public void onReturn(Boolean returnValue) { if(returnValue) updateStatusText(); else updateFailed(); b.stopButtonAnimationSmooth(); } }); } /**Updates the status text*/ private void updateStatusText() { updateStatusText(null); } /**Updates the status text to string passed*/ private void updateStatusText(String message) { TextView updateStatusText = ButterKnife.findById(this, R.id.updateStatusText); BuildDate curr = cu.getInstalledBuildDate(); BuildDate last = cu.getLatestBuildDate(); //If there is a new build if(curr.compareTo(last) < 0) { //Update text with new build info, change color, add underline and set update listener String newBuildText = (message == null) ? getResources().getString(R.string.newBuildText, last.dateToString()) : message; updateStatusText.setText(newBuildText); updateStatusText.setTextColor(Color.WHITE); updateStatusText.setPaintFlags(updateStatusText.getPaintFlags() | Paint.FAKE_BOLD_TEXT_FLAG); //makes the text clickable to start update updateStatusIcon.setVisibility(View.VISIBLE); updateStatusIcon.setClickable(true); } else { //There is no new build, reset color, underline and remove update listener updateStatusText.setText(R.string.noUpdateText); updateStatusText.setPaintFlags(updateStatusText.getPaintFlags() & (~ Paint.FAKE_BOLD_TEXT_FLAG)); updateStatusIcon.setVisibility(View.GONE); //makes the text unclickable (can't start update if there is none) updateStatusIcon.setClickable(false); } } /**Checks for the WRITE permission on the external storage. If not present it asks for it*/ private void checkPermissions() { int canRead = ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE); if(canRead != PackageManager.PERMISSION_GRANTED) ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_EXTERNAL_WRITE); } /**Checks the arguments passed via the bundle*/ private void checkArguments() { Bundle extras = getIntent().getExtras(); boolean restartAlarm = extras == null || extras.getBoolean(ARG_START_ALARM_ON_OPEN, true); Log.d(TAG, ARG_START_ALARM_ON_OPEN + ": " + restartAlarm); if(restartAlarm) startAlarm(); } /**Restart {@link AlarmReceiver}*/ private void startAlarm() { Intent intent = new Intent(this, AlarmReceiver.class); AlarmManager alarmMgr = (AlarmManager) this.getSystemService(Context.ALARM_SERVICE); PendingIntent alarmIntent = PendingIntent.getBroadcast(this, 0, intent, 0); Calendar calendar = Calendar.getInstance(); calendar.setTimeInMillis(System.currentTimeMillis()); calendar.set(Calendar.HOUR_OF_DAY, Constants.ALARM_HOUR); calendar.set(Calendar.MINUTE, Constants.ALARM_MINUTE); alarmMgr.setRepeating(AlarmManager.RTC_WAKEUP, calendar.getTimeInMillis(), Constants.DAY_INTERVAL, alarmIntent); Log.d(TAG, "started update alarm"); } /**Called upon update failure*/ private void updateFailed() { //the update failed, reset status text to last build available for download updateStatusText(); updateStatusText("Build update has failed!"); //dismiss progress notification progressNotification.cancel(); } /**Returns the result of the permission request. 
If the WRITE permissions on external storage was * negated it stops the app*/ @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { switch(requestCode) { case REQUEST_EXTERNAL_WRITE: //exit if the permission is not granted if(grantResults.length == 0 || grantResults[0] != PackageManager.PERMISSION_GRANTED) { Toast.makeText(this, R.string.permDeniedError, Toast.LENGTH_SHORT).show(); finish(); } break; } } @Override protected void onDestroy() { progressNotification.destroy(); super.onDestroy(); } }
app/src/main/java/com/bamless/chromiumsweupdater/MainActivity.java
package com.bamless.chromiumsweupdater; import android.Manifest; import android.app.AlarmManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.graphics.Color; import android.graphics.Paint; import android.os.Bundle; import android.os.Environment; import android.support.annotation.NonNull; import android.support.v4.app.ActivityCompat; import android.support.v4.content.ContextCompat; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.View; import android.widget.TextView; import android.widget.Toast; import com.bamless.chromiumsweupdater.models.BuildDate; import com.bamless.chromiumsweupdater.network.ChromiumUpdater; import com.bamless.chromiumsweupdater.receivers.AlarmReceiver; import com.bamless.chromiumsweupdater.utils.Constants; import com.bamless.chromiumsweupdater.views.AnimatedImageButton; import com.bamless.chromiumsweupdater.views.ProgressNotification; import java.util.Calendar; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.OnClick; public class MainActivity extends AppCompatActivity { public final static String TAG = MainActivity.class.getSimpleName(); /**Permission request code*/ public final static int REQUEST_EXTERNAL_WRITE = 1; /**Argument key. Boolean indicating whether to reset the {@link AlarmReceiver}*/ public final static String ARG_START_ALARM_ON_OPEN = "startAlarmOnOpen"; /**The progressNotification used to show download progress*/ private ProgressNotification progressNotification; /**The ChromiumUupdater used to check and update Chromium SWE*/ private ChromiumUpdater cu; /**The button that checks the update on click*/ @BindView(R.id.checkUpdateButton) protected AnimatedImageButton checkUpdateButton; @BindView(R.id.updateStatusIcon) protected AnimatedImageButton updateStatusIcon; /** * Creates the intent to start the {@link android.app.Activity} * @param context the current {@link Context} * @param restartTimer whether the {@link android.app.Activity} should reset the {@link AlarmReceiver} at startup * @return the {@link Intent} */ public static Intent createIntent(Context context, boolean restartTimer) { Bundle b = new Bundle(); b.putBoolean(ARG_START_ALARM_ON_OPEN, restartTimer); Intent i = new Intent(context, MainActivity.class); i.putExtras(b); return i; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); ButterKnife.bind(this); cu = new ChromiumUpdater(this); progressNotification = new ProgressNotification(this, getString(R.string.updateNotificationText)); //checks the external write permissions checkPermissions(); //check arguments checkArguments(); //init the status text updateStatusText(); //checks for update at application start checkUpdateButton.performClick(); } @OnClick(R.id.checkUpdateButton) protected void checkUpdateOnClick(final AnimatedImageButton b) { cu.checkForUpdate(new ChromiumUpdater.ReturnCallback<Boolean>() { public void onReturn(Boolean returnValue) { if(returnValue == null) { Toast.makeText(MainActivity.this, R.string.updateFailed, Toast.LENGTH_SHORT).show(); return; } updateStatusText(); b.stopButtonAnimationSmooth(); } }); } @OnClick(R.id.updateStatusIcon) protected void startUpdateOnClick(final AnimatedImageButton b) { b.setClickable(false); progressNotification.start(); //start the actual update 
cu.update(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS), progressNotification, new ChromiumUpdater.ReturnCallback<Boolean>() { public void onReturn(Boolean returnValue) { if(returnValue) updateStatusText(); else updateFailed(); b.stopButtonAnimationSmooth(); } }); } /**Updates the status text*/ private void updateStatusText() { updateStatusText(null); } /**Updates the status text to string passed*/ private void updateStatusText(String message) { TextView updateStatusText = ButterKnife.findById(this, R.id.updateStatusText); BuildDate curr = cu.getInstalledBuildDate(); BuildDate last = cu.getLatestBuildDate(); //If there is a new build if(curr.compareTo(last) < 0) { //Update text with new build info, change color, add underline and set update listener String newBuildText = (message == null) ? getResources().getString(R.string.newBuildText, last.dateToString()) : message; updateStatusText.setText(newBuildText); updateStatusText.setTextColor(Color.WHITE); updateStatusText.setPaintFlags(updateStatusText.getPaintFlags() | Paint.FAKE_BOLD_TEXT_FLAG); //makes the text clickable to start update updateStatusIcon.setVisibility(View.VISIBLE); updateStatusIcon.setClickable(true); } else { //There is no new build, reset color, underline and remove update listener updateStatusText.setText(R.string.noUpdateText); updateStatusText.setPaintFlags(updateStatusText.getPaintFlags() & (~ Paint.FAKE_BOLD_TEXT_FLAG)); updateStatusIcon.setVisibility(View.GONE); //makes the text unclickable (can't start update if there is none) updateStatusIcon.setClickable(false); } } /**Checks for the WRITE permission on the external storage. If not present it asks for it*/ private void checkPermissions() { int canRead = ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE); if(canRead != PackageManager.PERMISSION_GRANTED) ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_EXTERNAL_WRITE); } /**Checks the arguments passed via the bundle*/ private void checkArguments() { Bundle extras = getIntent().getExtras(); boolean restartAlarm = extras == null || extras.getBoolean(ARG_START_ALARM_ON_OPEN, true); Log.d(TAG, ARG_START_ALARM_ON_OPEN + ": " + restartAlarm); if(restartAlarm) startAlarm(); } /**Restart {@link AlarmReceiver}*/ private void startAlarm() { Intent intent = new Intent(this, AlarmReceiver.class); AlarmManager alarmMgr = (AlarmManager) this.getSystemService(Context.ALARM_SERVICE); PendingIntent alarmIntent = PendingIntent.getBroadcast(this, 0, intent, 0); Calendar calendar = Calendar.getInstance(); calendar.setTimeInMillis(System.currentTimeMillis()); calendar.set(Calendar.HOUR_OF_DAY, Constants.ALARM_HOUR); calendar.set(Calendar.MINUTE, Constants.ALARM_MINUTE); alarmMgr.setRepeating(AlarmManager.RTC_WAKEUP, calendar.getTimeInMillis(), Constants.DAY_INTERVAL, alarmIntent); Log.d(TAG, "started update alarm"); } /**Called upon update failure*/ private void updateFailed() { //the update failed, reset status text to last build available for download updateStatusText(); updateStatusText("Build update has failed!"); //dismiss progress notification progressNotification.cancel(); } /**Returns the result of the permission request. 
If the WRITE permissions on external storage was * negated it stops the app*/ @Override public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { switch(requestCode) { case REQUEST_EXTERNAL_WRITE: //exit if the permission is not granted if(grantResults.length == 0 || grantResults[0] != PackageManager.PERMISSION_GRANTED) { Toast.makeText(this, R.string.permDeniedError, Toast.LENGTH_SHORT).show(); finish(); } break; } } @Override protected void onDestroy() { progressNotification.destroy(); super.onDestroy(); } }
Stop the "checkUpdateButton" on failure When ChromiumUpdater#checkForUpdate fails the chheckUpdateButton animation stops. Previously the statement for stopping the animation was placed after a return statement called in case of failure. Now it is called before, so that the animation always stops when checkForUpdate returns.
app/src/main/java/com/bamless/chromiumsweupdater/MainActivity.java
Stop the "checkUpdateButton" on failure
Java
mit
d73fbd7e5fec212f2f1a15ac8c715e673cac3187
0
jananzhu/slogo,jananzhu/slogo
package commands;

import java.util.Queue;

import slogo_back.Model;

public class Forward extends Command {

    private final static int numParams = 1;
    private final static String PARAM_NAME = "forward";

    public Forward(Queue<String> cmdQueue, Model model) {
        super(cmdQueue, model, numParams);
    }

    @Override
    public double getValue () {
        return myModel.toFront(PARAM_NAME, (Object) new double[]{myParams[0].getValue()});
    }
}
src/commands/Forward.java
package commands;

import java.util.Queue;

import slogo_back.Model;

public class Forward extends Command {

    private final static int numParams = 1;
    private final static String PARAM_NAME = "Forward";

    public Forward(Queue<String> cmdQueue, Model model) {
        super(cmdQueue, model, numParams);
    }

    @Override
    public double getValue () {
        return myModel.toFront(PARAM_NAME, (Object) new double[]{myParams[0].getValue()});
    }
}
slight integration changes
src/commands/Forward.java
slight integration changes
Java
mit
e9dfc17c374392d49d3a61f237da1063c3ababe0
0
JonathanxD/CodeAPI,JonathanxD/CodeAPI
/* * CodeAPI - Framework to generate Java code and Bytecode code. <https://github.com/JonathanxD/CodeAPI> * * The MIT License (MIT) * * Copyright (c) 2017 TheRealBuggy/JonathanxD (https://github.com/JonathanxD/ & https://github.com/TheRealBuggy/) <[email protected]> * Copyright (c) contributors * * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.github.jonathanxd.codeapi.test; import com.github.jonathanxd.codeapi.CodeSource; import com.github.jonathanxd.codeapi.base.ClassDeclaration; import com.github.jonathanxd.codeapi.base.FieldDeclaration; import com.github.jonathanxd.codeapi.base.Label; import com.github.jonathanxd.codeapi.base.StaticBlock; import com.github.jonathanxd.codeapi.base.VariableDeclaration; import com.github.jonathanxd.codeapi.inspect.SourceInspect; import org.junit.Assert; import org.junit.Test; import java.util.List; public class TestInspect { @Test public void inspect() { CodeSource source = GenericClass_.$().getMethods().get(0).getBody(); List<VariableDeclaration> inspect = SourceInspect.Companion .builder(codePart -> codePart instanceof VariableDeclaration) .include(bodied -> bodied instanceof Label) .includeSource(true) .mapTo(codePart -> (VariableDeclaration) codePart) .inspect(source); System.out.println(inspect); Assert.assertTrue(inspect.size() == 3); Assert.assertEquals("fieldi", inspect.get(0).getName()); } }
src/test/java/com/github/jonathanxd/codeapi/test/TestInspect.java
/* * CodeAPI - Framework to generate Java code and Bytecode code. <https://github.com/JonathanxD/CodeAPI> * * The MIT License (MIT) * * Copyright (c) 2017 TheRealBuggy/JonathanxD (https://github.com/JonathanxD/ & https://github.com/TheRealBuggy/) <[email protected]> * Copyright (c) contributors * * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.github.jonathanxd.codeapi.test; import com.github.jonathanxd.codeapi.CodeSource; import com.github.jonathanxd.codeapi.base.ClassDeclaration; import com.github.jonathanxd.codeapi.base.FieldDeclaration; import com.github.jonathanxd.codeapi.base.Label; import com.github.jonathanxd.codeapi.base.StaticBlock; import com.github.jonathanxd.codeapi.base.VariableDeclaration; import com.github.jonathanxd.codeapi.inspect.SourceInspect; import org.junit.Assert; import org.junit.Test; import java.util.List; public class TestInspect { @Test public void inspect() { CodeSource source = GenericClass_.$().getMethods().get(0).getBody(); List<VariableDeclaration> inspect = SourceInspect.Companion .builder(codePart -> codePart instanceof VariableDeclaration) .include(bodied -> bodied instanceof Label) .includeSource(true) .mapTo(codePart -> (VariableDeclaration) codePart) .inspect(source); System.out.println(inspect); Assert.assertTrue(inspect.size() == 1); Assert.assertEquals("fieldi", inspect.get(0).getName()); } }
Fixed inspect test
src/test/java/com/github/jonathanxd/codeapi/test/TestInspect.java
Fixed inspect test
Java
mit
f4efe0522dc437e002822d1848d2e9e539e6a5ae
0
itenente/igv,godotgildor/igv,itenente/igv,godotgildor/igv,itenente/igv,godotgildor/igv,amwenger/igv,amwenger/igv,godotgildor/igv,amwenger/igv,igvteam/igv,igvteam/igv,amwenger/igv,itenente/igv,igvteam/igv,amwenger/igv,igvteam/igv,igvteam/igv,godotgildor/igv,itenente/igv
/* * Copyright (c) 2007-2011 by The Broad Institute of MIT and Harvard. All Rights Reserved. * * This software is licensed under the terms of the GNU Lesser General Public License (LGPL), * Version 2.1 which is available at http://www.opensource.org/licenses/lgpl-2.1.php. * * THE SOFTWARE IS PROVIDED "AS IS." THE BROAD AND MIT MAKE NO REPRESENTATIONS OR * WARRANTES OF ANY KIND CONCERNING THE SOFTWARE, EXPRESS OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR * PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER * OR NOT DISCOVERABLE. IN NO EVENT SHALL THE BROAD OR MIT, OR THEIR RESPECTIVE * TRUSTEES, DIRECTORS, OFFICERS, EMPLOYEES, AND AFFILIATES BE LIABLE FOR ANY DAMAGES * OF ANY KIND, INCLUDING, WITHOUT LIMITATION, INCIDENTAL OR CONSEQUENTIAL DAMAGES, * ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER * THE BROAD OR MIT SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT * SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. */ /* * GenomeManager.java * * Created on November 9, 2007, 9:12 AM * * To change this template, choose Tools | Template Manager * and open the template in the editor. */ package org.broad.igv.feature.genome; import org.apache.log4j.Logger; import org.broad.igv.Globals; import org.broad.igv.PreferenceManager; import org.broad.igv.feature.Chromosome; import org.broad.igv.feature.CytoBandFileParser; import org.broad.igv.ui.IGV; import org.broad.igv.ui.util.ConfirmDialog; import org.broad.igv.ui.util.MessageUtils; import org.broad.igv.ui.util.ProgressMonitor; import org.broad.igv.util.FileUtils; import org.broad.igv.util.HttpUtils; import org.broad.igv.util.Utilities; import org.broad.tribble.readers.AsciiLineReader; import java.io.*; import java.net.*; import java.util.*; import java.util.zip.*; /** * @author jrobinso */ public class GenomeManager { private static Logger log = Logger.getLogger(GenomeManager.class); final public static String USER_DEFINED_GENOME_LIST_FILE = "user-defined-genomes.txt"; private static GenomeDescriptor DEFAULT_GENOME; public Genome currentGenome; private List<GenomeListItem> userDefinedGenomeArchiveList; private List<GenomeListItem> cachedGenomeArchiveList; private List<GenomeListItem> serverGenomeArchiveList; /** * The IGV instance that owns this GenomeManager. Can be null. */ IGV igv; public GenomeManager(IGV igv) { // genomeDescriptorMap = new HashMap(); this.igv = igv; } public GenomeManager() { // genomeDescriptorMap = new HashMap(); this.igv = null; } /** * Load a genome from the given path. 
Could be a .genome, or fasta file * * @param genomePath File, http, or ftp path to the .genome or indexed fasta file * @param monitor ProgressMonitor Monitor object, can be null * @return Genome * @throws FileNotFoundException */ public Genome loadGenome( String genomePath, ProgressMonitor monitor) throws IOException { try { log.info("Loading genome: " + genomePath); GenomeDescriptor genomeDescriptor = null; if (monitor != null) { monitor.fireProgressChange(25); } if (genomePath.endsWith(".genome")) { File archiveFile; if (HttpUtils.getInstance().isURL(genomePath.toLowerCase())) { // We need a local copy, as there is no http zip file reader URL genomeArchiveURL = new URL(genomePath); String cachedFilename = Utilities.getFileNameFromURL( URLDecoder.decode(new URL(genomePath).getFile(), "UTF-8")); if (!Globals.getGenomeCacheDirectory().exists()) { Globals.getGenomeCacheDirectory().mkdir(); } archiveFile = new File(Globals.getGenomeCacheDirectory(), cachedFilename); refreshCache(archiveFile, genomeArchiveURL); } else { archiveFile = new File(genomePath); } genomeDescriptor = parseGenomeArchiveFile(archiveFile); LinkedHashMap<String, Chromosome> chromMap = loadCytobandFile(genomeDescriptor); Map<String, String> aliases = loadAliasFile(genomeDescriptor); final String id = genomeDescriptor.getId(); final String displayName = genomeDescriptor.getName(); boolean isFasta = genomeDescriptor.isFasta(); currentGenome = new Genome(id, displayName, genomeDescriptor.getSequenceLocation(), isFasta); log.info("Genome loaded. id= " + id); currentGenome.setChromosomeMap(chromMap, genomeDescriptor.isChromosomesAreOrdered()); if (aliases != null) currentGenome.addChrAliases(aliases); if(!Globals.isHeadless()) { updateGeneTrack(genomeDescriptor); } } else { // Assume its a fasta String fastaPath = null; String fastaIndexPath = null; if (genomePath.endsWith(".fai")) { fastaPath = genomePath.substring(0, genomePath.length() - 4); fastaIndexPath = genomePath; } else { fastaPath = genomePath; fastaIndexPath = genomePath + ".fai"; } if (!FileUtils.resourceExists(fastaIndexPath)) { throw new RuntimeException("<html>No index found, fasta files must be indexed.<br>" + "Indexes can be created with samtools (http://samtools.sourceforge.net/)<br>" + "or Picard(http://picard.sourceforge.net/)."); } String id = fastaPath; String name = (new File(fastaPath)).getName(); currentGenome = new Genome(id, name, fastaPath, true); log.info("Genome loaded. id= " + id); IGV.getInstance().createGeneTrack(currentGenome, null, null, null, null); } if (monitor != null) { monitor.fireProgressChange(25); } // Do this last so that user defined aliases have preference. currentGenome.loadUserDefinedAliases(); return currentGenome; } catch (SocketException e) { throw new GenomeServerException("Server connection error", e); } } private void updateGeneTrack(GenomeDescriptor genomeDescriptor) throws IOException { InputStream geneStream = null; try { geneStream = genomeDescriptor.getGeneStream(); AsciiLineReader reader = geneStream == null ? null : new AsciiLineReader(geneStream); IGV.getInstance().createGeneTrack(currentGenome, reader, genomeDescriptor.getGeneFileName(), genomeDescriptor.getGeneTrackName(), genomeDescriptor.getUrl()); } finally { if (geneStream != null) geneStream.close(); } } /** * Load the cytoband file specified in the genome descriptor and return an ordered hash map of * chromsome name -> chromosome. 
This is a legacy method, kept for backward compatibiltiy of * .genome files in which the chromosome lengths are specified in a cytoband file. * * @param genomeDescriptor * @return */ private LinkedHashMap<String, Chromosome> loadCytobandFile(GenomeDescriptor genomeDescriptor) { InputStream is = null; try { is = genomeDescriptor.getCytoBandStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(is)); return CytoBandFileParser.loadData(reader); } catch (IOException ex) { log.warn("Error loading cytoband file", ex); throw new RuntimeException("Error loading cytoband file" + genomeDescriptor.cytoBandFileName); } finally { try { if (is != null) { is.close(); } } catch (IOException ex) { log.warn("Error closing zip stream!", ex); } } } /** * Load the chromosome alias file, if any, specified in the genome descriptor. * * @param genomeDescriptor * @return The chromosome alias map, or null if none is defined. */ private Map<String, String> loadAliasFile(GenomeDescriptor genomeDescriptor) { InputStream aliasStream = null; try { aliasStream = genomeDescriptor.getChrAliasStream(); if (aliasStream != null) { Map<String, String> chrAliasTable = new HashMap(); BufferedReader reader = new BufferedReader(new InputStreamReader(aliasStream)); String nextLine = ""; while ((nextLine = reader.readLine()) != null) { String[] kv = nextLine.split("\t"); if (kv.length > 1) { chrAliasTable.put(kv[0], kv[1]); } } return chrAliasTable; } else { return null; } } catch (Exception e) { // We don't want to bomb if the alias load fails. Just log it and proceed. log.error("Error loading chromosome alias table"); return null; } finally { try { if (aliasStream != null) { aliasStream.close(); } } catch (IOException ex) { log.warn("Error closing zip stream!", ex); } } } /** * Refresh a locally cached genome * * @param archiveFile * @param genomeArchiveURL * @throws IOException */ private void refreshCache(File archiveFile, URL genomeArchiveURL) { // Look in cache first try { if (archiveFile.exists()) { long fileLength = archiveFile.length(); long contentLength = HttpUtils.getInstance().getContentLength(genomeArchiveURL); if (contentLength <= 0) { log.info("Skipping genome update of " + archiveFile.getName() + " due to unknown content length"); } // Force an update of cached genome if file length does not equal remote content length boolean forceUpdate = (contentLength != fileLength) && PreferenceManager.getInstance().getAsBoolean(PreferenceManager.AUTO_UPDATE_GENOMES); if (forceUpdate) { log.info("Refreshing genome: " + genomeArchiveURL.toString()); File tmpFile = new File(archiveFile.getAbsolutePath() + ".tmp"); if (HttpUtils.getInstance().downloadFile(genomeArchiveURL.toExternalForm(), tmpFile)) { FileUtils.copyFile(tmpFile, archiveFile); tmpFile.deleteOnExit(); } } } else { // Copy file directly from the server to local cache. HttpUtils.getInstance().downloadFile(genomeArchiveURL.toExternalForm(), archiveFile); } } catch (Exception e) { log.error("Error refreshing genome cache. ", e); MessageUtils.showMessage(("An error was encountered refreshing the genome cache: " + e.getMessage() + "<br> If this problem persists please contact [email protected]")); } } /** * Creates a genome descriptor. 
*/ public GenomeDescriptor parseGenomeArchiveFile(File f) throws IOException { String zipFilePath = f.getAbsolutePath(); if (!f.exists()) { log.error("Genome file: " + f.getAbsolutePath() + " does not exist."); return null; } GenomeDescriptor genomeDescriptor = null; Map<String, ZipEntry> zipEntries = new HashMap(); ZipFile zipFile = new ZipFile(zipFilePath); ZipInputStream zipInputStream = null; try { zipInputStream = new ZipInputStream(new FileInputStream(f)); ZipEntry zipEntry = zipInputStream.getNextEntry(); while (zipEntry != null) { String zipEntryName = zipEntry.getName(); zipEntries.put(zipEntryName, zipEntry); if (zipEntryName.equalsIgnoreCase(Globals.GENOME_ARCHIVE_PROPERTY_FILE_NAME)) { InputStream inputStream = zipFile.getInputStream(zipEntry); Properties properties = new Properties(); properties.load(inputStream); // Cytoband String cytobandZipEntryName = properties.getProperty(Globals.GENOME_ARCHIVE_CYTOBAND_FILE_KEY); // RefFlat String geneFileName = properties.getProperty(Globals.GENOME_ARCHIVE_GENE_FILE_KEY); String chrAliasFileName = properties.getProperty(Globals.GENOME_CHR_ALIAS_FILE_KEY); String sequenceLocation = properties.getProperty(Globals.GENOME_ARCHIVE_SEQUENCE_FILE_LOCATION_KEY); if ((sequenceLocation != null) && !HttpUtils.getInstance().isURL(sequenceLocation)) { File sequenceFolder = null; // Relative or absolute location? if (sequenceLocation.startsWith("/") || sequenceLocation.startsWith("\\")) { sequenceFolder = new File(sequenceLocation); } else { File tempZipFile = new File(zipFilePath); sequenceFolder = new File(tempZipFile.getParent(), sequenceLocation); } sequenceLocation = sequenceFolder.getCanonicalPath(); sequenceLocation.replace('\\', '/'); } int version = 0; String versionString = properties.getProperty(Globals.GENOME_ARCHIVE_VERSION_KEY); if (versionString != null) { try { version = Integer.parseInt(versionString); } catch (Exception e) { log.error("Error parsing version string: " + versionString); } } boolean chrNamesAltered = false; String chrNamesAlteredString = properties.getProperty("filenamesAltered"); if (chrNamesAlteredString != null) { try { chrNamesAltered = Boolean.parseBoolean(chrNamesAlteredString); } catch (Exception e) { log.error("Error parsing version string: " + versionString); } } boolean chromosomesAreOrdered = false; String tmp = properties.getProperty(Globals.GENOME_ORDERED_KEY); if (tmp != null) { try { chromosomesAreOrdered = Boolean.parseBoolean(tmp); } catch (Exception e) { log.error("Error parsing ordered string: " + tmp); } } String url = properties.getProperty(Globals.GENOME_URL_KEY); // The new descriptor genomeDescriptor = new GenomeZipDescriptor( properties.getProperty(Globals.GENOME_ARCHIVE_NAME_KEY), version, chrNamesAltered, properties.getProperty(Globals.GENOME_ARCHIVE_ID_KEY), cytobandZipEntryName, geneFileName, chrAliasFileName, properties.getProperty(Globals.GENOME_GENETRACK_NAME, "Gene"), sequenceLocation, zipFile, zipEntries, chromosomesAreOrdered); if (url != null) { genomeDescriptor.setUrl(url); } } zipEntry = zipInputStream.getNextEntry(); } } finally { try { if (zipInputStream != null) { zipInputStream.close(); } } catch (IOException ex) { log.warn("Error closing imported genome zip stream!", ex); } } return genomeDescriptor; } boolean serverGenomeListUnreachable = false; /** * Gets a list of all the server genome archive files that * IGV knows about. * * @param excludedArchivesUrls The set of file location to exclude in the return list. 
* @return LinkedHashSet<GenomeListItem> * @throws IOException * @see GenomeListItem */ public List<GenomeListItem> getServerGenomeArchiveList(Set excludedArchivesUrls) throws IOException { if (serverGenomeListUnreachable) { return null; } if (serverGenomeArchiveList == null) { serverGenomeArchiveList = new LinkedList(); BufferedReader dataReader = null; InputStream inputStream = null; String genomeListURLString = ""; try { genomeListURLString = PreferenceManager.getInstance().getGenomeListURL(); URL serverGenomeURL = new URL(genomeListURLString); if (HttpUtils.getInstance().isURL(genomeListURLString)) { inputStream = HttpUtils.getInstance().openConnectionStream(serverGenomeURL); } else { File file = new File(genomeListURLString.startsWith("file:") ? serverGenomeURL.getFile() : genomeListURLString); inputStream = new FileInputStream(file); } dataReader = new BufferedReader(new InputStreamReader(inputStream)); String genomeRecord; while ((genomeRecord = dataReader.readLine()) != null) { if (genomeRecord.startsWith("<") || genomeRecord.startsWith("(#")) { continue; } if (genomeRecord != null) { genomeRecord = genomeRecord.trim(); String[] fields = genomeRecord.split("\t"); if ((fields != null) && (fields.length >= 3)) { // Throw away records we don't want to see if (excludedArchivesUrls != null) { if (excludedArchivesUrls.contains(fields[1])) { continue; } } int version = 0; if (fields.length > 3) { try { version = Integer.parseInt(fields[3]); } catch (Exception e) { log.error("Error parsing genome version: " + fields[0], e); } } //String displayableName, String url, String id, int version, boolean isUserDefined String name = fields[0]; String url = fields[1]; String id = fields[2]; boolean valid = true; if (url.length() == 0) { log.error("Genome entry : " + name + " has an empty URL string. Check for extra tabs in the definition file: " + PreferenceManager.getInstance().getGenomeListURL()); valid = false; } // TODO -- more validation if (valid) { try { GenomeListItem item = new GenomeListItem(fields[0], fields[1], fields[2], false); serverGenomeArchiveList.add(item); } catch (Exception e) { log.error( "Error reading a line from server genome list" + " line was: [" + genomeRecord + "]", e); } } } else { log.error("Found invalid server genome list record: " + genomeRecord); } } } } catch (Exception e) { serverGenomeListUnreachable = true; log.error("Error fetching genome list: ", e); ConfirmDialog.optionallyShowInfoDialog("Warning: could not connect to the genome server (" + genomeListURLString + "). Only locally defined genomes will be available.", PreferenceManager.SHOW_GENOME_SERVER_WARNING); } finally { if (dataReader != null) { dataReader.close(); } if (inputStream != null) { inputStream.close(); } } } return serverGenomeArchiveList; } /** * Gets a list of all the user-defined genome archive files that * IGV knows about. 
* * @return LinkedHashSet<GenomeListItem> * @throws IOException * @see GenomeListItem */ public List<GenomeListItem> getUserDefinedGenomeArchiveList() throws IOException { if (userDefinedGenomeArchiveList == null) { boolean updateClientGenomeListFile = false; userDefinedGenomeArchiveList = new LinkedList(); File listFile = new File(Globals.getGenomeCacheDirectory(), USER_DEFINED_GENOME_LIST_FILE); BufferedReader reader = null; boolean mightBeProperties = false; try { reader = new BufferedReader(new FileReader(listFile)); String nextLine; while ((nextLine = reader.readLine()) != null) { if (nextLine.startsWith("#") || nextLine.trim().length() == 0) { mightBeProperties = true; continue; } String[] fields = nextLine.split("\t"); if (fields.length < 3) { if (mightBeProperties && fields[0].contains("=")) { fields = nextLine.split("\\\\t"); if(fields.length < 3) { continue; } int idx = fields[0].indexOf("="); fields[0] = fields[0].substring(idx + 1); } } String file = fields[1]; if (!FileUtils.resourceExists(file)) { updateClientGenomeListFile = true; continue; } GenomeListItem item = new GenomeListItem(fields[0], file, fields[2], true); userDefinedGenomeArchiveList.add(item); } } finally { if (reader != null) reader.close(); } if (updateClientGenomeListFile) { updateImportedGenomePropertyFile(); } } return userDefinedGenomeArchiveList; } /** * Method description */ public void clearGenomeCache() { File[] files = Globals.getGenomeCacheDirectory().listFiles(); for (File file : files) { if (file.getName().toLowerCase().endsWith(Globals.GENOME_FILE_EXTENSION)) { file.delete(); } } } /** * Gets a list of all the locally cached genome archive files that * IGV knows about. * * @return LinkedHashSet<GenomeListItem> * @throws IOException * @see GenomeListItem */ public List<GenomeListItem> getCachedGenomeArchiveList() throws IOException { if (cachedGenomeArchiveList == null) { cachedGenomeArchiveList = new LinkedList(); if (!Globals.getGenomeCacheDirectory().exists()) { return cachedGenomeArchiveList; } File[] files = Globals.getGenomeCacheDirectory().listFiles(); for (File file : files) { if (file.isDirectory()) { continue; } if (!file.getName().toLowerCase().endsWith(Globals.GENOME_FILE_EXTENSION)) { continue; } ZipFile zipFile = null; FileInputStream fis = null; ZipInputStream zipInputStream = null; try { zipFile = new ZipFile(file); fis = new FileInputStream(file); zipInputStream = new ZipInputStream(new BufferedInputStream(fis)); ZipEntry zipEntry = zipFile.getEntry(Globals.GENOME_ARCHIVE_PROPERTY_FILE_NAME); if (zipEntry == null) { continue; // Should never happen } InputStream inputStream = zipFile.getInputStream(zipEntry); Properties properties = new Properties(); properties.load(inputStream); int version = 0; if (properties.containsKey(Globals.GENOME_ARCHIVE_VERSION_KEY)) { try { version = Integer.parseInt( properties.getProperty(Globals.GENOME_ARCHIVE_VERSION_KEY)); } catch (Exception e) { log.error("Error parsing genome version: " + version, e); } } GenomeListItem item = new GenomeListItem(properties.getProperty(Globals.GENOME_ARCHIVE_NAME_KEY), file.getAbsolutePath(), properties.getProperty(Globals.GENOME_ARCHIVE_ID_KEY), false); cachedGenomeArchiveList.add(item); } catch (ZipException ex) { log.error("\nZip error unzipping cached genome.", ex); try { file.delete(); zipInputStream.close(); } catch (Exception e) { //ignore exception when trying to delete file } } catch (IOException ex) { log.warn("\nIO error unzipping cached genome.", ex); try { file.delete(); } catch (Exception e) { //ignore 
exception when trying to delete file } } finally { try { if (zipInputStream != null) { zipInputStream.close(); } if (zipFile != null) { zipFile.close(); } if (fis != null) { fis.close(); } } catch (IOException ex) { log.warn("Error closing genome zip stream!", ex); } } } } return cachedGenomeArchiveList; } /** * Reconstructs the user-define genome property file. * * @throws IOException */ public void updateImportedGenomePropertyFile() { if (userDefinedGenomeArchiveList == null) { return; } File listFile = new File(Globals.getGenomeCacheDirectory(), USER_DEFINED_GENOME_LIST_FILE); File backup = null; if (listFile.exists()) { backup = new File(listFile.getAbsolutePath() + ".bak"); try { FileUtils.copyFile(listFile, backup); } catch (IOException e) { log.error("Error backing up user-defined genome list file", e); backup = null; } } PrintWriter writer = null; try { writer = new PrintWriter(new BufferedWriter(new FileWriter(listFile))); for (GenomeListItem genomeListItem : userDefinedGenomeArchiveList) { writer.print(genomeListItem.getDisplayableName()); writer.print("\t"); writer.print(genomeListItem.getLocation()); writer.print("\t"); writer.println(genomeListItem.getId()); } } catch (Exception e) { if (backup != null) { try { FileUtils.copyFile(backup, listFile); } catch (IOException e1) { log.error("Error restoring genome-list file from backup"); } } log.error("Error updating genome property file", e); MessageUtils.showMessage("Error updating user-defined genome list " + e.getMessage()); } finally { if (writer != null) writer.close(); if (backup != null) backup.delete(); } } /** * Create a genome archive (.genome) file. * * @param genomeZipLocation A File path to a directory in which the .genome * output file will be written. * @param cytobandFileName A File path to a file that contains cytoband data. * @param refFlatFileName A File path to a gene file. * @param fastaFileName A File path to a FASTA file, a .gz file containing a * single FASTA file, or a directory containing ONLY FASTA files. * @param relativeSequenceLocation A relative path to the location * (relative to the .genome file to be created) where the sequence data for * the new genome will be written. * @param genomeDisplayName The unique user-readable name of the new genome. * @param genomeId The id to be assigned to the genome. * @param genomeFileName The file name (not path) of the .genome archive * file to be created. * @param monitor A ProgressMonitor used to track progress - null, * if no progress updating is required. 
* @param sequenceOutputLocationOverride * @return GenomeListItem * @throws FileNotFoundException */ public GenomeListItem defineGenome(String genomeZipLocation, String cytobandFileName, String refFlatFileName, String fastaFileName, String chrAliasFileName, String relativeSequenceLocation, String genomeDisplayName, String genomeId, String genomeFileName, ProgressMonitor monitor, String sequenceOutputLocationOverride) throws IOException { File zipFileLocation = null; File fastaInputFile = null; File refFlatFile = null; File cytobandFile = null; File chrAliasFile = null; File sequenceLocation; if ((genomeZipLocation != null) && (genomeZipLocation.trim().length() != 0)) { zipFileLocation = new File(genomeZipLocation); PreferenceManager.getInstance().setLastGenomeImportDirectory(zipFileLocation); } if ((cytobandFileName != null) && (cytobandFileName.trim().length() != 0)) { cytobandFile = new File(cytobandFileName); } if ((refFlatFileName != null) && (refFlatFileName.trim().length() != 0)) { refFlatFile = new File(refFlatFileName); } if ((chrAliasFileName != null) && (chrAliasFileName.trim().length() != 0)) { chrAliasFile = new File(chrAliasFileName); } if ((fastaFileName != null) && (fastaFileName.trim().length() != 0)) { fastaInputFile = new File(fastaFileName); // The sequence info only matters if we have FASTA if ((relativeSequenceLocation != null) && (relativeSequenceLocation.trim().length() != 0)) { sequenceLocation = new File(genomeZipLocation, relativeSequenceLocation); if (!sequenceLocation.exists()) { sequenceLocation.mkdir(); } } } if (monitor != null) monitor.fireProgressChange(25); File archiveFile = (new GenomeImporter()).createGenomeArchive(zipFileLocation, genomeFileName, genomeId, genomeDisplayName, relativeSequenceLocation, fastaInputFile, refFlatFile, cytobandFile, chrAliasFile, sequenceOutputLocationOverride, monitor); if (monitor != null) monitor.fireProgressChange(75); if (archiveFile == null) { return null; } else { GenomeListItem newItem = new GenomeListItem(genomeDisplayName, archiveFile.getAbsolutePath(), genomeId, true); addUserDefineGenomeItem(newItem); return newItem; } } public String getGenomeId() { return currentGenome == null ? null : currentGenome.getId(); } public Genome getCurrentGenome() { return currentGenome; } public void addUserDefineGenomeItem(GenomeListItem genomeListItem) { userDefinedGenomeArchiveList.add(0, genomeListItem); updateImportedGenomePropertyFile(); } }
src/org/broad/igv/feature/genome/GenomeManager.java
/* * Copyright (c) 2007-2011 by The Broad Institute of MIT and Harvard. All Rights Reserved. * * This software is licensed under the terms of the GNU Lesser General Public License (LGPL), * Version 2.1 which is available at http://www.opensource.org/licenses/lgpl-2.1.php. * * THE SOFTWARE IS PROVIDED "AS IS." THE BROAD AND MIT MAKE NO REPRESENTATIONS OR * WARRANTES OF ANY KIND CONCERNING THE SOFTWARE, EXPRESS OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR * PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER * OR NOT DISCOVERABLE. IN NO EVENT SHALL THE BROAD OR MIT, OR THEIR RESPECTIVE * TRUSTEES, DIRECTORS, OFFICERS, EMPLOYEES, AND AFFILIATES BE LIABLE FOR ANY DAMAGES * OF ANY KIND, INCLUDING, WITHOUT LIMITATION, INCIDENTAL OR CONSEQUENTIAL DAMAGES, * ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER * THE BROAD OR MIT SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT * SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. */ /* * GenomeManager.java * * Created on November 9, 2007, 9:12 AM * * To change this template, choose Tools | Template Manager * and open the template in the editor. */ package org.broad.igv.feature.genome; import org.apache.log4j.Logger; import org.broad.igv.Globals; import org.broad.igv.PreferenceManager; import org.broad.igv.feature.Chromosome; import org.broad.igv.feature.CytoBandFileParser; import org.broad.igv.ui.IGV; import org.broad.igv.ui.util.ConfirmDialog; import org.broad.igv.ui.util.MessageUtils; import org.broad.igv.ui.util.ProgressMonitor; import org.broad.igv.util.FileUtils; import org.broad.igv.util.HttpUtils; import org.broad.igv.util.Utilities; import org.broad.tribble.readers.AsciiLineReader; import java.io.*; import java.net.*; import java.util.*; import java.util.zip.*; /** * @author jrobinso */ public class GenomeManager { private static Logger log = Logger.getLogger(GenomeManager.class); final public static String USER_DEFINED_GENOME_LIST_FILE = "user-defined-genomes.txt"; private static GenomeDescriptor DEFAULT_GENOME; public Genome currentGenome; private List<GenomeListItem> userDefinedGenomeArchiveList; private List<GenomeListItem> cachedGenomeArchiveList; private List<GenomeListItem> serverGenomeArchiveList; /** * The IGV instance that owns this GenomeManager. Can be null. */ IGV igv; public GenomeManager(IGV igv) { // genomeDescriptorMap = new HashMap(); this.igv = igv; } public GenomeManager() { // genomeDescriptorMap = new HashMap(); this.igv = null; } /** * Load a genome from the given path. 
Could be a .genome, or fasta file * * @param genomePath File, http, or ftp path to the .genome or indexed fasta file * @param monitor ProgressMonitor Monitor object, can be null * @return Genome * @throws FileNotFoundException */ public Genome loadGenome( String genomePath, ProgressMonitor monitor) throws IOException { try { GenomeDescriptor genomeDescriptor = null; if (monitor != null) { monitor.fireProgressChange(25); } if (genomePath.endsWith(".genome")) { File archiveFile; if (HttpUtils.getInstance().isURL(genomePath.toLowerCase())) { // We need a local copy, as there is no http zip file reader URL genomeArchiveURL = new URL(genomePath); String cachedFilename = Utilities.getFileNameFromURL( URLDecoder.decode(new URL(genomePath).getFile(), "UTF-8")); if (!Globals.getGenomeCacheDirectory().exists()) { Globals.getGenomeCacheDirectory().mkdir(); } archiveFile = new File(Globals.getGenomeCacheDirectory(), cachedFilename); refreshCache(archiveFile, genomeArchiveURL); } else { archiveFile = new File(genomePath); } genomeDescriptor = parseGenomeArchiveFile(archiveFile); LinkedHashMap<String, Chromosome> chromMap = loadCytobandFile(genomeDescriptor); Map<String, String> aliases = loadAliasFile(genomeDescriptor); final String id = genomeDescriptor.getId(); final String displayName = genomeDescriptor.getName(); boolean isFasta = genomeDescriptor.isFasta(); currentGenome = new Genome(id, displayName, genomeDescriptor.getSequenceLocation(), isFasta); currentGenome.setChromosomeMap(chromMap, genomeDescriptor.isChromosomesAreOrdered()); if (aliases != null) currentGenome.addChrAliases(aliases); if(!Globals.isHeadless()) { updateGeneTrack(genomeDescriptor); } } else { // Assume its a fasta String fastaPath = null; String fastaIndexPath = null; if (genomePath.endsWith(".fai")) { fastaPath = genomePath.substring(0, genomePath.length() - 4); fastaIndexPath = genomePath; } else { fastaPath = genomePath; fastaIndexPath = genomePath + ".fai"; } if (!FileUtils.resourceExists(fastaIndexPath)) { throw new RuntimeException("<html>No index found, fasta files must be indexed.<br>" + "Indexes can be created with samtools (http://samtools.sourceforge.net/)<br>" + "or Picard(http://picard.sourceforge.net/)."); } String id = fastaPath; String name = (new File(fastaPath)).getName(); currentGenome = new Genome(id, name, fastaPath, true); IGV.getInstance().createGeneTrack(currentGenome, null, null, null, null); } if (monitor != null) { monitor.fireProgressChange(25); } // Do this last so that user defined aliases have preference. currentGenome.loadUserDefinedAliases(); return currentGenome; } catch (SocketException e) { throw new GenomeServerException("Server connection error", e); } } private void updateGeneTrack(GenomeDescriptor genomeDescriptor) throws IOException { InputStream geneStream = null; try { geneStream = genomeDescriptor.getGeneStream(); AsciiLineReader reader = geneStream == null ? null : new AsciiLineReader(geneStream); IGV.getInstance().createGeneTrack(currentGenome, reader, genomeDescriptor.getGeneFileName(), genomeDescriptor.getGeneTrackName(), genomeDescriptor.getUrl()); } finally { if (geneStream != null) geneStream.close(); } } /** * Load the cytoband file specified in the genome descriptor and return an ordered hash map of * chromsome name -> chromosome. This is a legacy method, kept for backward compatibiltiy of * .genome files in which the chromosome lengths are specified in a cytoband file. 
* * @param genomeDescriptor * @return */ private LinkedHashMap<String, Chromosome> loadCytobandFile(GenomeDescriptor genomeDescriptor) { InputStream is = null; try { is = genomeDescriptor.getCytoBandStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(is)); return CytoBandFileParser.loadData(reader); } catch (IOException ex) { log.warn("Error loading cytoband file", ex); throw new RuntimeException("Error loading cytoband file" + genomeDescriptor.cytoBandFileName); } finally { try { if (is != null) { is.close(); } } catch (IOException ex) { log.warn("Error closing zip stream!", ex); } } } /** * Load the chromosome alias file, if any, specified in the genome descriptor. * * @param genomeDescriptor * @return The chromosome alias map, or null if none is defined. */ private Map<String, String> loadAliasFile(GenomeDescriptor genomeDescriptor) { InputStream aliasStream = null; try { aliasStream = genomeDescriptor.getChrAliasStream(); if (aliasStream != null) { Map<String, String> chrAliasTable = new HashMap(); BufferedReader reader = new BufferedReader(new InputStreamReader(aliasStream)); String nextLine = ""; while ((nextLine = reader.readLine()) != null) { String[] kv = nextLine.split("\t"); if (kv.length > 1) { chrAliasTable.put(kv[0], kv[1]); } } return chrAliasTable; } else { return null; } } catch (Exception e) { // We don't want to bomb if the alias load fails. Just log it and proceed. log.error("Error loading chromosome alias table"); return null; } finally { try { if (aliasStream != null) { aliasStream.close(); } } catch (IOException ex) { log.warn("Error closing zip stream!", ex); } } } /** * Refresh a locally cached genome * * @param archiveFile * @param genomeArchiveURL * @throws IOException */ private void refreshCache(File archiveFile, URL genomeArchiveURL) { // Look in cache first try { if (archiveFile.exists()) { long fileLength = archiveFile.length(); long contentLength = HttpUtils.getInstance().getContentLength(genomeArchiveURL); if (contentLength <= 0) { log.info("Skipping genome update of " + archiveFile.getName() + " due to unknown content length"); } // Force an update of cached genome if file length does not equal remote content length boolean forceUpdate = (contentLength != fileLength) && PreferenceManager.getInstance().getAsBoolean(PreferenceManager.AUTO_UPDATE_GENOMES); if (forceUpdate) { log.info("Refreshing genome: " + genomeArchiveURL.toString()); File tmpFile = new File(archiveFile.getAbsolutePath() + ".tmp"); if (HttpUtils.getInstance().downloadFile(genomeArchiveURL.toExternalForm(), tmpFile)) { FileUtils.copyFile(tmpFile, archiveFile); tmpFile.deleteOnExit(); } } } else { // Copy file directly from the server to local cache. HttpUtils.getInstance().downloadFile(genomeArchiveURL.toExternalForm(), archiveFile); } } catch (Exception e) { log.error("Error refreshing genome cache. ", e); MessageUtils.showMessage(("An error was encountered refreshing the genome cache: " + e.getMessage() + "<br> If this problem persists please contact [email protected]")); } } /** * Creates a genome descriptor. 
*/ public GenomeDescriptor parseGenomeArchiveFile(File f) throws IOException { String zipFilePath = f.getAbsolutePath(); if (!f.exists()) { log.error("Genome file: " + f.getAbsolutePath() + " does not exist."); return null; } GenomeDescriptor genomeDescriptor = null; Map<String, ZipEntry> zipEntries = new HashMap(); ZipFile zipFile = new ZipFile(zipFilePath); ZipInputStream zipInputStream = null; try { zipInputStream = new ZipInputStream(new FileInputStream(f)); ZipEntry zipEntry = zipInputStream.getNextEntry(); while (zipEntry != null) { String zipEntryName = zipEntry.getName(); zipEntries.put(zipEntryName, zipEntry); if (zipEntryName.equalsIgnoreCase(Globals.GENOME_ARCHIVE_PROPERTY_FILE_NAME)) { InputStream inputStream = zipFile.getInputStream(zipEntry); Properties properties = new Properties(); properties.load(inputStream); // Cytoband String cytobandZipEntryName = properties.getProperty(Globals.GENOME_ARCHIVE_CYTOBAND_FILE_KEY); // RefFlat String geneFileName = properties.getProperty(Globals.GENOME_ARCHIVE_GENE_FILE_KEY); String chrAliasFileName = properties.getProperty(Globals.GENOME_CHR_ALIAS_FILE_KEY); String sequenceLocation = properties.getProperty(Globals.GENOME_ARCHIVE_SEQUENCE_FILE_LOCATION_KEY); if ((sequenceLocation != null) && !HttpUtils.getInstance().isURL(sequenceLocation)) { File sequenceFolder = null; // Relative or absolute location? if (sequenceLocation.startsWith("/") || sequenceLocation.startsWith("\\")) { sequenceFolder = new File(sequenceLocation); } else { File tempZipFile = new File(zipFilePath); sequenceFolder = new File(tempZipFile.getParent(), sequenceLocation); } sequenceLocation = sequenceFolder.getCanonicalPath(); sequenceLocation.replace('\\', '/'); } int version = 0; String versionString = properties.getProperty(Globals.GENOME_ARCHIVE_VERSION_KEY); if (versionString != null) { try { version = Integer.parseInt(versionString); } catch (Exception e) { log.error("Error parsing version string: " + versionString); } } boolean chrNamesAltered = false; String chrNamesAlteredString = properties.getProperty("filenamesAltered"); if (chrNamesAlteredString != null) { try { chrNamesAltered = Boolean.parseBoolean(chrNamesAlteredString); } catch (Exception e) { log.error("Error parsing version string: " + versionString); } } boolean chromosomesAreOrdered = false; String tmp = properties.getProperty(Globals.GENOME_ORDERED_KEY); if (tmp != null) { try { chromosomesAreOrdered = Boolean.parseBoolean(tmp); } catch (Exception e) { log.error("Error parsing ordered string: " + tmp); } } String url = properties.getProperty(Globals.GENOME_URL_KEY); // The new descriptor genomeDescriptor = new GenomeZipDescriptor( properties.getProperty(Globals.GENOME_ARCHIVE_NAME_KEY), version, chrNamesAltered, properties.getProperty(Globals.GENOME_ARCHIVE_ID_KEY), cytobandZipEntryName, geneFileName, chrAliasFileName, properties.getProperty(Globals.GENOME_GENETRACK_NAME, "Gene"), sequenceLocation, zipFile, zipEntries, chromosomesAreOrdered); if (url != null) { genomeDescriptor.setUrl(url); } } zipEntry = zipInputStream.getNextEntry(); } } finally { try { if (zipInputStream != null) { zipInputStream.close(); } } catch (IOException ex) { log.warn("Error closing imported genome zip stream!", ex); } } return genomeDescriptor; } boolean serverGenomeListUnreachable = false; /** * Gets a list of all the server genome archive files that * IGV knows about. * * @param excludedArchivesUrls The set of file location to exclude in the return list. 
* @return LinkedHashSet<GenomeListItem> * @throws IOException * @see GenomeListItem */ public List<GenomeListItem> getServerGenomeArchiveList(Set excludedArchivesUrls) throws IOException { if (serverGenomeListUnreachable) { return null; } if (serverGenomeArchiveList == null) { serverGenomeArchiveList = new LinkedList(); BufferedReader dataReader = null; InputStream inputStream = null; String genomeListURLString = ""; try { genomeListURLString = PreferenceManager.getInstance().getGenomeListURL(); URL serverGenomeURL = new URL(genomeListURLString); if (HttpUtils.getInstance().isURL(genomeListURLString)) { inputStream = HttpUtils.getInstance().openConnectionStream(serverGenomeURL); } else { File file = new File(genomeListURLString.startsWith("file:") ? serverGenomeURL.getFile() : genomeListURLString); inputStream = new FileInputStream(file); } dataReader = new BufferedReader(new InputStreamReader(inputStream)); String genomeRecord; while ((genomeRecord = dataReader.readLine()) != null) { if (genomeRecord.startsWith("<") || genomeRecord.startsWith("(#")) { continue; } if (genomeRecord != null) { genomeRecord = genomeRecord.trim(); String[] fields = genomeRecord.split("\t"); if ((fields != null) && (fields.length >= 3)) { // Throw away records we don't want to see if (excludedArchivesUrls != null) { if (excludedArchivesUrls.contains(fields[1])) { continue; } } int version = 0; if (fields.length > 3) { try { version = Integer.parseInt(fields[3]); } catch (Exception e) { log.error("Error parsing genome version: " + fields[0], e); } } //String displayableName, String url, String id, int version, boolean isUserDefined String name = fields[0]; String url = fields[1]; String id = fields[2]; boolean valid = true; if (url.length() == 0) { log.error("Genome entry : " + name + " has an empty URL string. Check for extra tabs in the definition file: " + PreferenceManager.getInstance().getGenomeListURL()); valid = false; } // TODO -- more validation if (valid) { try { GenomeListItem item = new GenomeListItem(fields[0], fields[1], fields[2], false); serverGenomeArchiveList.add(item); } catch (Exception e) { log.error( "Error reading a line from server genome list" + " line was: [" + genomeRecord + "]", e); } } } else { log.error("Found invalid server genome list record: " + genomeRecord); } } } } catch (Exception e) { serverGenomeListUnreachable = true; log.error("Error fetching genome list: ", e); ConfirmDialog.optionallyShowInfoDialog("Warning: could not connect to the genome server (" + genomeListURLString + "). Only locally defined genomes will be available.", PreferenceManager.SHOW_GENOME_SERVER_WARNING); } finally { if (dataReader != null) { dataReader.close(); } if (inputStream != null) { inputStream.close(); } } } return serverGenomeArchiveList; } /** * Gets a list of all the user-defined genome archive files that * IGV knows about. 
* * @return LinkedHashSet<GenomeListItem> * @throws IOException * @see GenomeListItem */ public List<GenomeListItem> getUserDefinedGenomeArchiveList() throws IOException { if (userDefinedGenomeArchiveList == null) { boolean updateClientGenomeListFile = false; userDefinedGenomeArchiveList = new LinkedList(); File listFile = new File(Globals.getGenomeCacheDirectory(), USER_DEFINED_GENOME_LIST_FILE); BufferedReader reader = null; boolean mightBeProperties = false; try { reader = new BufferedReader(new FileReader(listFile)); String nextLine; while ((nextLine = reader.readLine()) != null) { if (nextLine.startsWith("#") || nextLine.trim().length() == 0) { mightBeProperties = true; continue; } String[] fields = nextLine.split("\t"); if (fields.length < 3) { if (mightBeProperties && fields[0].contains("=")) { fields = nextLine.split("\\\\t"); if(fields.length < 3) { continue; } int idx = fields[0].indexOf("="); fields[0] = fields[0].substring(idx + 1); } } String file = fields[1]; if (!FileUtils.resourceExists(file)) { updateClientGenomeListFile = true; continue; } GenomeListItem item = new GenomeListItem(fields[0], file, fields[2], true); userDefinedGenomeArchiveList.add(item); } } finally { if (reader != null) reader.close(); } if (updateClientGenomeListFile) { updateImportedGenomePropertyFile(); } } return userDefinedGenomeArchiveList; } /** * Method description */ public void clearGenomeCache() { File[] files = Globals.getGenomeCacheDirectory().listFiles(); for (File file : files) { if (file.getName().toLowerCase().endsWith(Globals.GENOME_FILE_EXTENSION)) { file.delete(); } } } /** * Gets a list of all the locally cached genome archive files that * IGV knows about. * * @return LinkedHashSet<GenomeListItem> * @throws IOException * @see GenomeListItem */ public List<GenomeListItem> getCachedGenomeArchiveList() throws IOException { if (cachedGenomeArchiveList == null) { cachedGenomeArchiveList = new LinkedList(); if (!Globals.getGenomeCacheDirectory().exists()) { return cachedGenomeArchiveList; } File[] files = Globals.getGenomeCacheDirectory().listFiles(); for (File file : files) { if (file.isDirectory()) { continue; } if (!file.getName().toLowerCase().endsWith(Globals.GENOME_FILE_EXTENSION)) { continue; } ZipFile zipFile = null; FileInputStream fis = null; ZipInputStream zipInputStream = null; try { zipFile = new ZipFile(file); fis = new FileInputStream(file); zipInputStream = new ZipInputStream(new BufferedInputStream(fis)); ZipEntry zipEntry = zipFile.getEntry(Globals.GENOME_ARCHIVE_PROPERTY_FILE_NAME); if (zipEntry == null) { continue; // Should never happen } InputStream inputStream = zipFile.getInputStream(zipEntry); Properties properties = new Properties(); properties.load(inputStream); int version = 0; if (properties.containsKey(Globals.GENOME_ARCHIVE_VERSION_KEY)) { try { version = Integer.parseInt( properties.getProperty(Globals.GENOME_ARCHIVE_VERSION_KEY)); } catch (Exception e) { log.error("Error parsing genome version: " + version, e); } } GenomeListItem item = new GenomeListItem(properties.getProperty(Globals.GENOME_ARCHIVE_NAME_KEY), file.getAbsolutePath(), properties.getProperty(Globals.GENOME_ARCHIVE_ID_KEY), false); cachedGenomeArchiveList.add(item); } catch (ZipException ex) { log.error("\nZip error unzipping cached genome.", ex); try { file.delete(); zipInputStream.close(); } catch (Exception e) { //ignore exception when trying to delete file } } catch (IOException ex) { log.warn("\nIO error unzipping cached genome.", ex); try { file.delete(); } catch (Exception e) { //ignore 
exception when trying to delete file } } finally { try { if (zipInputStream != null) { zipInputStream.close(); } if (zipFile != null) { zipFile.close(); } if (fis != null) { fis.close(); } } catch (IOException ex) { log.warn("Error closing genome zip stream!", ex); } } } } return cachedGenomeArchiveList; } /** * Reconstructs the user-define genome property file. * * @throws IOException */ public void updateImportedGenomePropertyFile() { if (userDefinedGenomeArchiveList == null) { return; } File listFile = new File(Globals.getGenomeCacheDirectory(), USER_DEFINED_GENOME_LIST_FILE); File backup = null; if (listFile.exists()) { backup = new File(listFile.getAbsolutePath() + ".bak"); try { FileUtils.copyFile(listFile, backup); } catch (IOException e) { log.error("Error backing up user-defined genome list file", e); backup = null; } } PrintWriter writer = null; try { writer = new PrintWriter(new BufferedWriter(new FileWriter(listFile))); for (GenomeListItem genomeListItem : userDefinedGenomeArchiveList) { writer.print(genomeListItem.getDisplayableName()); writer.print("\t"); writer.print(genomeListItem.getLocation()); writer.print("\t"); writer.println(genomeListItem.getId()); } } catch (Exception e) { if (backup != null) { try { FileUtils.copyFile(backup, listFile); } catch (IOException e1) { log.error("Error restoring genome-list file from backup"); } } log.error("Error updating genome property file", e); MessageUtils.showMessage("Error updating user-defined genome list " + e.getMessage()); } finally { if (writer != null) writer.close(); if (backup != null) backup.delete(); } } /** * Create a genome archive (.genome) file. * * @param genomeZipLocation A File path to a directory in which the .genome * output file will be written. * @param cytobandFileName A File path to a file that contains cytoband data. * @param refFlatFileName A File path to a gene file. * @param fastaFileName A File path to a FASTA file, a .gz file containing a * single FASTA file, or a directory containing ONLY FASTA files. * @param relativeSequenceLocation A relative path to the location * (relative to the .genome file to be created) where the sequence data for * the new genome will be written. * @param genomeDisplayName The unique user-readable name of the new genome. * @param genomeId The id to be assigned to the genome. * @param genomeFileName The file name (not path) of the .genome archive * file to be created. * @param monitor A ProgressMonitor used to track progress - null, * if no progress updating is required. 
* @param sequenceOutputLocationOverride * @return GenomeListItem * @throws FileNotFoundException */ public GenomeListItem defineGenome(String genomeZipLocation, String cytobandFileName, String refFlatFileName, String fastaFileName, String chrAliasFileName, String relativeSequenceLocation, String genomeDisplayName, String genomeId, String genomeFileName, ProgressMonitor monitor, String sequenceOutputLocationOverride) throws IOException { File zipFileLocation = null; File fastaInputFile = null; File refFlatFile = null; File cytobandFile = null; File chrAliasFile = null; File sequenceLocation; if ((genomeZipLocation != null) && (genomeZipLocation.trim().length() != 0)) { zipFileLocation = new File(genomeZipLocation); PreferenceManager.getInstance().setLastGenomeImportDirectory(zipFileLocation); } if ((cytobandFileName != null) && (cytobandFileName.trim().length() != 0)) { cytobandFile = new File(cytobandFileName); } if ((refFlatFileName != null) && (refFlatFileName.trim().length() != 0)) { refFlatFile = new File(refFlatFileName); } if ((chrAliasFileName != null) && (chrAliasFileName.trim().length() != 0)) { chrAliasFile = new File(chrAliasFileName); } if ((fastaFileName != null) && (fastaFileName.trim().length() != 0)) { fastaInputFile = new File(fastaFileName); // The sequence info only matters if we have FASTA if ((relativeSequenceLocation != null) && (relativeSequenceLocation.trim().length() != 0)) { sequenceLocation = new File(genomeZipLocation, relativeSequenceLocation); if (!sequenceLocation.exists()) { sequenceLocation.mkdir(); } } } if (monitor != null) monitor.fireProgressChange(25); File archiveFile = (new GenomeImporter()).createGenomeArchive(zipFileLocation, genomeFileName, genomeId, genomeDisplayName, relativeSequenceLocation, fastaInputFile, refFlatFile, cytobandFile, chrAliasFile, sequenceOutputLocationOverride, monitor); if (monitor != null) monitor.fireProgressChange(75); if (archiveFile == null) { return null; } else { GenomeListItem newItem = new GenomeListItem(genomeDisplayName, archiveFile.getAbsolutePath(), genomeId, true); addUserDefineGenomeItem(newItem); return newItem; } } public String getGenomeId() { return currentGenome == null ? null : currentGenome.getId(); } public Genome getCurrentGenome() { return currentGenome; } public void addUserDefineGenomeItem(GenomeListItem genomeListItem) { userDefinedGenomeArchiveList.add(0, genomeListItem); updateImportedGenomePropertyFile(); } }
Added logging statements
src/org/broad/igv/feature/genome/GenomeManager.java
Added logging statements
Java
mit
74f53ecc61b11a37cf3c7c09323760bd1f899a39
0
Canadensys/vascan,Canadensys/vascan,Canadensys/vascan
package net.canadensys.dataportal.vascan.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import net.canadensys.dataportal.vascan.ChecklistService; import net.canadensys.dataportal.vascan.constant.Rank; import net.canadensys.dataportal.vascan.dao.TaxonDAO; import net.canadensys.dataportal.vascan.model.TaxonLookupModel; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.ObjectUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @Service("checklistService") public class ChecklistServiceImpl implements ChecklistService{ private static final String CHECKED = "checked=\"checked\""; private static final String SELECTED = "selected=\"selected\""; private static final List<String> CHECKLIST_RELATED_QUERY_TERMS = new ArrayList<String>(8); static{ CHECKLIST_RELATED_QUERY_TERMS.add("province"); CHECKLIST_RELATED_QUERY_TERMS.add("combination"); CHECKLIST_RELATED_QUERY_TERMS.add("habit"); CHECKLIST_RELATED_QUERY_TERMS.add("taxon"); CHECKLIST_RELATED_QUERY_TERMS.add("status"); CHECKLIST_RELATED_QUERY_TERMS.add("rank"); CHECKLIST_RELATED_QUERY_TERMS.add("sort"); CHECKLIST_RELATED_QUERY_TERMS.add("hybrids"); CHECKLIST_RELATED_QUERY_TERMS.add("limitResults"); } @Autowired private TaxonDAO taxonDAO; @Transactional(readOnly=true) @Override public Map<String,Object> retrieveChecklistData(Map<String,String[]> parameters){ //this will be used to know if you should provide default values boolean noChecklistQuery = !containsChecklistQueryParameter(parameters); Map<String,Object> data = new HashMap<String,Object>(); /* request params */ /* provinces */ String[] province = null; if(parameters.get("province") != null) province = parameters.get("province"); /* combination */ String combination = null; if(parameters.get("combination") != null){ combination = parameters.get("combination")[0]; } /* habitus */ String habit = null; if(parameters.get("habit") != null) habit = parameters.get("habit")[0]; /* taxonid */ int taxon = -1; if(parameters.get("taxon") != null) taxon = Integer.valueOf(parameters.get("taxon")[0]); /* distribution */ String[] status = null; if(parameters.get("status") != null) status = parameters.get("status"); /* rank */ String[] rank = null; if(parameters.get("rank") != null) rank = parameters.get("rank"); /* include hybrids */ boolean hybrids; String shybrids = null; if(parameters.get("hybrids") != null) shybrids = parameters.get("hybrids")[0]; /* sort */ String sort = null; if(parameters.get("sort") != null) sort = parameters.get("sort")[0]; /* limit number of results */ String nolimit = null; if(parameters.get("nolimit") != null) nolimit = parameters.get("nolimit")[0]; String limitResults = null; if(parameters.get("limitResults") != null) limitResults = parameters.get("limitResults")[0]; /* postback values checks & selects */ // for taxon dropdown list, property selected is added to taxon hashmap Map<String,String> habitusSelected = new HashMap<String,String>(); Map<String,String> combinationSelected = new HashMap<String,String>(); Map<String,String> sortSelected = new HashMap<String,String>(); Map<String,String> statusChecked = new HashMap<String,String>(); Map<String,String> rankChecked = new HashMap<String,String>(); Map<String,String> limitResultsChecked = new HashMap<String,String>(); Map<String,String> hybridsChecked = new HashMap<String,String>(); 
Map<String,String> territoryChecked = new HashMap<String,String>(); if(habit != "" && habit != null){ habitusSelected.put(habit.toLowerCase(),SELECTED); } else{ habitusSelected.put("all",SELECTED); habit = "all"; } if(combination != "" && combination != null){ combinationSelected.put(combination.toLowerCase(),SELECTED); } else{ combinationSelected.put("anyof",SELECTED); combination = "anyof"; } // get statuses from the querystring. if statuses are empty, force // native, introduced and ephemeral... if(status != null){ for(String s : status){ statusChecked.put(s.toLowerCase(),CHECKED); } } else{ statusChecked.put("introduced",CHECKED); statusChecked.put("native",CHECKED); statusChecked.put("ephemeral",CHECKED); statusChecked.put("excluded",CHECKED); statusChecked.put("extirpated",CHECKED); statusChecked.put("doubtful",CHECKED); String statuses[] = {"introduced","native","ephemeral","excluded","extirpated","doubtful"}; status = statuses; } // checked provinces and territories if(province != null){ for(String s : province){ territoryChecked.put(s.toUpperCase(),CHECKED); } } // hybrids checkbox // the default value is true but if not check, the form will not send it. if(BooleanUtils.toBoolean(shybrids) || noChecklistQuery){ hybrids = true; hybridsChecked.put("display",CHECKED); } else{ hybrids = false; hybridsChecked.put("display",""); } // sort options if(sort != "" && sort != null){ sortSelected.put(sort.toLowerCase(),SELECTED); } else{ sort = "taxonomically"; sortSelected.put(sort,SELECTED); } String[] ranks = { Rank.CLASS_LABEL, Rank.SUBCLASS_LABEL, Rank.SUPERORDER_LABEL, Rank.ORDER_LABEL, Rank.FAMILY_LABEL, Rank.SUBFAMILY_LABEL, Rank.TRIBE_LABEL, Rank.SUBTRIBE_LABEL, Rank.GENUS_LABEL, Rank.SUBGENUS_LABEL, Rank.SECTION_LABEL, Rank.SUBSECTION_LABEL, Rank.SERIES_LABEL, Rank.SPECIES_LABEL, Rank.SUBSPECIES_LABEL, Rank.VARIETY_LABEL }; // init all ranks as checked for(String r : ranks){ rankChecked.put(r,CHECKED); } // check main_rank & sub_rank "All" checkbox since all ranks are checked rankChecked.put("main_rank",CHECKED); rankChecked.put("sub_rank",CHECKED); // if rank is received from querystring, reinit all ranks to unchecked and only check ranks present in querystring int main_rank = 0; int sub_rank = 0; if(rank != null){ for(String r : ranks){ rankChecked.put(r,""); } rankChecked.put("main_rank",""); rankChecked.put("sub_rank",""); for(String r : rank){ rankChecked.put(r.toLowerCase(),CHECKED); if(r.toLowerCase().equals(Rank.CLASS_LABEL) || r.toLowerCase().equals(Rank.ORDER_LABEL) || r.toLowerCase().equals(Rank.FAMILY_LABEL) || r.toLowerCase().equals(Rank.GENUS_LABEL) || r.toLowerCase().equals(Rank.SPECIES_LABEL)) main_rank++; else sub_rank++; } } // there must be a better way to do this... maybe only with jquery stuff... 
if(main_rank == 5) rankChecked.put("main_rank",CHECKED); if(sub_rank == 11) rankChecked.put("sub_rank",CHECKED); // limit checkbox if(nolimit == null && limitResults == null){ limitResults = "true"; limitResultsChecked.put("display",CHECKED); } else if(nolimit != null && limitResults != null){ limitResults = "true"; limitResultsChecked.put("display",CHECKED); } else{ limitResults = ""; limitResultsChecked.put("display",""); } /* */ boolean searchOccured = false; Integer totalResults = 0; List<Map<String,Object>> taxonDistributions = new ArrayList<Map<String,Object>>(); if(taxon != -1){ searchOccured = true; int limitResultsTo = 0; totalResults = taxonDAO.countTaxonLookup(habit, taxon,combination, province, status, rank, hybrids); if(limitResults.equals("true")){ limitResultsTo = 200; } Iterator<TaxonLookupModel> it = taxonDAO.loadTaxonLookup(limitResultsTo, habit, taxon, combination, province, status, rank, hybrids, sort); if(it !=null){ while(it.hasNext()){ HashMap<String,Object> distributionData = new HashMap<String,Object>(); TaxonLookupModel currTlm = it.next(); distributionData.put("fullScientificName",currTlm.getCalnamehtml()); distributionData.put("taxonId",currTlm.getTaxonId()); distributionData.put("rank",currTlm.getRank()); List<Map<String,Object>> taxonHabitus = new ArrayList<Map<String,Object>>(); String habituses[] = currTlm.getCalhabit().split(","); if(habituses != null){ for(String h : habituses){ HashMap<String,Object> habitusData = new HashMap<String,Object>(); habitusData.put("habit",h); taxonHabitus.add(habitusData); } } distributionData.put("habit",taxonHabitus); distributionData.put("AB",currTlm.getAB()); distributionData.put("BC",currTlm.getBC()); distributionData.put("GL",currTlm.getGL()); distributionData.put("NL_L",currTlm.getNL_L()); distributionData.put("MB",currTlm.getMB()); distributionData.put("NB",currTlm.getNB()); distributionData.put("NL_N",currTlm.getNL_N()); distributionData.put("NT",currTlm.getNT()); distributionData.put("NS",currTlm.getNS()); distributionData.put("NU",currTlm.getNU()); distributionData.put("ON",currTlm.getON()); distributionData.put("PE",currTlm.getPE()); distributionData.put("QC",currTlm.getQC()); distributionData.put("PM",currTlm.getPM()); distributionData.put("SK",currTlm.getSK()); distributionData.put("YT",currTlm.getYT()); taxonDistributions.add(distributionData); } } } data.put("distributions",taxonDistributions); data.put("habit",habitusSelected); data.put("sort",sortSelected); data.put("hybrids",hybridsChecked); data.put("status",statusChecked); data.put("limitResults",limitResultsChecked); data.put("rank",rankChecked); data.put("combination",combinationSelected); data.put("territory",territoryChecked); data.put("taxons",getChecklistTaxons(taxon)); data.put("isSearch",searchOccured); data.put("numResults",ObjectUtils.defaultIfNull(totalResults,0).intValue()); return data; } public List<Map<String,Object>> getChecklistTaxons(int selectedTaxonId){ List<Map<String,Object>> results = new ArrayList<Map<String,Object>>(); List<Object[]> taxons = taxonDAO.getAcceptedTaxon(Rank.GENUS); if(taxons != null){ HashMap<String,Object> t; for(Object[] taxon : taxons){ int id = (Integer)taxon[0]; String calname = (String)taxon[1]; String rank = (String)taxon[2]; t = new HashMap<String,Object>(); if(id == selectedTaxonId){ t.put("selected", "selected=\"selected\""); } t.put("id",id); t.put("calname", calname); t.put("rank", rank); results.add(t); } taxons.clear(); } return results; } /** * Check if the received parameters contains at least one 
	 * parameter related to the checklist builder.
	 * @param parameters
	 * @return
	 */
	private boolean containsChecklistQueryParameter(Map<String,String[]> parameters){
		for(String currKey : parameters.keySet()){
			if(CHECKLIST_RELATED_QUERY_TERMS.contains(currKey)){
				return true;
			}
		}
		return false;
	}
}
src/main/java/net/canadensys/dataportal/vascan/impl/ChecklistServiceImpl.java
package net.canadensys.dataportal.vascan.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import net.canadensys.dataportal.vascan.ChecklistService; import net.canadensys.dataportal.vascan.constant.Rank; import net.canadensys.dataportal.vascan.dao.TaxonDAO; import net.canadensys.dataportal.vascan.model.TaxonLookupModel; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.ObjectUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @Service("checklistService") public class ChecklistServiceImpl implements ChecklistService{ private static final String CHECKED = "checked=\"checked\""; private static final String SELECTED = "selected=\"selected\""; @Autowired private TaxonDAO taxonDAO; @Transactional(readOnly=true) @Override public Map<String,Object> retrieveChecklistData(Map<String,String[]> parameters){ Map<String,Object> data = new HashMap<String,Object>(); /* request params */ /* provinces */ String[] province = null; if(parameters.get("province") != null) province = parameters.get("province"); /* combination */ String combination = null; if(parameters.get("combination") != null){ combination = parameters.get("combination")[0]; } /* habitus */ String habit = null; if(parameters.get("habit") != null) habit = parameters.get("habit")[0]; /* taxonid */ int taxon = -1; if(parameters.get("taxon") != null) taxon = Integer.valueOf(parameters.get("taxon")[0]); /* distribution */ String[] status = null; if(parameters.get("status") != null) status = parameters.get("status"); /* rank */ String[] rank = null; if(parameters.get("rank") != null) rank = parameters.get("rank"); /* include hybrids */ boolean hybrids; String shybrids = null; if(parameters.get("hybrids") != null) shybrids = parameters.get("hybrids")[0]; /* sort */ String sort = null; if(parameters.get("sort") != null) sort = parameters.get("sort")[0]; /* limit number of results */ String nolimit = null; if(parameters.get("nolimit") != null) nolimit = parameters.get("nolimit")[0]; String limitResults = null; if(parameters.get("limitResults") != null) limitResults = parameters.get("limitResults")[0]; /* postback values checks & selects */ // for taxon dropdown list, property selected is added to taxon hashmap Map<String,String> habitusSelected = new HashMap<String,String>(); Map<String,String> combinationSelected = new HashMap<String,String>(); Map<String,String> sortSelected = new HashMap<String,String>(); Map<String,String> statusChecked = new HashMap<String,String>(); Map<String,String> rankChecked = new HashMap<String,String>(); Map<String,String> limitResultsChecked = new HashMap<String,String>(); Map<String,String> hybridsChecked = new HashMap<String,String>(); Map<String,String> territoryChecked = new HashMap<String,String>(); if(habit != "" && habit != null){ habitusSelected.put(habit.toLowerCase(),SELECTED); } else{ habitusSelected.put("all",SELECTED); habit = "all"; } if(combination != "" && combination != null){ combinationSelected.put(combination.toLowerCase(),SELECTED); } else{ combinationSelected.put("anyof",SELECTED); combination = "anyof"; } // get statuses from the querystring. if statuses are empty, force // native, introduced and ephemeral... 
if(status != null){ for(String s : status){ statusChecked.put(s.toLowerCase(),CHECKED); } } else{ statusChecked.put("introduced",CHECKED); statusChecked.put("native",CHECKED); statusChecked.put("ephemeral",CHECKED); statusChecked.put("excluded",CHECKED); statusChecked.put("extirpated",CHECKED); statusChecked.put("doubtful",CHECKED); String statuses[] = {"introduced","native","ephemeral","excluded","extirpated","doubtful"}; status = statuses; } // checked provinces and territories if(province != null){ for(String s : province){ territoryChecked.put(s.toUpperCase(),CHECKED); } } // hybrids checkbox if(BooleanUtils.toBoolean(shybrids)){ hybrids = true; hybridsChecked.put("display",CHECKED); } else{ hybrids = false; hybridsChecked.put("display",""); } // sort options if(sort != "" && sort != null){ sortSelected.put(sort.toLowerCase(),SELECTED); } else{ sort = "taxonomically"; sortSelected.put(sort,SELECTED); } String[] ranks = { Rank.CLASS_LABEL, Rank.SUBCLASS_LABEL, Rank.SUPERORDER_LABEL, Rank.ORDER_LABEL, Rank.FAMILY_LABEL, Rank.SUBFAMILY_LABEL, Rank.TRIBE_LABEL, Rank.SUBTRIBE_LABEL, Rank.GENUS_LABEL, Rank.SUBGENUS_LABEL, Rank.SECTION_LABEL, Rank.SUBSECTION_LABEL, Rank.SERIES_LABEL, Rank.SPECIES_LABEL, Rank.SUBSPECIES_LABEL, Rank.VARIETY_LABEL }; // init all ranks as checked for(String r : ranks){ rankChecked.put(r,CHECKED); } // check main_rank & sub_rank "All" checkbox since all ranks are checked rankChecked.put("main_rank",CHECKED); rankChecked.put("sub_rank",CHECKED); // if rank is received from querystring, reinit all ranks to unchecked and only check ranks present in querystring int main_rank = 0; int sub_rank = 0; if(rank != null){ for(String r : ranks){ rankChecked.put(r,""); } rankChecked.put("main_rank",""); rankChecked.put("sub_rank",""); for(String r : rank){ rankChecked.put(r.toLowerCase(),CHECKED); if(r.toLowerCase().equals(Rank.CLASS_LABEL) || r.toLowerCase().equals(Rank.ORDER_LABEL) || r.toLowerCase().equals(Rank.FAMILY_LABEL) || r.toLowerCase().equals(Rank.GENUS_LABEL) || r.toLowerCase().equals(Rank.SPECIES_LABEL)) main_rank++; else sub_rank++; } } // there must be a better way to do this... maybe only with jquery stuff... 
if(main_rank == 5) rankChecked.put("main_rank",CHECKED); if(sub_rank == 11) rankChecked.put("sub_rank",CHECKED); // limit checkbox if(nolimit == null && limitResults == null){ limitResults = "true"; limitResultsChecked.put("display",CHECKED); } else if(nolimit != null && limitResults != null){ limitResults = "true"; limitResultsChecked.put("display",CHECKED); } else{ limitResults = ""; limitResultsChecked.put("display",""); } /* */ boolean searchOccured = false; Integer totalResults = 0; List<Map<String,Object>> taxonDistributions = new ArrayList<Map<String,Object>>(); if(taxon != -1){ searchOccured = true; int limitResultsTo = 0; totalResults = taxonDAO.countTaxonLookup(habit, taxon,combination, province, status, rank, hybrids); if(limitResults.equals("true")){ limitResultsTo = 200; } Iterator<TaxonLookupModel> it = taxonDAO.loadTaxonLookup(limitResultsTo, habit, taxon, combination, province, status, rank, hybrids, sort); if(it !=null){ while(it.hasNext()){ HashMap<String,Object> distributionData = new HashMap<String,Object>(); TaxonLookupModel currTlm = it.next(); distributionData.put("fullScientificName",currTlm.getCalnamehtml()); distributionData.put("taxonId",currTlm.getTaxonId()); distributionData.put("rank",currTlm.getRank()); List<Map<String,Object>> taxonHabitus = new ArrayList<Map<String,Object>>(); String habituses[] = currTlm.getCalhabit().split(","); if(habituses != null){ for(String h : habituses){ HashMap<String,Object> habitusData = new HashMap<String,Object>(); habitusData.put("habit",h); taxonHabitus.add(habitusData); } } distributionData.put("habit",taxonHabitus); distributionData.put("AB",currTlm.getAB()); distributionData.put("BC",currTlm.getBC()); distributionData.put("GL",currTlm.getGL()); distributionData.put("NL_L",currTlm.getNL_L()); distributionData.put("MB",currTlm.getMB()); distributionData.put("NB",currTlm.getNB()); distributionData.put("NL_N",currTlm.getNL_N()); distributionData.put("NT",currTlm.getNT()); distributionData.put("NS",currTlm.getNS()); distributionData.put("NU",currTlm.getNU()); distributionData.put("ON",currTlm.getON()); distributionData.put("PE",currTlm.getPE()); distributionData.put("QC",currTlm.getQC()); distributionData.put("PM",currTlm.getPM()); distributionData.put("SK",currTlm.getSK()); distributionData.put("YT",currTlm.getYT()); taxonDistributions.add(distributionData); } } } data.put("distributions",taxonDistributions); data.put("habit",habitusSelected); data.put("sort",sortSelected); data.put("hybrids",hybridsChecked); data.put("status",statusChecked); data.put("limitResults",limitResultsChecked); data.put("rank",rankChecked); data.put("combination",combinationSelected); data.put("territory",territoryChecked); data.put("taxons",getChecklistTaxons(taxon)); data.put("isSearch",searchOccured); data.put("numResults",ObjectUtils.defaultIfNull(totalResults,0).intValue()); return data; } public List<Map<String,Object>> getChecklistTaxons(int selectedTaxonId){ List<Map<String,Object>> results = new ArrayList<Map<String,Object>>(); List<Object[]> taxons = taxonDAO.getAcceptedTaxon(Rank.GENUS); if(taxons != null){ HashMap<String,Object> t; for(Object[] taxon : taxons){ int id = (Integer)taxon[0]; String calname = (String)taxon[1]; String rank = (String)taxon[2]; t = new HashMap<String,Object>(); if(id == selectedTaxonId){ t.put("selected", "selected=\"selected\""); } t.put("id",id); t.put("calname", calname); t.put("rank", rank); results.add(t); } taxons.clear(); } return results; } }
fixed hybrids default value
src/main/java/net/canadensys/dataportal/vascan/impl/ChecklistServiceImpl.java
fixed hybrids default value
Java
mpl-2.0
1a714d884c31f6be9d467bf7863753eecc7467d1
0
powsybl/powsybl-core,powsybl/powsybl-core,powsybl/powsybl-core
/** * Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.powsybl.iidm.network.impl; import com.google.common.collect.*; import com.powsybl.commons.PowsyblException; import com.powsybl.math.graph.GraphUtil; import com.powsybl.math.graph.GraphUtil.ConnectedComponentsComputationResult; import com.powsybl.iidm.network.*; import com.powsybl.iidm.network.TwoTerminalsConnectable.Side; import com.powsybl.iidm.network.impl.util.RefChain; import com.powsybl.iidm.network.impl.util.RefObj; import gnu.trove.list.array.TIntArrayList; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; /** * * @author Geoffroy Jamgotchian <geoffroy.jamgotchian at rte-france.com> */ class NetworkImpl extends AbstractIdentifiable<Network> implements Network, MultiStateObject, Stateful { private static final Logger LOGGER = LoggerFactory.getLogger(NetworkImpl.class); private final RefChain<NetworkImpl> ref = new RefChain<>(new RefObj<>(this)); private DateTime caseDate = new DateTime(); // default is the time at which the network has been created private int forecastDistance = 0; private String sourceFormat; private final ObjectStore objectStore = new ObjectStore(); private final StateManagerImpl stateManager; private final NetworkListenerList listeners = new NetworkListenerList(); class BusBreakerViewImpl implements BusBreakerView { @Override public Iterable<Bus> getBuses() { return FluentIterable.from(getVoltageLevels()) .transformAndConcat(vl -> vl.getBusBreakerView().getBuses()); } @Override public Stream<Bus> getBusStream() { return getVoltageLevelStream().flatMap(vl -> vl.getBusBreakerView().getBusStream()); } @Override public Iterable<Switch> getSwitchs() { return getSwitches(); } @Override public Iterable<Switch> getSwitches() { return FluentIterable.from(getVoltageLevels()) .transformAndConcat(vl -> vl.getBusBreakerView().getSwitches()); } @Override public Stream<Switch> getSwitchStream() { return getVoltageLevelStream().flatMap(vl -> vl.getBusBreakerView().getSwitchStream()); } } private final BusBreakerViewImpl busBreakerView = new BusBreakerViewImpl(); class BusViewImpl implements BusView { @Override public Iterable<Bus> getBuses() { return FluentIterable.from(getVoltageLevels()) .transformAndConcat(vl -> vl.getBusView().getBuses()); } @Override public Stream<Bus> getBusStream() { return getVoltageLevelStream().flatMap(vl -> vl.getBusView().getBusStream()); } @Override public Collection<Component> getConnectedComponents() { return Collections.unmodifiableList(states.get().connectedComponentsManager.getConnectedComponents()); } } private final BusViewImpl busView = new BusViewImpl(); NetworkImpl(String id, String name, String sourceFormat) { super(id, name); Objects.requireNonNull(sourceFormat, "source format is null"); this.sourceFormat = sourceFormat; stateManager = new StateManagerImpl(objectStore); states = new StateArray<>(ref, StateImpl::new); // add the network the object list as it is a stateful object // and it needs to be notified when and extension or a reduction of // the state array is requested objectStore.checkAndAdd(this); } @Override public ContainerType getContainerType() { return 
ContainerType.NETWORK; } @Override public DateTime getCaseDate() { return caseDate; } @Override public NetworkImpl setCaseDate(DateTime caseDate) { ValidationUtil.checkCaseDate(this, caseDate); this.caseDate = caseDate; return this; } @Override public int getForecastDistance() { return forecastDistance; } @Override public NetworkImpl setForecastDistance(int forecastDistance) { ValidationUtil.checkForecastDistance(this, forecastDistance); this.forecastDistance = forecastDistance; return this; } @Override public String getSourceFormat() { return sourceFormat; } RefChain<NetworkImpl> getRef() { return ref; } NetworkListenerList getListeners() { return listeners; } public ObjectStore getObjectStore() { return objectStore; } @Override public StateManagerImpl getStateManager() { return stateManager; } @Override public int getStateIndex() { return stateManager.getStateContext().getStateIndex(); } @Override public Set<Country> getCountries() { return getSubstationStream().map(Substation::getCountry).collect(Collectors.toCollection(() -> EnumSet.noneOf(Country.class))); } @Override public int getCountryCount() { return getCountries().size(); } @Override public SubstationAdder newSubstation() { return new SubstationAdderImpl(ref); } @Override public Iterable<Substation> getSubstations() { return Collections.unmodifiableCollection(objectStore.getAll(SubstationImpl.class)); } @Override public Stream<Substation> getSubstationStream() { return objectStore.getAll(SubstationImpl.class).stream().map(Function.identity()); } @Override public int getSubstationCount() { return objectStore.getAll(SubstationImpl.class).size(); } @Override public Iterable<Substation> getSubstations(Country country, String tsoId, String... geographicalTags) { return Substations.filter(getSubstations(), country, tsoId, geographicalTags); } @Override public SubstationImpl getSubstation(String id) { return objectStore.get(id, SubstationImpl.class); } @Override public Iterable<VoltageLevel> getVoltageLevels() { return Iterables.concat(objectStore.getAll(BusBreakerVoltageLevel.class), objectStore.getAll(NodeBreakerVoltageLevel.class)); } @Override public Stream<VoltageLevel> getVoltageLevelStream() { return Stream.concat(objectStore.getAll(BusBreakerVoltageLevel.class).stream(), objectStore.getAll(NodeBreakerVoltageLevel.class).stream()); } @Override public int getVoltageLevelCount() { return objectStore.getAll(BusBreakerVoltageLevel.class).size() + objectStore.getAll(NodeBreakerVoltageLevel.class).size(); } @Override public VoltageLevelExt getVoltageLevel(String id) { return objectStore.get(id, VoltageLevelExt.class); } @Override public LineAdderImpl newLine() { return new LineAdderImpl(this); } @Override public Iterable<Line> getLines() { return Iterables.concat(objectStore.getAll(LineImpl.class), objectStore.getAll(TieLineImpl.class)); } @Override public Branch getBranch(String branchId) { Objects.requireNonNull(branchId); Branch branch = getLine(branchId); if (branch == null) { branch = getTwoWindingsTransformer(branchId); } return branch; } @Override public Iterable<Branch> getBranches() { return Iterables.concat(getLines(), getTwoWindingsTransformers()); } @Override public Stream<Branch> getBranchStream() { return Stream.concat(getLineStream(), getTwoWindingsTransformerStream()); } @Override public int getBranchCount() { return getLineCount() + getTwoWindingsTransformerCount(); } @Override public Stream<Line> getLineStream() { return Stream.concat(objectStore.getAll(LineImpl.class).stream(), 
objectStore.getAll(TieLineImpl.class).stream()); } @Override public int getLineCount() { return objectStore.getAll(LineImpl.class).size() + objectStore.getAll(TieLineImpl.class).size(); } @Override public LineImpl getLine(String id) { LineImpl line = objectStore.get(id, LineImpl.class); if (line == null) { line = objectStore.get(id, TieLineImpl.class); } return line; } @Override public TieLineAdderImpl newTieLine() { return new TieLineAdderImpl(this); } @Override public Iterable<TwoWindingsTransformer> getTwoWindingsTransformers() { return Collections.unmodifiableCollection(objectStore.getAll(TwoWindingsTransformerImpl.class)); } @Override public Stream<TwoWindingsTransformer> getTwoWindingsTransformerStream() { return objectStore.getAll(TwoWindingsTransformerImpl.class).stream().map(Function.identity()); } @Override public int getTwoWindingsTransformerCount() { return objectStore.getAll(TwoWindingsTransformerImpl.class).size(); } @Override public TwoWindingsTransformer getTwoWindingsTransformer(String id) { return objectStore.get(id, TwoWindingsTransformerImpl.class); } @Override public Iterable<ThreeWindingsTransformer> getThreeWindingsTransformers() { return Collections.unmodifiableCollection(objectStore.getAll(ThreeWindingsTransformerImpl.class)); } @Override public Stream<ThreeWindingsTransformer> getThreeWindingsTransformerStream() { return objectStore.getAll(ThreeWindingsTransformerImpl.class).stream().map(Function.identity()); } @Override public int getThreeWindingsTransformerCount() { return objectStore.getAll(ThreeWindingsTransformerImpl.class).size(); } @Override public ThreeWindingsTransformer getThreeWindingsTransformer(String id) { return objectStore.get(id, ThreeWindingsTransformerImpl.class); } @Override public Iterable<Generator> getGenerators() { return Collections.unmodifiableCollection(objectStore.getAll(GeneratorImpl.class)); } @Override public Stream<Generator> getGeneratorStream() { return objectStore.getAll(GeneratorImpl.class).stream().map(Function.identity()); } @Override public int getGeneratorCount() { return objectStore.getAll(GeneratorImpl.class).size(); } @Override public GeneratorImpl getGenerator(String id) { return objectStore.get(id, GeneratorImpl.class); } @Override public Iterable<Load> getLoads() { return Collections.unmodifiableCollection(objectStore.getAll(LoadImpl.class)); } @Override public Stream<Load> getLoadStream() { return objectStore.getAll(LoadImpl.class).stream().map(Function.identity()); } @Override public int getLoadCount() { return objectStore.getAll(LoadImpl.class).size(); } @Override public LoadImpl getLoad(String id) { return objectStore.get(id, LoadImpl.class); } @Override public Iterable<ShuntCompensator> getShunts() { return Collections.unmodifiableCollection(objectStore.getAll(ShuntCompensatorImpl.class)); } @Override public Stream<ShuntCompensator> getShuntStream() { return objectStore.getAll(ShuntCompensatorImpl.class).stream().map(Function.identity()); } @Override public int getShuntCount() { return objectStore.getAll(ShuntCompensatorImpl.class).size(); } @Override public ShuntCompensatorImpl getShunt(String id) { return objectStore.get(id, ShuntCompensatorImpl.class); } @Override public Iterable<DanglingLine> getDanglingLines() { return Collections.unmodifiableCollection(objectStore.getAll(DanglingLineImpl.class)); } @Override public Stream<DanglingLine> getDanglingLineStream() { return objectStore.getAll(DanglingLineImpl.class).stream().map(Function.identity()); } @Override public int getDanglingLineCount() { return 
objectStore.getAll(DanglingLineImpl.class).size(); } @Override public DanglingLineImpl getDanglingLine(String id) { return objectStore.get(id, DanglingLineImpl.class); } @Override public Iterable<StaticVarCompensator> getStaticVarCompensators() { return Collections.unmodifiableCollection(objectStore.getAll(StaticVarCompensatorImpl.class)); } @Override public Stream<StaticVarCompensator> getStaticVarCompensatorStream() { return objectStore.getAll(StaticVarCompensatorImpl.class).stream().map(Function.identity()); } @Override public int getStaticVarCompensatorCount() { return objectStore.getAll(StaticVarCompensatorImpl.class).size(); } @Override public StaticVarCompensatorImpl getStaticVarCompensator(String id) { return objectStore.get(id, StaticVarCompensatorImpl.class); } @Override public Switch getSwitch(String id) { return objectStore.get(id, SwitchImpl.class); } @Override public Stream<Switch> getSwitchStream() { return objectStore.getAll(SwitchImpl.class).stream().map(Function.identity()); } @Override public BusbarSection getBusbarSection(String id) { return objectStore.get(id, BusbarSectionImpl.class); } @Override public AbstractHvdcConverterStation<?> getHvdcConverterStation(String id) { AbstractHvdcConverterStation<?> converterStation = getLccConverterStation(id); if (converterStation == null) { converterStation = getVscConverterStation(id); } return converterStation; } @Override public int getHvdcConverterStationCount() { return getLccConverterStationCount() + getVscConverterStationCount(); } @Override public Iterable<HvdcConverterStation<?>> getHvdcConverterStations() { return Iterables.concat(getLccConverterStations(), getVscConverterStations()); } @Override public Stream<HvdcConverterStation<?>> getHvdcConverterStationStream() { return Stream.concat(getLccConverterStationStream(), getVscConverterStationStream()); } @Override public Iterable<LccConverterStation> getLccConverterStations() { return Collections.unmodifiableCollection(objectStore.getAll(LccConverterStationImpl.class)); } @Override public Stream<LccConverterStation> getLccConverterStationStream() { return objectStore.getAll(LccConverterStationImpl.class).stream().map(Function.identity()); } @Override public int getLccConverterStationCount() { return objectStore.getAll(LccConverterStationImpl.class).size(); } @Override public LccConverterStationImpl getLccConverterStation(String id) { return objectStore.get(id, LccConverterStationImpl.class); } @Override public Iterable<VscConverterStation> getVscConverterStations() { return Collections.unmodifiableCollection(objectStore.getAll(VscConverterStationImpl.class)); } @Override public Stream<VscConverterStation> getVscConverterStationStream() { return objectStore.getAll(VscConverterStationImpl.class).stream().map(Function.identity()); } @Override public int getVscConverterStationCount() { return objectStore.getAll(VscConverterStationImpl.class).size(); } @Override public VscConverterStationImpl getVscConverterStation(String id) { return objectStore.get(id, VscConverterStationImpl.class); } @Override public HvdcLine getHvdcLine(String id) { return objectStore.get(id, HvdcLineImpl.class); } @Override public int getHvdcLineCount() { return objectStore.getAll(HvdcLineImpl.class).size(); } @Override public Iterable<HvdcLine> getHvdcLines() { return Collections.unmodifiableCollection(objectStore.getAll(HvdcLineImpl.class)); } @Override public Stream<HvdcLine> getHvdcLineStream() { return objectStore.getAll(HvdcLineImpl.class).stream().map(Function.identity()); } @Override public 
HvdcLineAdder newHvdcLine() { return new HvdcLineAdderImpl(ref); } @Override public Identifiable<?> getIdentifiable(String id) { return objectStore.get(id, Identifiable.class); } @Override public Collection<Identifiable<?>> getIdentifiables() { return objectStore.getAll(); } @Override public BusBreakerViewImpl getBusBreakerView() { return busBreakerView; } @Override public BusViewImpl getBusView() { return busView; } private abstract static class AbstractComponentsManager<C extends Component> { protected final NetworkImpl network; private AbstractComponentsManager(NetworkImpl network) { this.network = Objects.requireNonNull(network); } private List<C> components; void invalidate() { components = null; } protected void addToAdjacencyList(Bus bus1, Bus bus2, Map<String, Integer> id2num, TIntArrayList[] adjacencyList) { if (bus1 != null && bus2 != null) { int busNum1 = id2num.get(bus1.getId()); int busNum2 = id2num.get(bus2.getId()); adjacencyList[busNum1].add(busNum2); adjacencyList[busNum2].add(busNum1); } } protected void fillAdjacencyList(Map<String, Integer> id2num, TIntArrayList[] adjacencyList) { for (LineImpl line : Sets.union(network.objectStore.getAll(LineImpl.class), network.objectStore.getAll(TieLineImpl.class))) { BusExt bus1 = line.getTerminal1().getBusView().getBus(); BusExt bus2 = line.getTerminal2().getBusView().getBus(); addToAdjacencyList(bus1, bus2, id2num, adjacencyList); } for (TwoWindingsTransformerImpl transfo : network.objectStore.getAll(TwoWindingsTransformerImpl.class)) { BusExt bus1 = transfo.getTerminal1().getBusView().getBus(); BusExt bus2 = transfo.getTerminal2().getBusView().getBus(); addToAdjacencyList(bus1, bus2, id2num, adjacencyList); } for (ThreeWindingsTransformerImpl transfo : network.objectStore.getAll(ThreeWindingsTransformerImpl.class)) { BusExt bus1 = transfo.getLeg1().getTerminal().getBusView().getBus(); BusExt bus2 = transfo.getLeg2().getTerminal().getBusView().getBus(); BusExt bus3 = transfo.getLeg3().getTerminal().getBusView().getBus(); addToAdjacencyList(bus1, bus2, id2num, adjacencyList); addToAdjacencyList(bus1, bus3, id2num, adjacencyList); addToAdjacencyList(bus2, bus3, id2num, adjacencyList); } } protected abstract C createComponent(int num, int size); protected abstract String getComponentLabel(); protected abstract void setComponentNumber(BusExt bus, int num); void update() { if (components != null) { return; } long startTime = System.currentTimeMillis(); // reset for (Bus b : network.getBusBreakerView().getBuses()) { setComponentNumber((BusExt) b, -1); } int num = 0; Map<String, Integer> id2num = new HashMap<>(); List<BusExt> num2bus = new ArrayList<>(); for (Bus bus : network.getBusView().getBuses()) { num2bus.add((BusExt) bus); id2num.put(bus.getId(), num); num++; } TIntArrayList[] adjacencyList = new TIntArrayList[num]; for (int i = 0; i < adjacencyList.length; i++) { adjacencyList[i] = new TIntArrayList(3); } fillAdjacencyList(id2num, adjacencyList); ConnectedComponentsComputationResult result = GraphUtil.computeConnectedComponents(adjacencyList); components = new ArrayList<>(result.getComponentSize().length); for (int i = 0; i < result.getComponentSize().length; i++) { components.add(createComponent(i, result.getComponentSize()[i])); } for (int i = 0; i < result.getComponentNumber().length; i++) { BusExt bus = num2bus.get(i); setComponentNumber(bus, result.getComponentNumber()[i]); } LOGGER.debug("{} components computed in {} ms", getComponentLabel(), System.currentTimeMillis() - startTime); } List<C> getConnectedComponents() { 
update(); return components; } C getComponent(int num) { // update() must not be put here, but explicitly called each time before because update may // trigger a new component computation and so on a change in the value of the num component already passed // (and outdated consequently) in parameter of this method return num != -1 ? components.get(num) : null; } } static final class ConnectedComponentsManager extends AbstractComponentsManager<ConnectedComponentImpl> { private ConnectedComponentsManager(NetworkImpl network) { super(network); } @Override protected void fillAdjacencyList(Map<String, Integer> id2num, TIntArrayList[] adjacencyList) { super.fillAdjacencyList(id2num, adjacencyList); for (HvdcLineImpl line : network.objectStore.getAll(HvdcLineImpl.class)) { BusExt bus1 = line.getConverterStation1().getTerminal().getBusView().getBus(); BusExt bus2 = line.getConverterStation2().getTerminal().getBusView().getBus(); addToAdjacencyList(bus1, bus2, id2num, adjacencyList); } } @Override protected String getComponentLabel() { return "Connected"; } @Override protected void setComponentNumber(BusExt bus, int num) { Objects.requireNonNull(bus); bus.setConnectedComponentNumber(num); } protected ConnectedComponentImpl createComponent(int num, int size) { return new ConnectedComponentImpl(num, size, network.ref); } } static final class SynchronousComponentsManager extends AbstractComponentsManager<ComponentImpl> { private SynchronousComponentsManager(NetworkImpl network) { super(network); } protected ComponentImpl createComponent(int num, int size) { return new ComponentImpl(num, size, network.ref); } @Override protected String getComponentLabel() { return "Synchronous"; } @Override protected void setComponentNumber(BusExt bus, int num) { Objects.requireNonNull(bus); bus.setSynchronousComponentNumber(num); } } private class StateImpl implements State { private final ConnectedComponentsManager connectedComponentsManager = new ConnectedComponentsManager(NetworkImpl.this); private final SynchronousComponentsManager synchronousComponentsManager = new SynchronousComponentsManager(NetworkImpl.this); @Override public StateImpl copy() { return new StateImpl(); } } private final StateArray<StateImpl> states; ConnectedComponentsManager getConnectedComponentsManager() { return states.get().connectedComponentsManager; } SynchronousComponentsManager getSynchronousComponentsManager() { return states.get().synchronousComponentsManager; } @Override public void extendStateArraySize(int initStateArraySize, int number, final int sourceIndex) { states.push(number, () -> states.copy(sourceIndex)); } @Override public void reduceStateArraySize(int number) { states.pop(number); } @Override public void deleteStateArrayElement(int index) { states.delete(index); } @Override public void allocateStateArrayElement(int[] indexes, final int sourceIndex) { states.allocate(indexes, () -> states.copy(sourceIndex)); } @Override protected String getTypeDescription() { return "Network"; } private void setId(String id) { objectStore.remove(this); this.id = id; name = null; // reset the name objectStore.checkAndAdd(this); } @Override public void merge(Network other) { NetworkImpl otherNetwork = (NetworkImpl) other; // this check must not be done on the number of state but on the size // of the internal state array because the network can have only // one state but an internal array with a size greater that one and // some re-usable states if (stateManager.getStateArraySize() != 1 || otherNetwork.stateManager.getStateArraySize() != 1) 
{ throw new PowsyblException("Merging of multi-states network is not supported"); } long start = System.currentTimeMillis(); // check mergeability Multimap<Class<? extends Identifiable>, String> intersection = objectStore.intersection(otherNetwork.objectStore); for (Map.Entry<Class<? extends Identifiable>, Collection<String>> entry : intersection.asMap().entrySet()) { Class<? extends Identifiable> clazz = entry.getKey(); if (clazz == DanglingLineImpl.class) { // fine for dangling lines continue; } Collection<String> objs = entry.getValue(); if (!objs.isEmpty()) { throw new PowsyblException("The following object(s) of type " + clazz.getSimpleName() + " exist(s) in both networks: " + objs); } } class LineMerge { String id; String voltageLevel1; String voltageLevel2; String xnode; String bus1; String bus2; String connectableBus1; String connectableBus2; Integer node1; Integer node2; class HalfLineMerge { String id; String name; float r; float x; float g1; float g2; float b1; float b2; float xnodeP; float xnodeQ; } final HalfLineMerge half1 = new HalfLineMerge(); final HalfLineMerge half2 = new HalfLineMerge(); CurrentLimits limits1; CurrentLimits limits2; float p1; float q1; float p2; float q2; Country country1; Country country2; } // try to find dangling lines couples Map<String, DanglingLine> dl1byXnodeCode = new HashMap<>(); for (DanglingLine dl1 : getDanglingLines()) { if (dl1.getUcteXnodeCode() != null) { dl1byXnodeCode.put(dl1.getUcteXnodeCode(), dl1); } } List<LineMerge> lines = new ArrayList<>(); for (DanglingLine dl2 : Lists.newArrayList(other.getDanglingLines())) { DanglingLine dl1 = getDanglingLine(dl2.getId()); if (dl1 == null) { // mapping by ucte xnode code if (dl2.getUcteXnodeCode() != null) { dl1 = dl1byXnodeCode.get(dl2.getUcteXnodeCode()); } } else { // mapping by id if (dl1.getUcteXnodeCode() != null && dl2.getUcteXnodeCode() != null && !dl1.getUcteXnodeCode().equals(dl2.getUcteXnodeCode())) { throw new PowsyblException("Dangling line couple " + dl1.getId() + " have inconsistent Xnodes (" + dl1.getUcteXnodeCode() + "!=" + dl2.getUcteXnodeCode() + ")"); } } if (dl1 != null) { LineMerge l = new LineMerge(); l.id = dl1.getId().compareTo(dl2.getId()) < 0 ? 
dl1.getId() + " + " + dl2.getId() : dl2.getId() + " + " + dl1.getId(); Terminal t1 = dl1.getTerminal(); Terminal t2 = dl2.getTerminal(); VoltageLevel vl1 = t1.getVoltageLevel(); VoltageLevel vl2 = t2.getVoltageLevel(); l.voltageLevel1 = vl1.getId(); l.voltageLevel2 = vl2.getId(); l.xnode = dl1.getUcteXnodeCode(); l.half1.id = dl1.getId(); l.half1.name = dl1.getName(); l.half1.r = dl1.getR(); l.half1.x = dl1.getX(); l.half1.g1 = dl1.getG(); l.half1.g2 = 0; l.half1.b1 = dl1.getB(); l.half1.b2 = 0; l.half1.xnodeP = dl1.getP0(); l.half1.xnodeQ = dl1.getQ0(); l.half2.id = dl2.getId(); l.half2.name = dl2.getName(); l.half2.r = dl2.getR(); l.half2.x = dl2.getX(); l.half2.g1 = dl2.getG(); l.half2.g2 = 0; l.half2.b1 = dl2.getB(); l.half2.b2 = 0; l.half2.xnodeP = dl2.getP0(); l.half2.xnodeQ = dl2.getQ0(); l.limits1 = dl1.getCurrentLimits(); l.limits2 = dl2.getCurrentLimits(); if (t1.getVoltageLevel().getTopologyKind() == TopologyKind.BUS_BREAKER) { Bus b1 = t1.getBusBreakerView().getBus(); if (b1 != null) { l.bus1 = b1.getId(); } l.connectableBus1 = t1.getBusBreakerView().getConnectableBus().getId(); } else { l.node1 = t1.getNodeBreakerView().getNode(); } if (t2.getVoltageLevel().getTopologyKind() == TopologyKind.BUS_BREAKER) { Bus b2 = t2.getBusBreakerView().getBus(); if (b2 != null) { l.bus2 = b2.getId(); } l.connectableBus2 = t2.getBusBreakerView().getConnectableBus().getId(); } else { l.node2 = t2.getNodeBreakerView().getNode(); } l.p1 = t1.getP(); l.q1 = t1.getQ(); l.p2 = t2.getP(); l.q2 = t2.getQ(); l.country1 = vl1.getSubstation().getCountry(); l.country2 = vl2.getSubstation().getCountry(); lines.add(l); // remove the 2 dangling lines dl1.remove(); dl2.remove(); } } // do not forget to remove the other network from its store!!! otherNetwork.objectStore.remove(otherNetwork); // merge the stores objectStore.merge(otherNetwork.objectStore); // fix network back reference of the other network objects otherNetwork.ref.setRef(ref); Multimap<Boundary, LineMerge> mergedLineByBoundary = HashMultimap.create(); for (LineMerge lm : lines) { LOGGER.debug("Replacing dangling line couple '{}' (xnode={}, country1={}, country2={}) by a line", lm.id, lm.xnode, lm.country1, lm.country2); TieLineAdderImpl la = newTieLine() .setId(lm.id) .setVoltageLevel1(lm.voltageLevel1) .setVoltageLevel2(lm.voltageLevel2) .line1().setId(lm.half1.id) .setName(lm.half1.name) .setR(lm.half1.r) .setX(lm.half1.x) .setG1(lm.half1.g1) .setG2(lm.half1.g2) .setB1(lm.half1.b1) .setB2(lm.half1.b2) .setXnodeP(lm.half1.xnodeP) .setXnodeQ(lm.half1.xnodeQ) .line2().setId(lm.half2.id) .setName(lm.half2.name) .setR(lm.half2.r) .setX(lm.half2.x) .setG1(lm.half2.g1) .setG2(lm.half2.g2) .setB1(lm.half2.b1) .setB2(lm.half2.b2) .setXnodeP(lm.half2.xnodeP) .setXnodeQ(lm.half2.xnodeQ) .setUcteXnodeCode(lm.xnode); if (lm.bus1 != null) { la.setBus1(lm.bus1); } la.setConnectableBus1(lm.connectableBus1); if (lm.bus2 != null) { la.setBus2(lm.bus2); } la.setConnectableBus2(lm.connectableBus2); if (lm.node1 != null) { la.setNode1(lm.node1); } if (lm.node2 != null) { la.setNode2(lm.node2); } TieLineImpl l = la.add(); l.setCurrentLimits(Side.ONE, (CurrentLimitsImpl) lm.limits1); l.setCurrentLimits(Side.TWO, (CurrentLimitsImpl) lm.limits2); l.getTerminal1().setP(lm.p1).setQ(lm.q1); l.getTerminal2().setP(lm.p2).setQ(lm.q2); mergedLineByBoundary.put(new Boundary(lm.country1, lm.country2), lm); } if (!lines.isEmpty()) { LOGGER.info("{} dangling line couples have been replaced by a line: {}", lines.size(), mergedLineByBoundary.asMap().entrySet().stream().map(e -> 
e.getKey() + ": " + e.getValue().size()).collect(Collectors.toList())); } // update the source format if (!sourceFormat.equals(otherNetwork.sourceFormat)) { sourceFormat = "hybrid"; } // change the network id setId(getId() + " + " + otherNetwork.getId()); LOGGER.info("Merging of {} done in {} ms", id, System.currentTimeMillis() - start); } @Override public void merge(Network... others) { for (Network other : others) { merge(other); } } @Override public void addListener(NetworkListener listener) { listeners.add(listener); } @Override public void removeListener(NetworkListener listener) { listeners.remove(listener); } }
iidm/iidm-impl/src/main/java/com/powsybl/iidm/network/impl/NetworkImpl.java
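The components managers in the NetworkImpl file above number the buses, build one TIntArrayList of adjacent bus numbers per bus, and hand that adjacency list to GraphUtil.computeConnectedComponents, which returns a component number per bus and a size per component. As a rough, self-contained illustration of what such a pass computes (a plain breadth-first sketch using only java.util, not the actual GraphUtil implementation):

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;

// Standalone sketch: label each vertex of an adjacency list with a component
// number and count the size of each component, mirroring the shape of the
// result used in update() (component number per bus, size per component).
public final class ConnectedComponentsSketch {

    public static void main(String[] args) {
        // 5 buses: {0, 1, 2} form one component, {3, 4} another
        int[][] adjacencyList = {{1}, {0, 2}, {1}, {4}, {3}};
        int[] componentNumber = new int[adjacencyList.length];
        Arrays.fill(componentNumber, -1);
        int[] componentSize = new int[adjacencyList.length]; // upper bound on the count
        int componentCount = 0;
        for (int start = 0; start < adjacencyList.length; start++) {
            if (componentNumber[start] != -1) {
                continue; // bus already labelled
            }
            Deque<Integer> queue = new ArrayDeque<>();
            queue.add(start);
            componentNumber[start] = componentCount;
            while (!queue.isEmpty()) {
                int v = queue.poll();
                componentSize[componentCount]++;
                for (int w : adjacencyList[v]) {
                    if (componentNumber[w] == -1) {
                        componentNumber[w] = componentCount;
                        queue.add(w);
                    }
                }
            }
            componentCount++;
        }
        // Prints [0, 0, 0, 1, 1] and [3, 2]
        System.out.println(Arrays.toString(componentNumber));
        System.out.println(Arrays.toString(Arrays.copyOf(componentSize, componentCount)));
    }
}

The real implementation additionally caches the computed list and clears it through invalidate(), which is why getComponent(num) relies on update() having been called just before rather than calling it itself.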
/** * Copyright (c) 2016, All partners of the iTesla project (http://www.itesla-project.eu/consortium) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.powsybl.iidm.network.impl; import com.google.common.collect.*; import com.powsybl.commons.PowsyblException; import com.powsybl.math.graph.GraphUtil; import com.powsybl.math.graph.GraphUtil.ConnectedComponentsComputationResult; import com.powsybl.iidm.network.*; import com.powsybl.iidm.network.TwoTerminalsConnectable.Side; import com.powsybl.iidm.network.impl.util.RefChain; import com.powsybl.iidm.network.impl.util.RefObj; import gnu.trove.list.array.TIntArrayList; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; /** * * @author Geoffroy Jamgotchian <geoffroy.jamgotchian at rte-france.com> */ class NetworkImpl extends AbstractIdentifiable<Network> implements Network, MultiStateObject, Stateful { private static final Logger LOGGER = LoggerFactory.getLogger(NetworkImpl.class); private final RefChain<NetworkImpl> ref = new RefChain<>(new RefObj<>(this)); private DateTime caseDate = new DateTime(); // default is the time at which the network has been created private int forecastDistance = 0; private String sourceFormat; private final ObjectStore objectStore = new ObjectStore(); private final StateManagerImpl stateManager; private final NetworkListenerList listeners = new NetworkListenerList(); class BusBreakerViewImpl implements BusBreakerView { @Override public Iterable<Bus> getBuses() { return FluentIterable.from(getVoltageLevels()) .transformAndConcat(vl -> vl.getBusBreakerView().getBuses()); } @Override public Stream<Bus> getBusStream() { return getVoltageLevelStream().flatMap(vl -> vl.getBusBreakerView().getBusStream()); } @Override public Iterable<Switch> getSwitchs() { return getSwitches(); } @Override public Iterable<Switch> getSwitches() { return FluentIterable.from(getVoltageLevels()) .transformAndConcat(vl -> vl.getBusBreakerView().getSwitches()); } @Override public Stream<Switch> getSwitchStream() { return getVoltageLevelStream().flatMap(vl -> vl.getBusBreakerView().getSwitchStream()); } } private final BusBreakerViewImpl busBreakerView = new BusBreakerViewImpl(); class BusViewImpl implements BusView { @Override public Iterable<Bus> getBuses() { return FluentIterable.from(getVoltageLevels()) .transformAndConcat(vl -> vl.getBusView().getBuses()); } @Override public Stream<Bus> getBusStream() { return getVoltageLevelStream().flatMap(vl -> vl.getBusView().getBusStream()); } @Override public Collection<Component> getConnectedComponents() { return Collections.unmodifiableList(states.get().connectedComponentsManager.getConnectedComponents()); } } private final BusViewImpl busView = new BusViewImpl(); NetworkImpl(String id, String name, String sourceFormat) { super(id, name); Objects.requireNonNull(sourceFormat, "source format is null"); this.sourceFormat = sourceFormat; stateManager = new StateManagerImpl(objectStore); states = new StateArray<>(ref, StateImpl::new); // add the network the object list as it is a stateful object // and it needs to be notified when and extension or a reduction of // the state array is requested objectStore.checkAndAdd(this); } @Override public ContainerType getContainerType() { return 
ContainerType.NETWORK; } @Override public DateTime getCaseDate() { return caseDate; } @Override public NetworkImpl setCaseDate(DateTime caseDate) { ValidationUtil.checkCaseDate(this, caseDate); this.caseDate = caseDate; return this; } @Override public int getForecastDistance() { return forecastDistance; } @Override public NetworkImpl setForecastDistance(int forecastDistance) { ValidationUtil.checkForecastDistance(this, forecastDistance); this.forecastDistance = forecastDistance; return this; } @Override public String getSourceFormat() { return sourceFormat; } RefChain<NetworkImpl> getRef() { return ref; } NetworkListenerList getListeners() { return listeners; } public ObjectStore getObjectStore() { return objectStore; } @Override public StateManagerImpl getStateManager() { return stateManager; } @Override public int getStateIndex() { return stateManager.getStateContext().getStateIndex(); } @Override public Set<Country> getCountries() { return getSubstationStream().map(Substation::getCountry).collect(Collectors.toSet()); } @Override public int getCountryCount() { return getCountries().size(); } @Override public SubstationAdder newSubstation() { return new SubstationAdderImpl(ref); } @Override public Iterable<Substation> getSubstations() { return Collections.unmodifiableCollection(objectStore.getAll(SubstationImpl.class)); } @Override public Stream<Substation> getSubstationStream() { return objectStore.getAll(SubstationImpl.class).stream().map(Function.identity()); } @Override public int getSubstationCount() { return objectStore.getAll(SubstationImpl.class).size(); } @Override public Iterable<Substation> getSubstations(Country country, String tsoId, String... geographicalTags) { return Substations.filter(getSubstations(), country, tsoId, geographicalTags); } @Override public SubstationImpl getSubstation(String id) { return objectStore.get(id, SubstationImpl.class); } @Override public Iterable<VoltageLevel> getVoltageLevels() { return Iterables.concat(objectStore.getAll(BusBreakerVoltageLevel.class), objectStore.getAll(NodeBreakerVoltageLevel.class)); } @Override public Stream<VoltageLevel> getVoltageLevelStream() { return Stream.concat(objectStore.getAll(BusBreakerVoltageLevel.class).stream(), objectStore.getAll(NodeBreakerVoltageLevel.class).stream()); } @Override public int getVoltageLevelCount() { return objectStore.getAll(BusBreakerVoltageLevel.class).size() + objectStore.getAll(NodeBreakerVoltageLevel.class).size(); } @Override public VoltageLevelExt getVoltageLevel(String id) { return objectStore.get(id, VoltageLevelExt.class); } @Override public LineAdderImpl newLine() { return new LineAdderImpl(this); } @Override public Iterable<Line> getLines() { return Iterables.concat(objectStore.getAll(LineImpl.class), objectStore.getAll(TieLineImpl.class)); } @Override public Branch getBranch(String branchId) { Objects.requireNonNull(branchId); Branch branch = getLine(branchId); if (branch == null) { branch = getTwoWindingsTransformer(branchId); } return branch; } @Override public Iterable<Branch> getBranches() { return Iterables.concat(getLines(), getTwoWindingsTransformers()); } @Override public Stream<Branch> getBranchStream() { return Stream.concat(getLineStream(), getTwoWindingsTransformerStream()); } @Override public int getBranchCount() { return getLineCount() + getTwoWindingsTransformerCount(); } @Override public Stream<Line> getLineStream() { return Stream.concat(objectStore.getAll(LineImpl.class).stream(), objectStore.getAll(TieLineImpl.class).stream()); } @Override public int 
getLineCount() { return objectStore.getAll(LineImpl.class).size() + objectStore.getAll(TieLineImpl.class).size(); } @Override public LineImpl getLine(String id) { LineImpl line = objectStore.get(id, LineImpl.class); if (line == null) { line = objectStore.get(id, TieLineImpl.class); } return line; } @Override public TieLineAdderImpl newTieLine() { return new TieLineAdderImpl(this); } @Override public Iterable<TwoWindingsTransformer> getTwoWindingsTransformers() { return Collections.unmodifiableCollection(objectStore.getAll(TwoWindingsTransformerImpl.class)); } @Override public Stream<TwoWindingsTransformer> getTwoWindingsTransformerStream() { return objectStore.getAll(TwoWindingsTransformerImpl.class).stream().map(Function.identity()); } @Override public int getTwoWindingsTransformerCount() { return objectStore.getAll(TwoWindingsTransformerImpl.class).size(); } @Override public TwoWindingsTransformer getTwoWindingsTransformer(String id) { return objectStore.get(id, TwoWindingsTransformerImpl.class); } @Override public Iterable<ThreeWindingsTransformer> getThreeWindingsTransformers() { return Collections.unmodifiableCollection(objectStore.getAll(ThreeWindingsTransformerImpl.class)); } @Override public Stream<ThreeWindingsTransformer> getThreeWindingsTransformerStream() { return objectStore.getAll(ThreeWindingsTransformerImpl.class).stream().map(Function.identity()); } @Override public int getThreeWindingsTransformerCount() { return objectStore.getAll(ThreeWindingsTransformerImpl.class).size(); } @Override public ThreeWindingsTransformer getThreeWindingsTransformer(String id) { return objectStore.get(id, ThreeWindingsTransformerImpl.class); } @Override public Iterable<Generator> getGenerators() { return Collections.unmodifiableCollection(objectStore.getAll(GeneratorImpl.class)); } @Override public Stream<Generator> getGeneratorStream() { return objectStore.getAll(GeneratorImpl.class).stream().map(Function.identity()); } @Override public int getGeneratorCount() { return objectStore.getAll(GeneratorImpl.class).size(); } @Override public GeneratorImpl getGenerator(String id) { return objectStore.get(id, GeneratorImpl.class); } @Override public Iterable<Load> getLoads() { return Collections.unmodifiableCollection(objectStore.getAll(LoadImpl.class)); } @Override public Stream<Load> getLoadStream() { return objectStore.getAll(LoadImpl.class).stream().map(Function.identity()); } @Override public int getLoadCount() { return objectStore.getAll(LoadImpl.class).size(); } @Override public LoadImpl getLoad(String id) { return objectStore.get(id, LoadImpl.class); } @Override public Iterable<ShuntCompensator> getShunts() { return Collections.unmodifiableCollection(objectStore.getAll(ShuntCompensatorImpl.class)); } @Override public Stream<ShuntCompensator> getShuntStream() { return objectStore.getAll(ShuntCompensatorImpl.class).stream().map(Function.identity()); } @Override public int getShuntCount() { return objectStore.getAll(ShuntCompensatorImpl.class).size(); } @Override public ShuntCompensatorImpl getShunt(String id) { return objectStore.get(id, ShuntCompensatorImpl.class); } @Override public Iterable<DanglingLine> getDanglingLines() { return Collections.unmodifiableCollection(objectStore.getAll(DanglingLineImpl.class)); } @Override public Stream<DanglingLine> getDanglingLineStream() { return objectStore.getAll(DanglingLineImpl.class).stream().map(Function.identity()); } @Override public int getDanglingLineCount() { return objectStore.getAll(DanglingLineImpl.class).size(); } @Override public 
DanglingLineImpl getDanglingLine(String id) { return objectStore.get(id, DanglingLineImpl.class); } @Override public Iterable<StaticVarCompensator> getStaticVarCompensators() { return Collections.unmodifiableCollection(objectStore.getAll(StaticVarCompensatorImpl.class)); } @Override public Stream<StaticVarCompensator> getStaticVarCompensatorStream() { return objectStore.getAll(StaticVarCompensatorImpl.class).stream().map(Function.identity()); } @Override public int getStaticVarCompensatorCount() { return objectStore.getAll(StaticVarCompensatorImpl.class).size(); } @Override public StaticVarCompensatorImpl getStaticVarCompensator(String id) { return objectStore.get(id, StaticVarCompensatorImpl.class); } @Override public Switch getSwitch(String id) { return objectStore.get(id, SwitchImpl.class); } @Override public Stream<Switch> getSwitchStream() { return objectStore.getAll(SwitchImpl.class).stream().map(Function.identity()); } @Override public BusbarSection getBusbarSection(String id) { return objectStore.get(id, BusbarSectionImpl.class); } @Override public AbstractHvdcConverterStation<?> getHvdcConverterStation(String id) { AbstractHvdcConverterStation<?> converterStation = getLccConverterStation(id); if (converterStation == null) { converterStation = getVscConverterStation(id); } return converterStation; } @Override public int getHvdcConverterStationCount() { return getLccConverterStationCount() + getVscConverterStationCount(); } @Override public Iterable<HvdcConverterStation<?>> getHvdcConverterStations() { return Iterables.concat(getLccConverterStations(), getVscConverterStations()); } @Override public Stream<HvdcConverterStation<?>> getHvdcConverterStationStream() { return Stream.concat(getLccConverterStationStream(), getVscConverterStationStream()); } @Override public Iterable<LccConverterStation> getLccConverterStations() { return Collections.unmodifiableCollection(objectStore.getAll(LccConverterStationImpl.class)); } @Override public Stream<LccConverterStation> getLccConverterStationStream() { return objectStore.getAll(LccConverterStationImpl.class).stream().map(Function.identity()); } @Override public int getLccConverterStationCount() { return objectStore.getAll(LccConverterStationImpl.class).size(); } @Override public LccConverterStationImpl getLccConverterStation(String id) { return objectStore.get(id, LccConverterStationImpl.class); } @Override public Iterable<VscConverterStation> getVscConverterStations() { return Collections.unmodifiableCollection(objectStore.getAll(VscConverterStationImpl.class)); } @Override public Stream<VscConverterStation> getVscConverterStationStream() { return objectStore.getAll(VscConverterStationImpl.class).stream().map(Function.identity()); } @Override public int getVscConverterStationCount() { return objectStore.getAll(VscConverterStationImpl.class).size(); } @Override public VscConverterStationImpl getVscConverterStation(String id) { return objectStore.get(id, VscConverterStationImpl.class); } @Override public HvdcLine getHvdcLine(String id) { return objectStore.get(id, HvdcLineImpl.class); } @Override public int getHvdcLineCount() { return objectStore.getAll(HvdcLineImpl.class).size(); } @Override public Iterable<HvdcLine> getHvdcLines() { return Collections.unmodifiableCollection(objectStore.getAll(HvdcLineImpl.class)); } @Override public Stream<HvdcLine> getHvdcLineStream() { return objectStore.getAll(HvdcLineImpl.class).stream().map(Function.identity()); } @Override public HvdcLineAdder newHvdcLine() { return new HvdcLineAdderImpl(ref); } 
@Override public Identifiable<?> getIdentifiable(String id) { return objectStore.get(id, Identifiable.class); } @Override public Collection<Identifiable<?>> getIdentifiables() { return objectStore.getAll(); } @Override public BusBreakerViewImpl getBusBreakerView() { return busBreakerView; } @Override public BusViewImpl getBusView() { return busView; } private abstract static class AbstractComponentsManager<C extends Component> { protected final NetworkImpl network; private AbstractComponentsManager(NetworkImpl network) { this.network = Objects.requireNonNull(network); } private List<C> components; void invalidate() { components = null; } protected void addToAdjacencyList(Bus bus1, Bus bus2, Map<String, Integer> id2num, TIntArrayList[] adjacencyList) { if (bus1 != null && bus2 != null) { int busNum1 = id2num.get(bus1.getId()); int busNum2 = id2num.get(bus2.getId()); adjacencyList[busNum1].add(busNum2); adjacencyList[busNum2].add(busNum1); } } protected void fillAdjacencyList(Map<String, Integer> id2num, TIntArrayList[] adjacencyList) { for (LineImpl line : Sets.union(network.objectStore.getAll(LineImpl.class), network.objectStore.getAll(TieLineImpl.class))) { BusExt bus1 = line.getTerminal1().getBusView().getBus(); BusExt bus2 = line.getTerminal2().getBusView().getBus(); addToAdjacencyList(bus1, bus2, id2num, adjacencyList); } for (TwoWindingsTransformerImpl transfo : network.objectStore.getAll(TwoWindingsTransformerImpl.class)) { BusExt bus1 = transfo.getTerminal1().getBusView().getBus(); BusExt bus2 = transfo.getTerminal2().getBusView().getBus(); addToAdjacencyList(bus1, bus2, id2num, adjacencyList); } for (ThreeWindingsTransformerImpl transfo : network.objectStore.getAll(ThreeWindingsTransformerImpl.class)) { BusExt bus1 = transfo.getLeg1().getTerminal().getBusView().getBus(); BusExt bus2 = transfo.getLeg2().getTerminal().getBusView().getBus(); BusExt bus3 = transfo.getLeg3().getTerminal().getBusView().getBus(); addToAdjacencyList(bus1, bus2, id2num, adjacencyList); addToAdjacencyList(bus1, bus3, id2num, adjacencyList); addToAdjacencyList(bus2, bus3, id2num, adjacencyList); } } protected abstract C createComponent(int num, int size); protected abstract String getComponentLabel(); protected abstract void setComponentNumber(BusExt bus, int num); void update() { if (components != null) { return; } long startTime = System.currentTimeMillis(); // reset for (Bus b : network.getBusBreakerView().getBuses()) { setComponentNumber((BusExt) b, -1); } int num = 0; Map<String, Integer> id2num = new HashMap<>(); List<BusExt> num2bus = new ArrayList<>(); for (Bus bus : network.getBusView().getBuses()) { num2bus.add((BusExt) bus); id2num.put(bus.getId(), num); num++; } TIntArrayList[] adjacencyList = new TIntArrayList[num]; for (int i = 0; i < adjacencyList.length; i++) { adjacencyList[i] = new TIntArrayList(3); } fillAdjacencyList(id2num, adjacencyList); ConnectedComponentsComputationResult result = GraphUtil.computeConnectedComponents(adjacencyList); components = new ArrayList<>(result.getComponentSize().length); for (int i = 0; i < result.getComponentSize().length; i++) { components.add(createComponent(i, result.getComponentSize()[i])); } for (int i = 0; i < result.getComponentNumber().length; i++) { BusExt bus = num2bus.get(i); setComponentNumber(bus, result.getComponentNumber()[i]); } LOGGER.debug("{} components computed in {} ms", getComponentLabel(), System.currentTimeMillis() - startTime); } List<C> getConnectedComponents() { update(); return components; } C getComponent(int num) { // update() 
must not be called here but explicitly called each time beforehand, because update() may // trigger a new component computation and therefore a change of the component number for the num already passed // (and thus outdated) as a parameter of this method return num != -1 ? components.get(num) : null; } } static final class ConnectedComponentsManager extends AbstractComponentsManager<ConnectedComponentImpl> { private ConnectedComponentsManager(NetworkImpl network) { super(network); } @Override protected void fillAdjacencyList(Map<String, Integer> id2num, TIntArrayList[] adjacencyList) { super.fillAdjacencyList(id2num, adjacencyList); for (HvdcLineImpl line : network.objectStore.getAll(HvdcLineImpl.class)) { BusExt bus1 = line.getConverterStation1().getTerminal().getBusView().getBus(); BusExt bus2 = line.getConverterStation2().getTerminal().getBusView().getBus(); addToAdjacencyList(bus1, bus2, id2num, adjacencyList); } } @Override protected String getComponentLabel() { return "Connected"; } @Override protected void setComponentNumber(BusExt bus, int num) { Objects.requireNonNull(bus); bus.setConnectedComponentNumber(num); } protected ConnectedComponentImpl createComponent(int num, int size) { return new ConnectedComponentImpl(num, size, network.ref); } } static final class SynchronousComponentsManager extends AbstractComponentsManager<ComponentImpl> { private SynchronousComponentsManager(NetworkImpl network) { super(network); } protected ComponentImpl createComponent(int num, int size) { return new ComponentImpl(num, size, network.ref); } @Override protected String getComponentLabel() { return "Synchronous"; } @Override protected void setComponentNumber(BusExt bus, int num) { Objects.requireNonNull(bus); bus.setSynchronousComponentNumber(num); } } private class StateImpl implements State { private final ConnectedComponentsManager connectedComponentsManager = new ConnectedComponentsManager(NetworkImpl.this); private final SynchronousComponentsManager synchronousComponentsManager = new SynchronousComponentsManager(NetworkImpl.this); @Override public StateImpl copy() { return new StateImpl(); } } private final StateArray<StateImpl> states; ConnectedComponentsManager getConnectedComponentsManager() { return states.get().connectedComponentsManager; } SynchronousComponentsManager getSynchronousComponentsManager() { return states.get().synchronousComponentsManager; } @Override public void extendStateArraySize(int initStateArraySize, int number, final int sourceIndex) { states.push(number, () -> states.copy(sourceIndex)); } @Override public void reduceStateArraySize(int number) { states.pop(number); } @Override public void deleteStateArrayElement(int index) { states.delete(index); } @Override public void allocateStateArrayElement(int[] indexes, final int sourceIndex) { states.allocate(indexes, () -> states.copy(sourceIndex)); } @Override protected String getTypeDescription() { return "Network"; } private void setId(String id) { objectStore.remove(this); this.id = id; name = null; // reset the name objectStore.checkAndAdd(this); } @Override public void merge(Network other) { NetworkImpl otherNetwork = (NetworkImpl) other; // this check must not be done on the number of states but on the size // of the internal state array, because the network can have only // one state but an internal array with a size greater than one and // some re-usable states if (stateManager.getStateArraySize() != 1 || otherNetwork.stateManager.getStateArraySize() != 1) { throw new PowsyblException("Merging of multi-states network is not
supported"); } long start = System.currentTimeMillis(); // check mergeability Multimap<Class<? extends Identifiable>, String> intersection = objectStore.intersection(otherNetwork.objectStore); for (Map.Entry<Class<? extends Identifiable>, Collection<String>> entry : intersection.asMap().entrySet()) { Class<? extends Identifiable> clazz = entry.getKey(); if (clazz == DanglingLineImpl.class) { // fine for dangling lines continue; } Collection<String> objs = entry.getValue(); if (!objs.isEmpty()) { throw new PowsyblException("The following object(s) of type " + clazz.getSimpleName() + " exist(s) in both networks: " + objs); } } class LineMerge { String id; String voltageLevel1; String voltageLevel2; String xnode; String bus1; String bus2; String connectableBus1; String connectableBus2; Integer node1; Integer node2; class HalfLineMerge { String id; String name; float r; float x; float g1; float g2; float b1; float b2; float xnodeP; float xnodeQ; } final HalfLineMerge half1 = new HalfLineMerge(); final HalfLineMerge half2 = new HalfLineMerge(); CurrentLimits limits1; CurrentLimits limits2; float p1; float q1; float p2; float q2; Country country1; Country country2; } // try to find dangling lines couples Map<String, DanglingLine> dl1byXnodeCode = new HashMap<>(); for (DanglingLine dl1 : getDanglingLines()) { if (dl1.getUcteXnodeCode() != null) { dl1byXnodeCode.put(dl1.getUcteXnodeCode(), dl1); } } List<LineMerge> lines = new ArrayList<>(); for (DanglingLine dl2 : Lists.newArrayList(other.getDanglingLines())) { DanglingLine dl1 = getDanglingLine(dl2.getId()); if (dl1 == null) { // mapping by ucte xnode code if (dl2.getUcteXnodeCode() != null) { dl1 = dl1byXnodeCode.get(dl2.getUcteXnodeCode()); } } else { // mapping by id if (dl1.getUcteXnodeCode() != null && dl2.getUcteXnodeCode() != null && !dl1.getUcteXnodeCode().equals(dl2.getUcteXnodeCode())) { throw new PowsyblException("Dangling line couple " + dl1.getId() + " have inconsistent Xnodes (" + dl1.getUcteXnodeCode() + "!=" + dl2.getUcteXnodeCode() + ")"); } } if (dl1 != null) { LineMerge l = new LineMerge(); l.id = dl1.getId().compareTo(dl2.getId()) < 0 ? 
dl1.getId() + " + " + dl2.getId() : dl2.getId() + " + " + dl1.getId(); Terminal t1 = dl1.getTerminal(); Terminal t2 = dl2.getTerminal(); VoltageLevel vl1 = t1.getVoltageLevel(); VoltageLevel vl2 = t2.getVoltageLevel(); l.voltageLevel1 = vl1.getId(); l.voltageLevel2 = vl2.getId(); l.xnode = dl1.getUcteXnodeCode(); l.half1.id = dl1.getId(); l.half1.name = dl1.getName(); l.half1.r = dl1.getR(); l.half1.x = dl1.getX(); l.half1.g1 = dl1.getG(); l.half1.g2 = 0; l.half1.b1 = dl1.getB(); l.half1.b2 = 0; l.half1.xnodeP = dl1.getP0(); l.half1.xnodeQ = dl1.getQ0(); l.half2.id = dl2.getId(); l.half2.name = dl2.getName(); l.half2.r = dl2.getR(); l.half2.x = dl2.getX(); l.half2.g1 = dl2.getG(); l.half2.g2 = 0; l.half2.b1 = dl2.getB(); l.half2.b2 = 0; l.half2.xnodeP = dl2.getP0(); l.half2.xnodeQ = dl2.getQ0(); l.limits1 = dl1.getCurrentLimits(); l.limits2 = dl2.getCurrentLimits(); if (t1.getVoltageLevel().getTopologyKind() == TopologyKind.BUS_BREAKER) { Bus b1 = t1.getBusBreakerView().getBus(); if (b1 != null) { l.bus1 = b1.getId(); } l.connectableBus1 = t1.getBusBreakerView().getConnectableBus().getId(); } else { l.node1 = t1.getNodeBreakerView().getNode(); } if (t2.getVoltageLevel().getTopologyKind() == TopologyKind.BUS_BREAKER) { Bus b2 = t2.getBusBreakerView().getBus(); if (b2 != null) { l.bus2 = b2.getId(); } l.connectableBus2 = t2.getBusBreakerView().getConnectableBus().getId(); } else { l.node2 = t2.getNodeBreakerView().getNode(); } l.p1 = t1.getP(); l.q1 = t1.getQ(); l.p2 = t2.getP(); l.q2 = t2.getQ(); l.country1 = vl1.getSubstation().getCountry(); l.country2 = vl2.getSubstation().getCountry(); lines.add(l); // remove the 2 dangling lines dl1.remove(); dl2.remove(); } } // do not forget to remove the other network from its store!!! otherNetwork.objectStore.remove(otherNetwork); // merge the stores objectStore.merge(otherNetwork.objectStore); // fix network back reference of the other network objects otherNetwork.ref.setRef(ref); Multimap<Boundary, LineMerge> mergedLineByBoundary = HashMultimap.create(); for (LineMerge lm : lines) { LOGGER.debug("Replacing dangling line couple '{}' (xnode={}, country1={}, country2={}) by a line", lm.id, lm.xnode, lm.country1, lm.country2); TieLineAdderImpl la = newTieLine() .setId(lm.id) .setVoltageLevel1(lm.voltageLevel1) .setVoltageLevel2(lm.voltageLevel2) .line1().setId(lm.half1.id) .setName(lm.half1.name) .setR(lm.half1.r) .setX(lm.half1.x) .setG1(lm.half1.g1) .setG2(lm.half1.g2) .setB1(lm.half1.b1) .setB2(lm.half1.b2) .setXnodeP(lm.half1.xnodeP) .setXnodeQ(lm.half1.xnodeQ) .line2().setId(lm.half2.id) .setName(lm.half2.name) .setR(lm.half2.r) .setX(lm.half2.x) .setG1(lm.half2.g1) .setG2(lm.half2.g2) .setB1(lm.half2.b1) .setB2(lm.half2.b2) .setXnodeP(lm.half2.xnodeP) .setXnodeQ(lm.half2.xnodeQ) .setUcteXnodeCode(lm.xnode); if (lm.bus1 != null) { la.setBus1(lm.bus1); } la.setConnectableBus1(lm.connectableBus1); if (lm.bus2 != null) { la.setBus2(lm.bus2); } la.setConnectableBus2(lm.connectableBus2); if (lm.node1 != null) { la.setNode1(lm.node1); } if (lm.node2 != null) { la.setNode2(lm.node2); } TieLineImpl l = la.add(); l.setCurrentLimits(Side.ONE, (CurrentLimitsImpl) lm.limits1); l.setCurrentLimits(Side.TWO, (CurrentLimitsImpl) lm.limits2); l.getTerminal1().setP(lm.p1).setQ(lm.q1); l.getTerminal2().setP(lm.p2).setQ(lm.q2); mergedLineByBoundary.put(new Boundary(lm.country1, lm.country2), lm); } if (!lines.isEmpty()) { LOGGER.info("{} dangling line couples have been replaced by a line: {}", lines.size(), mergedLineByBoundary.asMap().entrySet().stream().map(e -> 
e.getKey() + ": " + e.getValue().size()).collect(Collectors.toList())); } // update the source format if (!sourceFormat.equals(otherNetwork.sourceFormat)) { sourceFormat = "hybrid"; } // change the network id setId(getId() + " + " + otherNetwork.getId()); LOGGER.info("Merging of {} done in {} ms", id, System.currentTimeMillis() - start); } @Override public void merge(Network... others) { for (Network other : others) { merge(other); } } @Override public void addListener(NetworkListener listener) { listeners.add(listener); } @Override public void removeListener(NetworkListener listener) { listeners.remove(listener); } }
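Both versions of merge() shown in this file pair up dangling lines across the two networks, first by identical id and otherwise by their UCTE X-node code (via the dl1byXnodeCode map), before replacing each couple with a tie line. A stripped-down, self-contained sketch of just that pairing step, with plain strings standing in for DanglingLine objects (the names below are illustrative, not taken from the source):

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

// Standalone sketch of the dangling-line pairing in merge(): index one side
// by X-node code, then match the other side by id first, falling back to the
// X-node code. Strings stand in for DanglingLine; this is illustrative only.
public final class DanglingLinePairingSketch {

    public static void main(String[] args) {
        // network 1 dangling lines: id -> X-node code
        Map<String, String> network1 = new LinkedHashMap<>();
        network1.put("DL1", "XNODE_A");
        network1.put("DL2", "XNODE_B");
        // network 2 dangling lines: id -> X-node code
        Map<String, String> network2 = new LinkedHashMap<>();
        network2.put("DL3", "XNODE_A");
        network2.put("DL2", "XNODE_B");

        // mirrors dl1byXnodeCode: X-node code -> id on side 1
        Map<String, String> dl1ByXnode = new HashMap<>();
        network1.forEach((id, xnode) -> dl1ByXnode.put(xnode, id));

        for (Map.Entry<String, String> dl2 : network2.entrySet()) {
            // mapping by id first, then by X-node code, as in merge()
            String dl1 = network1.containsKey(dl2.getKey())
                    ? dl2.getKey()
                    : dl1ByXnode.get(dl2.getValue());
            if (dl1 != null) {
                System.out.println(dl1 + " + " + dl2.getKey()
                        + " -> tie line on " + dl2.getValue());
            }
        }
    }
}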
Use EnumSet for the countries of a network, in particular to respect a constant order from call to call (natural enum order).
iidm/iidm-impl/src/main/java/com/powsybl/iidm/network/impl/NetworkImpl.java
Use EnumSet for the countries of a network, in particular to respect a constant order from call to call (natural enum order).
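The commit message above motivates the change with the iteration-order guarantee of EnumSet: elements come out in the natural (declaration) order of the enum, so repeated calls to getCountries() see a stable ordering. A small, self-contained illustration of that property (the tiny Country enum here is a stand-in for the real iidm one, and the whole class is an assumption for demonstration, not code from the commit):

import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;

// Standalone sketch: an EnumSet iterates in the enum's declaration order,
// regardless of insertion order, which keeps getCountries() ordering stable
// from call to call. The Country values below are illustrative only.
public final class EnumSetOrderSketch {

    enum Country { BE, DE, FR, NL }

    public static void main(String[] args) {
        // countries as they might be encountered while iterating substations
        List<Country> substationCountries = Arrays.asList(Country.FR, Country.BE, Country.FR, Country.DE);
        Set<Country> countries = EnumSet.noneOf(Country.class);
        countries.addAll(substationCountries);
        // Prints [BE, DE, FR]: declaration order, duplicates collapsed,
        // the same whatever order the substations were visited in.
        System.out.println(countries);
    }
}

In the stream-based getCountries() shown in the old contents, the analogous change would presumably swap Collectors.toSet() for a collector targeting an EnumSet, for example Collectors.toCollection(() -> EnumSet.noneOf(Country.class)); the exact new implementation is not reproduced here.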
Java
agpl-3.0
d924fd9e553bd6a707bc018f974759d5b56882ae
0
roskens/opennms-pre-github,aihua/opennms,tdefilip/opennms,tdefilip/opennms,roskens/opennms-pre-github,aihua/opennms,aihua/opennms,roskens/opennms-pre-github,rdkgit/opennms,roskens/opennms-pre-github,aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,tdefilip/opennms,rdkgit/opennms,rdkgit/opennms,aihua/opennms,rdkgit/opennms,aihua/opennms,roskens/opennms-pre-github,tdefilip/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,aihua/opennms,rdkgit/opennms,tdefilip/opennms,roskens/opennms-pre-github,rdkgit/opennms,rdkgit/opennms,aihua/opennms,aihua/opennms,rdkgit/opennms,tdefilip/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,tdefilip/opennms
// // // This file is part of the OpenNMS(R) Application. // // OpenNMS(R) is Copyright (C) 2002-2005 The OpenNMS Group, Inc. All rights // reserved. // OpenNMS(R) is a derivative work, containing both original code, included // code // and modified // code that was published under the GNU General Public License. Copyrights // for // modified // and included code are below. // // OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. // // The code in this file is Copyright (C) 2004 DJ Gregor. // // Based on install.pl which was Copyright (C) 1999-2001 Oculan Corp. All // rights reserved. // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // For more information contact: // OpenNMS Licensing <[email protected]> // http://www.opennms.org/ // http://www.opennms.com/ // package org.opennms.install; import java.io.BufferedReader; import java.io.File; import java.io.FileFilter; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; import java.io.Reader; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Properties; import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.opennms.core.utils.ProcessExec; /* * Big To-dos: - Fix all of the XXX items (some coding, some discussion) - * Change the Exceptions to something more reasonable - Do exception handling * where it makes sense (give users reasonable error messages for common * problems) - Add a friendly startup script? 
- Javadoc */ public class Installer { static final float POSTGRES_MIN_VERSION = 7.3f; static final String s_version = "$Id$"; static final int s_fetch_size = 1024; String m_opennms_home = null; boolean m_update_database = false; boolean m_do_inserts = false; boolean m_skip_constraints = false; boolean m_update_iplike = false; boolean m_update_unicode = false; boolean m_install_webapp = false; boolean m_fix_constraint = false; boolean m_force = false; boolean m_debug = false; boolean m_ignore_notnull = false; boolean m_no_revert = false; String m_pg_driver = null; String m_pg_url = null; String m_pg_user = "postgres"; String m_pg_pass = ""; String m_pg_bindir = null; String m_user = null; String m_pass = null; String m_database = null; String m_sql_dir = null; String m_create_sql = null; String m_pg_iplike = null; String m_tomcat_conf = null; String m_webappdir = null; String m_install_servletdir = null; String m_fix_constraint_name = null; boolean m_fix_constraint_remove_rows = false; HashMap<String, String[]> m_seqmapping = null; LinkedList<String> m_tables = null; LinkedList<String> m_sequences = null; // LinkedList m_cfunctions = new LinkedList(); // Unused, not in // create.sql // LinkedList m_functions = new LinkedList(); // Unused, not in create.sql // LinkedList m_languages = new LinkedList(); // Unused, not in create.sql LinkedList<String> m_indexes = new LinkedList<String>(); HashMap<String, List<String>> m_inserts = new HashMap<String, List<String>>(); HashSet<String> m_drops = new HashSet<String>(); HashSet<String> m_changed = new HashSet<String>(); float m_pg_version; String m_cascade = " CASCADE"; String m_sql; PrintStream m_out = System.out; Properties m_properties = null; Connection m_dbconnection; Map m_dbtypes = null; Map<String, ColumnChangeReplacement> m_columnReplacements = new HashMap<String, ColumnChangeReplacement>(); String m_required_options = "At least one of -d, -i, -s, -U, -y, " + "-C, or -T is required."; public Installer() { AutoIntegerIdMapStore snmpInterfaceId = new AutoIntegerIdMapStore(1, new String[] { "nodeid", "ipaddr", "snmpifindex" }); m_columnReplacements.put("snmpinterface.id", snmpInterfaceId); AutoIntegerIdMapStore ipInterfaceId = new AutoIntegerIdMapStore(1, new String[] { "nodeid", "ipaddr", "ifindex" }); m_columnReplacements.put("ipinterface.id", ipInterfaceId); MapStoreIdGetter IpInterfaceSnmpInterfaceId = new MapStoreIdGetter(snmpInterfaceId, new String[] { "nodeid", "ipaddr", "ifindex" }, true); m_columnReplacements.put("ipinterface.snmpinterfaceid", IpInterfaceSnmpInterfaceId); AutoIntegerIdMapStore ifServicesId = new AutoIntegerIdMapStore(1, new String[] { "nodeid", "ipaddr", "ifindex", "serviceid" }); m_columnReplacements.put("ifservices.id", ifServicesId); MapStoreIdGetter ifServicesIpInterfaceId = new MapStoreIdGetter(ipInterfaceId, new String[] { "nodeid", "ipaddr", "ifindex" }, false); m_columnReplacements.put("ifservices.ipinterfaceid", ifServicesIpInterfaceId); m_columnReplacements.put("events.eventsource", new EventSourceReplacement()); m_columnReplacements.put("outages.outageid", new AutoInteger(1)); m_columnReplacements.put("snmpinterface.nodeid", new RowHasBogusData("snmpInterface", "nodeId")); m_columnReplacements.put("snmpinterface.snmpifindex", new RowHasBogusData("snmpInterface", "snmpIfIndex")); m_columnReplacements.put("ipinterface.nodeid", new RowHasBogusData("ipInterface", "nodeId")); m_columnReplacements.put("ipinterface.ipaddr", new RowHasBogusData("ipInterface", "ipAddr")); 
m_columnReplacements.put("ifservices.nodeid", new RowHasBogusData("ifservices", "nodeId")); m_columnReplacements.put("ifservices.serviceid", new RowHasBogusData("ifservices", "serviceId")); m_columnReplacements.put("outages.nodeid", new RowHasBogusData("outages", "nodeId")); m_columnReplacements.put("outages.serviceid", new RowHasBogusData("outages", "serviceId")); /* * This is totally bogus. outages.svcregainedeventid is a foreign * key that points at events.eventid, and a fixed replacement of zero * will break, because there should never be an event with an ID of * zero. I don't think it ever got executed before due to the * null replacement only being performed if a column was marked as * NOT NULL. */ /* m_columnReplacements.put("outages.svcregainedeventid", new FixedIntegerReplacement(0)); */ // Disabled for the same reason as above /* m_columnReplacements.put("notifications.eventid", new FixedIntegerReplacement(0)); */ m_columnReplacements.put("usersnotified.id", new AutoInteger(1)); } public void install(String[] argv) throws Exception { printHeader(); loadProperties(); parseArguments(argv); if (!m_update_database && !m_do_inserts && !m_update_iplike && !m_update_unicode && m_tomcat_conf == null && !m_install_webapp && !m_fix_constraint) { throw new Exception("Nothing to do.\n" + m_required_options + "\nUse '-h' for help."); } // Don't bother checking the Java version. Leave it up to runjava. // checkJava(); // XXX Check Tomcat version? if (m_update_database || m_update_iplike || m_update_unicode || m_do_inserts || m_fix_constraint) { databaseConnect("template1"); databaseCheckVersion(); databaseCheckLanguage(); } printDiagnostics(); verifyFilesAndDirectories(); if (m_install_webapp) { checkWebappOldOpennmsDir(); checkServerXmlOldOpennmsContext(); } if (m_update_database || m_fix_constraint) { readTables(); } if (m_update_database) { // XXX Check and optionally modify pg_hba.conf if (!databaseUserExists()) { databaseAddUser(); } if (!databaseDBExists()) { databaseAddDB(); } } if (m_update_database || m_update_iplike || m_update_unicode || m_do_inserts || m_fix_constraint) { databaseDisconnect(); databaseConnect(m_database); } if (m_fix_constraint) { fixConstraint(); } if (m_update_database) { checkOldTables(); if (!m_skip_constraints) { checkConstraints(); } createSequences(); createTables(); createIndexes(); // createFunctions(m_cfunctions); // Unused, not in create.sql // createLanguages(); // Unused, not in create.sql // createFunctions(m_functions); // Unused, not in create.sql fixData(); } if (m_do_inserts) { insertData(); } if (m_update_unicode) { checkUnicode(); } if (m_install_webapp) { installWebApp(); } if (m_tomcat_conf != null) { updateTomcatConf(); } if (m_update_iplike) { updateIplike(); } if (m_update_database) { // XXX should we be using createFunctions and createLanguages // instead? updatePlPgsql(); // XXX should we be using createFunctions instead? 
addStoredProcedures(); } if (m_update_database || m_update_iplike || m_update_unicode || m_do_inserts) { databaseDisconnect(); } if (m_update_database) { createConfiguredFile(); } System.out.println(); System.out.println("Installer completed successfully!"); } public void createConfiguredFile() throws IOException { File f = new File(m_opennms_home + File.separator + "etc" + File.separator + "configured"); f.createNewFile(); } public void printHeader() { m_out.println("===============================================" + "==============================="); m_out.println("OpenNMS Installer Version " + s_version); m_out.println("===============================================" + "==============================="); m_out.println(""); m_out.println("Configures PostgreSQL tables, users, and other " + "miscellaneous settings."); m_out.println(""); } public void loadProperties() throws Exception { m_properties = new Properties(); m_properties.load(Installer.class.getResourceAsStream("installer.properties")); /* * Do this if we want to merge our properties with the system * properties... */ Properties sys = System.getProperties(); m_properties.putAll(sys); m_opennms_home = fetchProperty("install.dir"); m_database = fetchProperty("install.database.name"); m_user = fetchProperty("install.database.user"); m_pass = fetchProperty("install.database.password"); m_pg_driver = fetchProperty("install.database.driver"); m_pg_url = fetchProperty("install.database.url"); m_pg_bindir = fetchProperty("install.database.bindir"); m_sql_dir = fetchProperty("install.etc.dir"); m_install_servletdir = fetchProperty("install.servlet.dir"); String soext = fetchProperty("build.soext"); String pg_iplike_dir = fetchProperty("install.postgresql.dir"); m_pg_iplike = pg_iplike_dir + File.separator + "iplike." 
+ soext; m_create_sql = m_sql_dir + File.separator + "create.sql"; } public String fetchProperty(String property) throws Exception { String value; if ((value = m_properties.getProperty(property)) == null) { throw new Exception("property \"" + property + "\" not set " + "from bundled installer.properties file"); } return value; } public void parseArguments(String[] argv) throws Exception { LinkedList<String> args = new LinkedList<String>(); for (int i = 0; i < argv.length; i++) { StringBuffer b = new StringBuffer(argv[i]); boolean is_arg = false; while (b.length() > 0 && b.charAt(0) == '-') { is_arg = true; b.deleteCharAt(0); } if (is_arg) { while (b.length() > 0) { char c = b.charAt(0); b.deleteCharAt(0); switch (c) { case 'h': printHelp(); break; case 'c': m_force = true; break; case 'C': i++; m_fix_constraint = true; m_fix_constraint_name = getNextArg(argv, i, 'C'); break; case 'd': m_update_database = true; break; case 'i': m_do_inserts = true; break; case 'n': m_skip_constraints = true; case 'N': m_ignore_notnull = true; break; case 'p': i++; m_pg_pass = getNextArg(argv, i, 'p'); break; case 'R': m_no_revert = true; break; case 's': m_update_iplike = true; break; case 'T': i++; m_tomcat_conf = getNextArg(argv, i, 'T'); break; case 'u': i++; m_pg_user = getNextArg(argv, i, 'u'); break; case 'U': m_update_unicode = true; break; case 'w': i++; m_webappdir = getNextArg(argv, i, 'w'); break; case 'x': m_debug = true; break; case 'X': m_fix_constraint_remove_rows = true; break; case 'y': m_install_webapp = true; break; default: throw new Exception("unknown option '" + c + "'" + ", use '-h' option for usage"); } } } else { args.add(argv[i]); } } if (args.size() != 0) { throw new Exception("too many command-line arguments specified"); } } public String getNextArg(String[] argv, int i, char letter) throws Exception { if (i >= argv.length) { throw new Exception("no argument provided for '" + letter + "' option"); } if (argv[i].charAt(0) == '-') { throw new Exception("argument to '" + letter + "' option looks " + "like another option (begins with a dash): \"" + argv[i] + "\""); } return argv[i]; } public void printDiagnostics() { m_out.println("* using '" + m_user + "' as the PostgreSQL " + "user for OpenNMS"); m_out.println("* using '" + m_pass + "' as the PostgreSQL " + "password for OpenNMS"); m_out.println("* using '" + m_database + "' as the PostgreSQL " + "database name for OpenNMS"); } public void readTables() throws Exception { readTables(new FileReader(m_create_sql)); } public void readTables(Reader reader) throws Exception { BufferedReader r = new BufferedReader(reader); String line; m_tables = new LinkedList<String>(); m_seqmapping = new HashMap<String, String[]>(); m_sequences = new LinkedList<String>(); LinkedList<String> sql_l = new LinkedList<String>(); Pattern seqmappingPattern = Pattern.compile("\\s*--#\\s+install:\\s*" + "(\\S+)\\s+(\\S+)\\s+" + "(\\S+)\\s*.*"); Pattern createPattern = Pattern.compile("(?i)\\s*create\\b.*"); Pattern insertPattern = Pattern.compile("(?i)INSERT INTO " + "[\"']?([\\w_]+)[\"']?.*"); Pattern dropPattern = Pattern.compile("(?i)DROP TABLE [\"']?" 
+ "([\\w_]+)[\"']?.*"); while ((line = r.readLine()) != null) { Matcher m; if (line.matches("\\s*") || line.matches("\\s*\\\\.*")) { continue; } m = seqmappingPattern.matcher(line); if (m.matches()) { String[] a = { m.group(2), m.group(3) }; m_seqmapping.put(m.group(1), a); continue; } if (line.matches("--.*")) { continue; } if (createPattern.matcher(line).matches()) { m = Pattern.compile( "(?i)\\s*create\\s+((?:unique )?\\w+)" + "\\s+[\"']?(\\w+)[\"']?.*").matcher( line); if (m.matches()) { String type = m.group(1); String name = m.group(2).replaceAll("^[\"']", "").replaceAll( "[\"']$", ""); if (type.toLowerCase().indexOf("table") != -1) { m_tables.add(name); } else if (type.toLowerCase().indexOf("sequence") != -1) { m_sequences.add(name); /* * -- Not used, nothing in create.sql to get us here } * else if (type.toLowerCase().indexOf("function") != * -1) { if (type.toLowerCase().indexOf("language * 'c'") != -1) { m_cfunctions.add(name); } else { * m_functions.add(name); } } else if * (type.toLowerCase().indexOf("trusted") != -1) { m = * Pattern.compile("(?i)\\s*create\\s+trutsed " + * "procedural language\\s+[\"']?" + * "(\\w+)[\"']?.*").matcher(line); if (!m.matches()) { * throw new Exception("Could not match name and " + * "type of the trusted " + "procedural language in * this" + "line: " + line); } * m_languages.add(m.group(1)); */ } else if (type.toLowerCase().matches(".*\\bindex\\b.*")) { m = Pattern.compile( "(?i)\\s*create\\s+(?:unique )?" + "index\\s+[\"']?([\\w_]+)" + "[\"']?.*").matcher( line); if (!m.matches()) { throw new Exception("Could not match name and " + "type of the index " + "in this" + "line: " + line); } m_indexes.add(m.group(1)); } else { throw new Exception("Unknown CREATE encountered: " + "CREATE " + type + " " + name); } } else { throw new Exception("Unknown CREATE encountered: " + line); } sql_l.add(line); continue; } m = insertPattern.matcher(line); if (m.matches()) { String table = m.group(1); if (!m_inserts.containsKey(table)) { m_inserts.put(table, new LinkedList<String>()); } m_inserts.get(table).add(line); continue; } if (line.toLowerCase().startsWith("select setval ")) { String table = "select_setval"; if (!m_inserts.containsKey(table)) { m_inserts.put(table, new LinkedList<String>()); } m_inserts.get(table).add(line); sql_l.add(line); continue; } m = dropPattern.matcher(line); if (m.matches()) { m_drops.add(m.group(1)); sql_l.add(line); continue; } // XXX should do something here so we can catch what we can't // parse // m_out.println("unmatched line: " + line); sql_l.add(line); } r.close(); m_sql = cleanText(sql_l); } public void databaseConnect(String database) throws Exception { Class.forName(m_pg_driver); m_dbconnection = DriverManager.getConnection(m_pg_url + database, m_pg_user, m_pg_pass); } public void databaseDisconnect() throws Exception { if (m_dbconnection != null) { m_dbconnection.close(); } } public void databaseCheckVersion() throws Exception { m_out.print("- checking database version... 
"); Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT version()"); if (!rs.next()) { throw new Exception("Database didn't return any rows for " + "'SELECT version()'"); } String versionString = rs.getString(1); rs.close(); st.close(); Matcher m = Pattern.compile("^PostgreSQL (\\d+\\.\\d+)").matcher( versionString); if (!m.find()) { throw new Exception("Could not parse version number out of " + "version string: " + versionString); } m_pg_version = Float.parseFloat(m.group(1)); if (m_pg_version < POSTGRES_MIN_VERSION) { throw new Exception("Unsupported database version \"" + m_pg_version + "\" -- you need at least " + POSTGRES_MIN_VERSION); } // doesn't matter since we now require 7.3 /* * if (m_pg_version >= 7.3) { m_cascade = " CASCADE"; } */ m_out.println(Float.toString(m_pg_version)); m_out.println(" - Full version string: " + versionString); } public void databaseCheckLanguage() throws Exception { /* * Don't bother checking if the database version is 7.4 or greater and * just return without throwing an exception. We can (and do) use SQL * state checks instead of matching on the exception text, so the * language of server error messages does not matter. */ if (m_pg_version >= 7.4) { return; } /* * Use column names that should never exist and also encode the * current time, in hopes that this should never actually succeed. */ String timestamp = Long.toString(System.currentTimeMillis()); String bogus_query = "SELECT bogus_column_" + timestamp + " " + "FROM bogus_table_" + timestamp + " " + "WHERE another_bogus_column_" + timestamp + " IS NULL"; // Expected error: "ERROR: relation "bogus_table" does not exist" try { Statement st = m_dbconnection.createStatement(); st.executeQuery(bogus_query); } catch (SQLException e) { if (e.toString().indexOf("does not exist") != -1) { /* * Everything is fine, since we matched the error. We should * be safe to assume that all of the other error messages we * need to check for are in English. */ return; } throw new Exception("The database server's error messages " + "are not in English, however the installer " + "requires them to be in English when using " + "PostgreSQL earlier than 7.4. You either " + "need to set \"lc_messages = 'C'\" in your " + "postgresql.conf file and restart " + "PostgreSQL or upgrade to PostgreSQL 7.4 or " + "later. The installer executed the query " + "\"" + bogus_query + "\" and expected " + "\"does not exist\" in the error message, " + "but this exception was received instead: " + e, e); } /* * We should not get here, as the above command should always throw an * exception, so complain and throw an exception about not getting the * exception we were expecting. Are you lost yet? Good! */ throw new Exception("Expected an SQLException when executing a " + "bogus query to test for the server's error " + "message language, however the query succeeded " + "unexpectedly. SQL query: \"" + bogus_query + "\"."); } public void checkOldTables() throws SQLException, BackupTablesFoundException { Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT relname FROM pg_class " + "WHERE relkind = 'r' AND " + "relname LIKE '%_old_%'"); LinkedList<String> oldTables = new LinkedList<String>(); m_out.print("- checking database for old backup tables... "); while (rs.next()) { oldTables.add(rs.getString(1)); } rs.close(); st.close(); if (oldTables.size() == 0) { // No problems, so just print "NONE" and return. 
m_out.println("NONE"); return; } throw new BackupTablesFoundException(oldTables); } public List<Constraint> getForeignKeyConstraints() throws Exception { LinkedList<Constraint> constraints = new LinkedList<Constraint>(); for (String table : m_tables) { String tableLower = table.toLowerCase(); for (Constraint constraint : getTableFromSQL(tableLower).getConstraints()) { if (constraint.getType() == Constraint.FOREIGN_KEY) { constraints.add(constraint); } } } return constraints; } public void checkConstraints() throws Exception { List<Constraint> constraints = getForeignKeyConstraints(); m_out.print("- checking for rows that violate constraints... "); Statement st = m_dbconnection.createStatement(); for (Constraint constraint : constraints) { String name = constraint.getName(); String table = constraint.getTable(); String column = constraint.getColumns().get(0); String ftable = constraint.getForeignTable(); String fcolumn = constraint.getForeignColumns().get(0); if (!tableExists(table) || !tableColumnExists(table, column)) { // The constrained table or column does not exist continue; } if (table.equals("usersNotified") && column.equals("id")) { // m_out.print("Skipping usersNotified.id"); continue; } String query = "SELECT count(" + table + "." + column + ") FROM " + table + " " + getForeignConstraintWhere(table, column, ftable, fcolumn); ResultSet rs = st.executeQuery(query); rs.next(); int count = rs.getInt(1); rs.close(); if (count != 0) { rs = st.executeQuery("SELECT count(*) FROM " + table); rs.next(); int total = rs.getInt(1); rs.close(); st.close(); throw new Exception("Table " + table + " contains " + count + " rows " + "(out of " + total + ") that violate new constraint " + name + ". " + "See the install guide for details " + "on how to correct this problem."); } } st.close(); m_out.println("NONE"); } public String getForeignConstraintWhere(String table, String column, String ftable, String fcolumn) throws Exception { if (tableExists(ftable) && tableColumnExists(ftable, fcolumn)) { return "WHERE NOT EXISTS (SELECT " + ftable + "." + fcolumn + " FROM " + ftable + " WHERE " + ftable + "." + fcolumn + " = " + table + "." + column + ") AND " + table + "." + column + " IS NOT NULL"; } else { return "WHERE " + table + "." + column + " IS NOT NULL"; } } public void fixConstraint() throws Exception { List<Constraint> constraints = getForeignKeyConstraints(); Constraint constraint = null; m_out.print("- fixing rows that violate constraint " + m_fix_constraint_name + "... 
"); for (Constraint c : constraints) { if (m_fix_constraint_name.equals(c.getName())) { constraint = c; break; } } if (constraint == null) { throw new Exception("Did not find constraint " + m_fix_constraint_name + " in the database."); } String table = constraint.getTable(); String column = constraint.getColumns().get(0); String ftable = constraint.getForeignTable(); String fcolumn = constraint.getForeignColumns().get(0); if (!tableExists(table)) { throw new Exception("Constraint " + m_fix_constraint_name + " is on table " + table + ", but table does " + "not exist (so fixing this constraint does " + "nothing)."); } if (!tableColumnExists(table, column)) { throw new Exception("Constraint " + m_fix_constraint_name + " is on column " + column + " of table " + table + ", but column does " + "not exist (so fixing this constraint does " + "nothing)."); } String where = getForeignConstraintWhere(table, column, ftable, fcolumn); String query; String change_text; if (m_fix_constraint_remove_rows) { query = "DELETE FROM " + table + " " + where; change_text = "DELETED"; } else { query = "UPDATE " + table + " SET " + column + " = NULL " + where; change_text = "UPDATED"; } Statement st = m_dbconnection.createStatement(); int num = st.executeUpdate(query); m_out.println(change_text + " " + num + (num == 1 ? " ROW" : " ROWS")); } public boolean databaseUserExists() throws SQLException { boolean exists; Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT usename FROM pg_user WHERE " + "usename = '" + m_user + "'"); exists = rs.next(); rs.close(); st.close(); return exists; } public void databaseAddUser() throws SQLException { Statement st = m_dbconnection.createStatement(); st.execute("CREATE USER " + m_user + " WITH PASSWORD '" + m_pass + "' CREATEDB CREATEUSER"); } public boolean databaseDBExists() throws SQLException { boolean exists; Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT datname from pg_database " + "WHERE datname = '" + m_database + "'"); exists = rs.next(); rs.close(); st.close(); return exists; } public void databaseAddDB() throws Exception { Statement st = m_dbconnection.createStatement(); st.execute("CREATE DATABASE " + m_database + " WITH ENCODING='UNICODE'"); } public void createSequences() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; m_out.println("- creating sequences... "); Iterator i = m_sequences.iterator(); while (i.hasNext()) { String sequence = (String) i.next(); if (!m_seqmapping.containsKey(sequence)) { throw new Exception("Cannot find sequence mapping for " + sequence); } } i = m_sequences.iterator(); while (i.hasNext()) { String sequence = (String) i.next(); // String[] mapping = (String[]) m_seqmapping.get(sequence); int minvalue = 1; boolean alreadyExists; m_out.print(" - checking \"" + sequence + "\" sequence... "); rs = st.executeQuery("SELECT relname FROM pg_class " + "WHERE relname = '" + sequence.toLowerCase() + "'"); alreadyExists = rs.next(); if (alreadyExists) { m_out.println("ALREADY EXISTS"); } else { m_out.println("DOES NOT EXIST"); m_out.print(" - creating sequence \"" + sequence + "\"... "); st.execute("CREATE SEQUENCE " + sequence + " minvalue " + minvalue); st.execute("GRANT ALL on " + sequence + " TO " + m_user); m_out.println("OK"); } } m_out.println("- creating sequences... 
DONE"); } public void createTables() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; Iterator i = m_tables.iterator(); m_out.println("- creating tables..."); while (i.hasNext()) { String tableName = (String) i.next(); if (m_force) { tableName = tableName.toLowerCase(); String create = getTableCreateFromSQL(tableName); boolean remove; rs = st.executeQuery("SELECT relname FROM pg_class " + "WHERE relname = '" + tableName + "'"); remove = rs.next(); m_out.print(" - removing old table... "); if (remove) { st.execute("DROP TABLE " + tableName + m_cascade); m_out.println("REMOVED"); } else { m_out.println("CLEAN"); } m_out.print(" - creating table \"" + tableName + "\"... "); st.execute("CREATE TABLE " + tableName + " (" + create + ")"); m_out.println("CREATED"); m_out.print(" - giving \"" + m_user + "\" permissions on \"" + tableName + "\"... "); st.execute("GRANT ALL ON " + tableName + " TO " + m_user); m_out.println("GRANTED"); } else { m_out.print(" - checking table \"" + tableName + "\"... "); tableName = tableName.toLowerCase(); Table newTable = getTableFromSQL(tableName); Table oldTable = getTableFromDB(tableName); if (newTable.equals(oldTable)) { m_out.println("UPTODATE"); } else { if (oldTable == null) { String create = getTableCreateFromSQL(tableName); st.execute("CREATE TABLE " + tableName + " (" + create + ")"); st.execute("GRANT ALL ON " + tableName + " TO " + m_user); m_out.println("CREATED"); } else { try { changeTable(tableName, oldTable, newTable); } catch (Exception e) { throw new Exception("Error changing table '" + tableName + "'. Nested exception: " + e.getMessage(), e); } } } } } m_out.println("- creating tables... DONE"); } public void createIndexes() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; m_out.println("- creating indexes..."); Iterator i = m_indexes.iterator(); while (i.hasNext()) { String index = (String) i.next(); boolean exists; m_out.print(" - creating index \"" + index + "\"... "); rs = st.executeQuery("SELECT relname FROM pg_class " + "WHERE relname = '" + index.toLowerCase() + "'"); exists = rs.next(); if (exists) { m_out.println("EXISTS"); } else { st.execute(getIndexFromSQL(index)); m_out.println("OK"); } } m_out.println("- creating indexes... DONE"); } public Map getTypesFromDB() throws SQLException { if (m_dbtypes != null) { return m_dbtypes; } Statement st = m_dbconnection.createStatement(); ResultSet rs; HashMap<String, Integer> m = new HashMap<String, Integer>(); rs = st.executeQuery("SELECT oid,typname,typlen FROM pg_type"); while (rs.next()) { try { m.put(Column.normalizeColumnType(rs.getString(2), (rs.getInt(3) < 0)), new Integer(rs.getInt(1))); } catch (Exception e) { // ignore } } m_dbtypes = m; return m_dbtypes; } /* * -- Not used, nothing in create.sql... public void createFunctions(List * functions) throws Exception { Statement st = * m_dbconnection.createStatement(); ResultSet rs; Iterator i = * functions.iterator(); while (i.hasNext()) { String function = (String) * i.next(); String functionSql = getFunctionFromSQL(function); Matcher m = * Pattern.compile("\\s*\\((.+?)\\).*").matcher(functionSql); String * columns = m.group(1); if (m_force) { // XXX this doesn't check to see * if the function exists // before it drops it, so it will fail and throw * an // exception if the function doesn't exist. m_out.print("- removing * function \"" + function + "\" if it exists... 
"); String dropSql = * "DROP FUNCTION \"" + function + "\" (" + columns + ");"; * st.execute(dropSql); m_out.println("REMOVED"); } // XXX this doesn't * check to see if the function exists before // it tries to create it, so * it will fail and throw an // exception if the function does exist. * m_out.print("- creating function \"" + function + "\"... "); * st.execute("CREATE FUNCTION \"" + function + "\" " + functionSql); * m_out.println("OK"); } } public void createLanguages() throws Exception { * Statement st = m_dbconnection.createStatement(); ResultSet rs; Iterator * i = m_languages.iterator(); while (i.hasNext()) { String language = * (String) i.next(); String languageSql = getLanguageFromSQL(language); // * XXX this doesn't check to see if the language exists before // it tries * to create it, so it will fail and throw an // exception if the language * does already exist. m_out.print("- creating language reference \"" + * language + "\"... "); st.execute("CREATE TRUSTED PROCEDURAL LANGUAGE '" + * language + "' " + languageSql); m_out.println("OK"); } } */ public void fixData() throws Exception { Statement st = m_dbconnection.createStatement(); st.execute("UPDATE ipinterface SET issnmpprimary='N' " + "WHERE issnmpprimary IS NULL"); st.execute("UPDATE service SET servicename='SSH' " + "WHERE servicename='OpenSSH'"); st.execute("UPDATE snmpinterface SET snmpipadentnetmask=NULL"); } // XXX This causes the following Postgres error: // ERROR: duplicate key violates unique constraint "pk_dpname" void insertData() throws Exception { Statement st = m_dbconnection.createStatement(); for (Iterator i = m_inserts.keySet().iterator(); i.hasNext();) { String table = (String) i.next(); boolean exists = false; m_out.print("- inserting initial table data for \"" + table + "\"... "); for (Iterator j = ((LinkedList) m_inserts.get(table)).iterator(); j.hasNext();) { try { st.execute((String) j.next()); } catch (SQLException e) { /* * SQL Status codes: 23505: ERROR: duplicate key violates * unique constraint "%s" */ if (e.toString().indexOf("duplicate key") != -1 || "23505".equals(e.getSQLState())) { exists = true; } else { throw e; } } } if (exists) { m_out.println("EXISTS"); } else { m_out.println("OK"); } } } public void checkUnicode() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; m_out.print("- checking if database \"" + m_database + "\" is unicode... "); rs = st.executeQuery("SELECT encoding FROM pg_database WHERE " + "datname='" + m_database.toLowerCase() + "'"); rs.next(); if (rs.getInt(1) == 5 || rs.getInt(1) == 6) { m_out.println("ALREADY UNICODE"); return; } m_out.println("NOT UNICODE, CONVERTING"); databaseDisconnect(); String dumpFile = "/tmp/pg_dump-" + m_database; String logFile = "/tmp/unicode-convert.log"; PrintStream log = new PrintStream(new FileOutputStream(logFile, true)); ProcessExec e = new ProcessExec(log, log); int exitVal; log.println("------------------------------------------------------" + "------------------------"); m_out.print(" - dumping data to " + dumpFile + "... 
"); String[] cmd1 = { m_pg_bindir + File.separator + "pg_dump", "-U", m_pg_user, "-a", m_database, "-f", dumpFile }; if ((exitVal = e.exec(cmd1)) != 0) { throw new Exception("Dumping database returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd1) + "', check " + logFile); } m_out.println("OK"); m_out.print(" - waiting 3s for PostgreSQL to notice " + "that pg_dump has disconnected."); Thread.sleep(1000); m_out.print("."); Thread.sleep(1000); m_out.print("."); Thread.sleep(1000); m_out.println(" OK"); m_out.print(" - dropping old database... "); String[] cmd2 = { m_pg_bindir + File.separator + "dropdb", "-U", m_pg_user, m_database }; if ((exitVal = e.exec(cmd2)) != 0) { throw new Exception("Dropping database returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd2) + "', check " + logFile); } m_out.println("OK"); m_out.print(" - creating new unicode database... "); String[] cmd3 = { m_pg_bindir + File.separator + "createdb", "-U", m_pg_user, "-E", "UNICODE", m_database }; if ((exitVal = e.exec(cmd3)) != 0) { throw new Exception("Creating database returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd3) + "', check " + logFile); } m_out.println("OK"); m_out.print(" - recreating tables... "); String[] cmd4 = { m_pg_bindir + File.separator + "psql", "-U", m_user, "-f", m_sql_dir + File.separator + "create.sql", m_database }; if ((exitVal = e.exec(cmd4)) != 0) { throw new Exception("Recreating tables returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd4) + "', check " + logFile); } m_out.println("OK"); m_out.print(" - restoring data... "); String[] cmd5 = { m_pg_bindir + File.separator + "psql", "-U", m_user, "-f", dumpFile, m_database }; if ((exitVal = e.exec(cmd5)) != 0) { throw new Exception("Restoring data returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd5) + "', check " + logFile); } m_out.println("OK"); log.close(); databaseConnect(m_database); } public void verifyFilesAndDirectories() throws FileNotFoundException { if (m_update_database) { verifyFileExists(true, m_sql_dir, "SQL directory", "install.etc.dir property"); verifyFileExists(false, m_create_sql, "create.sql", "install.etc.dir property"); } if (m_update_iplike) { verifyFileExists(false, m_pg_iplike, "iplike module", "install.postgresql.dir property"); } if (m_tomcat_conf != null) { verifyFileExists( false, m_tomcat_conf, "Tomcat startup configuration file tomcat4.conf", "-T option"); } if (m_install_webapp) { verifyFileExists(true, m_webappdir, "Tomcat context directory", "-w option"); verifyFileExists(true, m_install_servletdir, "OpenNMS servlet directory", "install.servlet.dir property"); } } public void verifyFileExists(boolean isDir, String file, String description, String option) throws FileNotFoundException { File f; if (file == null) { throw new FileNotFoundException("The user most provide the " + "location of " + description + ", but this is not specified. " + "Use the " + option + " to specify this file."); } m_out.print("- using " + description + "... "); f = new File(file); if (!f.exists()) { throw new FileNotFoundException(description + " does not exist at \"" + file + "\". Use the " + option + " to specify another location."); } if (!isDir) { if (!f.isFile()) { throw new FileNotFoundException(description + " not a file at \"" + file + "\". 
Use the " + option + " to specify another file."); } } else { if (!f.isDirectory()) { throw new FileNotFoundException(description + " not a directory at \"" + file + "\". Use the " + option + " to specify " + "another directory."); } } m_out.println(f.getAbsolutePath()); } public void addStoredProcedures() throws Exception { Statement st = m_dbconnection.createStatement(); m_out.print("- adding stored procedures... "); FileFilter sqlFilter = new FileFilter() { public boolean accept(File pathname) { return (pathname.getName().startsWith("get") && pathname.getName().endsWith(".sql")) || pathname.getName().endsWith("Trigger.sql"); } }; File[] list = new File(m_sql_dir).listFiles(sqlFilter); for (int i = 0; i < list.length; i++) { LinkedList<String> drop = new LinkedList<String>(); StringBuffer create = new StringBuffer(); String line; m_out.print("\n - " + list[i].getName() + "... "); BufferedReader r = new BufferedReader(new FileReader(list[i])); while ((line = r.readLine()) != null) { line = line.trim(); if (line.matches("--.*")) { continue; } if (line.toLowerCase().startsWith("drop function")) { drop.add(line); } else { create.append(line); create.append("\n"); } } r.close(); Matcher m = Pattern.compile( "(?is)\\b(CREATE(?: OR REPLACE)? FUNCTION\\s+" + "(\\w+)\\s*\\((.*?)\\)\\s+" + "RETURNS\\s+(\\S+)\\s+AS\\s+" + "(.+? language ['\"]?\\w+['\"]?);)").matcher( create.toString()); if (!m.find()) { throw new Exception("Couldn't match \"" + m.pattern().pattern() + "\" in string \"" + create + "\""); } String createSql = m.group(1); String function = m.group(2); String columns = m.group(3); String returns = m.group(4); // String rest = m.group(5); if (functionExists(function, columns, returns)) { if (m_force) { st.execute("DROP FUNCTION " + function + "(" + columns + ")"); st.execute(createSql); m_out.print("OK (dropped and re-added)"); } else { m_out.print("EXISTS"); } } else { st.execute(createSql); m_out.print("OK"); } Pattern p = Pattern.compile("(?i)" + "(CREATE TRIGGER (\\S+)\\s+" + "BEFORE INSERT OR UPDATE\\s+" + "ON (\\S+) FOR EACH ROW\\s+" + "EXECUTE PROCEDURE (\\S+)\\(\\));"); m = p.matcher(create.toString()); if (m.find()) { String triggerSql = m.group(1); String triggerName = m.group(2); String triggerTable = m.group(3); String triggerProc = m.group(4); m_out.print(" - checking trigger '" + triggerName + "' ..."); if (triggerExists(triggerName, triggerTable, triggerProc)) { m_out.println("EXISTS"); } else { st.execute(triggerSql); m_out.println("ADDED"); } } } m_out.println(""); } public boolean functionExists(String function, String columns, String returnType) throws Exception { Map types = getTypesFromDB(); int[] columnTypes = new int[0]; columns = columns.trim(); if (columns.length() > 0) { String[] splitColumns = columns.split("\\s*,\\s*"); columnTypes = new int[splitColumns.length]; Column c; for (int j = 0; j < splitColumns.length; j++) { c = new Column(); c.parseColumnType(splitColumns[j]); columnTypes[j] = ((Integer) types.get(c.getType())).intValue(); } } Column c = new Column(); try { c.parseColumnType(returnType); } catch (Exception e) { throw new Exception("Could not parse column type '" + returnType + "' for function '" + function + "'. 
Nested exception: " + e.getMessage(), e); } int retType = ((Integer) types.get(c.getType())).intValue(); return functionExists(function, columnTypes, retType); } public boolean functionExists(String function, int[] columnTypes, int retType) throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; StringBuffer ct = new StringBuffer(); for (int j = 0; j < columnTypes.length; j++) { ct.append(" " + columnTypes[j]); } String query = "SELECT oid FROM pg_proc WHERE proname='" + function.toLowerCase() + "' AND " + "prorettype=" + retType + " AND " + "proargtypes='" + ct.toString().trim() + "'"; rs = st.executeQuery(query); return rs.next(); } public void checkWebappOldOpennmsDir() throws Exception { File f = new File(m_webappdir + File.separator + "opennms"); m_out.print("- Checking for old opennms webapp directory in " + f.getAbsolutePath() + "... "); if (f.exists()) { throw new Exception("Old OpenNMS web application exists: " + f.getAbsolutePath() + ". You need to remove this " + "before continuing."); } m_out.println("OK"); } public void checkServerXmlOldOpennmsContext() throws Exception { String search_regexp = "(?ms).*<Context\\s+path=\"/opennms\".*"; StringBuffer b = new StringBuffer(); File f = new File(m_webappdir + File.separator + ".." + File.separator + "conf" + File.separator + "server.xml"); m_out.print("- Checking for old opennms context in " + f.getAbsolutePath() + "... "); if (!f.exists()) { m_out.println("DID NOT CHECK (file does not exist)"); return; } BufferedReader r = new BufferedReader(new FileReader(f)); String line; while ((line = r.readLine()) != null) { b.append(line); b.append("\n"); } r.close(); if (b.toString().matches(search_regexp)) { throw new Exception( "Old OpenNMS context found in " + f.getAbsolutePath() + ". " + "You must remove this context from server.xml and re-run the " + "installer."); } m_out.println("OK"); return; } public void installWebApp() throws Exception { m_out.println("- Install OpenNMS webapp... "); installLink(m_install_servletdir + File.separator + "META-INF" + File.separator + "context.xml", m_webappdir + File.separator + "opennms.xml", "web application context", false); m_out.println("- Installing OpenNMS webapp... DONE"); } public void installLink(String source, String destination, String description, boolean recursive) throws Exception { String[] cmd; ProcessExec e = new ProcessExec(m_out, m_out); if (new File(destination).exists()) { m_out.print(" - " + destination + " exists, removing... "); removeFile(destination, description, recursive); m_out.println("REMOVED"); } m_out.print(" - creating link to " + destination + "... "); cmd = new String[4]; cmd[0] = "ln"; cmd[1] = "-sf"; cmd[2] = source; cmd[3] = destination; if (e.exec(cmd) != 0) { throw new Exception("Non-zero exit value returned while " + "linking " + description + ", " + source + " into " + destination); } m_out.println("DONE"); } public void updateTomcatConf() throws Exception { File f = new File(m_tomcat_conf); // XXX give the user the option to set the user to something else? // if so, should we chown the appropriate OpenNMS files to the // tomcat user? // // XXX should we have the option to automatically try to determine // the tomcat user and chown the OpenNMS files to that user? m_out.print("- setting tomcat4 user to 'root'... 
"); BufferedReader r = new BufferedReader(new FileReader(f)); StringBuffer b = new StringBuffer(); String line; while ((line = r.readLine()) != null) { if (line.startsWith("TOMCAT_USER=")) { b.append("TOMCAT_USER=\"root\"\n"); } else { b.append(line); b.append("\n"); } } r.close(); f.renameTo(new File(m_tomcat_conf + ".before-opennms-" + System.currentTimeMillis())); f = new File(m_tomcat_conf); PrintWriter w = new PrintWriter(new FileOutputStream(f)); w.print(b.toString()); w.close(); m_out.println("done"); } public void removeFile(String destination, String description, boolean recursive) throws IOException, InterruptedException, Exception { String[] cmd; ProcessExec e = new ProcessExec(m_out, m_out); if (recursive) { cmd = new String[3]; cmd[0] = "rm"; cmd[1] = "-r"; cmd[2] = destination; } else { cmd = new String[2]; cmd[0] = "rm"; cmd[1] = destination; } if (e.exec(cmd) != 0) { throw new Exception("Non-zero exit value returned while " + "removing " + description + ", " + destination + ", using \"" + join(" ", cmd) + "\""); } if (new File(destination).exists()) { throw new Exception("Could not delete existing " + description + ": " + destination); } } public void updateIplike() throws Exception { Statement st = m_dbconnection.createStatement(); m_out.print("- checking for stale iplike references... "); try { st.execute("DROP FUNCTION iplike(text,text)"); m_out.println("REMOVED"); } catch (SQLException e) { /* * SQL Status code: 42883: ERROR: function %s does not exist */ if (e.toString().indexOf("does not exist") != -1 || "42883".equals("42883")) { m_out.println("CLEAN"); } else { throw e; } } // XXX This error is generated from Postgres if eventtime(text) // does not exist: // ERROR: function eventtime(text) does not exist m_out.print("- checking for stale eventtime.so references... "); try { st.execute("DROP FUNCTION eventtime(text)"); m_out.println("REMOVED"); } catch (SQLException e) { /* * SQL Status code: 42883: ERROR: function %s does not exist */ if (e.toString().indexOf("does not exist") != -1 || "42883".equals(e.getSQLState())) { m_out.println("CLEAN"); } else { throw e; } } m_out.print("- adding iplike database function... "); st.execute("CREATE FUNCTION iplike(text,text) RETURNS bool " + "AS '" + m_pg_iplike + "' LANGUAGE 'c' WITH(isstrict)"); m_out.println("OK"); } public void updatePlPgsql() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; m_out.print("- adding PL/pgSQL call handler... "); rs = st.executeQuery("SELECT oid FROM pg_proc WHERE " + "proname='plpgsql_call_handler' AND " + "proargtypes = ''"); if (rs.next()) { m_out.println("EXISTS"); } else { st.execute("CREATE FUNCTION plpgsql_call_handler () " + "RETURNS OPAQUE AS '$libdir/plpgsql.so' LANGUAGE 'c'"); m_out.println("OK"); } m_out.print("- adding PL/pgSQL language module... 
"); rs = st.executeQuery("SELECT pg_language.oid FROM " + "pg_language, pg_proc WHERE " + "pg_proc.proname='plpgsql_call_handler' AND " + "pg_proc.proargtypes = '' AND " + "pg_proc.oid = pg_language.lanplcallfoid AND " + "pg_language.lanname = 'plpgsql'"); if (rs.next()) { m_out.println("EXISTS"); } else { st.execute("CREATE TRUSTED PROCEDURAL LANGUAGE 'plpgsql' " + "HANDLER plpgsql_call_handler LANCOMPILER 'PL/pgSQL'"); m_out.println("OK"); } } public Column findColumn(List columns, String column) { Column c; for (Iterator i = columns.iterator(); i.hasNext();) { c = (Column) i.next(); if (c.getName().equals(column.toLowerCase())) { return c; } } return null; } public String getXFromSQL(String item, String regex, int itemGroup, int returnGroup, String description) throws Exception { item = item.toLowerCase(); Matcher m = Pattern.compile(regex).matcher(m_sql); while (m.find()) { if (m.group(itemGroup).toLowerCase().equals(item)) { return m.group(returnGroup); } } throw new Exception("could not find " + description + " \"" + item + "\""); } public String getTableCreateFromSQL(String table) throws Exception { return getXFromSQL(table, "(?i)\\bcreate table\\s+['\"]?(\\S+)['\"]?" + "\\s+\\((.+?)\\);", 1, 2, "table"); } public String getIndexFromSQL(String index) throws Exception { return getXFromSQL(index, "(?i)\\b(create (?:unique )?index\\s+" + "['\"]?(\\S+)['\"]?\\s+.+?);", 2, 1, "index"); } public String getFunctionFromSQL(String function) throws Exception { return getXFromSQL(function, "(?is)\\bcreate function\\s+" + "['\"]?(\\S+)['\"]?\\s+" + "(.+? language ['\"]?\\w+['\"]?);", 1, 2, "function"); } public String getLanguageFromSQL(String language) throws Exception { return getXFromSQL(language, "(?is)\\bcreate trusted procedural " + "language\\s+['\"]?(\\S+)['\"]?\\s+(.+?);", 1, 2, "language"); } public List<Column> getTableColumnsFromSQL(String tableName) throws Exception { return getTableFromSQL(tableName).getColumns(); } public Table getTableFromSQL(String tableName) throws Exception { Table table = new Table(); LinkedList<Column> columns = new LinkedList<Column>(); LinkedList<Constraint> constraints = new LinkedList<Constraint>(); boolean parens = false; StringBuffer accumulator = new StringBuffer(); String create = getTableCreateFromSQL(tableName); for (int i = 0; i <= create.length(); i++) { char c = ' '; if (i < create.length()) { c = create.charAt(i); if (c == '(' || c == ')') { parens = (c == '('); accumulator.append(c); continue; } } if (((c == ',') && !parens) || i == create.length()) { String a = accumulator.toString().trim(); if (a.toLowerCase().startsWith("constraint ")) { Constraint constraint; try { constraint = new Constraint(tableName, a); } catch (Exception e) { throw new Exception("Could not parse constraint for table '" + tableName + "'. Nested exception: " + e.getMessage(), e); } List<String> constraintColumns = constraint.getColumns(); if (constraintColumns.size() == 0) { throw new IllegalStateException( "constraint with no constrained columns"); } for (String constrainedName : constraintColumns) { Column constrained = findColumn(columns, constrainedName); if (constrained == null) { throw new Exception( "constraint " + constraint.getName() + " references column \"" + constrainedName + "\", which is not a column in the table " + tableName); } } constraints.add(constraint); } else { Column column = new Column(); try { column.parse(accumulator.toString()); columns.add(column); } catch (Exception e) { throw new Exception("Could not parse table " + tableName + ". 
Chained: " + e.getMessage(), e); } } accumulator = new StringBuffer(); } else { accumulator.append(c); } } table.setName(tableName); table.setColumns(columns); table.setConstraints(constraints); table.setNotNullOnPrimaryKeyColumns(); return table; } public static String cleanText(List list) { StringBuffer s = new StringBuffer(); Iterator i = list.iterator(); while (i.hasNext()) { String l = (String) i.next(); s.append(l.replaceAll("\\s+", " ")); if (l.indexOf(';') != -1) { s.append('\n'); } } return s.toString(); } public boolean tableExists(String table) throws SQLException { Statement st = m_dbconnection.createStatement(); ResultSet rs; rs = st.executeQuery("SELECT DISTINCT tablename FROM pg_tables " + "WHERE lower(tablename) = '" + table.toLowerCase() + "'"); return rs.next(); } public boolean tableColumnExists(String table, String column) throws Exception { return (findColumn(getTableColumnsFromDB(table), column) != null); } public List<Column> getTableColumnsFromDB(String tableName) throws Exception { Table table = getTableFromDB(tableName); if (table == null) { return null; } return table.getColumns(); } public List<Column> getColumnsFromDB(String tableName) throws Exception { LinkedList<Column> columns = new LinkedList<Column>(); Statement st = m_dbconnection.createStatement(); ResultSet rs; String query = "SELECT " + " attname, " + " format_type(atttypid, atttypmod), " + " attnotnull " + "FROM " + " pg_attribute " + "WHERE " + " attrelid = " + " (SELECT oid FROM pg_class WHERE relname = '" + tableName.toLowerCase() + "') AND " + " attnum > 0"; if (m_pg_version >= 7.3) { query = query + " AND attisdropped = false"; } query = query + " ORDER BY " + " attnum"; rs = st.executeQuery(query); while (rs.next()) { Column c = new Column(); c.setName(rs.getString(1)); String columnType = rs.getString(2); try { c.parseColumnType(columnType); } catch (Exception e) { throw new Exception("Error parsing column type '" + columnType + "' for column '" + rs.getString(1) + "' in table '" + tableName + "'. 
Nested: " + e.getMessage(), e); } c.setNotNull(rs.getBoolean(3)); columns.add(c); } rs.close(); st.close(); return columns; } public Table getTableFromDB(String tableName) throws Exception { if (!tableExists(tableName)) { return null; } Table table = new Table(); table.setName(tableName.toLowerCase()); List<Column> columns = getColumnsFromDB(tableName); List<Constraint> constraints = getConstraintsFromDB(tableName); table.setColumns(columns); table.setConstraints(constraints); return table; } public List<Constraint> getConstraintsFromDB(String tableName) throws SQLException, Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; LinkedList<Constraint> constraints = new LinkedList<Constraint>(); String query = "SELECT c.oid, c.conname, c.contype, c.conrelid, c.confrelid, a.relname, c.confdeltype from pg_class a right join pg_constraint c on c.confrelid = a.oid where c.conrelid = (select oid from pg_class where relname = '" + tableName.toLowerCase() + "') order by c.oid"; rs = st.executeQuery(query); while (rs.next()) { int oid = rs.getInt(1); String name = rs.getString(2); String type = rs.getString(3); int conrelid = rs.getInt(4); int confrelid = rs.getInt(5); String ftable = rs.getString(6); String foreignDelType = rs.getString(7); Constraint constraint; if ("p".equals(type)) { List<String> columns = getConstrainedColumnsFromDBForConstraint( oid, conrelid); constraint = new Constraint(tableName.toLowerCase(), name, columns); } else if ("f".equals(type)) { List<String> columns = getConstrainedColumnsFromDBForConstraint( oid, conrelid); List<String> fcolumns = getForeignColumnsFromDBForConstraint( oid, confrelid); constraint = new Constraint(tableName.toLowerCase(), name, columns, ftable, fcolumns, foreignDelType); } else { throw new Exception("Do not support constraint type \"" + type + "\" in constraint \"" + name + "\""); } constraints.add(constraint); } return constraints; } private List<String> getConstrainedColumnsFromDBForConstraint(int oid, int conrelid) throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; LinkedList<String> columns = new LinkedList<String>(); String query = "select a.attname from pg_attribute a, pg_constraint c where a.attrelid = c.conrelid and a.attnum = ANY (c.conkey) and c.oid = " + oid + " and a.attrelid = " + conrelid; rs = st.executeQuery(query); while (rs.next()) { columns.add(rs.getString(1)); } rs.close(); st.close(); return columns; } private List<String> getForeignColumnsFromDBForConstraint(int oid, int confrelid) throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; LinkedList<String> columns = new LinkedList<String>(); String query = "select a.attname from pg_attribute a, pg_constraint c where a.attrelid = c.confrelid and a.attnum = ANY (c.confkey) and c.oid = " + oid + " and a.attrelid = " + confrelid; rs = st.executeQuery(query); while (rs.next()) { columns.add(rs.getString(1)); } rs.close(); st.close(); return columns; } public void changeTable(String table, Table oldTable, Table newTable) throws Exception { List<Column> oldColumns = oldTable.getColumns(); List<Column> newColumns = newTable.getColumns(); Statement st = m_dbconnection.createStatement(); TreeMap<String, ColumnChange> columnChanges = new TreeMap<String, ColumnChange>(); String[] oldColumnNames = new String[oldColumns.size()]; int i; Iterator j; if (m_changed.contains(table)) { return; } m_changed.add(table); m_out.println("SCHEMA DOES NOT MATCH"); m_out.println(" - differences:"); for (Constraint newConstraint 
: newTable.getConstraints()) { m_out.println("new constraint: " + newConstraint.getTable() + ": " + newConstraint); } for (Constraint oldConstraint : oldTable.getConstraints()) { m_out.println("old constraint: " + oldConstraint.getTable() + ": " + oldConstraint); } /* * XXX This doesn't check for old column rows that don't exist * in newColumns. */ for (Column newColumn : newColumns) { Column oldColumn = findColumn(oldColumns, newColumn.getName()); if (oldColumn == null || !newColumn.equals(oldColumn)) { m_out.println(" - column \"" + newColumn.getName() + "\" is different"); if (m_debug) { m_out.println(" - old column: " + ((oldColumn == null) ? "null" : oldColumn.toString())); m_out.println(" - new column: " + newColumn); } } if (!columnChanges.containsKey(newColumn.getName())) { columnChanges.put(newColumn.getName(), new ColumnChange()); } ColumnChange columnChange = (ColumnChange) columnChanges.get(newColumn.getName()); columnChange.setColumn(newColumn); /* * If the new column has a NOT NULL constraint, set a null replace * value for the column. Throw an exception if it is possible for * null data to be inserted into the new column. This would happen * if there is not a null replacement and the column either didn't * exist before or it did NOT have the NOT NULL constraint before. */ if (m_columnReplacements.containsKey(table + "." + newColumn.getName())) { columnChange.setNullReplace(m_columnReplacements.get(table + "." + newColumn.getName())); } if (newColumn.isNotNull() && columnChange.getNullReplace() == null) { if (oldColumn == null) { String message = "Column " + newColumn.getName() + " in new table has NOT NULL " + "constraint, however this column " + "did not exist before and there is " + "no null replacement for this " + "column"; if (m_ignore_notnull) { m_out.println(message + ". Ignoring due to '-N'"); } else { throw new Exception(message); } } else if (!oldColumn.isNotNull()) { String message = "Column " + newColumn.getName() + " in new table has NOT NULL " + "constraint, however this column " + "did not have the NOT NULL " + "constraint before and there is " + "no null replacement for this " + "column"; if (m_ignore_notnull) { m_out.println(message + ". Ignoring due to '-N'"); } else { throw new Exception(message); } } } } i = 0; for (j = oldColumns.iterator(); j.hasNext(); i++) { Column oldColumn = (Column) j.next(); oldColumnNames[i] = oldColumn.getName(); if (columnChanges.containsKey(oldColumn.getName())) { ColumnChange columnChange = (ColumnChange) columnChanges.get(oldColumn.getName()); Column newColumn = (Column) columnChange.getColumn(); if (newColumn.getType().indexOf("timestamp") != -1) { columnChange.setUpgradeTimestamp(true); } } else { m_out.println(" * WARNING: column \"" + oldColumn.getName() + "\" exists in the " + "database but is not in the new schema. " + "REMOVING COLUMN"); } } String tmpTable = table + "_old_" + System.currentTimeMillis(); try { if (tableExists(tmpTable)) { st.execute("DROP TABLE " + tmpTable + m_cascade); } m_out.print(" - creating temporary table... "); st.execute("CREATE TABLE " + tmpTable + " AS SELECT " + join(", ", oldColumnNames) + " FROM " + table); m_out.println("done"); st.execute("DROP TABLE " + table + m_cascade); m_out.print(" - creating new '" + table + "' table... 
"); st.execute("CREATE TABLE " + table + "(" + getTableCreateFromSQL(table) + ")"); m_out.println("done"); transformData(table, tmpTable, columnChanges, oldColumnNames); st.execute("GRANT ALL ON " + table + " TO " + m_user); m_out.print(" - optimizing table " + table + "... "); st.execute("VACUUM ANALYZE " + table); m_out.println("DONE"); } catch (Exception e) { if (m_no_revert) { m_out.println("FAILED! Not reverting due to '-R' being " + "passed. Old data in " + tmpTable); throw e; } try { m_dbconnection.rollback(); m_dbconnection.setAutoCommit(true); if (tableExists(table)) { st.execute("DROP TABLE " + table + m_cascade); } st.execute("CREATE TABLE " + table + " AS SELECT " + join(", ", oldColumnNames) + " FROM " + tmpTable); st.execute("DROP TABLE " + tmpTable); } catch (SQLException se) { throw new Exception("Got SQLException while trying to " + "revert table changes due to original " + "error: " + e + "\n" + "SQLException while reverting table: " + se, e); } m_out.println("FAILED! Old data restored, however indexes and " + "constraints on this table were not re-added"); throw e; } // We don't care if dropping the tmp table fails since we've // completed copying it, so it's outside of the try/catch block above. st.execute("DROP TABLE " + tmpTable); m_out.println(" - completed updating table... "); } /* * Note: every column has a ColumnChange record for it, which lists * the column name, a null replacement, if any, and the indexes for * selected rows (for using in ResultSet.getXXX()) and prepared rows * (PreparedStatement.setObject()). * Monkey. Make monkey dance. */ public void transformData(String table, String oldTable, TreeMap<String, ColumnChange> columnChanges, String[] oldColumnNames) throws SQLException, ParseException, Exception { Statement st = m_dbconnection.createStatement(); Iterator j; int i; st.setFetchSize(s_fetch_size); String[] columns = columnChanges.keySet().toArray(new String[0]); String[] questionMarks = new String[columns.length]; for (i = 0; i < oldColumnNames.length; i++) { ColumnChange c = columnChanges.get(oldColumnNames[i]); if (c != null) { c.setSelectIndex(i + 1); } } for (i = 0; i < columns.length; i++) { questionMarks[i] = "?"; ColumnChange c = columnChanges.get(columns[i]); c.setPrepareIndex(i + 1); c.setColumnType(((Column) c.getColumn()).getColumnSqlType()); } /* * Pull everything in from the old table and filter it to update the * data to any new formats. 
*/ m_out.print(" - transforming data into the new table...\r"); ResultSet rs = st.executeQuery("SELECT count(*) FROM " + oldTable); rs.next(); long num_rows = rs.getLong(1); PreparedStatement select = null; PreparedStatement insert = null; String order; if (table.equals("outages")) { order = " ORDER BY iflostservice"; } else { order = ""; } String dbcmd = "SELECT " + join(", ", oldColumnNames) + " FROM " + oldTable + order; if (m_debug) { m_out.println(" - performing select: " + dbcmd); } select = m_dbconnection.prepareStatement(dbcmd); select.setFetchSize(s_fetch_size); // error = "Unable to prepare select from temp"; dbcmd = "INSERT INTO " + table + " (" + join(", ", columns) + ") values (" + join(", ", questionMarks) + ")"; if (m_debug) { m_out.println(" - performing insert: " + dbcmd); } insert = m_dbconnection.prepareStatement(dbcmd); // error = "Unable to prepare insert into " + table); rs = select.executeQuery(); m_dbconnection.setAutoCommit(false); String name; ColumnChange change; Object obj; SimpleDateFormat dateParser = new SimpleDateFormat( "dd-MMM-yyyy HH:mm:ss"); SimpleDateFormat dateFormatter = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss"); char spin[] = { '/', '-', '\\', '|' }; int current_row = 0; while (rs.next()) { for (j = columnChanges.keySet().iterator(); j.hasNext();) { name = (String) j.next(); change = (ColumnChange) columnChanges.get(name); if (change.getSelectIndex() > 0) { obj = rs.getObject(change.getSelectIndex()); if (rs.wasNull()) { obj = null; } } else { if (m_debug) { m_out.println(" - don't know what to do " + "for \"" + name + "\", prepared column " + change.getPrepareIndex() + ": setting to null"); } obj = null; } /* if (table.equals("outages") && name.equals("outageid")) { obj = new Integer(current_row + 1); } if (table.equals("usersnotified") && name.equals("id")) { obj = new Integer(current_row + 1); } */ if (obj == null && change.isNullReplace()) { obj = change.getNullReplace(); if (obj instanceof ColumnChangeReplacement) { obj = ((ColumnChangeReplacement) obj).getColumnReplacement(rs, columnChanges); } if (m_debug) { m_out.println(" - " + name + " was NULL but is a " + "requires NULL replacement -- " + "replacing with '" + obj + "'"); } } if (obj != null) { if (change.isUpgradeTimestamp() && !obj.getClass().equals( java.sql.Timestamp.class)) { if (m_debug) { m_out.println(" - " + name + " is an old-style timestamp"); } String newObj = dateFormatter.format(dateParser.parse((String) obj)); if (m_debug) { m_out.println(" - " + obj + " -> " + newObj); } obj = newObj; } if (m_debug) { m_out.println(" - " + name + " = " + obj); } } else { if (m_debug) { m_out.println(" - " + name + " = undefined"); } } if (obj == null) { insert.setNull(change.getPrepareIndex(), change.getColumnType()); } else { insert.setObject(change.getPrepareIndex(), obj); } } try { insert.execute(); } catch (SQLException e) { SQLException ex = new SQLException( "Statement.execute() threw an " + "SQLException while inserting a row: " + "\"" + insert.toString() + "\". " + "Original exception: " + e.toString(), e.getSQLState(), e.getErrorCode()); ex.setNextException(e); throw ex; } current_row++; if ((current_row % 20) == 0) { System.err.print(" - transforming data into the new " + "table... 
" + (int) Math.floor((current_row * 100) / num_rows) + "% [" + spin[(current_row / 20) % spin.length] + "]\r"); } } m_dbconnection.commit(); m_dbconnection.setAutoCommit(true); if (table.equals("events") && num_rows == 0) { st.execute("INSERT INTO events (eventid, eventuei, eventtime, " + "eventsource, eventdpname, eventcreatetime, " + "eventseverity, eventlog, eventdisplay) values " + "(0, 'http://uei.opennms.org/dummyevent', now(), " + "'OpenNMS.Eventd', 'localhost', now(), 1, 'Y', 'Y')"); } m_out.println(" - transforming data into the new table... " + "DONE "); } public void printHelp() { m_out.println("usage:"); m_out.println(" $OPENNMS_HOME/bin/install -h"); m_out.println(" $OPENNMS_HOME/bin/install " + "[-r] [-x] [-N] [-R] [-c] [-d] [-i] [-s] [-U]"); m_out.println(" [-y] [-X]"); m_out.println(" " + "[-u <PostgreSQL admin user>]"); m_out.println(" " + "[-p <PostgreSQL admin password>]"); m_out.println(" " + "[-T <tomcat4.conf>]"); m_out.println(" " + "[-w <tomcat context directory>"); m_out.println(" " + "[-C <constraint>]"); m_out.println(""); m_out.println(m_required_options); m_out.println(""); m_out.println(" -h this help"); m_out.println(""); m_out.println(" -d perform database actions"); m_out.println(" -i insert data into the database"); m_out.println(" -s update iplike postgres function"); m_out.println(" -U upgrade database to unicode, if needed"); m_out.println(" -y install web application (see -w)"); m_out.println(""); m_out.println(" -u username of the PostgreSQL " + "administrator (default: \"" + m_pg_user + "\")"); m_out.println(" -p password of the PostgreSQL " + "administrator (default: \"" + m_pg_pass + "\")"); m_out.println(" -c drop and recreate tables that already " + "exist"); m_out.println(""); m_out.println(" -T location of tomcat.conf"); m_out.println(" -w location of tomcat's contcxt directory"); m_out.println(" (usually under conf/Catalina/localhost)"); m_out.println(""); m_out.println(" -r run as an RPM install (does nothing)"); m_out.println(" -x turn on debugging for database data " + "transformation"); m_out.println(" -N ignore NOT NULL constraint checks when " + "transforming data"); m_out.println(" useful after a table is reverted by a " + "previous run of the installer"); m_out.println(" -R do not revert a table to the original if " + "an error occurs when"); m_out.println(" transforming data -- only used for debugging"); m_out.println(" -C fix rows that violate the specified " + "constraint -- sets key column in"); m_out.println(" affected rows to NULL by default"); m_out.println(" -X drop rows that violate constraint instead of marking key column in"); m_out.println(" affected rows to NULL (used with \"-C\")"); System.exit(0); } public static void main(String[] argv) throws Exception { new Installer().install(argv); } /** * Join all of the elements of a String together into a single string, * inserting sep between each element. 
*/ public static String join(String sep, String[] array) { StringBuffer sb = new StringBuffer(); if (array.length > 0) { sb.append(array[0]); } for (int i = 1; i < array.length; i++) { sb.append(sep + array[i]); } return sb.toString(); } public static String join(String sep, List<String> list) { StringBuffer sb = new StringBuffer(); Iterator i = list.iterator(); if (i.hasNext()) { sb.append(i.next()); } while (i.hasNext()) { sb.append(sep + i.next()); } return sb.toString(); } public static String join(String sep, Object[] array) { StringBuffer sb = new StringBuffer(); if (array.length > 0) { sb.append(array[0].toString()); } for (int i = 1; i < array.length; i++) { if (array[i] == null) { sb.append(sep + "(null)"); } else { sb.append(sep + array[i].toString()); } } return sb.toString(); } public String checkServerVersion() throws IOException { File catalinaHome = new File(m_webappdir).getParentFile(); String readmeVersion = getTomcatVersion(new File(catalinaHome, "README.txt")); String runningVersion = getTomcatVersion(new File(catalinaHome, "RUNNING.txt")); if (readmeVersion == null && runningVersion == null) { return null; } else if (readmeVersion != null && runningVersion != null) { return readmeVersion; // XXX what should be done here? } else if (readmeVersion != null && runningVersion == null) { return readmeVersion; } else { return runningVersion; } } public String getTomcatVersion(File file) throws IOException { if (file == null || !file.exists()) { return null; } Pattern p = Pattern.compile("The Tomcat (\\S+) Servlet/JSP Container"); BufferedReader in = new BufferedReader(new FileReader(file)); for (int i = 0; i < 5; i++) { String line = in.readLine(); if (line == null) { // EOF in.close(); return null; } Matcher m = p.matcher(line); if (m.find()) { in.close(); return m.group(1); } } in.close(); return null; } public class AutoInteger implements ColumnChangeReplacement { private int m_value; public AutoInteger(int initialValue) { m_value = initialValue; } public int getInt() { return m_value++; } public Integer getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) { return getInt(); } } public class AutoIntegerIdMapStore implements ColumnChangeReplacement { private int m_value; private String[] m_indexColumns; private Map<MultiColumnKey, Integer> m_idMap = new HashMap<MultiColumnKey, Integer>(); public AutoIntegerIdMapStore(int initialValue, String[] indexColumns) { m_value = initialValue; m_indexColumns = indexColumns; } public Integer getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { MultiColumnKey key = getKeyForColumns(rs, columnChanges, m_indexColumns); Integer newInteger = m_value++; m_idMap.put(key, newInteger); return newInteger; } public Integer getIntegerForColumns(ResultSet rs, Map<String, ColumnChange> columnChanges, String[] columns, boolean noMatchOkay) throws SQLException { MultiColumnKey key = getKeyForColumns(rs, columnChanges, columns); Integer oldInteger = m_idMap.get(key); if (oldInteger == null && !noMatchOkay) { throw new IllegalArgumentException("No entry in the map for " + key); } return oldInteger; } private MultiColumnKey getKeyForColumns(ResultSet rs, Map<String, ColumnChange> columnChanges, String[] columns) throws SQLException { Object[] objects = new Object[columns.length]; for (int i = 0; i < columns.length; i++) { String indexColumn = columns[i]; ColumnChange columnChange = columnChanges.get(indexColumn); if (columnChange == null) { throw new IllegalArgumentException("No 
ColumnChange entry for '" + indexColumn + "'"); } int index = columnChange.getSelectIndex(); if (index == 0) { throw new IllegalArgumentException("ColumnChange entry for '" + indexColumn + "' has no select index"); } objects[i] = rs.getObject(index); } return new MultiColumnKey(objects); } public class MultiColumnKey { private Object[] m_keys; public MultiColumnKey(Object[] keys) { m_keys = keys; } @Override public boolean equals(Object otherObject) { if (!(otherObject instanceof MultiColumnKey)) { return false; } MultiColumnKey other = (MultiColumnKey) otherObject; if (m_keys.length != other.m_keys.length) { return false; } for (int i = 0; i < m_keys.length; i++) { if (m_keys[i] == null && other.m_keys[i] == null) { continue; } if (m_keys[i] == null || other.m_keys[i] == null) { return false; } if (!m_keys[i].equals(other.m_keys[i])) { return false; } } return true; } @Override public String toString() { return join(", ", m_keys); } @Override public int hashCode() { int value = 1; for (Object o : m_keys) { if (o != null) { // not the other way around, since 1 ^ anything == 1 value = o.hashCode() ^ value; } } return value; } } } public class MapStoreIdGetter implements ColumnChangeReplacement { private AutoIntegerIdMapStore m_storeFoo; private String[] m_indexColumns; private boolean m_noMatchOkay; public MapStoreIdGetter(AutoIntegerIdMapStore storeFoo, String[] columns, boolean noMatchOkay) { m_storeFoo = storeFoo; m_indexColumns = columns; m_noMatchOkay = noMatchOkay; } public Object getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { return m_storeFoo.getIntegerForColumns(rs, columnChanges, m_indexColumns, m_noMatchOkay); } } public class EventSourceReplacement implements ColumnChangeReplacement { private static final String m_replacement = "OpenNMS.Eventd"; public EventSourceReplacement() { // we do nothing! } public Object getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { return m_replacement; } } public class FixedIntegerReplacement implements ColumnChangeReplacement { private Integer m_replacement; public FixedIntegerReplacement(int value) { m_replacement = value; } public Object getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { return m_replacement; } } public class RowHasBogusData implements ColumnChangeReplacement { private String m_table; private String m_column; public RowHasBogusData(String table, String column) { m_table = table; m_column = column; } public Object getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { throw new IllegalArgumentException("The '" + m_column + "' column in the '" + m_table + "' table should never be " + "null, but the entry for this " + "row does have a null '" + m_column + "' column. " + "It needs to be " + "removed or updated to " + "reflect a valid '" + m_column + "' value."); } } public boolean triggerExists(String name, String table, String storedProcedure) throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT oid FROM pg_trigger WHERE tgname = '" + name.toLowerCase() + "' AND tgrelid = (SELECT oid FROM pg_class WHERE relname = '" + table.toLowerCase() + "' ) AND tgfoid = (SELECT oid FROM pg_proc WHERE proname = '" + storedProcedure.toLowerCase() + "')"); return rs.next(); } }
opennms-install/src/main/java/org/opennms/install/Installer.java
// // // This file is part of the OpenNMS(R) Application. // // OpenNMS(R) is Copyright (C) 2002-2005 The OpenNMS Group, Inc. All rights // reserved. // OpenNMS(R) is a derivative work, containing both original code, included // code // and modified // code that was published under the GNU General Public License. Copyrights // for // modified // and included code are below. // // OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. // // The code in this file is Copyright (C) 2004 DJ Gregor. // // Based on install.pl which was Copyright (C) 1999-2001 Oculan Corp. All // rights reserved. // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // For more information contact: // OpenNMS Licensing <[email protected]> // http://www.opennms.org/ // http://www.opennms.com/ // package org.opennms.install; import java.io.BufferedReader; import java.io.File; import java.io.FileFilter; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; import java.io.Reader; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Properties; import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.opennms.core.utils.ProcessExec; /* * Big To-dos: - Fix all of the XXX items (some coding, some discussion) - * Change the Exceptions to something more reasonable - Do exception handling * where it makes sense (give users reasonable error messages for common * problems) - Add a friendly startup script? 
- Javadoc */ public class Installer { static final float POSTGRES_MIN_VERSION = 7.3f; static final String s_version = "$Id$"; static final int s_fetch_size = 1024; String m_opennms_home = null; boolean m_update_database = false; boolean m_do_inserts = false; boolean m_skip_constraints = false; boolean m_update_iplike = false; boolean m_update_unicode = false; boolean m_install_webapp = false; boolean m_fix_constraint = false; boolean m_force = false; boolean m_debug = false; boolean m_ignore_notnull = false; boolean m_no_revert = false; String m_pg_driver = null; String m_pg_url = null; String m_pg_user = "postgres"; String m_pg_pass = ""; String m_pg_bindir = null; String m_user = null; String m_pass = null; String m_database = null; String m_sql_dir = null; String m_create_sql = null; String m_pg_iplike = null; String m_tomcat_conf = null; String m_webappdir = null; String m_install_servletdir = null; String m_fix_constraint_name = null; boolean m_fix_constraint_remove_rows = false; HashMap<String, String[]> m_seqmapping = null; LinkedList<String> m_tables = null; LinkedList<String> m_sequences = null; // LinkedList m_cfunctions = new LinkedList(); // Unused, not in // create.sql // LinkedList m_functions = new LinkedList(); // Unused, not in create.sql // LinkedList m_languages = new LinkedList(); // Unused, not in create.sql LinkedList<String> m_indexes = new LinkedList<String>(); HashMap<String, List<String>> m_inserts = new HashMap<String, List<String>>(); HashSet<String> m_drops = new HashSet<String>(); HashSet<String> m_changed = new HashSet<String>(); float m_pg_version; String m_cascade = " CASCADE"; String m_sql; PrintStream m_out = System.out; Properties m_properties = null; Connection m_dbconnection; Map m_dbtypes = null; Map<String, ColumnChangeReplacement> m_columnReplacements = new HashMap<String, ColumnChangeReplacement>(); String m_required_options = "At least one of -d, -i, -s, -U, -y, " + "-C, or -T is required."; public Installer() { AutoIntegerIdMapStore snmpInterfaceId = new AutoIntegerIdMapStore(1, new String[] { "nodeid", "ipaddr", "snmpifindex" }); m_columnReplacements.put("snmpinterface.id", snmpInterfaceId); AutoIntegerIdMapStore ipInterfaceId = new AutoIntegerIdMapStore(1, new String[] { "nodeid", "ipaddr", "ifindex" }); m_columnReplacements.put("ipinterface.id", ipInterfaceId); MapStoreIdGetter IpInterfaceSnmpInterfaceId = new MapStoreIdGetter(snmpInterfaceId, new String[] { "nodeid", "ipaddr", "ifindex" }, true); m_columnReplacements.put("ipinterface.snmpinterfaceid", IpInterfaceSnmpInterfaceId); AutoIntegerIdMapStore ifServicesId = new AutoIntegerIdMapStore(1, new String[] { "nodeid", "ipaddr", "ifindex", "serviceid" }); m_columnReplacements.put("ifservices.id", ifServicesId); MapStoreIdGetter ifServicesIpInterfaceId = new MapStoreIdGetter(ipInterfaceId, new String[] { "nodeid", "ipaddr", "ifindex" }, false); m_columnReplacements.put("ifservices.ipinterfaceid", ifServicesIpInterfaceId); m_columnReplacements.put("events.eventsource", new EventSourceReplacement()); m_columnReplacements.put("outages.outageid", new AutoInteger(1)); m_columnReplacements.put("snmpinterface.nodeid", new RowHasBogusData("snmpInterface", "nodeId")); m_columnReplacements.put("snmpinterface.snmpifindex", new RowHasBogusData("snmpInterface", "snmpIfIndex")); m_columnReplacements.put("ipinterface.nodeid", new RowHasBogusData("ipInterface", "nodeId")); m_columnReplacements.put("ipinterface.ipaddr", new RowHasBogusData("ipInterface", "ipAddr")); 
m_columnReplacements.put("ifservices.nodeid", new RowHasBogusData("ifservices", "nodeId")); m_columnReplacements.put("ifservices.serviceid", new RowHasBogusData("ifservices", "serviceId")); m_columnReplacements.put("outages.nodeid", new RowHasBogusData("outages", "nodeId")); m_columnReplacements.put("outages.serviceid", new RowHasBogusData("outages", "serviceId")); /* * This is totally bogus. outages.svcregainedeventid is a foreign * key that points at events.eventid, and a fixed replacement of zero * will break, because there should never be an event with an ID of * zero. I don't think it ever got executed before due to the * null replacement only being performed if a column was marked as * NOT NULL. */ /* m_columnReplacements.put("outages.svcregainedeventid", new FixedIntegerReplacement(0)); */ // Disabled for the same reason as above /* m_columnReplacements.put("notifications.eventid", new FixedIntegerReplacement(0)); */ m_columnReplacements.put("usersnotified.id", new AutoInteger(1)); } public void install(String[] argv) throws Exception { printHeader(); loadProperties(); parseArguments(argv); if (!m_update_database && !m_do_inserts && !m_update_iplike && !m_update_unicode && m_tomcat_conf == null && !m_install_webapp && !m_fix_constraint) { throw new Exception("Nothing to do.\n" + m_required_options + "\nUse '-h' for help."); } // Don't bother checking the Java version. Leave it up to runjava. // checkJava(); // XXX Check Tomcat version? if (m_update_database || m_update_iplike || m_update_unicode || m_do_inserts || m_fix_constraint) { databaseConnect("template1"); databaseCheckVersion(); databaseCheckLanguage(); } printDiagnostics(); verifyFilesAndDirectories(); if (m_install_webapp) { checkWebappOldOpennmsDir(); checkServerXmlOldOpennmsContext(); } if (m_update_database || m_fix_constraint) { readTables(); } if (m_update_database) { // XXX Check and optionally modify pg_hba.conf if (!databaseUserExists()) { databaseAddUser(); } if (!databaseDBExists()) { databaseAddDB(); } } if (m_update_database || m_update_iplike || m_update_unicode || m_do_inserts || m_fix_constraint) { databaseDisconnect(); databaseConnect(m_database); } if (m_fix_constraint) { fixConstraint(); } if (m_update_database) { checkOldTables(); if (!m_skip_constraints) { checkConstraints(); } createSequences(); createTables(); createIndexes(); // createFunctions(m_cfunctions); // Unused, not in create.sql // createLanguages(); // Unused, not in create.sql // createFunctions(m_functions); // Unused, not in create.sql fixData(); } if (m_do_inserts) { insertData(); } if (m_update_unicode) { checkUnicode(); } if (m_install_webapp) { installWebApp(); } if (m_tomcat_conf != null) { updateTomcatConf(); } if (m_update_iplike) { updateIplike(); } if (m_update_database) { // XXX should we be using createFunctions and createLanguages // instead? updatePlPgsql(); // XXX should we be using createFunctions instead? 
addStoredProcedures(); } if (m_update_database || m_update_iplike || m_update_unicode || m_do_inserts) { databaseDisconnect(); } if (m_update_database) { createConfiguredFile(); } System.out.println(); System.out.println("Installer completed successfully!"); } public void createConfiguredFile() throws IOException { File f = new File(m_opennms_home + File.separator + "etc" + File.separator + "configured"); f.createNewFile(); } public void printHeader() { m_out.println("===============================================" + "==============================="); m_out.println("OpenNMS Installer Version " + s_version); m_out.println("===============================================" + "==============================="); m_out.println(""); m_out.println("Configures PostgreSQL tables, users, and other " + "miscellaneous settings."); m_out.println(""); } public void loadProperties() throws Exception { m_properties = new Properties(); m_properties.load(Installer.class.getResourceAsStream("installer.properties")); /* * Do this if we want to merge our properties with the system * properties... */ Properties sys = System.getProperties(); m_properties.putAll(sys); m_opennms_home = fetchProperty("install.dir"); m_database = fetchProperty("install.database.name"); m_user = fetchProperty("install.database.user"); m_pass = fetchProperty("install.database.password"); m_pg_driver = fetchProperty("install.database.driver"); m_pg_url = fetchProperty("install.database.url"); m_pg_bindir = fetchProperty("install.database.bindir"); m_sql_dir = fetchProperty("install.etc.dir"); m_install_servletdir = fetchProperty("install.servlet.dir"); String soext = fetchProperty("build.soext"); String pg_iplike_dir = fetchProperty("install.postgresql.dir"); m_pg_iplike = pg_iplike_dir + File.separator + "iplike." 
+ soext; m_create_sql = m_sql_dir + File.separator + "create.sql"; } public String fetchProperty(String property) throws Exception { String value; if ((value = m_properties.getProperty(property)) == null) { throw new Exception("property \"" + property + "\" not set " + "from bundled installer.properties file"); } return value; } public void parseArguments(String[] argv) throws Exception { LinkedList<String> args = new LinkedList<String>(); for (int i = 0; i < argv.length; i++) { StringBuffer b = new StringBuffer(argv[i]); boolean is_arg = false; while (b.length() > 0 && b.charAt(0) == '-') { is_arg = true; b.deleteCharAt(0); } if (is_arg) { while (b.length() > 0) { char c = b.charAt(0); b.deleteCharAt(0); switch (c) { case 'h': printHelp(); break; case 'c': m_force = true; break; case 'C': i++; m_fix_constraint = true; m_fix_constraint_name = getNextArg(argv, i, 'C'); break; case 'd': m_update_database = true; break; case 'i': m_do_inserts = true; break; case 'n': m_skip_constraints = true; case 'N': m_ignore_notnull = true; break; case 'p': i++; m_pg_pass = getNextArg(argv, i, 'p'); break; case 'R': m_no_revert = true; break; case 's': m_update_iplike = true; break; case 'T': i++; m_tomcat_conf = getNextArg(argv, i, 'T'); break; case 'u': i++; m_pg_user = getNextArg(argv, i, 'u'); break; case 'U': m_update_unicode = true; break; case 'w': i++; m_webappdir = getNextArg(argv, i, 'w'); break; case 'x': m_debug = true; break; case 'X': m_fix_constraint_remove_rows = true; break; case 'y': m_install_webapp = true; break; default: throw new Exception("unknown option '" + c + "'" + ", use '-h' option for usage"); } } } else { args.add(argv[i]); } } if (args.size() != 0) { throw new Exception("too many command-line arguments specified"); } } public String getNextArg(String[] argv, int i, char letter) throws Exception { if (i >= argv.length) { throw new Exception("no argument provided for '" + letter + "' option"); } if (argv[i].charAt(0) == '-') { throw new Exception("argument to '" + letter + "' option looks " + "like another option (begins with a dash): \"" + argv[i] + "\""); } return argv[i]; } public void printDiagnostics() { m_out.println("* using '" + m_user + "' as the PostgreSQL " + "user for OpenNMS"); m_out.println("* using '" + m_pass + "' as the PostgreSQL " + "password for OpenNMS"); m_out.println("* using '" + m_database + "' as the PostgreSQL " + "database name for OpenNMS"); } public void readTables() throws Exception { readTables(new FileReader(m_create_sql)); } public void readTables(Reader reader) throws Exception { BufferedReader r = new BufferedReader(reader); String line; m_tables = new LinkedList<String>(); m_seqmapping = new HashMap<String, String[]>(); m_sequences = new LinkedList<String>(); LinkedList<String> sql_l = new LinkedList<String>(); Pattern seqmappingPattern = Pattern.compile("\\s*--#\\s+install:\\s*" + "(\\S+)\\s+(\\S+)\\s+" + "(\\S+)\\s*.*"); Pattern createPattern = Pattern.compile("(?i)\\s*create\\b.*"); Pattern insertPattern = Pattern.compile("(?i)INSERT INTO " + "[\"']?([\\w_]+)[\"']?.*"); Pattern dropPattern = Pattern.compile("(?i)DROP TABLE [\"']?" 
+ "([\\w_]+)[\"']?.*"); while ((line = r.readLine()) != null) { Matcher m; if (line.matches("\\s*") || line.matches("\\s*\\\\.*")) { continue; } m = seqmappingPattern.matcher(line); if (m.matches()) { String[] a = { m.group(2), m.group(3) }; m_seqmapping.put(m.group(1), a); continue; } if (line.matches("--.*")) { continue; } if (createPattern.matcher(line).matches()) { m = Pattern.compile( "(?i)\\s*create\\s+((?:unique )?\\w+)" + "\\s+[\"']?(\\w+)[\"']?.*").matcher( line); if (m.matches()) { String type = m.group(1); String name = m.group(2).replaceAll("^[\"']", "").replaceAll( "[\"']$", ""); if (type.toLowerCase().indexOf("table") != -1) { m_tables.add(name); } else if (type.toLowerCase().indexOf("sequence") != -1) { m_sequences.add(name); /* * -- Not used, nothing in create.sql to get us here } * else if (type.toLowerCase().indexOf("function") != * -1) { if (type.toLowerCase().indexOf("language * 'c'") != -1) { m_cfunctions.add(name); } else { * m_functions.add(name); } } else if * (type.toLowerCase().indexOf("trusted") != -1) { m = * Pattern.compile("(?i)\\s*create\\s+trutsed " + * "procedural language\\s+[\"']?" + * "(\\w+)[\"']?.*").matcher(line); if (!m.matches()) { * throw new Exception("Could not match name and " + * "type of the trusted " + "procedural language in * this" + "line: " + line); } * m_languages.add(m.group(1)); */ } else if (type.toLowerCase().matches(".*\\bindex\\b.*")) { m = Pattern.compile( "(?i)\\s*create\\s+(?:unique )?" + "index\\s+[\"']?([\\w_]+)" + "[\"']?.*").matcher( line); if (!m.matches()) { throw new Exception("Could not match name and " + "type of the index " + "in this" + "line: " + line); } m_indexes.add(m.group(1)); } else { throw new Exception("Unknown CREATE encountered: " + "CREATE " + type + " " + name); } } else { throw new Exception("Unknown CREATE encountered: " + line); } sql_l.add(line); continue; } m = insertPattern.matcher(line); if (m.matches()) { String table = m.group(1); if (!m_inserts.containsKey(table)) { m_inserts.put(table, new LinkedList<String>()); } m_inserts.get(table).add(line); continue; } if (line.toLowerCase().startsWith("select setval ")) { String table = "select_setval"; if (!m_inserts.containsKey(table)) { m_inserts.put(table, new LinkedList<String>()); } m_inserts.get(table).add(line); sql_l.add(line); continue; } m = dropPattern.matcher(line); if (m.matches()) { m_drops.add(m.group(1)); sql_l.add(line); continue; } // XXX should do something here so we can catch what we can't // parse // m_out.println("unmatched line: " + line); sql_l.add(line); } r.close(); m_sql = cleanText(sql_l); } public void databaseConnect(String database) throws Exception { Class.forName(m_pg_driver); m_dbconnection = DriverManager.getConnection(m_pg_url + database, m_pg_user, m_pg_pass); } public void databaseDisconnect() throws Exception { if (m_dbconnection != null) { m_dbconnection.close(); } } public void databaseCheckVersion() throws Exception { m_out.print("- checking database version... 
"); Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT version()"); if (!rs.next()) { throw new Exception("Database didn't return any rows for " + "'SELECT version()'"); } String versionString = rs.getString(1); rs.close(); st.close(); Matcher m = Pattern.compile("^PostgreSQL (\\d+\\.\\d+)").matcher( versionString); if (!m.find()) { throw new Exception("Could not parse version number out of " + "version string: " + versionString); } m_pg_version = Float.parseFloat(m.group(1)); if (m_pg_version < POSTGRES_MIN_VERSION) { throw new Exception("Unsupported database version \"" + m_pg_version + "\" -- you need at least " + POSTGRES_MIN_VERSION); } // doesn't matter since we now require 7.3 /* * if (m_pg_version >= 7.3) { m_cascade = " CASCADE"; } */ m_out.println(Float.toString(m_pg_version)); m_out.println(" - Full version string: " + versionString); } public void databaseCheckLanguage() throws Exception { /* * Don't bother checking if the database version is 7.4 or greater and * just return without throwing an exception. We can (and do) use SQL * state checks instead of matching on the exception text, so the * language of server error messages does not matter. */ if (m_pg_version >= 7.4) { return; } /* * Use column names that should never exist and also encode the * current time, in hopes that this should never actually succeed. */ String timestamp = Long.toString(System.currentTimeMillis()); String bogus_query = "SELECT bogus_column_" + timestamp + " " + "FROM bogus_table_" + timestamp + " " + "WHERE another_bogus_column_" + timestamp + " IS NULL"; // Expected error: "ERROR: relation "bogus_table" does not exist" try { Statement st = m_dbconnection.createStatement(); st.executeQuery(bogus_query); } catch (SQLException e) { if (e.toString().indexOf("does not exist") != -1) { /* * Everything is fine, since we matched the error. We should * be safe to assume that all of the other error messages we * need to check for are in English. */ return; } throw new Exception("The database server's error messages " + "are not in English, however the installer " + "requires them to be in English when using " + "PostgreSQL earlier than 7.4. You either " + "need to set \"lc_messages = 'C'\" in your " + "postgresql.conf file and restart " + "PostgreSQL or upgrade to PostgreSQL 7.4 or " + "later. The installer executed the query " + "\"" + bogus_query + "\" and expected " + "\"does not exist\" in the error message, " + "but this exception was received instead: " + e, e); } /* * We should not get here, as the above command should always throw an * exception, so complain and throw an exception about not getting the * exception we were expecting. Are you lost yet? Good! */ throw new Exception("Expected an SQLException when executing a " + "bogus query to test for the server's error " + "message language, however the query succeeded " + "unexpectedly. SQL query: \"" + bogus_query + "\"."); } public void checkOldTables() throws SQLException, BackupTablesFoundException { Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT relname FROM pg_class " + "WHERE relkind = 'r' AND " + "relname LIKE '%_old_%'"); LinkedList<String> oldTables = new LinkedList<String>(); m_out.print("- checking database for old backup tables... "); while (rs.next()) { oldTables.add(rs.getString(1)); } rs.close(); st.close(); if (oldTables.size() == 0) { // No problems, so just print "NONE" and return. 
m_out.println("NONE"); return; } throw new BackupTablesFoundException(oldTables); } public List<Constraint> getForeignKeyConstraints() throws Exception { LinkedList<Constraint> constraints = new LinkedList<Constraint>(); for (String table : m_tables) { String tableLower = table.toLowerCase(); for (Constraint constraint : getTableFromSQL(tableLower).getConstraints()) { if (constraint.getType() == Constraint.FOREIGN_KEY) { constraints.add(constraint); } } } return constraints; } public void checkConstraints() throws Exception { List<Constraint> constraints = getForeignKeyConstraints(); m_out.print("- checking for rows that violate constraints... "); Statement st = m_dbconnection.createStatement(); for (Constraint constraint : constraints) { String name = constraint.getName(); String table = constraint.getTable(); String column = constraint.getColumns().get(0); String ftable = constraint.getForeignTable(); String fcolumn = constraint.getForeignColumns().get(0); if (!tableExists(table) || !tableColumnExists(table, column)) { // The constrained table or column does not exist continue; } if (table.equals("usersNotified") && column.equals("id")) { // m_out.print("Skipping usersNotified.id"); continue; } String query = "SELECT count(" + table + "." + column + ") FROM " + table + " " + getForeignConstraintWhere(table, column, ftable, fcolumn); ResultSet rs = st.executeQuery(query); rs.next(); int count = rs.getInt(1); rs.close(); if (count != 0) { rs = st.executeQuery("SELECT count(*) FROM " + table); rs.next(); int total = rs.getInt(1); rs.close(); st.close(); throw new Exception("Table " + table + " contains " + count + " rows " + "(out of " + total + ") that violate new constraint " + name + ". " + "See the install guide for details " + "on how to correct this problem."); } } st.close(); m_out.println("NONE"); } public String getForeignConstraintWhere(String table, String column, String ftable, String fcolumn) throws Exception { if (tableExists(ftable) && tableColumnExists(ftable, fcolumn)) { return "WHERE NOT EXISTS (SELECT " + ftable + "." + fcolumn + " FROM " + ftable + " WHERE " + ftable + "." + fcolumn + " = " + table + "." + column + ") AND " + table + "." + column + " IS NOT NULL"; } else { return "WHERE " + table + "." + column + " IS NOT NULL"; } } public void fixConstraint() throws Exception { List<Constraint> constraints = getForeignKeyConstraints(); Constraint constraint = null; m_out.print("- fixing rows that violate constraint " + m_fix_constraint_name + "... 
"); for (Constraint c : constraints) { if (m_fix_constraint_name.equals(c.getName())) { constraint = c; break; } } if (constraint == null) { throw new Exception("Did not find constraint " + m_fix_constraint_name + " in the database."); } String table = constraint.getTable(); String column = constraint.getColumns().get(0); String ftable = constraint.getForeignTable(); String fcolumn = constraint.getForeignColumns().get(0); if (!tableExists(table)) { throw new Exception("Constraint " + m_fix_constraint_name + " is on table " + table + ", but table does " + "not exist (so fixing this constraint does " + "nothing)."); } if (!tableColumnExists(table, column)) { throw new Exception("Constraint " + m_fix_constraint_name + " is on column " + column + " of table " + table + ", but column does " + "not exist (so fixing this constraint does " + "nothing)."); } String where = getForeignConstraintWhere(table, column, ftable, fcolumn); String query; String change_text; if (m_fix_constraint_remove_rows) { query = "DELETE FROM " + table + " " + where; change_text = "DELETED"; } else { query = "UPDATE " + table + " SET " + column + " = NULL " + where; change_text = "UPDATED"; } Statement st = m_dbconnection.createStatement(); int num = st.executeUpdate(query); m_out.println(change_text + " " + num + (num == 1 ? " ROW" : " ROWS")); } public boolean databaseUserExists() throws SQLException { boolean exists; Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT usename FROM pg_user WHERE " + "usename = '" + m_user + "'"); exists = rs.next(); rs.close(); st.close(); return exists; } public void databaseAddUser() throws SQLException { Statement st = m_dbconnection.createStatement(); st.execute("CREATE USER " + m_user + " WITH PASSWORD '" + m_pass + "' CREATEDB CREATEUSER"); } public boolean databaseDBExists() throws SQLException { boolean exists; Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT datname from pg_database " + "WHERE datname = '" + m_database + "'"); exists = rs.next(); rs.close(); st.close(); return exists; } public void databaseAddDB() throws Exception { Statement st = m_dbconnection.createStatement(); st.execute("CREATE DATABASE " + m_database + " WITH ENCODING='UNICODE'"); } public void createSequences() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; m_out.println("- creating sequences... "); Iterator i = m_sequences.iterator(); while (i.hasNext()) { String sequence = (String) i.next(); if (!m_seqmapping.containsKey(sequence)) { throw new Exception("Cannot find sequence mapping for " + sequence); } } i = m_sequences.iterator(); while (i.hasNext()) { String sequence = (String) i.next(); // String[] mapping = (String[]) m_seqmapping.get(sequence); int minvalue = 1; boolean alreadyExists; m_out.print(" - checking \"" + sequence + "\" sequence... "); rs = st.executeQuery("SELECT relname FROM pg_class " + "WHERE relname = '" + sequence.toLowerCase() + "'"); alreadyExists = rs.next(); if (alreadyExists) { m_out.println("ALREADY EXISTS"); } else { m_out.println("DOES NOT EXIST"); m_out.print(" - creating sequence \"" + sequence + "\"... "); st.execute("CREATE SEQUENCE " + sequence + " minvalue " + minvalue); st.execute("GRANT ALL on " + sequence + " TO " + m_user); m_out.println("OK"); } } m_out.println("- creating sequences... 
DONE"); } public void createTables() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; Iterator i = m_tables.iterator(); m_out.println("- creating tables..."); while (i.hasNext()) { String tableName = (String) i.next(); if (m_force) { tableName = tableName.toLowerCase(); String create = getTableCreateFromSQL(tableName); boolean remove; rs = st.executeQuery("SELECT relname FROM pg_class " + "WHERE relname = '" + tableName + "'"); remove = rs.next(); m_out.print(" - removing old table... "); if (remove) { st.execute("DROP TABLE " + tableName + m_cascade); m_out.println("REMOVED"); } else { m_out.println("CLEAN"); } m_out.print(" - creating table \"" + tableName + "\"... "); st.execute("CREATE TABLE " + tableName + " (" + create + ")"); m_out.println("CREATED"); m_out.print(" - giving \"" + m_user + "\" permissions on \"" + tableName + "\"... "); st.execute("GRANT ALL ON " + tableName + " TO " + m_user); m_out.println("GRANTED"); } else { m_out.print(" - checking table \"" + tableName + "\"... "); tableName = tableName.toLowerCase(); Table newTable = getTableFromSQL(tableName); Table oldTable = getTableFromDB(tableName); if (newTable.equals(oldTable)) { m_out.println("UPTODATE"); } else { if (oldTable == null) { String create = getTableCreateFromSQL(tableName); st.execute("CREATE TABLE " + tableName + " (" + create + ")"); st.execute("GRANT ALL ON " + tableName + " TO " + m_user); m_out.println("CREATED"); } else { try { changeTable(tableName, oldTable, newTable); } catch (Exception e) { throw new Exception("Error changing table '" + tableName + "'. Nested exception: " + e.getMessage(), e); } } } } } m_out.println("- creating tables... DONE"); } public void createIndexes() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; m_out.println("- creating indexes..."); Iterator i = m_indexes.iterator(); while (i.hasNext()) { String index = (String) i.next(); boolean exists; m_out.print(" - creating index \"" + index + "\"... "); rs = st.executeQuery("SELECT relname FROM pg_class " + "WHERE relname = '" + index.toLowerCase() + "'"); exists = rs.next(); if (exists) { m_out.println("EXISTS"); } else { st.execute(getIndexFromSQL(index)); m_out.println("OK"); } } m_out.println("- creating indexes... DONE"); } public Map getTypesFromDB() throws SQLException { if (m_dbtypes != null) { return m_dbtypes; } Statement st = m_dbconnection.createStatement(); ResultSet rs; HashMap<String, Integer> m = new HashMap<String, Integer>(); rs = st.executeQuery("SELECT oid,typname,typlen FROM pg_type"); while (rs.next()) { try { m.put(Column.normalizeColumnType(rs.getString(2), (rs.getInt(3) < 0)), new Integer(rs.getInt(1))); } catch (Exception e) { // ignore } } m_dbtypes = m; return m_dbtypes; } /* * -- Not used, nothing in create.sql... public void createFunctions(List * functions) throws Exception { Statement st = * m_dbconnection.createStatement(); ResultSet rs; Iterator i = * functions.iterator(); while (i.hasNext()) { String function = (String) * i.next(); String functionSql = getFunctionFromSQL(function); Matcher m = * Pattern.compile("\\s*\\((.+?)\\).*").matcher(functionSql); String * columns = m.group(1); if (m_force) { // XXX this doesn't check to see * if the function exists // before it drops it, so it will fail and throw * an // exception if the function doesn't exist. m_out.print("- removing * function \"" + function + "\" if it exists... 
"); String dropSql = * "DROP FUNCTION \"" + function + "\" (" + columns + ");"; * st.execute(dropSql); m_out.println("REMOVED"); } // XXX this doesn't * check to see if the function exists before // it tries to create it, so * it will fail and throw an // exception if the function does exist. * m_out.print("- creating function \"" + function + "\"... "); * st.execute("CREATE FUNCTION \"" + function + "\" " + functionSql); * m_out.println("OK"); } } public void createLanguages() throws Exception { * Statement st = m_dbconnection.createStatement(); ResultSet rs; Iterator * i = m_languages.iterator(); while (i.hasNext()) { String language = * (String) i.next(); String languageSql = getLanguageFromSQL(language); // * XXX this doesn't check to see if the language exists before // it tries * to create it, so it will fail and throw an // exception if the language * does already exist. m_out.print("- creating language reference \"" + * language + "\"... "); st.execute("CREATE TRUSTED PROCEDURAL LANGUAGE '" + * language + "' " + languageSql); m_out.println("OK"); } } */ public void fixData() throws Exception { Statement st = m_dbconnection.createStatement(); st.execute("UPDATE ipinterface SET issnmpprimary='N' " + "WHERE issnmpprimary IS NULL"); st.execute("UPDATE service SET servicename='SSH' " + "WHERE servicename='OpenSSH'"); st.execute("UPDATE snmpinterface SET snmpipadentnetmask=NULL"); } // XXX This causes the following Postgres error: // ERROR: duplicate key violates unique constraint "pk_dpname" void insertData() throws Exception { Statement st = m_dbconnection.createStatement(); for (Iterator i = m_inserts.keySet().iterator(); i.hasNext();) { String table = (String) i.next(); boolean exists = false; m_out.print("- inserting initial table data for \"" + table + "\"... "); for (Iterator j = ((LinkedList) m_inserts.get(table)).iterator(); j.hasNext();) { try { st.execute((String) j.next()); } catch (SQLException e) { /* * SQL Status codes: 23505: ERROR: duplicate key violates * unique constraint "%s" */ if (e.toString().indexOf("duplicate key") != -1 || "23505".equals(e.getSQLState())) { exists = true; } else { throw e; } } } if (exists) { m_out.println("EXISTS"); } else { m_out.println("OK"); } } } public void checkUnicode() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; m_out.print("- checking if database \"" + m_database + "\" is unicode... "); rs = st.executeQuery("SELECT encoding FROM pg_database WHERE " + "datname='" + m_database.toLowerCase() + "'"); rs.next(); if (rs.getInt(1) == 5 || rs.getInt(1) == 6) { m_out.println("ALREADY UNICODE"); return; } m_out.println("NOT UNICODE, CONVERTING"); databaseDisconnect(); String dumpFile = "/tmp/pg_dump-" + m_database; String logFile = "/tmp/unicode-convert.log"; PrintStream log = new PrintStream(new FileOutputStream(logFile, true)); ProcessExec e = new ProcessExec(log, log); int exitVal; log.println("------------------------------------------------------" + "------------------------"); m_out.print(" - dumping data to " + dumpFile + "... 
"); String[] cmd1 = { m_pg_bindir + File.separator + "pg_dump", "-U", m_pg_user, "-a", m_database, "-f", dumpFile }; if ((exitVal = e.exec(cmd1)) != 0) { throw new Exception("Dumping database returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd1) + "', check " + logFile); } m_out.println("OK"); m_out.print(" - waiting 3s for PostgreSQL to notice " + "that pg_dump has disconnected."); Thread.sleep(1000); m_out.print("."); Thread.sleep(1000); m_out.print("."); Thread.sleep(1000); m_out.println(" OK"); m_out.print(" - dropping old database... "); String[] cmd2 = { m_pg_bindir + File.separator + "dropdb", "-U", m_pg_user, m_database }; if ((exitVal = e.exec(cmd2)) != 0) { throw new Exception("Dropping database returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd2) + "', check " + logFile); } m_out.println("OK"); m_out.print(" - creating new unicode database... "); String[] cmd3 = { m_pg_bindir + File.separator + "createdb", "-U", m_pg_user, "-E", "UNICODE", m_database }; if ((exitVal = e.exec(cmd3)) != 0) { throw new Exception("Creating database returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd3) + "', check " + logFile); } m_out.println("OK"); m_out.print(" - recreating tables... "); String[] cmd4 = { m_pg_bindir + File.separator + "psql", "-U", m_user, "-f", m_sql_dir + File.separator + "create.sql", m_database }; if ((exitVal = e.exec(cmd4)) != 0) { throw new Exception("Recreating tables returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd4) + "', check " + logFile); } m_out.println("OK"); m_out.print(" - restoring data... "); String[] cmd5 = { m_pg_bindir + File.separator + "psql", "-U", m_user, "-f", dumpFile, m_database }; if ((exitVal = e.exec(cmd5)) != 0) { throw new Exception("Restoring data returned non-zero exit " + "value " + exitVal + " while executing " + "command '" + join(" ", cmd5) + "', check " + logFile); } m_out.println("OK"); log.close(); databaseConnect(m_database); } public void verifyFilesAndDirectories() throws FileNotFoundException { if (m_update_database) { verifyFileExists(true, m_sql_dir, "SQL directory", "install.etc.dir property"); verifyFileExists(false, m_create_sql, "create.sql", "install.etc.dir property"); } if (m_update_iplike) { verifyFileExists(false, m_pg_iplike, "iplike module", "install.postgresql.dir property"); } if (m_tomcat_conf != null) { verifyFileExists( false, m_tomcat_conf, "Tomcat startup configuration file tomcat4.conf", "-T option"); } if (m_install_webapp) { verifyFileExists(true, m_webappdir, "Tomcat context directory", "-w option"); verifyFileExists(true, m_install_servletdir, "OpenNMS servlet directory", "install.servlet.dir property"); } } public void verifyFileExists(boolean isDir, String file, String description, String option) throws FileNotFoundException { File f; if (file == null) { throw new FileNotFoundException("The user most provide the " + "location of " + description + ", but this is not specified. " + "Use the " + option + " to specify this file."); } m_out.print("- using " + description + "... "); f = new File(file); if (!f.exists()) { throw new FileNotFoundException(description + " does not exist at \"" + file + "\". Use the " + option + " to specify another location."); } if (!isDir) { if (!f.isFile()) { throw new FileNotFoundException(description + " not a file at \"" + file + "\". 
Use the " + option + " to specify another file."); } } else { if (!f.isDirectory()) { throw new FileNotFoundException(description + " not a directory at \"" + file + "\". Use the " + option + " to specify " + "another directory."); } } m_out.println(f.getAbsolutePath()); } public void addStoredProcedures() throws Exception { Statement st = m_dbconnection.createStatement(); m_out.print("- adding stored procedures... "); FileFilter sqlFilter = new FileFilter() { public boolean accept(File pathname) { return (pathname.getName().startsWith("get") && pathname.getName().endsWith(".sql")) || pathname.getName().endsWith("Trigger.sql"); } }; File[] list = new File(m_sql_dir).listFiles(sqlFilter); for (int i = 0; i < list.length; i++) { LinkedList<String> drop = new LinkedList<String>(); StringBuffer create = new StringBuffer(); String line; m_out.print("\n - " + list[i].getName() + "... "); BufferedReader r = new BufferedReader(new FileReader(list[i])); while ((line = r.readLine()) != null) { line = line.trim(); if (line.matches("--.*")) { continue; } if (line.toLowerCase().startsWith("drop function")) { drop.add(line); } else { create.append(line); create.append("\n"); } } r.close(); Matcher m = Pattern.compile( "(?is)\\b(CREATE(?: OR REPLACE)? FUNCTION\\s+" + "(\\w+)\\s*\\((.*?)\\)\\s+" + "RETURNS\\s+(\\S+)\\s+AS\\s+" + "(.+? language ['\"]?\\w+['\"]?);)").matcher( create.toString()); if (!m.find()) { throw new Exception("Couldn't match \"" + m.pattern().pattern() + "\" in string \"" + create + "\""); } String createSql = m.group(1); String function = m.group(2); String columns = m.group(3); String returns = m.group(4); // String rest = m.group(5); if (functionExists(function, columns, returns)) { if (m_force) { st.execute("DROP FUNCTION " + function + "(" + columns + ")"); st.execute(createSql); m_out.print("OK (dropped and re-added)"); } else { m_out.print("EXISTS"); } } else { st.execute(createSql); m_out.print("OK"); } Pattern p = Pattern.compile("(?i)" + "(CREATE TRIGGER (\\S+)\\s+" + "BEFORE INSERT OR UPDATE\\s+" + "ON (\\S+) FOR EACH ROW\\s+" + "EXECUTE PROCEDURE (\\S+)\\(\\));"); m = p.matcher(create.toString()); if (m.find()) { String triggerSql = m.group(1); String triggerName = m.group(2); String triggerTable = m.group(3); String triggerProc = m.group(4); m_out.print(" - checking trigger '" + triggerName + "' ..."); if (triggerExists(triggerName, triggerTable, triggerProc)) { m_out.println("EXISTS"); } else { st.execute(triggerSql); m_out.println("ADDED"); } } } m_out.println(""); } public boolean functionExists(String function, String columns, String returnType) throws Exception { Map types = getTypesFromDB(); int[] columnTypes = new int[0]; columns = columns.trim(); if (columns.length() > 0) { String[] splitColumns = columns.split("\\s*,\\s*"); columnTypes = new int[splitColumns.length]; Column c; for (int j = 0; j < splitColumns.length; j++) { c = new Column(); c.parseColumnType(splitColumns[j]); columnTypes[j] = ((Integer) types.get(c.getType())).intValue(); } } Column c = new Column(); try { c.parseColumnType(returnType); } catch (Exception e) { throw new Exception("Could not parse column type '" + returnType + "' for function '" + function + "'. 
Nested exception: " + e.getMessage(), e); } int retType = ((Integer) types.get(c.getType())).intValue(); return functionExists(function, columnTypes, retType); } public boolean functionExists(String function, int[] columnTypes, int retType) throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; StringBuffer ct = new StringBuffer(); for (int j = 0; j < columnTypes.length; j++) { ct.append(" " + columnTypes[j]); } String query = "SELECT oid FROM pg_proc WHERE proname='" + function.toLowerCase() + "' AND " + "prorettype=" + retType + " AND " + "proargtypes='" + ct.toString().trim() + "'"; rs = st.executeQuery(query); return rs.next(); }
public void checkWebappOldOpennmsDir() throws Exception { File f = new File(m_webappdir + File.separator + "opennms"); m_out.print("- Checking for old opennms webapp directory in " + f.getAbsolutePath() + "... "); if (f.exists()) { throw new Exception("Old OpenNMS web application exists: " + f.getAbsolutePath() + ". You need to remove this " + "before continuing."); } m_out.println("OK"); }
public void checkServerXmlOldOpennmsContext() throws Exception { String search_regexp = "(?ms).*<Context\\s+path=\"/opennms\".*"; StringBuffer b = new StringBuffer(); File f = new File(m_webappdir + File.separator + ".." + File.separator + "conf" + File.separator + "server.xml"); m_out.print("- Checking for old opennms context in " + f.getAbsolutePath() + "... "); if (!f.exists()) { m_out.println("DID NOT CHECK (file does not exist)"); return; } BufferedReader r = new BufferedReader(new FileReader(f)); String line; while ((line = r.readLine()) != null) { b.append(line); b.append("\n"); } r.close(); if (b.toString().matches(search_regexp)) { throw new Exception( "Old OpenNMS context found in " + f.getAbsolutePath() + ". " + "You must remove this context from server.xml and re-run the " + "installer."); } m_out.println("OK"); return; }
public void installWebApp() throws Exception { m_out.println("- Installing OpenNMS webapp... "); installLink(m_install_servletdir + File.separator + "META-INF" + File.separator + "context.xml", m_webappdir + File.separator + "opennms.xml", "web application context", false); m_out.println("- Installing OpenNMS webapp... DONE"); }
public void installLink(String source, String destination, String description, boolean recursive) throws Exception { String[] cmd; ProcessExec e = new ProcessExec(m_out, m_out); if (new File(destination).exists()) { m_out.print(" - " + destination + " exists, removing... "); removeFile(destination, description, recursive); m_out.println("REMOVED"); } m_out.print(" - creating link to " + destination + "... "); cmd = new String[4]; cmd[0] = "ln"; cmd[1] = "-sf"; cmd[2] = source; cmd[3] = destination; if (e.exec(cmd) != 0) { throw new Exception("Non-zero exit value returned while " + "linking " + description + ", " + source + " into " + destination); } m_out.println("DONE"); }
public void updateTomcatConf() throws Exception { File f = new File(m_tomcat_conf);
// XXX give the user the option to set the user to something else?
// if so, should we chown the appropriate OpenNMS files to the
// tomcat user?
//
// XXX should we have the option to automatically try to determine
// the tomcat user and chown the OpenNMS files to that user?
m_out.print("- setting tomcat4 user to 'root'... "); BufferedReader r = new BufferedReader(new FileReader(f)); StringBuffer b = new StringBuffer(); String line; while ((line = r.readLine()) != null) { if (line.startsWith("TOMCAT_USER=")) { b.append("TOMCAT_USER=\"root\"\n"); } else { b.append(line); b.append("\n"); } } r.close(); f.renameTo(new File(m_tomcat_conf + ".before-opennms-" + System.currentTimeMillis())); f = new File(m_tomcat_conf); PrintWriter w = new PrintWriter(new FileOutputStream(f)); w.print(b.toString()); w.close(); m_out.println("done"); }
public void removeFile(String destination, String description, boolean recursive) throws IOException, InterruptedException, Exception { String[] cmd; ProcessExec e = new ProcessExec(m_out, m_out); if (recursive) { cmd = new String[3]; cmd[0] = "rm"; cmd[1] = "-r"; cmd[2] = destination; } else { cmd = new String[2]; cmd[0] = "rm"; cmd[1] = destination; } if (e.exec(cmd) != 0) { throw new Exception("Non-zero exit value returned while " + "removing " + description + ", " + destination + ", using \"" + join(" ", cmd) + "\""); } if (new File(destination).exists()) { throw new Exception("Could not delete existing " + description + ": " + destination); } }
public void updateIplike() throws Exception { Statement st = m_dbconnection.createStatement(); m_out.print("- checking for stale iplike references... "); try { st.execute("DROP FUNCTION iplike(text,text)"); m_out.println("REMOVED"); } catch (SQLException e) { /* * SQL Status code: 42883: ERROR: function %s does not exist */ if (e.toString().indexOf("does not exist") != -1 || "42883".equals(e.getSQLState())) { m_out.println("CLEAN"); } else { throw e; } }
// XXX This error is generated from Postgres if eventtime(text)
// does not exist:
// ERROR: function eventtime(text) does not exist
m_out.print("- checking for stale eventtime.so references... "); try { st.execute("DROP FUNCTION eventtime(text)"); m_out.println("REMOVED"); } catch (SQLException e) { /* * SQL Status code: 42883: ERROR: function %s does not exist */ if (e.toString().indexOf("does not exist") != -1 || "42883".equals(e.getSQLState())) { m_out.println("CLEAN"); } else { throw e; } } m_out.print("- adding iplike database function... "); st.execute("CREATE FUNCTION iplike(text,text) RETURNS bool " + "AS '" + m_pg_iplike + "' LANGUAGE 'c' WITH(isstrict)"); m_out.println("OK"); }
public void updatePlPgsql() throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; m_out.print("- adding PL/pgSQL call handler... "); rs = st.executeQuery("SELECT oid FROM pg_proc WHERE " + "proname='plpgsql_call_handler' AND " + "proargtypes = ''"); if (rs.next()) { m_out.println("EXISTS"); } else { st.execute("CREATE FUNCTION plpgsql_call_handler () " + "RETURNS OPAQUE AS '$libdir/plpgsql.so' LANGUAGE 'c'"); m_out.println("OK"); } m_out.print("- adding PL/pgSQL language module... 
"); rs = st.executeQuery("SELECT pg_language.oid FROM " + "pg_language, pg_proc WHERE " + "pg_proc.proname='plpgsql_call_handler' AND " + "pg_proc.proargtypes = '' AND " + "pg_proc.oid = pg_language.lanplcallfoid AND " + "pg_language.lanname = 'plpgsql'"); if (rs.next()) { m_out.println("EXISTS"); } else { st.execute("CREATE TRUSTED PROCEDURAL LANGUAGE 'plpgsql' " + "HANDLER plpgsql_call_handler LANCOMPILER 'PL/pgSQL'"); m_out.println("OK"); } } public Column findColumn(List columns, String column) { Column c; for (Iterator i = columns.iterator(); i.hasNext();) { c = (Column) i.next(); if (c.getName().equals(column.toLowerCase())) { return c; } } return null; } public String getXFromSQL(String item, String regex, int itemGroup, int returnGroup, String description) throws Exception { item = item.toLowerCase(); Matcher m = Pattern.compile(regex).matcher(m_sql); while (m.find()) { if (m.group(itemGroup).toLowerCase().equals(item)) { return m.group(returnGroup); } } throw new Exception("could not find " + description + " \"" + item + "\""); } public String getTableCreateFromSQL(String table) throws Exception { return getXFromSQL(table, "(?i)\\bcreate table\\s+['\"]?(\\S+)['\"]?" + "\\s+\\((.+?)\\);", 1, 2, "table"); } public String getIndexFromSQL(String index) throws Exception { return getXFromSQL(index, "(?i)\\b(create (?:unique )?index\\s+" + "['\"]?(\\S+)['\"]?\\s+.+?);", 2, 1, "index"); } public String getFunctionFromSQL(String function) throws Exception { return getXFromSQL(function, "(?is)\\bcreate function\\s+" + "['\"]?(\\S+)['\"]?\\s+" + "(.+? language ['\"]?\\w+['\"]?);", 1, 2, "function"); } public String getLanguageFromSQL(String language) throws Exception { return getXFromSQL(language, "(?is)\\bcreate trusted procedural " + "language\\s+['\"]?(\\S+)['\"]?\\s+(.+?);", 1, 2, "language"); } public List<Column> getTableColumnsFromSQL(String tableName) throws Exception { return getTableFromSQL(tableName).getColumns(); } public Table getTableFromSQL(String tableName) throws Exception { Table table = new Table(); LinkedList<Column> columns = new LinkedList<Column>(); LinkedList<Constraint> constraints = new LinkedList<Constraint>(); boolean parens = false; StringBuffer accumulator = new StringBuffer(); String create = getTableCreateFromSQL(tableName); for (int i = 0; i <= create.length(); i++) { char c = ' '; if (i < create.length()) { c = create.charAt(i); if (c == '(' || c == ')') { parens = (c == '('); accumulator.append(c); continue; } } if (((c == ',') && !parens) || i == create.length()) { String a = accumulator.toString().trim(); if (a.toLowerCase().startsWith("constraint ")) { Constraint constraint; try { constraint = new Constraint(tableName, a); } catch (Exception e) { throw new Exception("Could not parse constraint for table '" + tableName + "'. Nested exception: " + e.getMessage(), e); } List<String> constraintColumns = constraint.getColumns(); if (constraintColumns.size() == 0) { throw new IllegalStateException( "constraint with no constrained columns"); } for (String constrainedName : constraintColumns) { Column constrained = findColumn(columns, constrainedName); if (constrained == null) { throw new Exception( "constraint " + constraint.getName() + " references column \"" + constrainedName + "\", which is not a column in the table " + tableName); } } constraints.add(constraint); } else { Column column = new Column(); try { column.parse(accumulator.toString()); columns.add(column); } catch (Exception e) { throw new Exception("Could not parse table " + tableName + ". 
Chained: " + e.getMessage(), e); } } accumulator = new StringBuffer(); } else { accumulator.append(c); } } table.setName(tableName); table.setColumns(columns); table.setConstraints(constraints); table.setNotNullOnPrimaryKeyColumns(); return table; } public static String cleanText(List list) { StringBuffer s = new StringBuffer(); Iterator i = list.iterator(); while (i.hasNext()) { String l = (String) i.next(); s.append(l.replaceAll("\\s+", " ")); if (l.indexOf(';') != -1) { s.append('\n'); } } return s.toString(); } public boolean tableExists(String table) throws SQLException { Statement st = m_dbconnection.createStatement(); ResultSet rs; rs = st.executeQuery("SELECT DISTINCT tablename FROM pg_tables " + "WHERE lower(tablename) = '" + table.toLowerCase() + "'"); return rs.next(); } public boolean tableColumnExists(String table, String column) throws Exception { return (findColumn(getTableColumnsFromDB(table), column) != null); } public List<Column> getTableColumnsFromDB(String tableName) throws Exception { Table table = getTableFromDB(tableName); if (table == null) { return null; } return table.getColumns(); } public List<Column> getColumnsFromDB(String tableName) throws Exception { LinkedList<Column> columns = new LinkedList<Column>(); Statement st = m_dbconnection.createStatement(); ResultSet rs; String query = "SELECT " + " attname, " + " format_type(atttypid, atttypmod), " + " attnotnull " + "FROM " + " pg_attribute " + "WHERE " + " attrelid = " + " (SELECT oid FROM pg_class WHERE relname = '" + tableName.toLowerCase() + "') AND " + " attnum > 0"; if (m_pg_version >= 7.3) { query = query + " AND attisdropped = false"; } query = query + " ORDER BY " + " attnum"; rs = st.executeQuery(query); while (rs.next()) { Column c = new Column(); c.setName(rs.getString(1)); String columnType = rs.getString(2); try { c.parseColumnType(columnType); } catch (Exception e) { throw new Exception("Error parsing column type '" + columnType + "' for column '" + rs.getString(1) + "' in table '" + tableName + "'. 
Nested: " + e.getMessage(), e); } c.setNotNull(rs.getBoolean(3)); columns.add(c); } rs.close(); st.close(); return columns; } public Table getTableFromDB(String tableName) throws Exception { if (!tableExists(tableName)) { return null; } Table table = new Table(); table.setName(tableName.toLowerCase()); List<Column> columns = getColumnsFromDB(tableName); List<Constraint> constraints = getConstraintsFromDB(tableName); table.setColumns(columns); table.setConstraints(constraints); return table; } public List<Constraint> getConstraintsFromDB(String tableName) throws SQLException, Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; LinkedList<Constraint> constraints = new LinkedList<Constraint>(); String query = "SELECT c.oid, c.conname, c.contype, c.conrelid, c.confrelid, a.relname, c.confdeltype from pg_class a right join pg_constraint c on c.confrelid = a.oid where c.conrelid = (select oid from pg_class where relname = '" + tableName.toLowerCase() + "') order by c.oid"; rs = st.executeQuery(query); while (rs.next()) { int oid = rs.getInt(1); String name = rs.getString(2); String type = rs.getString(3); int conrelid = rs.getInt(4); int confrelid = rs.getInt(5); String ftable = rs.getString(6); String foreignDelType = rs.getString(7); Constraint constraint; if ("p".equals(type)) { List<String> columns = getConstrainedColumnsFromDBForConstraint( oid, conrelid); constraint = new Constraint(tableName.toLowerCase(), name, columns); } else if ("f".equals(type)) { List<String> columns = getConstrainedColumnsFromDBForConstraint( oid, conrelid); List<String> fcolumns = getForeignColumnsFromDBForConstraint( oid, confrelid); constraint = new Constraint(tableName.toLowerCase(), name, columns, ftable, fcolumns, foreignDelType); } else { throw new Exception("Do not support constraint type \"" + type + "\" in constraint \"" + name + "\""); } constraints.add(constraint); } return constraints; } private List<String> getConstrainedColumnsFromDBForConstraint(int oid, int conrelid) throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; LinkedList<String> columns = new LinkedList<String>(); String query = "select a.attname from pg_attribute a, pg_constraint c where a.attrelid = c.conrelid and a.attnum = ANY (c.conkey) and c.oid = " + oid + " and a.attrelid = " + conrelid; rs = st.executeQuery(query); while (rs.next()) { columns.add(rs.getString(1)); } rs.close(); st.close(); return columns; } private List<String> getForeignColumnsFromDBForConstraint(int oid, int confrelid) throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs; LinkedList<String> columns = new LinkedList<String>(); String query = "select a.attname from pg_attribute a, pg_constraint c where a.attrelid = c.confrelid and a.attnum = ANY (c.confkey) and c.oid = " + oid + " and a.attrelid = " + confrelid; rs = st.executeQuery(query); while (rs.next()) { columns.add(rs.getString(1)); } rs.close(); st.close(); return columns; } public void changeTable(String table, Table oldTable, Table newTable) throws Exception { List<Column> oldColumns = oldTable.getColumns(); List<Column> newColumns = newTable.getColumns(); Statement st = m_dbconnection.createStatement(); TreeMap<String, ColumnChange> columnChanges = new TreeMap<String, ColumnChange>(); String[] oldColumnNames = new String[oldColumns.size()]; int i; Iterator j; if (m_changed.contains(table)) { return; } m_changed.add(table); m_out.println("SCHEMA DOES NOT MATCH"); m_out.println(" - differences:"); for (Constraint newConstraint 
: newTable.getConstraints()) { m_out.println("new constraint: " + newConstraint.getTable() + ": " + newConstraint); } for (Constraint oldConstraint : oldTable.getConstraints()) { m_out.println("old constraint: " + oldConstraint.getTable() + ": " + oldConstraint); } /* * XXX This doesn't check for old column rows that don't exist * in newColumns. */ for (Column newColumn : newColumns) { Column oldColumn = findColumn(oldColumns, newColumn.getName()); if (oldColumn == null || !newColumn.equals(oldColumn)) { m_out.println(" - column \"" + newColumn.getName() + "\" is different"); if (m_debug) { m_out.println(" - old column: " + ((oldColumn == null) ? "null" : oldColumn.toString())); m_out.println(" - new column: " + newColumn); } } if (!columnChanges.containsKey(newColumn.getName())) { columnChanges.put(newColumn.getName(), new ColumnChange()); } ColumnChange columnChange = (ColumnChange) columnChanges.get(newColumn.getName()); columnChange.setColumn(newColumn); /* * If the new column has a NOT NULL constraint, set a null replace * value for the column. Throw an exception if it is possible for * null data to be inserted into the new column. This would happen * if there is not a null replacement and the column either didn't * exist before or it did NOT have the NOT NULL constraint before. */ if (m_columnReplacements.containsKey(table + "." + newColumn.getName())) { columnChange.setNullReplace(m_columnReplacements.get(table + "." + newColumn.getName())); } if (newColumn.isNotNull() && columnChange.getNullReplace() == null) { if (oldColumn == null) { String message = "Column " + newColumn.getName() + " in new table has NOT NULL " + "constraint, however this column " + "did not exist before and there is " + "no null replacement for this " + "column"; if (m_ignore_notnull) { m_out.println(message + ". Ignoring due to '-N'"); } else { throw new Exception(message); } } else if (!oldColumn.isNotNull()) { String message = "Column " + newColumn.getName() + " in new table has NOT NULL " + "constraint, however this column " + "did not have the NOT NULL " + "constraint before and there is " + "no null replacement for this " + "column"; if (m_ignore_notnull) { m_out.println(message + ". Ignoring due to '-N'"); } else { throw new Exception(message); } } } } i = 0; for (j = oldColumns.iterator(); j.hasNext(); i++) { Column oldColumn = (Column) j.next(); oldColumnNames[i] = oldColumn.getName(); if (columnChanges.containsKey(oldColumn.getName())) { ColumnChange columnChange = (ColumnChange) columnChanges.get(oldColumn.getName()); Column newColumn = (Column) columnChange.getColumn(); if (newColumn.getType().indexOf("timestamp") != -1) { columnChange.setUpgradeTimestamp(true); } } else { m_out.println(" * WARNING: column \"" + oldColumn.getName() + "\" exists in the " + "database but is not in the new schema. " + "REMOVING COLUMN"); } } String tmpTable = table + "_old_" + System.currentTimeMillis(); try { if (tableExists(tmpTable)) { st.execute("DROP TABLE " + tmpTable + m_cascade); } m_out.print(" - creating temporary table... "); st.execute("CREATE TABLE " + tmpTable + " AS SELECT " + join(", ", oldColumnNames) + " FROM " + table); m_out.println("done"); st.execute("DROP TABLE " + table + m_cascade); m_out.print(" - creating new '" + table + "' table... 
"); st.execute("CREATE TABLE " + table + "(" + getTableCreateFromSQL(table) + ")"); m_out.println("done"); transformData(table, tmpTable, columnChanges, oldColumnNames); st.execute("GRANT ALL ON " + table + " TO " + m_user); m_out.print(" - optimizing table " + table + "... "); st.execute("VACUUM ANALYZE " + table); m_out.println("DONE"); } catch (Exception e) { if (m_no_revert) { m_out.println("FAILED! Not reverting due to '-R' being " + "passed. Old data in " + tmpTable); throw e; } try { m_dbconnection.rollback(); m_dbconnection.setAutoCommit(true); if (tableExists(table)) { st.execute("DROP TABLE " + table + m_cascade); } st.execute("CREATE TABLE " + table + " AS SELECT " + join(", ", oldColumnNames) + " FROM " + tmpTable); st.execute("DROP TABLE " + tmpTable); } catch (SQLException se) { throw new Exception("Got SQLException while trying to " + "revert table changes due to original " + "error: " + e + "\n" + "SQLException while reverting table: " + se, e); } m_out.println("FAILED! Old data restored, however indexes and " + "constraints on this table were not re-added"); throw e; } // We don't care if dropping the tmp table fails since we've // completed copying it, so it's outside of the try/catch block above. st.execute("DROP TABLE " + tmpTable); m_out.println(" - completed updating table... "); } /* * Note: every column has a ColumnChange record for it, which lists * the column name, a null replacement, if any, and the indexes for * selected rows (for using in ResultSet.getXXX()) and prepared rows * (PreparedStatement.setObject()). * Monkey. Make monkey dance. */ public void transformData(String table, String oldTable, TreeMap<String, ColumnChange> columnChanges, String[] oldColumnNames) throws SQLException, ParseException, Exception { Statement st = m_dbconnection.createStatement(); Iterator j; int i; st.setFetchSize(s_fetch_size); String[] columns = columnChanges.keySet().toArray(new String[0]); String[] questionMarks = new String[columns.length]; for (i = 0; i < oldColumnNames.length; i++) { ColumnChange c = columnChanges.get(oldColumnNames[i]); if (c != null) { c.setSelectIndex(i + 1); } } for (i = 0; i < columns.length; i++) { questionMarks[i] = "?"; ColumnChange c = columnChanges.get(columns[i]); c.setPrepareIndex(i + 1); c.setColumnType(((Column) c.getColumn()).getColumnSqlType()); } /* * Pull everything in from the old table and filter it to update the * data to any new formats. 
*/ m_out.print(" - transforming data into the new table...\r"); ResultSet rs = st.executeQuery("SELECT count(*) FROM " + oldTable); rs.next(); long num_rows = rs.getLong(1); PreparedStatement select = null; PreparedStatement insert = null; String order; if (table.equals("outages")) { order = " ORDER BY iflostservice"; } else { order = ""; } String dbcmd = "SELECT " + join(", ", oldColumnNames) + " FROM " + oldTable + order; if (m_debug) { m_out.println(" - performing select: " + dbcmd); } select = m_dbconnection.prepareStatement(dbcmd); select.setFetchSize(s_fetch_size); // error = "Unable to prepare select from temp"; dbcmd = "INSERT INTO " + table + " (" + join(", ", columns) + ") values (" + join(", ", questionMarks) + ")"; if (m_debug) { m_out.println(" - performing insert: " + dbcmd); } insert = m_dbconnection.prepareStatement(dbcmd); // error = "Unable to prepare insert into " + table); rs = select.executeQuery(); m_dbconnection.setAutoCommit(false); String name; ColumnChange change; Object obj; SimpleDateFormat dateParser = new SimpleDateFormat( "dd-MMM-yyyy HH:mm:ss"); SimpleDateFormat dateFormatter = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss"); char spin[] = { '/', '-', '\\', '|' }; int current_row = 0; while (rs.next()) { for (j = columnChanges.keySet().iterator(); j.hasNext();) { name = (String) j.next(); change = (ColumnChange) columnChanges.get(name); if (change.getSelectIndex() > 0) { obj = rs.getObject(change.getSelectIndex()); if (rs.wasNull()) { obj = null; } } else { if (m_debug) { m_out.println(" - don't know what to do " + "for \"" + name + "\", prepared column " + change.getPrepareIndex() + ": setting to null"); } obj = null; } /* if (table.equals("outages") && name.equals("outageid")) { obj = new Integer(current_row + 1); } if (table.equals("usersnotified") && name.equals("id")) { obj = new Integer(current_row + 1); } */ if (obj == null && change.isNullReplace()) { obj = change.getNullReplace(); if (obj instanceof ColumnChangeReplacement) { obj = ((ColumnChangeReplacement) obj).getColumnReplacement(rs, columnChanges); } if (m_debug) { m_out.println(" - " + name + " was NULL but is a " + "requires NULL replacement -- " + "replacing with '" + obj + "'"); } } if (obj != null) { if (change.isUpgradeTimestamp() && !obj.getClass().equals( java.sql.Timestamp.class)) { if (m_debug) { m_out.println(" - " + name + " is an old-style timestamp"); } String newObj = dateFormatter.format(dateParser.parse((String) obj)); if (m_debug) { m_out.println(" - " + obj + " -> " + newObj); } obj = newObj; } if (m_debug) { m_out.println(" - " + name + " = " + obj); } } else { if (m_debug) { m_out.println(" - " + name + " = undefined"); } } if (obj == null) { insert.setNull(change.getPrepareIndex(), change.getColumnType()); } else { insert.setObject(change.getPrepareIndex(), obj); } } try { insert.execute(); } catch (SQLException e) { SQLException ex = new SQLException( "Statement.execute() threw an " + "SQLException while inserting a row: " + "\"" + insert.toString() + "\". " + "Original exception: " + e.toString(), e.getSQLState(), e.getErrorCode()); ex.setNextException(e); throw ex; } current_row++; if ((current_row % 20) == 0) { System.err.print(" - transforming data into the new " + "table... 
" + (int) Math.floor((current_row * 100) / num_rows) + "% [" + spin[(current_row / 20) % spin.length] + "]\r"); } } m_dbconnection.commit(); m_dbconnection.setAutoCommit(true); if (table.equals("events") && num_rows == 0) { st.execute("INSERT INTO events (eventid, eventuei, eventtime, " + "eventsource, eventdpname, eventcreatetime, " + "eventseverity, eventlog, eventdisplay) values " + "(0, 'http://uei.opennms.org/dummyevent', now(), " + "'OpenNMS.Eventd', 'localhost', now(), 1, 'Y', 'Y')"); } m_out.println(" - transforming data into the new table... " + "DONE "); } public void printHelp() { m_out.println("usage:"); m_out.println(" $OPENNMS_HOME/bin/install -h"); m_out.println(" $OPENNMS_HOME/bin/install " + "[-r] [-x] [-N] [-R] [-c] [-d] [-i] [-s] [-U]"); m_out.println(" [-y] [-X]"); m_out.println(" " + "[-u <PostgreSQL admin user>]"); m_out.println(" " + "[-p <PostgreSQL admin password>]"); m_out.println(" " + "[-T <tomcat4.conf>]"); m_out.println(" " + "[-w <tomcat context directory>"); m_out.println(" " + "[-C <constraint>]"); m_out.println(""); m_out.println(m_required_options); m_out.println(""); m_out.println(" -h this help"); m_out.println(""); m_out.println(" -d perform database actions"); m_out.println(" -i insert data into the database"); m_out.println(" -s update iplike postgres function"); m_out.println(" -U upgrade database to unicode, if needed"); m_out.println(" -y install web application (see -w)"); m_out.println(""); m_out.println(" -u username of the PostgreSQL " + "administrator (default: \"" + m_pg_user + "\")"); m_out.println(" -p password of the PostgreSQL " + "administrator (default: \"" + m_pg_pass + "\")"); m_out.println(" -c drop and recreate tables that already " + "exist"); m_out.println(""); m_out.println(" -T location of tomcat.conf"); m_out.println(" -w location of tomcat's contcxt directory"); m_out.println(" (usually under conf/Catalina/localhost)"); m_out.println(""); m_out.println(" -r run as an RPM install (does nothing)"); m_out.println(" -x turn on debugging for database data " + "transformation"); m_out.println(" -N ignore NOT NULL constraint checks when " + "transforming data"); m_out.println(" useful after a table is reverted by a " + "previous run of the installer"); m_out.println(" -R do not revert a table to the original if " + "an error occurs when"); m_out.println(" transforming data -- only used for debugging"); m_out.println(" -C fix rows that violate the specified " + "constraint -- sets key column in"); m_out.println(" affected rows to NULL by default"); m_out.println(" -X drop rows that violate constraint instead of marking key column in"); m_out.println(" affected rows to NULL (used with \"-C\")"); System.exit(0); } public static void main(String[] argv) throws Exception { new Installer().install(argv); } /** * Join all of the elements of a String together into a single string, * inserting sep between each element. 
*/ public static String join(String sep, String[] array) { StringBuffer sb = new StringBuffer(); if (array.length > 0) { sb.append(array[0]); } for (int i = 1; i < array.length; i++) { sb.append(sep + array[i]); } return sb.toString(); } public static String join(String sep, List<String> list) { StringBuffer sb = new StringBuffer(); Iterator i = list.iterator(); if (i.hasNext()) { sb.append(i.next()); } while (i.hasNext()) { sb.append(sep + i.next()); } return sb.toString(); } public static String join(String sep, Object[] array) { StringBuffer sb = new StringBuffer(); if (array.length > 0) { sb.append(array[0].toString()); } for (int i = 1; i < array.length; i++) { if (array[i] == null) { sb.append(sep + "(null)"); } else { sb.append(sep + array[i].toString()); } } return sb.toString(); } public String checkServerVersion() throws IOException { File catalinaHome = new File(m_webappdir).getParentFile(); String readmeVersion = getTomcatVersion(new File(catalinaHome, "README.txt")); String runningVersion = getTomcatVersion(new File(catalinaHome, "RUNNING.txt")); if (readmeVersion == null && runningVersion == null) { return null; } else if (readmeVersion != null && runningVersion != null) { return readmeVersion; // XXX what should be done here? } else if (readmeVersion != null && runningVersion == null) { return readmeVersion; } else { return runningVersion; } } public String getTomcatVersion(File file) throws IOException { if (file == null || !file.exists()) { return null; } Pattern p = Pattern.compile("The Tomcat (\\S+) Servlet/JSP Container"); BufferedReader in = new BufferedReader(new FileReader(file)); for (int i = 0; i < 5; i++) { String line = in.readLine(); if (line == null) { // EOF in.close(); return null; } Matcher m = p.matcher(line); if (m.find()) { in.close(); return m.group(1); } } in.close(); return null; } public class AutoInteger implements ColumnChangeReplacement { private int m_value; public AutoInteger(int initialValue) { m_value = initialValue; } public int getInt() { return m_value++; } public Integer getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) { return getInt(); } } public class AutoIntegerIdMapStore implements ColumnChangeReplacement { private int m_value; private String[] m_indexColumns; private Map<MultiColumnKey, Integer> m_idMap = new HashMap<MultiColumnKey, Integer>(); public AutoIntegerIdMapStore(int initialValue, String[] indexColumns) { m_value = initialValue; m_indexColumns = indexColumns; } public Integer getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { MultiColumnKey key = getKeyForColumns(rs, columnChanges, m_indexColumns); Integer newInteger = m_value++; m_idMap.put(key, newInteger); return newInteger; } public Integer getIntegerForColumns(ResultSet rs, Map<String, ColumnChange> columnChanges, String[] columns, boolean noMatchOkay) throws SQLException { MultiColumnKey key = getKeyForColumns(rs, columnChanges, columns); Integer oldInteger = m_idMap.get(key); if (oldInteger == null && !noMatchOkay) { throw new IllegalArgumentException("No entry in the map for " + key); } return oldInteger; } private MultiColumnKey getKeyForColumns(ResultSet rs, Map<String, ColumnChange> columnChanges, String[] columns) throws SQLException { Object[] objects = new Object[columns.length]; for (int i = 0; i < columns.length; i++) { String indexColumn = columns[i]; ColumnChange columnChange = columnChanges.get(indexColumn); if (columnChange == null) { throw new IllegalArgumentException("No 
ColumnChange entry for '" + indexColumn + "'"); } int index = columnChange.getSelectIndex(); if (index == 0) { throw new IllegalArgumentException("ColumnChange entry for '" + indexColumn + "' has no select index"); } objects[i] = rs.getObject(index); } return new MultiColumnKey(objects); } public class MultiColumnKey { private Object[] m_keys; public MultiColumnKey(Object[] keys) { m_keys = keys; } @Override public boolean equals(Object otherObject) { if (!(otherObject instanceof MultiColumnKey)) { return false; } MultiColumnKey other = (MultiColumnKey) otherObject; if (m_keys.length != other.m_keys.length) { return false; } for (int i = 0; i < m_keys.length; i++) { if (m_keys[i] == null && other.m_keys[i] == null) { continue; } if (m_keys[i] == null || other.m_keys[i] == null) { return false; } if (!m_keys[i].equals(other.m_keys[i])) { return false; } } return true; } @Override public String toString() { return join(", ", m_keys); } @Override public int hashCode() { int value = 1; for (Object o : m_keys) { if (o != null) { // not the other way around, since 1 ^ anything == 1 value = o.hashCode() ^ value; } } return value; } } } public class MapStoreIdGetter implements ColumnChangeReplacement { private AutoIntegerIdMapStore m_storeFoo; private String[] m_indexColumns; private boolean m_noMatchOkay; public MapStoreIdGetter(AutoIntegerIdMapStore storeFoo, String[] columns, boolean noMatchOkay) { m_storeFoo = storeFoo; m_indexColumns = columns; m_noMatchOkay = noMatchOkay; } public Object getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { return m_storeFoo.getIntegerForColumns(rs, columnChanges, m_indexColumns, m_noMatchOkay); } } public class EventSourceReplacement implements ColumnChangeReplacement { private static final String m_replacement = "OpenNMS.Eventd"; public EventSourceReplacement() { // we do nothing! } public Object getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { return m_replacement; } } public class FixedIntegerReplacement implements ColumnChangeReplacement { private Integer m_replacement; public FixedIntegerReplacement(int value) { m_replacement = value; } public Object getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { return m_replacement; } } public class RowHasBogusData implements ColumnChangeReplacement { private String m_table; private String m_column; public RowHasBogusData(String table, String column) { m_table = table; m_column = column; } public Object getColumnReplacement(ResultSet rs, Map<String, ColumnChange> columnChanges) throws SQLException { throw new IllegalArgumentException("The '" + m_column + "' column in the '" + m_table + "' table should never be " + "null, but the entry for this " + "row does have a null '" + m_column + "'column. " + "It needs to be " + "removed or udpated to " + "reflect a valid '" + m_column + "' column."); } } public boolean triggerExists(String name, String table, String storedProcedure) throws Exception { Statement st = m_dbconnection.createStatement(); ResultSet rs = st.executeQuery("SELECT oid FROM pg_trigger WHERE tgname = '" + name.toLowerCase() + "' AND tgrelid = (SELECT oid FROM pg_class WHERE relname = '" + table.toLowerCase() + "' ) AND tgfoid = (SELECT oid FROM pg_proc WHERE proname = '" + storedProcedure.toLowerCase() + "')"); return rs.next(); } }
When the light is green, the trap is clean.
opennms-install/src/main/java/org/opennms/install/Installer.java
When the light is green, the trap is clean.
Java
agpl-3.0
10973573d5c6b3b14e08db37e858d13e5eb7cacb
0
OPEN-ENT-NG/vie-scolaire,OPEN-ENT-NG/vie-scolaire,OPEN-ENT-NG/vie-scolaire,OPEN-ENT-NG/vie-scolaire,OPEN-ENT-NG/vie-scolaire,OPEN-ENT-NG/vie-scolaire
/* * Copyright (c) Région Hauts-de-France, Département de la Seine-et-Marne, Région Nouvelle Aquitaine, Mairie de Paris, CGI, 2016. * This file is part of OPEN ENT NG. OPEN ENT NG is a versatile ENT Project based on the JVM and ENT Core Project. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation (version 3 of the License). * For the sake of explanation, any module that communicate over native * Web protocols, such as HTTP, with OPEN ENT NG is outside the scope of this * license and could be license under its own terms. This is merely considered * normal use of OPEN ENT NG, and does not fall under the heading of "covered work". * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package fr.openent.viescolaire.service.impl; import fr.openent.Viescolaire; import fr.openent.viescolaire.service.ClasseService; import fr.openent.viescolaire.service.MatiereService; import fr.openent.viescolaire.service.SousMatiereService; import fr.openent.viescolaire.service.UtilsService; import fr.openent.viescolaire.utils.FormateFutureEvent; import fr.wseduc.webutils.Either; import fr.wseduc.webutils.I18n; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.eventbus.EventBus; import org.entcore.common.neo4j.Neo4j; import org.entcore.common.neo4j.Neo4jResult; import org.entcore.common.service.impl.SqlCrudService; import io.vertx.core.Handler; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import org.entcore.common.sql.SqlResult; import java.util.*; import java.util.stream.Collectors; import static fr.openent.Viescolaire.*; import static fr.wseduc.webutils.Utils.handlerToAsyncHandler; /** * Created by ledunoiss on 18/10/2016. */ public class DefaultMatiereService extends SqlCrudService implements MatiereService { private final Neo4j neo4j = Neo4j.getInstance(); private UtilsService utilsService; private SousMatiereService sousMatiereService; private ClasseService classeService; private EventBus eb; private String subjectLibelleTable = VSCO_SCHEMA + "." + VSCO_MATIERE_LIBELLE_TABLE; private String modelSubjectLibelleTable = VSCO_SCHEMA + "." 
+ VSCO_MODEL_MATIERE_LIBELLE_TABLE; public DefaultMatiereService(EventBus eb) { super(VSCO_SCHEMA, Viescolaire.VSCO_MATIERE_TABLE); this.eb = eb; utilsService = new DefaultUtilsService(); sousMatiereService = new DefaultSousMatiereService(); classeService = new DefaultClasseService(); } @Override public void listMatieresEleve(String userId, Handler<Either<String, JsonArray>> handler) { StringBuilder query = new StringBuilder(); JsonObject values = new JsonObject(); query.append("MATCH (u:`User` {id:{userId}}),(s:Structure)<-[:SUBJECT]-(f:Subject)") .append(" WHERE f.code in u.fieldOfStudy and s.externalId in u.structures") .append(" return f.id as id, f.code as externalId, f.label as name"); values.put("userId", userId); neo4j.execute(query.toString(), values, Neo4jResult.validResultHandler(handler)); } @Override public void listMatieresEtab(String idStructure, Boolean onlyId, Handler<Either<String, JsonArray>> handler){ String returndata; if (onlyId) { returndata = "RETURN collect(sub.id) as res "; } else { returndata = "RETURN s.id as idEtablissement, sub.id as id, sub.code as externalId, sub.source as source, " + "sub.label as name, sub.externalId as external_id_subject ORDER BY name "; } String query = "MATCH (sub:Subject)-[sj:SUBJECT]->(s:Structure {id: {idStructure}}) " + returndata; JsonObject values = new JsonObject().put("idStructure", idStructure); neo4j.execute(query, values, Neo4jResult.validResultHandler(handler)); } @Override public void listMatieres(String structureId , JsonArray aIdEnseignant, JsonArray aIdMatiere, JsonArray aIdGroupe,Handler<Either<String, JsonArray>> result) { String query = "MATCH (s:Structure)<-[:SUBJECT]-(sub:Subject)<-[r:TEACHES]-(u:User) "; String returnValue = " WITH r.classes + r.groups as libelleClasses, s, u, sub " + "MATCH (s)--(c) WHERE (c:Class OR c:FunctionalGroup OR c:ManualGroup) AND ALL(x IN c.externalId WHERE x in libelleClasses) " + "RETURN u.id as idEnseignant, s.id as idEtablissement, sub.id as id, sub.code as externalId, sub.label as name, libelleClasses, COLLECT(c.id) as idClasses"; String condition = "WHERE s.id = {structureId}"; JsonObject params = new JsonObject().put("structureId", structureId); if(aIdEnseignant != null && aIdEnseignant.size() != 0) { condition += " AND u.id IN {userIdList}"; params.put("userIdList", aIdEnseignant); } if(aIdMatiere != null && aIdMatiere.size() != 0) { condition += " AND sub.id IN {subjectIdList}"; params.put("subjectIdList", aIdMatiere); } if(aIdGroupe != null && aIdGroupe.size() != 0) { condition += " AND c.id IN {groupeIdList}"; params.put("groupeIdList", aIdGroupe); } params.put("structureId", structureId); neo4j.execute(query + condition + returnValue, params, Neo4jResult.validResultHandler(result)); } @Override public void listAllMatieres(String structureId, String idEnseignant, Boolean onlyId, Handler<Either<String, JsonArray>> handler) { utilsService.getTitulaires(idEnseignant, structureId, eventRemplacants -> { if (eventRemplacants.isRight()) { JsonArray aIdEnseignant = eventRemplacants.right().getValue(); aIdEnseignant.add(idEnseignant); listMatieres(structureId, aIdEnseignant, null, null, checkOverwrite(structureId, aIdEnseignant, event -> { if (event.isRight()) { final JsonArray resultats = event.right().getValue(); if (resultats.size() > 0) { final List<String> ids = new ArrayList<>(); for (Object res : resultats) { ids.add(((JsonObject) res).getString("id")); } if(onlyId) { handler.handle(new Either.Right<>(new JsonArray(ids))); } else { addSousMatiere(ids, resultats,handler); } } 
else { listMatieresEtabWithSousMatiere(structureId,onlyId,handler); } } else { handler.handle(event.left()); } })); } else { handler.handle(eventRemplacants.left()); } }); } private void addSousMatiere(List<String> ids, JsonArray resultats, Handler<Either<String, JsonArray>> handler){ sousMatiereService.getSousMatiereById(ids.toArray(new String[0]), event_ssmatiere -> { if (event_ssmatiere.isRight()) { JsonArray finalresponse = new fr.wseduc.webutils.collections.JsonArray(); JsonArray res = event_ssmatiere.right().getValue(); if(res == null) { System.out.println(" res null"); } if(resultats == null) { System.out.println(" resultats null"); } for (int i = 0; i < resultats.size(); i++) { JsonObject matiere = resultats.getJsonObject(i); String id = matiere.getString("id"); JsonArray ssms = new fr.wseduc.webutils.collections.JsonArray(); for (int j = 0; j < res.size(); j++) { JsonObject ssm = res.getJsonObject(j); if (ssm.getString("id_matiere").equals(id)) { ssms.add(ssm); } } matiere.put("sous_matieres", ssms); finalresponse.add(matiere); } handler.handle(new Either.Right<>(finalresponse)); } else { handler.handle(event_ssmatiere.left()); } }); } public void listMatieresEtabWithSousMatiere(String structureId, Boolean onlyId, Handler<Either<String, JsonArray>> handler ){ listMatieresEtab(structureId, onlyId, event2 -> { if (event2.isRight()) { if(onlyId) { handler.handle(event2.right()); } else { JsonArray matieresEtab = event2.right().getValue(); if(matieresEtab.size() > 0){ final List<String> ids = new ArrayList<>(); for (Object res : matieresEtab) { ids.add(((JsonObject) res).getString("id")); } addSousMatiere(ids,matieresEtab,handler); }else{ handler.handle(new Either.Left("no subject")); } } }else{ handler.handle(event2.left()); } }); } @Override public void getEnseignantsMatieres(ArrayList<String> classesFieldOfStudy, Handler<Either<String, JsonArray>> result) { StringBuilder query = new StringBuilder(); JsonObject params = new JsonObject(); query.append("MATCH (n:`User`) WHERE "); for(int i = 0; i < classesFieldOfStudy.size(); i++){ query.append("{id") .append(i) .append("} in n.classesFieldOfStudy "); params.put("id"+i, classesFieldOfStudy.get(i)); if(i != classesFieldOfStudy.size()-1){ query.append("OR "); } } query.append("RETURN n"); neo4j.execute(query.toString(), params, Neo4jResult.validResultHandler(result)); } @Override public void getMatieres(JsonArray idMatieres, Handler<Either<String, JsonArray>> result) { StringBuilder query = new StringBuilder(); JsonObject params = new JsonObject(); query.append("MATCH (f:Subject) WHERE f.id IN {idMatieres} ") .append("RETURN f.id as id, f.code as externalId, f.label as name, f as data "); params.put("idMatieres", idMatieres); neo4j.execute(query.toString(), params, Neo4jResult.validResultHandler(result)); } @Override public void getMatiere(String idMatiere, Handler<Either<String, JsonObject>> result) { StringBuilder query = new StringBuilder(); JsonObject params = new JsonObject(); query.append(" MATCH (n:Subject {id: {idMatiere}}) RETURN n "); params.put("idMatiere", idMatiere); neo4j.execute(query.toString(), params, Neo4jResult.validUniqueResultHandler(result)); } @Override public void subjectsListWithUnderSubjects (JsonArray idsSubject, Handler<Either<String, JsonArray>> handler) { getMatieres(idsSubject, subjectsResponse -> { if(subjectsResponse.isRight()){ JsonArray subjects = subjectsResponse.right().getValue(); if(subjects.isEmpty()){ handler.handle(new Either.Left<>(" no subject ")); }else { addSousMatiere(idsSubject.getList(), 
subjects, handler); } }else{ handler.handle(new Either.Left<>(subjectsResponse.left().getValue())); } }); } private Handler<Either<String, JsonArray>> checkOverwrite(String idStructure, JsonArray aIdEnseignant, Handler<Either<String, JsonArray>> handler) { return matieres -> { if(matieres.isRight() && matieres.right().getValue().size() > 0) { Set<Service> aMatieres = new HashSet<>(); aMatieres.addAll(matieres.right().getValue().stream().map(serv -> new Service((JsonObject) serv)).collect(Collectors.toList())); JsonObject action = new JsonObject() .put("action", "service.getServices") .put("idStructure", idStructure) .put("aIdEnseignant", aIdEnseignant); eb.send(Viescolaire.COMPETENCES_BUS_ADDRESS, action, handlerToAsyncHandler(message -> { JsonObject body = message.body(); if ("ok".equals(body.getString("status"))) { Set<Service> aServices = new HashSet<>(); aServices.addAll(body.getJsonArray("results").stream().map(serv -> new Service((JsonObject) serv)).collect(Collectors.toList())); Set idClasses = new HashSet(); utilsService.pluck(matieres.right().getValue(), "idClasses").forEach(array -> idClasses.addAll(((JsonArray) array).getList())); idClasses.addAll(utilsService.pluck(body.getJsonArray("results"), "id_groupe")); Set idMatieres = new HashSet<>(); idMatieres.addAll(utilsService.pluck(matieres.right().getValue(), "id")); idMatieres.addAll(utilsService.pluck(body.getJsonArray("results"), "id_matiere")); classeService.getClassesInfo(new JsonArray(new ArrayList(idClasses)), classesEvent -> { if (classesEvent.isRight()) { JsonArray classes = classesEvent.right().getValue(); getMatieres(new JsonArray(new ArrayList(idMatieres)), matieresEvent -> { if (matieresEvent.isRight()) { JsonArray newMatieres = matieresEvent.right().getValue(); for (Service oService : aServices) { Service matiereFound = (Service) utilsService.find(aMatieres, mat -> oService.equals((Service) mat)); if(matiereFound != null) { if(oService.evaluable) { matiereFound.addClasses(oService.idClasses); } else { matiereFound.rmClasses(oService.idClasses); } } else { aMatieres.add(oService); } } JsonArray res = new JsonArray( new ArrayList( aMatieres.parallelStream().filter( oMat -> oMat!= null && oMat.idClasses!= null && !oMat.idClasses.isEmpty()).map(oMat -> { oMat.fillMissingValues(newMatieres, classes); return oMat.toJson(); }).collect(Collectors.toList()))); handler.handle(new Either.Right<>(res)); } else { handler.handle(matieresEvent.left()); } }); } else { handler.handle(classesEvent.left()); } }); } }) ); } else if (matieres.isRight()) { handler.handle(matieres.right()); } else { handler.handle(matieres.left()); } }; } private class Service { public String idMatiere, idEnseignant, idEtablissement, name, externalId, modalite; public HashSet idClasses, libelleClasses; public boolean evaluable; public Service (JsonObject matiere) { if(matiere.containsKey("id") && matiere.containsKey("idEnseignant") && matiere.containsKey("idClasses")) { this.idMatiere = matiere.getString("id"); this.idEnseignant = matiere.getString("idEnseignant"); this.idEtablissement = matiere.getString("idEtablissement"); this.name = matiere.getString(NAME); this.externalId = matiere.getString("externalId"); this.idClasses = matiere.containsKey("idClasses") ? new HashSet(matiere.getJsonArray("idClasses").getList()) : new HashSet(); this.libelleClasses = matiere.containsKey("libelleClasses") ? 
new HashSet(matiere.getJsonArray("libelleClasses").getList()) : new HashSet(); } else { this.idMatiere = matiere.getString("id_matiere"); this.idEnseignant = matiere.getString("id_enseignant"); this.idEtablissement = matiere.getString("id_etablissement"); this.evaluable = matiere.getBoolean("evaluable"); this.modalite = matiere.getString("modalite"); this.idClasses = new HashSet(); this.idClasses.add(matiere.getString("id_groupe")); this.libelleClasses = new HashSet(); } } public void addClasses(Set<String> classes) { for(String s : classes) { this.idClasses.add(s); } } public void rmClasses(Set<String> classes) { for (String s : classes) { this.idClasses.remove(s); } } public void fillMissingValues(JsonArray matieres, JsonArray classes) { JsonArray classeToKeep = utilsService.filter(classes, classe -> this.idClasses.contains(((JsonObject) classe).getString("id"))); this.libelleClasses = new HashSet(utilsService.pluck(classeToKeep, EXTERNAL_ID_KEY)); JsonObject matiere = utilsService.findWhere(matieres, new JsonObject().put("id", this.idMatiere)); if (matiere != null){ this.name = matiere.getString(NAME); this.externalId = matiere.getString(EXTERNAL_ID_KEY); } } public boolean equals (Service s) { return s.idMatiere.equals(this.idMatiere) && s.idEnseignant.equals(this.idEnseignant); } public int hashCode () { return Objects.hash(this.idMatiere, this.idEnseignant); } public boolean isValid() { return this.idEnseignant != null && this.idMatiere != null && this.idClasses.size() > 0 && this.libelleClasses.size() > 0; } public JsonObject toJson() { return new JsonObject() .put("id", this.idMatiere) .put(EXTERNAL_ID_KEY, this.externalId) .put(ID_ETABLISSEMENT_KEY, this.idEtablissement) .put("libelleClasses", new JsonArray(new ArrayList(this.libelleClasses))) .put(NAME, this.name); } } }
src/main/java/fr/openent/viescolaire/service/impl/DefaultMatiereService.java
/* * Copyright (c) Région Hauts-de-France, Département de la Seine-et-Marne, Région Nouvelle Aquitaine, Mairie de Paris, CGI, 2016. * This file is part of OPEN ENT NG. OPEN ENT NG is a versatile ENT Project based on the JVM and ENT Core Project. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation (version 3 of the License). * For the sake of explanation, any module that communicate over native * Web protocols, such as HTTP, with OPEN ENT NG is outside the scope of this * license and could be license under its own terms. This is merely considered * normal use of OPEN ENT NG, and does not fall under the heading of "covered work". * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package fr.openent.viescolaire.service.impl; import fr.openent.Viescolaire; import fr.openent.viescolaire.service.ClasseService; import fr.openent.viescolaire.service.MatiereService; import fr.openent.viescolaire.service.SousMatiereService; import fr.openent.viescolaire.service.UtilsService; import fr.openent.viescolaire.utils.FormateFutureEvent; import fr.wseduc.webutils.Either; import fr.wseduc.webutils.I18n; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.eventbus.EventBus; import org.entcore.common.neo4j.Neo4j; import org.entcore.common.neo4j.Neo4jResult; import org.entcore.common.service.impl.SqlCrudService; import io.vertx.core.Handler; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import org.entcore.common.sql.SqlResult; import java.util.*; import java.util.stream.Collectors; import static fr.openent.Viescolaire.*; import static fr.wseduc.webutils.Utils.handlerToAsyncHandler; /** * Created by ledunoiss on 18/10/2016. */ public class DefaultMatiereService extends SqlCrudService implements MatiereService { private final Neo4j neo4j = Neo4j.getInstance(); private UtilsService utilsService; private SousMatiereService sousMatiereService; private ClasseService classeService; private EventBus eb; private String subjectLibelleTable = VSCO_SCHEMA + "." + VSCO_MATIERE_LIBELLE_TABLE; private String modelSubjectLibelleTable = VSCO_SCHEMA + "." 
+ VSCO_MODEL_MATIERE_LIBELLE_TABLE; public DefaultMatiereService(EventBus eb) { super(VSCO_SCHEMA, Viescolaire.VSCO_MATIERE_TABLE); this.eb = eb; utilsService = new DefaultUtilsService(); sousMatiereService = new DefaultSousMatiereService(); classeService = new DefaultClasseService(); } @Override public void listMatieresEleve(String userId, Handler<Either<String, JsonArray>> handler) { StringBuilder query = new StringBuilder(); JsonObject values = new JsonObject(); query.append("MATCH (u:`User` {id:{userId}}),(s:Structure)<-[:SUBJECT]-(f:Subject)") .append(" WHERE f.code in u.fieldOfStudy and s.externalId in u.structures") .append(" return f.id as id, f.code as externalId, f.label as name"); values.put("userId", userId); neo4j.execute(query.toString(), values, Neo4jResult.validResultHandler(handler)); } @Override public void listMatieresEtab(String idStructure, Boolean onlyId, Handler<Either<String, JsonArray>> handler){ String returndata; if (onlyId) { returndata = "RETURN collect(sub.id) as res "; } else { returndata = "RETURN s.id as idEtablissement, sub.id as id, sub.code as externalId, sub.source as source, " + "sub.label as name, sub.externalId as external_id_subject ORDER BY name "; } String query = "MATCH (sub:Subject)-[sj:SUBJECT]->(s:Structure {id: {idStructure}}) " + returndata; JsonObject values = new JsonObject().put("idStructure", idStructure); neo4j.execute(query, values, Neo4jResult.validResultHandler(handler)); } @Override public void listMatieres(String structureId , JsonArray aIdEnseignant, JsonArray aIdMatiere, JsonArray aIdGroupe,Handler<Either<String, JsonArray>> result) { String query = "MATCH (s:Structure)<-[:SUBJECT]-(sub:Subject)<-[r:TEACHES]-(u:User) "; String returnValue = " WITH r.classes + r.groups as libelleClasses, s, u, sub " + "MATCH (s)--(c) WHERE (c:Class OR c:FunctionalGroup OR c:ManualGroup) AND ALL(x IN c.externalId WHERE x in libelleClasses) " + "RETURN u.id as idEnseignant, s.id as idEtablissement, sub.id as id, sub.code as externalId, sub.label as name, libelleClasses, COLLECT(c.id) as idClasses"; String condition = "WHERE s.id = {structureId}"; JsonObject params = new JsonObject().put("structureId", structureId); if(aIdEnseignant != null && aIdEnseignant.size() != 0) { condition += " AND u.id IN {userIdList}"; params.put("userIdList", aIdEnseignant); } if(aIdMatiere != null && aIdMatiere.size() != 0) { condition += " AND sub.id IN {subjectIdList}"; params.put("subjectIdList", aIdMatiere); } if(aIdGroupe != null && aIdGroupe.size() != 0) { condition += " AND c.id IN {groupeIdList}"; params.put("groupeIdList", aIdGroupe); } params.put("structureId", structureId); neo4j.execute(query + condition + returnValue, params, Neo4jResult.validResultHandler(result)); } @Override public void listAllMatieres(String structureId, String idEnseignant, Boolean onlyId, Handler<Either<String, JsonArray>> handler) { utilsService.getTitulaires(idEnseignant, structureId, eventRemplacants -> { if (eventRemplacants.isRight()) { JsonArray aIdEnseignant = eventRemplacants.right().getValue(); aIdEnseignant.add(idEnseignant); listMatieres(structureId, aIdEnseignant, null, null, checkOverwrite(structureId, aIdEnseignant, event -> { if (event.isRight()) { final JsonArray resultats = event.right().getValue(); if (resultats.size() > 0) { final List<String> ids = new ArrayList<>(); for (Object res : resultats) { ids.add(((JsonObject) res).getString("id")); } if(onlyId) { handler.handle(new Either.Right<>(new JsonArray(ids))); } else { addSousMatiere(ids, resultats,handler); } } 
else { listMatieresEtabWithSousMatiere(structureId,onlyId,handler); } } else { handler.handle(event.left()); } })); } else { handler.handle(eventRemplacants.left()); } }); } private void addSousMatiere(List<String> ids, JsonArray resultats, Handler<Either<String, JsonArray>> handler){ sousMatiereService.getSousMatiereById(ids.toArray(new String[0]), event_ssmatiere -> { if (event_ssmatiere.right().isRight()) { JsonArray finalresponse = new fr.wseduc.webutils.collections.JsonArray(); JsonArray res = event_ssmatiere.right().getValue(); for (int i = 0; i < resultats.size(); i++) { JsonObject matiere = resultats.getJsonObject(i); String id = matiere.getString("id"); JsonArray ssms = new fr.wseduc.webutils.collections.JsonArray(); for (int j = 0; j < res.size(); j++) { JsonObject ssm = res.getJsonObject(j); if (ssm.getString("id_matiere").equals(id)) { ssms.add(ssm); } } matiere.put("sous_matieres", ssms); finalresponse.add(matiere); } handler.handle(new Either.Right<>(finalresponse)); } else { handler.handle(event_ssmatiere.left()); } }); } public void listMatieresEtabWithSousMatiere(String structureId, Boolean onlyId, Handler<Either<String, JsonArray>> handler ){ listMatieresEtab(structureId, onlyId, event2 -> { if (event2.isRight()) { if(onlyId) { handler.handle(event2.right()); } else { JsonArray matieresEtab = event2.right().getValue(); if(matieresEtab.size() > 0){ final List<String> ids = new ArrayList<>(); for (Object res : matieresEtab) { ids.add(((JsonObject) res).getString("id")); } addSousMatiere(ids,matieresEtab,handler); }else{ handler.handle(new Either.Left("no subject")); } } }else{ handler.handle(event2.left()); } }); } @Override public void getEnseignantsMatieres(ArrayList<String> classesFieldOfStudy, Handler<Either<String, JsonArray>> result) { StringBuilder query = new StringBuilder(); JsonObject params = new JsonObject(); query.append("MATCH (n:`User`) WHERE "); for(int i = 0; i < classesFieldOfStudy.size(); i++){ query.append("{id") .append(i) .append("} in n.classesFieldOfStudy "); params.put("id"+i, classesFieldOfStudy.get(i)); if(i != classesFieldOfStudy.size()-1){ query.append("OR "); } } query.append("RETURN n"); neo4j.execute(query.toString(), params, Neo4jResult.validResultHandler(result)); } @Override public void getMatieres(JsonArray idMatieres, Handler<Either<String, JsonArray>> result) { StringBuilder query = new StringBuilder(); JsonObject params = new JsonObject(); query.append("MATCH (f:Subject) WHERE f.id IN {idMatieres} ") .append("RETURN f.id as id, f.code as externalId, f.label as name, f as data "); params.put("idMatieres", idMatieres); neo4j.execute(query.toString(), params, Neo4jResult.validResultHandler(result)); } @Override public void getMatiere(String idMatiere, Handler<Either<String, JsonObject>> result) { StringBuilder query = new StringBuilder(); JsonObject params = new JsonObject(); query.append(" MATCH (n:Subject {id: {idMatiere}}) RETURN n "); params.put("idMatiere", idMatiere); neo4j.execute(query.toString(), params, Neo4jResult.validUniqueResultHandler(result)); } @Override public void subjectsListWithUnderSubjects (JsonArray idsSubject, Handler<Either<String, JsonArray>> handler) { getMatieres(idsSubject, subjectsResponse -> { if(subjectsResponse.isRight()){ JsonArray subjects = subjectsResponse.right().getValue(); if(subjects.isEmpty()){ handler.handle(new Either.Left<>(" no subject ")); }else { addSousMatiere(idsSubject.getList(), subjects, handler); } }else{ handler.handle(new Either.Left<>(subjectsResponse.left().getValue())); } }); } 
private Handler<Either<String, JsonArray>> checkOverwrite(String idStructure, JsonArray aIdEnseignant, Handler<Either<String, JsonArray>> handler) { return matieres -> { if(matieres.isRight() && matieres.right().getValue().size() > 0) { Set<Service> aMatieres = new HashSet<>(); aMatieres.addAll(matieres.right().getValue().stream().map(serv -> new Service((JsonObject) serv)).collect(Collectors.toList())); JsonObject action = new JsonObject() .put("action", "service.getServices") .put("idStructure", idStructure) .put("aIdEnseignant", aIdEnseignant); eb.send(Viescolaire.COMPETENCES_BUS_ADDRESS, action, handlerToAsyncHandler(message -> { JsonObject body = message.body(); if ("ok".equals(body.getString("status"))) { Set<Service> aServices = new HashSet<>(); aServices.addAll(body.getJsonArray("results").stream().map(serv -> new Service((JsonObject) serv)).collect(Collectors.toList())); Set idClasses = new HashSet(); utilsService.pluck(matieres.right().getValue(), "idClasses").forEach(array -> idClasses.addAll(((JsonArray) array).getList())); idClasses.addAll(utilsService.pluck(body.getJsonArray("results"), "id_groupe")); Set idMatieres = new HashSet<>(); idMatieres.addAll(utilsService.pluck(matieres.right().getValue(), "id")); idMatieres.addAll(utilsService.pluck(body.getJsonArray("results"), "id_matiere")); classeService.getClassesInfo(new JsonArray(new ArrayList(idClasses)), classesEvent -> { if (classesEvent.isRight()) { JsonArray classes = classesEvent.right().getValue(); getMatieres(new JsonArray(new ArrayList(idMatieres)), matieresEvent -> { if (matieresEvent.isRight()) { JsonArray newMatieres = matieresEvent.right().getValue(); for (Service oService : aServices) { Service matiereFound = (Service) utilsService.find(aMatieres, mat -> oService.equals((Service) mat)); if(matiereFound != null) { if(oService.evaluable) { matiereFound.addClasses(oService.idClasses); } else { matiereFound.rmClasses(oService.idClasses); } } else { aMatieres.add(oService); } } handler.handle(new Either.Right<>(new JsonArray(new ArrayList(aMatieres.parallelStream().filter(oMat -> !oMat.idClasses.isEmpty()).map(oMat -> { oMat.fillMissingValues(newMatieres, classes); return oMat.toJson(); }).collect(Collectors.toList()))))); } else { handler.handle(matieresEvent.left()); } }); } else { handler.handle(classesEvent.left()); } }); } }) ); } else if (matieres.isRight()) { handler.handle(matieres.right()); } else { handler.handle(matieres.left()); } }; } private class Service { public String idMatiere, idEnseignant, idEtablissement, name, externalId, modalite; public HashSet idClasses, libelleClasses; public boolean evaluable; public Service (JsonObject matiere) { if(matiere.containsKey("id") && matiere.containsKey("idEnseignant") && matiere.containsKey("idClasses")) { this.idMatiere = matiere.getString("id"); this.idEnseignant = matiere.getString("idEnseignant"); this.idEtablissement = matiere.getString("idEtablissement"); this.name = matiere.getString(NAME); this.externalId = matiere.getString("externalId"); this.idClasses = matiere.containsKey("idClasses") ? new HashSet(matiere.getJsonArray("idClasses").getList()) : new HashSet(); this.libelleClasses = matiere.containsKey("libelleClasses") ? 
new HashSet(matiere.getJsonArray("libelleClasses").getList()) : new HashSet(); } else { this.idMatiere = matiere.getString("id_matiere"); this.idEnseignant = matiere.getString("id_enseignant"); this.idEtablissement = matiere.getString("id_etablissement"); this.evaluable = matiere.getBoolean("evaluable"); this.modalite = matiere.getString("modalite"); this.idClasses = new HashSet(); this.idClasses.add(matiere.getString("id_groupe")); this.libelleClasses = new HashSet(); } } public void addClasses(Set<String> classes) { for(String s : classes) { this.idClasses.add(s); } } public void rmClasses(Set<String> classes) { for (String s : classes) { this.idClasses.remove(s); } } public void fillMissingValues(JsonArray matieres, JsonArray classes) { JsonArray classeToKeep = utilsService.filter(classes, classe -> this.idClasses.contains(((JsonObject) classe).getString("id"))); this.libelleClasses = new HashSet(utilsService.pluck(classeToKeep, EXTERNAL_ID_KEY)); JsonObject matiere = utilsService.findWhere(matieres, new JsonObject().put("id", this.idMatiere)); this.name = matiere.getString(NAME); this.externalId = matiere.getString(EXTERNAL_ID_KEY); } public boolean equals (Service s) { return s.idMatiere.equals(this.idMatiere) && s.idEnseignant.equals(this.idEnseignant); } public int hashCode () { return Objects.hash(this.idMatiere, this.idEnseignant); } public boolean isValid() { return this.idEnseignant != null && this.idMatiere != null && this.idClasses.size() > 0 && this.libelleClasses.size() > 0; } public JsonObject toJson() { return new JsonObject() .put("id", this.idMatiere) .put(EXTERNAL_ID_KEY, this.externalId) .put(ID_ETABLISSEMENT_KEY, this.idEtablissement) .put("libelleClasses", new JsonArray(new ArrayList(this.libelleClasses))) .put(NAME, this.name); } } }
[CO-648]: No access to the Competence module for two teachers.
src/main/java/fr/openent/viescolaire/service/impl/DefaultMatiereService.java
[CO-648]: No access to the Competence module for two teachers.
Java
agpl-3.0
ccc3b03e5e89bbaec2a2aa623629d733b7ce0cbd
0
adamabeshouse/cbioportal,cBioPortal/cbioportal,adamabeshouse/cbioportal,mandawilson/cbioportal,zhx828/cbioportal,inodb/cbioportal,adamabeshouse/cbioportal,mandawilson/cbioportal,n1zea144/cbioportal,d3b-center/pedcbioportal,jjgao/cbioportal,cBioPortal/cbioportal,zhx828/cbioportal,onursumer/cbioportal,pughlab/cbioportal,jjgao/cbioportal,cBioPortal/cbioportal,n1zea144/cbioportal,mandawilson/cbioportal,pughlab/cbioportal,jjgao/cbioportal,angelicaochoa/cbioportal,mandawilson/cbioportal,mandawilson/cbioportal,d3b-center/pedcbioportal,inodb/cbioportal,angelicaochoa/cbioportal,inodb/cbioportal,angelicaochoa/cbioportal,cBioPortal/cbioportal,zhx828/cbioportal,onursumer/cbioportal,sheridancbio/cbioportal,adamabeshouse/cbioportal,inodb/cbioportal,onursumer/cbioportal,pughlab/cbioportal,sheridancbio/cbioportal,mandawilson/cbioportal,inodb/cbioportal,angelicaochoa/cbioportal,d3b-center/pedcbioportal,adamabeshouse/cbioportal,jjgao/cbioportal,n1zea144/cbioportal,zhx828/cbioportal,inodb/cbioportal,sheridancbio/cbioportal,n1zea144/cbioportal,onursumer/cbioportal,angelicaochoa/cbioportal,adamabeshouse/cbioportal,d3b-center/pedcbioportal,cBioPortal/cbioportal,d3b-center/pedcbioportal,d3b-center/pedcbioportal,sheridancbio/cbioportal,onursumer/cbioportal,sheridancbio/cbioportal,zhx828/cbioportal,angelicaochoa/cbioportal,pughlab/cbioportal,onursumer/cbioportal,pughlab/cbioportal,n1zea144/cbioportal,mandawilson/cbioportal,n1zea144/cbioportal,zhx828/cbioportal,angelicaochoa/cbioportal,inodb/cbioportal,cBioPortal/cbioportal,pughlab/cbioportal,zhx828/cbioportal,pughlab/cbioportal,adamabeshouse/cbioportal,n1zea144/cbioportal,jjgao/cbioportal,sheridancbio/cbioportal,jjgao/cbioportal,d3b-center/pedcbioportal,jjgao/cbioportal
/* * Copyright (c) 2015 Memorial Sloan-Kettering Cancer Center. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS * FOR A PARTICULAR PURPOSE. The software and documentation provided hereunder * is on an "as is" basis, and Memorial Sloan-Kettering Cancer Center has no * obligations to provide maintenance, support, updates, enhancements or * modifications. In no event shall Memorial Sloan-Kettering Cancer Center be * liable to any party for direct, indirect, special, incidental or * consequential damages, including lost profits, arising out of the use of this * software and its documentation, even if Memorial Sloan-Kettering Cancer * Center has been advised of the possibility of such damage. */ /* * This file is part of cBioPortal. * * cBioPortal is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.mskcc.cbio.portal.util.internal; // imports import org.mskcc.cbio.portal.dao.*; import org.mskcc.cbio.portal.util.*; import org.mskcc.cbio.portal.model.CancerStudy; import org.mskcc.cbio.portal.web_api.ProtocolException; import org.apache.commons.logging.*; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.core.Authentication; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.authentication.AnonymousAuthenticationToken; import org.springframework.stereotype.Component; import java.util.*; /** * Utilities for managing access control. * * @author Benjamin Gross */ @Component public class AccessControlImpl implements AccessControl { // ref to log private static Log log = LogFactory.getLog(AccessControlImpl.class); /** * Gets Cancer Studies. Used by QueryBuilder. * * @return List<CancerStudy> * @throws DaoException Database Error. * @throws ProtocolException Protocol Error. * * We use @PostFilter annotation to remove elements * in the return list inaccessible to the user. */ public List<CancerStudy> getCancerStudies() throws DaoException, ProtocolException { if (log.isDebugEnabled()) { log.debug("getCancerStudies(), getting accessible cancer studies."); } // get list of all cancer studies List<CancerStudy> allCancerStudies = DaoCancerStudy.getAllCancerStudies(); // sort the list Collections.sort(allCancerStudies, new CancerStudiesComparator()); // Then, insert "All" Cancer Types at beginning //TODO - fix this! It conflicts with ALL (Acute Lymphoid Leukemia)! ArrayList<CancerStudy> finalCancerStudiesList = new ArrayList<CancerStudy>(); String allCancerStudyTitle = (GlobalProperties.usersMustBeAuthorized()) ? 
"All Authorized Cancer Studies" : "All Cancer Studies"; CancerStudy allCancerStudy = new CancerStudy(allCancerStudyTitle, allCancerStudyTitle, "all", "all", true); finalCancerStudiesList.add(allCancerStudy); finalCancerStudiesList.addAll(allCancerStudies); if (finalCancerStudiesList.size() > 1) { return finalCancerStudiesList; } else { throw new ProtocolException("No cancer studies accessible; "+ "either provide credentials to access private studies, " + "or ask administrator to load public ones.\n"); } } /** * Return true if the user can access the study, false otherwise. * * @param stableStudyId * @return List<CancerStudy> * @throws DaoException * * We use @PostFilter rather than @PreAuthorize annotation to provide * permission evaluation on this cancer study so that we can process * invalid permissions via QueryBuilder.validateForm(). If we use @PreAuthorize, * thread execution does not return from this method call if a user has invalid permissions. */ public List<CancerStudy> isAccessibleCancerStudy(String stableStudyId) throws DaoException { if (log.isDebugEnabled()) { log.debug("isAccessibleCancerStudy(), stableStudyId: " + stableStudyId); } // get cancer study by stable id List<CancerStudy> toReturn = new ArrayList<CancerStudy>(); CancerStudy cancerStudy = DaoCancerStudy.getCancerStudyByStableId(stableStudyId); if (cancerStudy != null) { toReturn.add(cancerStudy); } // outta here return toReturn; } public UserDetails getUserDetails() { if (GlobalProperties.usersMustBeAuthorized()) { Authentication auth = SecurityContextHolder.getContext().getAuthentication(); if (auth == null) { String errorMessage = "Possible configuration error detected: authorization=true but no authentication found. " + "If authentication is turned off, authorization should be set to false"; log.error(errorMessage); throw new RuntimeException(errorMessage); } return !(auth instanceof AnonymousAuthenticationToken) ? (UserDetails)auth.getPrincipal() : null; } return null; } } /** * Compares Cancer Studies, so that we can sort them alphabetically. */ class CancerStudiesComparator implements Comparator { /** * Compare two cancer studies. * @param o First Cancer Study. * @param o1 Second Cancer Study. * @return int indicating name sort order. */ public int compare(Object o, Object o1) { CancerStudy study0 = (CancerStudy) o; CancerStudy study1 = (CancerStudy) o1; return study0.getName().compareTo(study1.getName()); } }
core/src/main/java/org/mskcc/cbio/portal/util/internal/AccessControlImpl.java
/* * Copyright (c) 2015 Memorial Sloan-Kettering Cancer Center. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS * FOR A PARTICULAR PURPOSE. The software and documentation provided hereunder * is on an "as is" basis, and Memorial Sloan-Kettering Cancer Center has no * obligations to provide maintenance, support, updates, enhancements or * modifications. In no event shall Memorial Sloan-Kettering Cancer Center be * liable to any party for direct, indirect, special, incidental or * consequential damages, including lost profits, arising out of the use of this * software and its documentation, even if Memorial Sloan-Kettering Cancer * Center has been advised of the possibility of such damage. */ /* * This file is part of cBioPortal. * * cBioPortal is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.mskcc.cbio.portal.util.internal; // imports import org.mskcc.cbio.portal.dao.*; import org.mskcc.cbio.portal.util.*; import org.mskcc.cbio.portal.model.CancerStudy; import org.mskcc.cbio.portal.web_api.ProtocolException; import org.apache.commons.logging.*; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.core.Authentication; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.authentication.AnonymousAuthenticationToken; import org.springframework.stereotype.Component; import java.util.*; /** * Utilities for managing access control. * * @author Benjamin Gross */ @Component public class AccessControlImpl implements AccessControl { // ref to log private static Log log = LogFactory.getLog(AccessControlImpl.class); /** * Gets Cancer Studies. Used by QueryBuilder. * * @return List<CancerStudy> * @throws DaoException Database Error. * @throws ProtocolException Protocol Error. * * We use @PostFilter annotation to remove elements * in the return list inaccessible to the user. */ public List<CancerStudy> getCancerStudies() throws DaoException, ProtocolException { if (log.isDebugEnabled()) { log.debug("getCancerStudies(), getting accessible cancer studies."); } // get list of all cancer studies List<CancerStudy> allCancerStudies = DaoCancerStudy.getAllCancerStudies(); // sort the list Collections.sort(allCancerStudies, new CancerStudiesComparator()); // Then, insert "All" Cancer Types at beginning //TODO - fix this! It conflicts with ALL (Acute Lymphoid Leukemia)! ArrayList<CancerStudy> finalCancerStudiesList = new ArrayList<CancerStudy>(); String allCancerStudyTitle = (GlobalProperties.usersMustBeAuthorized()) ? 
"All Authorized Cancer Studies" : "All Cancer Studies"; CancerStudy allCancerStudy = new CancerStudy(allCancerStudyTitle, allCancerStudyTitle, "all", "all", true); finalCancerStudiesList.add(allCancerStudy); finalCancerStudiesList.addAll(allCancerStudies); if (finalCancerStudiesList.size() > 1) { return finalCancerStudiesList; } else { throw new ProtocolException("No cancer studies accessible; "+ "either provide credentials to access private studies, " + "or ask administrator to load public ones.\n"); } } /** * Return true if the user can access the study, false otherwise. * * @param stableStudyId * @return List<CancerStudy> * @throws DaoException * * We use @PostFilter rather than @PreAuthorize annotation to provide * permission evaluation on this cancer study so that we can process * invalid permissions via QueryBuilder.validateForm(). If we use @PreAuthorize, * thread execution does not return from this method call if a user has invalid permissions. */ public List<CancerStudy> isAccessibleCancerStudy(String stableStudyId) throws DaoException { if (log.isDebugEnabled()) { log.debug("isAccessibleCancerStudy(), stableStudyId: " + stableStudyId); } // get cancer study by stable id List<CancerStudy> toReturn = new ArrayList<CancerStudy>(); CancerStudy cancerStudy = DaoCancerStudy.getCancerStudyByStableId(stableStudyId); if (cancerStudy != null) { toReturn.add(cancerStudy); } // outta here return toReturn; } public UserDetails getUserDetails() { if (GlobalProperties.usersMustBeAuthorized()) { Authentication auth = SecurityContextHolder.getContext().getAuthentication(); return !(auth instanceof AnonymousAuthenticationToken) ? (UserDetails)auth.getPrincipal() : null; } return null; } } /** * Compares Cancer Studies, so that we can sort them alphabetically. */ class CancerStudiesComparator implements Comparator { /** * Compare two cancer studies. * @param o First Cancer Study. * @param o1 Second Cancer Study. * @return int indicating name sort order. */ public int compare(Object o, Object o1) { CancerStudy study0 = (CancerStudy) o; CancerStudy study1 = (CancerStudy) o1; return study0.getName().compareTo(study1.getName()); } }
fixed NullPointerException in getUserDetails()
core/src/main/java/org/mskcc/cbio/portal/util/internal/AccessControlImpl.java
fixed NullPointerException in getUserDetails()
Java
lgpl-2.1
2c1dff12d810a1035b7cd62a8d43a847516da7a7
0
levants/lightmare
package org.lightmare.ejb.handlers;

import java.lang.reflect.Method;

/**
 * Handler class to call EJB bean methods for REST services
 *
 * @author levan
 *
 */
public class RestHandler<T> {

    // Appropriated bean's handler
    private final BeanHandler handler;

    // EJB bean instance
    private final T bean;

    public RestHandler(BeanHandler handler, T bean) {
        this.handler = handler;
        this.bean = bean;
    }

    /**
     * Invokes passed {@link Method} for bean by {@link BeanHandler} instance
     *
     * @param method
     * @param args
     * @return {@link Object}
     * @throws Throwable
     */
    public Object invoke(Method method, Object[] args) throws Throwable {
        return handler.invoke(bean, method, args);
    }
}
src/main/java/org/lightmare/ejb/handlers/RestHandler.java
package org.lightmare.ejb.handlers;

import java.lang.reflect.Method;

/**
 * Handler class to call EJB bean methods for REST services
 *
 * @author levan
 *
 */
public class RestHandler<T> {

    // Appropriated bean's handler
    private final BeanHandler handler;

    private final T bean;

    public RestHandler(BeanHandler handler, T bean) {
        this.handler = handler;
        this.bean = bean;
    }

    /**
     * Invokes passed {@link Method} for bean by {@link BeanHandler} instance
     *
     * @param method
     * @param args
     * @return {@link Object}
     * @throws Throwable
     */
    public Object invoke(Method method, Object[] args) throws Throwable {
        return handler.invoke(bean, method, args);
    }
}
improved code / comments at utility classes
src/main/java/org/lightmare/ejb/handlers/RestHandler.java
improved code / comments at utility classes
Java
unlicense
f96d2e10184aab392b0ad82b11674c42d12962a0
0
HenryLoenwind/EnderIO,SleepyTrousers/EnderIO,D-Inc/EnderIO
package crazypants.enderio.item; import java.util.ArrayList; import java.util.List; import com.enderio.core.api.client.gui.IAdvancedTooltipProvider; import com.enderio.core.client.handlers.SpecialTooltipHandler; import buildcraft.api.tools.IToolWrench; import crazypants.enderio.EnderIO; import crazypants.enderio.EnderIOTab; import crazypants.enderio.ModObject; import crazypants.enderio.api.tool.IConduitControl; import crazypants.enderio.api.tool.ITool; import crazypants.enderio.conduit.ConduitDisplayMode; import crazypants.enderio.config.Config; import crazypants.enderio.network.PacketHandler; import crazypants.enderio.paint.IPaintable.IBlockPaintableBlock; import crazypants.enderio.paint.PainterUtil2; import crazypants.enderio.paint.YetaUtil; import net.minecraft.block.Block; import net.minecraft.block.state.IBlockState; import net.minecraft.entity.Entity; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.util.ActionResult; import net.minecraft.util.EnumActionResult; import net.minecraft.util.EnumFacing; import net.minecraft.util.EnumHand; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.Vec3d; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.player.PlayerInteractEvent.RightClickBlock; import net.minecraftforge.fml.common.Optional; import net.minecraftforge.fml.common.Optional.Interface; import net.minecraftforge.fml.common.eventhandler.Event.Result; import net.minecraftforge.fml.common.registry.GameRegistry; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; @Optional.InterfaceList({ @Interface(iface = "buildcraft.api.tools.IToolWrench", modid = "BuildCraftAPI|core") }) public class ItemYetaWrench extends Item implements ITool, IConduitControl, IAdvancedTooltipProvider, IToolWrench { public static ItemYetaWrench create() { if (Config.useSneakMouseWheelYetaWrench) { PacketHandler.INSTANCE.registerMessage(YetaWrenchPacketProcessor.class, YetaWrenchPacketProcessor.class, PacketHandler.nextID(), Side.SERVER); } ItemYetaWrench result = new ItemYetaWrench(); GameRegistry.register(result); return result; } protected ItemYetaWrench() { setCreativeTab(EnderIOTab.tabEnderIO); setUnlocalizedName(ModObject.itemYetaWrench.getUnlocalisedName()); setRegistryName(ModObject.itemYetaWrench.getUnlocalisedName()); setMaxStackSize(1); } @Override public EnumActionResult onItemUseFirst(ItemStack stack, EntityPlayer player, World world, BlockPos pos, EnumFacing side, float hitX, float hitY, float hitZ, EnumHand hand) { if (world.isRemote) { //If its client side we have to return pass so this method is called on server, where we need to perform the op return EnumActionResult.PASS; } final IBlockState blockState = world.getBlockState(pos); IBlockState bs = blockState; Block block = bs.getBlock(); boolean ret = false; if (block != null) { RightClickBlock e = new RightClickBlock(player, hand, player.getHeldItem(hand), pos,side, new Vec3d(hitX, hitY, hitZ)); if (MinecraftForge.EVENT_BUS.post(e) || e.getResult() == Result.DENY || e.getUseBlock() == Result.DENY || e.getUseItem() == Result.DENY) { return EnumActionResult.PASS; } if (!player.isSneaking() && block.rotateBlock(world, pos, side)) { ret = true; } else if (block instanceof IBlockPaintableBlock && !player.isSneaking() && !YetaUtil.shouldHeldItemHideFacades()) { IBlockState paintSource = 
((IBlockPaintableBlock) block).getPaintSource(blockState, world, pos); if (paintSource != null) { final IBlockState rotatedPaintSource = PainterUtil2.rotate(paintSource); if (rotatedPaintSource != paintSource) { ((IBlockPaintableBlock) block).setPaintSource(blockState, world, pos, rotatedPaintSource); } ret = true; } } } if (ret) { player.swingArm(hand); } return ret ? EnumActionResult.SUCCESS: EnumActionResult.PASS; } @Override public ActionResult<ItemStack> onItemRightClick(ItemStack equipped, World world, EntityPlayer player, EnumHand hand) { if (!Config.useSneakRightClickYetaWrench) { return new ActionResult<ItemStack>(EnumActionResult.PASS, equipped); } if (!player.isSneaking()) { return new ActionResult<ItemStack>(EnumActionResult.PASS, equipped); } ConduitDisplayMode curMode = ConduitDisplayMode.getDisplayMode(equipped); if (curMode == null) { curMode = ConduitDisplayMode.ALL; } ConduitDisplayMode newMode = curMode.next(); ConduitDisplayMode.setDisplayMode(equipped, newMode); return new ActionResult<ItemStack>(EnumActionResult.SUCCESS, equipped); } @Override public boolean onBlockStartBreak(ItemStack itemstack, BlockPos pos, EntityPlayer player) { IBlockState bs = player.worldObj.getBlockState(pos); Block block = bs.getBlock(); if (player.isSneaking() && block == EnderIO.blockConduitBundle && player.capabilities.isCreativeMode) { block.onBlockClicked(player.worldObj, pos, player); return true; } return false; } @Override @SideOnly(Side.CLIENT) public boolean isFull3D() { return true; } @Override public boolean doesSneakBypassUse(ItemStack stack, IBlockAccess world, BlockPos pos, EntityPlayer player) { return true; } @Override public boolean canUse(ItemStack stack, EntityPlayer player, BlockPos pos) { return true; } @Override public void used(ItemStack stack, EntityPlayer player, BlockPos pos) { } @Override public boolean shouldHideFacades(ItemStack stack, EntityPlayer player) { ConduitDisplayMode curMode = ConduitDisplayMode.getDisplayMode(stack); return curMode != ConduitDisplayMode.NONE; } @Override public boolean showOverlay(ItemStack stack, EntityPlayer player) { return true; } /* IAdvancedTooltipProvider */ @Override public void addBasicEntries(ItemStack itemstack, EntityPlayer entityplayer, List<String> list, boolean flag) { } @Override public void addCommonEntries(ItemStack itemstack, EntityPlayer entityplayer, List<String> list, boolean flag) { } @Override public void addDetailedEntries(ItemStack itemstack, EntityPlayer entityplayer, List<String> list, boolean flag) { ArrayList<String> tmp = new ArrayList<String>(); SpecialTooltipHandler.addDetailedTooltipFromResources(tmp, getUnlocalizedName()); String keyName = KeyTracker.instance.getYetaWrenchMode().getDisplayName(); for (String line : tmp) { list.add(String.format(line, keyName)); } } @Override @Optional.Method(modid = "BuildCraftAPI|core") public boolean canWrench(EntityPlayer arg0, BlockPos arg1) { return true; } @Override @Optional.Method(modid = "BuildCraftAPI|core") public boolean canWrench(EntityPlayer arg0, Entity arg1) { return false; } @Override @Optional.Method(modid = "BuildCraftAPI|core") public void wrenchUsed(EntityPlayer player, BlockPos pos) { used(player.getHeldItemMainhand(), player, pos); } @Override @Optional.Method(modid = "BuildCraftAPI|core") public void wrenchUsed(EntityPlayer player, Entity arg1) { } }
src/main/java/crazypants/enderio/item/ItemYetaWrench.java
package crazypants.enderio.item; import java.util.ArrayList; import java.util.List; import org.lwjgl.input.Keyboard; import com.enderio.core.api.client.gui.IAdvancedTooltipProvider; import com.enderio.core.client.handlers.SpecialTooltipHandler; import buildcraft.api.tools.IToolWrench; import crazypants.enderio.EnderIO; import crazypants.enderio.EnderIOTab; import crazypants.enderio.ModObject; import crazypants.enderio.api.tool.IConduitControl; import crazypants.enderio.api.tool.ITool; import crazypants.enderio.conduit.ConduitDisplayMode; import crazypants.enderio.config.Config; import crazypants.enderio.network.PacketHandler; import crazypants.enderio.paint.IPaintable.IBlockPaintableBlock; import crazypants.enderio.paint.PainterUtil2; import crazypants.enderio.paint.YetaUtil; import net.minecraft.block.Block; import net.minecraft.block.state.IBlockState; import net.minecraft.entity.Entity; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.util.ActionResult; import net.minecraft.util.EnumActionResult; import net.minecraft.util.EnumFacing; import net.minecraft.util.EnumHand; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.Vec3d; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.entity.player.PlayerInteractEvent.RightClickBlock; import net.minecraftforge.fml.common.Optional; import net.minecraftforge.fml.common.Optional.Interface; import net.minecraftforge.fml.common.eventhandler.Event.Result; import net.minecraftforge.fml.common.registry.GameRegistry; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; @Optional.InterfaceList({ @Interface(iface = "buildcraft.api.tools.IToolWrench", modid = "BuildCraftAPI|core") }) public class ItemYetaWrench extends Item implements ITool, IConduitControl, IAdvancedTooltipProvider, IToolWrench { public static ItemYetaWrench create() { if (Config.useSneakMouseWheelYetaWrench) { PacketHandler.INSTANCE.registerMessage(YetaWrenchPacketProcessor.class, YetaWrenchPacketProcessor.class, PacketHandler.nextID(), Side.SERVER); } ItemYetaWrench result = new ItemYetaWrench(); GameRegistry.register(result); return result; } protected ItemYetaWrench() { setCreativeTab(EnderIOTab.tabEnderIO); setUnlocalizedName(ModObject.itemYetaWrench.getUnlocalisedName()); setRegistryName(ModObject.itemYetaWrench.getUnlocalisedName()); setMaxStackSize(1); } @Override public EnumActionResult onItemUseFirst(ItemStack stack, EntityPlayer player, World world, BlockPos pos, EnumFacing side, float hitX, float hitY, float hitZ, EnumHand hand) { if (world.isRemote) { //If its client side we have to return pass so this method is called on server, where we need to perform the op return EnumActionResult.PASS; } final IBlockState blockState = world.getBlockState(pos); IBlockState bs = blockState; Block block = bs.getBlock(); boolean ret = false; if (block != null) { RightClickBlock e = new RightClickBlock(player, hand, player.getHeldItem(hand), pos,side, new Vec3d(hitX, hitY, hitZ)); if (MinecraftForge.EVENT_BUS.post(e) || e.getResult() == Result.DENY || e.getUseBlock() == Result.DENY || e.getUseItem() == Result.DENY) { return EnumActionResult.PASS; } if (!player.isSneaking() && block.rotateBlock(world, pos, side)) { ret = true; } else if (block instanceof IBlockPaintableBlock && !player.isSneaking() && 
!YetaUtil.shouldHeldItemHideFacades()) { IBlockState paintSource = ((IBlockPaintableBlock) block).getPaintSource(blockState, world, pos); if (paintSource != null) { final IBlockState rotatedPaintSource = PainterUtil2.rotate(paintSource); if (rotatedPaintSource != paintSource) { ((IBlockPaintableBlock) block).setPaintSource(blockState, world, pos, rotatedPaintSource); } ret = true; } } } if (ret) { player.swingArm(hand); } return ret ? EnumActionResult.SUCCESS: EnumActionResult.PASS; } @Override public ActionResult<ItemStack> onItemRightClick(ItemStack equipped, World world, EntityPlayer player, EnumHand hand) { if (!Config.useSneakRightClickYetaWrench) { return new ActionResult<ItemStack>(EnumActionResult.PASS, equipped); } if (!player.isSneaking()) { return new ActionResult<ItemStack>(EnumActionResult.PASS, equipped); } ConduitDisplayMode curMode = ConduitDisplayMode.getDisplayMode(equipped); if (curMode == null) { curMode = ConduitDisplayMode.ALL; } ConduitDisplayMode newMode = curMode.next(); ConduitDisplayMode.setDisplayMode(equipped, newMode); return new ActionResult<ItemStack>(EnumActionResult.SUCCESS, equipped); } @Override public boolean onBlockStartBreak(ItemStack itemstack, BlockPos pos, EntityPlayer player) { IBlockState bs = player.worldObj.getBlockState(pos); Block block = bs.getBlock(); if (player.isSneaking() && block == EnderIO.blockConduitBundle && player.capabilities.isCreativeMode) { block.onBlockClicked(player.worldObj, pos, player); return true; } return false; } @Override @SideOnly(Side.CLIENT) public boolean isFull3D() { return true; } @Override public boolean doesSneakBypassUse(ItemStack stack, IBlockAccess world, BlockPos pos, EntityPlayer player) { return true; } @Override public boolean canUse(ItemStack stack, EntityPlayer player, BlockPos pos) { return true; } @Override public void used(ItemStack stack, EntityPlayer player, BlockPos pos) { } @Override public boolean shouldHideFacades(ItemStack stack, EntityPlayer player) { ConduitDisplayMode curMode = ConduitDisplayMode.getDisplayMode(stack); return curMode != ConduitDisplayMode.NONE; } @Override public boolean showOverlay(ItemStack stack, EntityPlayer player) { return true; } /* IAdvancedTooltipProvider */ @Override public void addBasicEntries(ItemStack itemstack, EntityPlayer entityplayer, List<String> list, boolean flag) { } @Override public void addCommonEntries(ItemStack itemstack, EntityPlayer entityplayer, List<String> list, boolean flag) { } @Override public void addDetailedEntries(ItemStack itemstack, EntityPlayer entityplayer, List<String> list, boolean flag) { ArrayList<String> tmp = new ArrayList<String>(); SpecialTooltipHandler.addDetailedTooltipFromResources(tmp, getUnlocalizedName()); String keyName = Keyboard.getKeyName(KeyTracker.instance.getYetaWrenchMode().getKeyCode()); for (String line : tmp) { list.add(String.format(line, keyName)); } } @Override @Optional.Method(modid = "BuildCraftAPI|core") public boolean canWrench(EntityPlayer arg0, BlockPos arg1) { return true; } @Override @Optional.Method(modid = "BuildCraftAPI|core") public boolean canWrench(EntityPlayer arg0, Entity arg1) { return false; } @Override @Optional.Method(modid = "BuildCraftAPI|core") public void wrenchUsed(EntityPlayer player, BlockPos pos) { used(player.getHeldItemMainhand(), player, pos); } @Override @Optional.Method(modid = "BuildCraftAPI|core") public void wrenchUsed(EntityPlayer player, Entity arg1) { } }
Added support for showing key combos to Yeta Wrench tooltip (closes #3467)
src/main/java/crazypants/enderio/item/ItemYetaWrench.java
Added support for showing key combos to Yeta Wrench tooltip
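A note on the change recorded above: addDetailedEntries in the new ItemYetaWrench loads the localized tooltip lines and substitutes the currently bound key name into each of them with String.format. Below is a minimal, self-contained sketch of that substitution pattern; the resource lines and class name are hypothetical stand-ins, not EnderIO's real localization entries.

import java.util.ArrayList;
import java.util.List;

public class TooltipKeySubstitutionSketch {
    // Hypothetical localized lines; in the mod they come from the language resources.
    private static final List<String> RAW_LINES = List.of(
            "Sneak + %s cycles the conduit display mode",
            "The current mode is shown on the overlay");

    // Lines without a %s placeholder pass through String.format unchanged.
    public static List<String> withKeyName(String keyName) {
        List<String> out = new ArrayList<>();
        for (String line : RAW_LINES) {
            out.add(String.format(line, keyName));
        }
        return out;
    }

    public static void main(String[] args) {
        withKeyName("Y").forEach(System.out::println);
    }
}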
Java
unlicense
d4c3278577c6f24ffe35025dc1704c60900de7f7
0
Ohohcakester/Any-Angle-Pathfinding,Ohohcakester/Any-Angle-Pathfinding
package main; import algorithms.AStar; import algorithms.AStarOctileHeuristic; import algorithms.AStarStaticMemory; import algorithms.AcceleratedAStar; import algorithms.AdjustmentThetaStar; import algorithms.Anya; import algorithms.BasicThetaStar; import algorithms.BreadthFirstSearch; import algorithms.JumpPointSearch; import algorithms.LazyThetaStar; import algorithms.RecursiveThetaStar; import algorithms.RestrictedVisibilityGraphAlgorithm; import algorithms.StrictVisibilityGraphAlgorithm; import algorithms.StrictVisibilityGraphAlgorithmV2; import algorithms.VisibilityGraphAlgorithm; import algorithms.incrementalvgs.IVGAlgorithm; import algorithms.sparsevgs.DirectedEdgeNLevelSparseVisibilityGraphAlgorithm; import algorithms.sparsevgs.EdgeNLevelSparseVisibilityGraphAlgorithm; import algorithms.sparsevgs.SparseVisibilityGraphAlgorithm; import algorithms.sparsevgs.VertexNLevelSparseVisibilityGraphAlgorithm; import algorithms.sparsevgs.VisibilityGraphAlgorithmOptimised; import algorithms.strictthetastar.RecursiveStrictThetaStar; import algorithms.strictthetastar.StrictThetaStar; import algorithms.subgoalgraphs.AnyAngleNLevelSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.AnyAngleSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.NLevelSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.RecursiveStrictAnyAngleSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.StrictAnyAngleSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.SubgoalGraphsAlgorithm; import algorithms.vertexanya.VertexAnya; import algorithms.vertexanya.VertexAnyaMarking; import algorithms.vertexanya.VertexAnyaMarkingV2; import algorithms.vertexanya.VertexAnyaMarkingV3; import algorithms.vertexanya.VertexAnyaNoExtents; import algorithms.vertexanya.VisibilityScanSearchEager; import algorithms.vertexanya.VisibilityScanSearchSemiEager; import algorithms.visibilitygraph.BFSVisibilityGraph; import grid.GridAndGoals; import grid.GridGraph; import main.graphgeneration.AutomataGenerator; import main.graphgeneration.DefaultGenerator; import main.graphgeneration.TiledMapGenerator; import main.graphgeneration.UpscaledMapGenerator; import main.mazes.StoredTestMazes; import main.testgen.TestDataGenerator; import uiandio.GraphImporter; /** * Instructions: Look for the main method. * We can either run tests or trace the algorithm. * * To see a visualisation of an algorithm, * 1) Set choice = 0 in the first line of main(); * 2) Choose a maze in the first line of loadMaze(); * 3) Choose an algorithm in the first line of setDefaultAlgoFunction(); * * The tracing / experimentation functions are detailed in the traceAlgorithm() method. */ public class AnyAnglePathfinding { public static final String PATH_TESTDATA = "testdata/"; public static final String PATH_MAZEDATA = "mazedata/"; public static final String PATH_ANALYSISDATA = "analysisdata/"; private static AlgoFunction algoFunction; // The algorithm is stored in this function. public static void main(String[] args) { int choice = 0; // Choose an operation. 0: Visualisation.run() should be the default. switch(choice) { case 0: Visualisation.run(); break; case 1: AlgoTest.run(); break; case 2: Experiment.run(); break; case 3: TestDataGenerator.run(); break; case 4: GridGraphVisualiser.run(); break; case 5: TextOutputVisualisation.run(); break; } } /** * Choose a maze. (a gridGraph setting) */ static GridAndGoals loadMaze() { int choice = 1; // Adjust this to choose a maze. 
switch(choice) { case 0 : {// UNSEEDED int unblockedRatio = 10; // chance of spawning a cluster of blocked tiles is 1 in unblockedRatio. int sizeX = 20; // x-axis size of grid int sizeY = 20; // y-axis size of grid int sx = 10; // x-coordinate of start point int sy = 13; // y-coordinate of start point int ex = 6; // x-coordinate of goal point int ey = 8; // y-coordinate of goal point return DefaultGenerator.generateUnseeded(sizeX, sizeY, unblockedRatio, sx, sy, ex, ey); } case 1 : { // SEEDED int unblockedRatio = 17; // chance of spawning a cluster of blocked tiles is 1 in unblockedRatio. int seed = 1667327427; // seed for the random. int sizeX = 40; // x-axis size of grid int sizeY = 40; // y-axis size of grid int sx = 6; // x-coordinate of start point int sy = 10; // y-coordinate of start point int ex = 39; // x-coordinate of goal point int ey = 32; // y-coordinate of goal point return DefaultGenerator.generateSeeded(seed, sizeX, sizeY, unblockedRatio, sx, sy, ex, ey); } case 2 : return GraphImporter.importGraphFromFile("maze.txt", 25, 17, 2, 9); case 3 : return DefaultGenerator.generateSeededOld(-98783479, 40, 40, 7, 1, 4, 18, 18); // maze 3 case 4 : return DefaultGenerator.generateSeededOld(-565315494, 15, 15, 9, 1, 2, 1, 13); // maze 2 case 5 : return DefaultGenerator.generateSeededOld(53, 15, 15, 9, 0, 0, 10, 14); // maze 1 case 6 : return DefaultGenerator.generateSeededOld(-159182402, 15, 15, 9, 1, 1, 13, 12); // anya previously gave incorrect path case 7 : return GraphImporter.importGraphFromFile("maze14x11.txt", 0, 0, 10, 10); // Maze to contradict Theta* / A* optimality case 8 : return GraphImporter.importGraphFromFile("mazeWCS.txt", 2, 0, 28, 25); // Worst Case Scenario path length. case 9 : return DefaultGenerator.generateSeededOld(-410889275, 15, 15, 7, 0, 1, 10, 12); // maze 4 case 10 : return GraphImporter.importGraphFromFile("mazeThetaWCS.txt", 0, 0, 28, 13); // Worst Case Scenario for Theta* case 11 : return GraphImporter.importGraphFromFile("mazeReuseWCS.txt", 1, 28, 0, 27); // Worst Case Scenario for Visibility Graph reuse. 
case 12 : return GraphImporter.importGraphFromFile("anyaCont2.txt", 1, 6, 9, 1); // difficult case for anya case 13 : return DefaultGenerator.generateSeededOld(-1155797147, 47, 32, 38, 46, 30, 20, 1); // issue for Strict Theta* case 14 : return DefaultGenerator.generateSeededOld(-1155849806, 11, 13, 40, 7, 12, 9, 0); // Strict Theta* longer than Basic Theta* case 15 : return GraphImporter.loadStoredMaze("sc2_losttemple", "66-83_117-53"); case 16 : return GraphImporter.importGraphFromFile("custommaze.txt", 1, 1, 7, 4); case 17 : return GraphImporter.importGraphFromFile("custommaze3.txt", 1, 19, 29, 2); case 18 : return GraphImporter.loadStoredMaze("baldursgate_AR0402SR", "9-45_44-22"); case 19 : return DefaultGenerator.generateSeeded(-1131220420, 12, 13, 37, 5, 13, 2, 3); // Issue for Strict Theta* at goal case 20 : return GraphImporter.importGraphFromFile("custommaze4.txt", 2, 4, 10, 2); case 21 : return DefaultGenerator.generateSeededTrueRandomGraph(-1186265305, 15, 9, 12, 7,8, 4,2); case 22 : //return DefaultGenerator.generateSeededTrueRandomGraph(-1186644456, 6,5, 14, 0,2, 5,0); return DefaultGenerator.generateSeededTrueRandomGraph(-1185836518, 30,9, 3, 1,0, 7,9); case 23 : return DefaultGenerator.generateSeeded(138863256, 200, 200, 7, 59, 179, 160, 35); // Good large dense graph with indirect path case 24 : return DefaultGenerator.generateSeeded(138863256, 200, 200, 7, 160, 35, 59, 179); // Same graph, opposite direction case 25 : return DefaultGenerator.generateSeeded(-1878652012, 200, 200, 7, 59, 179, 160, 35); // Good large dense graph with indirect path case 26: return GraphImporter.loadStoredMaze("corr2_maze512-2-5", "171-149_313-324"); case 27 : return DefaultGenerator.generateSeeded(-1270138724, 17, 16, 26, 12, 16, 8, 0); // Edge case for Incremental VG upper bound check case 28 : return DefaultGenerator.generateSeeded(250342248, 67, 33, 17, 3, 28, 47, 32); // Difficult case for Incremental VG lower bound check case 29 : return DefaultGenerator.generateSeeded(-13991511, 80, 80, 7, 26, 37, 52, 54); // Restricted VG Inefficient case 30: return GraphImporter.loadStoredMaze("def_iQCWUDHB_iED_iED_iP", "1-42_75-81"); case 31: //return DefaultGenerator.generateSeeded(-1131088503, 8, 11, 27, 4, 11, 7, 4); // Strict Theta* with high buffer finds a much longer path. return GraphImporter.importGraphFromFile("mazehighbufferbad.txt", 4, 8, 7, 1); // Strict Theta* with high buffer finds a much longer path. case 32: return GraphImporter.loadStoredMaze("corr2_maze512-2-1", "219-187_186-334"); case 33 : return GraphImporter.importGraphFromFile("anyaCont2b.txt", 9, 9, 9, 1); // difficult case for anya case 34: return DefaultGenerator.generateSeeded(47280928, 40, 40, 15, 24, 18, 0, 0); // Line of sight test 1 case 35 : return GraphImporter.importGraphFromFile("lineOfSightTest.txt", 14, 18, 0, 1);// Line of sight test 2 case 36 : return DefaultGenerator.generateSeeded(211, 40, 40, 10, 4, 3, 36, 36); // Maze likely to cause wrapping in taut-path algorithms. case 37 : return DefaultGenerator.generateSeeded(327116666, 40, 40, 8, 4, 3, 36, 37); // Goal unreachable. Much wrapping case 38 : return DefaultGenerator.generateSeeded(579631, 60, 60, 6, 34, 8, 37, 19); // Very roundabout path to goal. case 39 : return DefaultGenerator.generateSeeded(3350874, 20, 15, 13, 3, 3, 17, 12); // Basic Theta* suboptimal case 40 : return DefaultGenerator.generateSeeded(873637608, 9, 35, 24, 9, 28, 1, 12); // Bug in Vertex Anya due to not marking vertices as visited. 
case 41 : return GraphImporter.loadStoredMaze("sc2_blisteringsands", "20-93_119-84"); case 42: return DefaultGenerator.generateSeeded(-387131321, 20, 17, 9, 14, 1, 15, 7); // Indirect path with lots of wrapping for Anya case 43: return DefaultGenerator.generateSeeded(-387213321, 5000, 4999, 7, 114, 4791, 4715, 17); // 5000x4999 dense map case 44: return DefaultGenerator.generateSeeded(-387213321, 2000, 1999, 7, 114, 1791, 1715, 17); // 2000x1999 dense map case 45: return GraphImporter.loadStoredMaze("wc3_gardenofwar", "378-312_74-120"); case 46: return GraphImporter.loadStoredMaze("sc1_EbonLakes", "139-13_321-470"); case 47: { // SEEDED int unblockedRatio = 5; // chance of spawning a blocked tile is 1 in unblockedRatio. int iterations = 3; // number of iterations for cellular automata int cutoffOffset = -1; // offset for the default cutoff value used for cellular automata. (Higher = Less blocked) float resolution = 1f; // (Larger -> bigger islands) boolean bordersAreBlocked = false; int seed = -44556930; // seed for the random. int sizeX = 600; // x-axis size of grid int sizeY = 600; // y-axis size of grid int sx = 19; // y-coordinate of start point int sy = 148; // x-coordinate of start point int ex = 203; // y-coordinate of goal point int ey = 145; // x-coordinate of goal point return AutomataGenerator.generateSeeded(seed, sizeX, sizeY, unblockedRatio, iterations, resolution, cutoffOffset, bordersAreBlocked, sx, sy, ex, ey); } case 48: return GraphImporter.loadStoredMaze("sc1_GreenerPastures", "210-327_722-43"); case 49: return DefaultGenerator.generateSeeded(-1089290968, 50, 50, 7, 9, 6, 40, 46); // Good, mid-size indirect path map case 50: return DefaultGenerator.generateSeeded(-63381719, 19, 13, 10, 15, 1, 9, 11); // Counterexample maze that shows that you need to consider finite-level edges even after encountering skip-vertices in ENLSVGs. case 51: return AutomataGenerator.generateSeeded(-44556930, 223, 164, 5, 3, 2.4f, -1, true, 19, 148, 203, 145); case 52: return GraphImporter.importGraphFromFile("cropped.txt", 445, 2845, 1705, 77); case 53: return GraphImporter.importGraphFromFile("fatobstaclemaze.txt", 10, 10, 990, 990); // Maze with large, roughly convex obstacles case 54: return GraphImporter.importGraphFromFile("circleobstaclemaze.txt", 10, 10, 990, 990); // Maze with one large circular obstacle case 55: return GraphImporter.importGraphFromFile("threeislands.txt", 10, 10, 390, 390); // Maze with large, roughly convex obstacles case 56: return AutomataGenerator.generateSeeded(694392, 24, 20, 5, 3, 3f, 0, false, 5, 5, 19, 5); // A maze used to demo edge marking case 57: //return StoredTestMazes.loadAutomataMaze(0, 7).gridAndGoals(0); //return StoredTestMazes.loadScaledMaze("sc2_blisteringsands", 10).gridAndGoals(0); return StoredTestMazes.loadTiledMaze(1, 7).gridAndGoals(0); case 58: return UpscaledMapGenerator.upscale(GraphImporter.loadStoredMaze("wc3_gardenofwar", "378-312_74-120"), 9, true); case 59: return TiledMapGenerator.mergeMapsDefault(new GridGraph[] { GraphImporter.loadStoredMaze("wc3_gardenofwar"), GraphImporter.loadStoredMaze("sc1_EbonLakes"), GraphImporter.loadStoredMaze("wc3_gardenofwar"), GraphImporter.loadStoredMaze("sc1_EbonLakes"), GraphImporter.loadStoredMaze("wc3_gardenofwar"), GraphImporter.loadStoredMaze("sc1_EbonLakes"), }, 3, 2); case 60: { // SEEDED float percentBlocked = 0.45f; // chance of spawning a blocked tile is 1 in unblockedRatio. 
float resolution = 0.1f; // (Larger -> bigger islands) int iterations = 5; // number of iterations for cellular automata boolean bordersAreBlocked = true; int seed = 5231; // seed for the random. int sizeX = 3000; // x-axis size of grid int sizeY = 3000; // y-axis size of grid int sx = 5; // y-coordinate of start point int sy = 5; // x-coordinate of start point int ex = 19; // y-coordinate of goal point int ey = 5; // x-coordinate of goal point return AutomataGenerator.generateSeededDynamicCutoff(seed, sizeX, sizeY, percentBlocked, iterations, resolution, bordersAreBlocked, sx, sy, ex, ey); } default: return null; } } /** * Choose an algorithm. */ static AlgoFunction setDefaultAlgoFunction() { int choice = 8; // adjust this to choose an algorithm switch (choice) { case 1 : algoFunction = AStar::new; break; case 2 : algoFunction = BreadthFirstSearch::new; break; case 3 : algoFunction = BreadthFirstSearch::postSmooth; break; case 4 : algoFunction = AStar::postSmooth; break; case 5 : algoFunction = AStar::dijkstra; break; case 6 : algoFunction = Anya::new; break; case 7 : algoFunction = VisibilityGraphAlgorithm::new; break; case 8 : algoFunction = BasicThetaStar::new; break; case 9 : algoFunction = BasicThetaStar::noHeuristic; break; case 10 : algoFunction = BasicThetaStar::postSmooth; break; case 11 : algoFunction = AcceleratedAStar::new; break; case 12 : //algoFunction = VisibilityGraphAlgorithm::graphReuse; algoFunction = VisibilityGraphAlgorithmOptimised::graphReuse; break; case 13 : algoFunction = AdjustmentThetaStar::new; break; case 14 : algoFunction = StrictThetaStar::new; break; case 15 : algoFunction = RecursiveStrictThetaStar::new; break; case 16 : algoFunction = BFSVisibilityGraph::graphReuse; break; case 17 : algoFunction = RestrictedVisibilityGraphAlgorithm::new; break; case 18 : algoFunction = StrictVisibilityGraphAlgorithm::new; break; case 19 : algoFunction = LazyThetaStar::new; break; case 20 : algoFunction = AStarOctileHeuristic::new; break; case 21 : algoFunction = AStarOctileHeuristic::postSmooth; break; case 22 : algoFunction = JumpPointSearch::new; break; case 23 : algoFunction = JumpPointSearch::postSmooth; break; case 24 : algoFunction = AStarStaticMemory::new; break; case 25 : algoFunction = StrictVisibilityGraphAlgorithmV2::new; break; case 26 : algoFunction = RecursiveThetaStar::new; break; case 27 : algoFunction = SparseVisibilityGraphAlgorithm::graphReuse; break; case 28 : algoFunction = IVGAlgorithm::new; break; case 29 : algoFunction = VertexAnya::new; break; case 30 : algoFunction = VertexAnyaNoExtents::new; break; case 31 : algoFunction = VisibilityScanSearchEager::new; break; case 32 : algoFunction = VisibilityScanSearchSemiEager::new; break; case 33 : algoFunction = VertexAnyaMarking::new; break; case 34 : algoFunction = VertexAnyaMarkingV2::new; break; case 35 : algoFunction = VertexAnyaMarkingV3::new; break; case 36 : algoFunction = SubgoalGraphsAlgorithm::new; break; case 37 : algoFunction = NLevelSubgoalGraphsAlgorithm::new; break; case 38 : algoFunction = AnyAngleSubgoalGraphsAlgorithm::new; break; case 39 : algoFunction = AnyAngleNLevelSubgoalGraphsAlgorithm::new; break; case 40 : algoFunction = StrictAnyAngleSubgoalGraphsAlgorithm::new; break; case 41 : algoFunction = RecursiveStrictAnyAngleSubgoalGraphsAlgorithm::new; break; case 42 : algoFunction = VertexNLevelSparseVisibilityGraphAlgorithm::graphReuse; break; case 43 : algoFunction = EdgeNLevelSparseVisibilityGraphAlgorithm::graphReuse; break; case 44 : algoFunction = 
EdgeNLevelSparseVisibilityGraphAlgorithm.withLevelLimit(1); break; case 45 : algoFunction = DirectedEdgeNLevelSparseVisibilityGraphAlgorithm::graphReuse; break; } return algoFunction; } }
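Most of the maze cases in loadMaze above are seeded, so a given choice reproduces exactly the same grid and start/goal pair on every run. The following tiny sketch illustrates that idea using java.util.Random in place of the project's generators; the class and its parameters are hypothetical.

import java.util.Random;

public class SeededGridSketch {
    // Same seed and dimensions always yield the same blocked/unblocked pattern.
    public static boolean[][] generate(long seed, int sizeX, int sizeY, int blockedOneIn) {
        Random random = new Random(seed);
        boolean[][] blocked = new boolean[sizeY][sizeX];
        for (int y = 0; y < sizeY; y++) {
            for (int x = 0; x < sizeX; x++) {
                blocked[y][x] = random.nextInt(blockedOneIn) == 0;
            }
        }
        return blocked;
    }

    public static void main(String[] args) {
        System.out.println(generate(1667327427L, 40, 40, 17)[10][6]); // deterministic for a fixed seed
    }
}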
src/main/AnyAnglePathfinding.java
package main; import algorithms.AStar; import algorithms.AStarOctileHeuristic; import algorithms.AStarStaticMemory; import algorithms.AcceleratedAStar; import algorithms.AdjustmentThetaStar; import algorithms.Anya; import algorithms.BasicThetaStar; import algorithms.BreadthFirstSearch; import algorithms.JumpPointSearch; import algorithms.LazyThetaStar; import algorithms.RecursiveThetaStar; import algorithms.RestrictedVisibilityGraphAlgorithm; import algorithms.StrictVisibilityGraphAlgorithm; import algorithms.StrictVisibilityGraphAlgorithmV2; import algorithms.VisibilityGraphAlgorithm; import algorithms.incrementalvgs.IVGAlgorithm; import algorithms.sparsevgs.DirectedEdgeNLevelSparseVisibilityGraphAlgorithm; import algorithms.sparsevgs.EdgeNLevelSparseVisibilityGraphAlgorithm; import algorithms.sparsevgs.SparseVisibilityGraphAlgorithm; import algorithms.sparsevgs.VertexNLevelSparseVisibilityGraphAlgorithm; import algorithms.sparsevgs.VisibilityGraphAlgorithmOptimised; import algorithms.strictthetastar.RecursiveStrictThetaStar; import algorithms.strictthetastar.StrictThetaStar; import algorithms.subgoalgraphs.AnyAngleNLevelSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.AnyAngleSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.NLevelSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.RecursiveStrictAnyAngleSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.StrictAnyAngleSubgoalGraphsAlgorithm; import algorithms.subgoalgraphs.SubgoalGraphsAlgorithm; import algorithms.vertexanya.VertexAnya; import algorithms.vertexanya.VertexAnyaMarking; import algorithms.vertexanya.VertexAnyaMarkingV2; import algorithms.vertexanya.VertexAnyaMarkingV3; import algorithms.vertexanya.VertexAnyaNoExtents; import algorithms.vertexanya.VisibilityScanSearchEager; import algorithms.vertexanya.VisibilityScanSearchSemiEager; import algorithms.visibilitygraph.BFSVisibilityGraph; import grid.GridAndGoals; import grid.GridGraph; import main.graphgeneration.AutomataGenerator; import main.graphgeneration.DefaultGenerator; import main.graphgeneration.TiledMapGenerator; import main.graphgeneration.UpscaledMapGenerator; import main.mazes.StoredTestMazes; import main.testgen.TestDataGenerator; import uiandio.GraphImporter; /** * Instructions: Look for the main method. * We can either run tests or trace the algorithm. * * To see a visualisation of an algorithm, * 1) Set choice = 0 in the first line of main(); * 2) Choose a maze in the first line of loadMaze(); * 3) Choose an algorithm in the first line of setDefaultAlgoFunction(); * * The tracing / experimentation functions are detailed in the traceAlgorithm() method. */ public class AnyAnglePathfinding { public static final String PATH_TESTDATA = "testdata/"; public static final String PATH_MAZEDATA = "mazedata/"; public static final String PATH_ANALYSISDATA = "analysisdata/"; private static AlgoFunction algoFunction; // The algorithm is stored in this function. public static void main(String[] args) { int choice = 0; // Choose an operation. 0: Visualisation.run() should be the default. switch(choice) { case 0: Visualisation.run(); break; case 1: AlgoTest.run(); break; case 2: Experiment.run(); break; case 3: TestDataGenerator.run(); break; case 4: GridGraphVisualiser.run(); break; case 5: TextOutputVisualisation.run(); break; } } /** * Choose a maze. (a gridGraph setting) */ static GridAndGoals loadMaze() { int choice = 1; // Adjust this to choose a maze. 
switch(choice) { case 0 : {// UNSEEDED int unblockedRatio = 10; // chance of spawning a cluster of blocked tiles is 1 in unblockedRatio. int sizeX = 20; // x-axis size of grid int sizeY = 20; // y-axis size of grid int sx = 10; // x-coordinate of start point int sy = 13; // y-coordinate of start point int ex = 6; // x-coordinate of goal point int ey = 8; // y-coordinate of goal point return DefaultGenerator.generateUnseeded(sizeX, sizeY, unblockedRatio, sx, sy, ex, ey); } case 1 : { // SEEDED int unblockedRatio = 17; // chance of spawning a cluster of blocked tiles is 1 in unblockedRatio. int seed = 1667327427; // seed for the random. int sizeX = 40; // x-axis size of grid int sizeY = 40; // y-axis size of grid int sx = 6; // x-coordinate of start point int sy = 10; // y-coordinate of start point int ex = 39; // x-coordinate of goal point int ey = 32; // y-coordinate of goal point return DefaultGenerator.generateSeeded(seed, sizeX, sizeY, unblockedRatio, sx, sy, ex, ey); } case 2 : return GraphImporter.importGraphFromFile("maze.txt", 25, 17, 2, 9); case 3 : return DefaultGenerator.generateSeededOld(-98783479, 40, 40, 7, 1, 4, 18, 18); // maze 3 case 4 : return DefaultGenerator.generateSeededOld(-565315494, 15, 15, 9, 1, 2, 1, 13); // maze 2 case 5 : return DefaultGenerator.generateSeededOld(53, 15, 15, 9, 0, 0, 10, 14); // maze 1 case 6 : return DefaultGenerator.generateSeededOld(-159182402, 15, 15, 9, 1, 1, 13, 12); // anya previously gave incorrect path case 7 : return GraphImporter.importGraphFromFile("maze14x11.txt", 0, 0, 10, 10); // Maze to contradict Theta* / A* optimality case 8 : return GraphImporter.importGraphFromFile("mazeWCS.txt", 2, 0, 28, 25); // Worst Case Scenario path length. case 9 : return DefaultGenerator.generateSeededOld(-410889275, 15, 15, 7, 0, 1, 10, 12); // maze 4 case 10 : return GraphImporter.importGraphFromFile("mazeThetaWCS.txt", 0, 0, 28, 13); // Worst Case Scenario for Theta* case 11 : return GraphImporter.importGraphFromFile("mazeReuseWCS.txt", 1, 28, 0, 27); // Worst Case Scenario for Visibility Graph reuse. 
case 12 : return GraphImporter.importGraphFromFile("anyaCont2.txt", 1, 6, 9, 1); // difficult case for anya case 13 : return DefaultGenerator.generateSeededOld(-1155797147, 47, 32, 38, 46, 30, 20, 1); // issue for Strict Theta* case 14 : return DefaultGenerator.generateSeededOld(-1155849806, 11, 13, 40, 7, 12, 9, 0); // Strict Theta* longer than Basic Theta* case 15 : return GraphImporter.loadStoredMaze("sc2_losttemple", "66-83_117-53"); case 16 : return GraphImporter.importGraphFromFile("custommaze.txt", 1, 1, 7, 4); case 17 : return GraphImporter.importGraphFromFile("custommaze3.txt", 1, 19, 29, 2); case 18 : return GraphImporter.loadStoredMaze("baldursgate_AR0402SR", "9-45_44-22"); case 19 : return DefaultGenerator.generateSeeded(-1131220420, 12, 13, 37, 5, 13, 2, 3); // Issue for Strict Theta* at goal case 20 : return GraphImporter.importGraphFromFile("custommaze4.txt", 2, 4, 10, 2); case 21 : return DefaultGenerator.generateSeededTrueRandomGraph(-1186265305, 15, 9, 12, 7,8, 4,2); case 22 : //return DefaultGenerator.generateSeededTrueRandomGraph(-1186644456, 6,5, 14, 0,2, 5,0); return DefaultGenerator.generateSeededTrueRandomGraph(-1185836518, 30,9, 3, 1,0, 7,9); case 23 : return DefaultGenerator.generateSeeded(138863256, 200, 200, 7, 59, 179, 160, 35); // Good large dense graph with indirect path case 24 : return DefaultGenerator.generateSeeded(138863256, 200, 200, 7, 160, 35, 59, 179); // Same graph, opposite direction case 25 : return DefaultGenerator.generateSeeded(-1878652012, 200, 200, 7, 59, 179, 160, 35); // Good large dense graph with indirect path case 26: return GraphImporter.loadStoredMaze("corr2_maze512-2-5", "171-149_313-324"); case 27 : return DefaultGenerator.generateSeeded(-1270138724, 17, 16, 26, 12, 16, 8, 0); // Edge case for Incremental VG upper bound check case 28 : return DefaultGenerator.generateSeeded(250342248, 67, 33, 17, 3, 28, 47, 32); // Difficult case for Incremental VG lower bound check case 29 : return DefaultGenerator.generateSeeded(-13991511, 80, 80, 7, 26, 37, 52, 54); // Restricted VG Inefficient case 30: return GraphImporter.loadStoredMaze("def_iQCWUDHB_iED_iED_iP", "1-42_75-81"); case 31: //return DefaultGenerator.generateSeeded(-1131088503, 8, 11, 27, 4, 11, 7, 4); // Strict Theta* with high buffer finds a much longer path. return GraphImporter.importGraphFromFile("mazehighbufferbad.txt", 4, 8, 7, 1); // Strict Theta* with high buffer finds a much longer path. case 32: return GraphImporter.loadStoredMaze("corr2_maze512-2-1", "219-187_186-334"); case 33 : return GraphImporter.importGraphFromFile("anyaCont2b.txt", 9, 9, 9, 1); // difficult case for anya case 34: return DefaultGenerator.generateSeeded(47280928, 40, 40, 15, 24, 18, 0, 0); // Line of sight test 1 case 35 : return GraphImporter.importGraphFromFile("lineOfSightTest.txt", 14, 18, 0, 1);// Line of sight test 2 case 36 : return DefaultGenerator.generateSeeded(211, 40, 40, 10, 4, 3, 36, 36); // Maze likely to cause wrapping in taut-path algorithms. case 37 : return DefaultGenerator.generateSeeded(327116666, 40, 40, 8, 4, 3, 36, 37); // Goal unreachable. Much wrapping case 38 : return DefaultGenerator.generateSeeded(579631, 60, 60, 6, 34, 8, 37, 19); // Very roundabout path to goal. case 39 : return DefaultGenerator.generateSeeded(3350874, 20, 15, 13, 3, 3, 17, 12); // Basic Theta* suboptimal case 40 : return DefaultGenerator.generateSeeded(873637608, 9, 35, 24, 9, 28, 1, 12); // Bug in Vertex Anya due to not marking vertices as visited. 
case 41 : return GraphImporter.loadStoredMaze("sc2_blisteringsands", "20-93_119-84"); case 42: return DefaultGenerator.generateSeeded(-387131321, 20, 17, 9, 14, 1, 15, 7); // Indirect path with lots of wrapping for Anya case 43: return DefaultGenerator.generateSeeded(-387213321, 5000, 4999, 7, 114, 4791, 4715, 17); // 5000x4999 dense map case 44: return DefaultGenerator.generateSeeded(-387213321, 2000, 1999, 7, 114, 1791, 1715, 17); // 2000x1999 dense map case 45: return GraphImporter.loadStoredMaze("wc3_gardenofwar", "378-312_74-120"); case 46: return GraphImporter.loadStoredMaze("sc1_EbonLakes", "139-13_321-470"); case 47: { // SEEDED int unblockedRatio = 5; // chance of spawning a blocked tile is 1 in unblockedRatio. int iterations = 3; // number of iterations for cellular automata int cutoffOffset = -1; // offset for the default cutoff value used for cellular automata. (Higher = Less blocked) float resolution = 1f; // (Larger -> bigger islands) boolean bordersAreBlocked = false; int seed = -44556930; // seed for the random. int sizeX = 600; // x-axis size of grid int sizeY = 600; // y-axis size of grid int sx = 19; // y-coordinate of start point int sy = 148; // x-coordinate of start point int ex = 203; // y-coordinate of goal point int ey = 145; // x-coordinate of goal point return AutomataGenerator.generateSeeded(seed, sizeX, sizeY, unblockedRatio, iterations, resolution, cutoffOffset, bordersAreBlocked, sx, sy, ex, ey); } case 48: return GraphImporter.loadStoredMaze("sc1_GreenerPastures", "210-327_722-43"); case 49: return DefaultGenerator.generateSeeded(-1089290968, 50, 50, 7, 9, 6, 40, 46); // Good, mid-size indirect path map case 50: return DefaultGenerator.generateSeeded(-63381719, 19, 13, 10, 15, 1, 9, 11); // Counterexample maze that shows that you need to consider finite-level edges even after encountering skip-vertices in ENLSVGs. case 51: return AutomataGenerator.generateSeeded(-44556930, 223, 164, 5, 3, 2.4f, -1, true, 19, 148, 203, 145); case 52: return GraphImporter.importGraphFromFile("cropped.txt", 445, 2845, 1705, 77); case 53: return GraphImporter.importGraphFromFile("fatobstaclemaze.txt", 10, 10, 990, 990); // Maze with large, roughly convex obstacles case 54: return GraphImporter.importGraphFromFile("circleobstaclemaze.txt", 10, 10, 990, 990); // Maze with one large circular obstacle case 55: return GraphImporter.importGraphFromFile("threeislands.txt", 10, 10, 390, 390); // Maze with large, roughly convex obstacles case 56: return AutomataGenerator.generateSeeded(694392, 24, 20, 5, 3, 3f, 0, false, 5, 5, 19, 5); // A maze used to demo edge marking case 57: return StoredTestMazes.loadAutomataMaze(0, 7).gridAndGoals(0); case 58: return UpscaledMapGenerator.upscale(GraphImporter.loadStoredMaze("wc3_gardenofwar", "378-312_74-120"), 9, true); case 59: return TiledMapGenerator.mergeMapsDefault(new GridGraph[] { GraphImporter.loadStoredMaze("wc3_gardenofwar"), GraphImporter.loadStoredMaze("sc1_EbonLakes"), GraphImporter.loadStoredMaze("wc3_gardenofwar"), GraphImporter.loadStoredMaze("sc1_EbonLakes"), GraphImporter.loadStoredMaze("wc3_gardenofwar"), GraphImporter.loadStoredMaze("sc1_EbonLakes"), }, 3, 2); case 60: { // SEEDED float percentBlocked = 0.45f; // chance of spawning a blocked tile is 1 in unblockedRatio. float resolution = 0.1f; // (Larger -> bigger islands) int iterations = 5; // number of iterations for cellular automata boolean bordersAreBlocked = true; int seed = 5231; // seed for the random. 
int sizeX = 3000; // x-axis size of grid int sizeY = 3000; // y-axis size of grid int sx = 5; // y-coordinate of start point int sy = 5; // x-coordinate of start point int ex = 19; // y-coordinate of goal point int ey = 5; // x-coordinate of goal point return AutomataGenerator.generateSeededDynamicCutoff(seed, sizeX, sizeY, percentBlocked, iterations, resolution, bordersAreBlocked, sx, sy, ex, ey); } default: return null; } } /** * Choose an algorithm. */ static AlgoFunction setDefaultAlgoFunction() { int choice = 8; // adjust this to choose an algorithm switch (choice) { case 1 : algoFunction = AStar::new; break; case 2 : algoFunction = BreadthFirstSearch::new; break; case 3 : algoFunction = BreadthFirstSearch::postSmooth; break; case 4 : algoFunction = AStar::postSmooth; break; case 5 : algoFunction = AStar::dijkstra; break; case 6 : algoFunction = Anya::new; break; case 7 : algoFunction = VisibilityGraphAlgorithm::new; break; case 8 : algoFunction = BasicThetaStar::new; break; case 9 : algoFunction = BasicThetaStar::noHeuristic; break; case 10 : algoFunction = BasicThetaStar::postSmooth; break; case 11 : algoFunction = AcceleratedAStar::new; break; case 12 : //algoFunction = VisibilityGraphAlgorithm::graphReuse; algoFunction = VisibilityGraphAlgorithmOptimised::graphReuse; break; case 13 : algoFunction = AdjustmentThetaStar::new; break; case 14 : algoFunction = StrictThetaStar::new; break; case 15 : algoFunction = RecursiveStrictThetaStar::new; break; case 16 : algoFunction = BFSVisibilityGraph::graphReuse; break; case 17 : algoFunction = RestrictedVisibilityGraphAlgorithm::new; break; case 18 : algoFunction = StrictVisibilityGraphAlgorithm::new; break; case 19 : algoFunction = LazyThetaStar::new; break; case 20 : algoFunction = AStarOctileHeuristic::new; break; case 21 : algoFunction = AStarOctileHeuristic::postSmooth; break; case 22 : algoFunction = JumpPointSearch::new; break; case 23 : algoFunction = JumpPointSearch::postSmooth; break; case 24 : algoFunction = AStarStaticMemory::new; break; case 25 : algoFunction = StrictVisibilityGraphAlgorithmV2::new; break; case 26 : algoFunction = RecursiveThetaStar::new; break; case 27 : algoFunction = SparseVisibilityGraphAlgorithm::graphReuse; break; case 28 : algoFunction = IVGAlgorithm::new; break; case 29 : algoFunction = VertexAnya::new; break; case 30 : algoFunction = VertexAnyaNoExtents::new; break; case 31 : algoFunction = VisibilityScanSearchEager::new; break; case 32 : algoFunction = VisibilityScanSearchSemiEager::new; break; case 33 : algoFunction = VertexAnyaMarking::new; break; case 34 : algoFunction = VertexAnyaMarkingV2::new; break; case 35 : algoFunction = VertexAnyaMarkingV3::new; break; case 36 : algoFunction = SubgoalGraphsAlgorithm::new; break; case 37 : algoFunction = NLevelSubgoalGraphsAlgorithm::new; break; case 38 : algoFunction = AnyAngleSubgoalGraphsAlgorithm::new; break; case 39 : algoFunction = AnyAngleNLevelSubgoalGraphsAlgorithm::new; break; case 40 : algoFunction = StrictAnyAngleSubgoalGraphsAlgorithm::new; break; case 41 : algoFunction = RecursiveStrictAnyAngleSubgoalGraphsAlgorithm::new; break; case 42 : algoFunction = VertexNLevelSparseVisibilityGraphAlgorithm::graphReuse; break; case 43 : algoFunction = EdgeNLevelSparseVisibilityGraphAlgorithm::graphReuse; break; case 44 : algoFunction = EdgeNLevelSparseVisibilityGraphAlgorithm.withLevelLimit(1); break; case 45 : algoFunction = DirectedEdgeNLevelSparseVisibilityGraphAlgorithm::graphReuse; break; } return algoFunction; } }
Add scaled maze and tiled maze load options to maze 57 in AnyAnglePathfinding.java
src/main/AnyAnglePathfinding.java
Add scaled maze and tiled maze load options to maze 57 in AnyAnglePathfinding.java
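setDefaultAlgoFunction in both file versions selects the pathfinder by assigning a constructor or static-factory method reference to the AlgoFunction field. A minimal sketch of that selection pattern follows; the Algorithm interface, the map, and the algorithm classes are hypothetical stand-ins for the project's types.

import java.util.Map;
import java.util.function.Supplier;

public class AlgoSelectionSketch {
    interface Algorithm { void computePath(); }

    static class AStar implements Algorithm {
        public void computePath() { System.out.println("A* search"); }
    }

    static class BasicThetaStar implements Algorithm {
        public void computePath() { System.out.println("Basic Theta* search"); }
    }

    // Method references play the role of the AlgoFunction assignments in the switch.
    private static final Map<Integer, Supplier<Algorithm>> CHOICES = Map.of(
            1, AStar::new,
            8, BasicThetaStar::new);

    public static void main(String[] args) {
        Algorithm algo = CHOICES.getOrDefault(8, AStar::new).get();
        algo.computePath();
    }
}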
Java
apache-2.0
f7a73863ec3c98558ea79e62e19e26403ec98ffc
0
tburch/jsonblob,tburch/jsonblob,tburch/jsonblob
package com.lowtuna.jsonblob.core; import com.codahale.metrics.CachedGauge; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.mongodb.BasicDBObject; import com.mongodb.BasicDBObjectBuilder; import com.mongodb.DB; import com.mongodb.DBCollection; import com.mongodb.DBObject; import com.mongodb.WriteResult; import com.mongodb.util.JSON; import com.mongodb.util.JSONParseException; import io.dropwizard.lifecycle.Managed; import io.dropwizard.util.Duration; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.bson.types.ObjectId; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.util.Date; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @Slf4j public class BlobManager implements Managed { public static final String UPDATED_ATTR_NAME = "updated"; public static final String CREATED_ATTR_NAME = "created"; public static final String ACCESSED_ATTR_NAME = "accessed"; private final ScheduledExecutorService scheduledExecutorService; private final Duration blobCleanupFrequency; @Getter private final Duration blobAccessTtl; private final MetricRegistry metricRegistry; private final DBCollection collection; private final Timer createTimer; private final Timer readTimer; private final Timer updateTimer; private final Timer deleteTimer; public BlobManager(DB mongoDb, String blobCollectionName, ScheduledExecutorService scheduledExecutorService, Duration blobCleanupFrequency, Duration blobAccessTtl, MetricRegistry metrics) { this.scheduledExecutorService = scheduledExecutorService; this.blobCleanupFrequency = blobCleanupFrequency; this.blobAccessTtl = blobAccessTtl; this.metricRegistry = metrics; this.collection = mongoDb.getCollection(blobCollectionName); this.createTimer = metrics.timer(MetricRegistry.name(getClass(), "create")); this.readTimer = metrics.timer(MetricRegistry.name(getClass(), "read")); this.updateTimer = metrics.timer(MetricRegistry.name(getClass(), "update")); this.deleteTimer = metrics.timer(MetricRegistry.name(getClass(), "delete")); metrics.register(MetricRegistry.name(getClass(), "blobCount"), new CachedGauge<Long>(1, TimeUnit.HOURS) { @Override protected Long loadValue() { return collection.count(); } }); } private BasicDBObject getDBObject(ObjectId objectId) { return new BasicDBObject("_id", objectId); } private DBObject createDBObject(String json, boolean setCreated) { final DateTime now = DateTime.now(DateTimeZone.UTC); BasicDBObjectBuilder builder = BasicDBObjectBuilder .start(UPDATED_ATTR_NAME, new Date(now.getMillis())) .append(ACCESSED_ATTR_NAME, new Date(now.getMillis())) .append("blob", JSON.parse(json)); if (setCreated) { builder = builder.append(CREATED_ATTR_NAME, new Date(now.getMillis())); } return builder.get(); } public static boolean isValidJson(String json) { try { JSON.parse(json); return true; } catch (JSONParseException e) { return false; } } public DBObject create(String json) { try (Timer.Context timerContext = createTimer.time()) { log.debug("inserting blob with json='{}'", json); DBObject parsed = createDBObject(json, true); collection.insert(parsed); log.debug("successfully inserted blob of json as objectId='{}'", parsed.get("_id")); return parsed; } } public DBObject read(final ObjectId id) throws BlobNotFoundException { try (Timer.Context timerContext = readTimer.time()) { log.debug("attempting to retrieve blob with id='{}'", id); DBObject objectId = getDBObject(id); if (objectId != null) { log.debug("finding blob with objectId='{}'", 
objectId); final DBObject obj = collection.findOne(objectId); if (obj != null) { final DateTime accessed = DateTime.now(DateTimeZone.UTC); scheduledExecutorService.submit(new Runnable() { @Override public void run() { log.debug("updating last accessed time for block with objectId='{}' to {}", id, accessed); BasicDBObject updatedAccessedDbObject = new BasicDBObject(); updatedAccessedDbObject.append("$set", new BasicDBObject().append(ACCESSED_ATTR_NAME, new Date(accessed.getMillis()))); collection.update(obj, updatedAccessedDbObject, false, false); log.debug("updated last accessed time for block with objectId='{}' to {}", id, accessed); } }); return obj; } } log.debug("couldn't retrieve blob with id='{}'", id); throw new BlobNotFoundException(id); } } public DBObject update(ObjectId id, String json) throws BlobNotFoundException { try (Timer.Context timerContext = updateTimer.time()) { log.debug("attempting to update blob with id='{}' and json='{}'", id, json); DBObject objectId = getDBObject(id); if (objectId != null) { log.debug("finding blob to update with objectId='{}'", objectId); DBObject obj = collection.findOne(objectId); if (obj != null) { DBObject parsed = createDBObject(json, false); collection.update(obj, parsed); log.debug("successfully updated blob of json with objectId='{}'", id); return parsed; } } log.debug("couldn't update blob with id='{}'", id); throw new BlobNotFoundException(id); } } public boolean delete(ObjectId id) throws BlobNotFoundException { try (Timer.Context timerContext = deleteTimer.time();) { log.debug("attempting to delete blob with id='{}'", id); DBObject objectId = getDBObject(id); if (objectId != null) { log.debug("finding blob to delete with objectId='{}'", objectId); DBObject obj = collection.findOne(objectId); if (obj != null) { WriteResult result = collection.remove(obj); boolean removed = result.getN() > 0 && result.getLastError().ok(); if (removed) { log.debug("successfully removed {} blob(s) of json with objectId='{}'", result.getN(), id); } else { log.debug("did not remove any blob(s) of json with objectId='{}'", id); } return removed; } } log.debug("couldn't remove blob with id='{}'", id); throw new BlobNotFoundException(id); } } @Override public void start() throws Exception { scheduledExecutorService.scheduleWithFixedDelay( new BlobCleanupJob(collection, blobAccessTtl, metricRegistry), 0, blobCleanupFrequency.getQuantity(), blobCleanupFrequency.getUnit() ); } @Override public void stop() throws Exception { // nothing to do } }
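Each CRUD method in the BlobManager above wraps its body in a Dropwizard Metrics timer through try-with-resources. A minimal sketch of that instrumentation pattern, using only the MetricRegistry and Timer API already visible in the class (the lookup body is a placeholder):

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;

public class TimedOperationSketch {
    private final Timer readTimer;

    public TimedOperationSketch(MetricRegistry metrics) {
        this.readTimer = metrics.timer(MetricRegistry.name(TimedOperationSketch.class, "read"));
    }

    public String read(String id) {
        // Timer.Context is Closeable, so the elapsed time is recorded when the block exits.
        try (Timer.Context context = readTimer.time()) {
            return "blob-" + id; // stand-in for the real MongoDB lookup
        }
    }
}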
src/main/java/com/lowtuna/jsonblob/core/BlobManager.java
package com.lowtuna.jsonblob.core; import com.codahale.metrics.CachedGauge; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.mongodb.BasicDBObject; import com.mongodb.BasicDBObjectBuilder; import com.mongodb.DB; import com.mongodb.DBCollection; import com.mongodb.DBObject; import com.mongodb.WriteResult; import com.mongodb.util.JSON; import com.mongodb.util.JSONParseException; import io.dropwizard.lifecycle.Managed; import io.dropwizard.util.Duration; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.bson.types.ObjectId; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.util.Date; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @Slf4j public class BlobManager implements Managed { public static final String UPDATED_ATTR_NAME = "updated"; public static final String CREATED_ATTR_NAME = "created"; public static final String ACCESSED_ATTR_NAME = "accessed"; private final ScheduledExecutorService scheduledExecutorService; private final Duration blobCleanupFrequency; @Getter private final Duration blobAccessTtl; private final MetricRegistry metricRegistry; private final DBCollection collection; private final Timer createTimer; private final Timer readTimer; private final Timer updateTimer; private final Timer deleteTimer; public BlobManager(DB mongoDb, String blobCollectionName, ScheduledExecutorService scheduledExecutorService, Duration blobCleanupFrequency, Duration blobAccessTtl, MetricRegistry metrics) { this.scheduledExecutorService = scheduledExecutorService; this.blobCleanupFrequency = blobCleanupFrequency; this.blobAccessTtl = blobAccessTtl; this.metricRegistry = metrics; this.collection = mongoDb.getCollection(blobCollectionName); this.createTimer = metrics.timer(MetricRegistry.name(getClass(), "create")); this.readTimer = metrics.timer(MetricRegistry.name(getClass(), "read")); this.updateTimer = metrics.timer(MetricRegistry.name(getClass(), "update")); this.deleteTimer = metrics.timer(MetricRegistry.name(getClass(), "delete")); metrics.register(MetricRegistry.name(getClass(), "blobCount"), new CachedGauge<Long>(1, TimeUnit.HOURS) { @Override protected Long loadValue() { return collection.count(); } }); } private BasicDBObject getDBObject(ObjectId objectId) { return new BasicDBObject("_id", objectId); } private DBObject createDBObject(String json, boolean setCreated) { final DateTime now = DateTime.now(DateTimeZone.UTC); BasicDBObjectBuilder builder = BasicDBObjectBuilder .start(UPDATED_ATTR_NAME, new Date(now.getMillis())) .append(ACCESSED_ATTR_NAME, new Date(now.getMillis())) .append("blob", JSON.parse(json)); if (setCreated) { builder = builder.append(CREATED_ATTR_NAME, new Date(now.getMillis())); } return builder.get(); } public static boolean isValidJson(String json) { try { JSON.parse(json); return true; } catch (JSONParseException e) { return false; } } public DBObject create(String json) { try (Timer.Context timerContext = createTimer.time()) { log.debug("inserting blob with json='{}'", json); DBObject parsed = createDBObject(json, true); collection.insert(parsed); log.debug("successfully inserted blob of json as objectId='{}'", parsed.get("_id")); return parsed; } } public DBObject read(final ObjectId id) throws BlobNotFoundException { try (Timer.Context timerContext = readTimer.time()) { log.debug("attempting to retrieve blob with id='{}'", id); DBObject objectId = getDBObject(id); if (objectId != null) { log.debug("finding blob with objectId='{}'", 
objectId); final DBObject obj = collection.findOne(objectId); if (obj != null) { final DateTime accessed = DateTime.now(DateTimeZone.UTC); scheduledExecutorService.submit(new Runnable() { @Override public void run() { log.debug("updating last accessed time for block with objectId='{}' to {}", id, accessed); BasicDBObject updatedAccessedDbObject = new BasicDBObject(); updatedAccessedDbObject.append("$set", new BasicDBObject().append(ACCESSED_ATTR_NAME, new Date(accessed.getMillis()))); collection.update(obj, updatedAccessedDbObject, false, false); log.debug("updated last accessed time for block with objectId='{}' to {}", id, accessed); } }); return obj; } } log.debug("couldn't retrieve blob with id='{}'", id); throw new BlobNotFoundException(id); } } public DBObject update(ObjectId id, String json) throws BlobNotFoundException { try (Timer.Context timerContext = updateTimer.time()) { log.debug("attempting to update blob with id='{}' and json='{}'", id, json); DBObject objectId = getDBObject(id); if (objectId != null) { log.debug("finding blob to update with objectId='{}'", objectId); DBObject obj = collection.findOne(objectId); if (obj != null) { DBObject parsed = createDBObject(json, false); collection.update(obj, parsed); log.debug("successfully updated blob of json with objectId='{}'", id); return parsed; } } log.debug("couldn't update blob with id='{}'", id); throw new BlobNotFoundException(id); } } public boolean delete(ObjectId id) throws BlobNotFoundException { try (Timer.Context timerContext = deleteTimer.time();) { log.debug("attempting to delete blob with id='{}'", id); DBObject objectId = getDBObject(id); if (objectId != null) { log.debug("finding blob to delete with objectId='{}'", objectId); DBObject obj = collection.findOne(objectId); if (obj != null) { WriteResult result = collection.remove(obj); boolean removed = result.getN() > 0 && result.getLastError().ok(); if (removed) { log.debug("successfully removed {} blob(s) of json with objectId='{}'", result.getN(), id); } else { log.debug("did not remove any blob(s) of json with objectId='{}'", id); } return removed; } } log.debug("couldn't remove blob with id='{}'", id); throw new BlobNotFoundException(id); } } @Override public void start() throws Exception { BlobCleanupJob blobCleanupJob = new BlobCleanupJob(collection, blobAccessTtl, metricRegistry); scheduledExecutorService.scheduleWithFixedDelay( blobCleanupJob, 0, blobCleanupFrequency.getQuantity(), blobCleanupFrequency.getUnit()); } @Override public void stop() throws Exception { // nothing to do } }
formatting
src/main/java/com/lowtuna/jsonblob/core/BlobManager.java
formatting
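BlobManager.read above defers the last-accessed bookkeeping to the executor so the read path can return immediately; the write itself is a plain $set on the accessed field. A minimal sketch of that deferred-touch pattern with the same legacy MongoDB driver types (field name and executor wiring are illustrative):

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;

import java.util.Date;
import java.util.concurrent.ExecutorService;

public class DeferredAccessedTouchSketch {
    public static void touch(ExecutorService executor, DBCollection collection, DBObject blob) {
        final Date accessed = new Date();
        executor.submit(() -> {
            // Only the "accessed" field changes; the stored blob body is left untouched.
            BasicDBObject update = new BasicDBObject("$set", new BasicDBObject("accessed", accessed));
            collection.update(blob, update, false, false); // no upsert, single document
        });
    }
}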
Java
apache-2.0
96808086d5431f4116a0308bba6f472b40b6b77a
0
diennea/herddb,diennea/herddb,eolivelli/herddb,eolivelli/herddb,diennea/herddb,diennea/herddb,eolivelli/herddb,eolivelli/herddb
package herddb.core; import java.util.Collection; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; import java.util.logging.Logger; import herddb.utils.ListWithMap; /** * Basic implementation of CAR algorithm. * * Based on the original work: * * <pre> * CAR: Clock with Adaptive Replacement * * Sorav Bansal† and Dharmendra S. Modha‡ * * †Stanford University, * ‡IBM Almaden Research Center * </pre> * </p> * * @see http://www-cs.stanford.edu/~sbansal/pubs/fast04.pdf * @author diego.salvi */ public class ClockAdaptiveReplacement implements PageReplacementPolicy { /** Class logger */ private static final Logger LOGGER = Logger.getLogger(ClockAdaptiveReplacement.class.getName()); /** * This <i>constant</i> rules out unnecessary and expensive logs at compile level if set to {@code true}. * Note: it <b>must</b> be static final to successfully work. */ private static final boolean COMPILE_EXPENSIVE_LOGS = false; /** Capacity */ private final int c; /** Recency clock */ private final ListWithMap<DataPage> t1; /** Frequency clock */ private final ListWithMap<DataPage> t2; /** Unloaded recency */ private final ListWithMap<Long> b1; /** Unloaded frequency */ private final ListWithMap<Long> b2; /** Self tuned parameter (target size of T1) */ private int p; /** Modification lock */ private final Lock lock = new ReentrantLock(); public ClockAdaptiveReplacement(int capacity) { c = capacity; p = 0; t1 = new ListWithMap<>(); t2 = new ListWithMap<>(); b1 = new ListWithMap<>(); b2 = new ListWithMap<>(); } @Override public int capacity() { return c; } @Override public int size() { return t1.size() + t2.size(); } @Override public DataPage add(DataPage page) { lock.lock(); try { return unsafeAdd(page); } finally { lock.unlock(); } } public DataPage pop() { lock.lock(); try { return unsafeReplace(); } finally { lock.unlock(); } } @Override public void remove(Collection<DataPage> pages) { lock.lock(); try { for(DataPage page : pages) { unsafeRemove(page); } } finally { lock.unlock(); } } @Override public boolean remove(DataPage page) { lock.lock(); try { return unsafeRemove(page); } finally { lock.unlock(); } } @Override public void clear() { lock.lock(); try { t1.clear(); t2.clear(); b1.clear(); b2.clear(); p = 0; } finally { lock.unlock(); } } /* *********************** */ /* *** PRIVATE METHODS *** */ /* *********************** */ private DataPage unsafeAdd(DataPage page) { if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Status[add started]: p = {0}, |T1| = {1}, |T2| = {2}, |B1| = {3}, |B2| = {4}, adding {5}", new Object[] {p, t1.size(), t2.size(), b1.size(), b2.size(), page}); /* Assumes that a cache miss occurred and a page had to be loaded. */ // 4: if (|T1| + |T2| = c) then // /* cache full, replace a page from cache */ // 5: replace() // /* cache directory replacement */ // 6: if ((x is not in B1 ∪ B2) and (|T1| + |B1| = c)) then // 7: Discard the LRU page in B1. // 8: elseif ((|T1| + |T2| + |B1| + |B2| = 2c) and (x is not in B1 ∪ B2)) then // 9: Discard the LRU page in B2. // 10: endif /* * We take it for granted that the cache does NOT contain the page in question. Given the rest of the * code, if b1 or b2 contain the page they will keep containing it until it is explicitly removed (there are * polls in the cache directory replacement, but they run only if b1 AND b2 do NOT contain the * page).
*/ final boolean b1Hit = b1.contains(page.pageId); final boolean b2Hit = b2.contains(page.pageId); DataPage replaced = null; if (t1.size() + t2.size() == c) { if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 + T2 = c: looking for a replacement for {0}", page); /* cache full, replace a page from cache */ replaced = unsafeReplace(); /* cache directory replacement */ if (!b1Hit && !b2Hit) { if (t1.size() + b1.size() == c) { if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 + B1 = c: discarding B1 {0}", b1.peek()); b1.poll(); } else if (t1.size() + t2.size() + b1.size() + b2.size() == 2*c){ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 + T2 + B1 + B2 = 2c: discarding B2 {0}", b2.peek()); b2.poll(); } } } // /* cache directory miss */ // 12: if (x is not in B1 ∪ B2) then // 13: Insert x at the tail of T1. Set the page reference bit of x to 0. // /* cache directory hit */ // 14: elseif (x is in B1) then // 15: Adapt: Increase the target size for the list T1 as: p = min {p + max{1, |B2|/|B1|}, c} // 16: Move x at the tail of T2. Set the page reference bit of x to 0. // /* cache directory hit */ // 17: else /* x must be in B2 */ // 18: Adapt: Decrease the target size for the list T1 as: p = max {p − max{1, |B1|/|B2|}, 0} // 19: Move x at the tail of T2. Set the page reference bit of x to 0. // 20: endif if (!b1Hit && !b2Hit) { /* cache directory miss */ /* Insert x at the tail of T1. Set the page reference bit of x to 0. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Not in B1 U B2: insert {0} into T1 tail", page); page.reference = false; t1.append(page); } else if (b1Hit) { /* cache directory hit */ /* Move x at the tail of T2. Set the page reference bit of x to 0. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "In B1: insert {0} into T2 tail", page); page.reference = false; b1.remove(page.pageId); t2.append(page); /* Adapt: Increase the target size for the list T1 as: p = min {p + max{1, |B2|/|B1|}, c} */ int b1Size = b1.size(); p = b1Size>0 ? Math.min(p + Math.max(1, b2.size() / b1Size), c) : c; if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Adapt: p = min {p + max{1, |B2|/|B1|}, c} = {0}", p); } else { /* x must be in B2 */ /* Move x at the tail of T2. Set the page reference bit of x to 0. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "In B2: insert {0} into T2 tail", page); page.reference = false; b2.remove(page.pageId); t2.append(page); /* Adapt: Decrease the target size for the list T1 as: p = max {p − max{1, |B1|/|B2|}, 0} */ p = Math.max(p - Math.max(1, b1.size() / b2.size()), 0); if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Adapt: p = max {p − max{1, |B1|/|B2|}, 0} = {0}", p); } if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Status[add completed]: p = {0}, |T1| = {1}, |T2| = {2}, |B1| = {3}, |B2| = {4}, replaced {5}", new Object[] {p, t1.size(), t2.size(), b1.size(), b2.size(), replaced}); return replaced; } private DataPage unsafeReplace() { // 22: found = 0 // 23: repeat // 24: if (|T1| >= max(1, p)) then // 25: if (the page reference bit of head page in T1 is 0) then // 26: found = 1; // 27: Demote the head page in T1 and make it the MRU page in B1. // 28: else // 29: Set the page reference bit of head page in T1 to 0, and make it the tail page in T2. // 30: endif // 31: else // 32: if (the page reference bit of head page in T2 is 0), then // 33: found = 1; // 34: Demote the head page in T2 and make it the MRU page in B2. 
// 35: else // 36: Set the page reference bit of head page in T2 to 0, and make it the tail page in T2. // 37: endif // 38: endif // 39: until (found) while(true) { if (t1.size() >= Math.max(1, p)) { final DataPage t1h = t1.poll(); if (t1h.reference == false) { /* Demote the head page in T1 and make it the MRU page in B1. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 head {0} not referenced: demote to B1 MRU", t1h); b1.append(t1h.pageId); return t1h; } else { /* Set the page reference bit of head page in T1 to 0, and make it the tail page in T2. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 head {0} not referenced: move into T2 tail", t1h); t1h.reference = false; t2.append(t1h); } } else { final DataPage t2h = t2.poll(); if (t2h.reference == false) { /* Demote the head page in T2 and make it the MRU page in B2. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T2 head {0} not referenced: demote to B2 MRU", t2h); b2.append(t2h.pageId); return t2h; } else { /* Set the page reference bit of head page in T2 to 0, and make it the tail page in T2. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T2 head {0} not referenced: move into T2 tail", t2h); t2h.reference = false; t2.append(t2h); } } } } private boolean unsafeRemove(DataPage page) { /* Custom addition to CAR algorithm, we need to drop pages too */ final DataPage t1r = t1.remove(page); if (t1r != null) { /* Demote the head page in T1 and make it the MRU page in B1. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Removing T1 element: demote to B1 MRU", t1r); b1.append(page.pageId); return true; } final DataPage t2r = t2.remove(page); if (t2r != null) { /* Demote the head page in T2 and make it the MRU page in B2. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Removing T2 element: demote to B2 MRU", t1r); b2.append(page.pageId); return true; } return false; } }
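The visible behavioural difference between the new and old ClockAdaptiveReplacement is the guard in the B1-hit branch of unsafeAdd: the page has just been removed from B1, so the old p = min(p + max(1, |B2|/|B1|), c) could divide by zero once B1 became empty. A small standalone sketch of the two adaptation rules with that guard (the sample sizes are illustrative):

public class CarAdaptationSketch {
    // Hit in B1: grow the target size of T1, guarding against an empty B1.
    static int increaseTarget(int p, int c, int b1Size, int b2Size) {
        return b1Size > 0 ? Math.min(p + Math.max(1, b2Size / b1Size), c) : c;
    }

    // Hit in B2: shrink the target size of T1; B2 is non-empty on this path.
    static int decreaseTarget(int p, int b1Size, int b2Size) {
        return Math.max(p - Math.max(1, b1Size / b2Size), 0);
    }

    public static void main(String[] args) {
        System.out.println(increaseTarget(3, 16, 0, 5)); // 16: an empty B1 pushes p to capacity
        System.out.println(increaseTarget(3, 16, 2, 8)); // 7
        System.out.println(decreaseTarget(7, 2, 8));     // 6
    }
}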
herddb-core/src/main/java/herddb/core/ClockAdaptiveReplacement.java
package herddb.core; import java.util.Collection; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; import java.util.logging.Logger; import herddb.utils.ListWithMap; /** * Basic implementation of CAR algorithm. * * Based on the original work: * * <pre> * CAR: Clock with Adaptive Replacement * * Sorav Bansal† and Dharmendra S. Modha‡ * * †Stanford University, * ‡IBM Almaden Research Center * </pre> * </p> * * @see http://www-cs.stanford.edu/~sbansal/pubs/fast04.pdf * @author diego.salvi */ public class ClockAdaptiveReplacement implements PageReplacementPolicy { /** Class logger */ private static final Logger LOGGER = Logger.getLogger(ClockAdaptiveReplacement.class.getName()); /** * This <i>constants</i> rules out unecessary and expensive logs at compile level if set to {@code true}. * Note: it <b>must</b> be static final to succesfully work. */ private static final boolean COMPILE_EXPENSIVE_LOGS = false; /** Capacity */ private final int c; /** Recency clock */ private final ListWithMap<DataPage> t1; /** Frequency clock */ private final ListWithMap<DataPage> t2; /** Unloaded recency */ private final ListWithMap<Long> b1; /** Unloaded frequency */ private final ListWithMap<Long> b2; /** Self tuned parameter (target size of T1) */ private int p; /** Modification lock */ private final Lock lock = new ReentrantLock(); public ClockAdaptiveReplacement(int capacity) { c = capacity; p = 0; t1 = new ListWithMap<>(); t2 = new ListWithMap<>(); b1 = new ListWithMap<>(); b2 = new ListWithMap<>(); } @Override public int capacity() { return c; } @Override public int size() { return t1.size() + t2.size(); } @Override public DataPage add(DataPage page) { lock.lock(); try { return unsafeAdd(page); } finally { lock.unlock(); } } public DataPage pop() { lock.lock(); try { return unsafeReplace(); } finally { lock.unlock(); } } @Override public void remove(Collection<DataPage> pages) { lock.lock(); try { for(DataPage page : pages) { unsafeRemove(page); } } finally { lock.unlock(); } } @Override public boolean remove(DataPage page) { lock.lock(); try { return unsafeRemove(page); } finally { lock.unlock(); } } @Override public void clear() { lock.lock(); try { t1.clear(); t2.clear(); b1.clear(); b2.clear(); p = 0; } finally { lock.unlock(); } } /* *********************** */ /* *** PRIVATE METHODS *** */ /* *********************** */ private DataPage unsafeAdd(DataPage page) { if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Status[add started]: p = {0}, |T1| = {1}, |T2| = {2}, |B1| = {3}, |B2| = {4}, adding {5}", new Object[] {p, t1.size(), t2.size(), b1.size(), b2.size(), page}); /* Assume che sia avvenuto un cache miss e sia stato necessario caricare una pagina. */ // 4: if (|T1| + |T2| = c) then // /* cache full, replace a page from cache */ // 5: replace() // /* cache directory replacement */ // 6: if ((x is not in B1 ∪ B2) and (|T1| + |B1| = c)) then // 7: Discard the LRU page in B1. // 8: elseif ((|T1| + |T2| + |B1| + |B2| = 2c) and (x is not in B1 ∪ B2)) then // 9: Discard the LRU page in B2. // 10: endif /* * Diamo per assodato che la cache NON contiene la pagina in questione. Dato dunque il resto del * codice se v1 o b2 contengono la pagina lo faranno sempre finché non esplicitamente rimossi (ci sono * dei poll nel cache directory replacement ma vengono eseguiti solo se b1 E b2 NON contengono la * pagina). 
*/ final boolean b1Hit = b1.contains(page.pageId); final boolean b2Hit = b2.contains(page.pageId); DataPage replaced = null; if (t1.size() + t2.size() == c) { if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 + T2 = c: looking for a replacement for {0}", page); /* cache full, replace a page from cache */ replaced = unsafeReplace(); /* cache directory replacement */ if (!b1Hit && !b2Hit) { if (t1.size() + b1.size() == c) { if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 + B1 = c: discarding B1 {0}", b1.peek()); b1.poll(); } else if (t1.size() + t2.size() + b1.size() + b2.size() == 2*c){ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 + T2 + B1 + B2 = 2c: discarding B2 {0}", b2.peek()); b2.poll(); } } } // /* cache directory miss */ // 12: if (x is not in B1 ∪ B2) then // 13: Insert x at the tail of T1. Set the page reference bit of x to 0. // /* cache directory hit */ // 14: elseif (x is in B1) then // 15: Adapt: Increase the target size for the list T1 as: p = min {p + max{1, |B2|/|B1|}, c} // 16: Move x at the tail of T2. Set the page reference bit of x to 0. // /* cache directory hit */ // 17: else /* x must be in B2 */ // 18: Adapt: Decrease the target size for the list T1 as: p = max {p − max{1, |B1|/|B2|}, 0} // 19: Move x at the tail of T2. Set the page reference bit of x to 0. // 20: endif if (!b1Hit && !b2Hit) { /* cache directory miss */ /* Insert x at the tail of T1. Set the page reference bit of x to 0. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Not in B1 U B2: insert {0} into T1 tail", page); page.reference = false; t1.append(page); } else if (b1Hit) { /* cache directory hit */ /* Move x at the tail of T2. Set the page reference bit of x to 0. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "In B1: insert {0} into T2 tail", page); page.reference = false; b1.remove(page.pageId); t2.append(page); /* Adapt: Increase the target size for the list T1 as: p = min {p + max{1, |B2|/|B1|}, c} */ p = Math.min(p + Math.max(1, b2.size() / b1.size()), c); if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Adapt: p = min {p + max{1, |B2|/|B1|}, c} = {0}", p); } else { /* x must be in B2 */ /* Move x at the tail of T2. Set the page reference bit of x to 0. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "In B2: insert {0} into T2 tail", page); page.reference = false; b2.remove(page.pageId); t2.append(page); /* Adapt: Decrease the target size for the list T1 as: p = max {p − max{1, |B1|/|B2|}, 0} */ p = Math.max(p - Math.max(1, b1.size() / b2.size()), 0); if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Adapt: p = max {p − max{1, |B1|/|B2|}, 0} = {0}", p); } if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Status[add completed]: p = {0}, |T1| = {1}, |T2| = {2}, |B1| = {3}, |B2| = {4}, replaced {5}", new Object[] {p, t1.size(), t2.size(), b1.size(), b2.size(), replaced}); return replaced; } private DataPage unsafeReplace() { // 22: found = 0 // 23: repeat // 24: if (|T1| >= max(1, p)) then // 25: if (the page reference bit of head page in T1 is 0) then // 26: found = 1; // 27: Demote the head page in T1 and make it the MRU page in B1. // 28: else // 29: Set the page reference bit of head page in T1 to 0, and make it the tail page in T2. // 30: endif // 31: else // 32: if (the page reference bit of head page in T2 is 0), then // 33: found = 1; // 34: Demote the head page in T2 and make it the MRU page in B2. // 35: else // 36: Set the page reference bit of head page in T2 to 0, and make it the tail page in T2. 
// 37: endif // 38: endif // 39: until (found) while(true) { if (t1.size() >= Math.max(1, p)) { final DataPage t1h = t1.poll(); if (t1h.reference == false) { /* Demote the head page in T1 and make it the MRU page in B1. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 head {0} not referenced: demote to B1 MRU", t1h); b1.append(t1h.pageId); return t1h; } else { /* Set the page reference bit of head page in T1 to 0, and make it the tail page in T2. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T1 head {0} not referenced: move into T2 tail", t1h); t1h.reference = false; t2.append(t1h); } } else { final DataPage t2h = t2.poll(); if (t2h.reference == false) { /* Demote the head page in T2 and make it the MRU page in B2. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T2 head {0} not referenced: demote to B2 MRU", t2h); b2.append(t2h.pageId); return t2h; } else { /* Set the page reference bit of head page in T2 to 0, and make it the tail page in T2. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "T2 head {0} not referenced: move into T2 tail", t2h); t2h.reference = false; t2.append(t2h); } } } } private boolean unsafeRemove(DataPage page) { /* Custom addition to CAR algorithm, we need to drop pages too */ final DataPage t1r = t1.remove(page); if (t1r != null) { /* Demote the head page in T1 and make it the MRU page in B1. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Removing T1 element: demote to B1 MRU", t1r); b1.append(page.pageId); return true; } final DataPage t2r = t2.remove(page); if (t2r != null) { /* Demote the head page in T2 and make it the MRU page in B2. */ if (COMPILE_EXPENSIVE_LOGS) LOGGER.log(Level.SEVERE, "Removing T2 element: demote to B2 MRU", t1r); b2.append(page.pageId); return true; } return false; } }
fix division by zero on CAR
herddb-core/src/main/java/herddb/core/ClockAdaptiveReplacement.java
fix division by zero on CAR
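For context, the division by zero fixed above sits in unsafeAdd's B1-hit branch: b1.remove(page.pageId) runs before the adaptation step, so when that entry was the last one in B1 the old expression b2.size() / b1.size() divided by zero. A minimal sketch of the guarded adaptation, lifted from the new version of the file above:

    /* Adapt: p = min{p + max{1, |B2|/|B1|}, c}. The b1.remove(...) above may
       have just emptied B1, so cache its size and fall back to p = c instead
       of dividing by zero. */
    int b1Size = b1.size();
    p = b1Size > 0 ? Math.min(p + Math.max(1, b2.size() / b1Size), c) : c;

Note that the symmetric B2-hit branch still evaluates b1.size() / b2.size() right after b2.remove(page.pageId), so that division follows the same pattern and is not covered by this commit's guard.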
Java
apache-2.0
7167d2d1917d035ffb95841ccb047747e168e40d
0
JBYoshi/GitUpdate
/* * Copyright (c) 2015 JBYoshi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jbyoshi.gitupdate.processor; import java.util.*; import org.eclipse.jgit.api.*; import org.eclipse.jgit.lib.*; import org.eclipse.jgit.transport.*; import com.google.common.collect.*; import jbyoshi.gitupdate.*; public final class Push extends Processor { @Override public void registerTasks(Repository repo, Git git, Task root) throws Exception { Task me = root.newChild(getClass().getSimpleName()); // Group the branches by their remotes. Multimap<String, String> branchList = HashMultimap.create(); for (String branch : Utils.getLocalBranches(repo).keySet()) { String remote = new BranchConfig(repo.getConfig(), branch).getRemote(); if (remote != null) { branchList.put(remote, branch); } } for (Map.Entry<String, Collection<String>> remote : branchList.asMap().entrySet()) { me.newChild(remote.getKey(), report -> { try { process(repo, git, remote.getKey(), remote.getValue(), report); } catch (Exception e) { report.newErrorChild(e); } }); } } private static void process(Repository repo, Git git, String remote, Collection<String> branches, Report report) throws Exception { // Figure out if anything needs to be pushed. Map<String, ObjectId> oldIds = new HashMap<>(); boolean canPush = false; for (String branch : branches) { BranchConfig config = new BranchConfig(repo.getConfig(), branch); ObjectId target = repo.getRef(branch).getObjectId(); Ref remoteRef = repo.getRef(config.getRemoteTrackingBranch()); if (remoteRef == null || !target.equals(remoteRef.getObjectId())) { canPush = true; } oldIds.put(branch, remoteRef == null ? ObjectId.zeroId() : remoteRef.getObjectId()); } if (!canPush) { return; } PushCommand push = git.push().setCredentialsProvider(Prompts.INSTANCE).setTimeout(5) .setRemote(remote); for (String branch : branches) { push.add(Constants.R_HEADS + branch); } for (PushResult result : push.call()) { for (RemoteRefUpdate update : result.getRemoteUpdates()) { if (update.getStatus() == RemoteRefUpdate.Status.OK) { String branchName = Utils.getShortBranch(update.getSrcRef()); ObjectId oldId = oldIds.get(branchName); String old = oldId.equals(ObjectId.zeroId()) ? "new branch" : oldId.name(); report.newChild(branchName + ": " + old + " -> " + update.getNewObjectId().name()) .modified(); } } } } }
src/main/java/jbyoshi/gitupdate/processor/Push.java
/* * Copyright (c) 2015 JBYoshi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jbyoshi.gitupdate.processor; import java.util.*; import org.eclipse.jgit.api.*; import org.eclipse.jgit.lib.*; import org.eclipse.jgit.transport.*; import com.google.common.collect.*; import jbyoshi.gitupdate.*; public final class Push extends Processor { @Override public void registerTasks(Repository repo, Git git, Task root) throws Exception { Task me = root.newChild(getClass().getSimpleName()); // Group the branches by their remotes. Multimap<String, String> branchList = HashMultimap.create(); for (String branch : Utils.getLocalBranches(repo).keySet()) { String remote = new BranchConfig(repo.getConfig(), branch).getRemote(); if (remote != null) { branchList.put(remote, branch); } } for (Map.Entry<String, Collection<String>> remote : branchList.asMap().entrySet()) { me.newChild(remote.getKey(), report -> { try { process(repo, git, remote.getKey(), Constants.R_REMOTES + remote.getKey() + "/", remote.getValue(), report); } catch (Exception e) { report.newErrorChild(e); } }); } } private static void process(Repository repo, Git git, String remote, String fullRemote, Collection<String> branches, Report report) throws Exception { // Figure out if anything needs to be pushed. Map<String, ObjectId> oldIds = new HashMap<>(); boolean canPush = false; for (Iterator<String> it = branches.iterator(); it.hasNext();) { String branch = it.next(); BranchConfig config = new BranchConfig(repo.getConfig(), branch); ObjectId target = repo.getRef(branch).getObjectId(); Ref remoteRef = repo.getRef(config.getRemoteTrackingBranch()); if (remoteRef == null || !target.equals(remoteRef.getObjectId())) { canPush = true; } oldIds.put(branch, remoteRef == null ? ObjectId.zeroId() : remoteRef.getObjectId()); } if (!canPush) { return; } PushCommand push = git.push().setCredentialsProvider(Prompts.INSTANCE).setTimeout(5) .setRemote(remote); for (String branch : branches) { push.add(Constants.R_HEADS + branch); } for (PushResult result : push.call()) { for (RemoteRefUpdate update : result.getRemoteUpdates()) { if (update.getStatus() == RemoteRefUpdate.Status.OK) { String branchName = Utils.getShortBranch(update.getSrcRef()); ObjectId oldId = oldIds.get(branchName); String old = oldId.equals(ObjectId.zeroId()) ? "new branch" : oldId.name(); report.newChild(branchName + ": " + old + " -> " + update.getNewObjectId().name()) .modified(); } } } } }
Fix a few more warnings.
src/main/java/jbyoshi/gitupdate/processor/Push.java
Fix a few more warnings.
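The warnings addressed above are the unused fullRemote parameter (built from Constants.R_REMOTES but never read inside process) and an explicit Iterator that was only ever advanced with next(). The loop change reduces to this generic, self-contained sketch (plain Java; the LoopSketch class and branch names are illustrative, not part of the project):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.Iterator;

    public class LoopSketch {
        public static void main(String[] args) {
            Collection<String> branches = Arrays.asList("master", "feature");

            // Before: an explicit Iterator, although remove() is never called.
            for (Iterator<String> it = branches.iterator(); it.hasNext();) {
                String branch = it.next();
                System.out.println("inspect " + branch);
            }

            // After: an enhanced for loop expresses the same read-only traversal.
            for (String branch : branches) {
                System.out.println("inspect " + branch);
            }
        }
    }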
Java
apache-2.0
dd35dbc1443cd1f641cbbc4b69b4ea1f60fb8618
0
nikelin/Redshape-AS,nikelin/Redshape-AS
package com.redshape.servlet.views; import com.redshape.servlet.WebApplication; import com.redshape.servlet.actions.exceptions.PageNotFoundException; import com.redshape.servlet.core.IHttpRequest; import com.redshape.servlet.core.controllers.Action; import com.redshape.servlet.core.controllers.IAction; import com.redshape.servlet.core.controllers.ProcessingException; import com.redshape.servlet.core.controllers.registry.IControllersRegistry; import com.redshape.servlet.dispatchers.http.IHttpDispatcher; import com.redshape.utils.config.ConfigException; import com.redshape.utils.config.IConfig; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import java.util.regex.Pattern; /** * @author Cyril A. Karpenko <[email protected]> * @package com.redshape.servlet.views * @date 8/21/11 12:46 PM */ public final class ViewHelper { private static final Pattern protocolMatcher = Pattern.compile("(.+?)://(.*?)"); private static ThreadLocal<IHttpRequest> localRequest = new ThreadLocal<IHttpRequest>(); public static IView getLocalView() { if ( getLocalHttpRequest() == null ) { return null; } return WebApplication.getContext() .getBean(IViewsFactory.class) .getView( getLocalHttpRequest() ); } public static void setLocalHttpRequest( IHttpRequest request ) { localRequest.set( request ); } protected static IHttpRequest getLocalHttpRequest() { return localRequest.get(); } public static String url( String url ) { try { if ( url == null ) { return null; } if ( protocolMatcher.matcher(url).find() ) { return url; } String servletPath = getConfig().get("web").get("servletPath").value(); if ( !url.startsWith( servletPath ) ) { return servletPath + normalizeUrl(url); } return normalizeUrl(url); } catch ( ConfigException e ) { return url; } } public static String actionName( IAction action ) { Action actionMeta = action.getClass().getAnnotation(Action.class); if ( actionMeta == null ) { return null; } return actionMeta.name(); } public static String controllerName( IAction action ) { Action actionMeta = action.getClass().getAnnotation(Action.class); if ( actionMeta == null ) { return null; } return actionMeta.controller(); } private static String normalizeUrl( String url ) { if ( url.startsWith("/") ) { return url; } return "/" + url; } public static <T extends Serializable> String action( String controller, String action ) throws ProcessingException { return action(controller, action, new HashMap<String, Serializable>() ); } public static <T extends Serializable> String action( String controller, String action, Map<String, T> params ) throws ProcessingException { try { IAction actionInstance = WebApplication.getContext().getBean(IControllersRegistry.class) .getInstance(controller, action); if ( actionInstance == null ) { return url( WebApplication.getContext().getBean(IHttpDispatcher.class) .getExceptionHandler() .getPage404() ); } return action( actionInstance.getClass(), params ); } catch ( Throwable e ) { throw new PageNotFoundException(); } } public static String action( Class<? extends IAction> action ) { return action( action, new HashMap<String, String>() ); } public static <T extends Serializable> String action( Class<? 
extends IAction> action, Map<String, T> params ) { Action actionMeta = action.getAnnotation( Action.class ); if ( actionMeta == null ) { return null; } StringBuilder url = new StringBuilder(); try { url.append(getConfig().get("web").get("servletPath").value()); } catch ( ConfigException e ) { throw new IllegalStateException("Config related exception", e ); } url.append("/") .append( actionMeta.controller() ) .append( "/" ) .append( actionMeta.name() ); if ( !params.isEmpty() ) { url.append("?"); } int i = 0; for ( String key : params.keySet() ) { url.append( key ).append("=").append( params.get(key) ); if ( i++ != params.size() - 1 ) { url.append("&amp;"); } } return url.toString(); } private static IConfig getConfig() { return WebApplication.getContext().getBean(IConfig.class); } }
servlet/src/main/java/com/redshape/servlet/views/ViewHelper.java
package com.redshape.servlet.views; import com.redshape.servlet.WebApplication; import com.redshape.servlet.actions.exceptions.PageNotFoundException; import com.redshape.servlet.core.IHttpRequest; import com.redshape.servlet.core.controllers.Action; import com.redshape.servlet.core.controllers.IAction; import com.redshape.servlet.core.controllers.ProcessingException; import com.redshape.servlet.core.controllers.registry.IControllersRegistry; import com.redshape.servlet.dispatchers.http.IHttpDispatcher; import com.redshape.utils.config.ConfigException; import com.redshape.utils.config.IConfig; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import java.util.regex.Pattern; /** * @author Cyril A. Karpenko <[email protected]> * @package com.redshape.servlet.views * @date 8/21/11 12:46 PM */ public final class ViewHelper { private static final Pattern protocolMatcher = Pattern.compile("(.+?)://(.*?)"); private static ThreadLocal<IHttpRequest> localRequest = new ThreadLocal<IHttpRequest>(); public static IView getLocalView() { if ( getLocalHttpRequest() == null ) { return null; } return WebApplication.getContext() .getBean(IViewsFactory.class) .getView( getLocalHttpRequest() ); } public static void setLocalHttpRequest( IHttpRequest request ) { localRequest.set( request ); } protected static IHttpRequest getLocalHttpRequest() { return localRequest.get(); } public static String url( String url ) { try { if ( url == null ) { return null; } if ( protocolMatcher.matcher(url).find() ) { return url; } String servletPath = getConfig().get("web").get("servletPath").value(); if ( !url.startsWith( servletPath ) ) { return servletPath + normalizeUrl(url); } return normalizeUrl(url); } catch ( ConfigException e ) { return url; } } public static String actionName( IAction action ) { Action actionMeta = action.getClass().getAnnotation(Action.class); if ( actionMeta == null ) { return null; } return actionMeta.name(); } private static String normalizeUrl( String url ) { if ( url.startsWith("/") ) { return url; } return "/" + url; } public static <T extends Serializable> String action( String controller, String action ) throws ProcessingException { return action(controller, action, new HashMap<String, Serializable>() ); } public static <T extends Serializable> String action( String controller, String action, Map<String, T> params ) throws ProcessingException { try { IAction actionInstance = WebApplication.getContext().getBean(IControllersRegistry.class) .getInstance(controller, action); if ( actionInstance == null ) { return url( WebApplication.getContext().getBean(IHttpDispatcher.class) .getExceptionHandler() .getPage404() ); } return action( actionInstance.getClass(), params ); } catch ( Throwable e ) { throw new PageNotFoundException(); } } public static String action( Class<? extends IAction> action ) { return action( action, new HashMap<String, String>() ); } public static <T extends Serializable> String action( Class<? 
extends IAction> action, Map<String, T> params ) { Action actionMeta = action.getAnnotation( Action.class ); if ( actionMeta == null ) { return null; } StringBuilder url = new StringBuilder(); try { url.append(getConfig().get("web").get("servletPath").value()); } catch ( ConfigException e ) { throw new IllegalStateException("Config related exception", e ); } url.append("/") .append( actionMeta.controller() ) .append( "/" ) .append( actionMeta.name() ); if ( !params.isEmpty() ) { url.append("?"); } int i = 0; for ( String key : params.keySet() ) { url.append( key ).append("=").append( params.get(key) ); if ( i++ != params.size() - 1 ) { url.append("&amp;"); } } return url.toString(); } private static IConfig getConfig() { return WebApplication.getContext().getBean(IConfig.class); } }
* ViewHelper little improved
servlet/src/main/java/com/redshape/servlet/views/ViewHelper.java
* ViewHelper little improved
Java
apache-2.0
698e88f01243527964bd15a6abab0bebdde57486
0
salyh/geronimo-specs,apache/geronimo-specs,salyh/javamailspec,salyh/geronimo-specs,apache/geronimo-specs,apache/geronimo-specs,salyh/geronimo-specs
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package javax.xml.ws; import javax.xml.bind.JAXBContext; import javax.xml.namespace.QName; import javax.xml.ws.handler.HandlerResolver; import javax.xml.ws.spi.Provider; import javax.xml.ws.spi.ServiceDelegate; import java.net.URL; import java.util.Iterator; import java.util.concurrent.Executor; public class Service { public enum Mode { MESSAGE, PAYLOAD } protected Service(URL wsdlDocumentLocation, QName serviceName) { delegate = Provider.provider().createServiceDelegate(wsdlDocumentLocation, serviceName, getClass()); } public <T> T getPort(QName portName, Class<T> serviceEndpointInterface) { return (T) delegate.getPort(portName, serviceEndpointInterface); } public <T> T getPort(Class<T> serviceEndpointInterface) { return (T) delegate.getPort(serviceEndpointInterface); } public <T> T getPort(QName portName, Class<T> serviceEndpointInterface, WebServiceFeature... features) { return (T) delegate.getPort(portName, serviceEndpointInterface, features); } public <T> T getPort(Class<T> serviceEndpointInterface, WebServiceFeature... features) { return (T) delegate.getPort(serviceEndpointInterface, features); } public <T> T getPort(EndpointReference endpointReference, Class<T> serviceEndpointInterface, WebServiceFeature... features) { return (T) delegate.getPort(endpointReference, serviceEndpointInterface, features); } public void addPort(QName portName, String bindingId, String endpointAddress) { delegate.addPort(portName, bindingId, endpointAddress); } public <T>Dispatch<T> createDispatch(QName portName, Class<T> type, Mode mode) { return delegate.createDispatch(portName, type, mode); } public Dispatch<Object> createDispatch(QName portName, JAXBContext context, Mode mode) { return delegate.createDispatch(portName, context, mode); } public <T> Dispatch<T> createDispatch(QName portName, Class<T> type, Service.Mode mode, WebServiceFeature... features) { return delegate.createDispatch(portName, type, mode, features); } public <T> Dispatch<T> createDispatch(EndpointReference endpointReference, Class<T> type, Service.Mode mode, WebServiceFeature... features) { return delegate.createDispatch(endpointReference, type, mode, features); } public Dispatch<Object> createDispatch(QName portName, JAXBContext context, Service.Mode mode, WebServiceFeature... features) { return delegate.createDispatch(portName, context, mode, features); } public Dispatch<Object> createDispatch(EndpointReference endpointReference, JAXBContext context, Service.Mode mode, WebServiceFeature... 
features) { return delegate.createDispatch(endpointReference, context, mode, features); } public QName getServiceName() { return delegate.getServiceName(); } public Iterator<QName> getPorts() { return delegate.getPorts(); } public URL getWSDLDocumentLocation() { return delegate.getWSDLDocumentLocation(); } public HandlerResolver getHandlerResolver() { return delegate.getHandlerResolver(); } public void setHandlerResolver(HandlerResolver handlerResolver) { delegate.setHandlerResolver(handlerResolver); } public Executor getExecutor() { return delegate.getExecutor(); } public void setExecutor(Executor executor) { delegate.setExecutor(executor); } public static Service create(URL wsdlDocumentLocation, QName serviceName) { return new Service(wsdlDocumentLocation, serviceName); } public static Service create(QName serviceName) { return new Service(null, serviceName); } private ServiceDelegate delegate; }
geronimo-jaxws_2.1_spec/src/main/java/javax/xml/ws/Service.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package javax.xml.ws; import javax.xml.bind.JAXBContext; import javax.xml.namespace.QName; import javax.xml.ws.handler.HandlerResolver; import javax.xml.ws.spi.Provider; import javax.xml.ws.spi.ServiceDelegate; import java.net.URL; import java.util.Iterator; import java.util.concurrent.Executor; public class Service { public enum Mode { MESSAGE, PAYLOAD } protected Service(URL wsdlDocumentLocation, QName serviceName) { _delegate = Provider.provider().createServiceDelegate(wsdlDocumentLocation, serviceName, getClass()); } public <T> T getPort(QName portName, Class<T> serviceEndpointInterface) { return (T) _delegate.getPort(portName, serviceEndpointInterface); } public <T> T getPort(Class<T> serviceEndpointInterface) { return (T) _delegate.getPort(serviceEndpointInterface); } public <T> T getPort(QName portName, Class<T> serviceEndpointInterface, WebServiceFeature... features) { return (T) _delegate.getPort(portName, serviceEndpointInterface, features); } public <T> T getPort(Class<T> serviceEndpointInterface, WebServiceFeature... features) { return (T) _delegate.getPort(serviceEndpointInterface, features); } public <T> T getPort(EndpointReference endpointReference, Class<T> serviceEndpointInterface, WebServiceFeature... features) { return (T) _delegate.getPort(endpointReference, serviceEndpointInterface, features); } public void addPort(QName portName, String bindingId, String endpointAddress) { _delegate.addPort(portName, bindingId, endpointAddress); } public <T>Dispatch<T> createDispatch(QName portName, Class<T> type, Mode mode) { return _delegate.createDispatch(portName, type, mode); } public Dispatch<Object> createDispatch(QName portName, JAXBContext context, Mode mode) { return _delegate.createDispatch(portName, context, mode); } public <T> Dispatch<T> createDispatch(QName portName, Class<T> type, Service.Mode mode, WebServiceFeature... features) { return _delegate.createDispatch(portName, type, mode, features); } public <T> Dispatch<T> createDispatch(EndpointReference endpointReference, Class<T> type, Service.Mode mode, WebServiceFeature... features) { return _delegate.createDispatch(endpointReference, type, mode, features); } public Dispatch<Object> createDispatch(QName portName, JAXBContext context, Service.Mode mode, WebServiceFeature... features) { return _delegate.createDispatch(portName, context, mode, features); } public Dispatch<Object> createDispatch(EndpointReference endpointReference, JAXBContext context, Service.Mode mode, WebServiceFeature... 
features) { return _delegate.createDispatch(endpointReference, context, mode, features); } public QName getServiceName() { return _delegate.getServiceName(); } public Iterator<QName> getPorts() { return _delegate.getPorts(); } public URL getWSDLDocumentLocation() { return _delegate.getWSDLDocumentLocation(); } public HandlerResolver getHandlerResolver() { return _delegate.getHandlerResolver(); } public void setHandlerResolver(HandlerResolver handlerResolver) { _delegate.setHandlerResolver(handlerResolver); } public Executor getExecutor() { return _delegate.getExecutor(); } public void setExecutor(Executor executor) { _delegate.setExecutor(executor); } public static Service create(URL wsdlDocumentLocation, QName serviceName) { return new Service(wsdlDocumentLocation, serviceName); } public static Service create(QName serviceName) { return new Service(null, serviceName); } private ServiceDelegate _delegate; }
update variable name git-svn-id: 60e751271f50ec028ae56d425821c1c711e0b018@659402 13f79535-47bb-0310-9956-ffa450edef68
geronimo-jaxws_2.1_spec/src/main/java/javax/xml/ws/Service.java
update variable name
Java
apache-2.0
ac216520b907ebcfa4f30a6f6b9a5cf347b6b47e
0
code-distillery/baselining-maven-plugin
package net.distilledcode.maven.baselining; import org.apache.maven.it.VerificationException; import org.apache.maven.it.Verifier; import org.apache.maven.it.util.ResourceExtractor; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import java.io.File; import java.io.IOException; import static java.util.Arrays.asList; public class DummyApiIT { private static final String GROUP_ID = "net.distilledcode.maven.baselining-maven-plugin.it"; private static final String MAVEN_REPO_LOCAL = "maven.repo.local"; private static Verifier baseVerifier; @BeforeClass public static void setupClass() { try { baseVerifier = createVerifier("dummy-1.0.0"); baseVerifier.executeGoal("install"); baseVerifier.verifyErrorFreeLog(); baseVerifier.verifyTextInLog(BaselineMojo.MSG_NO_BASELINE); } catch (VerificationException e) { throw new RuntimeException(e); } catch (IOException e) { throw new RuntimeException(e); } } @AfterClass public static void tearDownClass() { try { baseVerifier.deleteArtifacts(GROUP_ID); baseVerifier = null; } catch (IOException e) { throw new RuntimeException(e); } } @Test public void noApiChanges() throws IOException, VerificationException { final Verifier verifier = createVerifier("dummy-1.0.1-SNAPSHOT"); verifier.executeGoal("package"); verifier.verifyErrorFreeLog(); verifier.verifyTextInLog(String.format(BaselineMojo.MSG_BASELINING, "1.0.0")); } @Test public void methodAdded() throws IOException, VerificationException { final Verifier verifier0 = createVerifier("dummy-1.0.1-SNAPSHOT"); verifier0.executeGoal("install"); final Verifier verifier = createVerifier("dummy-1.0.2"); try { verifier.executeGoal("package"); } catch (VerificationException e) { // build failure expected } verifier.verifyTextInLog(String.format(BaselineMojo.MSG_BASELINING, "1.0.0")); verifier.verifyTextInLog(String.format(BaselineMojo.MSG_RAISE_VERSION, "dummy", "1.1.0")); verifier.verifyTextInLog("BUILD FAILURE"); // cleanup installed artifacts verifier.deleteArtifacts(GROUP_ID, "dummy", "1.0.1-SNAPSHOT"); } private static Verifier createVerifier(final String testFolderName) throws IOException, VerificationException { final File testDir = ResourceExtractor.simpleExtractResources(DummyApiIT.class, "/" + testFolderName); final File settingsXml = new File(testDir.getParent(), "settings.xml"); // During release:prepare the system property maven.repo.local is set (probably // for some forked process). // Verifier gives precedence to the system property (correctly IMHO), rather than // the setting in settings.xml, which results in an incorrect local repository // during release:prepare and thus test failures. To work around this issue, we // temporarily clear the system property during the creation of the Verifier instance. final String originalLocalRepo = System.getProperty(MAVEN_REPO_LOCAL); System.clearProperty(MAVEN_REPO_LOCAL); final Verifier verifier= new Verifier(testDir.getAbsolutePath(), settingsXml.getAbsolutePath()); verifier.setCliOptions(asList("-s", settingsXml.getAbsolutePath())); if (originalLocalRepo != null) { System.setProperty(MAVEN_REPO_LOCAL, originalLocalRepo); } return verifier; } }
src/test/java/net/distilledcode/maven/baselining/DummyApiIT.java
package net.distilledcode.maven.baselining; import org.apache.maven.it.VerificationException; import org.apache.maven.it.Verifier; import org.apache.maven.it.util.ResourceExtractor; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import java.io.File; import java.io.IOException; import static java.util.Arrays.asList; public class DummyApiIT { private static final String GROUP_ID = "net.distilledcode.maven.baselining-maven-plugin.it"; private static final String MAVEN_REPO_LOCAL = "maven.repo.local"; private static Verifier baseVerifier; @BeforeClass public static void setupClass() { try { baseVerifier = createVerifier("dummy-1.0.0"); baseVerifier.executeGoal("install"); baseVerifier.verifyErrorFreeLog(); baseVerifier.verifyTextInLog(BaselineMojo.MSG_NO_BASELINE); } catch (VerificationException e) { throw new RuntimeException(e); } catch (IOException e) { throw new RuntimeException(e); } } @AfterClass public static void tearDownClass() { try { baseVerifier.deleteArtifacts(GROUP_ID); baseVerifier = null; } catch (IOException e) { throw new RuntimeException(e); } } @Test public void noApiChanges() throws IOException, VerificationException { final Verifier verifier = createVerifier("dummy-1.0.1-SNAPSHOT"); verifier.executeGoal("package"); verifier.verifyErrorFreeLog(); verifier.verifyTextInLog(String.format(BaselineMojo.MSG_BASELINING, "1.0.0")); } @Test public void methodAdded() throws IOException, VerificationException { final Verifier verifier0 = createVerifier("dummy-1.0.1-SNAPSHOT"); verifier0.executeGoal("install"); final Verifier verifier = createVerifier("dummy-1.0.2"); try { verifier.executeGoal("package"); } catch (VerificationException e) { // build failure expected } verifier.verifyTextInLog(String.format(BaselineMojo.MSG_BASELINING, "1.0.0")); verifier.verifyTextInLog(String.format(BaselineMojo.MSG_RAISE_VERSION, "dummy", "1.1.0")); verifier.verifyTextInLog("BUILD FAILURE"); // cleanup installed artifacts verifier.deleteArtifacts(GROUP_ID, "dummy", "1.0.1-SNAPSHOT"); } private static Verifier createVerifier(final String testFolderName) throws IOException, VerificationException { final File testDir = ResourceExtractor.simpleExtractResources(DummyApiIT.class, "/" + testFolderName); final File settingsXml = new File(testDir.getParent(), "settings.xml"); // During release:prepare the system property maven.repo.local is set (probably // for some forked process). // Verifier gives precedence to the system property (correctly IMHO), rather than // the setting in settings.xml, which results in an incorrect local repository // during release:prepare and thus test failures. To work around this issue, we // temporarily clear the system property during the creation of the Verifier instance. final String originalLocalRepo = System.getProperty(MAVEN_REPO_LOCAL); System.clearProperty(MAVEN_REPO_LOCAL); final Verifier verifier= new Verifier(testDir.getAbsolutePath(), settingsXml.getAbsolutePath()); verifier.setCliOptions(asList("-s", settingsXml.getAbsolutePath())); System.setProperty(MAVEN_REPO_LOCAL, originalLocalRepo); return verifier; } }
prevent NPE
src/test/java/net/distilledcode/maven/baselining/DummyApiIT.java
prevent NPE
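The NPE prevented above comes from restoring the maven.repo.local system property: System.getProperty returns null when the property was never set, and System.setProperty rejects a null value with a NullPointerException. A minimal sketch of the guarded restore, as it appears in the new version of createVerifier above:

    final String originalLocalRepo = System.getProperty(MAVEN_REPO_LOCAL);
    System.clearProperty(MAVEN_REPO_LOCAL);
    // ... construct the Verifier while the property is cleared ...
    if (originalLocalRepo != null) {
        // Restore only when there was a real value; setProperty(key, null) would throw.
        System.setProperty(MAVEN_REPO_LOCAL, originalLocalRepo);
    }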
Java
apache-2.0
3092ec97f9be52653d25e4a434946885c3b3ba76
0
ngageoint/mrgeo,ngageoint/mrgeo,ngageoint/mrgeo
package org.mrgeo.data.raster; import org.apache.commons.lang3.NotImplementedException; import org.gdal.gdal.Band; import org.gdal.gdal.Dataset; import org.gdal.gdal.gdal; import org.gdal.gdalconst.gdalconstConstants; import org.mrgeo.aggregators.Aggregator; import org.mrgeo.data.raster.Interpolator.Bilinear; import org.mrgeo.data.raster.Interpolator.Nearest; import org.mrgeo.utils.ByteArrayUtils; import org.mrgeo.utils.FloatUtils; import org.mrgeo.utils.GDALUtils; import org.mrgeo.utils.tms.Bounds; import java.awt.*; import java.awt.image.*; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.nio.*; public abstract class MrGeoRaster { final static int HEADER_LEN = 12; // data offset: byte (VERSION) + int (width) + int (height) + short (bands) + byte (datatype) private final static int VERSION_OFFSET = 0; // start private final static int WIDTH_OFFSET = 1; // byte (VERSION) private final static int HEIGHT_OFFSET = 5; // byte (VERSION) + int (width) private final static int BANDS_OFFSET = 9; // byte (VERSION) + int (width) + int (height) private final static int DATATYPE_OFFSET = 11; // byte (VERSION) + int (width) + int (height) + short (bands) private final static byte VERSION = 0x03; // MUST NOT BE 0! final byte[] data; private final int width; private final int height; private final int bands; private final int datatype; private final int dataoffset; private final int bandoffset; MrGeoRaster(int width, int height, int bands, int datatype, byte[] data, int dataoffset) { this.width = width; this.height = height; this.bands = bands; this.datatype = datatype; this.data = data; this.dataoffset = dataoffset; this.bandoffset = width * height; } public static MrGeoRaster createEmptyRaster(int width, int height, int bands, int datatype) { switch (datatype) { case DataBuffer.TYPE_BYTE: { return MrGeoByteRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_FLOAT: { return MrGeoFloatRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_DOUBLE: { return MrGeoDoubleRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_INT: { return MrGeoIntRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_SHORT: { return MrGeoShortRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_USHORT: { return MrGeoUShortRaster.createEmptyRaster(width, height, bands); } default: throw new RasterWritable.RasterWritableException("Error trying to read raster. 
Bad raster data type"); } } public static MrGeoRaster createEmptyRaster(int width, int height, int bands, int datatype, double nodata) { MrGeoRaster raster = createEmptyRaster(width, height, bands, datatype); raster.fill(nodata); return raster; } public static MrGeoRaster createEmptyRaster(int width, int height, int bands, int datatype, double[] nodatas) { MrGeoRaster raster = createEmptyRaster(width, height, bands, datatype); raster.fill(nodatas); return raster; } public static MrGeoRaster createRaster(int width, int height, int bands, int datatype, byte[] data, int dataOffset) { switch (datatype) { case DataBuffer.TYPE_BYTE: { return new MrGeoByteRaster(width, height, bands, data, dataOffset); } case DataBuffer.TYPE_FLOAT: { return new MrGeoFloatRaster(width, height, bands, data, dataOffset); } case DataBuffer.TYPE_DOUBLE: { return new MrGeoDoubleRaster(width, height, bands, data, dataOffset); } case DataBuffer.TYPE_INT: { return new MrGeoIntRaster(width, height, bands, data, dataOffset); } case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: { return new MrGeoShortRaster(width, height, bands, data, dataOffset); } default: throw new RasterWritable.RasterWritableException("Error trying to read raster. Bad raster data type"); } } public static MrGeoRaster fromRaster(Raster raster) throws IOException { MrGeoRaster mrgeo = MrGeoRaster.createEmptyRaster(raster.getWidth(), raster.getHeight(), raster.getNumBands(), raster.getTransferType()); for (int b = 0; b < raster.getNumBands(); b++) { for (int y = 0; y < raster.getHeight(); y++) { for (int x = 0; x < raster.getWidth(); x++) { switch (mrgeo.datatype()) { case DataBuffer.TYPE_BYTE: mrgeo.setPixel(x, y, b, (byte)raster.getSample(x, y, b)); break; case DataBuffer.TYPE_INT: mrgeo.setPixel(x, y, b, raster.getSample(x, y, b)); break; case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: mrgeo.setPixel(x, y, b, (short)raster.getSample(x, y, b)); break; case DataBuffer.TYPE_FLOAT: mrgeo.setPixel(x, y, b, raster.getSampleFloat(x, y, b)); break; case DataBuffer.TYPE_DOUBLE: mrgeo.setPixel(x, y, b, raster.getSampleDouble(x, y, b)); break; default: throw new RasterWritable.RasterWritableException("Error trying to read raster. Bad raster data type"); } } } } return mrgeo; } public static MrGeoRaster fromDataset(Dataset dataset) { return fromDataset(dataset, 0, 0, dataset.GetRasterXSize(), dataset.GetRasterYSize()); } public static MrGeoRaster fromDataset(final Dataset dataset, final int x, final int y, final int width, final int height) { int gdaltype = dataset.GetRasterBand(1).getDataType(); int bands = dataset.GetRasterCount(); int datasize = gdal.GetDataTypeSize(gdaltype) / 8; MrGeoRaster raster = MrGeoRaster.createEmptyRaster(width, height, bands, GDALUtils.toRasterDataBufferType(gdaltype)); for (int b = 0; b < bands; b++) { Band band = dataset.GetRasterBand(b + 1); // gdal bands are 1's based byte[] data = new byte[datasize * width * height]; int success = band.ReadRaster(x, y, width, height, width, height, gdaltype, data); if (success != gdalconstConstants.CE_None) { System.out.println("Failed reading raster. 
gdal error: " + success); } //GDALUtils.swapBytes(data, gdaltype); System.arraycopy(data, 0, raster.data, raster.calculateByteOffset(0, 0, b), data.length); } return raster; } static int writeHeader(final int width, final int height, final int bands, final int datatype, byte[] data) { ByteArrayUtils.setByte(VERSION, data, VERSION_OFFSET); ByteArrayUtils.setInt(width, data, WIDTH_OFFSET); ByteArrayUtils.setInt(height, data, HEIGHT_OFFSET); ByteArrayUtils.setShort((short) bands, data, BANDS_OFFSET); ByteArrayUtils.setByte((byte) datatype, data, DATATYPE_OFFSET); return HEADER_LEN; } static int[] readHeader(byte[] data) { return new int[] { ByteArrayUtils.getByte(data, VERSION_OFFSET), ByteArrayUtils.getInt(data, WIDTH_OFFSET), ByteArrayUtils.getInt(data, HEIGHT_OFFSET), ByteArrayUtils.getShort(data, BANDS_OFFSET), ByteArrayUtils.getByte(data, DATATYPE_OFFSET), HEADER_LEN }; } static MrGeoRaster createRaster(byte[] data) { final int[] header = MrGeoRaster.readHeader(data); return createRaster(header[1], header[2], header[3], header[4], data, header[5]); } final public MrGeoRaster createCompatibleRaster(int width, int height) { return createEmptyRaster(width, height, bands, datatype); } final public MrGeoRaster createCompatibleEmptyRaster(int width, int height, double nodata) { MrGeoRaster raster = createEmptyRaster(width, height, bands, datatype); MrGeoRaster row = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row.setPixel(x, 0, 0, nodata); } int headerlen = raster.bandoffset; int len = row.data.length - headerlen; for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y++) { int offset = raster.calculateByteOffset(0, y, b); System.arraycopy(row.data, headerlen, raster.data, offset, len); } } return raster; } final public MrGeoRaster createCompatibleEmptyRaster(int width, int height, double[] nodata) { MrGeoRaster raster = createEmptyRaster(width, height, bands, datatype); MrGeoRaster row; for (int b = 0; b < bands; b++) { row = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row.setPixel(x, 0, 0, nodata[b]); } int headerlen = raster.dataoffset(); int len = row.data.length - headerlen; for (int y = 0; y < height; y++) { int offset = raster.calculateByteOffset(0, y, b); System.arraycopy(row.data, headerlen, raster.data, offset, len); } } return raster; } final public int width() { return width; } final public int height() { return height; } final public int bands() { return bands; } final public int datatype() { return datatype; } final public byte[] data() { return data; } final public int dataoffset() { return dataoffset; } final public int datasize() { return data.length - dataoffset; } final public MrGeoRaster clip(int x, int y, int width, int height) { MrGeoRaster clipraster = MrGeoRaster.createEmptyRaster(width, height, bands, datatype); for (int b = 0; b < bands; b++) { for (int yy = 0; yy < height; yy++) { int[] offsets = calculateByteRangeOffset(x, yy + y, x + width, yy + y, b); int dstOffset = clipraster.calculateByteOffset(0, yy, b); System.arraycopy(data, offsets[0], clipraster.data, dstOffset, offsets[1] - offsets[0]); } } return clipraster; } final public MrGeoRaster clip(int x, int y, int width, int height, int band) { MrGeoRaster clipraster = MrGeoRaster.createEmptyRaster(width, height, 1, datatype); for (int yy = 0; yy < height; yy++) { int[] offsets = calculateByteRangeOffset(x, yy + y, x + width, yy + y, band); int dstOffset = clipraster.calculateByteOffset(0, yy, 0); 
System.arraycopy(data, offsets[0], clipraster.data, dstOffset, offsets[1] - offsets[0]); } return clipraster; } final public void copyFrom(int srcx, int srcy, int width, int height, MrGeoRaster src, int dstx, int dsty) { for (int b = 0; b < bands; b++) { for (int yy = 0; yy < height; yy++) { int[] srcoffcets = src.calculateByteRangeOffset(srcx, yy + srcy, srcx + width, yy + srcy, b); int dstOffset = calculateByteOffset(dstx, yy + dsty, b); System.arraycopy(src.data, srcoffcets[0], data, dstOffset, srcoffcets[1] - srcoffcets[0]); } } } final public void copyFrom(int srcx, int srcy, int srcBand, int width, int height, MrGeoRaster src, int dstx, int dsty, int dstBand) { for (int yy = 0; yy < height; yy++) { int[] srcoffcets = src.calculateByteRangeOffset(srcx, yy + srcy, srcx + width, yy + srcy, srcBand); int dstOffset = calculateByteOffset(dstx, yy + dsty, dstBand); System.arraycopy(src.data, srcoffcets[0], data, dstOffset, srcoffcets[1] - srcoffcets[0]); } } final public void fill(final double value) { MrGeoRaster row = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row.setPixel(x, 0, 0, value); } int len = row.data.length - dataoffset; for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y++) { int offset = calculateByteOffset(0, y, b); System.arraycopy(row.data, dataoffset, data, offset, len); } } } final public void fill(final double[] values) { MrGeoRaster row[] = new MrGeoRaster[bands]; for (int b = 0; b < bands; b++) { row[b] = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row[b].setPixel(x, 0, 0, values[b]); } } int headerlen = dataoffset(); int len = row[0].data.length - headerlen; for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y++) { int offset = calculateByteOffset(0, y, b); System.arraycopy(row[b].data, headerlen, data, offset, len); } } } final public void fill(final int band, final double value) { MrGeoRaster row = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row.setPixel(x, 0, 0, value); } int headerlen = bandoffset; int len = row.data.length - headerlen; for (int y = 0; y < height; y++) { int offset = calculateByteOffset(0, y, band); System.arraycopy(row.data, headerlen, data, offset, len); } } // Scaling algorithm taken from: http://willperone.net/Code/codescaling.php and modified to use // Rasters. It is an optimized Bresenham's algorithm. // Interpolated algorithm was http://tech-algorithm.com/articles/bilinear-image-scaling/ // Also used was http://www.compuphase.com/graphic/scale.htm, explaining interpolated // scaling public MrGeoRaster scale(final int dstWidth, final int dstHeight, final boolean interpolate, final double[] nodatas) { MrGeoRaster src = this; double scaleW = (double) dstWidth / src.width; double scaleH = (double) dstHeight / src.height; while (true) { int dw; int dh; final double scale = Math.max(scaleW, scaleH); // bresenham's scalar really doesn't like being scaled more than 2x or 1/2x without the // possibility of artifacts. But it turns out you can scale, then scale, etc. and get // an answer without artifacts. Hence the loop here... 
if (interpolate) { if (scale > 2.0) { dw = (int) (src.width * 2.0); dh = (int) (src.height * 2.0); } else if (scale < 0.50) { dw = (int) (src.width * 0.50); dh = (int) (src.height * 0.50); } else { dw = dstWidth; dh = dstHeight; } } else { dw = dstWidth; dh = dstHeight; } final MrGeoRaster dst = createCompatibleRaster(dw, dh); switch (datatype) { case DataBuffer.TYPE_BYTE: case DataBuffer.TYPE_INT: case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: if (interpolate) { Bilinear.scaleInt(src, dst, nodatas); } else { Nearest.scaleInt(src, dst); } break; case DataBuffer.TYPE_FLOAT: if (interpolate) { Bilinear.scaleFloat(src, dst, nodatas); } else { Nearest.scaleFloat(src, dst); } break; case DataBuffer.TYPE_DOUBLE: if (interpolate) { Bilinear.scaleDouble(src, dst, nodatas); } else { Nearest.scaleDouble(src, dst); } break; default: throw new RasterWritable.RasterWritableException("Error trying to scale raster. Bad raster data type"); } if (dst.width == dstWidth && dst.height == dstHeight) { return dst; } src = dst; scaleW = (double) dstWidth / src.width; scaleH = (double) dstHeight / src.height; } } final public MrGeoRaster reduce(final int xfactor, final int yfactor, Aggregator aggregator, double[] nodatas) { MrGeoRaster child = createCompatibleRaster(width / xfactor, height / yfactor); final int subsize = xfactor * yfactor; final int[] intsamples = new int[subsize]; final float[] floatsamples = new float[subsize]; final double[] doublesamples = new double[subsize]; int ndx; for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y += yfactor) { for (int x = 0; x < width; x += xfactor) { switch (datatype) { case DataBuffer.TYPE_BYTE: case DataBuffer.TYPE_INT: case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: ndx = 0; for (int yy = y; yy < y + yfactor; yy++) { for (int xx = x; xx < x + xfactor; xx++) { intsamples[ndx++] = getPixelInt(xx, yy, b); } } int intSample = aggregator.aggregate(intsamples, (int)nodatas[b]); child.setPixel(x / xfactor, y / yfactor, b, intSample); break; case DataBuffer.TYPE_FLOAT: ndx = 0; for (int yy = y; yy < y + yfactor; yy++) { for (int xx = x; xx < x + xfactor; xx++) { floatsamples[ndx++] = getPixelInt(xx, yy, b); } } float floatsample = aggregator.aggregate(floatsamples, (float)nodatas[b]); child.setPixel(x / xfactor, y / yfactor, b, floatsample); break; case DataBuffer.TYPE_DOUBLE: ndx = 0; for (int yy = y; yy < y + yfactor; yy++) { for (int xx = x; xx < x + xfactor; xx++) { doublesamples[ndx++] = getPixelInt(xx, yy, b); } } double doublesample = aggregator.aggregate(doublesamples, nodatas[b]); child.setPixel(x / xfactor, y / yfactor, b, doublesample); break; default: throw new RasterWritable.RasterWritableException( "Error trying to get decimate pixels in the raster. 
Bad raster data type"); } } } } return child; } final public void mosaic(MrGeoRaster other, double[] nodata) { for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y++) { for (int x = 0; x < width; x++) { switch (datatype) { case DataBuffer.TYPE_BYTE: { final byte p = other.getPixelByte(x, y, b); if (getPixelByte(x, y, b) != nodata[b]) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_FLOAT: { final float p = other.getPixelFloat(x, y, b); if (FloatUtils.isNotNodata(p, (float)nodata[b])) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_DOUBLE: { final double p = other.getPixelDouble(x, y, b); if (FloatUtils.isNotNodata(p, nodata[b])) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_INT: { final int p = other.getPixelInt(x, y, b); if (p != (int)nodata[b]) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_SHORT: { final short p = other.getPixelShort(x, y, b); if (p != (short)nodata[b]) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_USHORT: { final int p = other.getPixeUShort(x, y, b); if (p != (short)nodata[b]) { setPixel(x, y, b, p); } break; } } } } } } final public Dataset toDataset() { return toDataset(null, null); } final public Dataset toDataset(final Bounds bounds, final double[] nodatas) { int gdaltype = GDALUtils.toGDALDataType(datatype); Dataset ds = GDALUtils.createEmptyMemoryRaster(width, height, bands, gdaltype, nodatas); double[] xform = new double[6]; if (bounds != null) { xform[0] = bounds.w; xform[1] = bounds.width() / width; xform[2] = 0; xform[3] = bounds.n; xform[4] = 0; xform[5] = -bounds.height() / height; ds.SetProjection(GDALUtils.EPSG4326()); } else { xform[0] = 0; xform[1] = width; xform[2] = 0; xform[3] = 0; xform[4] = 0; xform[5] = -height; } ds.SetGeoTransform(xform); byte[] data = new byte[bytesPerPixel() * width * height]; for (int b = 0; b < bands; b++) { Band band = ds.GetRasterBand(b + 1); // gdal bands are 1's based if (nodatas != null) { band.SetNoDataValue(nodatas[b]); } System.arraycopy(this.data ,calculateByteOffset(0, 0, b), data, 0, data.length); int success = band.WriteRaster(0, 0, width, height, width, height, gdaltype, data); if (success != gdalconstConstants.CE_None) { System.out.println("Failed writing raster. gdal error: " + success); } } return ds; } final public Raster toRaster() { WritableRaster raster = Raster.createBandedRaster(datatype, width, height, bands, new Point(0,0)); final ByteBuffer rasterBuffer = ByteBuffer.wrap(data); // skip over the header in the data buffer for (int i = 0; i < HEADER_LEN; i++) { rasterBuffer.get(); } int databytes = data.length - HEADER_LEN; switch (datatype) { case DataBuffer.TYPE_BYTE: { // we can't use the byte buffer explicitly because the header info is // still in it... 
final byte[] bytedata = new byte[databytes]; rasterBuffer.get(bytedata); raster.setDataElements(0, 0, width, height, bytedata); break; } case DataBuffer.TYPE_FLOAT: { final FloatBuffer floatbuff = rasterBuffer.asFloatBuffer(); final float[] floatdata = new float[databytes / bytesPerPixel()]; floatbuff.get(floatdata); raster.setDataElements(0, 0, width, height, floatdata); break; } case DataBuffer.TYPE_DOUBLE: { final DoubleBuffer doublebuff = rasterBuffer.asDoubleBuffer(); final double[] doubledata = new double[databytes / bytesPerPixel()]; doublebuff.get(doubledata); raster.setDataElements(0, 0, width, height, doubledata); break; } case DataBuffer.TYPE_INT: { final IntBuffer intbuff = rasterBuffer.asIntBuffer(); final int[] intdata = new int[databytes / bytesPerPixel()]; intbuff.get(intdata); raster.setDataElements(0, 0, width, height, intdata); break; } case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: { final ShortBuffer shortbuff = rasterBuffer.asShortBuffer(); final short[] shortdata = new short[databytes / bytesPerPixel()]; shortbuff.get(shortdata); raster.setDataElements(0, 0, width, height, shortdata); break; } default: throw new RasterWritable.RasterWritableException("Error trying to read raster. Bad raster data type"); } return raster; } public abstract byte getPixelByte(int x, int y, int band); public abstract short getPixelShort(int x, int y, int band); public abstract short getPixeUShort(int x, int y, int band); public abstract int getPixelInt(int x, int y, int band); public abstract float getPixelFloat(int x, int y, int band); public abstract double getPixelDouble(int x, int y, int band); public abstract void setPixel(int x, int y, int band, byte pixel); public abstract void setPixel(int x, int y, int band, short pixel); public abstract void setPixel(int x, int y, int band, int pixel); public abstract void setPixel(int x, int y, int band, float pixel); public abstract void setPixel(int x, int y, int band, double pixel); final int calculateByteOffset(final int x, final int y, final int band) { return ((y * width + x) + band * bandoffset) * bytesPerPixel() + dataoffset; } final int[] calculateByteRangeOffset(final int startx, final int starty, final int endx, final int endy, final int band) { final int bpp = bytesPerPixel(); final int bandoffset = band * this.bandoffset; return new int[] { ((starty * width + startx) + bandoffset) * bpp + dataoffset, ((endy * width + endx) + bandoffset) * bpp + dataoffset}; } final int[] calculateByteRangeOffset(final int startx, final int starty, final int startband, final int endx, final int endy, final int endband) { final int bpp = bytesPerPixel(); return new int[] { ((starty * width + startx) + (startband * bandoffset)) * bpp + dataoffset, ((endy * width + endx) + (endband * bandoffset)) * bpp + dataoffset}; } abstract int bytesPerPixel(); }
mrgeo-core/src/main/java/org/mrgeo/data/raster/MrGeoRaster.java
package org.mrgeo.data.raster; import org.apache.commons.lang3.NotImplementedException; import org.gdal.gdal.Band; import org.gdal.gdal.Dataset; import org.gdal.gdal.gdal; import org.gdal.gdalconst.gdalconstConstants; import org.mrgeo.aggregators.Aggregator; import org.mrgeo.data.raster.Interpolator.Bilinear; import org.mrgeo.data.raster.Interpolator.Nearest; import org.mrgeo.utils.ByteArrayUtils; import org.mrgeo.utils.FloatUtils; import org.mrgeo.utils.GDALUtils; import org.mrgeo.utils.tms.Bounds; import java.awt.*; import java.awt.image.*; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.nio.*; public abstract class MrGeoRaster { final static int HEADER_LEN = 12; // data offset: byte (VERSION) + int (width) + int (height) + short (bands) + byte (datatype) private final static int VERSION_OFFSET = 0; // start private final static int WIDTH_OFFSET = 1; // byte (VERSION) private final static int HEIGHT_OFFSET = 5; // byte (VERSION) + int (width) private final static int BANDS_OFFSET = 9; // byte (VERSION) + int (width) + int (height) private final static int DATATYPE_OFFSET = 11; // byte (VERSION) + int (width) + int (height) + short (bands) private final static byte VERSION = 0x03; // MUST NOT BE 0! final byte[] data; private final int width; private final int height; private final int bands; private final int datatype; private final int dataoffset; private final int bandoffset; MrGeoRaster(int width, int height, int bands, int datatype, byte[] data, int dataoffset) { this.width = width; this.height = height; this.bands = bands; this.datatype = datatype; this.data = data; this.dataoffset = dataoffset; this.bandoffset = width * height; } public static MrGeoRaster createEmptyRaster(int width, int height, int bands, int datatype) { switch (datatype) { case DataBuffer.TYPE_BYTE: { return MrGeoByteRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_FLOAT: { return MrGeoFloatRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_DOUBLE: { return MrGeoDoubleRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_INT: { return MrGeoIntRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_SHORT: { return MrGeoShortRaster.createEmptyRaster(width, height, bands); } case DataBuffer.TYPE_USHORT: { return MrGeoUShortRaster.createEmptyRaster(width, height, bands); } default: throw new RasterWritable.RasterWritableException("Error trying to read raster. 
Bad raster data type"); } } public static MrGeoRaster createEmptyRaster(int width, int height, int bands, int datatype, double nodata) { MrGeoRaster raster = createEmptyRaster(width, height, bands, datatype); raster.fill(nodata); return raster; } public static MrGeoRaster createEmptyRaster(int width, int height, int bands, int datatype, double[] nodatas) { MrGeoRaster raster = createEmptyRaster(width, height, bands, datatype); raster.fill(nodatas); return raster; } public static MrGeoRaster createRaster(int width, int height, int bands, int datatype, byte[] data, int dataOffset) { switch (datatype) { case DataBuffer.TYPE_BYTE: { return new MrGeoByteRaster(width, height, bands, data, dataOffset); } case DataBuffer.TYPE_FLOAT: { return new MrGeoFloatRaster(width, height, bands, data, dataOffset); } case DataBuffer.TYPE_DOUBLE: { return new MrGeoDoubleRaster(width, height, bands, data, dataOffset); } case DataBuffer.TYPE_INT: { return new MrGeoIntRaster(width, height, bands, data, dataOffset); } case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: { return new MrGeoShortRaster(width, height, bands, data, dataOffset); } default: throw new RasterWritable.RasterWritableException("Error trying to read raster. Bad raster data type"); } } public static MrGeoRaster fromRaster(Raster raster) throws IOException { MrGeoRaster mrgeo = MrGeoRaster.createEmptyRaster(raster.getWidth(), raster.getHeight(), raster.getNumBands(), raster.getTransferType()); for (int b = 0; b < raster.getNumBands(); b++) { for (int y = 0; y < raster.getHeight(); y++) { for (int x = 0; x < raster.getWidth(); x++) { switch (mrgeo.datatype()) { case DataBuffer.TYPE_BYTE: mrgeo.setPixel(x, y, b, (byte)raster.getSample(x, y, b)); break; case DataBuffer.TYPE_INT: mrgeo.setPixel(x, y, b, raster.getSample(x, y, b)); break; case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: mrgeo.setPixel(x, y, b, (short)raster.getSample(x, y, b)); break; case DataBuffer.TYPE_FLOAT: mrgeo.setPixel(x, y, b, raster.getSampleFloat(x, y, b)); break; case DataBuffer.TYPE_DOUBLE: mrgeo.setPixel(x, y, b, raster.getSampleDouble(x, y, b)); break; default: throw new RasterWritable.RasterWritableException("Error trying to read raster. Bad raster data type"); } } } } return mrgeo; } public static MrGeoRaster fromDataset(Dataset dataset) { return fromDataset(dataset, 0, 0, dataset.GetRasterXSize(), dataset.GetRasterYSize()); } public static MrGeoRaster fromDataset(final Dataset dataset, final int x, final int y, final int width, final int height) { int gdaltype = dataset.GetRasterBand(1).getDataType(); int bands = dataset.GetRasterCount(); int datasize = gdal.GetDataTypeSize(gdaltype) / 8; MrGeoRaster raster = MrGeoRaster.createEmptyRaster(width, height, bands, GDALUtils.toRasterDataBufferType(gdaltype)); for (int b = 0; b < bands; b++) { Band band = dataset.GetRasterBand(b + 1); // gdal bands are 1's based byte[] data = new byte[datasize * width * height]; int success = band.ReadRaster(x, y, width, height, width, height, gdaltype, data); if (success != gdalconstConstants.CE_None) { System.out.println("Failed reading raster. 
gdal error: " + success); } //GDALUtils.swapBytes(data, gdaltype); System.arraycopy(data, 0, raster.data, raster.calculateByteOffset(0, 0, b), data.length); } return raster; } static int writeHeader(final int width, final int height, final int bands, final int datatype, byte[] data) { ByteArrayUtils.setByte(VERSION, data, VERSION_OFFSET); ByteArrayUtils.setInt(width, data, WIDTH_OFFSET); ByteArrayUtils.setInt(height, data, HEIGHT_OFFSET); ByteArrayUtils.setShort((short) bands, data, BANDS_OFFSET); ByteArrayUtils.setByte((byte) datatype, data, DATATYPE_OFFSET); return HEADER_LEN; } static int[] readHeader(byte[] data) { return new int[] { ByteArrayUtils.getByte(data, VERSION_OFFSET), ByteArrayUtils.getInt(data, WIDTH_OFFSET), ByteArrayUtils.getInt(data, HEIGHT_OFFSET), ByteArrayUtils.getShort(data, BANDS_OFFSET), ByteArrayUtils.getByte(data, DATATYPE_OFFSET), HEADER_LEN }; } static MrGeoRaster createRaster(byte[] data) { final int[] header = MrGeoRaster.readHeader(data); return createRaster(header[1], header[2], header[3], header[4], data, header[5]); } final public MrGeoRaster createCompatibleRaster(int width, int height) { return createEmptyRaster(width, height, bands, datatype); } final public MrGeoRaster createCompatibleEmptyRaster(int width, int height, double nodata) { MrGeoRaster raster = createEmptyRaster(width, height, bands, datatype); MrGeoRaster row = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row.setPixel(x, 0, 0, nodata); } int headerlen = raster.bandoffset; int len = row.data.length - headerlen; for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y++) { int offset = raster.calculateByteOffset(0, y, b); System.arraycopy(row.data, headerlen, raster.data, offset, len); } } return raster; } final public MrGeoRaster createCompatibleEmptyRaster(int width, int height, double[] nodata) { MrGeoRaster raster = createEmptyRaster(width, height, bands, datatype); MrGeoRaster row; for (int b = 0; b < bands; b++) { row = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row.setPixel(x, 0, 0, nodata[b]); } int headerlen = raster.dataoffset(); int len = row.data.length - headerlen; for (int y = 0; y < height; y++) { int offset = raster.calculateByteOffset(0, y, b); System.arraycopy(row.data, headerlen, raster.data, offset, len); } } return raster; } final public int width() { return width; } final public int height() { return height; } final public int bands() { return bands; } final public int datatype() { return datatype; } final public byte[] data() { return data; } final public int dataoffset() { return dataoffset; } final public int datasize() { return data.length - dataoffset; } final public MrGeoRaster clip(int x, int y, int width, int height) { MrGeoRaster clipraster = MrGeoRaster.createEmptyRaster(width, height, bands, datatype); for (int b = 0; b < bands; b++) { for (int yy = 0; yy < height; yy++) { int[] offsets = calculateByteRangeOffset(x, yy + y, x + width, yy + y, b); int dstOffset = clipraster.calculateByteOffset(0, yy, b); System.arraycopy(data, offsets[0], clipraster.data, dstOffset, offsets[1] - offsets[0]); } } return clipraster; } final public MrGeoRaster clip(int x, int y, int width, int height, int band) { MrGeoRaster clipraster = MrGeoRaster.createEmptyRaster(width, height, 1, datatype); for (int yy = 0; yy < height; yy++) { int[] offsets = calculateByteRangeOffset(x, yy + y, x + width, yy + y, band); int dstOffset = clipraster.calculateByteOffset(0, yy, band); 
System.arraycopy(data, offsets[0], clipraster.data, dstOffset, offsets[1] - offsets[0]); } return clipraster; } final public void copyFrom(int srcx, int srcy, int width, int height, MrGeoRaster src, int dstx, int dsty) { for (int b = 0; b < bands; b++) { for (int yy = 0; yy < height; yy++) { int[] srcoffcets = src.calculateByteRangeOffset(srcx, yy + srcy, srcx + width, yy + srcy, b); int dstOffset = calculateByteOffset(dstx, yy + dsty, b); System.arraycopy(src.data, srcoffcets[0], data, dstOffset, srcoffcets[1] - srcoffcets[0]); } } } final public void copyFrom(int srcx, int srcy, int srcBand, int width, int height, MrGeoRaster src, int dstx, int dsty, int dstBand) { for (int yy = 0; yy < height; yy++) { int[] srcoffcets = src.calculateByteRangeOffset(srcx, yy + srcy, srcx + width, yy + srcy, srcBand); int dstOffset = calculateByteOffset(dstx, yy + dsty, dstBand); System.arraycopy(src.data, srcoffcets[0], data, dstOffset, srcoffcets[1] - srcoffcets[0]); } } final public void fill(final double value) { MrGeoRaster row = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row.setPixel(x, 0, 0, value); } int len = row.data.length - dataoffset; for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y++) { int offset = calculateByteOffset(0, y, b); System.arraycopy(row.data, dataoffset, data, offset, len); } } } final public void fill(final double[] values) { MrGeoRaster row[] = new MrGeoRaster[bands]; for (int b = 0; b < bands; b++) { row[b] = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row[b].setPixel(x, 0, 0, values[b]); } } int headerlen = dataoffset(); int len = row[0].data.length - headerlen; for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y++) { int offset = calculateByteOffset(0, y, b); System.arraycopy(row[b].data, headerlen, data, offset, len); } } } final public void fill(final int band, final double value) { MrGeoRaster row = MrGeoRaster.createEmptyRaster(width, 1, 1, datatype); for (int x = 0; x < width; x++) { row.setPixel(x, 0, 0, value); } int headerlen = bandoffset; int len = row.data.length - headerlen; for (int y = 0; y < height; y++) { int offset = calculateByteOffset(0, y, band); System.arraycopy(row.data, headerlen, data, offset, len); } } // Scaling algorithm taken from: http://willperone.net/Code/codescaling.php and modified to use // Rasters. It is an optimized Bresenham's algorithm. // Interpolated algorithm was http://tech-algorithm.com/articles/bilinear-image-scaling/ // Also used was http://www.compuphase.com/graphic/scale.htm, explaining interpolated // scaling public MrGeoRaster scale(final int dstWidth, final int dstHeight, final boolean interpolate, final double[] nodatas) { MrGeoRaster src = this; double scaleW = (double) dstWidth / src.width; double scaleH = (double) dstHeight / src.height; while (true) { int dw; int dh; final double scale = Math.max(scaleW, scaleH); // bresenham's scalar really doesn't like being scaled more than 2x or 1/2x without the // possibility of artifacts. But it turns out you can scale, then scale, etc. and get // an answer without artifacts. Hence the loop here... 
if (interpolate) { if (scale > 2.0) { dw = (int) (src.width * 2.0); dh = (int) (src.height * 2.0); } else if (scale < 0.50) { dw = (int) (src.width * 0.50); dh = (int) (src.height * 0.50); } else { dw = dstWidth; dh = dstHeight; } } else { dw = dstWidth; dh = dstHeight; } final MrGeoRaster dst = createCompatibleRaster(dw, dh); switch (datatype) { case DataBuffer.TYPE_BYTE: case DataBuffer.TYPE_INT: case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: if (interpolate) { Bilinear.scaleInt(src, dst, nodatas); } else { Nearest.scaleInt(src, dst); } break; case DataBuffer.TYPE_FLOAT: if (interpolate) { Bilinear.scaleFloat(src, dst, nodatas); } else { Nearest.scaleFloat(src, dst); } break; case DataBuffer.TYPE_DOUBLE: if (interpolate) { Bilinear.scaleDouble(src, dst, nodatas); } else { Nearest.scaleDouble(src, dst); } break; default: throw new RasterWritable.RasterWritableException("Error trying to scale raster. Bad raster data type"); } if (dst.width == dstWidth && dst.height == dstHeight) { return dst; } src = dst; scaleW = (double) dstWidth / src.width; scaleH = (double) dstHeight / src.height; } } final public MrGeoRaster reduce(final int xfactor, final int yfactor, Aggregator aggregator, double[] nodatas) { MrGeoRaster child = createCompatibleRaster(width / xfactor, height / yfactor); final int subsize = xfactor * yfactor; final int[] intsamples = new int[subsize]; final float[] floatsamples = new float[subsize]; final double[] doublesamples = new double[subsize]; int ndx; for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y += yfactor) { for (int x = 0; x < width; x += xfactor) { switch (datatype) { case DataBuffer.TYPE_BYTE: case DataBuffer.TYPE_INT: case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: ndx = 0; for (int yy = y; yy < y + yfactor; yy++) { for (int xx = x; xx < x + xfactor; xx++) { intsamples[ndx++] = getPixelInt(xx, yy, b); } } int intSample = aggregator.aggregate(intsamples, (int)nodatas[b]); child.setPixel(x / xfactor, y / yfactor, b, intSample); break; case DataBuffer.TYPE_FLOAT: ndx = 0; for (int yy = y; yy < y + yfactor; yy++) { for (int xx = x; xx < x + xfactor; xx++) { floatsamples[ndx++] = getPixelInt(xx, yy, b); } } float floatsample = aggregator.aggregate(floatsamples, (float)nodatas[b]); child.setPixel(x / xfactor, y / yfactor, b, floatsample); break; case DataBuffer.TYPE_DOUBLE: ndx = 0; for (int yy = y; yy < y + yfactor; yy++) { for (int xx = x; xx < x + xfactor; xx++) { doublesamples[ndx++] = getPixelInt(xx, yy, b); } } double doublesample = aggregator.aggregate(doublesamples, nodatas[b]); child.setPixel(x / xfactor, y / yfactor, b, doublesample); break; default: throw new RasterWritable.RasterWritableException( "Error trying to get decimate pixels in the raster. 
Bad raster data type"); } } } } return child; } final public void mosaic(MrGeoRaster other, double[] nodata) { for (int b = 0; b < bands; b++) { for (int y = 0; y < height; y++) { for (int x = 0; x < width; x++) { switch (datatype) { case DataBuffer.TYPE_BYTE: { final byte p = other.getPixelByte(x, y, b); if (getPixelByte(x, y, b) != nodata[b]) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_FLOAT: { final float p = other.getPixelFloat(x, y, b); if (FloatUtils.isNotNodata(p, (float)nodata[b])) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_DOUBLE: { final double p = other.getPixelDouble(x, y, b); if (FloatUtils.isNotNodata(p, nodata[b])) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_INT: { final int p = other.getPixelInt(x, y, b); if (p != (int)nodata[b]) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_SHORT: { final short p = other.getPixelShort(x, y, b); if (p != (short)nodata[b]) { setPixel(x, y, b, p); } break; } case DataBuffer.TYPE_USHORT: { final int p = other.getPixeUShort(x, y, b); if (p != (short)nodata[b]) { setPixel(x, y, b, p); } break; } } } } } } final public Dataset toDataset() { return toDataset(null, null); } final public Dataset toDataset(final Bounds bounds, final double[] nodatas) { int gdaltype = GDALUtils.toGDALDataType(datatype); Dataset ds = GDALUtils.createEmptyMemoryRaster(width, height, bands, gdaltype, nodatas); double[] xform = new double[6]; if (bounds != null) { xform[0] = bounds.w; xform[1] = bounds.width() / width; xform[2] = 0; xform[3] = bounds.n; xform[4] = 0; xform[5] = -bounds.height() / height; ds.SetProjection(GDALUtils.EPSG4326()); } else { xform[0] = 0; xform[1] = width; xform[2] = 0; xform[3] = 0; xform[4] = 0; xform[5] = -height; } ds.SetGeoTransform(xform); byte[] data = new byte[bytesPerPixel() * width * height]; for (int b = 0; b < bands; b++) { Band band = ds.GetRasterBand(b + 1); // gdal bands are 1's based if (nodatas != null) { band.SetNoDataValue(nodatas[b]); } System.arraycopy(this.data ,calculateByteOffset(0, 0, b), data, 0, data.length); int success = band.WriteRaster(0, 0, width, height, width, height, gdaltype, data); if (success != gdalconstConstants.CE_None) { System.out.println("Failed writing raster. gdal error: " + success); } } return ds; } final public Raster toRaster() { WritableRaster raster = Raster.createBandedRaster(datatype, width, height, bands, new Point(0,0)); final ByteBuffer rasterBuffer = ByteBuffer.wrap(data); // skip over the header in the data buffer for (int i = 0; i < HEADER_LEN; i++) { rasterBuffer.get(); } int databytes = data.length - HEADER_LEN; switch (datatype) { case DataBuffer.TYPE_BYTE: { // we can't use the byte buffer explicitly because the header info is // still in it... 
final byte[] bytedata = new byte[databytes]; rasterBuffer.get(bytedata); raster.setDataElements(0, 0, width, height, bytedata); break; } case DataBuffer.TYPE_FLOAT: { final FloatBuffer floatbuff = rasterBuffer.asFloatBuffer(); final float[] floatdata = new float[databytes / bytesPerPixel()]; floatbuff.get(floatdata); raster.setDataElements(0, 0, width, height, floatdata); break; } case DataBuffer.TYPE_DOUBLE: { final DoubleBuffer doublebuff = rasterBuffer.asDoubleBuffer(); final double[] doubledata = new double[databytes / bytesPerPixel()]; doublebuff.get(doubledata); raster.setDataElements(0, 0, width, height, doubledata); break; } case DataBuffer.TYPE_INT: { final IntBuffer intbuff = rasterBuffer.asIntBuffer(); final int[] intdata = new int[databytes / bytesPerPixel()]; intbuff.get(intdata); raster.setDataElements(0, 0, width, height, intdata); break; } case DataBuffer.TYPE_SHORT: case DataBuffer.TYPE_USHORT: { final ShortBuffer shortbuff = rasterBuffer.asShortBuffer(); final short[] shortdata = new short[databytes / bytesPerPixel()]; shortbuff.get(shortdata); raster.setDataElements(0, 0, width, height, shortdata); break; } default: throw new RasterWritable.RasterWritableException("Error trying to read raster. Bad raster data type"); } return raster; } public abstract byte getPixelByte(int x, int y, int band); public abstract short getPixelShort(int x, int y, int band); public abstract short getPixeUShort(int x, int y, int band); public abstract int getPixelInt(int x, int y, int band); public abstract float getPixelFloat(int x, int y, int band); public abstract double getPixelDouble(int x, int y, int band); public abstract void setPixel(int x, int y, int band, byte pixel); public abstract void setPixel(int x, int y, int band, short pixel); public abstract void setPixel(int x, int y, int band, int pixel); public abstract void setPixel(int x, int y, int band, float pixel); public abstract void setPixel(int x, int y, int band, double pixel); final int calculateByteOffset(final int x, final int y, final int band) { return ((y * width + x) + band * bandoffset) * bytesPerPixel() + dataoffset; } final int[] calculateByteRangeOffset(final int startx, final int starty, final int endx, final int endy, final int band) { final int bpp = bytesPerPixel(); final int bandoffset = band * this.bandoffset; return new int[] { ((starty * width + startx) + bandoffset) * bpp + dataoffset, ((endy * width + endx) + bandoffset) * bpp + dataoffset}; } final int[] calculateByteRangeOffset(final int startx, final int starty, final int startband, final int endx, final int endy, final int endband) { final int bpp = bytesPerPixel(); return new int[] { ((starty * width + startx) + (startband * bandoffset)) * bpp + dataoffset, ((endy * width + endx) + (endband * bandoffset)) * bpp + dataoffset}; } abstract int bytesPerPixel(); }
Fix single band clip
mrgeo-core/src/main/java/org/mrgeo/data/raster/MrGeoRaster.java
Fix single band clip
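A minimal usage sketch of the single-band clip API touched by this commit follows. It uses only members declared in MrGeoRaster.java above (createEmptyRaster, clip, width, height, bands); the raster size, clip window, band index, and nodata value are illustrative assumptions, not values taken from the commit.

import java.awt.image.DataBuffer;
import org.mrgeo.data.raster.MrGeoRaster;

public class SingleBandClipSketch {
    public static void main(String[] args) {
        // 3-band, 512x512 float raster pre-filled with an assumed nodata value
        MrGeoRaster source =
            MrGeoRaster.createEmptyRaster(512, 512, 3, DataBuffer.TYPE_FLOAT, -9999.0);

        // Clip a 256x256 window starting at pixel (128, 128) out of band 1 only;
        // per the signature in MrGeoRaster.java this returns a new 1-band raster.
        MrGeoRaster band1 = source.clip(128, 128, 256, 256, 1);

        System.out.println(band1.width() + "x" + band1.height()
            + ", bands=" + band1.bands());
    }
}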
Java
apache-2.0
fddd1af14103beff7a8b60736da0c774465a73b4
0
YUKAI/konashi-android-sdk,kiryuxxu/konashi-android-sdk,YUKAI/konashi-android-sdk
package com.uxxu.konashi.lib; import android.app.Activity; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothGattCharacteristic; import android.bluetooth.BluetoothGattService; import android.content.Context; import com.uxxu.konashi.lib.action.AioAnalogReadAction; import com.uxxu.konashi.lib.action.BatteryLevelReadAction; import com.uxxu.konashi.lib.action.HardwareResetAction; import com.uxxu.konashi.lib.action.I2cModeAction; import com.uxxu.konashi.lib.action.I2cReadAction; import com.uxxu.konashi.lib.action.I2cSendConditionAction; import com.uxxu.konashi.lib.action.I2cSetReadParamAction; import com.uxxu.konashi.lib.action.I2cWriteAction; import com.uxxu.konashi.lib.action.PioDigitalWriteAction; import com.uxxu.konashi.lib.action.PioPinModeAction; import com.uxxu.konashi.lib.action.PioPinPullupAction; import com.uxxu.konashi.lib.action.PwmDutyAction; import com.uxxu.konashi.lib.action.PwmLedDriveAction; import com.uxxu.konashi.lib.action.PwmPeriodAction; import com.uxxu.konashi.lib.action.PwmPinModeAction; import com.uxxu.konashi.lib.action.SpiConfigAction; import com.uxxu.konashi.lib.action.SpiReadAction; import com.uxxu.konashi.lib.action.SpiWriteAction; import com.uxxu.konashi.lib.action.UartBaudrateAction; import com.uxxu.konashi.lib.action.UartModeAction; import com.uxxu.konashi.lib.action.UartWriteAction; import com.uxxu.konashi.lib.dispatcher.AioStoreUpdater; import com.uxxu.konashi.lib.dispatcher.CharacteristicDispatcher; import com.uxxu.konashi.lib.dispatcher.DispatcherContainer; import com.uxxu.konashi.lib.dispatcher.I2cStoreUpdater; import com.uxxu.konashi.lib.dispatcher.PioStoreUpdater; import com.uxxu.konashi.lib.dispatcher.PwmStoreUpdater; import com.uxxu.konashi.lib.dispatcher.SpiStoreUpdater; import com.uxxu.konashi.lib.dispatcher.UartStoreUpdater; import com.uxxu.konashi.lib.filter.AioAnalogReadFilter; import com.uxxu.konashi.lib.filter.BatteryLevelReadFilter; import com.uxxu.konashi.lib.filter.I2cReadFilter; import com.uxxu.konashi.lib.store.AioStore; import com.uxxu.konashi.lib.store.I2cStore; import com.uxxu.konashi.lib.store.PioStore; import com.uxxu.konashi.lib.store.PwmStore; import com.uxxu.konashi.lib.store.SpiStore; import com.uxxu.konashi.lib.store.UartStore; import org.jdeferred.DoneCallback; import org.jdeferred.DonePipe; import org.jdeferred.Promise; import org.jdeferred.android.AndroidDeferredManager; import java.util.UUID; import info.izumin.android.bletia.BleState; import info.izumin.android.bletia.Bletia; import info.izumin.android.bletia.BletiaException; import info.izumin.android.bletia.action.Action; import info.izumin.android.bletia.action.ReadRemoteRssiAction; /** * konashiを管理するメインクラス * * @author monakaz, YUKAI Engineering * http://konashi.ux-xu.com * ======================================================================== * Copyright 2014 Yukai Engineering Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ public class KonashiManager { // konashi members // PIO private PioStore mPioStore; private CharacteristicDispatcher<PioStore, PioStoreUpdater> mPioDispatcher; // PWM private PwmStore mPwmStore; private CharacteristicDispatcher<PwmStore, PwmStoreUpdater> mPwmDispatcher; // AIO private AioStore mAioStore; private CharacteristicDispatcher<AioStore, AioStoreUpdater> mAioDispatcher; // I2C private I2cStore mI2cStore; private CharacteristicDispatcher<I2cStore, I2cStoreUpdater> mI2cDispatcher; // UART private UartStore mUartStore; private CharacteristicDispatcher<UartStore, UartStoreUpdater> mUartDispatcher; // SPI private SpiStore mSpiStore; private CharacteristicDispatcher<SpiStore, SpiStoreUpdater> mSpiDispatcher; private Bletia mBletia; private EventEmitter mEmitter; private CallbackHandler mCallbackHandler; private DispatcherContainer mDispacherContainer; private BluetoothDevice mDevice; private ConnectionHelper mConnectionHelper; /////////////////////////// // Initialization /////////////////////////// public KonashiManager(Context context) { mEmitter = new EventEmitter(); mDispacherContainer = new DispatcherContainer(); mCallbackHandler = new CallbackHandler(this, mEmitter, mDispacherContainer); // PIO mPioDispatcher = mDispacherContainer.getPioDispatcher(); mPioStore = new PioStore(mPioDispatcher); // PWM mPwmDispatcher = mDispacherContainer.getPwmDispatcher(); mPwmStore = new PwmStore(mPwmDispatcher); // AIO mAioDispatcher = mDispacherContainer.getAioDispatcher(); mAioStore = new AioStore(mAioDispatcher); // I2C mI2cDispatcher = mDispacherContainer.getI2cDispatcher(); mI2cStore = new I2cStore(mI2cDispatcher); // UART mUartDispatcher = mDispacherContainer.getUartDispatcher(); mUartStore = new UartStore(mUartDispatcher); // SPI mSpiDispatcher = mDispacherContainer.getSpiDispatcher(); mSpiStore = new SpiStore(mSpiDispatcher); mBletia = new Bletia(context); mConnectionHelper = new ConnectionHelper(mConnectionHelperCallback, context); mBletia.addListener(mCallbackHandler); } /** * konashiを見つける(konashiのみBLEデバイスリストに表示する) * @param activity BLEデバイスリストを表示する先のActivity */ public void find(Activity activity){ mConnectionHelper.find(activity, true, null); } /** * konashiを見つける * @param activity BLEデバイスリストを表示する先のActivity * @param isShowKonashiOnly konashiだけを表示するか、すべてのBLEデバイスを表示するか */ public void find(Activity activity, boolean isShowKonashiOnly){ mConnectionHelper.find(activity, isShowKonashiOnly, null); } /** * 名前を指定してkonashiを探索。 * @param activity BLEデバイスリストを表示する先のActivity * @param name konashiの緑色のチップに貼られているシールに書いている数字(例: konashi#0-1234) */ public void findWithName(Activity activity, String name){ mConnectionHelper.find(activity, true, name); } /** * 接続しているkonashiの名前を取得する * @return konashiの名前 */ public String getPeripheralName() { return (mDevice == null) ? 
"" : mDevice.getName(); } /** * konashiとの接続を解除する */ public void disconnect(){ mBletia.disconenct(); } /** * konashiと接続済みかどうか * @return konashiと接続済みだったらtrue */ public boolean isConnected(){ return (mBletia.getState() == BleState.CONNECTED) || isReady(); } /** * konashiを使える状態になっているか * @return konashiを使えるならtrue */ public boolean isReady(){ return mBletia.getState() == BleState.SERVICE_DISCOVERED; } /////////////////////////// // Observer /////////////////////////// /** * konashiのイベントのリスナーを追加する * @param listener 追加するリスナー */ public void addListener(KonashiListener listener) { mEmitter.add(listener); } /** * 指定したリスナーを削除する * @param listener 削除するリスナー */ public void removeListener(KonashiListener listener) { mEmitter.remove(listener); } /** * すべてのリスナーを削除する */ public void removeAllListeners() { mEmitter.clear(); } /////////////////////////// // PIO /////////////////////////// /** * PIOのピンを入力として使うか、出力として使うかの設定を行う * @param pin 設定するPIOのピン名。 * @param mode ピンに設定するモード。INPUT か OUTPUT が設定できます。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pinMode(int pin, int mode){ return execute(new PioPinModeAction(getKonashiService(), pin, mode, mPioStore.getModes()), mPioDispatcher); } /** * PIOのピンを入力として使うか、出力として使うかの設定を行う * @param modes PIO0 〜 PIO7 の計8ピンの設定 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pinModeAll(int modes){ return execute(new PioPinModeAction(getKonashiService(), modes), mPioDispatcher); } /** * PIOのピンをプルアップするかの設定を行う * @param pin 設定するPIOのピン名 * @param pullup ピンをプルアップするかの設定。PULLUP か NO_PULLS が設定できます。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pinPullup(int pin, int pullup){ return execute(new PioPinPullupAction(getKonashiService(), pin, pullup, mPioStore.getPullups()), mPioDispatcher); } /** * PIOのピンをプルアップするかの設定を行う * @param pullups PIO0 〜 PIO7 の計8ピンのプルアップの設定 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pinPullupAll(int pullups){ return execute(new PioPinPullupAction(getKonashiService(), pullups), mPioDispatcher); } /** * PIOの特定のピンの入力状態を取得する * @param pin PIOのピン名 * @return HIGH(1) もしくは LOW(0) */ public int digitalRead(int pin){ return mPioStore.getInput(pin); } /** * PIOのすべてのピンの状態を取得する * @return PIOの状態(PIO0〜PIO7の入力状態が8bit(1byte)で表現) */ public int digitalReadAll(){ return mPioStore.getInputs(); } /** * PIOの特定のピンの出力状態を設定する * @param pin 設定するPIOのピン名 * @param output 設定するPIOの出力状態。HIGH もしくは LOW が指定可能 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> digitalWrite(int pin, int output){ return execute(new PioDigitalWriteAction(getKonashiService(), pin, output, mPioStore.getOutputs()), mPioDispatcher); } /** * PIOの特定のピンの出力状態を設定する * @param outputs PIOの出力状態。PIO0〜PIO7の出力状態が8bit(1byte)で表現 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> digitalWriteAll(int outputs){ return execute(new PioDigitalWriteAction(getKonashiService(), outputs), mPioDispatcher); } /////////////////////////// // PWM /////////////////////////// /** * PIO の指定のピンを PWM として使用する/しないかを設定する * @param pin PWMモードの設定をするPIOのピン番号。Konashi.PIO0 〜 Konashi.PIO7。 * @param mode 設定するPWMのモード。Konashi.PWM_DISABLE, Konashi.PWM_ENABLE, Konashi.PWM_ENABLE_LED_MODE のいずれかをセットする。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmMode(final int pin, int mode){ Promise<BluetoothGattCharacteristic, BletiaException, Void> promise = execute(new PwmPinModeAction(getKonashiService(), pin, mode, mPwmStore.getModes())).then(mPwmDispatcher); if (mode == Konashi.PWM_ENABLE_LED_MODE) { promise.then(new 
DonePipe<BluetoothGattCharacteristic, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(BluetoothGattCharacteristic result) { return pwmPeriod(pin, Konashi.PWM_LED_PERIOD).then(mPwmDispatcher); } }).then(new DonePipe<BluetoothGattCharacteristic, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(BluetoothGattCharacteristic result) { return pwmLedDrive(pin, 0.0f).then(mPwmDispatcher); } }); } return promise; } /** * 指定のピンのPWM周期を設定する * @param pin PWMモードの設定をするPIOのピン番号。Konashi.PIO0 〜 Konashi.PIO7。 * @param period 周期。単位はマイクロ秒(us)で32bitで指定してください。最大2^(32)us = 71.5分。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmPeriod(int pin, int period){ return execute(new PwmPeriodAction(getKonashiService(), pin, period, mPwmStore.getDuty(pin))).then(mPwmDispatcher); } /** * 指定のピンのPWMのデューティ(ONになっている時間)を設定する。 * @param pin PWMモードの設定をするPIOのピン番号。Konashi.PIO0 〜 Konashi.PIO7。 * @param duty デューティ。単位はマイクロ秒(us)で32bitで指定してください。最大2^(32)us = 71.5分。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmDuty(int pin, int duty){ return execute(new PwmDutyAction(getKonashiService(), pin, duty, mPwmStore.getPeriod(pin))).then(mPwmDispatcher); } /** * 指定のピンのLEDの明るさを0%〜100%で指定する * @param pin PWMモードの設定をするPIOのピン番号。Konashi.PIO0 〜 Konashi.PIO7。 * @param dutyRatio LEDの明るさ。0.0F〜100.0F をしてしてください。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmLedDrive(int pin, float dutyRatio){ return execute(new PwmLedDriveAction(getKonashiService(), pin, dutyRatio, mPwmStore.getPeriod(pin))).then(mPwmDispatcher); } /** * pwmLedDrive(int pin, float dutyRatio) の doubleでdutyRatioを指定する版。 * @param pin PWMモードの設定をするPIOのピン番号。Konashi.PIO0 〜 Konashi.PIO7。 * @param dutyRatio LEDの明るさ。0.0〜100.0 をしてしてください。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmLedDrive(int pin, double dutyRatio){ return pwmLedDrive(pin, (float) dutyRatio); } /////////////////////////// // AIO /////////////////////////// /** * AIO の指定のピンの入力電圧を取得する * @param pin AIOのピン名。指定可能なピン名は AIO0, AIO1, AIO2 */ public Promise<Integer, BletiaException, Void> analogRead(final int pin) { return execute(new AioAnalogReadAction(getKonashiService(), pin)) .then(mAioDispatcher) .then(new AioAnalogReadFilter(pin)); } /** * AIO の指定のピンに任意の電圧を出力する * @param pin AIOのピン名。指定可能なピン名は AIO0, AIO1, AIO * @param milliVolt 設定する電圧をmVで指定。0〜1300を指定可能 */ // @Override // public void analogWrite(int pin, int milliVolt){ // if(!isEnableAccessKonashi()){ // notifyKonashiError(KonashiErrorReason.NOT_READY); // return; // } // // if(pin >= Konashi.AIO0 && pin <= Konashi.AIO2 && milliVolt >= 0 && milliVolt <= Konashi.ANALOG_REFERENCE){ // byte[] val = new byte[3]; // val[0] = (byte)pin; // val[1] = (byte)((milliVolt >> 8) & 0xFF); // val[2] = (byte)((milliVolt >> 0) & 0xFF); // // KonashiUtils.log("analogWrite pin: " + pin + ", value: " + milliVolt); // // addWriteMessage(KonashiUUID.ANALOG_DRIVE_UUID, val); // } else { // notifyKonashiError(KonashiErrorReason.INVALID_PARAMETER); // } // } /////////////////////////// // UART /////////////////////////// /** * UART の有効/無効を設定する * @param mode 設定するUARTのモード。Konashi.UART_DISABLE, Konashi.UART_ENABLE を指定 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> uartMode(int mode){ return execute(new UartModeAction(getKonashiService(), mode, mUartStore), mUartDispatcher); } /** * UART の通信速度を設定する * @param 
baudrate UARTの通信速度。Konashi.UART_RATE_2K4 か Konashi.UART_RATE_9K6 を指定 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> uartBaudrate(int baudrate){ return execute(new UartBaudrateAction(getKonashiService(), baudrate, mUartStore), mUartDispatcher); } /** * UART でデータを送信する * @param bytes 送信するデータ(byte[]) */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> uartWrite(byte[] bytes) { return execute(new UartWriteAction(getKonashiService(), bytes, mUartStore)); } /** * UART でデータを送信する * @param string 送信するデータ(string) */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> uartWrite(String string) { return execute(new UartWriteAction(getKonashiService(), string, mUartStore)); } /** * UART でデータを1バイト送信する * for konashi v1(old code) * @param data 送信するデータ */ // @Override // public void uartWrite(byte data){ // if(!isEnableAccessKonashi()){ // notifyKonashiError(KonashiErrorReason.NOT_READY); // return; // } // // if(mUartSetting==Konashi.UART_ENABLE){ // byte[] val = new byte[1]; // val[0] = data; // // addWriteMessage(KonashiUUID.UART_TX_UUID, val); // } else { // notifyKonashiError(KonashiErrorReason.NOT_ENABLED_UART); // } // } /////////////////////////// // I2C /////////////////////////// /** * I2Cのコンディションを発行する * @param condition コンディション。Konashi.I2C_START_CONDITION, Konashi.I2C_RESTART_CONDITION, Konashi.I2C_STOP_CONDITION を指定できる。 */ private Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cSendCondition(int condition) { return execute(new I2cSendConditionAction(getKonashiService(), condition, mI2cStore)); } private <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cSendConditionPipe(final int condition) { return new DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(D result) { return execute(new I2cSendConditionAction(getKonashiService(), condition, mI2cStore)); } }; } /** * I2Cを有効/無効を設定する * @param mode 設定するI2Cのモード。Konashi.I2C_DISABLE , Konashi.I2C_ENABLE, Konashi.I2C_ENABLE_100K, Konashi.I2C_ENABLE_400Kを指定。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cMode(int mode) { return execute(new I2cModeAction(getKonashiService(), mode, mI2cStore), mI2cDispatcher); } /** * I2Cのスタートコンディションを発行する */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cStartCondition() { return i2cSendCondition(Konashi.I2C_START_CONDITION); } /** * I2Cのスタートコンディションを発行する */ public <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cStartConditionPipe() { return i2cSendConditionPipe(Konashi.I2C_START_CONDITION); } /** * I2Cのリスタートコンディションを発行する */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cRestartCondition() { return i2cSendCondition(Konashi.I2C_RESTART_CONDITION); } /** * I2Cのリスタートコンディションを発行する */ public <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cRestartConditionPipe() { return i2cSendConditionPipe(Konashi.I2C_RESTART_CONDITION); } /** * I2Cのストップコンディションを発行する */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cStopCondition() { return i2cSendCondition(Konashi.I2C_STOP_CONDITION); } /** * I2Cのストップコンディションを発行する */ public <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cStopConditionPipe() { return i2cSendConditionPipe(Konashi.I2C_STOP_CONDITION); } /** * I2Cで指定したアドレスにデータを書き込む * @param length 書き込むデータ(byte)の長さ。最大 Konashi.I2C_DATA_MAX_LENGTH (19)byteまで * @param data 書き込むデータの配列 * @param address 
書き込み先アドレス */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cWrite(int length, byte[] data, byte address) { return execute(new I2cWriteAction(getKonashiService(), address, data, mI2cStore)); } /** * I2Cで指定したアドレスにデータを書き込む * @param length 書き込むデータ(byte)の長さ。最大 Konashi.I2C_DATA_MAX_LENGTH (19)byteまで * @param data 書き込むデータの配列 * @param address 書き込み先アドレス */ public <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cWritePipe(final int length, final byte[] data, final byte address) { return new DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(D result) { return i2cWrite(length, data, address); } }; } /** * I2Cで指定したアドレスからデータを読み込むリクエストを行う * @param length 読み込むデータの長さ。最大 Konashi.I2C_DATA_MAX_LENGTHs (19) * @param address 読み込み先のアドレス */ public Promise<byte[], BletiaException, Void> i2cRead(int length, byte address) { return execute(new I2cSetReadParamAction(getKonashiService(), length, address, mI2cStore)) .then(mI2cDispatcher) .then(new DonePipe<BluetoothGattCharacteristic, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(BluetoothGattCharacteristic result) { return execute(new I2cReadAction(getKonashiService())); } }) .then(new I2cReadFilter()); } /** * I2Cで指定したアドレスからデータを読み込むリクエストを行う * @param length 読み込むデータの長さ。最大 Konashi.I2C_DATA_MAX_LENGTHs (19) * @param address 読み込み先のアドレス */ public <D> DonePipe<D, byte[], BletiaException, Void> i2cReadPipe(final int length, final byte address) { return new DonePipe<D, byte[], BletiaException, Void>() { @Override public Promise<byte[], BletiaException, Void> pipeDone(D result) { return i2cRead(length, address); } }; } /////////////////////////// // SPI /////////////////////////// public Promise<BluetoothGattCharacteristic, BletiaException, Void> spiConfig(int mode, int byteOrder, int speed) { return execute(new SpiConfigAction(getKonashiService(), mode, byteOrder, speed, mSpiStore)) .then(mSpiDispatcher); } public Promise<BluetoothGattCharacteristic, BletiaException, Void> spiWrite(String data) { return execute(new SpiWriteAction(getKonashiService(), data)); } public Promise<BluetoothGattCharacteristic, BletiaException, Void> spiWrite(byte[] data) { return execute(new SpiWriteAction(getKonashiService(), data)); } public Promise<BluetoothGattCharacteristic, BletiaException, Void> spiRead() { return execute(new SpiReadAction(getKonashiService())); } /////////////////////////// // Hardware /////////////////////////// /** * konashiをリセットする */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> reset(){ return execute(new HardwareResetAction(getKonashiService())); } /** * konashi のバッテリ残量を取得 * @return 0 〜 100 のパーセント単位でバッテリ残量が返る */ public Promise<Integer, BletiaException, Void> getBatteryLevel(){ return execute(new BatteryLevelReadAction(getService(KonashiUUID.BATTERY_SERVICE_UUID))) .then(new BatteryLevelReadFilter()); } /** * konashi の電波強度を取得 * @return 電波強度(単位はdb) */ public Promise<Integer, BletiaException, Void> getSignalStrength() { return execute(new ReadRemoteRssiAction()); } private void connect(BluetoothDevice device){ mDevice = device; mBletia.connect(device); } private <T> Promise<T, BletiaException, Void> execute(Action<T, ?> action, DoneCallback<T> callback) { return execute(action).then(callback); } private <T> Promise<T, BletiaException, Void> execute(Action<T, ?> action) { return new 
AndroidDeferredManager().when(mBletia.execute(action)); } private BluetoothGattService getKonashiService() { return mBletia.getService(KonashiUUID.KONASHI_SERVICE_UUID); } private BluetoothGattService getService(UUID uuid) { return mBletia.getService(uuid); } private final ConnectionHelper.Callback mConnectionHelperCallback = new ConnectionHelper.Callback() { @Override public void onSelectBleDevice(BluetoothDevice device) { connect(device); } }; }
konashi-android-sdk/src/main/java/com/uxxu/konashi/lib/KonashiManager.java
package com.uxxu.konashi.lib; import android.app.Activity; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothGattCharacteristic; import android.bluetooth.BluetoothGattService; import android.content.Context; import com.uxxu.konashi.lib.action.AioAnalogReadAction; import com.uxxu.konashi.lib.action.BatteryLevelReadAction; import com.uxxu.konashi.lib.action.HardwareResetAction; import com.uxxu.konashi.lib.action.I2cModeAction; import com.uxxu.konashi.lib.action.I2cReadAction; import com.uxxu.konashi.lib.action.I2cSendConditionAction; import com.uxxu.konashi.lib.action.I2cSetReadParamAction; import com.uxxu.konashi.lib.action.I2cWriteAction; import com.uxxu.konashi.lib.action.PioDigitalWriteAction; import com.uxxu.konashi.lib.action.PioPinModeAction; import com.uxxu.konashi.lib.action.PioPinPullupAction; import com.uxxu.konashi.lib.action.PwmDutyAction; import com.uxxu.konashi.lib.action.PwmLedDriveAction; import com.uxxu.konashi.lib.action.PwmPeriodAction; import com.uxxu.konashi.lib.action.PwmPinModeAction; import com.uxxu.konashi.lib.action.UartBaudrateAction; import com.uxxu.konashi.lib.action.UartModeAction; import com.uxxu.konashi.lib.action.UartWriteAction; import com.uxxu.konashi.lib.dispatcher.AioStoreUpdater; import com.uxxu.konashi.lib.dispatcher.CharacteristicDispatcher; import com.uxxu.konashi.lib.dispatcher.DispatcherContainer; import com.uxxu.konashi.lib.dispatcher.I2cStoreUpdater; import com.uxxu.konashi.lib.dispatcher.PioStoreUpdater; import com.uxxu.konashi.lib.dispatcher.PwmStoreUpdater; import com.uxxu.konashi.lib.dispatcher.SpiStoreUpdater; import com.uxxu.konashi.lib.dispatcher.UartStoreUpdater; import com.uxxu.konashi.lib.filter.AioAnalogReadFilter; import com.uxxu.konashi.lib.filter.BatteryLevelReadFilter; import com.uxxu.konashi.lib.filter.I2cReadFilter; import com.uxxu.konashi.lib.store.AioStore; import com.uxxu.konashi.lib.store.I2cStore; import com.uxxu.konashi.lib.store.PioStore; import com.uxxu.konashi.lib.store.PwmStore; import com.uxxu.konashi.lib.store.SpiStore; import com.uxxu.konashi.lib.store.UartStore; import org.jdeferred.DoneCallback; import org.jdeferred.DonePipe; import org.jdeferred.Promise; import org.jdeferred.android.AndroidDeferredManager; import java.util.UUID; import info.izumin.android.bletia.BleState; import info.izumin.android.bletia.Bletia; import info.izumin.android.bletia.BletiaException; import info.izumin.android.bletia.action.Action; import info.izumin.android.bletia.action.ReadRemoteRssiAction; /** * konashiを管理するメインクラス * * @author monakaz, YUKAI Engineering * http://konashi.ux-xu.com * ======================================================================== * Copyright 2014 Yukai Engineering Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ public class KonashiManager { // konashi members // PIO private PioStore mPioStore; private CharacteristicDispatcher<PioStore, PioStoreUpdater> mPioDispatcher; // PWM private PwmStore mPwmStore; private CharacteristicDispatcher<PwmStore, PwmStoreUpdater> mPwmDispatcher; // AIO private AioStore mAioStore; private CharacteristicDispatcher<AioStore, AioStoreUpdater> mAioDispatcher; // I2C private I2cStore mI2cStore; private CharacteristicDispatcher<I2cStore, I2cStoreUpdater> mI2cDispatcher; // UART private UartStore mUartStore; private CharacteristicDispatcher<UartStore, UartStoreUpdater> mUartDispatcher; // SPI private SpiStore mSpiStore; private CharacteristicDispatcher<SpiStore, SpiStoreUpdater> mSpiDispatcher; private Bletia mBletia; private EventEmitter mEmitter; private CallbackHandler mCallbackHandler; private DispatcherContainer mDispacherContainer; private BluetoothDevice mDevice; private ConnectionHelper mConnectionHelper; /////////////////////////// // Initialization /////////////////////////// public KonashiManager(Context context) { mEmitter = new EventEmitter(); mDispacherContainer = new DispatcherContainer(); mCallbackHandler = new CallbackHandler(this, mEmitter, mDispacherContainer); // PIO mPioDispatcher = mDispacherContainer.getPioDispatcher(); mPioStore = new PioStore(mPioDispatcher); // PWM mPwmDispatcher = mDispacherContainer.getPwmDispatcher(); mPwmStore = new PwmStore(mPwmDispatcher); // AIO mAioDispatcher = mDispacherContainer.getAioDispatcher(); mAioStore = new AioStore(mAioDispatcher); // I2C mI2cDispatcher = mDispacherContainer.getI2cDispatcher(); mI2cStore = new I2cStore(mI2cDispatcher); // UART mUartDispatcher = mDispacherContainer.getUartDispatcher(); mUartStore = new UartStore(mUartDispatcher); // SPI mSpiDispatcher = mDispacherContainer.getSpiDispatcher(); mSpiStore = new SpiStore(mSpiDispatcher); mBletia = new Bletia(context); mConnectionHelper = new ConnectionHelper(mConnectionHelperCallback, context); mBletia.addListener(mCallbackHandler); } /** * konashiを見つける(konashiのみBLEデバイスリストに表示する) * @param activity BLEデバイスリストを表示する先のActivity */ public void find(Activity activity){ mConnectionHelper.find(activity, true, null); } /** * konashiを見つける * @param activity BLEデバイスリストを表示する先のActivity * @param isShowKonashiOnly konashiだけを表示するか、すべてのBLEデバイスを表示するか */ public void find(Activity activity, boolean isShowKonashiOnly){ mConnectionHelper.find(activity, isShowKonashiOnly, null); } /** * 名前を指定してkonashiを探索。 * @param activity BLEデバイスリストを表示する先のActivity * @param name konashiの緑色のチップに貼られているシールに書いている数字(例: konashi#0-1234) */ public void findWithName(Activity activity, String name){ mConnectionHelper.find(activity, true, name); } /** * 接続しているkonashiの名前を取得する * @return konashiの名前 */ public String getPeripheralName() { return (mDevice == null) ? 
"" : mDevice.getName(); } /** * konashiとの接続を解除する */ public void disconnect(){ mBletia.disconenct(); } /** * konashiと接続済みかどうか * @return konashiと接続済みだったらtrue */ public boolean isConnected(){ return (mBletia.getState() == BleState.CONNECTED) || isReady(); } /** * konashiを使える状態になっているか * @return konashiを使えるならtrue */ public boolean isReady(){ return mBletia.getState() == BleState.SERVICE_DISCOVERED; } /////////////////////////// // Observer /////////////////////////// /** * konashiのイベントのリスナーを追加する * @param listener 追加するリスナー */ public void addListener(KonashiListener listener) { mEmitter.add(listener); } /** * 指定したリスナーを削除する * @param listener 削除するリスナー */ public void removeListener(KonashiListener listener) { mEmitter.remove(listener); } /** * すべてのリスナーを削除する */ public void removeAllListeners() { mEmitter.clear(); } /////////////////////////// // PIO /////////////////////////// /** * PIOのピンを入力として使うか、出力として使うかの設定を行う * @param pin 設定するPIOのピン名。 * @param mode ピンに設定するモード。INPUT か OUTPUT が設定できます。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pinMode(int pin, int mode){ return execute(new PioPinModeAction(getKonashiService(), pin, mode, mPioStore.getModes()), mPioDispatcher); } /** * PIOのピンを入力として使うか、出力として使うかの設定を行う * @param modes PIO0 〜 PIO7 の計8ピンの設定 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pinModeAll(int modes){ return execute(new PioPinModeAction(getKonashiService(), modes), mPioDispatcher); } /** * PIOのピンをプルアップするかの設定を行う * @param pin 設定するPIOのピン名 * @param pullup ピンをプルアップするかの設定。PULLUP か NO_PULLS が設定できます。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pinPullup(int pin, int pullup){ return execute(new PioPinPullupAction(getKonashiService(), pin, pullup, mPioStore.getPullups()), mPioDispatcher); } /** * PIOのピンをプルアップするかの設定を行う * @param pullups PIO0 〜 PIO7 の計8ピンのプルアップの設定 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pinPullupAll(int pullups){ return execute(new PioPinPullupAction(getKonashiService(), pullups), mPioDispatcher); } /** * PIOの特定のピンの入力状態を取得する * @param pin PIOのピン名 * @return HIGH(1) もしくは LOW(0) */ public int digitalRead(int pin){ return mPioStore.getInput(pin); } /** * PIOのすべてのピンの状態を取得する * @return PIOの状態(PIO0〜PIO7の入力状態が8bit(1byte)で表現) */ public int digitalReadAll(){ return mPioStore.getInputs(); } /** * PIOの特定のピンの出力状態を設定する * @param pin 設定するPIOのピン名 * @param output 設定するPIOの出力状態。HIGH もしくは LOW が指定可能 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> digitalWrite(int pin, int output){ return execute(new PioDigitalWriteAction(getKonashiService(), pin, output, mPioStore.getOutputs()), mPioDispatcher); } /** * PIOの特定のピンの出力状態を設定する * @param outputs PIOの出力状態。PIO0〜PIO7の出力状態が8bit(1byte)で表現 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> digitalWriteAll(int outputs){ return execute(new PioDigitalWriteAction(getKonashiService(), outputs), mPioDispatcher); } /////////////////////////// // PWM /////////////////////////// /** * PIO の指定のピンを PWM として使用する/しないかを設定する * @param pin PWMモードの設定をするPIOのピン番号。Konashi.PIO0 〜 Konashi.PIO7。 * @param mode 設定するPWMのモード。Konashi.PWM_DISABLE, Konashi.PWM_ENABLE, Konashi.PWM_ENABLE_LED_MODE のいずれかをセットする。 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmMode(final int pin, int mode){ Promise<BluetoothGattCharacteristic, BletiaException, Void> promise = execute(new PwmPinModeAction(getKonashiService(), pin, mode, mPwmStore.getModes())).then(mPwmDispatcher); if (mode == Konashi.PWM_ENABLE_LED_MODE) { promise.then(new 
DonePipe<BluetoothGattCharacteristic, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(BluetoothGattCharacteristic result) { return pwmPeriod(pin, Konashi.PWM_LED_PERIOD).then(mPwmDispatcher); } }).then(new DonePipe<BluetoothGattCharacteristic, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(BluetoothGattCharacteristic result) { return pwmLedDrive(pin, 0.0f).then(mPwmDispatcher); } }); } return promise; } /** * Set the PWM period of the specified pin * @param pin The PIO pin number to configure for PWM mode. Konashi.PIO0 to Konashi.PIO7. * @param period The period. Specify it in microseconds (us) as a 32-bit value. Maximum 2^(32)us = 71.5 minutes. */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmPeriod(int pin, int period){ return execute(new PwmPeriodAction(getKonashiService(), pin, period, mPwmStore.getDuty(pin))).then(mPwmDispatcher); } /** * Set the PWM duty (the time the signal stays ON) of the specified pin. * @param pin The PIO pin number to configure for PWM mode. Konashi.PIO0 to Konashi.PIO7. * @param duty The duty. Specify it in microseconds (us) as a 32-bit value. Maximum 2^(32)us = 71.5 minutes. */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmDuty(int pin, int duty){ return execute(new PwmDutyAction(getKonashiService(), pin, duty, mPwmStore.getPeriod(pin))).then(mPwmDispatcher); } /** * Set the LED brightness of the specified pin from 0% to 100% * @param pin The PIO pin number to configure for PWM mode. Konashi.PIO0 to Konashi.PIO7. * @param dutyRatio The LED brightness. Specify 0.0F to 100.0F. */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmLedDrive(int pin, float dutyRatio){ return execute(new PwmLedDriveAction(getKonashiService(), pin, dutyRatio, mPwmStore.getPeriod(pin))).then(mPwmDispatcher); } /** * A variant of pwmLedDrive(int pin, float dutyRatio) that takes dutyRatio as a double. * @param pin The PIO pin number to configure for PWM mode. Konashi.PIO0 to Konashi.PIO7. * @param dutyRatio The LED brightness. Specify 0.0 to 100.0. */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> pwmLedDrive(int pin, double dutyRatio){ return pwmLedDrive(pin, (float) dutyRatio); }
/////////////////////////// // AIO /////////////////////////// /** * Get the input voltage of the specified AIO pin * @param pin The AIO pin name. Valid pin names are AIO0, AIO1, AIO2 */ public Promise<Integer, BletiaException, Void> analogRead(final int pin) { return execute(new AioAnalogReadAction(getKonashiService(), pin)) .then(mAioDispatcher) .then(new AioAnalogReadFilter(pin)); } /** * Output an arbitrary voltage on the specified AIO pin * @param pin The AIO pin name. Valid pin names are AIO0, AIO1, AIO2 * @param milliVolt The voltage to set, in mV. 0 to 1300 can be specified */ // @Override // public void analogWrite(int pin, int milliVolt){ // if(!isEnableAccessKonashi()){ // notifyKonashiError(KonashiErrorReason.NOT_READY); // return; // } // // if(pin >= Konashi.AIO0 && pin <= Konashi.AIO2 && milliVolt >= 0 && milliVolt <= Konashi.ANALOG_REFERENCE){ // byte[] val = new byte[3]; // val[0] = (byte)pin; // val[1] = (byte)((milliVolt >> 8) & 0xFF); // val[2] = (byte)((milliVolt >> 0) & 0xFF); // // KonashiUtils.log("analogWrite pin: " + pin + ", value: " + milliVolt); // // addWriteMessage(KonashiUUID.ANALOG_DRIVE_UUID, val); // } else { // notifyKonashiError(KonashiErrorReason.INVALID_PARAMETER); // } // }
/////////////////////////// // UART /////////////////////////// /** * Enable/disable UART * @param mode The UART mode to set. Specify Konashi.UART_DISABLE or Konashi.UART_ENABLE */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> uartMode(int mode){ return execute(new UartModeAction(getKonashiService(), mode, mUartStore), mUartDispatcher); } /** * Set the UART baud rate * @param baudrate The UART baud rate. Specify Konashi.UART_RATE_2K4 or Konashi.UART_RATE_9K6 */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> uartBaudrate(int baudrate){ return execute(new UartBaudrateAction(getKonashiService(), baudrate, mUartStore), mUartDispatcher); } /** * Send data over UART * @param bytes The data to send (byte[]) */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> uartWrite(byte[] bytes) { return execute(new UartWriteAction(getKonashiService(), bytes, mUartStore)); } /** * Send data over UART * @param string The data to send (string) */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> uartWrite(String string) { return execute(new UartWriteAction(getKonashiService(), string, mUartStore)); } /** * Send one byte of data over UART * for konashi v1(old code) * @param data The data to send */ // @Override // public void uartWrite(byte data){ // if(!isEnableAccessKonashi()){ // notifyKonashiError(KonashiErrorReason.NOT_READY); // return; // } // // if(mUartSetting==Konashi.UART_ENABLE){ // byte[] val = new byte[1]; // val[0] = data; // // addWriteMessage(KonashiUUID.UART_TX_UUID, val); // } else { // notifyKonashiError(KonashiErrorReason.NOT_ENABLED_UART); // } // }
/////////////////////////// // I2C /////////////////////////// /** * Issue an I2C condition * @param condition The condition. Konashi.I2C_START_CONDITION, Konashi.I2C_RESTART_CONDITION or Konashi.I2C_STOP_CONDITION can be specified. */ private Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cSendCondition(int condition) { return execute(new I2cSendConditionAction(getKonashiService(), condition, mI2cStore)); } private <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cSendConditionPipe(final int condition) { return new DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(D result) { return execute(new I2cSendConditionAction(getKonashiService(), condition, mI2cStore)); } }; } /** * Enable/disable I2C * @param mode The I2C mode to set. Specify Konashi.I2C_DISABLE, Konashi.I2C_ENABLE, Konashi.I2C_ENABLE_100K or Konashi.I2C_ENABLE_400K. */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cMode(int mode) { return execute(new I2cModeAction(getKonashiService(), mode, mI2cStore), mI2cDispatcher); } /** * Issue an I2C start condition */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cStartCondition() { return i2cSendCondition(Konashi.I2C_START_CONDITION); } /** * Issue an I2C start condition */ public <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cStartConditionPipe() { return i2cSendConditionPipe(Konashi.I2C_START_CONDITION); } /** * Issue an I2C restart condition */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cRestartCondition() { return i2cSendCondition(Konashi.I2C_RESTART_CONDITION); } /** * Issue an I2C restart condition */ public <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cRestartConditionPipe() { return i2cSendConditionPipe(Konashi.I2C_RESTART_CONDITION); } /** * Issue an I2C stop condition */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cStopCondition() { return i2cSendCondition(Konashi.I2C_STOP_CONDITION); } /** * Issue an I2C stop condition */ public <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cStopConditionPipe() { return i2cSendConditionPipe(Konashi.I2C_STOP_CONDITION); } /** * Write data to the specified I2C address * @param length The length of the data (bytes) to write. Up to Konashi.I2C_DATA_MAX_LENGTH (19) bytes * @param data The array of data to write * @param address The address to write to */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> i2cWrite(int length, byte[] data, byte address) { return execute(new I2cWriteAction(getKonashiService(), address, data, mI2cStore)); } /** * Write data to the specified I2C address * @param length The length of the data (bytes) to write. Up to Konashi.I2C_DATA_MAX_LENGTH (19) bytes * @param data The array of data to write * @param address The address to write to */ public <D> DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void> i2cWritePipe(final int length, final byte[] data, final byte address) { return new DonePipe<D, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(D result) { return i2cWrite(length, data, address); } }; } /** * Request to read data from the specified I2C address * @param length The length of the data to read. Up to Konashi.I2C_DATA_MAX_LENGTH (19) * @param address The address to read from */ public Promise<byte[], BletiaException, Void> i2cRead(int length, byte address) { return execute(new I2cSetReadParamAction(getKonashiService(), length, address, mI2cStore)) .then(mI2cDispatcher) .then(new DonePipe<BluetoothGattCharacteristic, BluetoothGattCharacteristic, BletiaException, Void>() { @Override public Promise<BluetoothGattCharacteristic, BletiaException, Void> pipeDone(BluetoothGattCharacteristic result) { return execute(new I2cReadAction(getKonashiService())); } }) .then(new I2cReadFilter()); } /** * Request to read data from the specified I2C address * @param length The length of the data to read. Up to Konashi.I2C_DATA_MAX_LENGTH (19) * @param address The address to read from */ public <D> DonePipe<D, byte[], BletiaException, Void> i2cReadPipe(final int length, final byte address) { return new DonePipe<D, byte[], BletiaException, Void>() { @Override public Promise<byte[], BletiaException, Void> pipeDone(D result) { return i2cRead(length, address); } }; }
/////////////////////////// // Hardware /////////////////////////// /** * Reset konashi */ public Promise<BluetoothGattCharacteristic, BletiaException, Void> reset(){ return execute(new HardwareResetAction(getKonashiService())); } /** * Get the remaining battery level of konashi * @return The battery level is returned as a percentage from 0 to 100 */ public Promise<Integer, BletiaException, Void> getBatteryLevel(){ return execute(new BatteryLevelReadAction(getService(KonashiUUID.BATTERY_SERVICE_UUID))) .then(new BatteryLevelReadFilter()); } /** * Get the signal strength of konashi * @return The signal strength (in dB) */ public Promise<Integer, BletiaException, Void> getSignalStrength() { return execute(new ReadRemoteRssiAction()); } private void connect(BluetoothDevice device){ mDevice = device; mBletia.connect(device); } private <T> Promise<T, BletiaException, Void> execute(Action<T, ?> action, DoneCallback<T> callback) { return execute(action).then(callback); } private <T> Promise<T, BletiaException, Void> execute(Action<T, ?> action) { return new AndroidDeferredManager().when(mBletia.execute(action)); } private BluetoothGattService getKonashiService() { return mBletia.getService(KonashiUUID.KONASHI_SERVICE_UUID); } private BluetoothGattService getService(UUID uuid) { return mBletia.getService(uuid); } private final ConnectionHelper.Callback mConnectionHelperCallback = new ConnectionHelper.Callback() { @Override public void onSelectBleDevice(BluetoothDevice device) { connect(device); } }; }
Add SPI APIs to KonashiManager
konashi-android-sdk/src/main/java/com/uxxu/konashi/lib/KonashiManager.java
Add SPI APIs to KonashiManager
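The record above adds promise-chained peripheral APIs (PWM, AIO, UART, I2C) to KonashiManager. As a point of reference only — this sketch is not part of the commit data — the snippet below shows how such an API might be called from application code; the method name dimLed, the pin choice and the 50% duty value are hypothetical, and it assumes a KonashiManager instance that is already connected, that the pin was previously switched into PWM LED mode, and the jdeferred callbacks (DoneCallback, FailCallback) already used throughout the class.

// Hypothetical usage sketch, not part of the original KonashiManager commit.
// Assumes: a connected KonashiManager named "konashi", imports of
// org.jdeferred.DoneCallback and org.jdeferred.FailCallback, and that PIO1
// has already been put into PWM LED mode.
void dimLed(KonashiManager konashi) {
    konashi.pwmLedDrive(Konashi.PIO1, 50.0f)   // request ~50% brightness on PIO1
            .then(new DoneCallback<BluetoothGattCharacteristic>() {
                @Override
                public void onDone(BluetoothGattCharacteristic result) {
                    // the PWM duty characteristic was written successfully
                }
            })
            .fail(new FailCallback<BletiaException>() {
                @Override
                public void onFail(BletiaException e) {
                    // a BLE-level failure surfaced as a BletiaException
                }
            });
}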
Java
apache-2.0
05bc569b1df5ea2648409eecb318d8e306f5f174
0
appium/appium,Sw0rdstream/appium,appium/appium,appium/appium,appium/appium,appium/appium,appium/appium
package io.appium.android.bootstrap; import java.util.ArrayList; import org.json.JSONArray; import org.json.JSONException; import com.android.uiautomator.core.UiSelector; // Constants from // https://android.googlesource.com/platform/frameworks/testing/+/master/uiautomator/library/core-src/com/android/uiautomator/core/UiSelector.java public class Dynamic { // static final int SELECTOR_NIL = 0; // nothing. /** text(String text) */ private static final int SELECTOR_TEXT = 1; /** textStartsWith(String text) */ private static final int SELECTOR_START_TEXT = 2; /** textContains(String text) */ private static final int SELECTOR_CONTAINS_TEXT = 3; /** className(String className), className(Class<T> type) */ private static final int SELECTOR_CLASS = 4; /** description(String desc) */ private static final int SELECTOR_DESCRIPTION = 5; /** descriptionStartsWith(String desc) */ private static final int SELECTOR_START_DESCRIPTION = 6; /** descriptionContains(String desc) */ private static final int SELECTOR_CONTAINS_DESCRIPTION = 7; /** index(final int index) */ private static final int SELECTOR_INDEX = 8; /** instance(final int instance) */ private static final int SELECTOR_INSTANCE = 9; /** enabled(boolean val) */ private static final int SELECTOR_ENABLED = 10; /** focused(boolean val) */ private static final int SELECTOR_FOCUSED = 11; /** focusable(boolean val) */ private static final int SELECTOR_FOCUSABLE = 12; /** scrollable(boolean val) */ private static final int SELECTOR_SCROLLABLE = 13; /** clickable(boolean val) */ private static final int SELECTOR_CLICKABLE = 14; /** checked(boolean val) */ private static final int SELECTOR_CHECKED = 15; /** selected(boolean val) */ private static final int SELECTOR_SELECTED = 16; // static final int SELECTOR_ID = 17; // nothing. /** packageName(String name) */ private static final int SELECTOR_PACKAGE_NAME = 18; // @formatter:off // private static final int SELECTOR_CHILD = 19; // childSelector(UiSelector selector) // private static final int SELECTOR_CONTAINER = 20; // containerSelector(UiSelector selector) // private static final int SELECTOR_PATTERN = 21; // ! private ! patternSelector(UiSelector selector) // private static final int SELECTOR_PARENT = 22; // fromParent(UiSelector selector) // private static final int SELECTOR_COUNT = 23; // nothing. // @formatter:on /** longClickable(boolean val) */ private static final int SELECTOR_LONG_CLICKABLE = 24; /** textMatches(String regex) */ private static final int SELECTOR_TEXT_REGEX = 25; /** classNameMatches(String regex) */ private static final int SELECTOR_CLASS_REGEX = 26; /** descriptionMatches(String regex) */ private static final int SELECTOR_DESCRIPTION_REGEX = 27; /** packageNameMatches(String regex) */ private static final int SELECTOR_PACKAGE_NAME_REGEX = 28; /** resourceId(String id) */ private static final int SELECTOR_RESOURCE_ID = 29; /** checkable(boolean val) */ private static final int SELECTOR_CHECKABLE = 30; /** resourceIdMatches(String regex) */ private static final int SELECTOR_RESOURCE_ID_REGEX = 31; // start internal methods at 100 /** * Gets name (content desc) with a fall back to text if name is empty. * * getStringAttribute("name") */ private static final int GET_NAME = 100; public static String finalize(final AndroidElement result, final int finalizer) throws Exception { // Invoke the int 100+ method on the resulting element. 
String value = ""; switch (finalizer) { case GET_NAME: value = result.getStringAttribute("name"); break; } return value; } public static ArrayList<String> finalize( final ArrayList<AndroidElement> elements, final int finalizer) throws Exception { final ArrayList<String> results = new ArrayList<String>(); for (final AndroidElement e : elements) { final String result = finalize(e, finalizer); Logger.debug("Adding: " + result); results.add(result); } return results; } private UiSelector s = new UiSelector(); public UiSelector get(final JSONArray array) throws JSONException { // Reset selector. s = new UiSelector(); // Example pair. // Find everything containing the text sign. // [ [3, 'sign'] ] for (int a = 0; a < array.length(); a++) { final JSONArray pair = array.getJSONArray(a); final int int0 = pair.getInt(0); if (int0 >= 100) { // 100+ are finalizers only. continue; } final Object param1 = pair.get(1); Logger.debug("Updating " + int0 + ", " + param1); update(int0, param1); } return s; } private void update(final int method, final Object param) { switch (method) { case SELECTOR_TEXT: s = s.text((String) param); break; case SELECTOR_START_TEXT: s = s.textStartsWith((String) param); break; case SELECTOR_CONTAINS_TEXT: s = s.textContains((String) param); break; case SELECTOR_CLASS: s = s.className((String) param); break; case SELECTOR_DESCRIPTION: s = s.description((String) param); break; case SELECTOR_START_DESCRIPTION: s = s.descriptionStartsWith((String) param); break; case SELECTOR_CONTAINS_DESCRIPTION: s = s.descriptionContains((String) param); break; case SELECTOR_INDEX: s = s.index((Integer) param); break; case SELECTOR_INSTANCE: s = s.instance((Integer) param); break; case SELECTOR_ENABLED: s = s.enabled((Boolean) param); break; case SELECTOR_FOCUSED: s = s.focused((Boolean) param); break; case SELECTOR_FOCUSABLE: s = s.focusable((Boolean) param); break; case SELECTOR_SCROLLABLE: s = s.scrollable((Boolean) param); break; case SELECTOR_CLICKABLE: s = s.clickable((Boolean) param); break; case SELECTOR_CHECKED: s = s.checked((Boolean) param); break; case SELECTOR_SELECTED: s = s.selected((Boolean) param); break; case SELECTOR_PACKAGE_NAME: s = s.packageName((String) param); break; case SELECTOR_LONG_CLICKABLE: s = s.longClickable((Boolean) param); break; case SELECTOR_TEXT_REGEX: s = s.textMatches((String) param); break; case SELECTOR_CLASS_REGEX: s = s.classNameMatches((String) param); break; case SELECTOR_DESCRIPTION_REGEX: s = s.descriptionMatches((String) param); break; case SELECTOR_PACKAGE_NAME_REGEX: s = s.packageNameMatches((String) param); break; case SELECTOR_RESOURCE_ID: s = s.resourceId((String) param); break; case SELECTOR_CHECKABLE: s = s.checkable((Boolean) param); break; case SELECTOR_RESOURCE_ID_REGEX: s = s.resourceIdMatches((String) param); break; } } }
lib/devices/android/bootstrap/src/io/appium/android/bootstrap/Dynamic.java
package io.appium.android.bootstrap; import java.util.ArrayList; import org.json.JSONArray; import org.json.JSONException; import com.android.uiautomator.core.UiSelector; // Constants from // https://android.googlesource.com/platform/frameworks/testing/+/master/uiautomator/library/src/com/android/uiautomator/core/UiSelector.java public class Dynamic { // static final int SELECTOR_NIL = 0; // nothing. /** text(String text) */ private static final int SELECTOR_TEXT = 1; /** textStartsWith(String text) */ private static final int SELECTOR_START_TEXT = 2; /** textContains(String text) */ private static final int SELECTOR_CONTAINS_TEXT = 3; /** className(String className), className(Class<T> type) */ private static final int SELECTOR_CLASS = 4; /** description(String desc) */ private static final int SELECTOR_DESCRIPTION = 5; /** descriptionStartsWith(String desc) */ private static final int SELECTOR_START_DESCRIPTION = 6; /** descriptionContains(String desc) */ private static final int SELECTOR_CONTAINS_DESCRIPTION = 7; /** index(final int index) */ private static final int SELECTOR_INDEX = 8; /** instance(final int instance) */ private static final int SELECTOR_INSTANCE = 9; /** enabled(boolean val) */ private static final int SELECTOR_ENABLED = 10; /** focused(boolean val) */ private static final int SELECTOR_FOCUSED = 11; /** focusable(boolean val) */ private static final int SELECTOR_FOCUSABLE = 12; /** scrollable(boolean val) */ private static final int SELECTOR_SCROLLABLE = 13; /** clickable(boolean val) */ private static final int SELECTOR_CLICKABLE = 14; /** checked(boolean val) */ private static final int SELECTOR_CHECKED = 15; /** selected(boolean val) */ private static final int SELECTOR_SELECTED = 16; // static final int SELECTOR_ID = 17; // nothing. /** packageName(String name) */ private static final int SELECTOR_PACKAGE_NAME = 18; // @formatter:off // private static final int SELECTOR_CHILD = 19; // childSelector(UiSelector selector) // private static final int SELECTOR_CONTAINER = 20; // containerSelector(UiSelector selector) // private static final int SELECTOR_PATTERN = 21; // ! private ! patternSelector(UiSelector selector) // private static final int SELECTOR_PARENT = 22; // fromParent(UiSelector selector) // private static final int SELECTOR_COUNT = 23; // nothing. // @formatter:on /** longClickable(boolean val) */ private static final int SELECTOR_LONG_CLICKABLE = 24; /** textMatches(String regex) */ private static final int SELECTOR_TEXT_REGEX = 25; /** classNameMatches(String regex) */ private static final int SELECTOR_CLASS_REGEX = 26; /** descriptionMatches(String regex) */ private static final int SELECTOR_DESCRIPTION_REGEX = 27; /** packageNameMatches(String regex) */ private static final int SELECTOR_PACKAGE_NAME_REGEX = 28; // start internal methods at 100 /** * Gets name (content desc) with a fall back to text if name is empty. * * getStringAttribute("name") */ private static final int GET_NAME = 100; public static String finalize(final AndroidElement result, final int finalizer) throws Exception { // Invoke the int 100+ method on the resulting element. 
String value = ""; switch (finalizer) { case GET_NAME: value = result.getStringAttribute("name"); break; } return value; } public static ArrayList<String> finalize( final ArrayList<AndroidElement> elements, final int finalizer) throws Exception { final ArrayList<String> results = new ArrayList<String>(); for (final AndroidElement e : elements) { final String result = finalize(e, finalizer); Logger.debug("Adding: " + result); results.add(result); } return results; } private UiSelector s = new UiSelector(); public UiSelector get(final JSONArray array) throws JSONException { // Reset selector. s = new UiSelector(); // Example pair. // Find everything containing the text sign. // [ [3, 'sign'] ] for (int a = 0; a < array.length(); a++) { final JSONArray pair = array.getJSONArray(a); final int int0 = pair.getInt(0); if (int0 >= 100) { // 100+ are finalizers only. continue; } final Object param1 = pair.get(1); Logger.debug("Updating " + int0 + ", " + param1); update(int0, param1); } return s; } private void update(final int method, final Object param) { switch (method) { case SELECTOR_TEXT: s = s.text((String) param); break; case SELECTOR_START_TEXT: s = s.textStartsWith((String) param); break; case SELECTOR_CONTAINS_TEXT: s = s.textContains((String) param); break; case SELECTOR_CLASS: s = s.className((String) param); break; case SELECTOR_DESCRIPTION: s = s.description((String) param); break; case SELECTOR_START_DESCRIPTION: s = s.descriptionStartsWith((String) param); break; case SELECTOR_CONTAINS_DESCRIPTION: s = s.descriptionContains((String) param); break; case SELECTOR_INDEX: s = s.index((Integer) param); break; case SELECTOR_INSTANCE: s = s.instance((Integer) param); break; case SELECTOR_ENABLED: s = s.enabled((Boolean) param); break; case SELECTOR_FOCUSED: s = s.focused((Boolean) param); break; case SELECTOR_FOCUSABLE: s = s.focusable((Boolean) param); break; case SELECTOR_SCROLLABLE: s = s.scrollable((Boolean) param); break; case SELECTOR_CLICKABLE: s = s.clickable((Boolean) param); break; case SELECTOR_CHECKED: s = s.checked((Boolean) param); break; case SELECTOR_SELECTED: s = s.selected((Boolean) param); break; case SELECTOR_PACKAGE_NAME: s = s.packageName((String) param); break; case SELECTOR_LONG_CLICKABLE: s = s.longClickable((Boolean) param); break; case SELECTOR_TEXT_REGEX: s = s.textMatches((String) param); break; case SELECTOR_CLASS_REGEX: s = s.classNameMatches((String) param); break; case SELECTOR_DESCRIPTION_REGEX: s = s.descriptionMatches((String) param); break; case SELECTOR_PACKAGE_NAME_REGEX: s = s.packageNameMatches((String) param); break; } } }
Update complex find with new uiautomator constants
lib/devices/android/bootstrap/src/io/appium/android/bootstrap/Dynamic.java
Update complex find with new uiautomator constants
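The Dynamic class in the record above turns an array of [selectorConstant, value] pairs into a UiSelector. As an illustration only — this snippet is not part of the commit data — the following shows how such a payload could be assembled and decoded on the bootstrap side; the values "sign" and ".*login.*" are arbitrary, and the constants (3 = textContains, 14 = clickable, 31 = resourceIdMatches) are taken directly from the listing above, 31 being one of the newly added uiautomator constants.

// Illustrative sketch, not part of the original commit. Assumes imports of
// org.json.JSONArray, org.json.JSONException and com.android.uiautomator.core.UiSelector.
JSONArray pairs = new JSONArray();
pairs.put(new JSONArray().put(3).put("sign"));        // SELECTOR_CONTAINS_TEXT     -> textContains("sign")
pairs.put(new JSONArray().put(14).put(true));         // SELECTOR_CLICKABLE         -> clickable(true)
pairs.put(new JSONArray().put(31).put(".*login.*"));  // SELECTOR_RESOURCE_ID_REGEX -> resourceIdMatches(".*login.*")

try {
    UiSelector selector = new Dynamic().get(pairs);   // builds the combined UiSelector
    // selector can now be used to locate a UiObject
} catch (JSONException e) {
    // malformed pair array
}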
Java
apache-2.0
4c0558893c2f0a5dd1caba17f8711fa1df364706
0
sdeleuze/reactor-core,reactor/reactor-core,sdeleuze/reactor-core,sdeleuze/reactor-core,sdeleuze/reactor-core
/* * Copyright (c) 2011-2016 Pivotal Software Inc, All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package reactor.core.publisher; import java.util.Iterator; import java.util.Queue; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; import org.reactivestreams.Subscription; import reactor.core.queue.QueueSupplier; import reactor.core.subscriber.BlockingIterable; import reactor.core.subscriber.ReactiveSession; import reactor.core.subscriber.SubscriberWithContext; import reactor.core.subscriber.Subscribers; import reactor.core.timer.Timer; import reactor.core.timer.Timers; import reactor.core.trait.Backpressurable; import reactor.core.trait.Connectable; import reactor.core.trait.Introspectable; import reactor.core.trait.Publishable; import reactor.core.util.Assert; import reactor.core.util.Logger; import reactor.core.util.PlatformDependent; import reactor.core.util.ReactiveStateUtils; import reactor.fn.BiConsumer; import reactor.fn.BiFunction; import reactor.fn.Consumer; import reactor.fn.Function; import reactor.fn.Supplier; import reactor.fn.tuple.Tuple; import reactor.fn.tuple.Tuple2; import reactor.fn.tuple.Tuple3; import reactor.fn.tuple.Tuple4; import reactor.fn.tuple.Tuple5; import reactor.fn.tuple.Tuple6; import reactor.fn.tuple.Tuple7; import reactor.fn.tuple.Tuple8; /** * A Reactive Streams {@link Publisher} with basic rx operators that emits 0 to N elements, and then completes * (successfully or with an error). * * <p> * <img width="640" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flux.png" alt=""> * <p> * * <p>It is intended to be used in implementations and return types. Input parameters should keep using raw * {@link Publisher} as much as possible. * * <p>If it is known that the underlying {@link Publisher} will emit 0 or 1 element, {@link Mono} should be used * instead. 
* * @author Sebastien Deleuze * @author Stephane Maldini * @see Mono * @since 2.5 */ public abstract class Flux<T> implements Publisher<T>, Introspectable { // ============================================================================================================== // Static Generators // ============================================================================================================== static final IdentityFunction IDENTITY_FUNCTION = new IdentityFunction(); static final Flux<?> EMPTY = Mono.empty() .flux(); /** * Select the fastest source who won the "ambiguous" race and emitted first onNext or onComplete or onError * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> * * @param sources The competing source publishers * @param <I> The source type of the data sequence * * @return a new Flux eventually subscribed to one of the sources or empty */ @SuppressWarnings({"unchecked", "varargs"}) @SafeVarargs public static <I> Flux<I> amb(Publisher<? extends I>... sources) { return new FluxAmb<>(sources); } /** * Select the fastest source who won the "ambiguous" race and emitted first onNext or onComplete or onError * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> * * @param sources The competing source publishers * @param <I> The source type of the data sequence * * @return a new Flux eventually subscribed to one of the sources or empty */ @SuppressWarnings("unchecked") public static <I> Flux<I> amb(Iterable<? extends Publisher<? extends I>> sources) { if (sources == null) { return empty(); } return new FluxAmb<>(sources); } /** * Concat all sources emitted as an onNext signal from a parent {@link Publisher}. * A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned {@link Publisher} which will stop listening if the main sequence has also completed. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concatinner.png" alt=""> * * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new Flux concatenating all inner sources sequences until complete or error */ public static <I> Flux<I> concat(Publisher<? extends Publisher<? extends I>> sources) { return new FluxFlatMap<>(sources, 1, 32); } /** * Concat all sources pulled from the supplied * {@link Iterator} on {@link Publisher#subscribe} from the passed {@link Iterable} until {@link Iterator#hasNext} * returns false. A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned Publisher. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new Flux concatenating all source sequences */ public static <I> Flux<I> concat(Iterable<? extends Publisher<? extends I>> sources) { return concat(fromIterable(sources)); } /** * Concat all sources pulled from the given {@link Publisher[]}. * A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned Publisher. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new Flux concatenating all source sequences */ @SafeVarargs @SuppressWarnings({"unchecked", "varargs"}) public static <I> Flux<I> concat(Publisher<? extends I>... sources) { if (sources == null || sources.length == 0) { return empty(); } if (sources.length == 1) { return from(sources[0]); } return concat(fromArray(sources)); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param <T> The type of the data sequence * * @return a new {@link Flux} */ public static <T> Flux<T> create(Consumer<SubscriberWithContext<T, Void>> requestConsumer) { return create(requestConsumer, null, null); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * The argument {@code contextFactory} is executed once by new subscriber to generate a context shared by every * request calls. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param contextFactory A {@link Function} called for every new subscriber returning an immutable context (IO * connection...) * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a new {@link Flux} */ public static <T, C> Flux<T> create(Consumer<SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory) { return create(requestConsumer, contextFactory, null); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * The argument {@code contextFactory} is executed once by new subscriber to generate a context shared by every * request calls. The argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, * onComplete, onError). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param contextFactory A {@link Function} called once for every new subscriber returning an immutable context (IO * connection...) 
* @param shutdownConsumer A {@link Consumer} called once everytime a subscriber terminates: cancel, onComplete(), * onError() * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a new {@link Flux} */ public static <T, C> Flux<T> create(final Consumer<SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory, Consumer<C> shutdownConsumer) { Assert.notNull(requestConsumer, "A data producer must be provided"); return new FluxGenerate.FluxForEach<>(requestConsumer, contextFactory, shutdownConsumer); } /** * Create a {@link Flux} that completes without emitting any item. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/empty.png" alt=""> * * @param <T> the reified type of the target {@link Subscriber} * * @return an empty {@link Flux} */ @SuppressWarnings("unchecked") public static <T> Flux<T> empty() { return (Flux<T>) EMPTY; } /** * Create a {@link Flux} that completes with the specified error. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/error.png" alt=""> * * @param error the error to signal to each {@link Subscriber} * @param <T> the reified type of the target {@link Subscriber} * * @return a new failed {@link Flux} */ public static <T> Flux<T> error(Throwable error) { return Mono.<T>error(error).flux(); } /** * Consume the passed * {@link Publisher} source and transform its sequence of T into a N sequences of V via the given {@link Function}. * The produced sequences {@link Publisher} will be merged back in the returned {@link Flux}. * The backpressure will apply using the provided bufferSize which will actively consume each sequence (and the * main one) and replenish its request cycle on a threshold free capacity. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmap.png" alt=""> * * @param source the source to flatten * @param mapper the function to transform the upstream sequence into N sub-sequences * @param concurrency the maximum alive transformations at a given time * @param bufferSize the bounded capacity for each individual merged sequence * @param <T> the source type * @param <V> the produced merged type * * @return a new merged {@link Flux} */ public static <T, V> Flux<V> flatMap(Publisher<? extends T> source, Function<? super T, ? extends Publisher<? extends V>> mapper, int concurrency, int bufferSize) { return new FluxFlatMap<>(source, mapper, concurrency, bufferSize); } /** * Expose the specified {@link Publisher} with the {@link Flux} API. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/from.png" alt=""> * * @param source the source to decorate * @param <T> the source sequence type * * @return a new {@link Flux} */ @SuppressWarnings("unchecked") public static <T> Flux<T> from(Publisher<? extends T> source) { if (source instanceof Flux) { return (Flux<T>) source; } if (source instanceof Supplier) { T t = ((Supplier<T>) source).get(); if (t != null) { return just(t); } } return new FluxBarrier<>(source); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Iterable}. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromarray.png" alt=""> * * @param array the {@link T[]} array to read data from * @param <T> the {@link Publisher} type to stream * * @return a new {@link Flux} */ public static <T> Flux<T> fromArray(T[] array) { if (array == null || array.length == 0) { return empty(); } if (array.length == 1) { return just(array[0]); } return new FluxArray<>(array); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Iterable}. * A new iterator will be created for each subscriber. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromiterable.png" alt=""> * * @param it the {@link Iterable} to read data from * @param <T> the {@link Iterable} type to stream * * @return a new {@link Flux} */ public static <T> Flux<T> fromIterable(Iterable<? extends T> it) { FluxGenerate.IterableSequencer<T> iterablePublisher = new FluxGenerate.IterableSequencer<>(it); return create(iterablePublisher, iterablePublisher); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Tuple}. * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromtuple.png" alt=""> * <p> * * @param tuple the {@link Tuple} to read data from * * @return a new {@link Flux} */ public static Flux<Object> fromTuple(Tuple tuple) { return fromArray(tuple.toArray()); } /** * Create a {@link Publisher} reacting on requests with the passed {@link BiConsumer}. The argument {@code * contextFactory} is executed once by new subscriber to generate a context shared by every request calls. The * argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, onComplete, * onError). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generate.png" alt=""> * * @param requestConsumer A {@link BiConsumer} with left argument request and right argument target subscriber * @param contextFactory A {@link Function} called once for every new subscriber returning an immutable context (IO * connection...) * @param shutdownConsumer A {@link Consumer} called once everytime a subscriber terminates: cancel, onComplete(), * onError() * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a fresh Reactive Flux publisher ready to be subscribed */ public static <T, C> Flux<T> generate(BiConsumer<Long, SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory, Consumer<C> shutdownConsumer) { return new FluxGenerate<>(new FluxGenerate.RecursiveConsumer<>(requestConsumer), contextFactory, shutdownConsumer); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N seconds on * the given timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * * @param seconds The number of seconds to wait before the next increment * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long seconds) { return interval(seconds, TimeUnit.SECONDS); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N period of time unit on * the global timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * * @param period The the time relative to given unit to wait before the next increment * @param unit The unit of time * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long period, TimeUnit unit) { return interval(period, unit, Timers.global()); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N period of time unit on * the given timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * * @param period The the time relative to given unit to wait before the next increment * @param unit The unit of time * @param timer a {@link Timer} instance * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long period, TimeUnit unit, Timer timer) { long timespan = TimeUnit.MILLISECONDS.convert(period, unit); Assert.isTrue(timespan >= timer.period(), "The delay " + period + "ms cannot be less than the timer resolution" + "" + timer.period() + "ms"); return new FluxInterval(timer, period, unit, period); } /** * Create a new {@link Flux} that emits the specified items and then complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/justn.png" alt=""> * * @param data the consecutive data objects to emit * @param <T> the emitted data type * * @return a new {@link Flux} */ @SafeVarargs @SuppressWarnings("varargs") public static <T> Flux<T> just(T... data) { return fromArray(data); } /** * Create a new {@link Flux} that will only emit the passed data then onComplete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/just.png" alt=""> * * @param data the unique data to emit * @param <T> the emitted data type * * @return a new {@link Flux} */ public static <T> Flux<T> just(T data) { return new FluxJust<>(data); } /** * Observe Reactive Streams signals matching the passed flags {@code options} and use {@link Logger} support to * handle trace * implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. 
* * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError: * <pre> * flux.log("category", Level.INFO, Logger.ON_NEXT | LOGGER.ON_ERROR) * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * @param source the source {@link Publisher} to log * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * @param options a flag option that can be mapped with {@link Logger#ON_NEXT} etc. * * @param <T> the {@link Subscriber} type target * * @return a logged {@link Flux} */ public static <T> Flux<T> log(Publisher<T> source, String category, Level level, int options) { return new FluxLog<>(source, category, level, options); } /** * Create a {@link Flux} that will transform all signals into a target type. OnError will be transformed into * completion signal after its mapping callback has been applied. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/mapsignal.png" alt=""> * * @param source the source {@link Publisher} to map * @param mapperOnNext the {@link Function} to call on next data and returning the target transformed data * @param mapperOnError the {@link Function} to call on error signal and returning the target transformed data * @param mapperOnComplete the {@link Function} to call on complete signal and returning the target transformed data * @param <T> the input publisher type * @param <V> the output {@link Publisher} type target * * @return a new {@link Flux} */ public static <T, V> Flux<V> mapSignal(Publisher<T> source, Function<? super T, ? extends V> mapperOnNext, Function<Throwable, ? extends V> mapperOnError, Supplier<? extends V> mapperOnComplete) { return new FluxMapSignal<>(source, mapperOnNext, mapperOnError, mapperOnComplete); } /** * Merge emitted {@link Publisher} sequences by the passed {@link Publisher} into an interleaved merged sequence. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/mergeinner.png" alt=""> * * @param source a {@link Publisher} of {@link Publisher} sequence to merge * @param <T> the merged type * * @return a merged {@link Flux} */ public static <T> Flux<T> merge(Publisher<? extends Publisher<? extends T>> source) { return new FluxFlatMap<>(source, PlatformDependent.SMALL_BUFFER_SIZE, 32); } /** * Merge emitted {@link Publisher} sequences from the passed {@link Iterable} into an interleaved merged sequence. * {@link Iterable#iterator()} will be called for each {@link Publisher#subscribe}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * * @param sources the {@link Iterable} to lazily iterate on {@link Publisher#subscribe(Subscriber)} * @param <I> The source type of the data sequence * * @return a fresh Reactive Flux publisher ready to be subscribed */ public static <I> Flux<I> merge(Iterable<? extends Publisher<? extends I>> sources) { return merge(fromIterable(sources)); } /** * Merge emitted {@link Publisher} sequences from the passed {@link Publisher[]} into an interleaved merged * sequence. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * * @param sources the {@link Publisher[]} to iterate on {@link Publisher#subscribe(Subscriber)} * @param <I> The source type of the data sequence * * @return a fresh Reactive Flux publisher ready to be subscribed */ @SafeVarargs @SuppressWarnings({"unchecked", "varargs"}) public static <I> Flux<I> merge(Publisher<? extends I>... sources) { if (sources == null || sources.length == 0) { return empty(); } if (sources.length == 1) { return from(sources[0]); } return merge(fromArray(sources)); } /** * Create a {@link Flux} that will never signal any data, error or completion signal. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/never.png" alt=""> * * @param <T> the {@link Subscriber} type target * * @return a never completing {@link Flux} */ public static <T> Flux<T> never() { return FluxNever.instance(); } /** * Create a {@link Flux} that will fallback to the produced {@link Publisher} given an onError signal. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorresumewith.png" alt=""> * * @param <T> the {@link Subscriber} type target * * @return a resilient {@link Flux} */ public static <T> Flux<T> onErrorResumeWith( Publisher<? extends T> source, Function<Throwable, ? extends Publisher<? extends T>> fallback) { return new FluxResume<>(source, fallback); } /** * Create a {@link Flux} reacting on subscribe with the passed {@link Consumer}. The argument {@code * sessionConsumer} is executed once by new subscriber to generate a {@link ReactiveSession} context ready to accept * signals. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/yield.png" alt=""> * * @param sessionConsumer A {@link Consumer} called once everytime a subscriber subscribes * @param <T> The type of the data sequence * * @return a fresh Reactive Flux publisher ready to be subscribed */ public static <T> Flux<T> yield(Consumer<? super ReactiveSession<T>> sessionConsumer) { return new FluxYieldingSession<>(sessionConsumer); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <O> The produced output after transformation by {@param combinator} * * @return a zipped {@link Flux} */ public static <T1, T2, O> Flux<O> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, final BiFunction<? super T1, ? super T2, ? 
extends O> combinator) { return new FluxZip<>(new Publisher[]{source1, source2}, new Function<Tuple2<T1, T2>, O>() { @Override public O apply(Tuple2<T1, T2> tuple) { return combinator.apply(tuple.getT1(), tuple.getT2()); } }, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static <T1, T2> Flux<Tuple2<T1, T2>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2) { return new FluxZip<>(new Publisher[]{source1, source2}, (Function<Tuple2<T1, T2>, Tuple2<T1, T2>>) IDENTITY_FUNCTION, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <V> The produced output after transformation by {@param combinator} * * @return a zipped {@link Flux} */ public static <T1, T2, T3, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Function<Tuple3<T1, T2, T3>, ? extends V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3}, combinator, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 */ @SuppressWarnings("unchecked") public static <T1, T2, T3> Flux<Tuple3<T1, T2, T3>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? 
extends T3> source3) { return zip(IDENTITY_FUNCTION, source1, source2, source3); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <V> The produced output after transformation by {@param combinator} * * @return a {@link Flux} based on the produced value */ public static <T1, T2, T3, T4, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Function<Tuple4<T1, T2, T3, T4>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4}, combinator, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4> Flux<Tuple4<T1, T2, T3, T4>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4) { return zip(IDENTITY_FUNCTION, source1, source2, source3, source4); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. 
* @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <V> The produced output after transformation by {@param combinator} * * @return a {@link Flux} based on the produced value */ public static <T1, T2, T3, T4, T5, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Function<Tuple5<T1, T2, T3, T4, T5>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4, source5}, combinator, PlatformDependent .XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5> Flux<Tuple5<T1, T2, T3, T4, T5>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5) { return zip(IDENTITY_FUNCTION, source1, source2, source3, source4, source5); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. 
* @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 * @param <V> The produced output after transformation by {@param combinator} * * @return a {@link Flux} based on the produced value */ public static <T1, T2, T3, T4, T5, T6, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6, Function<Tuple6<T1, T2, T3, T4, T5, T6>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4, source5, source6}, combinator, PlatformDependent .XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6> Flux<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6) { return zip(IDENTITY_FUNCTION, source1, source2, source3, source4, source5, source6); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. * @param source7 The seventh upstream {@link Publisher} to subscribe to. 
* @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 * @param <T7> type of the value from source7 * @param <V> combined type */ public static <T1, T2, T3, T4, T5, T6, T7, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6, Publisher<? extends T7> source7, Function<Tuple7<T1, T2, T3, T4, T5, T6, T7>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4, source5, source6, source7}, combinator, PlatformDependent .XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. * @param source7 The seventh upstream {@link Publisher} to subscribe to. * @param source8 The eigth upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 * @param <T7> type of the value from source7 * @param <T8> type of the value from source8 * @param <V> combined type */ public static <T1, T2, T3, T4, T5, T6, T7, T8, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6, Publisher<? extends T7> source7, Publisher<? extends T8> source8, Function<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4, source5, source6, source7, source8}, combinator, PlatformDependent .XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * of the most recent items emitted by each source until any of them completes. Errors will immediately be * forwarded. * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)} * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static Flux<Tuple> zip(Iterable<? extends Publisher<?>> sources) { return zip(sources, IDENTITY_FUNCTION); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}. * * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)} * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <O> the combined produced type * * @return a zipped {@link Flux} */ public static <O> Flux<O> zip(Iterable<? extends Publisher<?>> sources, final Function<Tuple, ? extends O> combinator) { if (sources == null) { return empty(); } return new FluxZip<>(sources, combinator, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param combinator The aggregate function that will receive a unique value from each upstream and return the * value to signal downstream * @param sources the {@link Publisher[]} to iterate on {@link Publisher#subscribe(Subscriber)} * @param <O> the combined produced type * * @return a zipped {@link Flux} */ @SafeVarargs @SuppressWarnings("varargs") public static <I, O> Flux<O> zip( final Function<? super Tuple, ? extends O> combinator, Publisher<? extends I>... sources) { if (sources == null) { return empty(); } return new FluxZip<>(sources, combinator, PlatformDependent.XS_BUFFER_SIZE); } // ============================================================================================================== // Instance Operators // ============================================================================================================== protected Flux() { } /** * Immediately apply the given transformation to this Flux in order to generate a target {@link Publisher} type. * * {@code flux.as(Mono::from).subscribe(Subscribers.unbounded()) } * * @param transformer the {@link Function} to immediately map this {@link Flux} into a target {@link Publisher} * instance. * @param <P> the returned {@link Publisher} sequence type * * @return a new {@link Flux} */ public final <V, P extends Publisher<V>> P as(Function<? super Flux<T>, P> transformer) { return transformer.apply(this); } /** * Return a {@code Mono<Void>} that completes when this {@link Flux} completes. * This will actively ignore the sequence and only replay completion or error signals. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/after.png" alt=""> * * @return a new {@link Mono} */ @SuppressWarnings("unchecked") public final Mono<Void> after() { return (Mono<Void>)new MonoIgnoreElements<>(this); } /** * Emit from the fastest sequence between this publisher and the given publisher (the first to signal) * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * * @param other the {@link Publisher} to race with * * @return the fastest sequence */ public final Flux<T> ambWith(Publisher<? extends T> other) { return amb(this, other); } /** * Hint the {@link Subscriber} to this {@link Flux} that a preferred available capacity should be used. * {@link #toIterable()} can for instance introspect this value to supply an appropriate queueing strategy. * * @param capacity the maximum capacity (in flight onNext) the returned {@link Publisher} should expose * * @return a bounded {@link Flux} */ public final Flux<T> capacity(long capacity) { return new FluxBounded<>(this, capacity); } /** * Like {@link #flatMap(Function)}, but concatenate emissions instead of merging (no interleave). * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concatmap.png" alt=""> * * @param mapper the function to transform this sequence of T into concatenated sequences of R * @param <R> the produced concatenated type * * @return a new {@link Flux} */ public final <R> Flux<R> concatMap(Function<? super T, ? extends Publisher<? extends R>> mapper) { return new FluxFlatMap<>(this, mapper, 1, PlatformDependent.XS_BUFFER_SIZE); } /** * Concatenate emissions of this {@link Flux} with the provided {@link Publisher} (no interleave). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * * @param other the {@link Publisher} sequence to concat after this {@link Flux} * * @return a new {@link Flux} */ public final Flux<T> concatWith(Publisher<? extends T> other) { return concat(this, other); } /** * Introspect this Flux graph * * @return {@link ReactiveStateUtils.Graph} representation of a publisher graph */ public final ReactiveStateUtils.Graph debug() { return ReactiveStateUtils.scan(this); } /** * Provide a default unique value if this sequence is completed without any data * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/defaultifempty.png" alt=""> * * @param defaultV the alternate value if this sequence is empty * * @return a new {@link Flux} */ public final Flux<T> defaultIfEmpty(T defaultV) { return new FluxSwitchIfEmpty<>(this, just(defaultV)); } /** * Run onSubscribe, request, cancel, onNext, onComplete and onError on a supplied * {@link ProcessorGroup#dispatchOn} reference {@link org.reactivestreams.Processor}. * * <p> * Typically used for fast publisher, slow consumer(s) scenarios. * It naturally combines with {@link Processors#singleGroup} and {@link Processors#asyncGroup} which implement * fast async event loops. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dispatchon.png" alt=""> * * {@code flux.dispatchOn(Processors.queue()).subscribe(Subscribers.unbounded()) } * * @param group a {@link ProcessorGroup} pool * * @return a {@link Flux} consuming asynchronously */ @SuppressWarnings("unchecked") public final Flux<T> dispatchOn(ProcessorGroup group) { return new FluxProcessorGroup<>(this, false, ((ProcessorGroup<T>) group)); } /** * Triggered after the {@link Flux} terminates, either by completing downstream successfully or with an error. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doafterterminate.png" alt=""> * * @param afterTerminate the {@link Runnable} to run after the sequence terminates * * @return a new {@link Flux} */ public final Flux<T> doAfterTerminate(Runnable afterTerminate) { return new FluxPeek<>(this, null, null, null, afterTerminate, null, null, null); } /** * Triggered when the {@link Flux} is cancelled. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dooncancel.png" alt=""> * * @param onCancel the {@link Runnable} to run on cancel * * @return a new {@link Flux} */ public final Flux<T> doOnCancel(Runnable onCancel) { return new FluxPeek<>(this, null, null, null, null, null, null, onCancel); } /** * Triggered when the {@link Flux} completes successfully. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dooncomplete.png" alt=""> * * @param onComplete the {@link Runnable} to run on completion * * @return a new {@link Flux} */ public final Flux<T> doOnComplete(Runnable onComplete) { return new FluxPeek<>(this, null, null, null, onComplete, null, null, null); } /** * Triggered when the {@link Flux} completes with an error. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonerror.png" alt=""> * * @param onError the {@link Consumer} to call on each error signal * * @return a new {@link Flux} */ public final Flux<T> doOnError(Consumer<? super Throwable> onError) { return new FluxPeek<>(this, null, null, onError, null, null, null, null); } /** * Triggered when the {@link Flux} emits an item. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonnext.png" alt=""> * * @param onNext the {@link Consumer} to call on each next signal * * @return a new {@link Flux} */ public final Flux<T> doOnNext(Consumer<? super T> onNext) { return new FluxPeek<>(this, null, onNext, null, null, null, null, null); } /** * Triggered when the {@link Flux} is subscribed. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonsubscribe.png" alt=""> * * @param onSubscribe the {@link Consumer} to call on each {@link Subscription} * * @return a new {@link Flux} */ public final Flux<T> doOnSubscribe(Consumer<? super Subscription> onSubscribe) { return new FluxPeek<>(this, onSubscribe, null, null, null, null, null, null); } /** * Triggered when the {@link Flux} terminates, either by completing successfully or with an error. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonterminate.png" alt=""> * * @param onTerminate the {@link Runnable} to run on any terminal signal * * @return a new {@link Flux} */ public final Flux<T> doOnTerminate(Runnable onTerminate) { return new FluxPeek<>(this, null, null, null, null, onTerminate, null, null); } /** * Transform the items emitted by this {@link Flux} into Publishers, then flatten the emissions from those by * merging them into a single {@link Flux}, so that they may interleave. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmap.png" alt=""> * * @param mapper the {@link Function} to transform each item into a {@link Publisher} to merge * @param <R> the merged sequence type * * @return a new {@link Flux} */ public final <R> Flux<R> flatMap(Function<? super T, ? extends Publisher<? extends R>> mapper) { return new FluxFlatMap<>(this, mapper, PlatformDependent.SMALL_BUFFER_SIZE, 32); } /** * Transform the signals emitted by this {@link Flux} into Publishers, then flatten the emissions from those by * merging them into a single {@link Flux}, so that they may interleave. * OnError will be transformed into completion signal after its mapping callback has been applied. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmaps.png" alt=""> * * @param mapperOnNext the {@link Function} to call on next data and returning a sequence to merge * @param mapperOnError the {@link Function} to call on error signal and returning a sequence to merge * @param mapperOnComplete the {@link Function} to call on complete signal and returning a sequence to merge * @param <R> the output {@link Publisher} type target * * @return a new {@link Flux} */ @SuppressWarnings("unchecked") public final <R> Flux<R> flatMap(Function<? super T, ? extends Publisher<? extends R>> mapperOnNext, Function<Throwable, ? extends Publisher<? extends R>> mapperOnError, Supplier<? extends Publisher<? extends R>> mapperOnComplete) { return new FluxFlatMap<>( new FluxMapSignal<>(this, mapperOnNext, mapperOnError, mapperOnComplete), IDENTITY_FUNCTION, PlatformDependent.SMALL_BUFFER_SIZE, 32); } @Override public String getName() { return getClass().getName() .replace(Flux.class.getSimpleName(), ""); } @Override public int getMode() { return FACTORY; } /** * Create a {@link Flux} intercepting all source signals with the returned Subscriber that might choose to pass them * along to the provided Subscriber (given to the returned {@code subscribe(Subscriber)}). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/lift.png" alt=""> * * @param operator the {@link Function} mapping a downstream {@link Subscriber} to a new {@link Subscriber} subscribed to the source * @param <R> the output sequence type * * @return a new {@link Flux} */ public final <R> Flux<R> lift(Function<Subscriber<? super R>, Subscriber<? super T>> operator) { return new FluxLift<>(this, operator); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use {@link Level#INFO} and java.util.logging. If SLF4J is available, it will be used instead. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * The default log category will be "reactor.core.publisher.FluxLog". 
* * @return a new {@link Flux} */ public final Flux<T> log() { return log(null, Level.INFO, Logger.ALL); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use {@link Level#INFO} and java.util.logging. If SLF4J is available, it will be used instead. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * * @return a new {@link Flux} */ public final Flux<T> log(String category) { return log(category, Level.INFO, Logger.ALL); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * * @return a new {@link Flux} */ public final Flux<T> log(String category, Level level) { return log(category, level, Logger.ALL); } /** * Observe Reactive Streams signals matching the passed flags {@code options} and use {@link Logger} support to * handle trace * implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. * * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError: * <pre> * flux.log("category", Level.INFO, Logger.ON_NEXT | Logger.ON_ERROR) * </pre> * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * @param options a flag option that can be mapped with {@link Logger#ON_NEXT} etc. * * @return a new {@link Flux} */ public final Flux<T> log(String category, Level level, int options) { return new FluxLog<>(this, category, level, options); } /** * Transform the items emitted by this {@link Flux} by applying a function to each item. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/map.png" alt=""> * * @param mapper the transforming {@link Function} to apply on each item * @param <R> the transformed type * * @return a new {@link Flux} */ public final <R> Flux<R> map(Function<? super T, ? extends R> mapper) { return new FluxMap<>(this, mapper); } /** * Merge emissions of this {@link Flux} with the provided {@link Publisher}, so that they may interleave. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * * @param other the {@link Publisher} to merge with * * @return a new {@link Flux} */ public final Flux<T> mergeWith(Publisher<? extends T> other) { return merge(just(this, other)); } /** * Emit only the first item emitted by this {@link Flux}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/next.png" alt=""> * * If the sequence emits more than one item, an {@link ArrayIndexOutOfBoundsException} is signalled. 
* * @return a new {@link Mono} */ public final Mono<T> next() { return new MonoNext<>(this); } /** * Subscribe to a returned fallback publisher when any error occurs. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorresumewith.png" alt=""> * * @param fallback the {@link Function} mapping the error to a new {@link Publisher} sequence * * @return a new {@link Flux} */ public final Flux<T> onErrorResumeWith(Function<Throwable, ? extends Publisher<? extends T>> fallback) { return new FluxResume<>(this, fallback); } /** * Fallback to the given value if an error is observed on this {@link Flux} * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorreturn.png" alt=""> * * @param fallbackValue alternate value on fallback * * @return a new {@link Flux} */ public final Flux<T> onErrorReturn(final T fallbackValue) { return switchOnError(just(fallbackValue)); } /** * * A chaining {@link Publisher#subscribe(Subscriber)} alternative to inline composition type conversion to a hot * emitter (e.g. reactor FluxProcessor Broadcaster and Promise or rxjava Subject). * * {@code flux.subscribeWith(Processors.queue()).subscribe(Subscribers.unbounded()) } * * @param subscriber the {@link Subscriber} to subscribe and return * @param <E> the reified type from the input/output subscriber * * @return the passed {@link Subscriber} */ public final <E extends Subscriber<? super T>> E subscribeWith(E subscriber) { subscribe(subscriber); return subscriber; } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable() { return toIterable(this instanceof Backpressurable ? ((Backpressurable) this).getCapacity() : Long.MAX_VALUE ); } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable(long batchSize) { return toIterable(batchSize, null); } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable(final long batchSize, Supplier<Queue<T>> queueProvider) { final Supplier<Queue<T>> provider; if(queueProvider == null){ provider = QueueSupplier.get(batchSize); } else{ provider = queueProvider; } return new BlockingIterable<>(this, batchSize, provider); } /** * Run subscribe, onSubscribe and request on a supplied * {@link ProcessorGroup#publishOn} reference {@link org.reactivestreams.Processor}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/publishon.png" alt=""> * * <p> * Typically used for slow publisher e.g., blocking IO, fast consumer(s) scenarios. * It naturally combines with {@link Processors#ioGroup} which implements work-queue thread dispatching. 
* * <p> * {@code flux.publishOn(Processors.queue()).subscribe(Subscribers.unbounded()) } * * @param group a {@link ProcessorGroup} pool * * @return a {@link Flux} publishing asynchronously */ @SuppressWarnings("unchecked") public final Flux<T> publishOn(ProcessorGroup group) { return new FluxProcessorGroup<>(this, true, ((ProcessorGroup<T>) group)); } /** * Subscribe to the given fallback {@link Publisher} if an error is observed on this {@link Flux} * * @param fallback the alternate {@link Publisher} * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchonerror.png" alt=""> * * @return a new {@link Flux} */ public final Flux<T> switchOnError(final Publisher<? extends T> fallback) { return onErrorResumeWith(FluxResume.create(fallback)); } /** * Provide an alternative if this sequence is completed without any data * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchifempty.png" alt=""> * * @param alternate the alternate publisher if this sequence is empty * * @return a new {@link Flux} */ public final Flux<T> switchIfEmpty(Publisher<? extends T> alternate) { return new FluxSwitchIfEmpty<>(this, alternate); } /** * Start the chain and request unbounded demand. */ public final void subscribe() { subscribe(Subscribers.unbounded()); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source2 The second upstream {@link Publisher} to subscribe to. * @param <R> type of the value from source2 * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public final <R> Flux<Tuple2<T, R>> zipWith(Publisher<? extends R> source2) { return new FluxZip<>(new Publisher[]{this, source2}, IDENTITY_FUNCTION, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator from the most recent items emitted by each source until any of them * completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source2 The second upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <R> type of the value from source2 * @param <V> The produced output after transformation by {@param combinator} * * @return a zipped {@link Flux} */ public final <R, V> Flux<V> zipWith(Publisher<? extends R> source2, final BiFunction<? super T, ? super R, ? 
extends V> combinator) { return new FluxZip<>(new Publisher[]{this, source2}, new Function<Tuple2<T, R>, V>() { @Override public V apply(Tuple2<T, R> tuple) { return combinator.apply(tuple.getT1(), tuple.getT2()); } }, PlatformDependent.XS_BUFFER_SIZE); } // ============================================================================================================== // Containers // ============================================================================================================== /** * A marker interface for components responsible for augmenting subscribers with features like {@link #lift} * * @param <I> Upstream type * @param <O> Downstream type */ public interface Operator<I, O> extends Function<Subscriber<? super O>, Subscriber<? super I>> { } /** * A connecting Flux Publisher (right-to-left from a composition chain perspective) * * @param <I> Upstream type * @param <O> Downstream type */ public static class FluxBarrier<I, O> extends Flux<O> implements Backpressurable, Publishable { protected final Publisher<? extends I> source; public FluxBarrier(Publisher<? extends I> source) { this.source = source; } @Override public long getCapacity() { return Backpressurable.class.isAssignableFrom(source.getClass()) ? ((Backpressurable) source).getCapacity() : Long.MAX_VALUE; } @Override public long getPending() { return -1L; } /** * Default is delegating and decorating with Flux API */ @Override @SuppressWarnings("unchecked") public void subscribe(Subscriber<? super O> s) { source.subscribe((Subscriber<? super I>) s); } @Override public String toString() { return "{" + " operator : \"" + getName() + "\" " + '}'; } @Override public final Publisher<? extends I> upstream() { return source; } } /** * Decorate a Flux with a capacity for downstream accessors * * @param <I> */ final static class FluxBounded<I> extends FluxBarrier<I, I> { final private long capacity; public FluxBounded(Publisher<I> source, long capacity) { super(source); this.capacity = capacity; } @Override public long getCapacity() { return capacity; } @Override public String getName() { return "Bounded"; } @Override public void subscribe(Subscriber<? super I> s) { source.subscribe(s); } } static final class FluxProcessorGroup<I> extends FluxBarrier<I, I> implements Connectable { private final ProcessorGroup<I> processor; private final boolean publishOn; public FluxProcessorGroup(Publisher<? extends I> source, boolean publishOn, ProcessorGroup<I> processor) { super(source); this.processor = processor; this.publishOn = publishOn; } @Override public void subscribe(Subscriber<? super I> s) { if(publishOn) { processor.publishOn(source) .subscribe(s); } else{ processor.dispatchOn(source) .subscribe(s); } } @Override public Object connectedInput() { return processor; } @Override public Object connectedOutput() { return processor; } } /** * i -> i */ static final class IdentityFunction implements Function { @Override public Object apply(Object o) { return o; } } }
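// Editorial usage sketch, not part of the original Flux.java contents above: a minimal, hedged
// example of the instance operators documented in that file (map, zipWith, log, toIterable).
// The class name FluxUsageSketch is hypothetical; only the Flux, reactor.fn.Function and Tuple2
// types shown in the file are assumed.
import reactor.core.publisher.Flux;
import reactor.fn.Function;
import reactor.fn.tuple.Tuple2;

public class FluxUsageSketch {

    public static void main(String[] args) {
        Flux<String> words = Flux.just("reactor", "core", "flux");

        // map transforms each item; the anonymous reactor.fn.Function mirrors the style used in Flux itself.
        Flux<Integer> lengths = words.map(new Function<String, Integer>() {
            @Override
            public Integer apply(String word) {
                return word.length();
            }
        });

        // zipWith pairs this sequence step by step with another one, log traces the signals,
        // and toIterable exposes the result as a lazy blocking Iterable.
        for (Tuple2<Integer, String> pair : lengths.zipWith(words).log("sketch").toIterable()) {
            System.out.println(pair.getT2() + " has length " + pair.getT1());
        }
    }
}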
src/main/java/reactor/core/publisher/Flux.java
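// Editorial usage sketch, not part of the file contents recorded above or below: a hedged
// illustration of the static Flux.zip variant that takes an explicit combinator, as documented
// in Flux.java. The class name FluxZipSketch is hypothetical; the anonymous reactor.fn.BiFunction
// mirrors the style used inside Flux itself.
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import reactor.fn.BiFunction;

public class FluxZipSketch {

    public static void main(String[] args) {
        Publisher<Integer> quantities = Flux.just(1, 2, 3);
        Publisher<String> labels = Flux.just("apples", "pears", "plums");

        // Each pair of most recent items is combined into a single String value.
        Flux<String> lines = Flux.zip(quantities, labels, new BiFunction<Integer, String, String>() {
            @Override
            public String apply(Integer quantity, String label) {
                return quantity + " x " + label;
            }
        });

        // Start the chain with unbounded demand, as described by subscribe() in the docs above.
        lines.subscribe();
    }
}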
/* * Copyright (c) 2011-2016 Pivotal Software Inc, All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package reactor.core.publisher; import java.util.Iterator; import java.util.Queue; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; import org.reactivestreams.Subscription; import reactor.core.queue.QueueSupplier; import reactor.core.subscriber.BlockingIterable; import reactor.core.subscriber.ReactiveSession; import reactor.core.subscriber.SubscriberWithContext; import reactor.core.subscriber.Subscribers; import reactor.core.timer.Timer; import reactor.core.timer.Timers; import reactor.core.trait.Backpressurable; import reactor.core.trait.Connectable; import reactor.core.trait.Introspectable; import reactor.core.trait.Publishable; import reactor.core.util.Assert; import reactor.core.util.Logger; import reactor.core.util.PlatformDependent; import reactor.core.util.ReactiveStateUtils; import reactor.fn.BiConsumer; import reactor.fn.BiFunction; import reactor.fn.Consumer; import reactor.fn.Function; import reactor.fn.Supplier; import reactor.fn.tuple.Tuple; import reactor.fn.tuple.Tuple2; import reactor.fn.tuple.Tuple3; import reactor.fn.tuple.Tuple4; import reactor.fn.tuple.Tuple5; import reactor.fn.tuple.Tuple6; import reactor.fn.tuple.Tuple7; import reactor.fn.tuple.Tuple8; /** * A Reactive Streams {@link Publisher} with basic rx operators that emits 0 to N elements, and then completes * (successfully or with an error). * * <p> * <img width="640" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flux.png" alt=""> * <p> * * <p>It is intended to be used in implementations and return types. Input parameters should keep using raw * {@link Publisher} as much as possible. * * <p>If it is known that the underlying {@link Publisher} will emit 0 or 1 element, {@link Mono} should be used * instead. 
* * @author Sebastien Deleuze * @author Stephane Maldini * @see Mono * @since 2.5 */ public abstract class Flux<T> implements Publisher<T>, Introspectable { // ============================================================================================================== // Static Generators // ============================================================================================================== static final IdentityFunction IDENTITY_FUNCTION = new IdentityFunction(); static final Flux<?> EMPTY = Mono.empty() .flux(); /** * Select the fastest source who won the "ambiguous" race and emitted first onNext or onComplete or onError * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> * * @param sources The competing source publishers * @param <I> The source type of the data sequence * * @return a new Flux eventually subscribed to one of the sources or empty */ @SuppressWarnings({"unchecked", "varargs"}) @SafeVarargs public static <I> Flux<I> amb(Publisher<? extends I>... sources) { return new FluxAmb<>(sources); } /** * Select the fastest source who won the "ambiguous" race and emitted first onNext or onComplete or onError * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> * * @param sources The competing source publishers * @param <I> The source type of the data sequence * * @return a new Flux eventually subscribed to one of the sources or empty */ @SuppressWarnings("unchecked") public static <I> Flux<I> amb(Iterable<? extends Publisher<? extends I>> sources) { if (sources == null) { return empty(); } return new FluxAmb<>(sources); } /** * Concat all sources emitted as an onNext signal from a parent {@link Publisher}. * A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned {@link Publisher} which will stop listening if the main sequence has also completed. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concatinner.png" alt=""> * * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new Flux concatenating all inner sources sequences until complete or error */ public static <I> Flux<I> concat(Publisher<? extends Publisher<? extends I>> sources) { return new FluxFlatMap<>(sources, 1, 32); } /** * Concat all sources pulled from the supplied * {@link Iterator} on {@link Publisher#subscribe} from the passed {@link Iterable} until {@link Iterator#hasNext} * returns false. A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned Publisher. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new Flux concatenating all source sequences */ public static <I> Flux<I> concat(Iterable<? extends Publisher<? extends I>> sources) { return concat(fromIterable(sources)); } /** * Concat all sources pulled from the given {@link Publisher[]}. * A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned Publisher. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new Flux concatenating all source sequences */ @SafeVarargs @SuppressWarnings({"unchecked", "varargs"}) public static <I> Flux<I> concat(Publisher<? extends I>... sources) { if (sources == null || sources.length == 0) { return empty(); } if (sources.length == 1) { return from(sources[0]); } return concat(fromArray(sources)); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param <T> The type of the data sequence * * @return a new {@link Flux} */ public static <T> Flux<T> create(Consumer<SubscriberWithContext<T, Void>> requestConsumer) { return create(requestConsumer, null, null); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * The argument {@code contextFactory} is executed once by new subscriber to generate a context shared by every * request calls. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param contextFactory A {@link Function} called for every new subscriber returning an immutable context (IO * connection...) * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a new {@link Flux} */ public static <T, C> Flux<T> create(Consumer<SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory) { return create(requestConsumer, contextFactory, null); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * The argument {@code contextFactory} is executed once by new subscriber to generate a context shared by every * request calls. The argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, * onComplete, onError). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param contextFactory A {@link Function} called once for every new subscriber returning an immutable context (IO * connection...) 
* @param shutdownConsumer A {@link Consumer} called once everytime a subscriber terminates: cancel, onComplete(), * onError() * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a new {@link Flux} */ public static <T, C> Flux<T> create(final Consumer<SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory, Consumer<C> shutdownConsumer) { Assert.notNull(requestConsumer, "A data producer must be provided"); return new FluxGenerate.FluxForEach<>(requestConsumer, contextFactory, shutdownConsumer); } /** * Create a {@link Flux} that completes without emitting any item. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/empty.png" alt=""> * * @param <T> the reified type of the target {@link Subscriber} * * @return an empty {@link Flux} */ @SuppressWarnings("unchecked") public static <T> Flux<T> empty() { return (Flux<T>) EMPTY; } /** * Create a {@link Flux} that completes with the specified error. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/error.png" alt=""> * * @param error the error to signal to each {@link Subscriber} * @param <T> the reified type of the target {@link Subscriber} * * @return a new failed {@link Flux} */ public static <T> Flux<T> error(Throwable error) { return Mono.<T>error(error).flux(); } /** * Consume the passed * {@link Publisher} source and transform its sequence of T into a N sequences of V via the given {@link Function}. * The produced sequences {@link Publisher} will be merged back in the returned {@link Flux}. * The backpressure will apply using the provided bufferSize which will actively consume each sequence (and the * main one) and replenish its request cycle on a threshold free capacity. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmap.png" alt=""> * * @param source the source to flatten * @param mapper the function to transform the upstream sequence into N sub-sequences * @param concurrency the maximum alive transformations at a given time * @param bufferSize the bounded capacity for each individual merged sequence * @param <T> the source type * @param <V> the produced merged type * * @return a new merged {@link Flux} */ public static <T, V> Flux<V> flatMap(Publisher<? extends T> source, Function<? super T, ? extends Publisher<? extends V>> mapper, int concurrency, int bufferSize) { return new FluxFlatMap<>(source, mapper, concurrency, bufferSize); } /** * Expose the specified {@link Publisher} with the {@link Flux} API. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/from.png" alt=""> * * @param source the source to decorate * @param <T> the source sequence type * * @return a new {@link Flux} */ @SuppressWarnings("unchecked") public static <T> Flux<T> from(Publisher<? extends T> source) { if (source instanceof Flux) { return (Flux<T>) source; } if (source instanceof Supplier) { T t = ((Supplier<T>) source).get(); if (t != null) { return just(t); } } return new FluxBarrier<>(source); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Iterable}. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromarray.png" alt=""> * * @param array the {@link T[]} array to read data from * @param <T> the {@link Publisher} type to stream * * @return a new {@link Flux} */ public static <T> Flux<T> fromArray(T[] array) { if (array == null || array.length == 0) { return empty(); } if (array.length == 1) { return just(array[0]); } return new FluxArray<>(array); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Iterable}. * A new iterator will be created for each subscriber. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromiterable.png" alt=""> * * @param it the {@link Iterable} to read data from * @param <T> the {@link Iterable} type to stream * * @return a new {@link Flux} */ public static <T> Flux<T> fromIterable(Iterable<? extends T> it) { FluxGenerate.IterableSequencer<T> iterablePublisher = new FluxGenerate.IterableSequencer<>(it); return create(iterablePublisher, iterablePublisher); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Tuple}. * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromtuple.png" alt=""> * <p> * * @param tuple the {@link Tuple} to read data from * * @return a new {@link Flux} */ public static Flux<Object> fromTuple(Tuple tuple) { return fromArray(tuple.toArray()); } /** * Create a {@link Publisher} reacting on requests with the passed {@link BiConsumer}. The argument {@code * contextFactory} is executed once by new subscriber to generate a context shared by every request calls. The * argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, onComplete, * onError). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generate.png" alt=""> * * @param requestConsumer A {@link BiConsumer} with left argument request and right argument target subscriber * @param contextFactory A {@link Function} called once for every new subscriber returning an immutable context (IO * connection...) * @param shutdownConsumer A {@link Consumer} called once everytime a subscriber terminates: cancel, onComplete(), * onError() * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a fresh Reactive Flux publisher ready to be subscribed */ public static <T, C> Flux<T> generate(BiConsumer<Long, SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory, Consumer<C> shutdownConsumer) { return new FluxGenerate<>(new FluxGenerate.RecursiveConsumer<>(requestConsumer), contextFactory, shutdownConsumer); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N seconds on * the given timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * * @param seconds The number of seconds to wait before the next increment * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long seconds) { return interval(seconds, TimeUnit.SECONDS); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N period of time unit on * the global timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * * @param period The time relative to the given unit to wait before the next increment * @param unit The unit of time * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long period, TimeUnit unit) { return interval(period, unit, Timers.global()); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N period of time unit on * the given timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * * @param period The time relative to the given unit to wait before the next increment * @param unit The unit of time * @param timer a {@link Timer} instance * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long period, TimeUnit unit, Timer timer) { long timespan = TimeUnit.MILLISECONDS.convert(period, unit); Assert.isTrue(timespan >= timer.period(), "The delay " + period + "ms cannot be less than the timer resolution" + "" + timer.period() + "ms"); return new FluxInterval(timer, period, unit, period); } /** * Create a new {@link Flux} that emits the specified items and then completes. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/justn.png" alt=""> * * @param data the consecutive data objects to emit * @param <T> the emitted data type * * @return a new {@link Flux} */ @SafeVarargs @SuppressWarnings("varargs") public static <T> Flux<T> just(T... data) { return fromArray(data); } /** * Create a new {@link Flux} that will only emit the passed data then onComplete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/just.png" alt=""> * * @param data the unique data to emit * @param <T> the emitted data type * * @return a new {@link Flux} */ public static <T> Flux<T> just(T data) { return new FluxJust<>(data); } /** * Observe Reactive Streams signals matching the passed flags {@code options} and use {@link Logger} support to * handle trace * implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. 
* * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError: * <pre> * flux.log("category", Level.INFO, Logger.ON_NEXT | LOGGER.ON_ERROR) * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * @param source the source {@link Publisher} to log * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * @param options a flag option that can be mapped with {@link Logger#ON_NEXT} etc. * * @param <T> the {@link Subscriber} type target * * @return a logged {@link Flux} */ public static <T> Flux<T> log(Publisher<T> source, String category, Level level, int options) { return new FluxLog<>(source, category, level, options); } /** * Create a {@link Flux} that will transform all signals into a target type. OnError will be transformed into * completion signal after its mapping callback has been applied. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/mapsignal.png" alt=""> * * @param source the source {@link Publisher} to map * @param mapperOnNext the {@link Function} to call on next data and returning the target transformed data * @param mapperOnError the {@link Function} to call on error signal and returning the target transformed data * @param mapperOnComplete the {@link Function} to call on complete signal and returning the target transformed data * @param <T> the input publisher type * @param <V> the output {@link Publisher} type target * * @return a new {@link Flux} */ public static <T, V> Flux<V> mapSignal(Publisher<T> source, Function<? super T, ? extends V> mapperOnNext, Function<Throwable, ? extends V> mapperOnError, Supplier<? extends V> mapperOnComplete) { return new FluxMapSignal<>(source, mapperOnNext, mapperOnError, mapperOnComplete); } /** * Merge emitted {@link Publisher} sequences by the passed {@link Publisher} into an interleaved merged sequence. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/mergeinner.png" alt=""> * * @param source a {@link Publisher} of {@link Publisher} sequence to merge * @param <T> the merged type * * @return a merged {@link Flux} */ public static <T> Flux<T> merge(Publisher<? extends Publisher<? extends T>> source) { return new FluxFlatMap<>(source, PlatformDependent.SMALL_BUFFER_SIZE, 32); } /** * Merge emitted {@link Publisher} sequences from the passed {@link Iterable} into an interleaved merged sequence. * {@link Iterable#iterator()} will be called for each {@link Publisher#subscribe}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * * @param sources the {@link Iterable} to lazily iterate on {@link Publisher#subscribe(Subscriber)} * @param <I> The source type of the data sequence * * @return a fresh Reactive Flux publisher ready to be subscribed */ public static <I> Flux<I> merge(Iterable<? extends Publisher<? extends I>> sources) { return merge(fromIterable(sources)); } /** * Merge emitted {@link Publisher} sequences from the passed {@link Publisher[]} into an interleaved merged * sequence. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * * @param sources the {@link Publisher[]} to iterate on {@link Publisher#subscribe(Subscriber)} * @param <I> The source type of the data sequence * * @return a fresh Reactive Flux publisher ready to be subscribed */ @SafeVarargs @SuppressWarnings({"unchecked", "varargs"}) public static <I> Flux<I> merge(Publisher<? extends I>... sources) { if (sources == null || sources.length == 0) { return empty(); } if (sources.length == 1) { return from(sources[0]); } return merge(fromArray(sources)); } /** * Create a {@link Flux} that will never signal any data, error or completion signal. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/never.png" alt=""> * * @param <T> the {@link Subscriber} type target * * @return a never completing {@link Flux} */ public static <T> Flux<T> never() { return FluxNever.instance(); } /** * Create a {@link Flux} that will fallback to the produced {@link Publisher} given an onError signal. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorresumewith.png" alt=""> * * @param <T> the {@link Subscriber} type target * * @return a resilient {@link Flux} */ public static <T> Flux<T> onErrorResumeWith( Publisher<? extends T> source, Function<Throwable, ? extends Publisher<? extends T>> fallback) { return new FluxResume<>(source, fallback); } /** * Create a {@link Flux} reacting on subscribe with the passed {@link Consumer}. The argument {@code * sessionConsumer} is executed once by new subscriber to generate a {@link ReactiveSession} context ready to accept * signals. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/yield.png" alt=""> * * @param sessionConsumer A {@link Consumer} called once everytime a subscriber subscribes * @param <T> The type of the data sequence * * @return a fresh Reactive Flux publisher ready to be subscribed */ public static <T> Flux<T> yield(Consumer<? super ReactiveSession<T>> sessionConsumer) { return new FluxYieldingSession<>(sessionConsumer); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <O> The produced output after transformation by {@param combinator} * * @return a zipped {@link Flux} */ public static <T1, T2, O> Flux<O> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, final BiFunction<? super T1, ? super T2, ? 
extends O> combinator) { return new FluxZip<>(new Publisher[]{source1, source2}, new Function<Tuple2<T1, T2>, O>() { @Override public O apply(Tuple2<T1, T2> tuple) { return combinator.apply(tuple.getT1(), tuple.getT2()); } }, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static <T1, T2> Flux<Tuple2<T1, T2>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2) { return new FluxZip<>(new Publisher[]{source1, source2}, (Function<Tuple2<T1, T2>, Tuple2<T1, T2>>) IDENTITY_FUNCTION, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <V> The produced output after transformation by {@param combinator} * * @return a zipped {@link Flux} */ public static <T1, T2, T3, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Function<Tuple3<T1, T2, T3>, ? extends V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3}, combinator, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 */ @SuppressWarnings("unchecked") public static <T1, T2, T3> Flux<Tuple3<T1, T2, T3>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? 
extends T3> source3) { return zip(IDENTITY_FUNCTION, source1, source2, source3); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <V> The produced output after transformation by {@param combinator} * * @return a {@link Flux} based on the produced value */ public static <T1, T2, T3, T4, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Function<Tuple4<T1, T2, T3, T4>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4}, combinator, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4> Flux<Tuple4<T1, T2, T3, T4>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4) { return zip(IDENTITY_FUNCTION, source1, source2, source3, source4); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. 
* @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <V> The produced output after transformation by {@param combinator} * * @return a {@link Flux} based on the produced value */ public static <T1, T2, T3, T4, T5, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Function<Tuple5<T1, T2, T3, T4, T5>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4, source5}, combinator, PlatformDependent .XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5> Flux<Tuple5<T1, T2, T3, T4, T5>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5) { return zip(IDENTITY_FUNCTION, source1, source2, source3, source4, source5); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. 
* @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 * @param <V> The produced output after transformation by {@param combinator} * * @return a {@link Flux} based on the produced value */ public static <T1, T2, T3, T4, T5, T6, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6, Function<Tuple6<T1, T2, T3, T4, T5, T6>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4, source5, source6}, combinator, PlatformDependent .XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6> Flux<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6) { return zip(IDENTITY_FUNCTION, source1, source2, source3, source4, source5, source6); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. * @param source7 The seventh upstream {@link Publisher} to subscribe to. 
* @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 * @param <T7> type of the value from source7 * @param <V> combined type */ public static <T1, T2, T3, T4, T5, T6, T7, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6, Publisher<? extends T7> source7, Function<Tuple7<T1, T2, T3, T4, T5, T6, T7>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4, source5, source6, source7}, combinator, PlatformDependent .XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. * @param source7 The seventh upstream {@link Publisher} to subscribe to. * @param source8 The eigth upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 * @param <T7> type of the value from source7 * @param <T8> type of the value from source8 * @param <V> combined type */ public static <T1, T2, T3, T4, T5, T6, T7, T8, V> Flux<V> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6, Publisher<? extends T7> source7, Publisher<? extends T8> source8, Function<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>, V> combinator) { return new FluxZip<>(new Publisher[]{source1, source2, source3, source4, source5, source6, source7, source8}, combinator, PlatformDependent .XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * of the most recent items emitted by each source until any of them completes. Errors will immediately be * forwarded. * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)} * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static Flux<Tuple> zip(Iterable<? extends Publisher<?>> sources) { return zip(sources, IDENTITY_FUNCTION); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}. * * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)} * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <O> the combined produced type * * @return a zipped {@link Flux} */ public static <O> Flux<O> zip(Iterable<? extends Publisher<?>> sources, final Function<Tuple, ? extends O> combinator) { if (sources == null) { return empty(); } return new FluxZip<>(sources, combinator, PlatformDependent.XS_BUFFER_SIZE); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param combinator The aggregate function that will receive a unique value from each upstream and return the * value to signal downstream * @param sources the {@link Publisher[]} to iterate on {@link Publisher#subscribe(Subscriber)} * @param <O> the combined produced type * * @return a zipped {@link Flux} */ @SafeVarargs @SuppressWarnings("varargs") public static <I, O> Flux<O> zip( final Function<? super Tuple, ? extends O> combinator, Publisher<? extends I>... sources) { if (sources == null) { return empty(); } return new FluxZip<>(sources, combinator, PlatformDependent.XS_BUFFER_SIZE); } // ============================================================================================================== // Instance Operators // ============================================================================================================== protected Flux() { } /** * Immediately apply the given transformation to this Flux in order to generate a target {@link Publisher} type. * * {@code flux.as(Mono::from).subscribe(Subscribers.unbounded()) } * * @param transformer the {@link Function} to immediately map this {@link Flux} into a target {@link Publisher} * instance. * @param <P> the returned {@link Publisher} sequence type * * @return a new {@link Flux} */ public final <V, P extends Publisher<V>> P as(Function<? super Flux<T>, P> transformer) { return transformer.apply(this); } /** * Return a {@code Mono<Void>} that completes when this {@link Flux} completes. * This will actively ignore the sequence and only replay completion or error signals. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/after.png" alt=""> * * @return a new {@link Mono} */ @SuppressWarnings("unchecked") public final Mono<Void> after() { return (Mono<Void>)new MonoIgnoreElements<>(this); } /** * Emit from the fastest first sequence between this publisher and the given publisher * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * * @param other the {@link Publisher} to race with * * @return the fastest sequence */ public final Flux<T> ambWith(Publisher<? extends T> other) { return amb(this, other); } /** * Hint {@link Subscriber} to this {@link Flux} a preferred available capacity should be used. * {@link #toIterable()} can for instance use introspect this value to supply an appropriate queueing strategy. * * @param capacity the maximum capacity (in flight onNext) the return {@link Publisher} should expose * * @return a bounded {@link Flux} */ public final Flux<T> capacity(long capacity) { return new FluxBounded<>(this, capacity); } /** * Like {@link #flatMap(Function)}, but concatenate emissions instead of merging (no interleave). * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concatmap.png" alt=""> * * @param mapper the function to transform this sequence of T into concated sequences of R * @param <R> the produced concated type * * @return a new {@link Flux} */ public final <R> Flux<R> concatMap(Function<? super T, ? extends Publisher<? extends R>> mapper) { return new FluxFlatMap<>(this, mapper, 1, PlatformDependent.XS_BUFFER_SIZE); } /** * Concatenate emissions of this {@link Flux} with the provided {@link Publisher} (no interleave). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * * @param other the {@link Publisher} sequence to concat after this {@link Flux} * * @return a new {@link Flux} */ public final Flux<T> concatWith(Publisher<? extends T> other) { return concat(this, other); } /** * Introspect this Flux graph * * @return {@link ReactiveStateUtils.Graph} representation of a publisher graph */ public final ReactiveStateUtils.Graph debug() { return ReactiveStateUtils.scan(this); } /** * Provide a default unique value if this sequence is completed without any data * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/defaultifempty.png" alt=""> * * @param defaultV the alternate value if this sequence is empty * * @return a new {@link Flux} */ public final Flux<T> defaultIfEmpty(T defaultV) { return new FluxSwitchIfEmpty<>(this, just(defaultV)); } /** * Run onSubscribe, request, cancel, onNext, onComplete and onError on a supplied * {@link ProcessorGroup#dispatchOn} reference {@link org.reactivestreams.Processor}. * * <p> * Typically used for fast publisher, slow consumer(s) scenarios. * It naturally combines with {@link Processors#singleGroup} and {@link Processors#asyncGroup} which implement * fast async event loops. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dispatchon.png" alt=""> * * {@code flux.dispatchOn(Processors.queue()).subscribe(Subscribers.unbounded()) } * * @param group a {@link ProcessorGroup} pool * * @return a {@link Flux} consuming asynchronously */ @SuppressWarnings("unchecked") public final Flux<T> dispatchOn(ProcessorGroup group) { return new FluxProcessorGroup<>(this, false, ((ProcessorGroup<T>) group)); } /** * Triggered after the {@link Flux} terminates, either by completing downstream successfully or with an error. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doafterterminate.png" alt=""> * * @param afterTerminate * * @return a new {@link Flux} */ public final Flux<T> doAfterTerminate(Runnable afterTerminate) { return new FluxPeek<>(this, null, null, null, afterTerminate, null, null, null); } /** * Triggered when the {@link Flux} is cancelled. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dooncancel.png" alt=""> * * @param onCancel * * @return a new {@link Flux} */ public final Flux<T> doOnCancel(Runnable onCancel) { return new FluxPeek<>(this, null, null, null, null, null, null, onCancel); } /** * Triggered when the {@link Flux} completes successfully. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dooncomplete.png" alt=""> * * @param onComplete * * @return a new {@link Flux} */ public final Flux<T> doOnComplete(Runnable onComplete) { return new FluxPeek<>(this, null, null, null, onComplete, null, null, null); } /** * Triggered when the {@link Flux} completes with an error. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonerror.png" alt=""> * * @param onError * * @return */ public final Flux<T> doOnError(Consumer<? super Throwable> onError) { return new FluxPeek<>(this, null, null, onError, null, null, null, null); } /** * Triggered when the {@link Flux} emits an item. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonnext.png" alt=""> * * @param onNext * * @return a new {@link Flux} */ public final Flux<T> doOnNext(Consumer<? super T> onNext) { return new FluxPeek<>(this, null, onNext, null, null, null, null, null); } /** * Triggered when the {@link Flux} is subscribed. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonsubscribe.png" alt=""> * * @param onSubscribe * * @return a new {@link Flux} */ public final Flux<T> doOnSubscribe(Consumer<? super Subscription> onSubscribe) { return new FluxPeek<>(this, onSubscribe, null, null, null, null, null, null); } /** * Triggered when the {@link Flux} terminates, either by completing successfully or with an error. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonterminate.png" alt=""> * * @param onTerminate * * @return a new {@link Flux} */ public final Flux<T> doOnTerminate(Runnable onTerminate) { return new FluxPeek<>(this, null, null, null, null, onTerminate, null, null); } /** * Transform the items emitted by this {@link Flux} into Publishers, then flatten the emissions from those by * merging them into a single {@link Flux}, so that they may interleave. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmap.png" alt=""> * * @param mapper * @param <R> * * @return a new {@link Flux} */ public final <R> Flux<R> flatMap(Function<? super T, ? extends Publisher<? extends R>> mapper) { return new FluxFlatMap<>(this, mapper, PlatformDependent.SMALL_BUFFER_SIZE, 32); } /** * Transform the signals emitted by this {@link Flux} into Publishers, then flatten the emissions from those by * merging them into a single {@link Flux}, so that they may interleave. * OnError will be transformed into completion signal after its mapping callback has been applied. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmaps.png" alt=""> * * @param mapperOnNext the {@link Function} to call on next data and returning a sequence to merge * @param mapperOnError the {@link Function} to call on error signal and returning a sequence to merge * @param mapperOnComplete the {@link Function} to call on complete signal and returning a sequence to merge * @param <R> the output {@link Publisher} type target * * @return a new {@link Flux} */ @SuppressWarnings("unchecked") public final <R> Flux<R> flatMap(Function<? super T, ? extends Publisher<? extends R>> mapperOnNext, Function<Throwable, ? extends Publisher<? extends R>> mapperOnError, Supplier<? extends Publisher<? extends R>> mapperOnComplete) { return new FluxFlatMap<>( new FluxMapSignal<>(this, mapperOnNext, mapperOnError, mapperOnComplete), IDENTITY_FUNCTION, PlatformDependent.SMALL_BUFFER_SIZE, 32); } @Override public String getName() { return getClass().getName() .replace(Flux.class.getSimpleName(), ""); } @Override public int getMode() { return FACTORY; } /** * Create a {@link Flux} intercepting all source signals with the returned Subscriber that might choose to pass them * alone to the provided Subscriber (given to the returned {@code subscribe(Subscriber)}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/lift.png" alt=""> * * @param operator * @param <R> * * @return a new {@link Flux} */ public final <R> Flux<R> lift(Function<Subscriber<? super R>, Subscriber<? super T>> operator) { return new FluxLift<>(this, operator); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use {@link Level#INFO} and java.util.logging. If SLF4J is available, it will be used instead. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * The default log category will be "reactor.core.publisher.FluxLog". 
* * @return a new {@link Flux} */ public final Flux<T> log() { return log(null, Level.INFO, Logger.ALL); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use {@link Level#INFO} and java.util.logging. If SLF4J is available, it will be used instead. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * * @return a new {@link Flux} */ public final Flux<T> log(String category) { return log(category, Level.INFO, Logger.ALL); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * * @return a new {@link Flux} */ public final Flux<T> log(String category, Level level) { return log(category, level, Logger.ALL); } /** * Observe Reactive Streams signals matching the passed flags {@code options} and use {@link Logger} support to * handle trace * implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. * * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError: * <pre> * flux.log("category", Level.INFO, Logger.ON_NEXT | LOGGER.ON_ERROR) * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * @param options a flag option that can be mapped with {@link Logger#ON_NEXT} etc. * * @return a new {@link Flux} */ public final Flux<T> log(String category, Level level, int options) { return new FluxLog<>(this, category, level, options); } /** * Transform the items emitted by this {@link Flux} by applying a function to each item. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/map.png" alt=""> * * @param mapper * @param <R> * * @return a new {@link Flux} */ public final <R> Flux<R> map(Function<? super T, ? extends R> mapper) { return new FluxMap<>(this, mapper); } /** * Merge emissions of this {@link Flux} with the provided {@link Publisher}, so that they may interleave. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * * @param source * * @return a new {@link Flux} */ public final Flux<T> mergeWith(Publisher<? extends T> source) { return merge(just(this, source)); } /** * Emit only the first item emitted by this {@link Flux}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/next.png" alt=""> * * If the sequence emits more than 1 data, emit {@link ArrayIndexOutOfBoundsException}. 
* * @return a new {@link Mono} */ public final Mono<T> next() { return new MonoNext<>(this); } /** * Subscribe to a returned fallback publisher when any error occurs. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorresumewith.png" alt=""> * * @param fallback * * @return a new {@link Flux} */ public final Flux<T> onErrorResumeWith(Function<Throwable, ? extends Publisher<? extends T>> fallback) { return new FluxResume<>(this, fallback); } /** * Fallback to the given value if an error is observed on this {@link Flux} * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorreturn.png" alt=""> * * @param fallbackValue alternate value on fallback * * @return a new {@link Flux} */ public final Flux<T> onErrorReturn(final T fallbackValue) { return switchOnError(just(fallbackValue)); } /** * * A chaining {@link Publisher#subscribe(Subscriber)} alternative to inline composition type conversion to a hot * emitter (e.g. reactor FluxProcessor Broadcaster and Promise or rxjava Subject). * * {@code flux.subscribeWith(Processors.queue()).subscribe(Subscribers.unbounded()) } * * @param subscriber * @param <E> * * @return the passed {@link Subscriber} */ public final <E extends Subscriber<? super T>> E subscribeWith(E subscriber) { subscribe(subscriber); return subscriber; } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable() { return toIterable(this instanceof Backpressurable ? ((Backpressurable) this).getCapacity() : Long.MAX_VALUE ); } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable(long batchSize) { return toIterable(batchSize, null); } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable(final long batchSize, Supplier<Queue<T>> queueProvider) { final Supplier<Queue<T>> provider; if(queueProvider == null){ provider = QueueSupplier.get(batchSize); } else{ provider = queueProvider; } return new BlockingIterable<>(this, batchSize, provider); } /** * Run subscribe, onSubscribe and request on a supplied * {@link ProcessorGroup#publishOn} reference {@link org.reactivestreams.Processor}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/publishon.png" alt=""> * * <p> * Typically used for slow publisher e.g., blocking IO, fast consumer(s) scenarios. * It naturally combines with {@link Processors#ioGroup} which implements work-queue thread dispatching. 
* * <p> * {@code flux.publishOn(Processors.queue()).subscribe(Subscribers.unbounded()) } * * @param group a {@link ProcessorGroup} pool * * @return a {@link Flux} publishing asynchronously */ @SuppressWarnings("unchecked") public final Flux<T> publishOn(ProcessorGroup group) { return new FluxProcessorGroup<>(this, true, ((ProcessorGroup<T>) group)); } /** * Subscribe to the given fallback {@link Publisher} if an error is observed on this {@link Flux} * * @param fallback the alternate {@link Publisher} * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchonerror.png" alt=""> * * @return a new {@link Flux} */ public final Flux<T> switchOnError(final Publisher<? extends T> fallback) { return onErrorResumeWith(FluxResume.create(fallback)); } /** * Provide an alternative if this sequence is completed without any data * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchifempty.png" alt=""> * * @param alternate the alternate publisher if this sequence is empty * * @return a new {@link Flux} */ public final Flux<T> switchIfEmpty(Publisher<? extends T> alternate) { return new FluxSwitchIfEmpty<>(this, alternate); } /** * Start the chain and request unbounded demand. */ public final void subscribe() { subscribe(Subscribers.unbounded()); } /** * Combine the emissions of multiple Publishers together and emit single {@link Tuple2} for each * combination. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source2 * @param <R> * * @return a new {@link Flux} */ @SuppressWarnings("unchecked") public final <R> Flux<Tuple2<T, R>> zipWith(Publisher<? extends R> source2) { return new FluxZip<>(new Publisher[]{this, source2}, IDENTITY_FUNCTION, PlatformDependent.XS_BUFFER_SIZE); } /** * Combine the emissions of multiple Publishers together via a specified function and emit single items for each * combination based on the results of this function. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param source2 * @param zipper * @param <R> * @param <V> * * @return a new {@link Flux} */ public final <R, V> Flux<V> zipWith(Publisher<? extends R> source2, final BiFunction<? super T, ? super R, ? extends V> zipper) { return new FluxZip<>(new Publisher[]{this, source2}, new Function<Tuple2<T, R>, V>() { @Override public V apply(Tuple2<T, R> tuple) { return zipper.apply(tuple.getT1(), tuple.getT2()); } }, PlatformDependent.XS_BUFFER_SIZE); } // ============================================================================================================== // Containers // ============================================================================================================== /** * A marker interface for components responsible for augmenting subscribers with features like {@link #lift} * * @param <I> * @param <O> */ public interface Operator<I, O> extends Function<Subscriber<? super O>, Subscriber<? super I>> { } /** * A connecting Flux Publisher (right-to-left from a composition chain perspective) * * @param <I> * @param <O> */ public static class FluxBarrier<I, O> extends Flux<O> implements Backpressurable, Publishable { protected final Publisher<? extends I> source; public FluxBarrier(Publisher<? 
extends I> source) { this.source = source; } @Override public long getCapacity() { return Backpressurable.class.isAssignableFrom(source.getClass()) ? ((Backpressurable) source).getCapacity() : Long.MAX_VALUE; } @Override public long getPending() { return -1L; } /** * Default is delegating and decorating with Flux API * * @param s */ @Override @SuppressWarnings("unchecked") public void subscribe(Subscriber<? super O> s) { source.subscribe((Subscriber<? super I>) s); } @Override public String toString() { return "{" + " operator : \"" + getName() + "\" " + '}'; } @Override public final Publisher<? extends I> upstream() { return source; } } /** * Decorate a Flux with a capacity for downstream accessors * * @param <I> */ final static class FluxBounded<I> extends FluxBarrier<I, I> { final private long capacity; public FluxBounded(Publisher<I> source, long capacity) { super(source); this.capacity = capacity; } @Override public long getCapacity() { return capacity; } @Override public String getName() { return "Bounded"; } @Override public void subscribe(Subscriber<? super I> s) { source.subscribe(s); } } static final class FluxProcessorGroup<I> extends FluxBarrier<I, I> implements Connectable { private final ProcessorGroup<I> processor; private final boolean publishOn; public FluxProcessorGroup(Publisher<? extends I> source, boolean publishOn, ProcessorGroup<I> processor) { super(source); this.processor = processor; this.publishOn = publishOn; } @Override public void subscribe(Subscriber<? super I> s) { if(publishOn) { processor.publishOn(source) .subscribe(s); } else{ processor.dispatchOn(source) .subscribe(s); } } @Override public Object connectedInput() { return processor; } @Override public Object connectedOutput() { return processor; } } /** * i -> i */ static final class IdentityFunction implements Function { @Override public Object apply(Object o) { return o; } } }
more marble
src/main/java/reactor/core/publisher/Flux.java
more marble
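The Flux record above spends most of its javadoc on the zip family; a minimal caller exercising the instance-level zipWith and toIterable operators it declares might look like the sketch below. The reactor.core.publisher.Flux import path follows the record's own file path, while the varargs just factory and the Java 8 lambda syntax for the BiFunction combinator are assumptions about the caller, not something the record itself shows.

import reactor.core.publisher.Flux;

public class FluxZipExample {

    public static void main(String[] args) {
        Flux<String> names = Flux.just("alpha", "beta", "gamma");
        Flux<Integer> ids = Flux.just(1, 2, 3);

        // Pair elements from both sources; the zipped sequence completes as soon as
        // the shorter source completes, exactly as the zip javadoc above describes.
        Flux<String> labelled = names.zipWith(ids, (name, id) -> name + "-" + id);

        // toIterable() (declared in the same record) blocks on demand, which keeps
        // this sketch free of explicit Subscriber plumbing.
        for (String label : labelled.toIterable()) {
            System.out.println(label); // alpha-1, beta-2, gamma-3
        }
    }
}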
Java
apache-2.0
de02593e1e945bda4a955dffca7c0bbea4c34cfd
0
KFCBETA/chikan,KFCBETA/chikan
package com.KFCBETA.hjeaimreus.chikan;

import android.util.Log;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONException;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;

/**
 * To parse the side menu items from mySQL database,
 * return a array list in order to input into constructor
 * of navigation drawer.
 * Created by hrw on 14/8/20.
 */
public class ParseCategories {

    private final static String TAG = "ParseCategories";
    private final String link = "http://ea2ac45.ngrok.com/navigationitem";
    private HttpClient httpClient = new DefaultHttpClient();
    private HttpGet request;
    private HttpResponse httpResponse;
    private BufferedReader bufferedReader;
    private ArrayList<String> titles;
    private ArrayList<Integer> article_count;
    private ArrayList navigationItem;

    ParseCategories() {
        try {
            request = new HttpGet(new URI(link));
            httpResponse = httpClient.execute(request);
        } catch (IOException e) {
            onError(e);
        } catch (URISyntaxException e) {
            onError(e);
        }
    }

    public ArrayList getNavigationDrawerList() {
        try {
            bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent()));
            String tempStr = bufferedReader.readLine();
            JSONArray jsonArray = new JSONArray(tempStr);
            for (int i = 0; i < jsonArray.length(); i++) {
                titles.add(jsonArray.getJSONObject(i).getString("titles"));
                article_count.add(jsonArray.getJSONObject(i).getInt("article_count"));
            }
            navigationItem.add(titles);
            navigationItem.add(article_count);
        } catch (IOException e) {
            onError(e);
        } catch (JSONException e) {
            onError(e);
        }
        return navigationItem;
    }

    private void onError(Exception e) {
        Log.w(TAG, e.toString());
    }
}
app/src/main/java/com/KFCBETA/hjeaimreus/chikan/ParseCategories.java
package com.KFCBETA.hjeaimreus.chikan;

import android.util.Log;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONException;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;

/**
 * To parse the side menu items from mySQL database,
 * return a array list in order to input into constructor
 * of navigation drawer.
 * Created by hrw on 14/8/20.
 */
public class ParseCategories {

    private final static String TAG = "ParseCategories";
    private final String link = "http://ea2ac45.ngrok.com/navigationitem";
    private HttpClient httpClient = new DefaultHttpClient();
    private HttpGet request;
    private HttpResponse httpResponse;
    private BufferedReader bufferedReader;
    private ArrayList<String> titles;
    private ArrayList<Integer> article_count;
    private ArrayList navigationItem;

    ParseCategories() {
        try {
            request = new HttpGet(new URI(link));
            httpResponse = httpClient.execute(request);
        } catch (IOException e) {
            onError(e);
        } catch (URISyntaxException e) {
            onError(e);
        }
    }

    public ArrayList getNavigationDrawerList() {
        try {
            bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent()));
            String tempStr = bufferedReader.readLine();
            JSONArray jsonArray = new JSONArray(tempStr);
            for (int i = 0; i < jsonArray.length(); i++) {
                titles.add(jsonArray.getJSONObject(i).getString("titles"));
                article_count.add(jsonArray.getJSONObject(i).getInt("article_count"));
            }
            navigationItem.add(titles);
            navigationItem.add(article_count);
        } catch (IOException e) {
            onError(e);
        } catch (JSONException e) {
            onError(e);
        }
        return navigationItem;
    }

    private void onError(Exception e) {
        Log.w(TAG, e.toString());
    }
}
Organized import
app/src/main/java/com/KFCBETA/hjeaimreus/chikan/ParseCategories.java
Organized import
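ParseCategories blocks on HTTP I/O both in its constructor and in getNavigationDrawerList(), so an Android caller has to keep it off the main thread; one hypothetical way to drive it (the task class name and the adapter hand-off are made up for illustration, and the caller is assumed to sit in the same package) is a plain AsyncTask:

import android.os.AsyncTask;

import java.util.ArrayList;

class LoadCategoriesTask extends AsyncTask<Void, Void, ArrayList> {

    @Override
    protected ArrayList doInBackground(Void... params) {
        // Both the constructor and getNavigationDrawerList() perform network I/O.
        return new ParseCategories().getNavigationDrawerList();
    }

    @Override
    protected void onPostExecute(ArrayList navigationItem) {
        // Hand the titles and article counts to the navigation drawer adapter here.
    }
}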
Java
apache-2.0
b4367af27ee67d63c67c34dadbdbb6bc94f1906f
0
ibissource/iaf,ibissource/iaf,ibissource/iaf,ibissource/iaf,ibissource/iaf
/* * $Log: JmsSender.java,v $ * Revision 1.12 2004-10-12 15:12:34 L190409 * reworked handling of ParameterValueList * * Revision 1.11 2004/10/05 10:43:58 Gerrit van Brakel <[email protected]> * made into parameterized sender * * Revision 1.10 2004/09/01 07:30:00 Gerrit van Brakel <[email protected]> * correction in documentation * * Revision 1.9 2004/08/16 11:27:56 Gerrit van Brakel <[email protected]> * changed timeToLive back to messageTimeToLive * * Revision 1.8 2004/08/16 09:26:01 Gerrit van Brakel <[email protected]> * corrected JavaDoc * * Revision 1.7 2004/05/21 07:59:30 unknown <[email protected]> * Add (modifications) due to the postbox sender implementation * * Revision 1.6 2004/03/31 12:04:19 Gerrit van Brakel <[email protected]> * fixed javadoc * * Revision 1.5 2004/03/26 10:42:55 Johan Verrips <[email protected]> * added @version tag in javadoc * * Revision 1.4 2004/03/26 09:50:51 Johan Verrips <[email protected]> * Updated javadoc * * Revision 1.3 2004/03/23 18:22:39 Gerrit van Brakel <[email protected]> * enabled Transaction control * */ package nl.nn.adapterframework.jms; import nl.nn.adapterframework.configuration.ConfigurationException; import nl.nn.adapterframework.core.IPostboxSender; import nl.nn.adapterframework.core.ISender; import nl.nn.adapterframework.core.SenderException; import nl.nn.adapterframework.parameters.ParameterList; import nl.nn.adapterframework.parameters.ParameterValue; import nl.nn.adapterframework.parameters.ParameterValueList; import org.apache.commons.lang.builder.ToStringBuilder; import javax.jms.JMSException; import javax.jms.Session; import javax.jms.MessageProducer; import javax.jms.Message; /** * This class sends messages with JMS. * * <p><b>Configuration:</b> * <table border="1"> * <tr><th>attributes</th><th>description</th><th>default</th></tr> * <tr><td>classname</td><td>nl.nn.adapterframework.jms.JmsSender</td><td>&nbsp;</td></tr> * <tr><td>{@link #setName(String) name}</td><td>name of the sender</td><td>&nbsp;</td></tr> * <tr><td>{@link #setDestinationName(String) destinationName}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * <tr><td>{@link #setDestinationType(String) destinationType}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * <tr><td>{@link #setMessageTimeToLive(long) messageTimeToLive}</td><td>&nbsp;</td><td>0</td></tr> * <tr><td>{@link #setPersistent(boolean) persistent}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * <tr><td>{@link #setAcknowledgeMode(String) acknowledgeMode}</td><td>&nbsp;</td><td>AUTO_ACKNOWLEDGE</td></tr> * <tr><td>{@link #setTransacted(boolean) transacted}</td><td>&nbsp;</td><td>false</td></tr> * <tr><td>{@link #setReplyToName(String) ReplyToName}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * <tr><td>{@link #setJmsRealm(String) jmsRealm}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * </table> * </p> * @version Id * * @author Gerrit van Brakel */ public class JmsSender extends JMSFacade implements ISender, IPostboxSender { public static final String version = "$Id: JmsSender.java,v 1.12 2004-10-12 15:12:34 L190409 Exp $"; private String replyToName = null; public JmsSender() { super(); } /** * Starts the sender */ public void open() throws SenderException { try { super.open(); } catch (Exception e) { throw new SenderException(e); } } /** * Stops the sender */ public void close() throws SenderException { try { super.close(); } catch (Throwable e) { throw new SenderException("JmsMessageSender [" + getName() + "] " + "got error occured stopping sender", e); } } /** * Configures the sender */ public void configure() throws 
ConfigurationException { } /** * Configures the sender */ public void configure(ParameterList parameters) throws ConfigurationException { parameters.configure(); configure(); } public boolean isSynchronous() { return false; } /** * @see nl.nn.adapterframework.core.ISender#sendMessage(java.lang.String, java.lang.String) */ public String sendMessage(String correlationID, String message) throws SenderException { return sendMessage(correlationID, message, null); } /** * @see nl.nn.adapterframework.core.IPostboxSender#sendMessage(java.lang.String, java.lang.String, java.util.ArrayList) */ public String sendMessage(String correlationID, String message, ParameterValueList msgProperties) throws SenderException { Session s = null; MessageProducer mp = null; try { s = createSession(); mp = getMessageProducer(s, getDestination()); // create message Message msg = createTextMessage(s, correlationID, message); // set properties if (null != msgProperties) setProperties(msg, msgProperties); if (null != replyToName) { msg.setJMSReplyTo(getDestination(replyToName)); log.debug("replyTo set to [" + msg.getJMSReplyTo().toString() + "]"); } // send message send(mp, msg); if (log.isInfoEnabled()) { log.info( "[" + getName() + "] " + "sent Message: [" + message + "] " + "to [" + getDestinationName() + "] " + "msgID [" + msg.getJMSMessageID() + "] " + "correlationID [" + msg.getJMSCorrelationID() + "] " + "using " + (getPersistent() ? "persistent" : "non-persistent") + " mode " + ((replyToName != null) ? "replyTo:" + replyToName : "")); } return msg.getJMSMessageID(); } catch (Throwable e) { log.error("JmsSender [" + getName() + "] got exception: " + ToStringBuilder.reflectionToString(e), e); throw new SenderException(e); } finally { if (mp != null) try { mp.close(); } catch (JMSException e) { } if (s != null) try { s.close(); } catch (JMSException e) { } } } /** * sets the JMS message properties as descriped in the msgProperties arraylist * @param msg * @param msgProperties * @throws JMSException */ private void setProperties(final Message msg, ParameterValueList msgProperties) throws JMSException { for (int i=0; i<msgProperties.size(); i++) { ParameterValue property = msgProperties.getParameterValue(i); String type = property.getDefinition().getType(); String name = property.getDefinition().getName(); if ("boolean".equalsIgnoreCase(type)) msg.setBooleanProperty(name, property.asBooleanValue(false)); else if ("byte".equalsIgnoreCase(type)) msg.setByteProperty(name, property.asByteValue((byte) 0)); else if ("double".equalsIgnoreCase(type)) msg.setDoubleProperty(name, property.asDoubleValue(0)); else if ("float".equalsIgnoreCase(type)) msg.setFloatProperty(name, property.asFloatValue(0)); else if ("int".equalsIgnoreCase(type)) msg.setIntProperty(name, property.asIntegerValue(0)); else if ("long".equalsIgnoreCase(type)) msg.setLongProperty(name, property.asLongValue(0L)); else if ("short".equalsIgnoreCase(type)) msg.setShortProperty(name, property.asShortValue((short) 0)); else if ("string".equalsIgnoreCase(type)) msg.setStringProperty(name, property.asStringValue("")); else // if ("object".equalsIgnoreCase(type)) msg.setObjectProperty(name, property.getValue()); } } public String getReplyTo() { return replyToName; } public void setReplyToName(String replyTo) { this.replyToName = replyTo; } public String toString() { String result = super.toString(); ToStringBuilder ts = new ToStringBuilder(this); ts.append("name", getName()); ts.append("version", version); ts.append("replyToName", replyToName); result += 
ts.toString(); return result; } }
JavaSource/nl/nn/adapterframework/jms/JmsSender.java
/* * $Log: JmsSender.java,v $ * Revision 1.11 2004-10-05 10:43:58 L190409 * made into parameterized sender * * Revision 1.10 2004/09/01 07:30:00 Gerrit van Brakel <[email protected]> * correction in documentation * * Revision 1.9 2004/08/16 11:27:56 Gerrit van Brakel <[email protected]> * changed timeToLive back to messageTimeToLive * * Revision 1.8 2004/08/16 09:26:01 Gerrit van Brakel <[email protected]> * corrected JavaDoc * * Revision 1.7 2004/05/21 07:59:30 unknown <[email protected]> * Add (modifications) due to the postbox sender implementation * * Revision 1.6 2004/03/31 12:04:19 Gerrit van Brakel <[email protected]> * fixed javadoc * * Revision 1.5 2004/03/26 10:42:55 Johan Verrips <[email protected]> * added @version tag in javadoc * * Revision 1.4 2004/03/26 09:50:51 Johan Verrips <[email protected]> * Updated javadoc * * Revision 1.3 2004/03/23 18:22:39 Gerrit van Brakel <[email protected]> * enabled Transaction control * */ package nl.nn.adapterframework.jms; import java.util.ArrayList; import java.util.Iterator; import nl.nn.adapterframework.configuration.ConfigurationException; import nl.nn.adapterframework.core.IPostboxSender; import nl.nn.adapterframework.core.ISender; import nl.nn.adapterframework.core.SenderException; import nl.nn.adapterframework.parameters.ParameterList; import nl.nn.adapterframework.parameters.ParameterValue; import nl.nn.adapterframework.parameters.ParameterValueList; import org.apache.commons.lang.builder.ToStringBuilder; import javax.jms.JMSException; import javax.jms.Session; import javax.jms.MessageProducer; import javax.jms.Message; /** * This class sends messages with JMS. * * <p><b>Configuration:</b> * <table border="1"> * <tr><th>attributes</th><th>description</th><th>default</th></tr> * <tr><td>classname</td><td>nl.nn.adapterframework.jms.JmsSender</td><td>&nbsp;</td></tr> * <tr><td>{@link #setName(String) name}</td><td>name of the sender</td><td>&nbsp;</td></tr> * <tr><td>{@link #setDestinationName(String) destinationName}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * <tr><td>{@link #setDestinationType(String) destinationType}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * <tr><td>{@link #setMessageTimeToLive(long) messageTimeToLive}</td><td>&nbsp;</td><td>0</td></tr> * <tr><td>{@link #setPersistent(boolean) persistent}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * <tr><td>{@link #setAcknowledgeMode(String) acknowledgeMode}</td><td>&nbsp;</td><td>AUTO_ACKNOWLEDGE</td></tr> * <tr><td>{@link #setTransacted(boolean) transacted}</td><td>&nbsp;</td><td>false</td></tr> * <tr><td>{@link #setReplyToName(String) ReplyToName}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * <tr><td>{@link #setJmsRealm(String) jmsRealm}</td><td>&nbsp;</td><td>&nbsp;</td></tr> * </table> * </p> * @version Id * * @author Gerrit van Brakel */ public class JmsSender extends JMSFacade implements ISender, IPostboxSender { public static final String version = "$Id: JmsSender.java,v 1.11 2004-10-05 10:43:58 L190409 Exp $"; private String replyToName = null; public JmsSender() { super(); } /** * Starts the sender */ public void open() throws SenderException { try { super.open(); } catch (Exception e) { throw new SenderException(e); } } /** * Stops the sender */ public void close() throws SenderException { try { super.close(); } catch (Throwable e) { throw new SenderException("JmsMessageSender [" + getName() + "] " + "got error occured stopping sender", e); } } /** * Configures the sender */ public void configure() throws ConfigurationException { } /** * Configures the sender */ public void 
configure(ParameterList parameters) throws ConfigurationException { configure(); } public boolean isSynchronous() { return false; } /** * @see nl.nn.adapterframework.core.ISender#sendMessage(java.lang.String, java.lang.String) */ public String sendMessage(String correlationID, String message) throws SenderException { return sendMessage(correlationID, message, null); } /** * @see nl.nn.adapterframework.core.IPostboxSender#sendMessage(java.lang.String, java.lang.String, java.util.ArrayList) */ public String sendMessage(String correlationID, String message, ParameterValueList msgProperties) throws SenderException { Session s = null; MessageProducer mp = null; try { s = createSession(); mp = getMessageProducer(s, getDestination()); // create message Message msg = createTextMessage(s, correlationID, message); // set properties if (null != msgProperties) setProperties(msg, msgProperties); if (null != replyToName) { msg.setJMSReplyTo(getDestination(replyToName)); log.debug("replyTo set to [" + msg.getJMSReplyTo().toString() + "]"); } // send message send(mp, msg); if (log.isInfoEnabled()) { log.info( "[" + getName() + "] " + "sent Message: [" + message + "] " + "to [" + getDestinationName() + "] " + "msgID [" + msg.getJMSMessageID() + "] " + "correlationID [" + msg.getJMSCorrelationID() + "] " + "using " + (getPersistent() ? "persistent" : "non-persistent") + " mode " + ((replyToName != null) ? "replyTo:" + replyToName : "")); } return msg.getJMSMessageID(); } catch (Throwable e) { log.error("JmsSender [" + getName() + "] got exception: " + ToStringBuilder.reflectionToString(e), e); throw new SenderException(e); } finally { if (mp != null) try { mp.close(); } catch (JMSException e) { } if (s != null) try { s.close(); } catch (JMSException e) { } } } /** * sets the JMS message properties as descriped in the msgProperties arraylist * @param msg * @param msgProperties * @throws JMSException */ private void setProperties(final Message msg, ArrayList msgProperties) throws JMSException { for (Iterator it = msgProperties.iterator(); it.hasNext();) { ParameterValue property = (ParameterValue) it.next(); String type = property.getDefinition().getType(); String name = property.getDefinition().getName(); if ("boolean".equalsIgnoreCase(type)) msg.setBooleanProperty(name, property.asBooleanValue(false)); else if ("byte".equalsIgnoreCase(type)) msg.setByteProperty(name, property.asByteValue((byte) 0)); else if ("double".equalsIgnoreCase(type)) msg.setDoubleProperty(name, property.asDoubleValue(0)); else if ("float".equalsIgnoreCase(type)) msg.setFloatProperty(name, property.asFloatValue(0)); else if ("int".equalsIgnoreCase(type)) msg.setIntProperty(name, property.asIntegerValue(0)); else if ("long".equalsIgnoreCase(type)) msg.setLongProperty(name, property.asLongValue(0L)); else if ("short".equalsIgnoreCase(type)) msg.setShortProperty(name, property.asShortValue((short) 0)); else if ("string".equalsIgnoreCase(type)) msg.setStringProperty(name, property.asStringValue("")); else // if ("object".equalsIgnoreCase(type)) msg.setObjectProperty(name, property.getValue()); } } public String getReplyTo() { return replyToName; } public void setReplyToName(String replyTo) { this.replyToName = replyTo; } public String toString() { String result = super.toString(); ToStringBuilder ts = new ToStringBuilder(this); ts.append("name", getName()); ts.append("version", version); ts.append("replyToName", replyToName); result += ts.toString(); return result; } }
reworked handling of ParameterValueList
JavaSource/nl/nn/adapterframework/jms/JmsSender.java
reworked handling of ParameterValueList
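The heart of the JmsSender record above is setProperties, which switches on a parameter's declared type to pick the matching typed JMS property setter; the stand-alone sketch below restates that dispatch against the plain javax.jms API only (the helper class, its name, and the narrowed set of supported types are illustrative and not part of the framework):

import javax.jms.JMSException;
import javax.jms.Message;

public final class JmsPropertyExample {

    private JmsPropertyExample() {
    }

    // Mirrors the type dispatch in JmsSender.setProperties: choose the setter that
    // matches the declared type and fall back to setObjectProperty for anything else.
    public static void setTypedProperty(Message msg, String type, String name, Object value)
            throws JMSException {
        if ("boolean".equalsIgnoreCase(type)) {
            msg.setBooleanProperty(name, (Boolean) value);
        } else if ("int".equalsIgnoreCase(type)) {
            msg.setIntProperty(name, (Integer) value);
        } else if ("long".equalsIgnoreCase(type)) {
            msg.setLongProperty(name, (Long) value);
        } else if ("string".equalsIgnoreCase(type)) {
            msg.setStringProperty(name, (String) value);
        } else {
            msg.setObjectProperty(name, value);
        }
    }
}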
Java
apache-2.0
352cb58214f97b4afee1b572eceb7865618af215
0
elandau/ribbon,roykachouh/ribbon,drmaas/ribbon,bondj/ribbon,drtechniko/ribbon,elandau/ribbon,spencergibb/ribbon,brajput24/ribbon,robertroeser/ribbon,enriclluelles/ribbon,Netflix/ribbon
/* * * Copyright 2013 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.loadbalancer; import java.util.Date; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import com.google.common.annotations.VisibleForTesting; import com.netflix.config.DynamicIntProperty; import com.netflix.config.DynamicPropertyFactory; import com.netflix.servo.annotations.DataSourceType; import com.netflix.servo.annotations.Monitor; import com.netflix.stats.distribution.DataDistribution; import com.netflix.stats.distribution.DataPublisher; import com.netflix.stats.distribution.Distribution; import com.netflix.util.MeasuredRate; /** * Capture various stats per Server(node) in the LoadBalancer * @author stonse * */ public class ServerStats { private static final int DEFAULT_PUBLISH_INTERVAL = 60 * 1000; // = 1 minute private static final int DEFAULT_BUFFER_SIZE = 60 * 1000; // = 1000 requests/sec for 1 minute private final DynamicIntProperty connectionFailureThreshold; private final DynamicIntProperty circuitTrippedTimeoutFactor; private final DynamicIntProperty maxCircuitTrippedTimeout; private static final DynamicIntProperty activeRequestsCountTimeout = DynamicPropertyFactory.getInstance().getIntProperty("niws.loadbalancer.serverStats.activeRequestsCount.effectiveWindowSeconds", 60 * 10); private static final double[] PERCENTS = makePercentValues(); private DataDistribution dataDist = new DataDistribution(1, PERCENTS); // in case private DataPublisher publisher = null; private final Distribution responseTimeDist = new Distribution(); int bufferSize = DEFAULT_BUFFER_SIZE; int publishInterval = DEFAULT_PUBLISH_INTERVAL; long failureCountSlidingWindowInterval = 1000; private MeasuredRate serverFailureCounts = new MeasuredRate(failureCountSlidingWindowInterval); private MeasuredRate requestCountInWindow = new MeasuredRate(300000L); Server server; AtomicLong totalRequests = new AtomicLong(); @VisibleForTesting AtomicInteger successiveConnectionFailureCount = new AtomicInteger(0); @VisibleForTesting AtomicInteger activeRequestsCount = new AtomicInteger(0); private volatile long lastConnectionFailedTimestamp; private volatile long lastActiveRequestsCountChangeTimestamp; private AtomicLong totalCircuitBreakerBlackOutPeriod = new AtomicLong(0); private volatile long lastAccessedTimestamp; private volatile long firstConnectionTimestamp = 0; public ServerStats() { connectionFailureThreshold = DynamicPropertyFactory.getInstance().getIntProperty( "niws.loadbalancer.default.connectionFailureCountThreshold", 3); circuitTrippedTimeoutFactor = DynamicPropertyFactory.getInstance().getIntProperty( "niws.loadbalancer.default.circuitTripTimeoutFactorSeconds", 10); maxCircuitTrippedTimeout = DynamicPropertyFactory.getInstance().getIntProperty( "niws.loadbalancer.default.circuitTripMaxTimeoutSeconds", 30); } public ServerStats(LoadBalancerStats lbStats) { this.maxCircuitTrippedTimeout = lbStats.getCircuitTripMaxTimeoutSeconds(); 
this.circuitTrippedTimeoutFactor = lbStats.getCircuitTrippedTimeoutFactor(); this.connectionFailureThreshold = lbStats.getConnectionFailureCountThreshold(); } /** * Initializes the object, starting data collection and reporting. */ public void initialize(Server server) { serverFailureCounts = new MeasuredRate(failureCountSlidingWindowInterval); requestCountInWindow = new MeasuredRate(300000L); if (publisher == null) { dataDist = new DataDistribution(getBufferSize(), PERCENTS); publisher = new DataPublisher(dataDist, getPublishIntervalMillis()); publisher.start(); } this.server = server; } private int getBufferSize() { return bufferSize; } private long getPublishIntervalMillis() { return publishInterval; } public void setBufferSize(int bufferSize) { this.bufferSize = bufferSize; } public void setPublishInterval(int publishInterval) { this.publishInterval = publishInterval; } /** * The supported percentile values. * These correspond to the various Monitor methods defined below. * No, this is not pretty, but that's the way it is. */ private static enum Percent { TEN(10), TWENTY_FIVE(25), FIFTY(50), SEVENTY_FIVE(75), NINETY(90), NINETY_FIVE(95), NINETY_EIGHT(98), NINETY_NINE(99), NINETY_NINE_POINT_FIVE(99.5); private double val; Percent(double val) { this.val = val; } public double getValue() { return val; } } private static double[] makePercentValues() { Percent[] percents = Percent.values(); double[] p = new double[percents.length]; for (int i = 0; i < percents.length; i++) { p[i] = percents[i].getValue(); } return p; } public long getFailureCountSlidingWindowInterval() { return failureCountSlidingWindowInterval; } public void setFailureCountSlidingWindowInterval( long failureCountSlidingWindowInterval) { this.failureCountSlidingWindowInterval = failureCountSlidingWindowInterval; } // run time methods /** * Increment the count of failures for this Server * */ public void addToFailureCount(){ serverFailureCounts.increment(); } /** * Returns the count of failures in the current window * */ public long getFailureCount(){ return serverFailureCounts.getCurrentCount(); } /** * Call this method to note the response time after every request * @param msecs */ public void noteResponseTime(double msecs){ dataDist.noteValue(msecs); responseTimeDist.noteValue(msecs); } public void incrementNumRequests(){ totalRequests.incrementAndGet(); } public void incrementActiveRequestsCount() { activeRequestsCount.incrementAndGet(); requestCountInWindow.increment(); long currentTime = System.currentTimeMillis(); lastActiveRequestsCountChangeTimestamp = currentTime; lastAccessedTimestamp = currentTime; if (firstConnectionTimestamp == 0) { firstConnectionTimestamp = currentTime; } } public void decrementActiveRequestsCount() { if (activeRequestsCount.decrementAndGet() < 0) { activeRequestsCount.set(0); } lastActiveRequestsCountChangeTimestamp = System.currentTimeMillis(); } public int getActiveRequestsCount() { return getActiveRequestsCount(System.currentTimeMillis()); } public int getActiveRequestsCount(long currentTime) { int count = activeRequestsCount.get(); if (count == 0) { return 0; } else if (currentTime - lastActiveRequestsCountChangeTimestamp > activeRequestsCountTimeout.get() * 1000 || count < 0) { activeRequestsCount.set(0); return 0; } else { return count; } } public long getMeasuredRequestsCount() { return requestCountInWindow.getCount(); } @Monitor(name="ActiveRequestsCount", type = DataSourceType.GAUGE) public int getMonitoredActiveRequestsCount() { return activeRequestsCount.get(); } 
@Monitor(name="CircuitBreakerTripped", type = DataSourceType.GAUGE) public boolean isCircuitBreakerTripped() { return isCircuitBreakerTripped(System.currentTimeMillis()); } public boolean isCircuitBreakerTripped(long currentTime) { long circuitBreakerTimeout = getCircuitBreakerTimeout(); if (circuitBreakerTimeout <= 0) { return false; } return circuitBreakerTimeout > currentTime; } private long getCircuitBreakerTimeout() { long blackOutPeriod = getCircuitBreakerBlackoutPeriod(); if (blackOutPeriod <= 0) { return 0; } return lastConnectionFailedTimestamp + blackOutPeriod; } private long getCircuitBreakerBlackoutPeriod() { int failureCount = successiveConnectionFailureCount.get(); int threshold = connectionFailureThreshold.get(); if (failureCount < threshold) { return 0; } int diff = (failureCount - threshold) > 16 ? 16 : (failureCount - threshold); int blackOutSeconds = (1 << diff) * circuitTrippedTimeoutFactor.get(); if (blackOutSeconds > maxCircuitTrippedTimeout.get()) { blackOutSeconds = maxCircuitTrippedTimeout.get(); } return blackOutSeconds * 1000L; } public void incrementSuccessiveConnectionFailureCount() { lastConnectionFailedTimestamp = System.currentTimeMillis(); successiveConnectionFailureCount.incrementAndGet(); totalCircuitBreakerBlackOutPeriod.addAndGet(getCircuitBreakerBlackoutPeriod()); } public void clearSuccessiveConnectionFailureCount() { successiveConnectionFailureCount.set(0); } @Monitor(name="SuccessiveConnectionFailureCount", type = DataSourceType.GAUGE) public int getSuccessiveConnectionFailureCount() { return successiveConnectionFailureCount.get(); } /* * Response total times */ /** * Gets the average total amount of time to handle a request, in milliseconds. */ @Monitor(name = "OverallResponseTimeMillisAvg", type = DataSourceType.INFORMATIONAL, description = "Average total time for a request, in milliseconds") public double getResponseTimeAvg() { return responseTimeDist.getMean(); } /** * Gets the maximum amount of time spent handling a request, in milliseconds. */ @Monitor(name = "OverallResponseTimeMillisMax", type = DataSourceType.INFORMATIONAL, description = "Max total time for a request, in milliseconds") public double getResponseTimeMax() { return responseTimeDist.getMaximum(); } /** * Gets the minimum amount of time spent handling a request, in milliseconds. */ @Monitor(name = "OverallResponseTimeMillisMin", type = DataSourceType.INFORMATIONAL, description = "Min total time for a request, in milliseconds") public double getResponseTimeMin() { return responseTimeDist.getMinimum(); } /** * Gets the standard deviation in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "OverallResponseTimeMillisStdDev", type = DataSourceType.INFORMATIONAL, description = "Standard Deviation in total time to handle a request, in milliseconds") public double getResponseTimeStdDev() { return responseTimeDist.getStdDev(); } /* * QOS percentile performance data for most recent period */ /** * Gets the number of samples used to compute the various response-time percentiles. */ @Monitor(name = "ResponseTimePercentileNumValues", type = DataSourceType.GAUGE, description = "The number of data points used to compute the currently reported percentile values") public int getResponseTimePercentileNumValues() { return dataDist.getSampleSize(); } /** * Gets the time when the varios percentile data was last updated. 
*/ @Monitor(name = "ResponseTimePercentileWhen", type = DataSourceType.INFORMATIONAL, description = "The time the percentile values were computed") public String getResponseTimePercentileTime() { return dataDist.getTimestamp(); } /** * Gets the time when the varios percentile data was last updated, * in milliseconds since the epoch. */ @Monitor(name = "ResponseTimePercentileWhenMillis", type = DataSourceType.COUNTER, description = "The time the percentile values were computed in milliseconds since the epoch") public long getResponseTimePercentileTimeMillis() { return dataDist.getTimestampMillis(); } /** * Gets the average total amount of time to handle a request * in the recent time-slice, in milliseconds. */ @Monitor(name = "ResponseTimeMillisAvg", type = DataSourceType.GAUGE, description = "Average total time for a request in the recent time slice, in milliseconds") public double getResponseTimeAvgRecent() { return dataDist.getMean(); } /** * Gets the 10-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis10Percentile", type = DataSourceType.INFORMATIONAL, description = "10th percentile in total time to handle a request, in milliseconds") public double getResponseTime10thPercentile() { return getResponseTimePercentile(Percent.TEN); } /** * Gets the 25-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis25Percentile", type = DataSourceType.INFORMATIONAL, description = "25th percentile in total time to handle a request, in milliseconds") public double getResponseTime25thPercentile() { return getResponseTimePercentile(Percent.TWENTY_FIVE); } /** * Gets the 50-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis50Percentile", type = DataSourceType.INFORMATIONAL, description = "50th percentile in total time to handle a request, in milliseconds") public double getResponseTime50thPercentile() { return getResponseTimePercentile(Percent.FIFTY); } /** * Gets the 75-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis75Percentile", type = DataSourceType.INFORMATIONAL, description = "75th percentile in total time to handle a request, in milliseconds") public double getResponseTime75thPercentile() { return getResponseTimePercentile(Percent.SEVENTY_FIVE); } /** * Gets the 90-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis90Percentile", type = DataSourceType.INFORMATIONAL, description = "90th percentile in total time to handle a request, in milliseconds") public double getResponseTime90thPercentile() { return getResponseTimePercentile(Percent.NINETY); } /** * Gets the 95-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis95Percentile", type = DataSourceType.GAUGE, description = "95th percentile in total time to handle a request, in milliseconds") public double getResponseTime95thPercentile() { return getResponseTimePercentile(Percent.NINETY_FIVE); } /** * Gets the 98-th percentile in the total amount of time spent handling a request, in milliseconds. 
*/ @Monitor(name = "ResponseTimeMillis98Percentile", type = DataSourceType.INFORMATIONAL, description = "98th percentile in total time to handle a request, in milliseconds") public double getResponseTime98thPercentile() { return getResponseTimePercentile(Percent.NINETY_EIGHT); } /** * Gets the 99-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis99Percentile", type = DataSourceType.GAUGE, description = "99th percentile in total time to handle a request, in milliseconds") public double getResponseTime99thPercentile() { return getResponseTimePercentile(Percent.NINETY_NINE); } /** * Gets the 99.5-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis99_5Percentile", type = DataSourceType.GAUGE, description = "99.5th percentile in total time to handle a request, in milliseconds") public double getResponseTime99point5thPercentile() { return getResponseTimePercentile(Percent.NINETY_NINE_POINT_FIVE); } public long getTotalRequestsCount() { return totalRequests.get(); } private double getResponseTimePercentile(Percent p) { return dataDist.getPercentiles()[p.ordinal()]; } public String toString(){ StringBuilder sb = new StringBuilder(); sb.append("[Server:" + server + ";"); sb.append("\tZone:" + server.getZone() + ";"); sb.append("\tTotal Requests:" + totalRequests + ";"); sb.append("\tSuccessive connection failure:" + getSuccessiveConnectionFailureCount() + ";"); if (isCircuitBreakerTripped()) { sb.append("\tBlackout until: " + new Date(getCircuitBreakerTimeout()) + ";"); } sb.append("\tTotal blackout seconds:" + totalCircuitBreakerBlackOutPeriod.get() / 1000 + ";"); sb.append("\tLast connection made:" + new Date(lastAccessedTimestamp) + ";"); if (lastConnectionFailedTimestamp > 0) { sb.append("\tLast connection failure: " + new Date(lastConnectionFailedTimestamp) + ";"); } sb.append("\tFirst connection made: " + new Date(firstConnectionTimestamp) + ";"); sb.append("\tActive Connections:" + getMonitoredActiveRequestsCount() + ";"); sb.append("\ttotal failure count in last (" + failureCountSlidingWindowInterval + ") msecs:" + getFailureCount() + ";"); sb.append("\taverage resp time:" + getResponseTimeAvg() + ";"); sb.append("\t90 percentile resp time:" + getResponseTime90thPercentile() + ";"); sb.append("\t95 percentile resp time:" + getResponseTime95thPercentile() + ";"); sb.append("\tmin resp time:" + getResponseTimeMin() + ";"); sb.append("\tmax resp time:" + getResponseTimeMax() + ";"); sb.append("\tstddev resp time:" + getResponseTimeStdDev()); sb.append("]\n"); return sb.toString(); } public static void main(String[] args){ ServerStats ss = new ServerStats(); ss.setBufferSize(1000); ss.setPublishInterval(1000); ss.initialize(new Server("stonse", 80)); Random r = new Random(1459834); for (int i=0; i < 99; i++){ double rl = r.nextDouble() * 25.2; ss.noteResponseTime(rl); ss.incrementNumRequests(); try { Thread.sleep(100); System.out.println("ServerStats:avg:" + ss.getResponseTimeAvg()); System.out.println("ServerStats:90 percentile:" + ss.getResponseTime90thPercentile()); System.out.println("ServerStats:90 percentile:" + ss.getResponseTimePercentileNumValues()); } catch (InterruptedException e) { } } System.out.println("done ---"); ss.publisher.stop(); System.out.println("ServerStats:" + ss); } }
ribbon-loadbalancer/src/main/java/com/netflix/loadbalancer/ServerStats.java
/* * * Copyright 2013 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.loadbalancer; import java.util.Date; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import com.google.common.annotations.VisibleForTesting; import com.netflix.config.DynamicIntProperty; import com.netflix.config.DynamicPropertyFactory; import com.netflix.servo.annotations.DataSourceType; import com.netflix.servo.annotations.Monitor; import com.netflix.stats.distribution.DataDistribution; import com.netflix.stats.distribution.DataPublisher; import com.netflix.stats.distribution.Distribution; import com.netflix.util.MeasuredRate; /** * Capture various stats per Server(node) in the LoadBalancer * @author stonse * */ public class ServerStats { private static final int DEFAULT_PUBLISH_INTERVAL = 60 * 1000; // = 1 minute private static final int DEFAULT_BUFFER_SIZE = 60 * 1000; // = 1000 requests/sec for 1 minute private final DynamicIntProperty connectionFailureThreshold; private final DynamicIntProperty circuitTrippedTimeoutFactor; private final DynamicIntProperty maxCircuitTrippedTimeout; private static final DynamicIntProperty activeRequestsCountTimeout = DynamicPropertyFactory.getInstance().getIntProperty("niws.loadbalancer.serverStats.activeRequestsCount.effectiveWindowSeconds", 60 * 10); private static final double[] PERCENTS = makePercentValues(); private DataDistribution dataDist = new DataDistribution(1, PERCENTS); // in case private DataPublisher publisher = null; private final Distribution responseTimeDist = new Distribution(); int bufferSize = DEFAULT_BUFFER_SIZE; int publishInterval = DEFAULT_PUBLISH_INTERVAL; long failureCountSlidingWindowInterval = 1000; private MeasuredRate serverFailureCounts = new MeasuredRate(failureCountSlidingWindowInterval); private MeasuredRate requestCountInWindow = new MeasuredRate(300000L); Server server; AtomicLong totalRequests = new AtomicLong(); @VisibleForTesting AtomicInteger successiveConnectionFailureCount = new AtomicInteger(0); @VisibleForTesting AtomicInteger activeRequestsCount = new AtomicInteger(0); private volatile long lastConnectionFailedTimestamp; private volatile long lastActiveRequestsCountChangeTimestamp; private AtomicLong totalCircuitBreakerBlackOutPeriod = new AtomicLong(0); private volatile long lastAccessedTimestamp; private volatile long firstConnectionTimestamp = 0; public ServerStats() { connectionFailureThreshold = DynamicPropertyFactory.getInstance().getIntProperty( "niws.loadbalancer.default.connectionFailureCountThreshold", 3); circuitTrippedTimeoutFactor = DynamicPropertyFactory.getInstance().getIntProperty( "niws.loadbalancer.default.circuitTripTimeoutFactorSeconds", 10); maxCircuitTrippedTimeout = DynamicPropertyFactory.getInstance().getIntProperty( "niws.loadbalancer.default.circuitTripMaxTimeoutSeconds", 30); } public ServerStats(LoadBalancerStats lbStats) { this.maxCircuitTrippedTimeout = lbStats.getCircuitTripMaxTimeoutSeconds(); 
this.circuitTrippedTimeoutFactor = lbStats.getCircuitTrippedTimeoutFactor(); this.connectionFailureThreshold = lbStats.getConnectionFailureCountThreshold(); } /** * Initializes the object, starting data collection and reporting. */ public void initialize(Server server) { serverFailureCounts = new MeasuredRate(failureCountSlidingWindowInterval); requestCountInWindow = new MeasuredRate(300000L); if (publisher == null) { dataDist = new DataDistribution(getBufferSize(), PERCENTS); publisher = new DataPublisher(dataDist, getPublishIntervalMillis()); publisher.start(); } this.server = server; } private int getBufferSize() { return bufferSize; } private long getPublishIntervalMillis() { return publishInterval; } public void setBufferSize(int bufferSize) { this.bufferSize = bufferSize; } public void setPublishInterval(int publishInterval) { this.publishInterval = publishInterval; } /** * The supported percentile values. * These correspond to the various Monitor methods defined below. * No, this is not pretty, but that's the way it is. */ private static enum Percent { TEN(10), TWENTY_FIVE(25), FIFTY(50), SEVENTY_FIVE(75), NINETY(90), NINETY_FIVE(95), NINETY_EIGHT(98), NINETY_NINE(99), NINETY_NINE_POINT_FIVE(99.5); private double val; Percent(double val) { this.val = val; } public double getValue() { return val; } } private static double[] makePercentValues() { Percent[] percents = Percent.values(); double[] p = new double[percents.length]; for (int i = 0; i < percents.length; i++) { p[i] = percents[i].getValue(); } return p; } public long getFailureCountSlidingWindowInterval() { return failureCountSlidingWindowInterval; } public void setFailureCountSlidingWindowInterval( long failureCountSlidingWindowInterval) { this.failureCountSlidingWindowInterval = failureCountSlidingWindowInterval; } // run time methods /** * Increment the count of failures for this Server * */ public void addToFailureCount(){ serverFailureCounts.increment(); } /** * Returns the count of failures in the current window * */ public long getFailureCount(){ long count = 0; count = serverFailureCounts.getCurrentCount(); return count; } /** * Call this method to note the response time after every request * @param msecs */ public void noteResponseTime(double msecs){ dataDist.noteValue(msecs); responseTimeDist.noteValue(msecs); } public void incrementNumRequests(){ totalRequests.incrementAndGet(); } public void incrementActiveRequestsCount() { activeRequestsCount.incrementAndGet(); requestCountInWindow.increment(); long currentTime = System.currentTimeMillis(); lastActiveRequestsCountChangeTimestamp = currentTime; lastAccessedTimestamp = currentTime; if (firstConnectionTimestamp == 0) { firstConnectionTimestamp = currentTime; } } public void decrementActiveRequestsCount() { if (activeRequestsCount.decrementAndGet() < 0) { activeRequestsCount.set(0); } lastActiveRequestsCountChangeTimestamp = System.currentTimeMillis(); } public int getActiveRequestsCount() { return getActiveRequestsCount(System.currentTimeMillis()); } public int getActiveRequestsCount(long currentTime) { int count = activeRequestsCount.get(); if (count == 0) { return 0; } else if (currentTime - lastActiveRequestsCountChangeTimestamp > activeRequestsCountTimeout.get() * 1000 || count < 0) { activeRequestsCount.set(0); return 0; } else { return count; } } public long getMeasuredRequestsCount() { return requestCountInWindow.getCount(); } @Monitor(name="ActiveRequestsCount", type = DataSourceType.GAUGE) public int getMonitoredActiveRequestsCount() { return 
activeRequestsCount.get(); } @Monitor(name="CircuitBreakerTripped", type = DataSourceType.GAUGE) public boolean isCircuitBreakerTripped() { return isCircuitBreakerTripped(System.currentTimeMillis()); } public boolean isCircuitBreakerTripped(long currentTime) { long circuitBreakerTimeout = getCircuitBreakerTimeout(); if (circuitBreakerTimeout <= 0) { return false; } return circuitBreakerTimeout > currentTime; } private long getCircuitBreakerTimeout() { long blackOutPeriod = getCircuitBreakerBlackoutPeriod(); if (blackOutPeriod <= 0) { return 0; } return lastConnectionFailedTimestamp + blackOutPeriod; } private long getCircuitBreakerBlackoutPeriod() { int failureCount = successiveConnectionFailureCount.get(); int threshold = connectionFailureThreshold.get(); if (failureCount < threshold) { return 0; } int diff = (failureCount - threshold) > 16 ? 16 : (failureCount - threshold); int blackOutSeconds = (1 << diff) * circuitTrippedTimeoutFactor.get(); if (blackOutSeconds > maxCircuitTrippedTimeout.get()) { blackOutSeconds = maxCircuitTrippedTimeout.get(); } return blackOutSeconds * 1000L; } public void incrementSuccessiveConnectionFailureCount() { lastConnectionFailedTimestamp = System.currentTimeMillis(); successiveConnectionFailureCount.incrementAndGet(); totalCircuitBreakerBlackOutPeriod.addAndGet(getCircuitBreakerBlackoutPeriod()); } public void clearSuccessiveConnectionFailureCount() { successiveConnectionFailureCount.set(0); } @Monitor(name="SuccessiveConnectionFailureCount", type = DataSourceType.GAUGE) public int getSuccessiveConnectionFailureCount() { return successiveConnectionFailureCount.get(); } /* * Response total times */ /** * Gets the average total amount of time to handle a request, in milliseconds. */ @Monitor(name = "OverallResponseTimeMillisAvg", type = DataSourceType.INFORMATIONAL, description = "Average total time for a request, in milliseconds") public double getResponseTimeAvg() { return responseTimeDist.getMean(); } /** * Gets the maximum amount of time spent handling a request, in milliseconds. */ @Monitor(name = "OverallResponseTimeMillisMax", type = DataSourceType.INFORMATIONAL, description = "Max total time for a request, in milliseconds") public double getResponseTimeMax() { return responseTimeDist.getMaximum(); } /** * Gets the minimum amount of time spent handling a request, in milliseconds. */ @Monitor(name = "OverallResponseTimeMillisMin", type = DataSourceType.INFORMATIONAL, description = "Min total time for a request, in milliseconds") public double getResponseTimeMin() { return responseTimeDist.getMinimum(); } /** * Gets the standard deviation in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "OverallResponseTimeMillisStdDev", type = DataSourceType.INFORMATIONAL, description = "Standard Deviation in total time to handle a request, in milliseconds") public double getResponseTimeStdDev() { return responseTimeDist.getStdDev(); } /* * QOS percentile performance data for most recent period */ /** * Gets the number of samples used to compute the various response-time percentiles. */ @Monitor(name = "ResponseTimePercentileNumValues", type = DataSourceType.GAUGE, description = "The number of data points used to compute the currently reported percentile values") public int getResponseTimePercentileNumValues() { return dataDist.getSampleSize(); } /** * Gets the time when the varios percentile data was last updated. 
*/ @Monitor(name = "ResponseTimePercentileWhen", type = DataSourceType.INFORMATIONAL, description = "The time the percentile values were computed") public String getResponseTimePercentileTime() { return dataDist.getTimestamp(); } /** * Gets the time when the varios percentile data was last updated, * in milliseconds since the epoch. */ @Monitor(name = "ResponseTimePercentileWhenMillis", type = DataSourceType.COUNTER, description = "The time the percentile values were computed in milliseconds since the epoch") public long getResponseTimePercentileTimeMillis() { return dataDist.getTimestampMillis(); } /** * Gets the average total amount of time to handle a request * in the recent time-slice, in milliseconds. */ @Monitor(name = "ResponseTimeMillisAvg", type = DataSourceType.GAUGE, description = "Average total time for a request in the recent time slice, in milliseconds") public double getResponseTimeAvgRecent() { return dataDist.getMean(); } /** * Gets the 10-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis10Percentile", type = DataSourceType.INFORMATIONAL, description = "10th percentile in total time to handle a request, in milliseconds") public double getResponseTime10thPercentile() { return getResponseTimePercentile(Percent.TEN); } /** * Gets the 25-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis25Percentile", type = DataSourceType.INFORMATIONAL, description = "25th percentile in total time to handle a request, in milliseconds") public double getResponseTime25thPercentile() { return getResponseTimePercentile(Percent.TWENTY_FIVE); } /** * Gets the 50-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis50Percentile", type = DataSourceType.INFORMATIONAL, description = "50th percentile in total time to handle a request, in milliseconds") public double getResponseTime50thPercentile() { return getResponseTimePercentile(Percent.FIFTY); } /** * Gets the 75-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis75Percentile", type = DataSourceType.INFORMATIONAL, description = "75th percentile in total time to handle a request, in milliseconds") public double getResponseTime75thPercentile() { return getResponseTimePercentile(Percent.SEVENTY_FIVE); } /** * Gets the 90-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis90Percentile", type = DataSourceType.INFORMATIONAL, description = "90th percentile in total time to handle a request, in milliseconds") public double getResponseTime90thPercentile() { return getResponseTimePercentile(Percent.NINETY); } /** * Gets the 95-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis95Percentile", type = DataSourceType.GAUGE, description = "95th percentile in total time to handle a request, in milliseconds") public double getResponseTime95thPercentile() { return getResponseTimePercentile(Percent.NINETY_FIVE); } /** * Gets the 98-th percentile in the total amount of time spent handling a request, in milliseconds. 
*/ @Monitor(name = "ResponseTimeMillis98Percentile", type = DataSourceType.INFORMATIONAL, description = "98th percentile in total time to handle a request, in milliseconds") public double getResponseTime98thPercentile() { return getResponseTimePercentile(Percent.NINETY_EIGHT); } /** * Gets the 99-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis99Percentile", type = DataSourceType.GAUGE, description = "99th percentile in total time to handle a request, in milliseconds") public double getResponseTime99thPercentile() { return getResponseTimePercentile(Percent.NINETY_NINE); } /** * Gets the 99.5-th percentile in the total amount of time spent handling a request, in milliseconds. */ @Monitor(name = "ResponseTimeMillis99_5Percentile", type = DataSourceType.GAUGE, description = "99.5th percentile in total time to handle a request, in milliseconds") public double getResponseTime99point5thPercentile() { return getResponseTimePercentile(Percent.NINETY_NINE_POINT_FIVE); } public long getTotalRequestsCount() { return totalRequests.get(); } private double getResponseTimePercentile(Percent p) { return dataDist.getPercentiles()[p.ordinal()]; } public String toString(){ StringBuilder sb = new StringBuilder(); sb.append("[Server:" + server + ";"); sb.append("\tZone:" + server.getZone() + ";"); sb.append("\tTotal Requests:" + totalRequests + ";"); sb.append("\tSuccessive connection failure:" + getSuccessiveConnectionFailureCount() + ";"); if (isCircuitBreakerTripped()) { sb.append("\tBlackout until: " + new Date(getCircuitBreakerTimeout()) + ";"); } sb.append("\tTotal blackout seconds:" + totalCircuitBreakerBlackOutPeriod.get() / 1000 + ";"); sb.append("\tLast connection made:" + new Date(lastAccessedTimestamp) + ";"); if (lastConnectionFailedTimestamp > 0) { sb.append("\tLast connection failure: " + new Date(lastConnectionFailedTimestamp) + ";"); } sb.append("\tFirst connection made: " + new Date(firstConnectionTimestamp) + ";"); sb.append("\tActive Connections:" + getMonitoredActiveRequestsCount() + ";"); sb.append("\ttotal failure count in last (" + failureCountSlidingWindowInterval + ") msecs:" + getFailureCount() + ";"); sb.append("\taverage resp time:" + getResponseTimeAvg() + ";"); sb.append("\t90 percentile resp time:" + getResponseTime90thPercentile() + ";"); sb.append("\t95 percentile resp time:" + getResponseTime95thPercentile() + ";"); sb.append("\tmin resp time:" + getResponseTimeMin() + ";"); sb.append("\tmax resp time:" + getResponseTimeMax() + ";"); sb.append("\tstddev resp time:" + getResponseTimeStdDev()); sb.append("]\n"); return sb.toString(); } public static void main(String[] args){ ServerStats ss = new ServerStats(); ss.setBufferSize(1000); ss.setPublishInterval(1000); ss.initialize(new Server("stonse", 80)); Random r = new Random(1459834); for (int i=0; i < 99; i++){ double rl = r.nextDouble() * 25.2; ss.noteResponseTime(rl); ss.incrementNumRequests(); try { Thread.sleep(100); System.out.println("ServerStats:avg:" + ss.getResponseTimeAvg()); System.out.println("ServerStats:90 percentile:" + ss.getResponseTime90thPercentile()); System.out.println("ServerStats:90 percentile:" + ss.getResponseTimePercentileNumValues()); } catch (InterruptedException e) { } } System.out.println("done ---"); ss.publisher.stop(); System.out.println("ServerStats:" + ss); } }
Minor cleanup
ribbon-loadbalancer/src/main/java/com/netflix/loadbalancer/ServerStats.java
Minor cleanup
Java
apache-2.0
983f92c0dfa170cd50c5d8241d6f4790ae72537c
0
axxter99/profileWOW,axxter99/profileWOW,axxter99/profileWOW
package org.sakaiproject.profilewow.tool.producers; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.api.common.edu.person.SakaiPerson; import org.sakaiproject.api.common.edu.person.SakaiPersonManager; import org.sakaiproject.content.api.ContentCollection; import org.sakaiproject.content.api.ContentCollectionEdit; import org.sakaiproject.content.api.ContentHostingService; import org.sakaiproject.content.api.ContentResource; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.entity.api.ResourcePropertiesEdit; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.exception.PermissionException; import org.sakaiproject.exception.TypeException; import org.sakaiproject.profilewow.tool.params.ImageViewParamaters; import org.sakaiproject.profilewow.tool.producers.templates.ProfilePicRenderer; import org.sakaiproject.profilewow.tool.util.ResourceUtil; import org.sakaiproject.site.api.SiteService; import org.sakaiproject.user.api.UserDirectoryService; import uk.org.ponder.messageutil.TargettedMessageList; import uk.org.ponder.rsf.components.UIBranchContainer; import uk.org.ponder.rsf.components.UICommand; import uk.org.ponder.rsf.components.UIContainer; import uk.org.ponder.rsf.components.UIForm; import uk.org.ponder.rsf.components.UIInternalLink; import uk.org.ponder.rsf.components.UILink; import uk.org.ponder.rsf.components.UIMessage; import uk.org.ponder.rsf.components.UIOutput; import uk.org.ponder.rsf.components.UISelect; import uk.org.ponder.rsf.components.UISelectChoice; import uk.org.ponder.rsf.components.UIVerbatim; import uk.org.ponder.rsf.flow.ARIResult; import uk.org.ponder.rsf.flow.ActionResultInterceptor; import uk.org.ponder.rsf.flow.jsfnav.NavigationCase; import uk.org.ponder.rsf.flow.jsfnav.NavigationCaseReporter; import uk.org.ponder.rsf.view.ComponentChecker; import uk.org.ponder.rsf.view.ViewComponentProducer; import uk.org.ponder.rsf.viewstate.SimpleViewParameters; import uk.org.ponder.rsf.viewstate.ViewParameters; import uk.org.ponder.stringutil.StringList; public class ChangePicture implements ViewComponentProducer, ActionResultInterceptor { private static Log log = LogFactory.getLog(ChangePicture.class); private static final String NO_PIC_URL = ProfilePicRenderer.NO_PIC_URL; public static final String VIEW_ID="changepic"; public String getViewID() { // TODO Auto-generated method stub return VIEW_ID; } private SakaiPersonManager spm; public void setSakaiPersonManager(SakaiPersonManager in) { spm = in; } private ResourceUtil resourceUtil; public void setResourceUtil(ResourceUtil ru) { resourceUtil = ru; } private TargettedMessageList tml; public void setTargettedMessageList(TargettedMessageList tml) { this.tml = tml; } public void fillComponents(UIContainer tofill, ViewParameters viewparams, ComponentChecker checker) { // TODO Auto-generated method stub ContentCollection pCollection = resourceUtil.getUserCollection(); log.debug("got a collection with " + pCollection.getMemberCount() + " objects"); SakaiPerson sPerson = spm.getSakaiPerson(spm.getUserMutableType()); //picture stuff String picUrl = sPerson.getPictureUrl(); if (picUrl == null || picUrl.trim().length() == 0) picUrl = NO_PIC_URL; else picUrl = sPerson.getPictureUrl(); if (sPerson.isSystemPicturePreferred() != null && sPerson.isSystemPicturePreferred().booleanValue()) { //System pic present and set to active 
//UIOutput.make(tofill, "remove-image-link"); UIBranchContainer uib = UIBranchContainer.make(tofill, "selected-image:"); UIInternalLink.make(uib, "selected-image", new ImageViewParamaters("imageServlet", sPerson.getAgentUuid())); UIBranchContainer uib2 = UIBranchContainer.make(tofill, "no-image:"); UIInternalLink.make(uib2, "no-image", NO_PIC_URL); //UIMessage.make(tofill, "current-pic-title", "current.picture.title.official"); } else if (sPerson.isSystemPicturePreferred() == null || !sPerson.isSystemPicturePreferred().booleanValue() ) { UIBranchContainer uib = UIBranchContainer.make(tofill, "selected-image:"); UILink.make(uib, "selected-image", picUrl); if (!picUrl.equals(NO_PIC_URL)) { //UIOutput.make(tofill, "remove-image-link"); //UIMessage.make(tofill, "current-pic-title", "current.picture.title"); UIBranchContainer uib2 = UIBranchContainer.make(tofill, "no-image:"); UIInternalLink.make(uib2, "no-image", NO_PIC_URL); }else{ //no profile image at all //UIMessage.make(tofill, "warning-no-image", "warning.picture.set"); //UIMessage.make(tofill, "current-pic-title", "current.picture.title.noimage"); //UIBranchContainer uib2 = UIBranchContainer.make(tofill, "no-image:"); //UIInternalLink.make(uib2, "no-image", NO_PIC_URL); } //should only display if there is an official pic if (hasProfilePic()) { UIBranchContainer op = UIBranchContainer.make(tofill, "official-pic:"); UIMessage.make(op, "official-pic-title", "official.picture.title"); UIInternalLink.make(op, "official-pic-image", new ImageViewParamaters("imageServlet", sPerson.getUuid() )); UIBranchContainer op2 = UIBranchContainer.make(tofill, "official-pic-form:"); UIForm form = UIForm.make(op2, "official-pic-form"); UICommand.make(form, "official-pic-field", UIMessage.make("useOfficialSub"),"uploadBean.useOfficial"); } } UIForm formUpload = UIForm.make(tofill, "upload-pic-form"); //UIInput.make(form,"file-upload", "uploadBean") UICommand.make(formUpload,"submit","uploadBean.processUpload"); UIForm form = UIForm.make(tofill,"form"); List<ContentResource> resources = pCollection.getMemberResources(); UISelect selectPic = UISelect.makeMultiple(form, "select-pic", null, "uploadBean.picUrl", new String[] {}); StringList selections = new StringList(); for (int i = 0; i < resources.size(); i++) { //UIBranchContainer row = UIBranchContainer.make(form, "pic-row:"); //for (int q =0; q < 5 && i< resources.size(); q++) { ContentResource resource = (ContentResource)resources.get(i); String rUrl = resource.getUrl(); if(!rUrl.equals(picUrl)){ UIBranchContainer cell = UIBranchContainer.make(tofill, "pic-cell:"); selections.add(rUrl); UISelectChoice choice = UISelectChoice.make(cell, "select", selectPic.getFullID(), (selections.size() -1 )); UILink.make(cell, "pic", rUrl); //i++; } //} } selectPic.optionlist.setValue(selections.toStringArray()); UICommand.make(form, "submit","Change picture","uploadBean.changePicture"); } private boolean hasProfilePic() { SakaiPerson sp = spm.getSakaiPerson(spm.getSystemMutableType()); if (sp == null) return false; else if (sp.getJpegPhoto() != null) return true; return false; } public void interceptActionResult(ARIResult result, ViewParameters incoming, Object actionReturn) { // TODO Auto-generated method stub result.resultingView = new SimpleViewParameters(MainProducer.VIEW_ID); log.warn("intecept object."); } }
tool/src/java/org/sakaiproject/profilewow/tool/producers/ChangePicture.java
package org.sakaiproject.profilewow.tool.producers; import java.util.ArrayList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.api.common.edu.person.SakaiPerson; import org.sakaiproject.api.common.edu.person.SakaiPersonManager; import org.sakaiproject.content.api.ContentCollection; import org.sakaiproject.content.api.ContentCollectionEdit; import org.sakaiproject.content.api.ContentHostingService; import org.sakaiproject.content.api.ContentResource; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.entity.api.ResourcePropertiesEdit; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.exception.PermissionException; import org.sakaiproject.exception.TypeException; import org.sakaiproject.profilewow.tool.params.ImageViewParamaters; import org.sakaiproject.profilewow.tool.producers.templates.ProfilePicRenderer; import org.sakaiproject.profilewow.tool.util.ResourceUtil; import org.sakaiproject.site.api.SiteService; import org.sakaiproject.user.api.UserDirectoryService; import uk.org.ponder.messageutil.TargettedMessageList; import uk.org.ponder.rsf.components.UIBranchContainer; import uk.org.ponder.rsf.components.UICommand; import uk.org.ponder.rsf.components.UIContainer; import uk.org.ponder.rsf.components.UIForm; import uk.org.ponder.rsf.components.UIInternalLink; import uk.org.ponder.rsf.components.UILink; import uk.org.ponder.rsf.components.UIMessage; import uk.org.ponder.rsf.components.UIOutput; import uk.org.ponder.rsf.components.UISelect; import uk.org.ponder.rsf.components.UISelectChoice; import uk.org.ponder.rsf.components.UIVerbatim; import uk.org.ponder.rsf.flow.ARIResult; import uk.org.ponder.rsf.flow.ActionResultInterceptor; import uk.org.ponder.rsf.flow.jsfnav.NavigationCase; import uk.org.ponder.rsf.flow.jsfnav.NavigationCaseReporter; import uk.org.ponder.rsf.view.ComponentChecker; import uk.org.ponder.rsf.view.ViewComponentProducer; import uk.org.ponder.rsf.viewstate.SimpleViewParameters; import uk.org.ponder.rsf.viewstate.ViewParameters; import uk.org.ponder.stringutil.StringList; public class ChangePicture implements ViewComponentProducer, ActionResultInterceptor { private static Log log = LogFactory.getLog(ChangePicture.class); private static final String NO_PIC_URL = ProfilePicRenderer.NO_PIC_URL; public static final String VIEW_ID="changepic"; public String getViewID() { // TODO Auto-generated method stub return VIEW_ID; } private SakaiPersonManager spm; public void setSakaiPersonManager(SakaiPersonManager in) { spm = in; } private ResourceUtil resourceUtil; public void setResourceUtil(ResourceUtil ru) { resourceUtil = ru; } private TargettedMessageList tml; public void setTargettedMessageList(TargettedMessageList tml) { this.tml = tml; } public void fillComponents(UIContainer tofill, ViewParameters viewparams, ComponentChecker checker) { // TODO Auto-generated method stub ContentCollection pCollection = resourceUtil.getUserCollection(); log.debug("got a collection with " + pCollection.getMemberCount() + " objects"); SakaiPerson sPerson = spm.getSakaiPerson(spm.getUserMutableType()); //picture stuff String picUrl = sPerson.getPictureUrl(); if (picUrl == null || picUrl.trim().length() == 0) picUrl = NO_PIC_URL; else picUrl = sPerson.getPictureUrl(); if (sPerson.isSystemPicturePreferred() != null && sPerson.isSystemPicturePreferred().booleanValue()) { //System pic present and set to active 
//UIOutput.make(tofill, "remove-image-link"); UIBranchContainer uib = UIBranchContainer.make(tofill, "selected-image:"); UIInternalLink.make(uib, "selected-image", new ImageViewParamaters("imageServlet", sPerson.getUuid())); UIBranchContainer uib2 = UIBranchContainer.make(tofill, "no-image:"); UIInternalLink.make(uib2, "no-image", NO_PIC_URL); //UIMessage.make(tofill, "current-pic-title", "current.picture.title.official"); } else if (sPerson.isSystemPicturePreferred() == null || !sPerson.isSystemPicturePreferred().booleanValue() ) { UIBranchContainer uib = UIBranchContainer.make(tofill, "selected-image:"); UILink.make(uib, "selected-image", picUrl); if (!picUrl.equals(NO_PIC_URL)) { //UIOutput.make(tofill, "remove-image-link"); //UIMessage.make(tofill, "current-pic-title", "current.picture.title"); UIBranchContainer uib2 = UIBranchContainer.make(tofill, "no-image:"); UIInternalLink.make(uib2, "no-image", NO_PIC_URL); }else{ //no profile image at all //UIMessage.make(tofill, "warning-no-image", "warning.picture.set"); //UIMessage.make(tofill, "current-pic-title", "current.picture.title.noimage"); //UIBranchContainer uib2 = UIBranchContainer.make(tofill, "no-image:"); //UIInternalLink.make(uib2, "no-image", NO_PIC_URL); } //should only display if there is an official pic if (hasProfilePic()) { UIBranchContainer op = UIBranchContainer.make(tofill, "official-pic:"); UIMessage.make(op, "official-pic-title", "official.picture.title"); UIInternalLink.make(op, "official-pic-image", new ImageViewParamaters("imageServlet", sPerson.getUuid() )); UIBranchContainer op2 = UIBranchContainer.make(tofill, "official-pic-form:"); UIForm form = UIForm.make(op2, "official-pic-form"); UICommand.make(form, "official-pic-field", UIMessage.make("useOfficialSub"),"uploadBean.useOfficial"); } } UIForm formUpload = UIForm.make(tofill, "upload-pic-form"); //UIInput.make(form,"file-upload", "uploadBean") UICommand.make(formUpload,"submit","uploadBean.processUpload"); UIForm form = UIForm.make(tofill,"form"); List<ContentResource> resources = pCollection.getMemberResources(); UISelect selectPic = UISelect.makeMultiple(form, "select-pic", null, "uploadBean.picUrl", new String[] {}); StringList selections = new StringList(); for (int i = 0; i < resources.size(); i++) { //UIBranchContainer row = UIBranchContainer.make(form, "pic-row:"); //for (int q =0; q < 5 && i< resources.size(); q++) { ContentResource resource = (ContentResource)resources.get(i); String rUrl = resource.getUrl(); if(!rUrl.equals(picUrl)){ UIBranchContainer cell = UIBranchContainer.make(tofill, "pic-cell:"); selections.add(rUrl); UISelectChoice choice = UISelectChoice.make(cell, "select", selectPic.getFullID(), (selections.size() -1 )); UILink.make(cell, "pic", rUrl); //i++; } //} } selectPic.optionlist.setValue(selections.toStringArray()); UICommand.make(form, "submit","Change picture","uploadBean.changePicture"); } private boolean hasProfilePic() { SakaiPerson sp = spm.getSakaiPerson(spm.getSystemMutableType()); if (sp == null) return false; else if (sp.getJpegPhoto() != null) return true; return false; } public void interceptActionResult(ARIResult result, ViewParameters incoming, Object actionReturn) { // TODO Auto-generated method stub result.resultingView = new SimpleViewParameters(MainProducer.VIEW_ID); log.warn("intecept object."); } }
VULA-196 use right parameter
tool/src/java/org/sakaiproject/profilewow/tool/producers/ChangePicture.java
VULA-196 use right parameter
Java
apache-2.0
3bf9d2af0bbca3f5daa241ac5ac535cc06853e57
0
itachizhu/cms,itachizhu/cms,itachizhu/cms,itachizhu/cms
package org.itachi.cms.controller; import org.itachi.cms.service.RoleService; import org.itachi.cms.service.UserGroupService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; /** * Created by liaoyongchao on 2017/5/3. */ @Controller @RequestMapping("/admusergroup") public class AdmUserGroupController { // @Autowired //private UserGroupService userGroupService; @Autowired private RoleService roleService; @RequestMapping(value = "/list", method = RequestMethod.GET) public String list(Model model) throws Exception { model.addAttribute("name", "liaoyongchao"); return "userGroups/userGroupList"; } @RequestMapping(value = "/addUserGroup", method = RequestMethod.GET) public String addUserGroup(Model model) throws Exception { model.addAttribute("name", "liaoyongchao"); return "userGroups/addUserGroup.html"; } @RequestMapping(value = "/modifyUserGroup", method = RequestMethod.GET) public String modifyUserGroup(Model model) throws Exception { model.addAttribute("name", "liaoyongchao"); return "userGroups/modifyUserGroup.html"; } }
cms-boot/src/main/java/org/itachi/cms/controller/AdmUserGroupController.java
package org.itachi.cms.controller; import org.itachi.cms.service.RoleService; import org.itachi.cms.service.UserGroupService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; /** * Created by liaoyongchao on 2017/5/3. */ @Controller @RequestMapping("/group") public class AdmUserGroupController { @Autowired private UserGroupService userGroupService; @Autowired private RoleService roleService; @RequestMapping(value = "/userGroupList", method = RequestMethod.GET) public String userGroupList(Model model) throws Exception { model.addAttribute("name", "liaoyongchao"); return "userGroups/userGroupList"; } @RequestMapping(value = "/addUserGroup", method = RequestMethod.GET) public String addUserGroup(Model model) throws Exception { model.addAttribute("name", "liaoyongchao"); return "userGroups/addUserGroup.html"; } @RequestMapping(value = "/modifyUserGroup", method = RequestMethod.GET) public String modifyUserGroup(Model model) throws Exception { model.addAttribute("name", "liaoyongchao"); return "userGroups/modifyUserGroup.html"; } }
User group management interface
cms-boot/src/main/java/org/itachi/cms/controller/AdmUserGroupController.java
User group management interface
Java
apache-2.0
0b30f1318c8be9c3d096899331979788805f101b
0
OpenHFT/Chronicle-Queue,OpenHFT/Chronicle-Queue,fengshao0907/Chronicle-Queue
package demo; import javax.imageio.ImageIO; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.File; import java.io.IOException; import java.util.List; import java.util.concurrent.atomic.AtomicLong; /** * Demo application for Chronicle */ public class ChronicleDashboard2 implements ChronicleUpdatable{ private AtomicLong runningTime = new AtomicLong(0); private AtomicLong messagesRead = new AtomicLong(0); private AtomicLong tcpMessagesProduced = new AtomicLong(0); private AtomicLong messagesProduced1 = new AtomicLong(0); private AtomicLong messagesProduced2 = new AtomicLong(0); private JTextField tfTotalWrites = new JTextField(); private JTextField tfWriteRate = new JTextField(); private JTextField tfTotalReads = new JTextField(); private JTextField tfReadRate = new JTextField(); private JTextField tfTotalTCP = new JTextField(); private JTextField tfRunningTime = new JTextField(); private JTextField tfTCPRate = new JTextField(); private JTextPane tpFiles = new JTextPane(); private JTextField tfDiskSpace = new JTextField(); private File demo_path = new File((System.getProperty("java.io.tmpdir") + "/demo").replaceAll("//", "/")); private ChronicleController controller; public static void main(String... args) throws IOException{ new ChronicleDashboard2(); } public ChronicleDashboard2() throws IOException{ Image image = ImageIO.read(ChronicleDashboard2.class.getResourceAsStream("/diagram.jpg")); final GUIUpdaterThread updater = new GUIUpdaterThread(); updater.setLoopTime(100); updater.start(); controller = new ChronicleController(this, demo_path); JFrame frame = new JFrame("ChronicleDashboard"); BackgroundPanel bg = new BackgroundPanel(image); JTextArea info = new JTextArea(); info.setText("This demonstrates a typical system topology for ChronicleQueue.\n" + "Messages (prices consisting of 1 String, " + "4 Ints, 1 Bool) are created by two producer threads " + "and read by local and remote (TCP loopback) consumers. 
All messages are persisted to disk and available for replay.\n" + "Even an average laptop should be able to " + "process 1,500,000 messages/second."); final JButton startButton = new JButton("Start Demo"); JLabel lblRate = new JLabel("Select event rate/s:"); final JComboBox<String> cbRate = new JComboBox<>(); cbRate.addItem(" 10,000"); cbRate.addItem(" 30,000"); cbRate.addItem(" 100,000"); cbRate.addItem(" 300,000"); cbRate.addItem("1,000,000"); cbRate.addItem("3,000,000"); cbRate.addItem("MAX"); cbRate.setSelectedItem("1,000,000"); final JProgressBar pBar = new JProgressBar(); JLabel lblEventsWritten = new JLabel("Events written"); JLabel lblRateWritten = new JLabel("Write rate(events/s)"); JLabel lblEventsRead = new JLabel("Events read"); JLabel lblRateRead = new JLabel("Read rate(events/s)"); JLabel lblEventsTCP = new JLabel("Events read"); JLabel lblRateTCP = new JLabel("Read rate(events/s)"); JLabel lblRunningTime = new JLabel("Demo running time(s)"); JLabel lblFilesCreated = new JLabel("Files written to disk"); JLabel lblDiskSpace = new JLabel("Disk space remaining"); JScrollPane scrollPane = new JScrollPane(tpFiles); scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED); scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED); bg.add(tfTotalWrites); bg.add(tfWriteRate); bg.add(startButton); bg.add(lblRate); bg.add(cbRate); bg.add(pBar); bg.add(lblEventsWritten); bg.add(lblRateWritten); bg.add(tfReadRate); bg.add(tfTotalReads); bg.add(lblEventsRead); bg.add(lblRateRead); bg.add(tfTCPRate); bg.add(tfTotalTCP); bg.add(lblEventsTCP); bg.add(lblRateTCP); bg.add(tfRunningTime); bg.add(lblRunningTime); bg.add(scrollPane); bg.add(lblFilesCreated); bg.add(lblDiskSpace); bg.add(tfDiskSpace); bg.add(info) ; startButton.setLocation(5, 5); startButton.setSize(100, 50); lblRate.setLocation(120, 5); lblRate.setSize(140, 18); cbRate.setLocation(120, 25); cbRate.setSize(140, 25); lblRateWritten.setLocation(70, 180); lblRateWritten.setSize(140, 18); tfWriteRate.setLocation(200, 180); tfWriteRate.setSize(80, 18); tfWriteRate.setEditable(false); lblEventsWritten.setLocation(70, 200); lblEventsWritten.setSize(120, 18); tfTotalWrites.setLocation(200, 200); tfTotalWrites.setSize(80, 18); tfTotalWrites.setEditable(false); lblRateRead.setLocation(140, 310); lblRateRead.setSize(140, 18); tfReadRate.setLocation(270, 310); tfReadRate.setSize(80, 18); tfReadRate.setEditable(false); lblEventsRead.setLocation(140, 330); lblEventsRead.setSize(100, 18); tfTotalReads.setLocation(270, 330); tfTotalReads.setSize(80, 18); tfTotalReads.setEditable(false); lblRateTCP.setLocation(610, 100); lblRateTCP.setSize(140, 18); tfTCPRate.setLocation(740, 100); tfTCPRate.setSize(80, 18); tfTCPRate.setEditable(false); lblEventsTCP.setLocation(610, 120); lblEventsTCP.setSize(100, 18); tfTotalTCP.setLocation(740, 120); tfTotalTCP.setSize(80, 18); tfTotalTCP.setEditable(false); lblRunningTime.setLocation(5, 75); lblRunningTime.setSize(150, 18); tfRunningTime.setLocation(160, 75); tfRunningTime.setSize(100, 18); tfRunningTime.setEditable(false); int dx = 510; int dy = 350; lblFilesCreated.setLocation(dx, dy); lblFilesCreated.setSize(200, 18); scrollPane.setLocation(dx, dy+20); scrollPane.setSize(320, 150); scrollPane.setEnabled(false); lblDiskSpace.setLocation(dx, dy+175); lblDiskSpace.setSize(140, 18); tfDiskSpace.setLocation(dx+140, dy+175); tfDiskSpace.setSize(100, 18); tfDiskSpace.setEditable(false); info.setLocation(45, 370); info.setSize(215, 190); 
info.setEditable(false); info.setOpaque(false); info.setFont(info.getFont().deriveFont(12.0f)); info.setLineWrap(true); info.setWrapStyleWord(true); pBar.setLocation(5, 60); pBar.setSize(250, 10); pBar.setIndeterminate(false); startButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { if(startButton.getText().equals("Start Demo")){ startButton.setText("Stop Demo"); pBar.setIndeterminate(true); cbRate.setEnabled(false); updater.go(); try { controller.start((String) cbRate.getSelectedItem()); } catch (IOException e1) { e1.printStackTrace(); } }else if(startButton.getText().equals("Stop Demo")){ startButton.setText("Reset Demo"); pBar.setIndeterminate(false); updater.pause(); controller.stop(); }else if(startButton.getText().equals("Reset Demo")){ startButton.setText("Start Demo"); cbRate.setEnabled(true); messagesProduced1.set(0); messagesProduced2.set(0); tcpMessagesProduced.set(0); messagesRead.set(0); runningTime.set(0); tfTotalWrites.setText("0"); tfRunningTime.setText("0"); tfWriteRate.setText("0"); tfReadRate.setText("0"); tfTotalTCP.setText("0"); tfTotalReads.setText("0"); tfTCPRate.setText("0"); tfDiskSpace.setText(getBytesAsGB(demo_path.getUsableSpace())); tpFiles.setText(""); } } }); frame.setContentPane(bg); frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); frame.getContentPane().repaint(); frame.pack(); frame.setResizable(false); frame.setVisible(true); } private String getBytesAsGB(long bytes) { double step = Math.pow(1000, 3); if (bytes > step) return String.format("%3.1f %s", bytes / step, "GB"); return Long.toString(bytes); } @Override public void setFileNames(List<String> files) { String fileNames = ""; for (int i = 0; i < files.size(); i++) { if (i != 0) fileNames += "\n"; fileNames += files.get(i); } tpFiles.setText(fileNames); } @Override public void addTimeMillis(long l) { runningTime.addAndGet(l); } @Override public void incrMessageRead() { messagesRead.incrementAndGet(); } @Override public void incrTcpMessageRead() { tcpMessagesProduced.incrementAndGet(); } @Override public AtomicLong tcpMessageRead() { return tcpMessagesProduced; } @Override public AtomicLong count1() { return messagesProduced1; } @Override public AtomicLong count2() { return messagesProduced2; } private static class BackgroundPanel extends JPanel { private Image image; public BackgroundPanel(Image image) { try { this.image = image; Dimension size = new Dimension(image.getWidth(null), image.getHeight(null)); setPreferredSize(size); setMinimumSize(size); setMaximumSize(size); setSize(size); setLayout(null); } catch (Exception e) { e.printStackTrace();/*handled in paintComponent()*/ } } @Override protected void paintComponent(Graphics g) { super.paintComponent(g); if (image != null) g.drawImage(image, 0,0,this.getWidth(),this.getHeight(),this); } } private class GUIUpdaterThread extends ControlledThread { private long count = 0; @Override public void loop() { tfTotalTCP.setText(String.format("%,d K", tcpMessagesProduced.get() / 1000)); tfTotalReads.setText(String.format("%,d K", messagesRead.get() / 1000)); long totalMessage = messagesProduced1.get() + messagesProduced2.get(); tfTotalWrites.setText(String.format("%,d K", totalMessage / 1000)); long runningTime = ChronicleDashboard2.this.runningTime.get(); tfRunningTime.setText(String.format("%.3f", runningTime / 1000.0)); if (runningTime != 0) { tfReadRate.setText(String.format("%,d K", messagesRead.get() / runningTime)); tfTCPRate.setText(String.format("%,d K", tcpMessagesProduced.get() / 
runningTime)); tfWriteRate.setText(String.format("%,d K", totalMessage / runningTime)); } if (count % 5 == 0) { //Once a second read file space tfDiskSpace.setText(getBytesAsGB(demo_path.getUsableSpace())); } count++; } @Override public void cleanup() { } } }
chronicle-demo/src/main/java/demo/ChronicleDashboard2.java
package demo; import javax.imageio.ImageIO; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.File; import java.io.IOException; import java.util.List; import java.util.concurrent.atomic.AtomicLong; /** * Demo application for Chronicle */ public class ChronicleDashboard2 implements ChronicleUpdatable{ private AtomicLong runningTime = new AtomicLong(0); private AtomicLong messagesRead = new AtomicLong(0); private AtomicLong tcpMessagesProduced = new AtomicLong(0); private AtomicLong messagesProduced1 = new AtomicLong(0); private AtomicLong messagesProduced2 = new AtomicLong(0); private JTextField tfTotalWrites = new JTextField(); private JTextField tfWriteRate = new JTextField(); private JTextField tfTotalReads = new JTextField(); private JTextField tfReadRate = new JTextField(); private JTextField tfTotalTCP = new JTextField(); private JTextField tfRunningTime = new JTextField(); private JTextField tfTCPRate = new JTextField(); private JTextPane tpFiles = new JTextPane(); private JTextField tfDiskSpace = new JTextField(); private File demo_path = new File((System.getProperty("java.io.tmpdir") + "/demo").replaceAll("//", "/")); private ChronicleController controller; public static void main(String... args) throws IOException{ new ChronicleDashboard2(); } public ChronicleDashboard2() throws IOException{ Image image = ImageIO.read(ChronicleDashboard2.class.getResourceAsStream("/diagram.jpg")); final GUIUpdaterThread updater = new GUIUpdaterThread(); updater.setLoopTime(100); updater.start(); controller = new ChronicleController(this, demo_path); JFrame frame = new JFrame("ChronicleDashboard"); BackgroundPanel bg = new BackgroundPanel(image); JTextArea info = new JTextArea(); info.setText("This demonstrates a typical system topology for ChronicleQueue.\n" + "Messages (prices consisting of 1 String, " + "4 Ints, 1 Bool) are created by two producer threads " + "and read by local and remote (TCP loopback) consumers. 
All messages are persisted to disk and available for replay.\n" + "Even an average laptop should be able to " + "process 1,500,000 messages/second."); final JButton startButton = new JButton("Start Demo"); JLabel lblRate = new JLabel("Select event rate/s:"); final JComboBox<String> cbRate = new JComboBox<>(); cbRate.addItem(" 10,000"); cbRate.addItem(" 30,000"); cbRate.addItem(" 100,000"); cbRate.addItem(" 300,000"); cbRate.addItem("1,000,000"); cbRate.addItem("3,000,000"); cbRate.addItem("MAX"); cbRate.setSelectedItem("1,000,000"); final JProgressBar pBar = new JProgressBar(); JLabel lblEventsWritten = new JLabel("Events written"); JLabel lblRateWritten = new JLabel("Write rate(p/s)"); JLabel lblEventsRead = new JLabel("Events read"); JLabel lblRateRead = new JLabel("Read rate(p/s)"); JLabel lblEventsTCP = new JLabel("Events read"); JLabel lblRateTCP = new JLabel("Read rate(p/s)"); JLabel lblRunningTime = new JLabel("Demo running time(s)"); JLabel lblFilesCreated = new JLabel("Files written to disk"); JLabel lblDiskSpace = new JLabel("Disk space remaining"); JScrollPane scrollPane = new JScrollPane(tpFiles); scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED); scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED); bg.add(tfTotalWrites); bg.add(tfWriteRate); bg.add(startButton); bg.add(lblRate); bg.add(cbRate); bg.add(pBar); bg.add(lblEventsWritten); bg.add(lblRateWritten); bg.add(tfReadRate); bg.add(tfTotalReads); bg.add(lblEventsRead); bg.add(lblRateRead); bg.add(tfTCPRate); bg.add(tfTotalTCP); bg.add(lblEventsTCP); bg.add(lblRateTCP); bg.add(tfRunningTime); bg.add(lblRunningTime); bg.add(scrollPane); bg.add(lblFilesCreated); bg.add(lblDiskSpace); bg.add(tfDiskSpace); bg.add(info) ; startButton.setLocation(5, 5); startButton.setSize(100, 50); lblRate.setLocation(120, 5); lblRate.setSize(140, 18); cbRate.setLocation(120, 25); cbRate.setSize(140, 25); lblRateWritten.setLocation(90, 180); lblRateWritten.setSize(100, 18); tfWriteRate.setLocation(200, 180); tfWriteRate.setSize(80, 18); tfWriteRate.setEditable(false); lblEventsWritten.setLocation(90, 200); lblEventsWritten.setSize(100, 18); tfTotalWrites.setLocation(200, 200); tfTotalWrites.setSize(80, 18); tfTotalWrites.setEditable(false); lblRateRead.setLocation(160, 310); lblRateRead.setSize(100, 18); tfReadRate.setLocation(270, 310); tfReadRate.setSize(80, 18); tfReadRate.setEditable(false); lblEventsRead.setLocation(160, 330); lblEventsRead.setSize(100, 18); tfTotalReads.setLocation(270, 330); tfTotalReads.setSize(80, 18); tfTotalReads.setEditable(false); lblRateTCP.setLocation(610, 100); lblRateTCP.setSize(100, 18); tfTCPRate.setLocation(720, 100); tfTCPRate.setSize(80, 18); tfTCPRate.setEditable(false); lblEventsTCP.setLocation(610, 120); lblEventsTCP.setSize(100, 18); tfTotalTCP.setLocation(720, 120); tfTotalTCP.setSize(80, 18); tfTotalTCP.setEditable(false); lblRunningTime.setLocation(5, 75); lblRunningTime.setSize(150, 18); tfRunningTime.setLocation(160, 75); tfRunningTime.setSize(100, 18); tfRunningTime.setEditable(false); int dx = 510; int dy = 350; lblFilesCreated.setLocation(dx, dy); lblFilesCreated.setSize(200, 18); scrollPane.setLocation(dx, dy+20); scrollPane.setSize(320, 150); scrollPane.setEnabled(false); lblDiskSpace.setLocation(dx, dy+175); lblDiskSpace.setSize(140, 18); tfDiskSpace.setLocation(dx+140, dy+175); tfDiskSpace.setSize(100, 18); tfDiskSpace.setEditable(false); info.setLocation(45, 370); info.setSize(215, 190); info.setEditable(false); 
info.setOpaque(false); info.setFont(info.getFont().deriveFont(12.0f)); info.setLineWrap(true); info.setWrapStyleWord(true); pBar.setLocation(5, 60); pBar.setSize(250, 10); pBar.setIndeterminate(false); startButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { if(startButton.getText().equals("Start Demo")){ startButton.setText("Stop Demo"); pBar.setIndeterminate(true); cbRate.setEnabled(false); updater.go(); try { controller.start((String) cbRate.getSelectedItem()); } catch (IOException e1) { e1.printStackTrace(); } }else if(startButton.getText().equals("Stop Demo")){ startButton.setText("Reset Demo"); pBar.setIndeterminate(false); updater.pause(); controller.stop(); }else if(startButton.getText().equals("Reset Demo")){ startButton.setText("Start Demo"); cbRate.setEnabled(true); messagesProduced1.set(0); messagesProduced2.set(0); tcpMessagesProduced.set(0); messagesRead.set(0); runningTime.set(0); tfTotalWrites.setText("0"); tfRunningTime.setText("0"); tfWriteRate.setText("0"); tfReadRate.setText("0"); tfTotalTCP.setText("0"); tfTotalReads.setText("0"); tfTCPRate.setText("0"); tfDiskSpace.setText(getBytesAsGB(demo_path.getUsableSpace())); tpFiles.setText(""); } } }); frame.setContentPane(bg); frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); frame.getContentPane().repaint(); frame.pack(); frame.setResizable(false); frame.setVisible(true); } private String getBytesAsGB(long bytes) { double step = Math.pow(1000, 3); if (bytes > step) return String.format("%3.1f %s", bytes / step, "GB"); return Long.toString(bytes); } @Override public void setFileNames(List<String> files) { String fileNames = ""; for (int i = 0; i < files.size(); i++) { if (i != 0) fileNames += "\n"; fileNames += files.get(i); } tpFiles.setText(fileNames); } @Override public void addTimeMillis(long l) { runningTime.addAndGet(l); } @Override public void incrMessageRead() { messagesRead.incrementAndGet(); } @Override public void incrTcpMessageRead() { tcpMessagesProduced.incrementAndGet(); } @Override public AtomicLong tcpMessageRead() { return tcpMessagesProduced; } @Override public AtomicLong count1() { return messagesProduced1; } @Override public AtomicLong count2() { return messagesProduced2; } private static class BackgroundPanel extends JPanel { private Image image; public BackgroundPanel(Image image) { try { this.image = image; Dimension size = new Dimension(image.getWidth(null), image.getHeight(null)); setPreferredSize(size); setMinimumSize(size); setMaximumSize(size); setSize(size); setLayout(null); } catch (Exception e) { e.printStackTrace();/*handled in paintComponent()*/ } } @Override protected void paintComponent(Graphics g) { super.paintComponent(g); if (image != null) g.drawImage(image, 0,0,this.getWidth(),this.getHeight(),this); } } private class GUIUpdaterThread extends ControlledThread { private long count = 0; @Override public void loop() { tfTotalTCP.setText(String.format("%,d K", tcpMessagesProduced.get() / 1000)); tfTotalReads.setText(String.format("%,d K", messagesRead.get() / 1000)); long totalMessage = messagesProduced1.get() + messagesProduced2.get(); tfTotalWrites.setText(String.format("%,d K", totalMessage / 1000)); long runningTime = ChronicleDashboard2.this.runningTime.get(); tfRunningTime.setText(String.format("%.3f", runningTime / 1000.0)); if (runningTime != 0) { tfReadRate.setText(String.format("%,d K", messagesRead.get() / runningTime)); tfTCPRate.setText(String.format("%,d K", tcpMessagesProduced.get() / runningTime)); 
tfWriteRate.setText(String.format("%,d K", totalMessage / runningTime)); } if (count % 5 == 0) { //Once a second read file space tfDiskSpace.setText(getBytesAsGB(demo_path.getUsableSpace())); } count++; } @Override public void cleanup() { } } }
Events.
chronicle-demo/src/main/java/demo/ChronicleDashboard2.java
Events.
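Aside on the rate fields computed in GUIUpdaterThread.loop() above: runningTime is accumulated in milliseconds, so dividing a raw event count by it yields thousands of events per second, which is why the dashboard suffixes the value with "K". A standalone sketch of that arithmetic, using made-up figures rather than values taken from the demo:

// Standalone illustration of the dashboard's rate arithmetic (figures are hypothetical).
public class RateFormatExample {
    public static void main(String[] args) {
        long eventsRead = 1_500_000L;  // total events counted so far
        long runningTimeMs = 3_000L;   // elapsed demo time in milliseconds
        // count / milliseconds == thousands of events per second
        System.out.println(String.format("%,d K", eventsRead / runningTimeMs)); // prints "500 K"
    }
}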
Java
apache-2.0
6ffe908e9627752a1b5edb4fd1427d9f7c154cac
0
pipoop/hobby-android-app,irkit/android-sdk
package com.getirkit.irkit; import android.os.AsyncTask; import android.os.Parcel; import android.os.Parcelable; import android.util.Log; import com.getirkit.irkit.net.IRDeviceAPIService; import com.getirkit.irkit.net.IRHTTPClient; import com.getirkit.irkit.net.IRInternetAPIService; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.io.Serializable; import java.net.InetAddress; import java.util.Date; import java.util.HashMap; import retrofit.Callback; import retrofit.RetrofitError; import retrofit.client.Header; import retrofit.client.Response; /** * IRKitデバイスを表します。 * An IRKit device. */ public class IRPeripheral implements Serializable, Parcelable { // Never change this or you'll get InvalidClassException! private static final long serialVersionUID = 1L; public transient static final String TAG = "IRPeripheral"; /** * IRKitデバイスに固有のホスト名。ホスト名はIRKitをリセットしても変わりません。 * Hostname which uniquely identifies an IRKit device. Hostname will remain unchanged over time. */ private String hostname; /** * ユーザが設定したニックネーム。 * User-provided nickname. */ private String customizedName; /** * このIRKitを初めてローカルネットワーク内に発見した日時。 * First found date on a local network. */ private Date foundDate; /** * IRKitサーバから割り当てられたdeviceid。 * A deviceid which is assigned by IRKit Server. */ private String deviceId; /** * IRKitのモデル名。Device HTTP APIのServerヘッダから取得されます。 * IRKit model name provided by Server header (e.g. "IRKit"). */ private String modelName; /** * ファームウェアバージョン。Device HTTP APIのServerヘッダから取得されます。 * IRKit firmware version provided by Server header (e.g. "2.0.2.0.g838e0ea"). */ private String firmwareVersion; // transient == prevent the field from serializing private transient InetAddress host; private transient int port; private transient boolean isFetchingDeviceId = false; @Override public String toString() { return "IRPeripheral[hostname=" + hostname + ";deviceId=" + deviceId + ";customizedName=" + customizedName + ";modelName=" + modelName + ";firmwareVersion=" + firmwareVersion + ";host=" + host + ";port=" + port + "]"; } public interface IRPeripheralListener { public void onErrorFetchingDeviceId(String message); public void onDeviceIdStatusChange(); public void onFetchDeviceIdSuccess(); public void onFetchModelInfoSuccess(); public void onErrorFetchingModelInfo(String message); } // listener won't be packed in a Parcelable since it's transient private transient IRPeripheralListener listener; public IRPeripheralListener getListener() { return listener; } public void setListener(IRPeripheralListener listener) { this.listener = listener; } public IRPeripheral() { this.foundDate = new Date(); } public String getHostname() { return hostname; } public void setHostname(String hostname) { this.hostname = hostname; } public String getCustomizedName() { return customizedName; } public void setCustomizedName(String customizedName) { this.customizedName = customizedName; } public Date getFoundDate() { return foundDate; } public void setFoundDate(Date foundDate) { this.foundDate = foundDate; } public boolean hasDeviceId() { return deviceId != null; } public String getDeviceId() { return deviceId; } public void setDeviceId(String deviceId) { this.deviceId = deviceId; } public boolean hasModelInfo() { return modelName != null; } public String getModelName() { return modelName; } public void setModelName(String modelName) { this.modelName = modelName; } public String getFirmwareVersion() { return firmwareVersion; } public boolean isFetchingDeviceId() { return isFetchingDeviceId; } 
public void setFirmwareVersion(String firmwareVersion) { this.firmwareVersion = firmwareVersion; } public InetAddress getHost() { return host; } public void setHost(InetAddress host) { this.host = host; } public int getPort() { return port; } public void setPort(int port) { this.port = port; } /** * Device HTTP APIのレスポンスに含まれるServerヘッダの値を解釈します。 * modelNameとfirmwareVersionの値が変化している場合はフィールドに保存します。 * Parse the value of Server header in Device HTTP API response. * modelName and firmwareVersion may be updated. * * @param server Server header value * @return modelNameまたはfirmwareVersionの値が更新された場合はtrue。 * True if modelName or firmwareVersion has modified. */ public boolean parseServerValue(String server) { String[] params = server.split("/", 2); boolean isModified = false; if (params.length >= 2) { if (modelName == null || !modelName.equals(params[0])) { modelName = params[0]; isModified = true; } if (firmwareVersion == null || !firmwareVersion.equals(params[1])) { firmwareVersion = params[1]; isModified = true; } } return isModified; } /** * Device HTTP APIのレスポンスヘッダを解釈してフィールドを必要に応じて更新します。 * Parse headers in Device HTTP API response and store in fields if updated. * * @param response Response object * @return フィールドが更新された場合はtrue。 True if a field is modified. */ public boolean storeResponseHeaders(Response response) { for (Header header : response.getHeaders()) { String name = header.getName(); if (name != null && name.toLowerCase().equals("server")) { String value = header.getValue(); if (value != null) { return parseServerValue(value); } } } return false; } /** * modelNameとfirmwareVersionを取得します。 * Fetch modelName and firmwareVersion. */ public void fetchModelInfo() { fetchModelInfo(0); } /** * modelNameとfirmwareVersionを取得します。 * Fetch modelName and firmwareVersion. * * @param retryCount 最大リトライ数。 Max retry count. */ public void fetchModelInfo(final int retryCount) { if (!this.isLocalAddressResolved()) { Log.e(TAG, "fetchModelInfo: local address isn't resolved"); if (listener != null) { listener.onErrorFetchingModelInfo("network error"); } return; } if (retryCount > 5) { Log.e(TAG, "fetchModelInfo: exceeded max retry count"); if (listener != null) { listener.onErrorFetchingModelInfo("error"); } return; } IRHTTPClient httpClient = IRKit.sharedInstance().getHTTPClient(); httpClient.setDeviceAPIEndpoint("http://" + this.host.getHostAddress() + ":" + this.port); IRDeviceAPIService deviceAPIService = httpClient.getDeviceAPIService(); deviceAPIService.getMessages(new Callback<IRDeviceAPIService.GetMessagesResponse>() { @Override public void success(IRDeviceAPIService.GetMessagesResponse getMessagesResponse, Response response) { // fetchModelInfo success if (storeResponseHeaders(response)) { IRKit.sharedInstance().peripherals.save(); } if (listener != null) { listener.onFetchModelInfoSuccess(); } } @Override public void failure(RetrofitError error) { Log.e(TAG, "device getMessages failure: " + error.getMessage()); fetchModelInfo(retryCount + 1); } }); } /** * POST /keys を呼んでdeviceidを取得します。 * Fetch deviceid by calling POST /keys. * * @see <a href="http://getirkit.com/#toc_3">POST /keys</a> */ public void fetchDeviceId() { fetchDeviceId(0); } /** * POST /keys を呼んでdeviceidを取得します。 * Fetch deviceid by calling POST /keys. * * @param retryCount 現在のリトライ数。 Current retry count. 
* @see <a href="http://getirkit.com/#toc_3">POST /keys</a> */ private void fetchDeviceId(final int retryCount) { if (!isLocalAddressResolved()) { Log.e(TAG, "fetchDeviceId: local address isn't resolved"); if (listener != null) { isFetchingDeviceId = false; listener.onErrorFetchingDeviceId("network error"); } return; } if (retryCount > 5) { Log.e(TAG, "fetchDeviceId exceeded max retry count"); if (listener != null) { isFetchingDeviceId = false; listener.onErrorFetchingDeviceId("network error"); } return; } if (isFetchingDeviceId) { // already fetching device id return; } isFetchingDeviceId = true; if (listener != null) { listener.onDeviceIdStatusChange(); } IRHTTPClient.sharedInstance().setDeviceAPIEndpoint("http://" + this.host.getHostAddress() + ":" + this.port); IRDeviceAPIService deviceAPIService = IRHTTPClient.sharedInstance().getDeviceAPIService(); deviceAPIService.postKeys(new Callback<IRDeviceAPIService.PostKeysResponse>() { @Override public void success(IRDeviceAPIService.PostKeysResponse postKeysResponse, Response response) { if (storeResponseHeaders(response)) { IRKit.sharedInstance().peripherals.save(); } HashMap<String, String> params = new HashMap<>(); params.put("clienttoken", postKeysResponse.clienttoken); params.put("clientkey", IRHTTPClient.sharedInstance().getClientKey()); IRInternetAPIService internetAPIService = IRHTTPClient.sharedInstance().getInternetAPIService(); internetAPIService.postKeys(params, new Callback<IRInternetAPIService.PostKeysResponse>() { @Override public void success(IRInternetAPIService.PostKeysResponse postKeysResponse, Response response) { // Assigned a device id IRPeripheral.this.setDeviceId(postKeysResponse.deviceid); IRKit.sharedInstance().peripherals.save(); if (listener != null) { listener.onFetchDeviceIdSuccess(); } isFetchingDeviceId = false; } @Override public void failure(RetrofitError error) { Log.e(TAG, "internet postKeys failure: " + error.getMessage()); isFetchingDeviceId = false; } }); } @Override public void failure(RetrofitError error) { // Retry Log.w(TAG, "local postkeys failure: message=" + error.getMessage() + " kind=" + error.getKind() + "; retrying"); isFetchingDeviceId = false; fetchDeviceId(retryCount + 1); } }); } /** * このIRKitデバイスにローカルネットワーク内で接続するためのエンドポイントを返します。 * Return a local network endpoint for this IRKit device. * * @return "http://host:port" のような文字列。またはIRKitがローカルネットワーク内に見つからない場合はnull。 * A string like "http://host:port", or null if this peripheral is not found on local network. */ public String getDeviceAPIEndpoint() { if ( isLocalAddressResolved() ) { return "http://" + host.getHostAddress() + ":" + port; } else { return null; } } /** * IRKitがローカルネットワーク内に見つかっているかどうかを返します。 * Return whether this IRKit is found on local network. * * @return IRKitがローカルネットワーク内に検出済みの場合はtrue。 True if IRKit is found on local network. */ public boolean isLocalAddressResolved() { return host != null; } /** * IRKitへローカルネットワーク内で到達可能かどうかをテストします。最大で100ミリ秒ブロックします。 * Test whether this IRKit is reachable on local network. Blocks up to 100 milliseconds. * * @return IRKitに到達可能な場合はtrue。 True if IRKit is reachable. */ public boolean isReachable() { if ( isLocalAddressResolved() ) { try { return host.isReachable(100); } catch (IOException e) { e.printStackTrace(); return false; } } else { return false; } } /** * testReachability()で使用するコールバック用インタフェースです。 * Callback interface for testReachability(). 
* * @see IRPeripheral#testReachability(ReachabilityResult) */ public interface ReachabilityResult { /** * IRKitがローカルネットワーク内で到達可能な時に呼ばれます。この場合はDevice HTTP APIを利用できます。 * Called when IRKit is reachable on local network. Device HTTP API is available. */ public void reachable(); /** * IRKitがローカルネットワーク内で到達できない時に呼ばれます。この場合はDevice HTTP APIを利用できません。 * Called when IRKit is not reachable on local network. Device HTTP API is unavailable. */ public void notReachable(); } /** * IRKitにローカルネットワーク内で到達可能かどうかを非同期にテストします。 * Test asynchronously whether this IRKit is reachable on local network. * * @param result ReachabilityResult object */ public void testReachability(final ReachabilityResult result) { if ( !isLocalAddressResolved() ) { result.notReachable(); return; } new AsyncTask<Void, Void, Void>() { @Override protected Void doInBackground(Void... voids) { try { if (host.isReachable(100)) { result.reachable(); return null; } } catch (IOException e) { e.printStackTrace(); } result.notReachable(); return null; } }.execute(); } /** * IRKitにローカルネットワーク内で到達できなくなった際にこのメソッドを呼びます。 * Call this method when IRKit is no longer reachable on local network. */ public void lostLocalAddress() { this.host = null; this.port = 0; } public JSONObject toJSONObject() { JSONObject obj = new JSONObject(); try { obj.put("deviceid", deviceId); obj.put("customizedName", customizedName); obj.put("hostname", hostname); obj.put("foundDate", foundDate.getTime() / 1000); obj.put("version", firmwareVersion); obj.put("modelName", modelName); obj.put("regdomain", IRKit.getRegDomainForDefaultLocale()); } catch (JSONException e) { e.printStackTrace(); } return obj; } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel out, int flags) { out.writeString(hostname); out.writeString(customizedName); out.writeSerializable(foundDate); out.writeString(deviceId); out.writeString(modelName); out.writeString(firmwareVersion); out.writeSerializable(host); out.writeInt(port); out.writeByte((byte) (isFetchingDeviceId ? 1 : 0)); } public static final Creator<IRPeripheral> CREATOR = new Creator<IRPeripheral>() { @Override public IRPeripheral createFromParcel(Parcel in) { return new IRPeripheral(in); } @Override public IRPeripheral[] newArray(int size) { return new IRPeripheral[size]; } }; private IRPeripheral(Parcel in) { hostname = in.readString(); customizedName = in.readString(); foundDate = (Date) in.readSerializable(); deviceId = in.readString(); modelName = in.readString(); firmwareVersion = in.readString(); host = (InetAddress) in.readSerializable(); port = in.readInt(); isFetchingDeviceId = in.readByte() != 0; } }
irkit-android-sdk/src/main/java/com/getirkit/irkit/IRPeripheral.java
package com.getirkit.irkit; import android.os.AsyncTask; import android.os.Parcel; import android.os.Parcelable; import android.util.Log; import com.getirkit.irkit.net.IRDeviceAPIService; import com.getirkit.irkit.net.IRHTTPClient; import com.getirkit.irkit.net.IRInternetAPIService; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.io.Serializable; import java.net.InetAddress; import java.util.Date; import java.util.HashMap; import retrofit.Callback; import retrofit.RetrofitError; import retrofit.client.Header; import retrofit.client.Response; /** * An IRKit device. * IRKitデバイスを表します。 */ public class IRPeripheral implements Serializable, Parcelable { // Never change this or you'll get InvalidClassException! private static final long serialVersionUID = 1L; public transient static final String TAG = "IRPeripheral"; /** * Hostname which uniquely identifies an IRKit device. Hostname will remain unchanged over time. * IRKitデバイスに固有のホスト名。ホスト名はIRKitをリセットしても変わりません。 */ private String hostname; /** * User-provided nickname. * ユーザが設定したニックネーム。 */ private String customizedName; /** * First found date on a local network. * このIRKitを初めてローカルネットワーク内に発見した日時。 */ private Date foundDate; /** * A deviceid which is assigned by IRKit Server. * IRKitサーバから割り当てられたdeviceid。 */ private String deviceId; /** * IRKit model name provided by Server header (e.g. "IRKit"). * IRKitのモデル名。Device HTTP APIのServerヘッダから取得されます。 */ private String modelName; /** * IRKit firmware version provided by Server header (e.g. "2.0.2.0.g838e0ea"). * ファームウェアバージョン。Device HTTP APIのServerヘッダから取得されます。 */ private String firmwareVersion; // transient == prevent the field from serializing private transient InetAddress host; private transient int port; private transient boolean isFetchingDeviceId = false; @Override public String toString() { return "IRPeripheral[hostname=" + hostname + ";deviceId=" + deviceId + ";customizedName=" + customizedName + ";modelName=" + modelName + ";firmwareVersion=" + firmwareVersion + ";host=" + host + ";port=" + port + "]"; } public interface IRPeripheralListener { public void onErrorFetchingDeviceId(String message); public void onDeviceIdStatusChange(); public void onFetchDeviceIdSuccess(); public void onFetchModelInfoSuccess(); public void onErrorFetchingModelInfo(String message); } // listener won't be packed in a Parcelable since it's transient private transient IRPeripheralListener listener; public IRPeripheralListener getListener() { return listener; } public void setListener(IRPeripheralListener listener) { this.listener = listener; } public IRPeripheral() { this.foundDate = new Date(); } public String getHostname() { return hostname; } public void setHostname(String hostname) { this.hostname = hostname; } public String getCustomizedName() { return customizedName; } public void setCustomizedName(String customizedName) { this.customizedName = customizedName; } public Date getFoundDate() { return foundDate; } public void setFoundDate(Date foundDate) { this.foundDate = foundDate; } public boolean hasDeviceId() { return deviceId != null; } public String getDeviceId() { return deviceId; } public void setDeviceId(String deviceId) { this.deviceId = deviceId; } public boolean hasModelInfo() { return modelName != null; } public String getModelName() { return modelName; } public void setModelName(String modelName) { this.modelName = modelName; } public String getFirmwareVersion() { return firmwareVersion; } public boolean isFetchingDeviceId() { return isFetchingDeviceId; } 
public void setFirmwareVersion(String firmwareVersion) { this.firmwareVersion = firmwareVersion; } public InetAddress getHost() { return host; } public void setHost(InetAddress host) { this.host = host; } public int getPort() { return port; } public void setPort(int port) { this.port = port; } /** * Parse the value of Server header in Device HTTP API response. * modelName and firmwareVersion may be updated. * Device HTTP APIのレスポンスに含まれるServerヘッダの値を解釈します。 * modelNameとfirmwareVersionの値が変化している場合はフィールドに保存します。 * * @param server Server header value * @return True if modelName or firmwareVersion has modified. * modelNameまたはfirmwareVersionの値が更新された場合はtrue。 */ public boolean parseServerValue(String server) { String[] params = server.split("/", 2); boolean isModified = false; if (params.length >= 2) { if (modelName == null || !modelName.equals(params[0])) { modelName = params[0]; isModified = true; } if (firmwareVersion == null || !firmwareVersion.equals(params[1])) { firmwareVersion = params[1]; isModified = true; } } return isModified; } /** * Parse headers in Device HTTP API response and store in fields if updated. * Device HTTP APIのレスポンスヘッダを解釈してフィールドを必要に応じて更新します。 * * @param response Response object * @return True if a field is modified. フィールドが更新された場合はtrue。 */ public boolean storeResponseHeaders(Response response) { for (Header header : response.getHeaders()) { String name = header.getName(); if (name != null && name.toLowerCase().equals("server")) { String value = header.getValue(); if (value != null) { return parseServerValue(value); } } } return false; } /** * Fetch modelName and firmwareVersion. * modelNameとfirmwareVersionを取得します。 */ public void fetchModelInfo() { fetchModelInfo(0); } /** * Fetch modelName and firmwareVersion. * modelNameとfirmwareVersionを取得します。 * * @param retryCount Max retry count. 最大リトライ数。 */ public void fetchModelInfo(final int retryCount) { if (!this.isLocalAddressResolved()) { Log.e(TAG, "fetchModelInfo: local address isn't resolved"); if (listener != null) { listener.onErrorFetchingModelInfo("network error"); } return; } if (retryCount > 5) { Log.e(TAG, "fetchModelInfo: exceeded max retry count"); if (listener != null) { listener.onErrorFetchingModelInfo("error"); } return; } IRHTTPClient httpClient = IRKit.sharedInstance().getHTTPClient(); httpClient.setDeviceAPIEndpoint("http://" + this.host.getHostAddress() + ":" + this.port); IRDeviceAPIService deviceAPIService = httpClient.getDeviceAPIService(); deviceAPIService.getMessages(new Callback<IRDeviceAPIService.GetMessagesResponse>() { @Override public void success(IRDeviceAPIService.GetMessagesResponse getMessagesResponse, Response response) { // fetchModelInfo success if (storeResponseHeaders(response)) { IRKit.sharedInstance().peripherals.save(); } if (listener != null) { listener.onFetchModelInfoSuccess(); } } @Override public void failure(RetrofitError error) { Log.e(TAG, "device getMessages failure: " + error.getMessage()); fetchModelInfo(retryCount + 1); } }); } /** * Fetch deviceid by calling POST /keys. * POST /keys を呼んでdeviceidを取得します。 * * @see <a href="http://getirkit.com/#toc_3">POST /keys</a> */ public void fetchDeviceId() { fetchDeviceId(0); } /** * Fetch deviceid by calling POST /keys. * POST /keys を呼んでdeviceidを取得します。 * * @param retryCount Current retry count. 
現在のリトライ数。 * @see <a href="http://getirkit.com/#toc_3">POST /keys</a> */ private void fetchDeviceId(final int retryCount) { if (!isLocalAddressResolved()) { Log.e(TAG, "fetchDeviceId: local address isn't resolved"); if (listener != null) { isFetchingDeviceId = false; listener.onErrorFetchingDeviceId("network error"); } return; } if (retryCount > 5) { Log.e(TAG, "fetchDeviceId exceeded max retry count"); if (listener != null) { isFetchingDeviceId = false; listener.onErrorFetchingDeviceId("network error"); } return; } if (isFetchingDeviceId) { // already fetching device id return; } isFetchingDeviceId = true; if (listener != null) { listener.onDeviceIdStatusChange(); } IRHTTPClient.sharedInstance().setDeviceAPIEndpoint("http://" + this.host.getHostAddress() + ":" + this.port); IRDeviceAPIService deviceAPIService = IRHTTPClient.sharedInstance().getDeviceAPIService(); deviceAPIService.postKeys(new Callback<IRDeviceAPIService.PostKeysResponse>() { @Override public void success(IRDeviceAPIService.PostKeysResponse postKeysResponse, Response response) { if (storeResponseHeaders(response)) { IRKit.sharedInstance().peripherals.save(); } HashMap<String, String> params = new HashMap<>(); params.put("clienttoken", postKeysResponse.clienttoken); params.put("clientkey", IRHTTPClient.sharedInstance().getClientKey()); IRInternetAPIService internetAPIService = IRHTTPClient.sharedInstance().getInternetAPIService(); internetAPIService.postKeys(params, new Callback<IRInternetAPIService.PostKeysResponse>() { @Override public void success(IRInternetAPIService.PostKeysResponse postKeysResponse, Response response) { // Assigned a device id IRPeripheral.this.setDeviceId(postKeysResponse.deviceid); IRKit.sharedInstance().peripherals.save(); if (listener != null) { listener.onFetchDeviceIdSuccess(); } isFetchingDeviceId = false; } @Override public void failure(RetrofitError error) { Log.e(TAG, "internet postKeys failure: " + error.getMessage()); isFetchingDeviceId = false; } }); } @Override public void failure(RetrofitError error) { // Retry Log.w(TAG, "local postkeys failure: message=" + error.getMessage() + " kind=" + error.getKind() + "; retrying"); isFetchingDeviceId = false; fetchDeviceId(retryCount + 1); } }); } /** * Return a local network endpoint for this IRKit device. * このIRKitデバイスにローカルネットワーク内で接続するためのエンドポイントを返します。 * * @return A string like "http://host:port", or null if this peripheral is not found on local network. * "http://host:port" のような文字列。またはIRKitがローカルネットワーク内に見つからない場合はnull。 */ public String getDeviceAPIEndpoint() { if ( isLocalAddressResolved() ) { return "http://" + host.getHostAddress() + ":" + port; } else { return null; } } /** * Return whether this IRKit is found on local network. * IRKitがローカルネットワーク内に見つかっているかどうかを返します。 * * @return True if IRKit is found on local network. IRKitがローカルネットワーク内に検出済みの場合はtrue。 */ public boolean isLocalAddressResolved() { return host != null; } /** * Test whether this IRKit is reachable on local network. Blocks up to 100 milliseconds. * IRKitへローカルネットワーク内で到達可能かどうかをテストします。最大で100ミリ秒ブロックします。 * * @return True if IRKit is reachable. IRKitに到達可能な場合はtrue。 */ public boolean isReachable() { if ( isLocalAddressResolved() ) { try { return host.isReachable(100); } catch (IOException e) { e.printStackTrace(); return false; } } else { return false; } } /** * Callback interface for testReachability(). 
* testReachability()で使用するコールバック用インタフェースです。 * * @see IRPeripheral#testReachability(ReachabilityResult) */ public interface ReachabilityResult { /** * Called when IRKit is reachable on local network. Device HTTP API is available. * IRKitがローカルネットワーク内で到達可能な時に呼ばれます。この場合はDevice HTTP APIを利用できます。 */ public void reachable(); /** * Called when IRKit is not reachable on local network. Device HTTP API is unavailable. * IRKitがローカルネットワーク内で到達できない時に呼ばれます。この場合はDevice HTTP APIを利用できません。 */ public void notReachable(); } /** * Test asynchronously whether this IRKit is reachable on local network. * IRKitにローカルネットワーク内で到達可能かどうかを非同期にテストします。 * * @param result ReachabilityResult object */ public void testReachability(final ReachabilityResult result) { if ( !isLocalAddressResolved() ) { result.notReachable(); return; } new AsyncTask<Void, Void, Void>() { @Override protected Void doInBackground(Void... voids) { try { if (host.isReachable(100)) { result.reachable(); return null; } } catch (IOException e) { e.printStackTrace(); } result.notReachable(); return null; } }.execute(); } /** * Call this method when IRKit is no longer reachable on local network. * IRKitにローカルネットワーク内で到達できなくなった際にこのメソッドを呼びます。 */ public void lostLocalAddress() { this.host = null; this.port = 0; } public JSONObject toJSONObject() { JSONObject obj = new JSONObject(); try { obj.put("deviceid", deviceId); obj.put("customizedName", customizedName); obj.put("hostname", hostname); obj.put("foundDate", foundDate.getTime() / 1000); obj.put("version", firmwareVersion); obj.put("modelName", modelName); obj.put("regdomain", IRKit.getRegDomainForDefaultLocale()); } catch (JSONException e) { e.printStackTrace(); } return obj; } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel out, int flags) { out.writeString(hostname); out.writeString(customizedName); out.writeSerializable(foundDate); out.writeString(deviceId); out.writeString(modelName); out.writeString(firmwareVersion); out.writeSerializable(host); out.writeInt(port); out.writeByte((byte) (isFetchingDeviceId ? 1 : 0)); } public static final Creator<IRPeripheral> CREATOR = new Creator<IRPeripheral>() { @Override public IRPeripheral createFromParcel(Parcel in) { return new IRPeripheral(in); } @Override public IRPeripheral[] newArray(int size) { return new IRPeripheral[size]; } }; private IRPeripheral(Parcel in) { hostname = in.readString(); customizedName = in.readString(); foundDate = (Date) in.readSerializable(); deviceId = in.readString(); modelName = in.readString(); firmwareVersion = in.readString(); host = (InetAddress) in.readSerializable(); port = in.readInt(); isFetchingDeviceId = in.readByte() != 0; } }
IRPeripheral: Put Japanese doc first
irkit-android-sdk/src/main/java/com/getirkit/irkit/IRPeripheral.java
IRPeripheral: Put Japanese doc first
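The IRPeripheral record above documents testReachability() as an asynchronous check driven by the nested ReachabilityResult callback. A minimal caller-side sketch of how that callback might be wired up; the surrounding class and the way the peripheral instance is obtained are assumptions, not part of the SDK sources in the record:

import com.getirkit.irkit.IRPeripheral;

public class ReachabilityExample {
    // 'peripheral' is assumed to come from the SDK's discovery mechanism.
    public static void check(final IRPeripheral peripheral) {
        peripheral.testReachability(new IRPeripheral.ReachabilityResult() {
            @Override
            public void reachable() {
                // Device HTTP API is usable directly on the local network.
                System.out.println("IRKit reachable at " + peripheral.getDeviceAPIEndpoint());
            }

            @Override
            public void notReachable() {
                // Local endpoint unavailable; a client would fall back to the Internet API.
                System.out.println("IRKit not reachable on the local network");
            }
        });
    }
}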
Java
apache-2.0
bc40c9050019561d5227fb7dafa60760febde6d0
0
astarlabs/bitcoin-java-api-client,astarlabs/bitcoin-java-api-client
package br.com.astarlabs.util; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.util.List; import br.com.astarlabs.client.ApiException; import br.com.astarlabs.client.api.SearchApi; import br.com.astarlabs.client.api.SendApi; import br.com.astarlabs.client.model.BlockchainNetwork; import br.com.astarlabs.client.model.SingleResult; import br.com.astarlabs.client.model.Transaction; public class Blockchain { private String token; private Integer account; private String user; private String pass; /** * Registro um hash (bytearray) a partir de um arquivo * @param file Arquivo que sera registrado na BLockchain * @param blockchainNetwork qual a rede blockchain o registro deverá ser feito * @param testMode indica se o registro deve ser feito na rede de teste ou na rede principal * @return Texto de numero inteiro contentdo o ID da transação validada * @throws IOException * @throws ApiException */ public String registrarDocumento(File file, final BlockchainNetwork blockchainNetwork, final Integer testMode) throws IOException, ApiException { return registrarDocumento(Files.readAllBytes(file.toPath()), blockchainNetwork, testMode); } public String registrarDocumento(String fileString, final BlockchainNetwork blockchainNetwork, final Integer testMode) throws ApiException { return registrarDocumento(fileString.getBytes(), blockchainNetwork, testMode); } public String registrarDocumento(byte[] bytesFile, final BlockchainNetwork blockchainNetwork, final Integer testMode) throws ApiException { String hash = DoubleSha256.hashFile(bytesFile); token = getMyToken(); SendApi api = new SendApi(); SingleResult singleResult = api.sendHash(token, account, user, pass, hash, blockchainNetwork, testMode); if (singleResult.getResult() != null && singleResult.getStatus()) { return singleResult.getResult(); } return null; } /** * * @param bytesFile * @return * @throws ApiException */ public Transaction consultarDocumentoPorConteudo(byte[] bytesFile) throws ApiException { token = getMyToken(); SearchApi api = new SearchApi(); List<Transaction> transactions = api.searchByContent(token, account, user, pass, new String(bytesFile)); if (transactions != null && transactions.size() > 0) { return transactions.get(0); } return null; } public Transaction validarRegistroDocumento(Integer id) throws ApiException { token = getMyToken(); Transaction transaction = new Transaction(); SearchApi sa = new SearchApi(); transaction = sa.searchByAPIID(token, account, user, pass, id); return transaction; } private String getMyToken() { try { this.token = Token.sign(token); } catch (Exception e) { e.printStackTrace(); } return token; } public Blockchain(String token, Integer account, String user, String pass) { super(); this.token = token; this.account = account; this.user = user; this.pass = pass; } }
src/main/java/br/com/astarlabs/util/Blockchain.java
package br.com.astarlabs.util; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.util.List; import br.com.astarlabs.client.ApiException; import br.com.astarlabs.client.api.SearchApi; import br.com.astarlabs.client.api.SendApi; import br.com.astarlabs.client.model.BlockchainNetwork; import br.com.astarlabs.client.model.SingleResult; import br.com.astarlabs.client.model.Transaction; public class Blockchain { private String token; private Integer account; private String user; private String pass; /** * * @param Arquivo que sera registrado na BLockchain * @return Inteiro contentdo o ID da transação validada * @throws IOException * @throws ApiException */ public String registrarDocumento(File file, final BlockchainNetwork blockchainNetwork, final Integer testMode) throws IOException, ApiException { return registrarDocumento(Files.readAllBytes(file.toPath()), blockchainNetwork, testMode); } public String registrarDocumento(String fileString, final BlockchainNetwork blockchainNetwork, final Integer testMode) throws ApiException { return registrarDocumento(fileString.getBytes(), blockchainNetwork, testMode); } public String registrarDocumento(byte[] bytesFile, final BlockchainNetwork blockchainNetwork, final Integer testMode) throws ApiException { String hash = DoubleSha256.hashFile(bytesFile); token = getMyToken(); SendApi api = new SendApi(); SingleResult singleResult = api.sendHash(token, account, user, pass, hash, blockchainNetwork, testMode); if (singleResult.getResult() != null && singleResult.getStatus()) { return singleResult.getResult(); } return null; } /** * * @param bytesFile * @return * @throws ApiException */ public Transaction consultarDocumentoPorConteudo(byte[] bytesFile) throws ApiException { token = getMyToken(); SearchApi api = new SearchApi(); List<Transaction> transactions = api.searchByContent(token, account, user, pass, new String(bytesFile)); if (transactions != null && transactions.size() > 0) { return transactions.get(0); } return null; } public Transaction validarRegistroDocumento(Integer id) throws ApiException { token = getMyToken(); Transaction transaction = new Transaction(); SearchApi sa = new SearchApi(); transaction = sa.searchByAPIID(token, account, user, pass, id); return transaction; } private String getMyToken() { try { this.token = Token.sign(token); } catch (Exception e) { e.printStackTrace(); } return token; } public Blockchain(String token, Integer account, String user, String pass) { super(); this.token = token; this.account = account; this.user = user; this.pass = pass; } }
Improved documentation of the registrarDocumento method
src/main/java/br/com/astarlabs/util/Blockchain.java
Improved documentation of the registrarDocumento method
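For orientation, a hypothetical caller of the Blockchain wrapper above might look like the following; the credentials, the file, the choice of BlockchainNetwork constant and the meaning of the testMode flag are placeholders inferred from the signatures, not values taken from the record:

import java.io.File;
import br.com.astarlabs.client.model.BlockchainNetwork;
import br.com.astarlabs.util.Blockchain;

public class RegisterDocumentExample {
    /** Registers a file on the given network and returns the transaction id string. */
    public static String register(File file, BlockchainNetwork network) throws Exception {
        // Placeholder credentials; a real client would load these from configuration.
        Blockchain blockchain = new Blockchain("api-token", 1, "user", "pass");
        // Last argument is the testMode flag from registrarDocumento's signature.
        return blockchain.registrarDocumento(file, network, 1);
    }
}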
Java
bsd-2-clause
8a5ed58a14c4b29269604d15f4f2f257492007dd
0
KorAP/Koral
package de.ids_mannheim.korap.query.serialize; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.BailErrorStrategy; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.Parser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; import de.ids_mannheim.korap.query.PoliqarpPlusLexer; import de.ids_mannheim.korap.query.PoliqarpPlusParser; import de.ids_mannheim.korap.query.serialize.AbstractSyntaxTree; import de.ids_mannheim.korap.util.QueryException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Map representation of Poliqarp syntax tree as returned by ANTLR * @author joachim * */ public class PoliqarpPlusTree extends AbstractSyntaxTree { Logger log = LoggerFactory.getLogger(PoliqarpPlusTree.class); /** * Top-level map representing the whole request. */ LinkedHashMap<String,Object> requestMap = new LinkedHashMap<String,Object>(); /** * Keeps track of open node categories */ LinkedList<String> openNodeCats = new LinkedList<String>(); /** * Flag that indicates whether token fields or meta fields are currently being processed */ boolean inMeta = false; /** * Flag that indicates whether a cq_segment is to be ignored (e.g. when it is empty, is followed directly by only a spanclass and has no other children etc...). */ boolean ignoreCq_segment = false; /** * Flag that indicates whether a cq_segments element is quantified by an occ element. */ boolean cqHasOccSibling = false; /** * Flag that indicates whether a cq_segments' children are quantified by an occ element. */ boolean cqHasOccChild = false; /** * Flag for negation of complete field */ boolean negField = false; /** * Flag that indicates whether subsequent element is to be aligned. */ boolean alignNext = false; /** * Flag that indicates whether current element has been aligned. */ boolean isAligned = false; /** * Indicates a sequence which has an align operator as its child. Needed for deciding * when to close the align group object. */ // ParseTree alignedSequence = null; /** * Parser object deriving the ANTLR parse tree. */ static Parser poliqarpParser; /** * Keeps track of all visited nodes in a tree */ List<ParseTree> visited = new ArrayList<ParseTree>(); /** * Keeps track of active fields (like 'base=foo'). */ LinkedList<ArrayList<Object>> fieldStack = new LinkedList<ArrayList<Object>>(); /** * Keeps track of active tokens. */ LinkedList<LinkedHashMap<String,Object>> tokenStack = new LinkedList<LinkedHashMap<String,Object>>(); /** * Marks the currently active token in order to know where to add flags (might already have been taken away from token stack). */ LinkedHashMap<String,Object> curToken = new LinkedHashMap<String,Object>(); /** * Keeps track of active object. */ LinkedList<LinkedHashMap<String,Object>> objectStack = new LinkedList<LinkedHashMap<String,Object>>(); /** * Marks the object to which following occurrence information is to be added. 
*/ LinkedHashMap<String,Object> curOccGroup = new LinkedHashMap<String,Object>(); /** * Keeps track of how many objects there are to pop after every recursion of {@link #processNode(ParseTree)} */ LinkedList<Integer> objectsToPop = new LinkedList<Integer>(); /** * Keeps track of how many objects there are to pop after every recursion of {@link #processNode(ParseTree)} */ LinkedList<Integer> tokensToPop = new LinkedList<Integer>(); /** * Keeps track of how many objects there are to pop after every recursion of {@link #processNode(ParseTree)} */ LinkedList<Integer> fieldsToPop = new LinkedList<Integer>(); /** * If true, print debug statements */ public static boolean debug = false; /** * Index of the current child of its parent (needed for relating occ elements to their operands). */ int curChildIndex = 0; /** * */ Integer stackedObjects = 0; Integer stackedTokens= 0; Integer stackedFields = 0; /** * Most centrally, this class maintains a set of nested maps and lists which represent the JSON tree, which is built by the JSON serialiser * on basis of the {@link #requestMap} at the root of the tree. * <br/> * The class further maintains a set of stacks which effectively keep track of which objects to embed in which containing objects. * * @param query The syntax tree as returned by ANTLR * @throws QueryException */ public PoliqarpPlusTree(String query) throws QueryException { try { process(query); } catch (NullPointerException e) { if (query.contains(" ")) { System.err.println("Warning: It seems like your query contains illegal whitespace characters. Trying again with whitespaces removed..."); query = query.replaceAll(" ", ""); process(query); } else { try { throw new QueryException("Error handling query."); } catch (QueryException e1) { e1.printStackTrace(); System.exit(1); } } } System.out.println(">>> "+requestMap.get("query")+" <<<"); log.info(">>> " + requestMap.get("query") + " <<<"); } @Override public Map<String, Object> getRequestMap() { return requestMap; } @Override public void process(String query) throws QueryException { ParseTree tree = null; try { tree = parsePoliqarpQuery(query); } catch (QueryException e) { try { tree = parsePoliqarpQuery(query.replaceAll(" ", "")); } catch (QueryException e1) { System.exit(1); } } System.out.println("Processing PoliqarpPlus"); requestMap.put("context", "http://ids-mannheim.de/ns/KorAP/json-ld/v0.1/context.jsonld"); // QueryUtils.prepareContext(requestMap); processNode(tree); } /** * Recursively calls itself with the children of the currently active node, traversing the tree nodes in a top-down, depth-first fashion. * A list is maintained that contains all visited nodes * in case they have been directly addressed by its (grand-/grand-grand-/...) parent node, such that some processing time is saved, as these node will * not be processed. This method is effectively a list of if-statements that are responsible for treating the different node types correctly and filling the * respective maps/lists. * * @param node The currently processed node. The process(String query) method calls this method with the root. 
* @throws QueryException */ @SuppressWarnings("unchecked") private void processNode(ParseTree node) throws QueryException { // Top-down processing if (visited.contains(node)) return; else visited.add(node); if (alignNext) { alignNext=false; isAligned=true; } String nodeCat = QueryUtils.getNodeCat(node); openNodeCats.push(nodeCat); stackedObjects = 0; stackedTokens= 0; stackedFields = 0; if (debug) { System.err.println(" "+objectStack); System.err.println(" "+tokenStack); System.out.println(openNodeCats); } /* **************************************************************** **************************************************************** * Processing individual node categories * **************************************************************** **************************************************************** */ // cq_segments/sq_segments: token group if (nodeCat.equals("cq_segments") || nodeCat.equals("sq_segments")) { cqHasOccSibling = false; cqHasOccChild = false; // disregard empty segments in simple queries (parsed by ANTLR as empty cq_segments) ignoreCq_segment = (node.getChildCount() == 1 && (node.getChild(0).toStringTree(poliqarpParser).equals(" ") || QueryUtils.getNodeCat(node.getChild(0)).equals("spanclass") || QueryUtils.getNodeCat(node.getChild(0)).equals("position"))); // ignore this node if it only serves as an aligned sequence container if (node.getChildCount()>1) { if (QueryUtils.getNodeCat(node.getChild(1)).equals("cq_segments") && QueryUtils.hasChild(node.getChild(1), "alignment")) { // if (QueryUtils.getNodeCat(node.getChild(0)).equals("align")) { ignoreCq_segment = true; } } if (!ignoreCq_segment) { LinkedHashMap<String,Object> sequence = new LinkedHashMap<String,Object>(); // Step 0: cq_segments has 'occ' child -> introduce group as super group to the sequence/token/group // this requires creating a group and inserting it at a suitable place if (node.getParent().getChildCount()>curChildIndex+2 && QueryUtils.getNodeCat(node.getParent().getChild(curChildIndex+2)).equals("occ")) { cqHasOccSibling = true; createOccGroup(node); } if (QueryUtils.getNodeCat(node.getChild(node.getChildCount()-1)).equals("occ")) { cqHasOccChild = true; } // Step I: decide type of element (one or more elements? -> token or sequence) // take into account a possible 'occ' child with accompanying parantheses, therefore 3 extra children int occExtraChildren = cqHasOccChild ? 
3:0; if (node.getChildCount()>1 + occExtraChildren) { ParseTree emptySegments = QueryUtils.getFirstChildWithCat(node, "empty_segments"); if (emptySegments != null && emptySegments != node.getChild(0)) { String[] minmax = parseEmptySegments(emptySegments); Integer min = Integer.parseInt(minmax[0]); Integer max = Integer.parseInt(minmax[1]); sequence.put("@type", "korap:group"); sequence.put("operation", "operation:"+"sequence"); // sequence.put("operation", "operation:"+"distance"); sequence.put("inOrder", true); ArrayList<Object> constraint = new ArrayList<Object>(); sequence.put("distances", constraint); ArrayList<Object> sequenceOperands = new ArrayList<Object>(); sequence.put("operands", sequenceOperands); objectStack.push(sequence); stackedObjects++; LinkedHashMap<String, Object> distMap = new LinkedHashMap<String, Object>(); constraint.add(distMap); distMap.put("@type", "korap:distance"); distMap.put("key", "w"); distMap.put("min", min); distMap.put("max", max); } else { sequence.put("@type", "korap:group"); sequence.put("operation", "operation:"+"sequence"); ArrayList<Object> sequenceOperands = new ArrayList<Object>(); if (emptySegments != null) { String[] minmax = parseEmptySegments(emptySegments); Integer min = Integer.parseInt(minmax[0]); Integer max = Integer.parseInt(minmax[1]); sequence.put("offset-min", min); sequence.put("offset-max", max); } sequence.put("operands", sequenceOperands); objectStack.push(sequence); stackedObjects++; } } else { // if only child, make the sequence a mere token... // ... but only if it has a real token/element beneath it if (QueryUtils.getNodeCat(node.getChild(0)).equals("cq_segment") || QueryUtils.getNodeCat(node.getChild(0)).equals("sq_segment") || QueryUtils.getNodeCat(node.getChild(0)).equals("element") ) { sequence.put("@type", "korap:token"); tokenStack.push(sequence); stackedTokens++; objectStack.push(sequence); stackedObjects++; // else, it's a group (with shrink()/spanclass/align... 
as child) } else { sequence.put("@type", "korap:group"); } } // Step II: decide where to put this element // check if this is an argument for a containing occurrence group (see step 0) if (cqHasOccSibling) { ArrayList<Object> topGroupOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topGroupOperands.add(sequence); // ...if not modified by occurrence, put into suitable super structure } else { if (openNodeCats.get(1).equals("query")) { // cq_segment is top query node if (node.getParent().getChildCount()==1) { // only child requestMap.put("query", sequence); } else { // not an only child, need to create containing sequence if (node.getParent().getChild(0).equals(node)) { // if first child, create containing sequence and embed there LinkedHashMap<String,Object> superSequence = new LinkedHashMap<String,Object>(); superSequence.put("@type", "korap:group"); superSequence.put("operation", "operation:"+"sequence"); ArrayList<Object> operands = new ArrayList<Object>(); superSequence.put("operands", operands); operands.add(sequence); requestMap.put("query", superSequence); objectStack.push(superSequence); // add at 2nd position to keep current cq_segment accessible stackedObjects++; } else { // if not first child, add to previously created parent sequence ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(sequence); } } } else if (!objectStack.isEmpty()){ // embed in super sequence ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(sequence); } } } } // cq_segment if (nodeCat.equals("cq_segment")) { int onTopOfObjectStack = 0; // Step I: determine whether to create new token or get token from the stack (if added by cq_segments) LinkedHashMap<String, Object> token; if (tokenStack.isEmpty()) { token = new LinkedHashMap<String, Object>(); tokenStack.push(token); stackedTokens++; // do this only if token is newly created, otherwise it'll be in objectStack twice objectStack.push(token); onTopOfObjectStack = 1; stackedObjects++; } else { // in case cq_segments has already added the token token = tokenStack.getFirst(); } curToken = token; // Step II: start filling object and add to containing sequence token.put("@type", "korap:token"); // add token to sequence only if it is not an only child (in that case, cq_segments has already added the info and is just waiting for the values from "field") // take into account a possible 'occ' child if (node.getParent().getChildCount()>1) { if (node.getText().equals("[]")) { // LinkedHashMap<String, Object> sequence = objectStack.get(onTopOfObjectStack); // String offsetStr = (String) sequence.get("offset"); // if (offsetStr == null) { // sequence.put("offset", "1"); // } else { // Integer offset = Integer.parseInt(offsetStr); // sequence.put("offset", offset+1); // } // } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(onTopOfObjectStack).get("operands"); topSequenceOperands.add(token); } } } // cq_segment modified by occurrence if (nodeCat.equals("cq_seg_occ")) { LinkedHashMap<String,Object> group = new LinkedHashMap<String,Object>(); curOccGroup = group; group.put("@type", "korap:group"); group.put("operands", new ArrayList<Object>()); objectStack.push(group); stackedObjects++; // add group to sequence only if it is not an only child (in that case, cq_segments has already added the info and is just waiting for the values from "field") // take into account a possible 'occ' child if 
(node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(group); } else { requestMap.put("query", group); } } // disjoint cq_segments, like ([base=foo][base=bar])|[base=foobar] if (nodeCat.equals("cq_disj_segments")) { LinkedHashMap<String,Object> disjunction = new LinkedHashMap<String,Object>(); objectStack.push(disjunction); stackedObjects++; ArrayList<Object> disjOperands = new ArrayList<Object>(); disjunction.put("@type", "korap:group"); disjunction.put("operation", "operation:"+"or"); disjunction.put("operands", disjOperands); // decide where to put the disjunction if (openNodeCats.get(1).equals("query")) { requestMap.put("query", disjunction); } else if (openNodeCats.get(1).equals("cq_segments")) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(disjunction); } } // field element (outside meta) if (nodeCat.equals("field")) { LinkedHashMap<String,Object> fieldMap = new LinkedHashMap<String,Object>(); // Step I: extract info String layer = ""; String foundry = null; String value = null; ParseTree fieldNameNode = node.getChild(0); if (fieldNameNode.getChildCount() == 1) { layer = fieldNameNode.getChild(0).toStringTree(poliqarpParser); //e.g. (field_name base) (field_op !=) (re_query "bar*") } else if (fieldNameNode.getChildCount() == 3) { // layer is indicated, merge layer and field name (0th and 2nd children, 1st is "/") foundry = fieldNameNode.getChild(0).toStringTree(poliqarpParser); layer = fieldNameNode.getChild(2).toStringTree(poliqarpParser); } else if (fieldNameNode.getChildCount() == 5) { // layer and value are indicated foundry = fieldNameNode.getChild(0).toStringTree(poliqarpParser); layer = fieldNameNode.getChild(2).toStringTree(poliqarpParser); value = fieldNameNode.getChild(4).toStringTree(poliqarpParser); } String relation = node.getChild(1).getChild(0).toStringTree(poliqarpParser); if (negField) { if (relation.startsWith("!")) { relation = relation.substring(1); } else { relation = "!"+relation; } } if (relation.equals("=")) { relation="eq"; } else if (relation.equals("!=")) { relation="ne"; } String key = ""; ParseTree valNode = node.getChild(2); String valType = QueryUtils.getNodeCat(valNode); fieldMap.put("@type", "korap:term"); if (valType.equals("simple_query")) { key = valNode.getChild(0).getChild(0).toStringTree(poliqarpParser); //e.g. (simple_query (sq_segment foo)) } else if (valType.equals("re_query")) { key = valNode.getChild(0).toStringTree(poliqarpParser); //e.g. 
(re_query "bar*") fieldMap.put("type", "type:regex"); key = key.substring(1,key.length()-1); //remove trailing quotes } fieldMap.put("key", key); fieldMap.put("layer", layer); if (foundry != null) fieldMap.put("foundry", foundry); if (value != null) fieldMap.put("value", value); fieldMap.put("match", "match:"+relation); // Step II: decide where to put the field map (as the only value of a token or the meta filter or as a part of a group in case of coordinated fields) if (fieldStack.isEmpty()) { if (!inMeta) { tokenStack.getFirst().put("wrap", fieldMap); } else { ((HashMap<String, Object>) requestMap.get("meta")).put("key", fieldMap); } } else { fieldStack.getFirst().add(fieldMap); } visited.add(node.getChild(0)); visited.add(node.getChild(1)); visited.add(node.getChild(2)); } if (nodeCat.equals("neg_field") || nodeCat.equals("neg_field_group")) { negField=!negField; } // conj_field serves for both conjunctions and disjunctions if (nodeCat.equals("conj_field")) { LinkedHashMap<String,Object> group = new LinkedHashMap<String,Object>(); group.put("@type", "korap:termGroup"); // Step I: get operator (& or |) ParseTree operatorNode = node.getChild(1).getChild(0); String operator = QueryUtils.getNodeCat(operatorNode); String relation = operator.equals("&") ? "and" : "or"; if (negField) { relation = relation.equals("or") ? "and": "or"; } group.put("relation", relation); ArrayList<Object> groupOperands = new ArrayList<Object>(); group.put("operands", groupOperands); fieldStack.push(groupOperands); stackedFields++; // Step II: decide where to put the group (directly under token or in top meta filter section or embed in super group) if (openNodeCats.get(1).equals("cq_segment")) { tokenStack.getFirst().put("wrap", group); } else if (openNodeCats.get(1).equals("meta_field_group")) { ((HashMap<String, Object>) requestMap.get("meta")).put("key", group); } else if (openNodeCats.get(2).equals("conj_field")) { fieldStack.get(1).add(group); } else { tokenStack.getFirst().put("wrap", group); } // skip the operator visited.add(node.getChild(1)); } if (nodeCat.equals("sq_segment")) { // Step I: determine whether to create new token or get token from the stack (if added by cq_segments) LinkedHashMap<String, Object> token; if (tokenStack.isEmpty()) { token = new LinkedHashMap<String, Object>(); tokenStack.push(token); stackedTokens++; } else { // in case sq_segments has already added the token token = tokenStack.getFirst(); } curToken = token; objectStack.push(token); stackedObjects++; // Step II: fill object (token values) and put into containing sequence if (node.getText().equals("[]")) { } else { token.put("@type", "korap:token"); String word = node.getChild(0).toStringTree(poliqarpParser); LinkedHashMap<String,Object> tokenValues = new LinkedHashMap<String,Object>(); token.put("wrap", tokenValues); tokenValues.put("@type", "korap:term"); tokenValues.put("key", word); tokenValues.put("layer", "orth"); tokenValues.put("match", "match:"+"eq"); // add token to sequence only if it is not an only child (in that case, sq_segments has already added the info and is just waiting for the values from "field") if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(token); } } visited.add(node.getChild(0)); } if (nodeCat.equals("re_query")) { LinkedHashMap<String,Object> reQuery = new LinkedHashMap<String,Object>(); reQuery.put("type", "type:regex"); String regex = node.getChild(0).toStringTree(poliqarpParser); 
reQuery.put("key", regex); reQuery.put("match", "match:"+"eq"); // if in field, regex was already added there if (!openNodeCats.get(1).equals("field")) { LinkedHashMap<String,Object> token = new LinkedHashMap<String,Object>(); token.put("@type", "korap:token"); token.put("wrap", reQuery); reQuery.put("@type", "korap:term"); if (openNodeCats.get(1).equals("query")) { requestMap.put("query", token); } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(token); } } } if (nodeCat.equals("alignment")) { alignNext = true; LinkedHashMap<String,Object> alignGroup = new LinkedHashMap<String,Object>(); // push but don't increase the stackedObjects counter in order to keep this // group open until the mother cq_segments node will be closed, since the // operands are siblings of this align node rather than children, i.e. the group // would be removed from the stack before seeing its operands. objectStack.push(alignGroup); stackedObjects++; // Step I: get info // fill group alignGroup.put("@type", "korap:group"); alignGroup.put("alignment", "left"); alignGroup.put("operands", new ArrayList<Object>()); // Step II: decide where to put the group // add group to sequence only if it is not an only child (in that case, sq_segments has already added the info and is just waiting for the relevant info) if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(alignGroup); } else if (openNodeCats.get(2).equals("query")) { requestMap.put("query", alignGroup); } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(alignGroup); } visited.add(node.getChild(0)); } if (nodeCat.equals("element")) { // Step I: determine whether to create new token or get token from the stack (if added by cq_segments) LinkedHashMap<String, Object> elem; if (tokenStack.isEmpty()) { elem = new LinkedHashMap<String, Object>(); } else { // in case sq_segments has already added the token elem = tokenStack.getFirst(); } curToken = elem; objectStack.push(elem); stackedObjects++; // Step II: fill object (token values) and put into containing sequence elem.put("@type", "korap:span"); String value = node.getChild(1).toStringTree(poliqarpParser); elem.put("key", value); // add token to sequence only if it is not an only child (in that case, cq_segments has already added the info and is just waiting for the values from "field") if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(elem); } visited.add(node.getChild(0)); visited.add(node.getChild(1)); visited.add(node.getChild(2)); } if (nodeCat.equals("spanclass")) { LinkedHashMap<String,Object> span = new LinkedHashMap<String,Object>(); span.put("@type", "korap:group"); span.put("operation", "operation:"+"class"); objectStack.push(span); stackedObjects++; ArrayList<Object> spanOperands = new ArrayList<Object>(); // Step I: get info int classId = 0; if (QueryUtils.getNodeCat(node.getChild(1)).equals("spanclass_id")) { String ref = node.getChild(1).getChild(0).toStringTree(poliqarpParser); try { classId = Integer.parseInt(ref); } catch (NumberFormatException e) { throw new QueryException("The specified class reference in the shrink/split-Operator is not a number: "+ref); } // only allow class id up to 255 if (classId>255) { classId = 0; } } 
span.put("class", classId); span.put("operands", spanOperands); // Step II: decide where to put the span // add span to sequence only if it is not an only child (in that case, cq_segments has already added the info and is just waiting for the relevant info) if (openNodeCats.get(2).equals("query") && node.getParent().getChildCount() == 1) { requestMap.put("query", span); } else if (objectStack.size()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(span); } // ignore leading and trailing braces visited.add(node.getChild(0)); visited.add(node.getChild(node.getChildCount()-1)); if (QueryUtils.getNodeCat(node.getChild(1)).equals("spanclass_id")) { visited.add(node.getChild(1)); } } if (nodeCat.equals("position")) { LinkedHashMap<String,Object> positionGroup = new LinkedHashMap<String,Object>(); objectStack.push(positionGroup); stackedObjects++; ArrayList<Object> posOperands = new ArrayList<Object>(); // Step I: get info String relation = QueryUtils.getNodeCat(node.getChild(0)); positionGroup.put("@type", "korap:group"); positionGroup.put("operation", "operation:"+"position"); positionGroup.put("frame", "frame:"+relation.toLowerCase()); // positionGroup.put("@subtype", "incl"); positionGroup.put("operands", posOperands); // Step II: decide where to put the group // add group to sequence only if it is not an only child (in that case, sq_segments has already added the info and is just waiting for the relevant info) if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(positionGroup); } else if (openNodeCats.get(2).equals("query")) { requestMap.put("query", positionGroup); } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(positionGroup); } } if (nodeCat.equals("shrink")) { LinkedHashMap<String,Object> shrinkGroup = new LinkedHashMap<String,Object>(); objectStack.push(shrinkGroup); stackedObjects++; ArrayList<Object> shrinkOperands = new ArrayList<Object>(); // Step I: get info ArrayList<Integer> classRefs = new ArrayList<Integer>(); String classRefOp = null; if (QueryUtils.getNodeCat(node.getChild(2)).equals("spanclass_id")) { ParseTree spanNode = node.getChild(2); for (int i=0; i<spanNode.getChildCount()-1; i++) { String ref = spanNode.getChild(i).getText(); System.err.println(" "+ref); if (ref.equals("|") || ref.equals("&")) { classRefOp = ref.equals("|") ? "intersection" : "union"; } else { try { int classRef = Integer.parseInt(ref); // only allow class id up to 255 if (classRef>255) { classRef = 0; } classRefs.add(classRef); } catch (NumberFormatException e) { throw new QueryException("The specified class reference in the shrink/split-Operator is not a number."); } } } } else { classRefs.add(0); } shrinkGroup.put("@type", "korap:group"); String type = node.getChild(0).toStringTree(poliqarpParser); String operation = type.equals("shrink") ? 
"submatch" : "split"; shrinkGroup.put("operation", "operation:"+operation); shrinkGroup.put("classRef", classRefs); if (classRefOp != null) { shrinkGroup.put("classRefOp", "classRefOp:"+classRefOp); } shrinkGroup.put("operands", shrinkOperands); int i=1; // Step II: decide where to put the group // add group to sequence only if it is not an only child (in that case, sq_segments has already added the info and is just waiting for the relevant info) if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(i).get("operands"); // this shrinkGroup is on top topSequenceOperands.add(shrinkGroup); } else if (openNodeCats.get(2).equals("query")) { requestMap.put("query", shrinkGroup); } else if (objectStack.size()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(i).get("operands"); topSequenceOperands.add(shrinkGroup); } visited.add(node.getChild(0)); } // repetition of token group if (nodeCat.equals("occ")) { ParseTree occChild = node.getChild(0); String repetition = occChild.toStringTree(poliqarpParser); int[] minmax = parseRepetition(repetition); curOccGroup.put("operation", "operation:"+"repetition"); curOccGroup.put("min", minmax[0]); curOccGroup.put("max", minmax[1]); visited.add(occChild); } // flags for case sensitivity and whole-word-matching if (nodeCat.equals("flag")) { String flag = QueryUtils.getNodeCat(node.getChild(0)).substring(1); //substring removes leading slash '/' // add to current token's value if (flag.contains("i")) ((HashMap<String, Object>) curToken.get("wrap")).put("caseInsensitive", true); else if (flag.contains("I")) ((HashMap<String, Object>) curToken.get("wrap")).put("caseInsensitive", false); else ((HashMap<String, Object>) curToken.get("wrap")).put("flag", flag); } if (nodeCat.equals("meta")) { inMeta=true; LinkedHashMap<String,Object> metaFilter = new LinkedHashMap<String,Object>(); requestMap.put("meta", metaFilter); metaFilter.put("@type", "korap:meta"); } if (nodeCat.equals("within") && !QueryUtils.getNodeCat(node.getParent()).equals("position")) { ParseTree domainNode = node.getChild(2); String domain = QueryUtils.getNodeCat(domainNode); LinkedHashMap<String,Object> curObject = (LinkedHashMap<String, Object>) objectStack.getFirst(); curObject.put("within", domain); visited.add(node.getChild(0)); visited.add(node.getChild(1)); visited.add(domainNode); } objectsToPop.push(stackedObjects); tokensToPop.push(stackedTokens); fieldsToPop.push(stackedFields); /* **************************************************************** **************************************************************** * recursion until 'request' node (root of tree) is processed * **************************************************************** **************************************************************** */ for (int i=0; i<node.getChildCount(); i++) { ParseTree child = node.getChild(i); curChildIndex = i; processNode(child); } // set negField back if (nodeCat.equals("neg_field") || nodeCat.equals("neg_field_group")) { negField = !negField; } // pop the align group that was introduced by previous 'align' but never closed // if (isAligned) { // isAligned=false; // objectStack.pop(); // } // Stuff that happens when leaving a node (taking items off the stacks) for (int i=0; i<objectsToPop.get(0); i++) { objectStack.pop(); } objectsToPop.pop(); for (int i=0; i<tokensToPop.get(0); i++) { tokenStack.pop(); } tokensToPop.pop(); for (int i=0; i<fieldsToPop.get(0); i++) { fieldStack.pop(); } fieldsToPop.pop(); 
openNodeCats.pop(); } private int[] parseRepetition(String repetition) { if (repetition.equals("*")) { return new int[] {0, 100}; } else if (repetition.equals("+")) { return new int[] {1, 100}; } else if (repetition.equals("?")) { return new int[] {0, 1}; } else { repetition = repetition.substring(1, repetition.length()-1); // remove braces String[] splitted = repetition.split(","); return new int[] {Integer.parseInt(splitted[0]), Integer.parseInt(splitted[1])}; } } private String[] parseEmptySegments(ParseTree emptySegments) { String[] minmax = new String[2]; Integer min = 0; Integer max = 0; ParseTree child; for (int i=0; i<emptySegments.getChildCount()-1; i++) { child = emptySegments.getChild(i); ParseTree nextSibling = emptySegments.getChild(i+1); String nextSiblingString = nextSibling.toStringTree(); if (child.toStringTree().equals("[]")) { if (nextSiblingString.equals("?")) { max++; } else if (nextSiblingString.startsWith("{")) { String occ = nextSiblingString.substring(1,nextSiblingString.length()-1); System.out.println(occ); if (occ.contains(",")) { String[] minmaxOcc = occ.split(","); min += Integer.parseInt(minmaxOcc[0]); max += Integer.parseInt(minmaxOcc[1]); } else { min += Integer.parseInt(occ); max += Integer.parseInt(occ); } } else { min++; max++; } } } child = emptySegments.getChild(emptySegments.getChildCount()-1); if (child.toStringTree().equals("[]")) { min++; max++; } minmax[0] = min.toString(); minmax[1] = max.toString(); return minmax; } @SuppressWarnings("unchecked") private void createOccGroup(ParseTree node) { LinkedHashMap<String,Object> occGroup = new LinkedHashMap<String,Object>(); occGroup.put("@type", "korap:group"); ArrayList<Object> groupOperands = new ArrayList<Object>(); occGroup.put("operands", groupOperands); curOccGroup = occGroup; objectStack.push(occGroup); stackedObjects++; // if only this group is on the object stack, add as top query element if (objectStack.size()==1) { requestMap.put("query", occGroup); // embed in super sequence } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(occGroup); } } private static ParserRuleContext parsePoliqarpQuery (String p) throws QueryException { QueryUtils.checkUnbalancedPars(p); Lexer poliqarpLexer = new PoliqarpPlusLexer((CharStream)null); ParserRuleContext tree = null; // Like p. 111 try { // Tokenize input data ANTLRInputStream input = new ANTLRInputStream(p); poliqarpLexer.setInputStream(input); CommonTokenStream tokens = new CommonTokenStream(poliqarpLexer); poliqarpParser = new PoliqarpPlusParser(tokens); // Don't throw out erroneous stuff poliqarpParser.setErrorHandler(new BailErrorStrategy()); poliqarpParser.removeErrorListeners(); // Get starting rule from parser Method startRule = PoliqarpPlusParser.class.getMethod("request"); tree = (ParserRuleContext) startRule.invoke(poliqarpParser, (Object[])null); } // Some things went wrong ... catch (Exception e) { System.err.println( e.getMessage() ); } if (tree==null) throw new QueryException( "The query you specified could not be processed. 
Please make sure it is well-formed."); // Return the generated tree return tree; } public static void main(String[] args) { /* * For testing */ String[] queries = new String[] { "shrink(1|2:{1:[base=der]}{2:[base=Mann]})", // "[base=foo] meta (author=name&year=2000)", // "[base=foo] meta year=2000", "{[base=Mann]}", "shrink(1:[orth=Der]{1:[orth=Mann][orth=geht]})", "[base=Mann/i]", "[cnx/base/pos=n]" }; PoliqarpPlusTree.debug=true; for (String q : queries) { try { System.out.println(q); System.out.println(PoliqarpPlusTree.parsePoliqarpQuery(q).toStringTree(PoliqarpPlusTree.poliqarpParser)); @SuppressWarnings("unused") PoliqarpPlusTree pt = new PoliqarpPlusTree(q); System.out.println(q); System.out.println(); } catch (Exception npe) { npe.printStackTrace(); System.out.println("null\n"); } } } }
src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusTree.java
package de.ids_mannheim.korap.query.serialize; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.BailErrorStrategy; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.Parser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; import de.ids_mannheim.korap.query.PoliqarpPlusLexer; import de.ids_mannheim.korap.query.PoliqarpPlusParser; import de.ids_mannheim.korap.query.serialize.AbstractSyntaxTree; import de.ids_mannheim.korap.util.QueryException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Map representation of Poliqarp syntax tree as returned by ANTLR * @author joachim * */ public class PoliqarpPlusTree extends AbstractSyntaxTree { Logger log = LoggerFactory.getLogger(PoliqarpPlusTree.class); /** * Top-level map representing the whole request. */ LinkedHashMap<String,Object> requestMap = new LinkedHashMap<String,Object>(); /** * Keeps track of open node categories */ LinkedList<String> openNodeCats = new LinkedList<String>(); /** * Flag that indicates whether token fields or meta fields are currently being processed */ boolean inMeta = false; /** * Flag that indicates whether a cq_segment is to be ignored (e.g. when it is empty, is followed directly by only a spanclass and has no other children etc...). */ boolean ignoreCq_segment = false; /** * Flag that indicates whether a cq_segments element is quantified by an occ element. */ boolean cqHasOccSibling = false; /** * Flag that indicates whether a cq_segments' children are quantified by an occ element. */ boolean cqHasOccChild = false; /** * Flag for negation of complete field */ boolean negField = false; /** * Flag that indicates whether subsequent element is to be aligned. */ boolean alignNext = false; /** * Flag that indicates whether current element has been aligned. */ boolean isAligned = false; /** * Indicates a sequence which has an align operator as its child. Needed for deciding * when to close the align group object. */ // ParseTree alignedSequence = null; /** * Parser object deriving the ANTLR parse tree. */ static Parser poliqarpParser; /** * Keeps track of all visited nodes in a tree */ List<ParseTree> visited = new ArrayList<ParseTree>(); /** * Keeps track of active fields (like 'base=foo'). */ LinkedList<ArrayList<Object>> fieldStack = new LinkedList<ArrayList<Object>>(); /** * Keeps track of active tokens. */ LinkedList<LinkedHashMap<String,Object>> tokenStack = new LinkedList<LinkedHashMap<String,Object>>(); /** * Marks the currently active token in order to know where to add flags (might already have been taken away from token stack). */ LinkedHashMap<String,Object> curToken = new LinkedHashMap<String,Object>(); /** * Keeps track of active object. */ LinkedList<LinkedHashMap<String,Object>> objectStack = new LinkedList<LinkedHashMap<String,Object>>(); /** * Marks the object to which following occurrence information is to be added. 
*/ LinkedHashMap<String,Object> curOccGroup = new LinkedHashMap<String,Object>(); /** * Keeps track of how many objects there are to pop after every recursion of {@link #processNode(ParseTree)} */ LinkedList<Integer> objectsToPop = new LinkedList<Integer>(); /** * Keeps track of how many objects there are to pop after every recursion of {@link #processNode(ParseTree)} */ LinkedList<Integer> tokensToPop = new LinkedList<Integer>(); /** * Keeps track of how many objects there are to pop after every recursion of {@link #processNode(ParseTree)} */ LinkedList<Integer> fieldsToPop = new LinkedList<Integer>(); /** * If true, print debug statements */ public static boolean debug = false; /** * Index of the current child of its parent (needed for relating occ elements to their operands). */ int curChildIndex = 0; /** * */ Integer stackedObjects = 0; Integer stackedTokens= 0; Integer stackedFields = 0; /** * Most centrally, this class maintains a set of nested maps and lists which represent the JSON tree, which is built by the JSON serialiser * on basis of the {@link #requestMap} at the root of the tree. * <br/> * The class further maintains a set of stacks which effectively keep track of which objects to embed in which containing objects. * * @param query The syntax tree as returned by ANTLR * @throws QueryException */ public PoliqarpPlusTree(String query) throws QueryException { try { process(query); } catch (NullPointerException e) { if (query.contains(" ")) { System.err.println("Warning: It seems like your query contains illegal whitespace characters. Trying again with whitespaces removed..."); query = query.replaceAll(" ", ""); process(query); } else { try { throw new QueryException("Error handling query."); } catch (QueryException e1) { e1.printStackTrace(); System.exit(1); } } } System.out.println(">>> "+requestMap.get("query")+" <<<"); log.info(">>> " + requestMap.get("query") + " <<<"); } @Override public Map<String, Object> getRequestMap() { return requestMap; } @Override public void process(String query) throws QueryException { ParseTree tree = null; try { tree = parsePoliqarpQuery(query); } catch (QueryException e) { try { tree = parsePoliqarpQuery(query.replaceAll(" ", "")); } catch (QueryException e1) { System.exit(1); } } System.out.println("Processing PoliqarpPlus"); requestMap.put("context", "http://ids-mannheim.de/ns/KorAP/json-ld/v0.1/context.jsonld"); // QueryUtils.prepareContext(requestMap); processNode(tree); } /** * Recursively calls itself with the children of the currently active node, traversing the tree nodes in a top-down, depth-first fashion. * A list is maintained that contains all visited nodes * in case they have been directly addressed by its (grand-/grand-grand-/...) parent node, such that some processing time is saved, as these node will * not be processed. This method is effectively a list of if-statements that are responsible for treating the different node types correctly and filling the * respective maps/lists. * * @param node The currently processed node. The process(String query) method calls this method with the root. 
* @throws QueryException */ @SuppressWarnings("unchecked") private void processNode(ParseTree node) throws QueryException { // Top-down processing if (visited.contains(node)) return; else visited.add(node); if (alignNext) { alignNext=false; isAligned=true; } String nodeCat = QueryUtils.getNodeCat(node); openNodeCats.push(nodeCat); stackedObjects = 0; stackedTokens= 0; stackedFields = 0; if (debug) { System.err.println(" "+objectStack); System.err.println(" "+tokenStack); System.out.println(openNodeCats); } /* **************************************************************** **************************************************************** * Processing individual node categories * **************************************************************** **************************************************************** */ // cq_segments/sq_segments: token group if (nodeCat.equals("cq_segments") || nodeCat.equals("sq_segments")) { cqHasOccSibling = false; cqHasOccChild = false; // disregard empty segments in simple queries (parsed by ANTLR as empty cq_segments) ignoreCq_segment = (node.getChildCount() == 1 && (node.getChild(0).toStringTree(poliqarpParser).equals(" ") || QueryUtils.getNodeCat(node.getChild(0)).equals("spanclass") || QueryUtils.getNodeCat(node.getChild(0)).equals("position"))); // ignore this node if it only serves as an aligned sequence container if (node.getChildCount()>1) { if (QueryUtils.getNodeCat(node.getChild(1)).equals("cq_segments") && QueryUtils.hasChild(node.getChild(1), "alignment")) { // if (QueryUtils.getNodeCat(node.getChild(0)).equals("align")) { ignoreCq_segment = true; } } if (!ignoreCq_segment) { LinkedHashMap<String,Object> sequence = new LinkedHashMap<String,Object>(); // Step 0: cq_segments has 'occ' child -> introduce group as super group to the sequence/token/group // this requires creating a group and inserting it at a suitable place if (node.getParent().getChildCount()>curChildIndex+2 && QueryUtils.getNodeCat(node.getParent().getChild(curChildIndex+2)).equals("occ")) { cqHasOccSibling = true; createOccGroup(node); } if (QueryUtils.getNodeCat(node.getChild(node.getChildCount()-1)).equals("occ")) { cqHasOccChild = true; } // Step I: decide type of element (one or more elements? -> token or sequence) // take into account a possible 'occ' child with accompanying parantheses, therefore 3 extra children int occExtraChildren = cqHasOccChild ? 
3:0; if (node.getChildCount()>1 + occExtraChildren) { ParseTree emptySegments = QueryUtils.getFirstChildWithCat(node, "empty_segments"); if (emptySegments != null && emptySegments != node.getChild(0)) { String[] minmax = parseEmptySegments(emptySegments); Integer min = Integer.parseInt(minmax[0]); Integer max = Integer.parseInt(minmax[1]); sequence.put("@type", "korap:group"); sequence.put("operation", "operation:"+"sequence"); // sequence.put("operation", "operation:"+"distance"); sequence.put("inOrder", true); ArrayList<Object> constraint = new ArrayList<Object>(); sequence.put("distances", constraint); ArrayList<Object> sequenceOperands = new ArrayList<Object>(); sequence.put("operands", sequenceOperands); objectStack.push(sequence); stackedObjects++; LinkedHashMap<String, Object> distMap = new LinkedHashMap<String, Object>(); constraint.add(distMap); distMap.put("@type", "korap:distance"); distMap.put("key", "w"); distMap.put("min", min); distMap.put("max", max); } else { sequence.put("@type", "korap:group"); sequence.put("operation", "operation:"+"sequence"); ArrayList<Object> sequenceOperands = new ArrayList<Object>(); if (emptySegments != null) { String[] minmax = parseEmptySegments(emptySegments); Integer min = Integer.parseInt(minmax[0]); Integer max = Integer.parseInt(minmax[1]); sequence.put("offset-min", min); sequence.put("offset-max", max); } sequence.put("operands", sequenceOperands); objectStack.push(sequence); stackedObjects++; } } else { // if only child, make the sequence a mere token... // ... but only if it has a real token/element beneath it if (QueryUtils.getNodeCat(node.getChild(0)).equals("cq_segment") || QueryUtils.getNodeCat(node.getChild(0)).equals("sq_segment") || QueryUtils.getNodeCat(node.getChild(0)).equals("element") ) { sequence.put("@type", "korap:token"); tokenStack.push(sequence); stackedTokens++; objectStack.push(sequence); stackedObjects++; // else, it's a group (with shrink()/spanclass/align... 
as child) } else { sequence.put("@type", "korap:group"); } } // Step II: decide where to put this element // check if this is an argument for a containing occurrence group (see step 0) if (cqHasOccSibling) { ArrayList<Object> topGroupOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topGroupOperands.add(sequence); // ...if not modified by occurrence, put into suitable super structure } else { if (openNodeCats.get(1).equals("query")) { // cq_segment is top query node if (node.getParent().getChildCount()==1) { // only child requestMap.put("query", sequence); } else { // not an only child, need to create containing sequence if (node.getParent().getChild(0).equals(node)) { // if first child, create containing sequence and embed there LinkedHashMap<String,Object> superSequence = new LinkedHashMap<String,Object>(); superSequence.put("@type", "korap:group"); superSequence.put("operation", "operation:"+"sequence"); ArrayList<Object> operands = new ArrayList<Object>(); superSequence.put("operands", operands); operands.add(sequence); requestMap.put("query", superSequence); objectStack.push(superSequence); // add at 2nd position to keep current cq_segment accessible stackedObjects++; } else { // if not first child, add to previously created parent sequence ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(sequence); } } } else if (!objectStack.isEmpty()){ // embed in super sequence ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(sequence); } } } } // cq_segment if (nodeCat.equals("cq_segment")) { int onTopOfObjectStack = 0; // Step I: determine whether to create new token or get token from the stack (if added by cq_segments) LinkedHashMap<String, Object> token; if (tokenStack.isEmpty()) { token = new LinkedHashMap<String, Object>(); tokenStack.push(token); stackedTokens++; // do this only if token is newly created, otherwise it'll be in objectStack twice objectStack.push(token); onTopOfObjectStack = 1; stackedObjects++; } else { // in case cq_segments has already added the token token = tokenStack.getFirst(); } curToken = token; // Step II: start filling object and add to containing sequence token.put("@type", "korap:token"); // add token to sequence only if it is not an only child (in that case, cq_segments has already added the info and is just waiting for the values from "field") // take into account a possible 'occ' child if (node.getParent().getChildCount()>1) { if (node.getText().equals("[]")) { // LinkedHashMap<String, Object> sequence = objectStack.get(onTopOfObjectStack); // String offsetStr = (String) sequence.get("offset"); // if (offsetStr == null) { // sequence.put("offset", "1"); // } else { // Integer offset = Integer.parseInt(offsetStr); // sequence.put("offset", offset+1); // } // } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(onTopOfObjectStack).get("operands"); topSequenceOperands.add(token); } } } // cq_segment modified by occurrence if (nodeCat.equals("cq_seg_occ")) { LinkedHashMap<String,Object> group = new LinkedHashMap<String,Object>(); curOccGroup = group; group.put("@type", "korap:group"); group.put("operands", new ArrayList<Object>()); objectStack.push(group); stackedObjects++; // add group to sequence only if it is not an only child (in that case, cq_segments has already added the info and is just waiting for the values from "field") // take into account a possible 'occ' child if 
(node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(group); } else { requestMap.put("query", group); } } // disjoint cq_segments, like ([base=foo][base=bar])|[base=foobar] if (nodeCat.equals("cq_disj_segments")) { LinkedHashMap<String,Object> disjunction = new LinkedHashMap<String,Object>(); objectStack.push(disjunction); stackedObjects++; ArrayList<Object> disjOperands = new ArrayList<Object>(); disjunction.put("@type", "korap:group"); disjunction.put("operation", "operation:"+"or"); disjunction.put("operands", disjOperands); // decide where to put the disjunction if (openNodeCats.get(1).equals("query")) { requestMap.put("query", disjunction); } else if (openNodeCats.get(1).equals("cq_segments")) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(disjunction); } } // field element (outside meta) if (nodeCat.equals("field")) { LinkedHashMap<String,Object> fieldMap = new LinkedHashMap<String,Object>(); // Step I: extract info String fieldName = ""; ParseTree fieldNameNode = node.getChild(0); if (fieldNameNode.getChildCount() == 1) { fieldName = fieldNameNode.getChild(0).toStringTree(poliqarpParser); //e.g. (field_name base) (field_op !=) (re_query "bar*") } else if (fieldNameNode.getChildCount() == 3) { // layer is indicated, merge layer and field name (0th and 2nd children, 1st is "/") String layer = fieldNameNode.getChild(0).toStringTree(poliqarpParser); if (layer.equals("base")) layer="lemma"; String layeredFieldName = fieldNameNode.getChild(2).toStringTree(poliqarpParser); fieldName = layer+"/"+layeredFieldName; } String relation = node.getChild(1).getChild(0).toStringTree(poliqarpParser); if (negField) { if (relation.startsWith("!")) { relation = relation.substring(1); } else { relation = "!"+relation; } } if (relation.equals("=")) { relation="eq"; } else if (relation.equals("!=")) { relation="ne"; } String value = ""; ParseTree valNode = node.getChild(2); String valType = QueryUtils.getNodeCat(valNode); fieldMap.put("@type", "korap:term"); if (valType.equals("simple_query")) { value = valNode.getChild(0).getChild(0).toStringTree(poliqarpParser); //e.g. (simple_query (sq_segment foo)) } else if (valType.equals("re_query")) { value = valNode.getChild(0).toStringTree(poliqarpParser); //e.g. 
(re_query "bar*") fieldMap.put("type", "type:regex"); value = value.substring(1,value.length()-1); //remove trailing quotes } fieldMap.put("key", value); if (fieldName.contains("/")) { String[] splitted = fieldName.split("/"); fieldMap.put("layer", splitted[1]); fieldMap.put("foundry", splitted[0]); } else { if (fieldName.equals("base")) fieldName = "lemma"; fieldMap.put("layer", fieldName); } fieldMap.put("match", "match:"+relation); // Step II: decide where to put the field map (as the only value of a token or the meta filter or as a part of a group in case of coordinated fields) if (fieldStack.isEmpty()) { if (!inMeta) { tokenStack.getFirst().put("wrap", fieldMap); } else { ((HashMap<String, Object>) requestMap.get("meta")).put("key", fieldMap); } } else { fieldStack.getFirst().add(fieldMap); } visited.add(node.getChild(0)); visited.add(node.getChild(1)); visited.add(node.getChild(2)); } if (nodeCat.equals("neg_field") || nodeCat.equals("neg_field_group")) { negField=!negField; } // conj_field serves for both conjunctions and disjunctions if (nodeCat.equals("conj_field")) { LinkedHashMap<String,Object> group = new LinkedHashMap<String,Object>(); group.put("@type", "korap:termGroup"); // Step I: get operator (& or |) ParseTree operatorNode = node.getChild(1).getChild(0); String operator = QueryUtils.getNodeCat(operatorNode); String relation = operator.equals("&") ? "and" : "or"; if (negField) { relation = relation.equals("or") ? "and": "or"; } group.put("relation", relation); ArrayList<Object> groupOperands = new ArrayList<Object>(); group.put("operands", groupOperands); fieldStack.push(groupOperands); stackedFields++; // Step II: decide where to put the group (directly under token or in top meta filter section or embed in super group) if (openNodeCats.get(1).equals("cq_segment")) { tokenStack.getFirst().put("wrap", group); } else if (openNodeCats.get(1).equals("meta_field_group")) { ((HashMap<String, Object>) requestMap.get("meta")).put("key", group); } else if (openNodeCats.get(2).equals("conj_field")) { fieldStack.get(1).add(group); } else { tokenStack.getFirst().put("wrap", group); } // skip the operator visited.add(node.getChild(1)); } if (nodeCat.equals("sq_segment")) { // Step I: determine whether to create new token or get token from the stack (if added by cq_segments) LinkedHashMap<String, Object> token; if (tokenStack.isEmpty()) { token = new LinkedHashMap<String, Object>(); tokenStack.push(token); stackedTokens++; } else { // in case sq_segments has already added the token token = tokenStack.getFirst(); } curToken = token; objectStack.push(token); stackedObjects++; // Step II: fill object (token values) and put into containing sequence if (node.getText().equals("[]")) { } else { token.put("@type", "korap:token"); String word = node.getChild(0).toStringTree(poliqarpParser); LinkedHashMap<String,Object> tokenValues = new LinkedHashMap<String,Object>(); token.put("wrap", tokenValues); tokenValues.put("@type", "korap:term"); tokenValues.put("key", word); tokenValues.put("layer", "orth"); tokenValues.put("match", "match:"+"eq"); // add token to sequence only if it is not an only child (in that case, sq_segments has already added the info and is just waiting for the values from "field") if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(token); } } visited.add(node.getChild(0)); } if (nodeCat.equals("re_query")) { LinkedHashMap<String,Object> reQuery = new 
LinkedHashMap<String,Object>(); reQuery.put("type", "type:regex"); String regex = node.getChild(0).toStringTree(poliqarpParser); reQuery.put("key", regex); reQuery.put("match", "match:"+"eq"); // if in field, regex was already added there if (!openNodeCats.get(1).equals("field")) { LinkedHashMap<String,Object> token = new LinkedHashMap<String,Object>(); token.put("@type", "korap:token"); token.put("wrap", reQuery); reQuery.put("@type", "korap:term"); if (openNodeCats.get(1).equals("query")) { requestMap.put("query", token); } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(token); } } } if (nodeCat.equals("alignment")) { alignNext = true; LinkedHashMap<String,Object> alignGroup = new LinkedHashMap<String,Object>(); // push but don't increase the stackedObjects counter in order to keep this // group open until the mother cq_segments node will be closed, since the // operands are siblings of this align node rather than children, i.e. the group // would be removed from the stack before seeing its operands. objectStack.push(alignGroup); stackedObjects++; // Step I: get info // fill group alignGroup.put("@type", "korap:group"); alignGroup.put("alignment", "left"); alignGroup.put("operands", new ArrayList<Object>()); // Step II: decide where to put the group // add group to sequence only if it is not an only child (in that case, sq_segments has already added the info and is just waiting for the relevant info) if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(alignGroup); } else if (openNodeCats.get(2).equals("query")) { requestMap.put("query", alignGroup); } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(alignGroup); } visited.add(node.getChild(0)); } if (nodeCat.equals("element")) { // Step I: determine whether to create new token or get token from the stack (if added by cq_segments) LinkedHashMap<String, Object> elem; if (tokenStack.isEmpty()) { elem = new LinkedHashMap<String, Object>(); } else { // in case sq_segments has already added the token elem = tokenStack.getFirst(); } curToken = elem; objectStack.push(elem); stackedObjects++; // Step II: fill object (token values) and put into containing sequence elem.put("@type", "korap:span"); String value = node.getChild(1).toStringTree(poliqarpParser); elem.put("key", value); // add token to sequence only if it is not an only child (in that case, cq_segments has already added the info and is just waiting for the values from "field") if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(elem); } visited.add(node.getChild(0)); visited.add(node.getChild(1)); visited.add(node.getChild(2)); } if (nodeCat.equals("spanclass")) { LinkedHashMap<String,Object> span = new LinkedHashMap<String,Object>(); span.put("@type", "korap:group"); span.put("operation", "operation:"+"class"); objectStack.push(span); stackedObjects++; ArrayList<Object> spanOperands = new ArrayList<Object>(); // Step I: get info int classId = 0; if (QueryUtils.getNodeCat(node.getChild(1)).equals("spanclass_id")) { String ref = node.getChild(1).getChild(0).toStringTree(poliqarpParser); try { classId = Integer.parseInt(ref); } catch (NumberFormatException e) { throw new QueryException("The specified class reference 
in the shrink/split-Operator is not a number: "+ref); } // only allow class id up to 255 if (classId>255) { classId = 0; } } span.put("class", classId); span.put("operands", spanOperands); // Step II: decide where to put the span // add span to sequence only if it is not an only child (in that case, cq_segments has already added the info and is just waiting for the relevant info) if (openNodeCats.get(2).equals("query") && node.getParent().getChildCount() == 1) { requestMap.put("query", span); } else if (objectStack.size()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(span); } // ignore leading and trailing braces visited.add(node.getChild(0)); visited.add(node.getChild(node.getChildCount()-1)); if (QueryUtils.getNodeCat(node.getChild(1)).equals("spanclass_id")) { visited.add(node.getChild(1)); } } if (nodeCat.equals("position")) { LinkedHashMap<String,Object> positionGroup = new LinkedHashMap<String,Object>(); objectStack.push(positionGroup); stackedObjects++; ArrayList<Object> posOperands = new ArrayList<Object>(); // Step I: get info String relation = QueryUtils.getNodeCat(node.getChild(0)); positionGroup.put("@type", "korap:group"); positionGroup.put("operation", "operation:"+"position"); positionGroup.put("frame", "frame:"+relation.toLowerCase()); // positionGroup.put("@subtype", "incl"); positionGroup.put("operands", posOperands); // Step II: decide where to put the group // add group to sequence only if it is not an only child (in that case, sq_segments has already added the info and is just waiting for the relevant info) if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(positionGroup); } else if (openNodeCats.get(2).equals("query")) { requestMap.put("query", positionGroup); } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(positionGroup); } } if (nodeCat.equals("shrink")) { LinkedHashMap<String,Object> shrinkGroup = new LinkedHashMap<String,Object>(); objectStack.push(shrinkGroup); stackedObjects++; ArrayList<Object> shrinkOperands = new ArrayList<Object>(); // Step I: get info ArrayList<Integer> classRefs = new ArrayList<Integer>(); String classRefOp = null; if (QueryUtils.getNodeCat(node.getChild(2)).equals("spanclass_id")) { ParseTree spanNode = node.getChild(2); for (int i=0; i<spanNode.getChildCount()-1; i++) { String ref = spanNode.getChild(i).getText(); System.err.println(" "+ref); if (ref.equals("|") || ref.equals("&")) { classRefOp = ref.equals("|") ? "intersection" : "union"; } else { try { int classRef = Integer.parseInt(ref); // only allow class id up to 255 if (classRef>255) { classRef = 0; } classRefs.add(classRef); } catch (NumberFormatException e) { throw new QueryException("The specified class reference in the shrink/split-Operator is not a number."); } } } } else { classRefs.add(0); } shrinkGroup.put("@type", "korap:group"); String type = node.getChild(0).toStringTree(poliqarpParser); String operation = type.equals("shrink") ? 
"submatch" : "split"; shrinkGroup.put("operation", "operation:"+operation); shrinkGroup.put("classRef", classRefs); if (classRefOp != null) { shrinkGroup.put("classRefOp", "classRefOp:"+classRefOp); } shrinkGroup.put("operands", shrinkOperands); int i=1; // Step II: decide where to put the group // add group to sequence only if it is not an only child (in that case, sq_segments has already added the info and is just waiting for the relevant info) if (node.getParent().getChildCount()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(i).get("operands"); // this shrinkGroup is on top topSequenceOperands.add(shrinkGroup); } else if (openNodeCats.get(2).equals("query")) { requestMap.put("query", shrinkGroup); } else if (objectStack.size()>1) { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(i).get("operands"); topSequenceOperands.add(shrinkGroup); } visited.add(node.getChild(0)); } // repetition of token group if (nodeCat.equals("occ")) { ParseTree occChild = node.getChild(0); String repetition = occChild.toStringTree(poliqarpParser); int[] minmax = parseRepetition(repetition); curOccGroup.put("operation", "operation:"+"repetition"); curOccGroup.put("min", minmax[0]); curOccGroup.put("max", minmax[1]); visited.add(occChild); } // flags for case sensitivity and whole-word-matching if (nodeCat.equals("flag")) { String flag = QueryUtils.getNodeCat(node.getChild(0)).substring(1); //substring removes leading slash '/' // add to current token's value if (flag.contains("i")) ((HashMap<String, Object>) curToken.get("wrap")).put("caseInsensitive", true); else if (flag.contains("I")) ((HashMap<String, Object>) curToken.get("wrap")).put("caseInsensitive", false); else ((HashMap<String, Object>) curToken.get("wrap")).put("flag", flag); } if (nodeCat.equals("meta")) { inMeta=true; LinkedHashMap<String,Object> metaFilter = new LinkedHashMap<String,Object>(); requestMap.put("meta", metaFilter); metaFilter.put("@type", "korap:meta"); } if (nodeCat.equals("within") && !QueryUtils.getNodeCat(node.getParent()).equals("position")) { ParseTree domainNode = node.getChild(2); String domain = QueryUtils.getNodeCat(domainNode); LinkedHashMap<String,Object> curObject = (LinkedHashMap<String, Object>) objectStack.getFirst(); curObject.put("within", domain); visited.add(node.getChild(0)); visited.add(node.getChild(1)); visited.add(domainNode); } objectsToPop.push(stackedObjects); tokensToPop.push(stackedTokens); fieldsToPop.push(stackedFields); /* **************************************************************** **************************************************************** * recursion until 'request' node (root of tree) is processed * **************************************************************** **************************************************************** */ for (int i=0; i<node.getChildCount(); i++) { ParseTree child = node.getChild(i); curChildIndex = i; processNode(child); } // set negField back if (nodeCat.equals("neg_field") || nodeCat.equals("neg_field_group")) { negField = !negField; } // pop the align group that was introduced by previous 'align' but never closed // if (isAligned) { // isAligned=false; // objectStack.pop(); // } // Stuff that happens when leaving a node (taking items off the stacks) for (int i=0; i<objectsToPop.get(0); i++) { objectStack.pop(); } objectsToPop.pop(); for (int i=0; i<tokensToPop.get(0); i++) { tokenStack.pop(); } tokensToPop.pop(); for (int i=0; i<fieldsToPop.get(0); i++) { fieldStack.pop(); } fieldsToPop.pop(); 
openNodeCats.pop(); } private int[] parseRepetition(String repetition) { if (repetition.equals("*")) { return new int[] {0, 100}; } else if (repetition.equals("+")) { return new int[] {1, 100}; } else if (repetition.equals("?")) { return new int[] {0, 1}; } else { repetition = repetition.substring(1, repetition.length()-1); // remove braces String[] splitted = repetition.split(","); return new int[] {Integer.parseInt(splitted[0]), Integer.parseInt(splitted[1])}; } } private String[] parseEmptySegments(ParseTree emptySegments) { String[] minmax = new String[2]; Integer min = 0; Integer max = 0; ParseTree child; for (int i=0; i<emptySegments.getChildCount()-1; i++) { child = emptySegments.getChild(i); ParseTree nextSibling = emptySegments.getChild(i+1); String nextSiblingString = nextSibling.toStringTree(); if (child.toStringTree().equals("[]")) { if (nextSiblingString.equals("?")) { max++; } else if (nextSiblingString.startsWith("{")) { String occ = nextSiblingString.substring(1,nextSiblingString.length()-1); System.out.println(occ); if (occ.contains(",")) { String[] minmaxOcc = occ.split(","); min += Integer.parseInt(minmaxOcc[0]); max += Integer.parseInt(minmaxOcc[1]); } else { min += Integer.parseInt(occ); max += Integer.parseInt(occ); } } else { min++; max++; } } } child = emptySegments.getChild(emptySegments.getChildCount()-1); if (child.toStringTree().equals("[]")) { min++; max++; } minmax[0] = min.toString(); minmax[1] = max.toString(); return minmax; } @SuppressWarnings("unchecked") private void createOccGroup(ParseTree node) { LinkedHashMap<String,Object> occGroup = new LinkedHashMap<String,Object>(); occGroup.put("@type", "korap:group"); ArrayList<Object> groupOperands = new ArrayList<Object>(); occGroup.put("operands", groupOperands); curOccGroup = occGroup; objectStack.push(occGroup); stackedObjects++; // if only this group is on the object stack, add as top query element if (objectStack.size()==1) { requestMap.put("query", occGroup); // embed in super sequence } else { ArrayList<Object> topSequenceOperands = (ArrayList<Object>) objectStack.get(1).get("operands"); topSequenceOperands.add(occGroup); } } private static ParserRuleContext parsePoliqarpQuery (String p) throws QueryException { QueryUtils.checkUnbalancedPars(p); Lexer poliqarpLexer = new PoliqarpPlusLexer((CharStream)null); ParserRuleContext tree = null; // Like p. 111 try { // Tokenize input data ANTLRInputStream input = new ANTLRInputStream(p); poliqarpLexer.setInputStream(input); CommonTokenStream tokens = new CommonTokenStream(poliqarpLexer); poliqarpParser = new PoliqarpPlusParser(tokens); // Don't throw out erroneous stuff poliqarpParser.setErrorHandler(new BailErrorStrategy()); poliqarpParser.removeErrorListeners(); // Get starting rule from parser Method startRule = PoliqarpPlusParser.class.getMethod("request"); tree = (ParserRuleContext) startRule.invoke(poliqarpParser, (Object[])null); } // Some things went wrong ... catch (Exception e) { System.err.println( e.getMessage() ); } if (tree==null) throw new QueryException( "The query you specified could not be processed. 
Please make sure it is well-formed."); // Return the generated tree return tree; } public static void main(String[] args) { /* * For testing */ String[] queries = new String[] { "shrink(1|2:{1:[base=der]}{2:[base=Mann]})", // "[base=foo] meta (author=name&year=2000)", // "[base=foo] meta year=2000", "{[base=Mann]}", "shrink(1:[orth=Der]{1:[orth=Mann][orth=geht]})", "[base=Mann/i]" }; PoliqarpPlusTree.debug=true; for (String q : queries) { try { System.out.println(q); System.out.println(PoliqarpPlusTree.parsePoliqarpQuery(q).toStringTree(PoliqarpPlusTree.poliqarpParser)); @SuppressWarnings("unused") PoliqarpPlusTree pt = new PoliqarpPlusTree(q); System.out.println(q); System.out.println(); } catch (Exception npe) { npe.printStackTrace(); System.out.println("null\n"); } } } }
PQ values
src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusTree.java
PQ values
Java
bsd-2-clause
07d952aab9e888a19d306a44b77a600604a63b7a
0
scifio/scifio
// // MinimalTiffReader.java // /* OME Bio-Formats package for reading and converting biological file formats. Copyright (C) 2005-@year@ UW-Madison LOCI and Glencoe Software, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package loci.formats.in; import java.io.IOException; import loci.common.DataTools; import loci.common.RandomAccessInputStream; import loci.formats.FormatException; import loci.formats.FormatReader; import loci.formats.FormatTools; import loci.formats.tiff.IFD; import loci.formats.tiff.IFDList; import loci.formats.tiff.PhotoInterp; import loci.formats.tiff.TiffParser; /** * MinimalTiffReader is the superclass for file format readers compatible with * or derived from the TIFF 6.0 file format. * * <dl><dt><b>Source code:</b></dt> * <dd><a href="http://trac.openmicroscopy.org.uk/ome/browser/bioformats.git/components/bio-formats/src/loci/formats/in/MinimalTiffReader.java">Trac</a>, * <a href="http://git.openmicroscopy.org/?p=bioformats.git;a=blob;f=components/bio-formats/src/loci/formats/in/MinimalTiffReader.java;hb=HEAD">Gitweb</a></dd></dl> * * @author Melissa Linkert melissa at glencoesoftware.com */ public class MinimalTiffReader extends FormatReader { // -- Fields -- /** List of IFDs for the current TIFF. */ protected IFDList ifds; /** List of thumbnail IFDs for the current TIFF. */ protected IFDList thumbnailIFDs; protected TiffParser tiffParser; private int lastPlane; // -- Constructors -- /** Constructs a new MinimalTiffReader. */ public MinimalTiffReader() { this("Minimal TIFF", new String[] {"tif", "tiff"}); } /** Constructs a new MinimalTiffReader. */ public MinimalTiffReader(String name, String suffix) { this(name, new String[] {suffix}); } /** Constructs a new MinimalTiffReader. */ public MinimalTiffReader(String name, String[] suffixes) { super(name, suffixes); domains = new String[] {FormatTools.GRAPHICS_DOMAIN}; suffixNecessary = false; } // -- MinimalTiffReader API methods -- /** Gets the list of IFDs associated with the current TIFF's image planes. */ public IFDList getIFDs() { return ifds; } /** Gets the list of IFDs associated with the current TIFF's thumbnails. 
*/ public IFDList getThumbnailIFDs() { return thumbnailIFDs; } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { return new TiffParser(stream).isValidHeader(); } /* @see loci.formats.IFormatReader#get8BitLookupTable() */ public byte[][] get8BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (ifds == null || lastPlane < 0 || lastPlane > ifds.size()) return null; IFD lastIFD = ifds.get(lastPlane); int[] bits = lastIFD.getBitsPerSample(); if (bits[0] <= 8) { int[] colorMap = lastIFD.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null) { // it's possible that the LUT is only present in the first IFD if (lastPlane != 0) { lastIFD = ifds.get(0); colorMap = lastIFD.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null) return null; } else return null; } byte[][] table = new byte[3][colorMap.length / 3]; int next = 0; for (int j=0; j<table.length; j++) { for (int i=0; i<table[0].length; i++) { table[j][i] = (byte) ((colorMap[next++] >> 8) & 0xff); } } return table; } return null; } /* @see loci.formats.IFormatReader#get16BitLookupTable() */ public short[][] get16BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (ifds == null || lastPlane < 0 || lastPlane > ifds.size()) return null; IFD lastIFD = ifds.get(lastPlane); int[] bits = lastIFD.getBitsPerSample(); if (bits[0] <= 16 && bits[0] > 8) { int[] colorMap = lastIFD.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null || colorMap.length < 65536 * 3) { // it's possible that the LUT is only present in the first IFD if (lastPlane != 0) { lastIFD = ifds.get(0); colorMap = lastIFD.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null || colorMap.length < 65536 * 3) return null; } else return null; } short[][] table = new short[3][colorMap.length / 3]; int next = 0; for (int i=0; i<table.length; i++) { for (int j=0; j<table[0].length; j++) { table[i][j] = (short) (colorMap[next++] & 0xffff); } } return table; } return null; } /* @see loci.formats.FormatReader#getThumbSizeX() */ public int getThumbSizeX() { if (thumbnailIFDs != null && thumbnailIFDs.size() > 0) { try { return (int) thumbnailIFDs.get(0).getImageWidth(); } catch (FormatException e) { LOGGER.debug("Could not retrieve thumbnail width", e); } } return super.getThumbSizeX(); } /* @see loci.formats.FormatReader#getThumbSizeY() */ public int getThumbSizeY() { if (thumbnailIFDs != null && thumbnailIFDs.size() > 0) { try { return (int) thumbnailIFDs.get(0).getImageLength(); } catch (FormatException e) { LOGGER.debug("Could not retrieve thumbnail height", e); } } return super.getThumbSizeY(); } /* @see loci.formats.FormatReader#openThumbBytes(int) */ public byte[] openThumbBytes(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (thumbnailIFDs == null || thumbnailIFDs.size() <= no) { return super.openThumbBytes(no); } int[] bps = null; try { bps = thumbnailIFDs.get(no).getBitsPerSample(); } catch (FormatException e) { } if (bps == null) { return super.openThumbBytes(no); } int b = bps[0]; while ((b % 8) != 0) b++; b /= 8; if (b != FormatTools.getBytesPerPixel(getPixelType()) || bps.length != getRGBChannelCount()) { return super.openThumbBytes(no); } byte[] buf = new byte[getThumbSizeX() * getThumbSizeY() * getRGBChannelCount() * FormatTools.getBytesPerPixel(getPixelType())]; return tiffParser.getSamples(thumbnailIFDs.get(no), buf); 
} /** * @see loci.formats.FormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); lastPlane = no; tiffParser.getSamples(ifds.get(no), buf, x, y, w, h); boolean float16 = getPixelType() == FormatTools.FLOAT && ifds.get(0).getBitsPerSample()[0] == 16; boolean float24 = getPixelType() == FormatTools.FLOAT && ifds.get(0).getBitsPerSample()[0] == 24; if (float16 || float24) { int nPixels = w * h * getRGBChannelCount(); int nBytes = float16 ? 2 : 3; int mantissaBits = float16 ? 10 : 16; int exponentBits = float16 ? 5 : 7; int maxExponent = (int) Math.pow(2, exponentBits) - 1; int bits = (nBytes * 8) - 1; byte[] newBuf = new byte[buf.length]; for (int i=0; i<nPixels; i++) { int v = DataTools.bytesToInt(buf, i * nBytes, nBytes, isLittleEndian()); int sign = v >> bits; int exponent = (v >> mantissaBits) & (int) (Math.pow(2, exponentBits) - 1); int mantissa = v & (int) (Math.pow(2, mantissaBits) - 1); if (exponent == 0) { if (mantissa != 0) { while ((mantissa & (int) Math.pow(2, mantissaBits)) == 0) { mantissa <<= 1; exponent--; } exponent++; mantissa &= (int) (Math.pow(2, mantissaBits) - 1); exponent += 127 - (Math.pow(2, exponentBits - 1) - 1); } } else if (exponent == maxExponent) { exponent = 255; } else { exponent += 127 - (Math.pow(2, exponentBits - 1) - 1); } mantissa <<= (23 - mantissaBits); int value = (sign << 31) | (exponent << 23) | mantissa; DataTools.unpackBytes(value, newBuf, i * 4, 4, isLittleEndian()); } System.arraycopy(newBuf, 0, buf, 0, newBuf.length); } return buf; } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { ifds = null; thumbnailIFDs = null; lastPlane = 0; tiffParser = null; } } /* @see loci.formats.IFormatReader#getOptimalTileWidth() */ public int getOptimalTileWidth() { FormatTools.assertId(currentId, true, 1); try { return (int) ifds.get(0).getTileWidth(); } catch (FormatException e) { LOGGER.debug("Could not retrieve tile width", e); } return super.getOptimalTileWidth(); } /* @see loci.formats.IFormatReader#getOptimalTileHeight() */ public int getOptimalTileHeight() { FormatTools.assertId(currentId, true, 1); try { return (int) ifds.get(0).getTileLength(); } catch (FormatException e) { LOGGER.debug("Could not retrieve tile height", e); } return super.getOptimalTileHeight(); } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { super.initFile(id); in = new RandomAccessInputStream(id); tiffParser = new TiffParser(in); tiffParser.setDoCaching(false); Boolean littleEndian = tiffParser.checkHeader(); if (littleEndian == null) { throw new FormatException("Invalid TIFF file"); } boolean little = littleEndian.booleanValue(); in.order(little); LOGGER.info("Reading IFDs"); ifds = tiffParser.getNonThumbnailIFDs(); if (ifds == null || ifds.size() == 0) { throw new FormatException("No IFDs found"); } thumbnailIFDs = tiffParser.getThumbnailIFDs(); LOGGER.info("Populating metadata"); core[0].imageCount = ifds.size(); for (IFD ifd : ifds) { tiffParser.fillInIFD(ifd); } IFD firstIFD = ifds.get(0); PhotoInterp photo = firstIFD.getPhotometricInterpretation(); int samples = firstIFD.getSamplesPerPixel(); core[0].rgb = samples > 1 || photo == PhotoInterp.RGB; core[0].interleaved = false; 
core[0].littleEndian = firstIFD.isLittleEndian(); core[0].sizeX = (int) firstIFD.getImageWidth(); core[0].sizeY = (int) firstIFD.getImageLength(); core[0].sizeZ = 1; core[0].sizeC = isRGB() ? samples : 1; core[0].sizeT = ifds.size(); core[0].pixelType = firstIFD.getPixelType(); core[0].metadataComplete = true; core[0].indexed = photo == PhotoInterp.RGB_PALETTE && (get8BitLookupTable() != null || get16BitLookupTable() != null); if (isIndexed()) { core[0].sizeC = 1; core[0].rgb = false; for (IFD ifd : ifds) { ifd.putIFDValue(IFD.PHOTOMETRIC_INTERPRETATION, PhotoInterp.RGB_PALETTE); } } if (getSizeC() == 1 && !isIndexed()) core[0].rgb = false; core[0].falseColor = false; core[0].dimensionOrder = "XYCZT"; core[0].bitsPerPixel = firstIFD.getBitsPerSample()[0]; } }
components/bio-formats/src/loci/formats/in/MinimalTiffReader.java
// // MinimalTiffReader.java // /* OME Bio-Formats package for reading and converting biological file formats. Copyright (C) 2005-@year@ UW-Madison LOCI and Glencoe Software, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package loci.formats.in; import java.io.IOException; import loci.common.DataTools; import loci.common.RandomAccessInputStream; import loci.formats.FormatException; import loci.formats.FormatReader; import loci.formats.FormatTools; import loci.formats.tiff.IFD; import loci.formats.tiff.IFDList; import loci.formats.tiff.PhotoInterp; import loci.formats.tiff.TiffParser; /** * MinimalTiffReader is the superclass for file format readers compatible with * or derived from the TIFF 6.0 file format. * * <dl><dt><b>Source code:</b></dt> * <dd><a href="http://trac.openmicroscopy.org.uk/ome/browser/bioformats.git/components/bio-formats/src/loci/formats/in/MinimalTiffReader.java">Trac</a>, * <a href="http://git.openmicroscopy.org/?p=bioformats.git;a=blob;f=components/bio-formats/src/loci/formats/in/MinimalTiffReader.java;hb=HEAD">Gitweb</a></dd></dl> * * @author Melissa Linkert melissa at glencoesoftware.com */ public class MinimalTiffReader extends FormatReader { // -- Fields -- /** List of IFDs for the current TIFF. */ protected IFDList ifds; /** List of thumbnail IFDs for the current TIFF. */ protected IFDList thumbnailIFDs; protected TiffParser tiffParser; private int lastPlane; // -- Constructors -- /** Constructs a new MinimalTiffReader. */ public MinimalTiffReader() { this("Minimal TIFF", new String[] {"tif", "tiff"}); } /** Constructs a new MinimalTiffReader. */ public MinimalTiffReader(String name, String suffix) { this(name, new String[] {suffix}); } /** Constructs a new MinimalTiffReader. */ public MinimalTiffReader(String name, String[] suffixes) { super(name, suffixes); domains = new String[] {FormatTools.GRAPHICS_DOMAIN}; suffixNecessary = false; } // -- MinimalTiffReader API methods -- /** Gets the list of IFDs associated with the current TIFF's image planes. */ public IFDList getIFDs() { return ifds; } /** Gets the list of IFDs associated with the current TIFF's thumbnails. 
*/ public IFDList getThumbnailIFDs() { return thumbnailIFDs; } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { return new TiffParser(stream).isValidHeader(); } /* @see loci.formats.IFormatReader#get8BitLookupTable() */ public byte[][] get8BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (ifds == null || lastPlane < 0 || lastPlane > ifds.size()) return null; IFD lastIFD = ifds.get(lastPlane); int[] bits = lastIFD.getBitsPerSample(); if (bits[0] <= 8) { int[] colorMap = lastIFD.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null) { // it's possible that the LUT is only present in the first IFD if (lastPlane != 0) { lastIFD = ifds.get(0); colorMap = lastIFD.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null) return null; } else return null; } byte[][] table = new byte[3][colorMap.length / 3]; int next = 0; for (int j=0; j<table.length; j++) { for (int i=0; i<table[0].length; i++) { table[j][i] = (byte) ((colorMap[next++] >> 8) & 0xff); } } return table; } return null; } /* @see loci.formats.IFormatReader#get16BitLookupTable() */ public short[][] get16BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (ifds == null || lastPlane < 0 || lastPlane > ifds.size()) return null; IFD lastIFD = ifds.get(lastPlane); int[] bits = lastIFD.getBitsPerSample(); if (bits[0] <= 16 && bits[0] > 8) { int[] colorMap = lastIFD.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null || colorMap.length < 65536 * 3) { // it's possible that the LUT is only present in the first IFD if (lastPlane != 0) { lastIFD = ifds.get(0); colorMap = lastIFD.getIFDIntArray(IFD.COLOR_MAP); if (colorMap == null || colorMap.length < 65536 * 3) return null; } else return null; } short[][] table = new short[3][colorMap.length / 3]; int next = 0; for (int i=0; i<table.length; i++) { for (int j=0; j<table[0].length; j++) { table[i][j] = (short) (colorMap[next++] & 0xffff); } } return table; } return null; } /* @see loci.formats.FormatReader#getThumbSizeX() */ public int getThumbSizeX() { if (thumbnailIFDs != null && thumbnailIFDs.size() > 0) { try { return (int) thumbnailIFDs.get(0).getImageWidth(); } catch (FormatException e) { LOGGER.debug("Could not retrieve thumbnail width", e); } } return super.getThumbSizeX(); } /* @see loci.formats.FormatReader#getThumbSizeY() */ public int getThumbSizeY() { if (thumbnailIFDs != null && thumbnailIFDs.size() > 0) { try { return (int) thumbnailIFDs.get(0).getImageLength(); } catch (FormatException e) { LOGGER.debug("Could not retrieve thumbnail height", e); } } return super.getThumbSizeY(); } /* @see loci.formats.FormatReader#openThumbBytes(int) */ public byte[] openThumbBytes(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (thumbnailIFDs == null || thumbnailIFDs.size() <= no) { return super.openThumbBytes(no); } int[] bps = thumbnailIFDs.get(no).getBitsPerSample(); int b = bps[0]; while ((b % 8) != 0) b++; b /= 8; if (b != FormatTools.getBytesPerPixel(getPixelType()) || bps.length != getRGBChannelCount()) { return super.openThumbBytes(no); } byte[] buf = new byte[getThumbSizeX() * getThumbSizeY() * getRGBChannelCount() * FormatTools.getBytesPerPixel(getPixelType())]; return tiffParser.getSamples(thumbnailIFDs.get(no), buf); } /** * @see loci.formats.FormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] 
openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); lastPlane = no; tiffParser.getSamples(ifds.get(no), buf, x, y, w, h); boolean float16 = getPixelType() == FormatTools.FLOAT && ifds.get(0).getBitsPerSample()[0] == 16; boolean float24 = getPixelType() == FormatTools.FLOAT && ifds.get(0).getBitsPerSample()[0] == 24; if (float16 || float24) { int nPixels = w * h * getRGBChannelCount(); int nBytes = float16 ? 2 : 3; int mantissaBits = float16 ? 10 : 16; int exponentBits = float16 ? 5 : 7; int maxExponent = (int) Math.pow(2, exponentBits) - 1; int bits = (nBytes * 8) - 1; byte[] newBuf = new byte[buf.length]; for (int i=0; i<nPixels; i++) { int v = DataTools.bytesToInt(buf, i * nBytes, nBytes, isLittleEndian()); int sign = v >> bits; int exponent = (v >> mantissaBits) & (int) (Math.pow(2, exponentBits) - 1); int mantissa = v & (int) (Math.pow(2, mantissaBits) - 1); if (exponent == 0) { if (mantissa != 0) { while ((mantissa & (int) Math.pow(2, mantissaBits)) == 0) { mantissa <<= 1; exponent--; } exponent++; mantissa &= (int) (Math.pow(2, mantissaBits) - 1); exponent += 127 - (Math.pow(2, exponentBits - 1) - 1); } } else if (exponent == maxExponent) { exponent = 255; } else { exponent += 127 - (Math.pow(2, exponentBits - 1) - 1); } mantissa <<= (23 - mantissaBits); int value = (sign << 31) | (exponent << 23) | mantissa; DataTools.unpackBytes(value, newBuf, i * 4, 4, isLittleEndian()); } System.arraycopy(newBuf, 0, buf, 0, newBuf.length); } return buf; } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { ifds = null; thumbnailIFDs = null; lastPlane = 0; tiffParser = null; } } /* @see loci.formats.IFormatReader#getOptimalTileWidth() */ public int getOptimalTileWidth() { FormatTools.assertId(currentId, true, 1); try { return (int) ifds.get(0).getTileWidth(); } catch (FormatException e) { LOGGER.debug("Could not retrieve tile width", e); } return super.getOptimalTileWidth(); } /* @see loci.formats.IFormatReader#getOptimalTileHeight() */ public int getOptimalTileHeight() { FormatTools.assertId(currentId, true, 1); try { return (int) ifds.get(0).getTileLength(); } catch (FormatException e) { LOGGER.debug("Could not retrieve tile height", e); } return super.getOptimalTileHeight(); } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { super.initFile(id); in = new RandomAccessInputStream(id); tiffParser = new TiffParser(in); tiffParser.setDoCaching(false); Boolean littleEndian = tiffParser.checkHeader(); if (littleEndian == null) { throw new FormatException("Invalid TIFF file"); } boolean little = littleEndian.booleanValue(); in.order(little); LOGGER.info("Reading IFDs"); ifds = tiffParser.getNonThumbnailIFDs(); if (ifds == null || ifds.size() == 0) { throw new FormatException("No IFDs found"); } thumbnailIFDs = tiffParser.getThumbnailIFDs(); LOGGER.info("Populating metadata"); core[0].imageCount = ifds.size(); for (IFD ifd : ifds) { tiffParser.fillInIFD(ifd); } IFD firstIFD = ifds.get(0); PhotoInterp photo = firstIFD.getPhotometricInterpretation(); int samples = firstIFD.getSamplesPerPixel(); core[0].rgb = samples > 1 || photo == PhotoInterp.RGB; core[0].interleaved = false; core[0].littleEndian = firstIFD.isLittleEndian(); core[0].sizeX = (int) firstIFD.getImageWidth(); 
core[0].sizeY = (int) firstIFD.getImageLength(); core[0].sizeZ = 1; core[0].sizeC = isRGB() ? samples : 1; core[0].sizeT = ifds.size(); core[0].pixelType = firstIFD.getPixelType(); core[0].metadataComplete = true; core[0].indexed = photo == PhotoInterp.RGB_PALETTE && (get8BitLookupTable() != null || get16BitLookupTable() != null); if (isIndexed()) { core[0].sizeC = 1; core[0].rgb = false; for (IFD ifd : ifds) { ifd.putIFDValue(IFD.PHOTOMETRIC_INTERPRETATION, PhotoInterp.RGB_PALETTE); } } if (getSizeC() == 1 && !isIndexed()) core[0].rgb = false; core[0].falseColor = false; core[0].dimensionOrder = "XYCZT"; core[0].bitsPerPixel = firstIFD.getBitsPerSample()[0]; } }
Fall back to default openThumbBytes(...) implementation when necessary.
components/bio-formats/src/loci/formats/in/MinimalTiffReader.java
Fall back to default openThumbBytes(...) implementation when necessary.
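A minimal illustrative sketch (not Bio-Formats API) of the fallback pattern the commit message above describes: use the stored thumbnail only when its pixel layout matches what the caller expects, otherwise delegate to a default rendering path. ThumbSource, readThumb and defaultThumbBytes are hypothetical names introduced for this example.

import java.util.Optional;

public class ThumbFallbackSketch {

    /** Hypothetical provider of pre-rendered thumbnail data. */
    interface ThumbSource {
        Optional<int[]> bitsPerSample(int no); // may be absent or malformed
        byte[] readThumb(int no);
    }

    private final ThumbSource source;
    private final int expectedBytesPerPixel;
    private final int expectedChannels;

    ThumbFallbackSketch(ThumbSource source, int expectedBytesPerPixel, int expectedChannels) {
        this.source = source;
        this.expectedBytesPerPixel = expectedBytesPerPixel;
        this.expectedChannels = expectedChannels;
    }

    /** Returns thumbnail bytes, falling back to a default rendering when the
     *  stored thumbnail metadata is missing or does not match the expected layout. */
    byte[] openThumbBytes(int no) {
        Optional<int[]> bps = source.bitsPerSample(no);
        if (!bps.isPresent()) {
            return defaultThumbBytes(no); // metadata missing, fall back
        }
        int bytes = (bps.get()[0] + 7) / 8; // round bits per sample up to whole bytes
        if (bytes != expectedBytesPerPixel || bps.get().length != expectedChannels) {
            return defaultThumbBytes(no); // layout mismatch, fall back
        }
        return source.readThumb(no); // stored thumbnail is safe to use
    }

    private byte[] defaultThumbBytes(int no) {
        // Stand-in for the superclass default (e.g. scaling down the full-resolution plane).
        return new byte[0];
    }
}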
Java
bsd-3-clause
7041a4d2132a79a82d94e648bb34679ce22470fc
0
eclipse/rdf4j,eclipse/rdf4j,eclipse/rdf4j,eclipse/rdf4j,eclipse/rdf4j,eclipse/rdf4j
/******************************************************************************* * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/org/documents/edl-v10.php. *******************************************************************************/ package org.eclipse.rdf4j.console; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.Path; import java.nio.file.Paths; import org.eclipse.rdf4j.rio.RDFParseException; import org.jline.reader.EndOfFileException; import org.jline.reader.History; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; import org.jline.reader.impl.history.DefaultHistory; import org.jline.terminal.Terminal; import org.jline.terminal.TerminalBuilder; /** * @author Dale Visser */ public class ConsoleIO { private static final String PLEASE_OPEN_FIRST = "please open a repository first"; private final Terminal terminal; private final LineReader input; private final ConsoleState appInfo; private boolean echo = false; private boolean quiet = false; private boolean force = false; private boolean cautious = false; private boolean errorWritten; /** * Constructor * * @param input * @param out * @param info * @throws IOException */ public ConsoleIO(InputStream input, OutputStream out, ConsoleState info) throws IOException { this.terminal = TerminalBuilder.builder().system(false).streams(input, out).build(); this.appInfo = info; this.input = buildLineReader(); } /** * Constructor * * @param info * @throws IOException */ public ConsoleIO(ConsoleState info) throws IOException { this.terminal = TerminalBuilder.terminal(); this.appInfo = info; this.input = buildLineReader(); } /** * Build JLine line reader with default history * * @return line reader */ private LineReader buildLineReader() { History history = new DefaultHistory(); LineReader reader = LineReaderBuilder.builder().terminal(this.terminal).history(history).build(); Path file = Paths.get(appInfo.getDataDirectory().toString(), "history.txt"); reader.setVariable(LineReader.HISTORY_FILE, file); return reader; } /** * Get the JLine line reader * * @return line reader */ public LineReader getLineReader() { return this.input; } /** * Get JLine terminal output stream * * @return output stream */ public OutputStream getOutputStream() { return terminal.output(); } /** * Read a command from input * * @return one line of input, or null on error * @throws IOException */ protected String readCommand() throws IOException { try { String line = input.readLine(getPrompt()); if (line == null) { return null; } line = line.trim(); if (line.endsWith(".")) { line = line.substring(0, line.length() - 1); } return line; } catch (EndOfFileException e) { return null; } } /** * Get command prompt. * * Contains the name of the current repository when connected. * * @return command prompt string */ private String getPrompt() { String repositoryID = appInfo.getRepositoryID(); if (quiet) { return ""; } else if (repositoryID != null) { return repositoryID + "> "; } else { return "> "; } } /** * Reads multiple lines from the input until a line that with a '.' on its own is read. 
* * @return * @throws IOException */ public String readMultiLineInput() throws IOException { return readMultiLineInput("> "); } /** * Reads multiple lines from the input until a line that with a '.' on its own is read. * * @param prompt * @return * @throws IOException */ public String readMultiLineInput(String prompt) throws IOException { String line = input.readLine(prompt); String result = null; if (line != null) { final StringBuilder buf = new StringBuilder(256); buf.append(line); while (line != null && !(line.length() == 1 && line.endsWith("."))) { line = input.readLine("> "); buf.append('\n'); buf.append(line); } // Remove closing dot buf.setLength(buf.length() - 1); result = buf.toString().trim(); } if (echo) { writeln(result); } return result; } /** * Read message from input * * @param message one or multiple messages * @return * @throws IOException */ public String readln(String... message) throws IOException { String prompt = !quiet && message.length > 0 && message[0] != null ? message[0] : ""; String result = input.readLine(prompt); if (echo) { writeln(result); } return result; } /** * Read password from input * * @param prompt prompt to display * @return password string * @throws IOException */ public String readPassword(final String prompt) throws IOException { String result = input.readLine(prompt, '*'); if (echo && !result.isEmpty()) { writeln("************"); } return result; } /** * Write a string * * @param string string to write */ public void write(final String string) { terminal.writer().print(string); } /** * Write a newline */ public void writeln() { terminal.writer().println(); } /** * Write a string, followed by a newline * * @param string string to write */ public void writeln(final String string) { terminal.writer().println(string); } /** * Write an error message * * @param errMsg error message */ public void writeError(final String errMsg) { terminal.writer().println(errMsg); errorWritten = true; } /** * Write a "please open first" error message */ public void writeUnopenedError() { writeError(PLEASE_OPEN_FIRST); } /** * Write parser error * * @param prefix * @param lineNo line number * @param colNo column number * @param msg message to write */ public void writeParseError(String prefix, long lineNo, long colNo, String msg) { String locationString = RDFParseException.getLocationString(lineNo, colNo); int locSize = locationString.length(); StringBuilder builder = new StringBuilder(locSize + prefix.length() + msg.length() + 3); builder.append(prefix).append(": ").append(msg); if (locSize > 0) { builder.append(" ").append(locationString); } writeError(builder.toString()); } /** * Ask if the user wants to continue * * @param msg confirmation question * @param defaultValue true when default is yes * @return true when continue * @throws IOException */ public boolean askProceed(String msg, boolean defaultValue) throws IOException { final String defaultString = defaultValue ? "yes" : "no"; boolean result = force ? true : (cautious ? false : defaultValue); if (!force && !cautious) { while (true) { writeln(msg); final String reply = readln("Proceed? 
(yes|no) [" + defaultString + "]: "); if ("no".equalsIgnoreCase(reply) || "no.".equalsIgnoreCase(reply)) { result = false; break; } else if ("yes".equalsIgnoreCase(reply) || "yes.".equalsIgnoreCase(reply)) { result = true; break; } else if (reply.trim().isEmpty()) { break; } } } return result; } /** * Whether to echo user input to output stream * * @param echo true to echo input */ protected void setEcho(boolean echo) { this.echo = echo; } /** * Whether to suppress printing of prompts to output * * @param quiet true to suppress printing */ public void setQuiet(boolean quiet) { this.quiet = quiet; } /** * Force commands to proceed */ public void setForce() { this.force = true; } /** * Be cautious when executing commands, opposite of force */ public void setCautious() { this.cautious = true; } /** * Check if an error was written to the console * * @return true when error was written */ public boolean wasErrorWritten() { return errorWritten; } }
tools/console/src/main/java/org/eclipse/rdf4j/console/ConsoleIO.java
/******************************************************************************* * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/org/documents/edl-v10.php. *******************************************************************************/ package org.eclipse.rdf4j.console; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.Path; import java.nio.file.Paths; import org.eclipse.rdf4j.rio.RDFParseException; import org.jline.reader.EndOfFileException; import org.jline.reader.History; import org.jline.reader.LineReader; import org.jline.reader.LineReaderBuilder; import org.jline.reader.impl.history.DefaultHistory; import org.jline.terminal.Terminal; import org.jline.terminal.TerminalBuilder; /** * @author Dale Visser */ public class ConsoleIO { private static final String PLEASE_OPEN_FIRST = "please open a repository first"; private final Terminal terminal; private final LineReader input; private final ConsoleState appInfo; private boolean echo = false; private boolean quiet = false; private boolean force = false; private boolean cautious = false; private boolean errorWritten; /** * Constructor * * @param input * @param out * @param info * @throws IOException */ public ConsoleIO(InputStream input, OutputStream out, ConsoleState info) throws IOException { this.terminal = TerminalBuilder.builder().system(false).streams(input, out).build(); this.appInfo = info; this.input = buildLineReader(); } /** * Constructor * * @param info * @throws IOException */ public ConsoleIO(ConsoleState info) throws IOException { this.terminal = TerminalBuilder.terminal(); this.appInfo = info; this.input = buildLineReader(); } /** * Build JLine line reader with default history * * @return line reader */ private LineReader buildLineReader() { History history = new DefaultHistory(); LineReader reader = LineReaderBuilder.builder().terminal(this.terminal).history(history).build(); Path file = Paths.get(appInfo.getDataDirectory().toString(), "history.txt"); reader.setVariable(LineReader.HISTORY_FILE, file); return reader; } /** * Get the JLine line reader * * @return line reader */ public LineReader getLineReader() { return this.input; } /** * Get JLine terminal output stream * * @return output stream */ public OutputStream getOutputStream() { return terminal.output(); } /** * Read a command from input * * @return one line of input, or null on error * @throws IOException */ protected String readCommand() throws IOException { try { String line = input.readLine(getPrompt()); if (line == null) { return null; } line = line.trim(); if (line.endsWith(".")) { line = line.substring(0, line.length() - 1); } return line; } catch (EndOfFileException e) { return null; } } /** * Get command prompt. * * Contains the name of the current repository when connected. * * @return command prompt string */ private String getPrompt() { String repositoryID = appInfo.getRepositoryID(); if (quiet) { return ""; } else if (repositoryID != null) { return repositoryID + "> "; } else { return "> "; } } /** * Reads multiple lines from the input until a line that with a '.' on its own is read. 
* * @return * @throws IOException */ public String readMultiLineInput() throws IOException { return readMultiLineInput("> "); } /** * Reads multiple lines from the input until a line that with a '.' on its own is read. * * @param prompt * @return * @throws IOException */ public String readMultiLineInput(String prompt) throws IOException { String line = input.readLine(prompt); String result = null; if (line != null) { final StringBuilder buf = new StringBuilder(256); buf.append(line); while (line != null && !(line.length() == 1 && line.endsWith("."))) { line = input.readLine("> "); buf.append('\n'); buf.append(line); } // Remove closing dot buf.setLength(buf.length() - 1); result = buf.toString().trim(); } if (echo) { writeln(result); } return result; } /** * Read message from input * * @param message one or multiple messages * @return * @throws IOException */ public String readln(String... message) throws IOException { String prompt = !quiet && message.length > 0 && message[0] != null ? message[0] : ""; String result = input.readLine(prompt); if (echo) { writeln(result); } return result; } /** * Read password from input * * @param prompt prompt to display * @return password string * @throws IOException */ public String readPassword(final String prompt) throws IOException { String result = input.readLine(prompt, '*'); if (echo && !result.isEmpty()) { writeln("************"); } return result; } /** * Write a string * * @param string string to write */ public void write(final String string) { terminal.writer().print(string); } /** * Write a newline */ public void writeln() { terminal.writer().println(); } /** * Write a string, followed by a newline * * @param string string to write */ public void writeln(final String string) { terminal.writer().println(string); } /** * Write an error message * * @param errMsg error message */ public void writeError(final String errMsg) { terminal.writer().println(errMsg); errorWritten = true; } /** * Write a "please open first" error message */ public void writeUnopenedError() { writeError(PLEASE_OPEN_FIRST); } /** * Write parser error * * @param prefix * @param lineNo line number * @param colNo column number * @param msg message to write */ public void writeParseError(String prefix, long lineNo, long colNo, String msg) { String locationString = RDFParseException.getLocationString(lineNo, colNo); int locSize = locationString.length(); StringBuilder builder = new StringBuilder(locSize + prefix.length() + msg.length() + 3); builder.append(prefix).append(": ").append(msg); if (locSize > 0) { builder.append(" ").append(locationString); } writeError(builder.toString()); } /** * Ask if the user wants to continue * * @param msg confirmation question * @param defaultValue true when default is yes * @return true when continue * @throws IOException */ public boolean askProceed(String msg, boolean defaultValue) throws IOException { final String defaultString = defaultValue ? "yes" : "no"; boolean result = force ? true : (cautious ? false : defaultValue); if (!force && !cautious) { while (true) { writeln(msg); final String reply = readln("Proceed? 
(yes|no) [" + defaultString + "]: "); if ("no".equalsIgnoreCase(reply) || "no.".equalsIgnoreCase(reply)) { result = false; break; } else if ("yes".equalsIgnoreCase(reply) || "yes.".equalsIgnoreCase(reply)) { result = true; break; } else if (reply.trim().isEmpty()) { break; } } } return result; } /** * Whether to echo user input to output stream * * @param echo true to echo input */ protected void setEcho(boolean echo) { this.echo = echo; } /** * Whether to suppress printing of prompts to output * * @param quiet true to suppress printing */ public void setQuiet(boolean quiet) { this.quiet = quiet; } /** * Force commands to proceed */ public void setForce() { this.force = true; } /** * Be cautious when executing commands, opposite of force */ public void setCautious() { this.cautious = true; } /** * Check if an error was written to the console * * @return true when error was written */ public boolean wasErrorWritten() { return errorWritten; } }
Resolve merge conflict
tools/console/src/main/java/org/eclipse/rdf4j/console/ConsoleIO.java
Resolve merge conflict
Java
mit
c29a68a4fd1edfe52b6c299f3a15897629667757
0
douggie/XChange
package org.knowm.xchange.huobi; import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.knowm.xchange.currency.Currency; import org.knowm.xchange.currency.CurrencyPair; import org.knowm.xchange.dto.Order; import org.knowm.xchange.dto.Order.OrderStatus; import org.knowm.xchange.dto.Order.OrderType; import org.knowm.xchange.dto.account.Balance; import org.knowm.xchange.dto.account.Wallet; import org.knowm.xchange.dto.marketdata.Ticker; import org.knowm.xchange.dto.meta.CurrencyMetaData; import org.knowm.xchange.dto.meta.CurrencyPairMetaData; import org.knowm.xchange.dto.meta.ExchangeMetaData; import org.knowm.xchange.dto.trade.LimitOrder; import org.knowm.xchange.dto.trade.MarketOrder; import org.knowm.xchange.dto.trade.OpenOrders; import org.knowm.xchange.huobi.dto.account.HuobiBalanceRecord; import org.knowm.xchange.huobi.dto.account.HuobiBalanceSum; import org.knowm.xchange.huobi.dto.marketdata.HuobiAsset; import org.knowm.xchange.huobi.dto.marketdata.HuobiAssetPair; import org.knowm.xchange.huobi.dto.marketdata.HuobiTicker; import org.knowm.xchange.huobi.dto.trade.HuobiOrder; public class HuobiAdapters { public static Ticker adaptTicker(HuobiTicker huobiTicker, CurrencyPair currencyPair) { Ticker.Builder builder = new Ticker.Builder(); builder.open(huobiTicker.getOpen()); builder.ask(huobiTicker.getAsk().getPrice()); builder.bid(huobiTicker.getBid().getPrice()); builder.last(huobiTicker.getClose()); builder.high(huobiTicker.getHigh()); builder.low(huobiTicker.getLow()); builder.volume(huobiTicker.getVol()); builder.timestamp(huobiTicker.getTs()); builder.currencyPair(currencyPair); return builder.build(); } static ExchangeMetaData adaptToExchangeMetaData( HuobiAssetPair[] assetPairs, HuobiAsset[] assets) { HuobiUtils.setHuobiAssets(assets); HuobiUtils.setHuobiAssetPairs(assetPairs); Map<CurrencyPair, CurrencyPairMetaData> pairs = new HashMap<>(); for (HuobiAssetPair pair : assetPairs) { pairs.put(adaptCurrencyPair(pair.getKey()), adaptPair(pair)); } Map<Currency, CurrencyMetaData> currencies = new HashMap<>(); for (HuobiAsset asset : assets) { Currency currency = adaptCurrency(asset.getAsset()); currencies.put(currency, new CurrencyMetaData(0, null)); } return new ExchangeMetaData(pairs, currencies, null, null, false); } private static CurrencyPair adaptCurrencyPair(String currencyPair) { return HuobiUtils.translateHuobiCurrencyPair(currencyPair); } private static CurrencyPairMetaData adaptPair(HuobiAssetPair pair) { return new CurrencyPairMetaData(null, null, null, new Integer(pair.getPricePrecision())); } private static Currency adaptCurrency(String currency) { return HuobiUtils.translateHuobiCurrencyCode(currency); } public static Wallet adaptWallet(Map<String, HuobiBalanceSum> huobiWallet) { List<Balance> balances = new ArrayList<>(huobiWallet.size()); for (Map.Entry<String, HuobiBalanceSum> record : huobiWallet.entrySet()) { Currency currency = adaptCurrency(record.getKey()); Balance balance = new Balance( currency, record.getValue().getTotal(), record.getValue().getAvailable(), record.getValue().getFrozen()); balances.add(balance); } return new Wallet(balances); } public static Map<String, HuobiBalanceSum> adaptBalance(HuobiBalanceRecord[] huobiBalance) { Map<String, HuobiBalanceSum> map = new HashMap<>(); for (HuobiBalanceRecord record : huobiBalance) { HuobiBalanceSum sum = map.get(record.getCurrency()); if (sum == null) { sum = new HuobiBalanceSum(); map.put(record.getCurrency(), sum); } if 
(record.getType().equals("trade")) { sum.setAvailable(record.getBalance()); } else if (record.getType().equals("frozen")) { sum.setFrozen(record.getBalance()); } } return map; } public static OpenOrders adaptOpenOrders(HuobiOrder[] openOrders) { List<LimitOrder> limitOrders = new ArrayList<>(); for (HuobiOrder openOrder : openOrders) { if (openOrder.isLimit()) { limitOrders.add((LimitOrder) adaptOrder(openOrder)); } } return new OpenOrders(limitOrders); } private static Order adaptOrder(HuobiOrder openOrder) { Order order = null; OrderType orderType = adaptOrderType(openOrder.getType()); CurrencyPair currencyPair = adaptCurrencyPair(openOrder.getSymbol()); if (openOrder.isMarket()) { order = new MarketOrder( orderType, openOrder.getAmount(), currencyPair, String.valueOf(openOrder.getId()), openOrder.getCreatedAt(), openOrder.getFieldCashAmount().divide(openOrder.getFieldAmount(), 8, BigDecimal.ROUND_DOWN), openOrder.getFieldAmount(), openOrder.getFieldFees(), null); } if (openOrder.isLimit()) { order = new LimitOrder( orderType, openOrder.getAmount(), openOrder.getFieldAmount(), currencyPair, String.valueOf(openOrder.getId()), openOrder.getCreatedAt(), openOrder.getPrice()); order.setAveragePrice(openOrder.getFieldCashAmount().divide(openOrder.getFieldAmount(), 8, BigDecimal.ROUND_DOWN)); } if (order != null) { order.setOrderStatus(adaptOrderStatus(openOrder.getState())); } return order; } private static OrderStatus adaptOrderStatus(String huobiStatus) { OrderStatus result = OrderStatus.UNKNOWN; switch (huobiStatus) { case "pre-submitted": result = OrderStatus.PENDING_NEW; break; case "submitting": result = OrderStatus.PENDING_NEW; break; case "submitted": result = OrderStatus.NEW; break; case "partial-filled": result = OrderStatus.PARTIALLY_FILLED; break; case "partial-canceled": result = OrderStatus.PARTIALLY_CANCELED; break; case "filled": result = OrderStatus.FILLED; break; case "canceled": result = OrderStatus.CANCELED; break; } return result; } private static OrderType adaptOrderType(String orderType) { if (orderType.startsWith("buy")) { return OrderType.BID; } if (orderType.startsWith("sell")) { return OrderType.ASK; } return null; } public static List<Order> adaptOrders(List<HuobiOrder> huobiOrders) { List<Order> orders = new ArrayList<>(); for (HuobiOrder order : huobiOrders) { orders.add(adaptOrder(order)); } return orders; } }
xchange-huobi/src/main/java/org/knowm/xchange/huobi/HuobiAdapters.java
package org.knowm.xchange.huobi; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.knowm.xchange.currency.Currency; import org.knowm.xchange.currency.CurrencyPair; import org.knowm.xchange.dto.Order; import org.knowm.xchange.dto.Order.OrderStatus; import org.knowm.xchange.dto.Order.OrderType; import org.knowm.xchange.dto.account.Balance; import org.knowm.xchange.dto.account.Wallet; import org.knowm.xchange.dto.marketdata.Ticker; import org.knowm.xchange.dto.meta.CurrencyMetaData; import org.knowm.xchange.dto.meta.CurrencyPairMetaData; import org.knowm.xchange.dto.meta.ExchangeMetaData; import org.knowm.xchange.dto.trade.LimitOrder; import org.knowm.xchange.dto.trade.MarketOrder; import org.knowm.xchange.dto.trade.OpenOrders; import org.knowm.xchange.huobi.dto.account.HuobiBalanceRecord; import org.knowm.xchange.huobi.dto.account.HuobiBalanceSum; import org.knowm.xchange.huobi.dto.marketdata.HuobiAsset; import org.knowm.xchange.huobi.dto.marketdata.HuobiAssetPair; import org.knowm.xchange.huobi.dto.marketdata.HuobiTicker; import org.knowm.xchange.huobi.dto.trade.HuobiOrder; public class HuobiAdapters { public static Ticker adaptTicker(HuobiTicker huobiTicker, CurrencyPair currencyPair) { Ticker.Builder builder = new Ticker.Builder(); builder.open(huobiTicker.getOpen()); builder.ask(huobiTicker.getAsk().getPrice()); builder.bid(huobiTicker.getBid().getPrice()); builder.last(huobiTicker.getClose()); builder.high(huobiTicker.getHigh()); builder.low(huobiTicker.getLow()); builder.volume(huobiTicker.getVol()); builder.timestamp(huobiTicker.getTs()); builder.currencyPair(currencyPair); return builder.build(); } static ExchangeMetaData adaptToExchangeMetaData( HuobiAssetPair[] assetPairs, HuobiAsset[] assets) { HuobiUtils.setHuobiAssets(assets); HuobiUtils.setHuobiAssetPairs(assetPairs); Map<CurrencyPair, CurrencyPairMetaData> pairs = new HashMap<>(); for (HuobiAssetPair pair : assetPairs) { pairs.put(adaptCurrencyPair(pair.getKey()), adaptPair(pair)); } Map<Currency, CurrencyMetaData> currencies = new HashMap<>(); for (HuobiAsset asset : assets) { Currency currency = adaptCurrency(asset.getAsset()); currencies.put(currency, new CurrencyMetaData(0, null)); } return new ExchangeMetaData(pairs, currencies, null, null, false); } private static CurrencyPair adaptCurrencyPair(String currencyPair) { return HuobiUtils.translateHuobiCurrencyPair(currencyPair); } private static CurrencyPairMetaData adaptPair(HuobiAssetPair pair) { return new CurrencyPairMetaData(null, null, null, new Integer(pair.getPricePrecision())); } private static Currency adaptCurrency(String currency) { return HuobiUtils.translateHuobiCurrencyCode(currency); } public static Wallet adaptWallet(Map<String, HuobiBalanceSum> huobiWallet) { List<Balance> balances = new ArrayList<>(huobiWallet.size()); for (Map.Entry<String, HuobiBalanceSum> record : huobiWallet.entrySet()) { Currency currency = adaptCurrency(record.getKey()); Balance balance = new Balance( currency, record.getValue().getTotal(), record.getValue().getAvailable(), record.getValue().getFrozen()); balances.add(balance); } return new Wallet(balances); } public static Map<String, HuobiBalanceSum> adaptBalance(HuobiBalanceRecord[] huobiBalance) { Map<String, HuobiBalanceSum> map = new HashMap<>(); for (HuobiBalanceRecord record : huobiBalance) { HuobiBalanceSum sum = map.get(record.getCurrency()); if (sum == null) { sum = new HuobiBalanceSum(); map.put(record.getCurrency(), sum); } if (record.getType().equals("trade")) 
{ sum.setAvailable(record.getBalance()); } else if (record.getType().equals("frozen")) { sum.setFrozen(record.getBalance()); } } return map; } public static OpenOrders adaptOpenOrders(HuobiOrder[] openOrders) { List<LimitOrder> limitOrders = new ArrayList<>(); for (HuobiOrder openOrder : openOrders) { if (openOrder.isLimit()) { limitOrders.add((LimitOrder) adaptOrder(openOrder)); } } return new OpenOrders(limitOrders); } private static Order adaptOrder(HuobiOrder openOrder) { Order order = null; OrderType orderType = adaptOrderType(openOrder.getType()); CurrencyPair currencyPair = adaptCurrencyPair(openOrder.getSymbol()); if (openOrder.isMarket()) { order = new MarketOrder( orderType, openOrder.getAmount(), currencyPair, String.valueOf(openOrder.getId()), openOrder.getCreatedAt(), null, openOrder.getFieldAmount(), openOrder.getFieldFees(), null); } if (openOrder.isLimit()) { order = new LimitOrder( orderType, openOrder.getAmount(), openOrder.getFieldAmount(), currencyPair, String.valueOf(openOrder.getId()), openOrder.getCreatedAt(), openOrder.getPrice()); } if (order != null) { order.setOrderStatus(adaptOrderStatus(openOrder.getState())); } return order; } private static OrderStatus adaptOrderStatus(String huobiStatus) { OrderStatus result = OrderStatus.UNKNOWN; switch (huobiStatus) { case "pre-submitted": result = OrderStatus.PENDING_NEW; break; case "submitting": result = OrderStatus.PENDING_NEW; break; case "submitted": result = OrderStatus.NEW; break; case "partial-filled": result = OrderStatus.PARTIALLY_FILLED; break; case "partial-canceled": result = OrderStatus.PARTIALLY_CANCELED; break; case "filled": result = OrderStatus.FILLED; break; case "canceled": result = OrderStatus.CANCELED; break; } return result; } private static OrderType adaptOrderType(String orderType) { if (orderType.startsWith("buy")) { return OrderType.BID; } if (orderType.startsWith("sell")) { return OrderType.ASK; } return null; } public static List<Order> adaptOrders(List<HuobiOrder> huobiOrders) { List<Order> orders = new ArrayList<>(); for (HuobiOrder order : huobiOrders) { orders.add(adaptOrder(order)); } return orders; } }
Provide an average price for Huobi trading
xchange-huobi/src/main/java/org/knowm/xchange/huobi/HuobiAdapters.java
Provide an average price for Huobi trading
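A minimal sketch of the average-price calculation the commit above introduces: filled cash amount divided by filled base amount, at a fixed scale with an explicit rounding mode, guarded against division by zero. The method and parameter names are hypothetical; only the BigDecimal division mirrors the actual change.

import java.math.BigDecimal;
import java.math.RoundingMode;

public class AveragePriceSketch {

    /** Returns cash / filled at 8 decimal places, or null when nothing has filled yet. */
    static BigDecimal averagePrice(BigDecimal filledCashAmount, BigDecimal filledBaseAmount) {
        if (filledBaseAmount == null || filledBaseAmount.signum() == 0) {
            return null; // avoid ArithmeticException on division by zero
        }
        return filledCashAmount.divide(filledBaseAmount, 8, RoundingMode.DOWN);
    }

    public static void main(String[] args) {
        // 0.5 BTC bought for 4700 USDT gives an average price of 9400 USDT per BTC
        System.out.println(averagePrice(new BigDecimal("4700"), new BigDecimal("0.5")));
    }
}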
Java
mit
3709ae717bd889f789afe5f41cfd1503a1ace1ff
0
RysingDragon/LandProtect
package com.initianovamc.rysingdragon.landprotect.listeners; import com.flowpowered.math.vector.Vector3i; import com.google.common.reflect.TypeToken; import com.initianovamc.rysingdragon.landprotect.config.GeneralConfig; import com.initianovamc.rysingdragon.landprotect.utils.Utils; import ninja.leaping.configurate.objectmapping.ObjectMappingException; import org.spongepowered.api.entity.living.player.Player; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.block.InteractBlockEvent; import org.spongepowered.api.event.filter.cause.First; import org.spongepowered.api.text.Text; import org.spongepowered.api.text.format.TextColors; import java.util.ArrayList; import java.util.List; import java.util.UUID; public class InteractBlockListener { @Listener public void onInteract(InteractBlockEvent.Secondary event, @First Player player) { if (event.getCause().containsType(Player.class)) { UUID worldUUID = player.getWorld().getUniqueId(); if (Utils.inAddInteractMode.contains(player.getUniqueId())) { Utils.inAddInteractMode.remove(player.getUniqueId()); event.setCancelled(true); try { List<String> interactables = GeneralConfig.getConfig().getConfigNode().getNode("Interactable").getList(TypeToken.of(String.class), new ArrayList<>()); if (interactables.contains(event.getTargetBlock().getState().getType().getId())) { player.sendMessage(Text.of(TextColors.RED, "This block is already added as interactable")); return; } interactables.add(event.getTargetBlock().getState().getType().getId()); GeneralConfig.getConfig().getConfigNode().getNode("Interactable").setValue(interactables); GeneralConfig.getConfig().save(); player.sendMessage(Text.of(TextColors.DARK_AQUA, "id ", TextColors.GOLD, event.getTargetBlock().getState().getType().getName(), TextColors.DARK_AQUA, " has been added to the list of interactable blocks")); } catch (ObjectMappingException e) { e.printStackTrace(); } } if (Utils.inRemoveInteractMode.contains(player.getUniqueId())) { Utils.inRemoveInteractMode.remove(player.getUniqueId()); event.setCancelled(true); try { List<String> interactables = GeneralConfig.getConfig().getConfigNode().getNode("Interactable").getList(TypeToken.of(String.class), new ArrayList<>()); if (!interactables.contains(event.getTargetBlock().getState().getType().getId())) { player.sendMessage(Text.of(TextColors.RED, "This block has not yet been added as interactable")); return; } interactables.remove(event.getTargetBlock().getState().getType().getId()); GeneralConfig.getConfig().getConfigNode().getNode("Interactable").setValue(interactables); GeneralConfig.getConfig().save(); player.sendMessage(Text.of(TextColors.DARK_AQUA, "id ", TextColors.GOLD, event.getTargetBlock().getState().getType().getName(), TextColors.DARK_AQUA, " has been removed from the list of interactable blocks")); } catch (ObjectMappingException e) { e.printStackTrace(); } } if (event.getTargetBlock().getLocation().isPresent()) { Vector3i chunk = event.getTargetBlock().getLocation().get().getChunkPosition(); if (Utils.isClaimed(chunk, worldUUID)) { if (Utils.isProtected(chunk, worldUUID)) { if (player.hasPermission("landprotect.protect.bypass")) { return; } try { List<String> interactables = GeneralConfig.getConfig().getConfigNode().getNode("Interactable").getList(TypeToken.of(String.class), new ArrayList<>()); if (!interactables.contains(event.getTargetBlock().getState().getType().getName())) { event.setCancelled(true); player.sendMessage(Text.of(TextColors.RED, "This land is claimed")); } else { return; } } catch 
(ObjectMappingException e) { e.printStackTrace(); } } if (Utils.isTrustedToClaim(chunk, player.getUniqueId(), worldUUID)) { return; } if (Utils.getClaimOwner(chunk, worldUUID).isPresent()) { UUID owner = Utils.getClaimOwner(chunk, worldUUID).get(); if (owner.equals(player.getUniqueId())) { return; } else if (Utils.isFriend(owner, player.getUniqueId())) { return; } } if (player.hasPermission("landprotect.claim.bypass")) { return; } event.setCancelled(true); player.sendMessage(Text.of(TextColors.RED, "This land is claimed")); } } } } }
src/main/java/com/initianovamc/rysingdragon/landprotect/listeners/InteractBlockListener.java
package com.initianovamc.rysingdragon.landprotect.listeners; import com.flowpowered.math.vector.Vector3i; import com.google.common.reflect.TypeToken; import com.initianovamc.rysingdragon.landprotect.config.GeneralConfig; import com.initianovamc.rysingdragon.landprotect.utils.Utils; import ninja.leaping.configurate.objectmapping.ObjectMappingException; import org.spongepowered.api.entity.living.player.Player; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.block.InteractBlockEvent; import org.spongepowered.api.event.filter.cause.First; import org.spongepowered.api.text.Text; import org.spongepowered.api.text.format.TextColors; import java.util.ArrayList; import java.util.List; import java.util.UUID; public class InteractBlockListener { @Listener public void onInteract(InteractBlockEvent.Secondary event, @First Player player) { if (event.getCause().containsType(Player.class)) { UUID worldUUID = player.getWorld().getUniqueId(); if (Utils.inAddInteractMode.contains(player.getUniqueId())) { Utils.inAddInteractMode.remove(player.getUniqueId()); event.setCancelled(true); try { List<String> interactables = GeneralConfig.getConfig().getConfigNode().getNode("Interactable").getList(TypeToken.of(String.class), new ArrayList<>()); if (interactables.contains(event.getTargetBlock().getState().getType().getId())) { player.sendMessage(Text.of(TextColors.RED, "This block is already added as interactable")); return; } interactables.add(event.getTargetBlock().getState().getType().getId()); GeneralConfig.getConfig().getConfigNode().getNode("Interactable").setValue(interactables); GeneralConfig.getConfig().save(); player.sendMessage(Text.of(TextColors.DARK_AQUA, "id ", TextColors.GOLD, event.getTargetBlock().getState().getType().getName(), TextColors.DARK_AQUA, " has been added to the list of interactable blocks")); } catch (ObjectMappingException e) { e.printStackTrace(); } } if (Utils.inRemoveInteractMode.contains(player.getUniqueId())) { Utils.inRemoveInteractMode.remove(player.getUniqueId()); event.setCancelled(true); try { List<String> interactables = GeneralConfig.getConfig().getConfigNode().getNode("Interactable").getList(TypeToken.of(String.class), new ArrayList<>()); if (!interactables.contains(event.getTargetBlock().getState().getType().getId())) { player.sendMessage(Text.of(TextColors.RED, "This block has not yet been added as interactable")); return; } interactables.remove(event.getTargetBlock().getState().getType().getId()); GeneralConfig.getConfig().getConfigNode().getNode("Interactable").setValue(interactables); GeneralConfig.getConfig().save(); player.sendMessage(Text.of(TextColors.DARK_AQUA, "id ", TextColors.GOLD, event.getTargetBlock().getState().getType().getName(), TextColors.DARK_AQUA, " has been removed from the list of interactable blocks")); } catch (ObjectMappingException e) { e.printStackTrace(); } } if (event.getTargetBlock().getLocation().isPresent()) { Vector3i chunk = event.getTargetBlock().getLocation().get().getChunkPosition(); if (Utils.isClaimed(chunk, worldUUID)) { if (Utils.isProtected(chunk, worldUUID)) { if (player.hasPermission("landprotect.protect.bypass")) { return; } try { List<String> interactables = GeneralConfig.getConfig().getConfigNode().getNode("Interactable").getList(TypeToken.of(String.class), new ArrayList<>()); if (!interactables.contains(event.getTargetBlock().getState().getType().getName())) { event.setCancelled(true); player.sendMessage(Text.of(TextColors.RED, "This land is claimed")); } } catch (ObjectMappingException e) { 
e.printStackTrace(); } } if (Utils.isTrustedToClaim(chunk, player.getUniqueId(), worldUUID)) { return; } if (Utils.getClaimOwner(chunk, worldUUID).isPresent()) { UUID owner = Utils.getClaimOwner(chunk, worldUUID).get(); if (owner.equals(player.getUniqueId())) { return; } else if (Utils.isFriend(owner, player.getUniqueId())) { return; } } if (player.hasPermission("landprotect.claim.bypass")) { return; } event.setCancelled(true); player.sendMessage(Text.of(TextColors.RED, "This land is claimed")); } } } } }
bug fix
src/main/java/com/initianovamc/rysingdragon/landprotect/listeners/InteractBlockListener.java
bug fix
Java
mit
95637438ff859b634f2322a454d6e96d5b4d5fe6
0
madumlao/oxCore,GluuFederation/oxCore
/* * oxCore is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text. * * Copyright (c) 2014, Gluu */package org.xdi.config.oxtrust; import java.io.Serializable; import java.util.List; import org.codehaus.jackson.annotate.JsonIgnoreProperties; /** * oxTrust configuration * * @author Yuriy Movchan * @version 0.1, 05/15/2013 */ @JsonIgnoreProperties(ignoreUnknown = true) public final class ApplicationConfiguration implements Serializable { private static final long serialVersionUID = -8991383390239617013L; private String baseDN; private String orgInum; private String orgIname; private String orgSupportEmail; private String applianceInum; private String applianceUrl; private String baseEndpoint; private String[] personObjectClassTypes; private String personCustomObjectClass; private String[] personObjectClassDisplayNames; private String[] contactObjectClassTypes; private String[] contactObjectClassDisplayNames; private String photoRepositoryRootDir; private int photoRepositoryThumbWidth; private int photoRepositoryThumbHeight; private int photoRepositoryCountLeveles; private int photoRepositoryCountFoldersPerLevel; private String authMode; private String ldifStore; private boolean updateApplianceStatus; private String svnConfigurationStoreRoot; private String svnConfigurationStorePassword; private String keystorePath; private String keystorePassword; private boolean allowPersonModification; private String idpUrl; private String velocityLog; private String spMetadataPath; private String logoLocation; private String idpSecurityKey; private String idpSecurityKeyPassword; private String idpSecurityCert; private String[] gluuSpAttributes; private boolean configGeneration; private String idpLdapProtocol; private String idpLdapServer; private String idpBindDn; private String idpBindPassword; private String idpUserFields; private String gluuSpCert; private String shibboleth3FederationRootDir; private String caCertsLocation; private String caCertsPassphrase; private String tempCertDir; private String certDir; private String servicesRestartTrigger; private boolean persistSVN; private String oxAuthAuthorizeUrl; private String oxAuthRegisterUrl; private String oxAuthTokenUrl; private String oxAuthEndSessionUrl; private String oxAuthLogoutUrl; private String oxAuthTokenValidationUrl; private String oxAuthUserInfo; private String oxAuthSectorIdentifierUrl; private String oxAuthClientId; private String oxAuthClientPassword; private String oxAuthClientScope; private String loginRedirectUrl; private String logoutRedirectUrl; private String[] clusteredInums; private String clientAssociationAttribute; private String oxAuthIssuer; private boolean ignoreValidation; private String umaIssuer; private String umaClientId; private String umaClientKeyId; private String umaResourceId; private String umaScope; private String umaClientKeyStoreFile; private String umaClientKeyStorePassword; private String passportUmaClientId; private String passportUmaClientKeyId; private String passportUmaResourceId; private String passportUmaScope; private String passportUmaClientKeyStoreFile; private String passportUmaClientKeyStorePassword; private String cssLocation; private String jsLocation; private String recaptchaSiteKey; private String recaptchaSecretKey; private boolean scimTestMode; private String scimTestModeAccessToken; private boolean rptConnectionPoolUseConnectionPooling; private int rptConnectionPoolMaxTotal; private int rptConnectionPoolDefaultMaxPerRoute; private int 
rptConnectionPoolValidateAfterInactivity; // In seconds; will be converted to millis private int rptConnectionPoolCustomKeepAliveTimeout; // In seconds; will be converted to millis private boolean oxIncommonFlag; private List<String> clientWhiteList; private List<String> clientBlackList; public boolean isOxIncommonFlag() { return oxIncommonFlag; } public void setOxIncommonFlag(boolean oxIncommonFlag) { this.oxIncommonFlag = oxIncommonFlag; } private String shibbolethVersion; private String shibboleth3IdpRootDir; private String shibboleth3SpConfDir; private String organizationName; private String idp3SigningCert; private String idp3EncryptionCert; public String getBaseDN() { return baseDN; } public void setBaseDN(String baseDN) { this.baseDN = baseDN; } public String getOrgInum() { return orgInum; } public void setOrgInum(String orgInum) { this.orgInum = orgInum; } public String getOrgIname() { return orgIname; } public void setOrgIname(String orgIname) { this.orgIname = orgIname; } public String getOrgSupportEmail() { return orgSupportEmail; } public void setOrgSupportEmail(String orgSupportEmail) { this.orgSupportEmail = orgSupportEmail; } public String getApplianceInum() { return applianceInum; } public void setApplianceInum(String applianceInum) { this.applianceInum = applianceInum; } public String getApplianceUrl() { return applianceUrl; } public void setApplianceUrl(String applianceUrl) { this.applianceUrl = applianceUrl; } public String getBaseEndpoint() { return baseEndpoint; } public void setBaseEndpoint(String baseEndpoint) { this.baseEndpoint = baseEndpoint; } public String[] getPersonObjectClassTypes() { return personObjectClassTypes; } public void setPersonObjectClassTypes(String[] personObjectClassTypes) { this.personObjectClassTypes = personObjectClassTypes; } public String getPersonCustomObjectClass() { return personCustomObjectClass; } public void setPersonCustomObjectClass(String personCustomObjectClass) { this.personCustomObjectClass = personCustomObjectClass; } public String[] getPersonObjectClassDisplayNames() { return personObjectClassDisplayNames; } public void setPersonObjectClassDisplayNames( String[] personObjectClassDisplayNames) { this.personObjectClassDisplayNames = personObjectClassDisplayNames; } public String[] getContactObjectClassTypes() { return contactObjectClassTypes; } public void setContactObjectClassTypes(String[] contactObjectClassTypes) { this.contactObjectClassTypes = contactObjectClassTypes; } public String[] getContactObjectClassDisplayNames() { return contactObjectClassDisplayNames; } public void setContactObjectClassDisplayNames( String[] contactObjectClassDisplayNames) { this.contactObjectClassDisplayNames = contactObjectClassDisplayNames; } public String getPhotoRepositoryRootDir() { return photoRepositoryRootDir; } public void setPhotoRepositoryRootDir(String photoRepositoryRootDir) { this.photoRepositoryRootDir = photoRepositoryRootDir; } public int getPhotoRepositoryThumbWidth() { return photoRepositoryThumbWidth; } public void setPhotoRepositoryThumbWidth(int photoRepositoryThumbWidth) { this.photoRepositoryThumbWidth = photoRepositoryThumbWidth; } public int getPhotoRepositoryThumbHeight() { return photoRepositoryThumbHeight; } public void setPhotoRepositoryThumbHeight(int photoRepositoryThumbHeight) { this.photoRepositoryThumbHeight = photoRepositoryThumbHeight; } public int getPhotoRepositoryCountLeveles() { return photoRepositoryCountLeveles; } public void setPhotoRepositoryCountLeveles(int photoRepositoryCountLeveles) { 
this.photoRepositoryCountLeveles = photoRepositoryCountLeveles; } public int getPhotoRepositoryCountFoldersPerLevel() { return photoRepositoryCountFoldersPerLevel; } public void setPhotoRepositoryCountFoldersPerLevel( int photoRepositoryCountFoldersPerLevel) { this.photoRepositoryCountFoldersPerLevel = photoRepositoryCountFoldersPerLevel; } public String getAuthMode() { return authMode; } public void setAuthMode(String authMode) { this.authMode = authMode; } public String getLdifStore() { return ldifStore; } public void setLdifStore(String ldifStore) { this.ldifStore = ldifStore; } public boolean isUpdateApplianceStatus() { return updateApplianceStatus; } public void setUpdateApplianceStatus(boolean updateApplianceStatus) { this.updateApplianceStatus = updateApplianceStatus; } public String getSvnConfigurationStoreRoot() { return svnConfigurationStoreRoot; } public void setSvnConfigurationStoreRoot(String svnConfigurationStoreRoot) { this.svnConfigurationStoreRoot = svnConfigurationStoreRoot; } public String getSvnConfigurationStorePassword() { return svnConfigurationStorePassword; } public void setSvnConfigurationStorePassword( String svnConfigurationStorePassword) { this.svnConfigurationStorePassword = svnConfigurationStorePassword; } public String getKeystorePath() { return keystorePath; } public void setKeystorePath(String keystorePath) { this.keystorePath = keystorePath; } public String getKeystorePassword() { return keystorePassword; } public void setKeystorePassword(String keystorePassword) { this.keystorePassword = keystorePassword; } public boolean isAllowPersonModification() { return allowPersonModification; } public void setAllowPersonModification(boolean allowPersonModification) { this.allowPersonModification = allowPersonModification; } public String getIdpUrl() { return idpUrl; } public void setIdpUrl(String idpUrl) { this.idpUrl = idpUrl; } public String getVelocityLog() { return velocityLog; } public void setVelocityLog(String velocityLog) { this.velocityLog = velocityLog; } public String getSpMetadataPath() { return spMetadataPath; } public void setSpMetadataPath(String spMetadataPath) { this.spMetadataPath = spMetadataPath; } public String getLogoLocation() { return logoLocation; } public void setLogoLocation(String logoLocation) { this.logoLocation = logoLocation; } public String getIdpSecurityKey() { return idpSecurityKey; } public void setIdpSecurityKey(String idpSecurityKey) { this.idpSecurityKey = idpSecurityKey; } public String getIdpSecurityKeyPassword() { return idpSecurityKeyPassword; } public void setIdpSecurityKeyPassword(String idpSecurityKeyPassword) { this.idpSecurityKeyPassword = idpSecurityKeyPassword; } public String getIdpSecurityCert() { return idpSecurityCert; } public void setIdpSecurityCert(String idpSecurityCert) { this.idpSecurityCert = idpSecurityCert; } public String[] getGluuSpAttributes() { return gluuSpAttributes; } public void setGluuSpAttributes(String[] gluuSpAttributes) { this.gluuSpAttributes = gluuSpAttributes; } public boolean isConfigGeneration() { return configGeneration; } public void setConfigGeneration(boolean configGeneration) { this.configGeneration = configGeneration; } public String getIdpLdapProtocol() { return idpLdapProtocol; } public void setIdpLdapProtocol(String idpLdapProtocol) { this.idpLdapProtocol = idpLdapProtocol; } public String getIdpLdapServer() { return idpLdapServer; } public void setIdpLdapServer(String idpLdapServer) { this.idpLdapServer = idpLdapServer; } public String getIdpBindDn() { return idpBindDn; } 
public void setIdpBindDn(String idpBindDn) { this.idpBindDn = idpBindDn; } public String getIdpBindPassword() { return idpBindPassword; } public void setIdpBindPassword(String idpBindPassword) { this.idpBindPassword = idpBindPassword; } public String getIdpUserFields() { return idpUserFields; } public void setIdpUserFields(String idpUserFields) { this.idpUserFields = idpUserFields; } public String getGluuSpCert() { return gluuSpCert; } public void setGluuSpCert(String gluuSpCert) { this.gluuSpCert = gluuSpCert; } public String getShibboleth3FederationRootDir() { return shibboleth3FederationRootDir; } public void setShibboleth3FederationRootDir(String shibboleth3FederationRootDir) { this.shibboleth3FederationRootDir = shibboleth3FederationRootDir; } public String getCaCertsLocation() { return caCertsLocation; } public void setCaCertsLocation(String caCertsLocation) { this.caCertsLocation = caCertsLocation; } public String getCaCertsPassphrase() { return caCertsPassphrase; } public void setCaCertsPassphrase(String caCertsPassphrase) { this.caCertsPassphrase = caCertsPassphrase; } public String getTempCertDir() { return tempCertDir; } public void setTempCertDir(String tempCertDir) { this.tempCertDir = tempCertDir; } public String getCertDir() { return certDir; } public void setCertDir(String certDir) { this.certDir = certDir; } public String getServicesRestartTrigger() { return servicesRestartTrigger; } public void setServicesRestartTrigger(String servicesRestartTrigger) { this.servicesRestartTrigger = servicesRestartTrigger; } public boolean isPersistSVN() { return persistSVN; } public void setPersistSVN(boolean persistSVN) { this.persistSVN = persistSVN; } public String getOxAuthAuthorizeUrl() { return oxAuthAuthorizeUrl; } public void setOxAuthAuthorizeUrl(String oxAuthAuthorizeUrl) { this.oxAuthAuthorizeUrl = oxAuthAuthorizeUrl; } public String getOxAuthRegisterUrl() { return oxAuthRegisterUrl; } public void setOxAuthRegisterUrl(String oxAuthRegisterUrl) { this.oxAuthRegisterUrl = oxAuthRegisterUrl; } public String getOxAuthTokenUrl() { return oxAuthTokenUrl; } public void setOxAuthTokenUrl(String oxAuthTokenUrl) { this.oxAuthTokenUrl = oxAuthTokenUrl; } public String getOxAuthEndSessionUrl() { return oxAuthEndSessionUrl; } public void setOxAuthEndSessionUrl(String oxAuthEndSessionUrl) { this.oxAuthEndSessionUrl = oxAuthEndSessionUrl; } public String getOxAuthLogoutUrl() { return oxAuthLogoutUrl; } public void setOxAuthLogoutUrl(String oxAuthLogoutUrl) { this.oxAuthLogoutUrl = oxAuthLogoutUrl; } public String getOxAuthTokenValidationUrl() { return oxAuthTokenValidationUrl; } public void setOxAuthTokenValidationUrl(String oxAuthTokenValidationUrl) { this.oxAuthTokenValidationUrl = oxAuthTokenValidationUrl; } public String getOxAuthUserInfo() { return oxAuthUserInfo; } public void setOxAuthUserInfo(String oxAuthUserInfo) { this.oxAuthUserInfo = oxAuthUserInfo; } public String getOxAuthSectorIdentifierUrl() { return oxAuthSectorIdentifierUrl; } public void setOxAuthSectorIdentifierUrl(String oxAuthSectorIdentifierUrl) { this.oxAuthSectorIdentifierUrl = oxAuthSectorIdentifierUrl; } public String getOxAuthClientId() { return oxAuthClientId; } public void setOxAuthClientId(String oxAuthClientId) { this.oxAuthClientId = oxAuthClientId; } public String getOxAuthClientPassword() { return oxAuthClientPassword; } public void setOxAuthClientPassword(String oxAuthClientPassword) { this.oxAuthClientPassword = oxAuthClientPassword; } public String getOxAuthClientScope() { return oxAuthClientScope; } 
public void setOxAuthClientScope(String oxAuthClientScope) { this.oxAuthClientScope = oxAuthClientScope; } public String getLoginRedirectUrl() { return loginRedirectUrl; } public void setLoginRedirectUrl(String loginRedirectUrl) { this.loginRedirectUrl = loginRedirectUrl; } public String getLogoutRedirectUrl() { return logoutRedirectUrl; } public void setLogoutRedirectUrl(String logoutRedirectUrl) { this.logoutRedirectUrl = logoutRedirectUrl; } public String[] getClusteredInums() { return clusteredInums; } public void setClusteredInums(String[] clusteredInums) { this.clusteredInums = clusteredInums; } public String getClientAssociationAttribute() { return clientAssociationAttribute; } public void setClientAssociationAttribute(String clientAssociationAttribute) { this.clientAssociationAttribute = clientAssociationAttribute; } public String getOxAuthIssuer() { return oxAuthIssuer; } public void setOxAuthIssuer(String oxAuthIssuer) { this.oxAuthIssuer = oxAuthIssuer; } public boolean isIgnoreValidation() { return ignoreValidation; } public void setIgnoreValidation(boolean ignoreValidation) { this.ignoreValidation = ignoreValidation; } public String getUmaIssuer() { return umaIssuer; } public void setUmaIssuer(String umaIssuer) { this.umaIssuer = umaIssuer; } public String getUmaClientId() { return umaClientId; } public void setUmaClientId(String umaClientId) { this.umaClientId = umaClientId; } public String getUmaClientKeyId() { return umaClientKeyId; } public void setUmaClientKeyId(String umaClientKeyId) { this.umaClientKeyId = umaClientKeyId; } public String getUmaResourceId() { return umaResourceId; } public void setUmaResourceId(String umaResourceId) { this.umaResourceId = umaResourceId; } public String getUmaScope() { return umaScope; } public void setUmaScope(String umaScope) { this.umaScope = umaScope; } public String getUmaClientKeyStoreFile() { return umaClientKeyStoreFile; } public void setUmaClientKeyStoreFile(String umaClientKeyStoreFile) { this.umaClientKeyStoreFile = umaClientKeyStoreFile; } public String getUmaClientKeyStorePassword() { return umaClientKeyStorePassword; } public void setUmaClientKeyStorePassword(String umaClientKeyStorePassword) { this.umaClientKeyStorePassword = umaClientKeyStorePassword; } public String getPassportUmaClientId() { return passportUmaClientId; } public void setPassportUmaClientId(String passportUmaClientId) { this.passportUmaClientId = passportUmaClientId; } public String getPassportUmaClientKeyId() { return passportUmaClientKeyId; } public void setPassportUmaClientKeyId(String passportUmaClientKeyId) { this.passportUmaClientKeyId = passportUmaClientKeyId; } public String getPassportUmaResourceId() { return passportUmaResourceId; } public void setPassportUmaResourceId(String passportUmaResourceId) { this.passportUmaResourceId = passportUmaResourceId; } public String getPassportUmaScope() { return passportUmaScope; } public void setPassportUmaScope(String passportUmaScope) { this.passportUmaScope = passportUmaScope; } public String getPassportUmaClientKeyStoreFile() { return passportUmaClientKeyStoreFile; } public void setPassportUmaClientKeyStoreFile(String passportUmaClientKeyStoreFile) { this.passportUmaClientKeyStoreFile = passportUmaClientKeyStoreFile; } public String getPassportUmaClientKeyStorePassword() { return passportUmaClientKeyStorePassword; } public void setPassportUmaClientKeyStorePassword(String passportUmaClientKeyStorePassword) { this.passportUmaClientKeyStorePassword = passportUmaClientKeyStorePassword; } public String 
getCssLocation() { return cssLocation; } public void setCssLocation(String cssLocation) { this.cssLocation = cssLocation; } public String getJsLocation() { return jsLocation; } public void setJsLocation(String jsLocation) { this.jsLocation = jsLocation; } public String getRecaptchaSiteKey() { return recaptchaSiteKey; } public void setRecaptchaSiteKey(String recaptchaSiteKey) { this.recaptchaSiteKey = recaptchaSiteKey; } public String getRecaptchaSecretKey() { return recaptchaSecretKey; } public void setRecaptchaSecretKey(String recaptchaSecretKey) { this.recaptchaSecretKey = recaptchaSecretKey; } public boolean isScimTestMode() { return scimTestMode; } public void setScimTestMode(boolean scimTestMode) { this.scimTestMode = scimTestMode; } public String getScimTestModeAccessToken() { return scimTestModeAccessToken; } public void setScimTestModeAccessToken(String scimTestModeAccessToken) { this.scimTestModeAccessToken = scimTestModeAccessToken; } public boolean isRptConnectionPoolUseConnectionPooling() { return rptConnectionPoolUseConnectionPooling; } public void setRptConnectionPoolUseConnectionPooling(boolean rptConnectionPoolUseConnectionPooling) { this.rptConnectionPoolUseConnectionPooling = rptConnectionPoolUseConnectionPooling; } public int getRptConnectionPoolMaxTotal() { return rptConnectionPoolMaxTotal; } public void setRptConnectionPoolMaxTotal(int rptConnectionPoolMaxTotal) { this.rptConnectionPoolMaxTotal = rptConnectionPoolMaxTotal; } public int getRptConnectionPoolDefaultMaxPerRoute() { return rptConnectionPoolDefaultMaxPerRoute; } public void setRptConnectionPoolDefaultMaxPerRoute(int rptConnectionPoolDefaultMaxPerRoute) { this.rptConnectionPoolDefaultMaxPerRoute = rptConnectionPoolDefaultMaxPerRoute; } public int getRptConnectionPoolValidateAfterInactivity() { return rptConnectionPoolValidateAfterInactivity; } public void setRptConnectionPoolValidateAfterInactivity(int rptConnectionPoolValidateAfterInactivity) { this.rptConnectionPoolValidateAfterInactivity = rptConnectionPoolValidateAfterInactivity; } public int getRptConnectionPoolCustomKeepAliveTimeout() { return rptConnectionPoolCustomKeepAliveTimeout; } public void setRptConnectionPoolCustomKeepAliveTimeout(int rptConnectionPoolCustomKeepAliveTimeout) { this.rptConnectionPoolCustomKeepAliveTimeout = rptConnectionPoolCustomKeepAliveTimeout; } public String getShibbolethVersion() { return shibbolethVersion; } public void setShibbolethVersion(String shibbolethVersion) { this.shibbolethVersion = shibbolethVersion; } public String getShibboleth3IdpRootDir() { return shibboleth3IdpRootDir; } public void setShibboleth3IdpRootDir(String shibboleth3IdpRootDir) { this.shibboleth3IdpRootDir = shibboleth3IdpRootDir; } public String getShibboleth3SpConfDir() { return shibboleth3SpConfDir; } public void setShibboleth3SpConfDir(String shibboleth3SpConfDir) { this.shibboleth3SpConfDir = shibboleth3SpConfDir; } public String getOrganizationName() { return organizationName; } public void setOrganizationName(String organizationName) { this.organizationName = organizationName; } public String getIdp3SigningCert() { return idp3SigningCert; } public void setIdp3SigningCert(String idp3SigningCert) { this.idp3SigningCert = idp3SigningCert; } public String getIdp3EncryptionCert() { return idp3EncryptionCert; } public void setIdp3EncryptionCert(String idp3EncryptionCert) { this.idp3EncryptionCert = idp3EncryptionCert; } public List<String> getClientWhiteList() { return clientWhiteList; } public void setClientWhiteList(List<String> 
clientWhiteList) { this.clientWhiteList = clientWhiteList; } public List<String> getClientBlackList() { return clientBlackList; } public void setClientBlackList(List<String> clientBlackList) { this.clientBlackList = clientBlackList; } }
oxService/src/main/java/org/xdi/config/oxtrust/ApplicationConfiguration.java
/* * oxCore is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text. * * Copyright (c) 2014, Gluu */package org.xdi.config.oxtrust; import java.io.Serializable; import java.util.List; import org.codehaus.jackson.annotate.JsonIgnoreProperties; /** * oxTrust configuration * * @author Yuriy Movchan * @version 0.1, 05/15/2013 */ @JsonIgnoreProperties(ignoreUnknown = true) public final class ApplicationConfiguration implements Serializable { private static final long serialVersionUID = -8991383390239617013L; private String baseDN; private String orgInum; private String orgIname; private String orgSupportEmail; private String applianceInum; private String applianceUrl; private String baseEndpoint; private String schemaAddObjectClassWithoutAttributeTypesDefinition; private String schemaAddObjectClassWithAttributeTypesDefinition; private String[] personObjectClassTypes; private String personCustomObjectClass; private String[] personObjectClassDisplayNames; private String schemaAddAttributeDefinition; private String[] contactObjectClassTypes; private String[] contactObjectClassDisplayNames; private String photoRepositoryRootDir; private int photoRepositoryThumbWidth; private int photoRepositoryThumbHeight; private int photoRepositoryCountLeveles; private int photoRepositoryCountFoldersPerLevel; private String authMode; private String ldifStore; private boolean updateApplianceStatus; private String svnConfigurationStoreRoot; private String svnConfigurationStorePassword; private String keystorePath; private String keystorePassword; private boolean allowPersonModification; private String idpUrl; private String velocityLog; private String spMetadataPath; private String logoLocation; private String idpSecurityKey; private String idpSecurityKeyPassword; private String idpSecurityCert; private String[] gluuSpAttributes; private boolean configGeneration; private String idpLdapProtocol; private String idpLdapServer; private String idpBindDn; private String idpBindPassword; private String idpUserFields; private String gluuSpCert; private String shibboleth3FederationRootDir; private String caCertsLocation; private String caCertsPassphrase; private String tempCertDir; private String certDir; private String servicesRestartTrigger; private boolean persistSVN; private String oxAuthAuthorizeUrl; private String oxAuthRegisterUrl; private String oxAuthTokenUrl; private String oxAuthEndSessionUrl; private String oxAuthLogoutUrl; private String oxAuthTokenValidationUrl; private String oxAuthUserInfo; private String oxAuthSectorIdentifierUrl; private String oxAuthClientId; private String oxAuthClientPassword; private String oxAuthClientScope; private String loginRedirectUrl; private String logoutRedirectUrl; private String[] clusteredInums; private String clientAssociationAttribute; private String oxAuthIssuer; private boolean ignoreValidation; private String umaIssuer; private String umaClientId; private String umaClientKeyId; private String umaResourceId; private String umaScope; private String umaClientKeyStoreFile; private String umaClientKeyStorePassword; private String passportUmaClientId; private String passportUmaClientKeyId; private String passportUmaResourceId; private String passportUmaScope; private String passportUmaClientKeyStoreFile; private String passportUmaClientKeyStorePassword; private String cssLocation; private String jsLocation; private String recaptchaSiteKey; private String recaptchaSecretKey; private boolean scimTestMode; private String 
scimTestModeAccessToken; private boolean rptConnectionPoolUseConnectionPooling; private int rptConnectionPoolMaxTotal; private int rptConnectionPoolDefaultMaxPerRoute; private int rptConnectionPoolValidateAfterInactivity; // In seconds; will be converted to millis private int rptConnectionPoolCustomKeepAliveTimeout; // In seconds; will be converted to millis private boolean oxIncommonFlag; private List<String> clientWhiteList; private List<String> clientBlackList; public boolean isOxIncommonFlag() { return oxIncommonFlag; } public void setOxIncommonFlag(boolean oxIncommonFlag) { this.oxIncommonFlag = oxIncommonFlag; } private String shibbolethVersion; private String shibboleth3IdpRootDir; private String shibboleth3SpConfDir; private String organizationName; private String idp3SigningCert; private String idp3EncryptionCert; public String getBaseDN() { return baseDN; } public void setBaseDN(String baseDN) { this.baseDN = baseDN; } public String getOrgInum() { return orgInum; } public void setOrgInum(String orgInum) { this.orgInum = orgInum; } public String getOrgIname() { return orgIname; } public void setOrgIname(String orgIname) { this.orgIname = orgIname; } public String getOrgSupportEmail() { return orgSupportEmail; } public void setOrgSupportEmail(String orgSupportEmail) { this.orgSupportEmail = orgSupportEmail; } public String getApplianceInum() { return applianceInum; } public void setApplianceInum(String applianceInum) { this.applianceInum = applianceInum; } public String getApplianceUrl() { return applianceUrl; } public void setApplianceUrl(String applianceUrl) { this.applianceUrl = applianceUrl; } public String getBaseEndpoint() { return baseEndpoint; } public void setBaseEndpoint(String baseEndpoint) { this.baseEndpoint = baseEndpoint; } public String getSchemaAddObjectClassWithoutAttributeTypesDefinition() { return schemaAddObjectClassWithoutAttributeTypesDefinition; } public void setSchemaAddObjectClassWithoutAttributeTypesDefinition( String schemaAddObjectClassWithoutAttributeTypesDefinition) { this.schemaAddObjectClassWithoutAttributeTypesDefinition = schemaAddObjectClassWithoutAttributeTypesDefinition; } public String getSchemaAddObjectClassWithAttributeTypesDefinition() { return schemaAddObjectClassWithAttributeTypesDefinition; } public void setSchemaAddObjectClassWithAttributeTypesDefinition( String schemaAddObjectClassWithAttributeTypesDefinition) { this.schemaAddObjectClassWithAttributeTypesDefinition = schemaAddObjectClassWithAttributeTypesDefinition; } public String[] getPersonObjectClassTypes() { return personObjectClassTypes; } public void setPersonObjectClassTypes(String[] personObjectClassTypes) { this.personObjectClassTypes = personObjectClassTypes; } public String getPersonCustomObjectClass() { return personCustomObjectClass; } public void setPersonCustomObjectClass(String personCustomObjectClass) { this.personCustomObjectClass = personCustomObjectClass; } public String[] getPersonObjectClassDisplayNames() { return personObjectClassDisplayNames; } public void setPersonObjectClassDisplayNames( String[] personObjectClassDisplayNames) { this.personObjectClassDisplayNames = personObjectClassDisplayNames; } public String getSchemaAddAttributeDefinition() { return schemaAddAttributeDefinition; } public void setSchemaAddAttributeDefinition( String schemaAddAttributeDefinition) { this.schemaAddAttributeDefinition = schemaAddAttributeDefinition; } public String[] getContactObjectClassTypes() { return contactObjectClassTypes; } public void 
setContactObjectClassTypes(String[] contactObjectClassTypes) { this.contactObjectClassTypes = contactObjectClassTypes; } public String[] getContactObjectClassDisplayNames() { return contactObjectClassDisplayNames; } public void setContactObjectClassDisplayNames( String[] contactObjectClassDisplayNames) { this.contactObjectClassDisplayNames = contactObjectClassDisplayNames; } public String getPhotoRepositoryRootDir() { return photoRepositoryRootDir; } public void setPhotoRepositoryRootDir(String photoRepositoryRootDir) { this.photoRepositoryRootDir = photoRepositoryRootDir; } public int getPhotoRepositoryThumbWidth() { return photoRepositoryThumbWidth; } public void setPhotoRepositoryThumbWidth(int photoRepositoryThumbWidth) { this.photoRepositoryThumbWidth = photoRepositoryThumbWidth; } public int getPhotoRepositoryThumbHeight() { return photoRepositoryThumbHeight; } public void setPhotoRepositoryThumbHeight(int photoRepositoryThumbHeight) { this.photoRepositoryThumbHeight = photoRepositoryThumbHeight; } public int getPhotoRepositoryCountLeveles() { return photoRepositoryCountLeveles; } public void setPhotoRepositoryCountLeveles(int photoRepositoryCountLeveles) { this.photoRepositoryCountLeveles = photoRepositoryCountLeveles; } public int getPhotoRepositoryCountFoldersPerLevel() { return photoRepositoryCountFoldersPerLevel; } public void setPhotoRepositoryCountFoldersPerLevel( int photoRepositoryCountFoldersPerLevel) { this.photoRepositoryCountFoldersPerLevel = photoRepositoryCountFoldersPerLevel; } public String getAuthMode() { return authMode; } public void setAuthMode(String authMode) { this.authMode = authMode; } public String getLdifStore() { return ldifStore; } public void setLdifStore(String ldifStore) { this.ldifStore = ldifStore; } public boolean isUpdateApplianceStatus() { return updateApplianceStatus; } public void setUpdateApplianceStatus(boolean updateApplianceStatus) { this.updateApplianceStatus = updateApplianceStatus; } public String getSvnConfigurationStoreRoot() { return svnConfigurationStoreRoot; } public void setSvnConfigurationStoreRoot(String svnConfigurationStoreRoot) { this.svnConfigurationStoreRoot = svnConfigurationStoreRoot; } public String getSvnConfigurationStorePassword() { return svnConfigurationStorePassword; } public void setSvnConfigurationStorePassword( String svnConfigurationStorePassword) { this.svnConfigurationStorePassword = svnConfigurationStorePassword; } public String getKeystorePath() { return keystorePath; } public void setKeystorePath(String keystorePath) { this.keystorePath = keystorePath; } public String getKeystorePassword() { return keystorePassword; } public void setKeystorePassword(String keystorePassword) { this.keystorePassword = keystorePassword; } public boolean isAllowPersonModification() { return allowPersonModification; } public void setAllowPersonModification(boolean allowPersonModification) { this.allowPersonModification = allowPersonModification; } public String getIdpUrl() { return idpUrl; } public void setIdpUrl(String idpUrl) { this.idpUrl = idpUrl; } public String getVelocityLog() { return velocityLog; } public void setVelocityLog(String velocityLog) { this.velocityLog = velocityLog; } public String getSpMetadataPath() { return spMetadataPath; } public void setSpMetadataPath(String spMetadataPath) { this.spMetadataPath = spMetadataPath; } public String getLogoLocation() { return logoLocation; } public void setLogoLocation(String logoLocation) { this.logoLocation = logoLocation; } public String getIdpSecurityKey() { return 
idpSecurityKey; } public void setIdpSecurityKey(String idpSecurityKey) { this.idpSecurityKey = idpSecurityKey; } public String getIdpSecurityKeyPassword() { return idpSecurityKeyPassword; } public void setIdpSecurityKeyPassword(String idpSecurityKeyPassword) { this.idpSecurityKeyPassword = idpSecurityKeyPassword; } public String getIdpSecurityCert() { return idpSecurityCert; } public void setIdpSecurityCert(String idpSecurityCert) { this.idpSecurityCert = idpSecurityCert; } public String[] getGluuSpAttributes() { return gluuSpAttributes; } public void setGluuSpAttributes(String[] gluuSpAttributes) { this.gluuSpAttributes = gluuSpAttributes; } public boolean isConfigGeneration() { return configGeneration; } public void setConfigGeneration(boolean configGeneration) { this.configGeneration = configGeneration; } public String getIdpLdapProtocol() { return idpLdapProtocol; } public void setIdpLdapProtocol(String idpLdapProtocol) { this.idpLdapProtocol = idpLdapProtocol; } public String getIdpLdapServer() { return idpLdapServer; } public void setIdpLdapServer(String idpLdapServer) { this.idpLdapServer = idpLdapServer; } public String getIdpBindDn() { return idpBindDn; } public void setIdpBindDn(String idpBindDn) { this.idpBindDn = idpBindDn; } public String getIdpBindPassword() { return idpBindPassword; } public void setIdpBindPassword(String idpBindPassword) { this.idpBindPassword = idpBindPassword; } public String getIdpUserFields() { return idpUserFields; } public void setIdpUserFields(String idpUserFields) { this.idpUserFields = idpUserFields; } public String getGluuSpCert() { return gluuSpCert; } public void setGluuSpCert(String gluuSpCert) { this.gluuSpCert = gluuSpCert; } public String getShibboleth3FederationRootDir() { return shibboleth3FederationRootDir; } public void setShibboleth3FederationRootDir(String shibboleth3FederationRootDir) { this.shibboleth3FederationRootDir = shibboleth3FederationRootDir; } public String getCaCertsLocation() { return caCertsLocation; } public void setCaCertsLocation(String caCertsLocation) { this.caCertsLocation = caCertsLocation; } public String getCaCertsPassphrase() { return caCertsPassphrase; } public void setCaCertsPassphrase(String caCertsPassphrase) { this.caCertsPassphrase = caCertsPassphrase; } public String getTempCertDir() { return tempCertDir; } public void setTempCertDir(String tempCertDir) { this.tempCertDir = tempCertDir; } public String getCertDir() { return certDir; } public void setCertDir(String certDir) { this.certDir = certDir; } public String getServicesRestartTrigger() { return servicesRestartTrigger; } public void setServicesRestartTrigger(String servicesRestartTrigger) { this.servicesRestartTrigger = servicesRestartTrigger; } public boolean isPersistSVN() { return persistSVN; } public void setPersistSVN(boolean persistSVN) { this.persistSVN = persistSVN; } public String getOxAuthAuthorizeUrl() { return oxAuthAuthorizeUrl; } public void setOxAuthAuthorizeUrl(String oxAuthAuthorizeUrl) { this.oxAuthAuthorizeUrl = oxAuthAuthorizeUrl; } public String getOxAuthRegisterUrl() { return oxAuthRegisterUrl; } public void setOxAuthRegisterUrl(String oxAuthRegisterUrl) { this.oxAuthRegisterUrl = oxAuthRegisterUrl; } public String getOxAuthTokenUrl() { return oxAuthTokenUrl; } public void setOxAuthTokenUrl(String oxAuthTokenUrl) { this.oxAuthTokenUrl = oxAuthTokenUrl; } public String getOxAuthEndSessionUrl() { return oxAuthEndSessionUrl; } public void setOxAuthEndSessionUrl(String oxAuthEndSessionUrl) { this.oxAuthEndSessionUrl = 
oxAuthEndSessionUrl; } public String getOxAuthLogoutUrl() { return oxAuthLogoutUrl; } public void setOxAuthLogoutUrl(String oxAuthLogoutUrl) { this.oxAuthLogoutUrl = oxAuthLogoutUrl; } public String getOxAuthTokenValidationUrl() { return oxAuthTokenValidationUrl; } public void setOxAuthTokenValidationUrl(String oxAuthTokenValidationUrl) { this.oxAuthTokenValidationUrl = oxAuthTokenValidationUrl; } public String getOxAuthUserInfo() { return oxAuthUserInfo; } public void setOxAuthUserInfo(String oxAuthUserInfo) { this.oxAuthUserInfo = oxAuthUserInfo; } public String getOxAuthSectorIdentifierUrl() { return oxAuthSectorIdentifierUrl; } public void setOxAuthSectorIdentifierUrl(String oxAuthSectorIdentifierUrl) { this.oxAuthSectorIdentifierUrl = oxAuthSectorIdentifierUrl; } public String getOxAuthClientId() { return oxAuthClientId; } public void setOxAuthClientId(String oxAuthClientId) { this.oxAuthClientId = oxAuthClientId; } public String getOxAuthClientPassword() { return oxAuthClientPassword; } public void setOxAuthClientPassword(String oxAuthClientPassword) { this.oxAuthClientPassword = oxAuthClientPassword; } public String getOxAuthClientScope() { return oxAuthClientScope; } public void setOxAuthClientScope(String oxAuthClientScope) { this.oxAuthClientScope = oxAuthClientScope; } public String getLoginRedirectUrl() { return loginRedirectUrl; } public void setLoginRedirectUrl(String loginRedirectUrl) { this.loginRedirectUrl = loginRedirectUrl; } public String getLogoutRedirectUrl() { return logoutRedirectUrl; } public void setLogoutRedirectUrl(String logoutRedirectUrl) { this.logoutRedirectUrl = logoutRedirectUrl; } public String[] getClusteredInums() { return clusteredInums; } public void setClusteredInums(String[] clusteredInums) { this.clusteredInums = clusteredInums; } public String getClientAssociationAttribute() { return clientAssociationAttribute; } public void setClientAssociationAttribute(String clientAssociationAttribute) { this.clientAssociationAttribute = clientAssociationAttribute; } public String getOxAuthIssuer() { return oxAuthIssuer; } public void setOxAuthIssuer(String oxAuthIssuer) { this.oxAuthIssuer = oxAuthIssuer; } public boolean isIgnoreValidation() { return ignoreValidation; } public void setIgnoreValidation(boolean ignoreValidation) { this.ignoreValidation = ignoreValidation; } public String getUmaIssuer() { return umaIssuer; } public void setUmaIssuer(String umaIssuer) { this.umaIssuer = umaIssuer; } public String getUmaClientId() { return umaClientId; } public void setUmaClientId(String umaClientId) { this.umaClientId = umaClientId; } public String getUmaClientKeyId() { return umaClientKeyId; } public void setUmaClientKeyId(String umaClientKeyId) { this.umaClientKeyId = umaClientKeyId; } public String getUmaResourceId() { return umaResourceId; } public void setUmaResourceId(String umaResourceId) { this.umaResourceId = umaResourceId; } public String getUmaScope() { return umaScope; } public void setUmaScope(String umaScope) { this.umaScope = umaScope; } public String getUmaClientKeyStoreFile() { return umaClientKeyStoreFile; } public void setUmaClientKeyStoreFile(String umaClientKeyStoreFile) { this.umaClientKeyStoreFile = umaClientKeyStoreFile; } public String getUmaClientKeyStorePassword() { return umaClientKeyStorePassword; } public void setUmaClientKeyStorePassword(String umaClientKeyStorePassword) { this.umaClientKeyStorePassword = umaClientKeyStorePassword; } public String getPassportUmaClientId() { return passportUmaClientId; } public void 
setPassportUmaClientId(String passportUmaClientId) { this.passportUmaClientId = passportUmaClientId; } public String getPassportUmaClientKeyId() { return passportUmaClientKeyId; } public void setPassportUmaClientKeyId(String passportUmaClientKeyId) { this.passportUmaClientKeyId = passportUmaClientKeyId; } public String getPassportUmaResourceId() { return passportUmaResourceId; } public void setPassportUmaResourceId(String passportUmaResourceId) { this.passportUmaResourceId = passportUmaResourceId; } public String getPassportUmaScope() { return passportUmaScope; } public void setPassportUmaScope(String passportUmaScope) { this.passportUmaScope = passportUmaScope; } public String getPassportUmaClientKeyStoreFile() { return passportUmaClientKeyStoreFile; } public void setPassportUmaClientKeyStoreFile(String passportUmaClientKeyStoreFile) { this.passportUmaClientKeyStoreFile = passportUmaClientKeyStoreFile; } public String getPassportUmaClientKeyStorePassword() { return passportUmaClientKeyStorePassword; } public void setPassportUmaClientKeyStorePassword(String passportUmaClientKeyStorePassword) { this.passportUmaClientKeyStorePassword = passportUmaClientKeyStorePassword; } public String getCssLocation() { return cssLocation; } public void setCssLocation(String cssLocation) { this.cssLocation = cssLocation; } public String getJsLocation() { return jsLocation; } public void setJsLocation(String jsLocation) { this.jsLocation = jsLocation; } public String getRecaptchaSiteKey() { return recaptchaSiteKey; } public void setRecaptchaSiteKey(String recaptchaSiteKey) { this.recaptchaSiteKey = recaptchaSiteKey; } public String getRecaptchaSecretKey() { return recaptchaSecretKey; } public void setRecaptchaSecretKey(String recaptchaSecretKey) { this.recaptchaSecretKey = recaptchaSecretKey; } public boolean isScimTestMode() { return scimTestMode; } public void setScimTestMode(boolean scimTestMode) { this.scimTestMode = scimTestMode; } public String getScimTestModeAccessToken() { return scimTestModeAccessToken; } public void setScimTestModeAccessToken(String scimTestModeAccessToken) { this.scimTestModeAccessToken = scimTestModeAccessToken; } public boolean isRptConnectionPoolUseConnectionPooling() { return rptConnectionPoolUseConnectionPooling; } public void setRptConnectionPoolUseConnectionPooling(boolean rptConnectionPoolUseConnectionPooling) { this.rptConnectionPoolUseConnectionPooling = rptConnectionPoolUseConnectionPooling; } public int getRptConnectionPoolMaxTotal() { return rptConnectionPoolMaxTotal; } public void setRptConnectionPoolMaxTotal(int rptConnectionPoolMaxTotal) { this.rptConnectionPoolMaxTotal = rptConnectionPoolMaxTotal; } public int getRptConnectionPoolDefaultMaxPerRoute() { return rptConnectionPoolDefaultMaxPerRoute; } public void setRptConnectionPoolDefaultMaxPerRoute(int rptConnectionPoolDefaultMaxPerRoute) { this.rptConnectionPoolDefaultMaxPerRoute = rptConnectionPoolDefaultMaxPerRoute; } public int getRptConnectionPoolValidateAfterInactivity() { return rptConnectionPoolValidateAfterInactivity; } public void setRptConnectionPoolValidateAfterInactivity(int rptConnectionPoolValidateAfterInactivity) { this.rptConnectionPoolValidateAfterInactivity = rptConnectionPoolValidateAfterInactivity; } public int getRptConnectionPoolCustomKeepAliveTimeout() { return rptConnectionPoolCustomKeepAliveTimeout; } public void setRptConnectionPoolCustomKeepAliveTimeout(int rptConnectionPoolCustomKeepAliveTimeout) { this.rptConnectionPoolCustomKeepAliveTimeout = rptConnectionPoolCustomKeepAliveTimeout; } 
public String getShibbolethVersion() { return shibbolethVersion; } public void setShibbolethVersion(String shibbolethVersion) { this.shibbolethVersion = shibbolethVersion; } public String getShibboleth3IdpRootDir() { return shibboleth3IdpRootDir; } public void setShibboleth3IdpRootDir(String shibboleth3IdpRootDir) { this.shibboleth3IdpRootDir = shibboleth3IdpRootDir; } public String getShibboleth3SpConfDir() { return shibboleth3SpConfDir; } public void setShibboleth3SpConfDir(String shibboleth3SpConfDir) { this.shibboleth3SpConfDir = shibboleth3SpConfDir; } public String getOrganizationName() { return organizationName; } public void setOrganizationName(String organizationName) { this.organizationName = organizationName; } public String getIdp3SigningCert() { return idp3SigningCert; } public void setIdp3SigningCert(String idp3SigningCert) { this.idp3SigningCert = idp3SigningCert; } public String getIdp3EncryptionCert() { return idp3EncryptionCert; } public void setIdp3EncryptionCert(String idp3EncryptionCert) { this.idp3EncryptionCert = idp3EncryptionCert; } public List<String> getClientWhiteList() { return clientWhiteList; } public void setClientWhiteList(List<String> clientWhiteList) { this.clientWhiteList = clientWhiteList; } public List<String> getClientBlackList() { return clientBlackList; } public void setClientBlackList(List<String> clientBlackList) { this.clientBlackList = clientBlackList; } }
Remove deprecated properties
oxService/src/main/java/org/xdi/config/oxtrust/ApplicationConfiguration.java
Remove deprecated properties
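A minimal sketch of how the configuration class in the record above is typically consumed, hedged because the record itself only shows the class: the org.codehaus.jackson import suggests Jackson 1.x binding, and since ApplicationConfiguration is annotated with @JsonIgnoreProperties(ignoreUnknown = true), a stored configuration document that still carries the removed schemaAdd* properties should keep loading, with the stale keys silently dropped. The JSON values and the loader class name below are assumptions made only for illustration.

    // Hypothetical loader sketch (assumed setup, not part of the recorded commit):
    // demonstrates that an old config containing a removed property still deserializes,
    // because unknown JSON properties are ignored by the annotation on the class.
    import java.io.IOException;
    import org.codehaus.jackson.map.ObjectMapper;          // Jackson 1.x, matching the record's import
    import org.xdi.config.oxtrust.ApplicationConfiguration;

    public class ConfigLoadSketch {
        public static void main(String[] args) throws IOException {
            String json = "{"
                    + "\"baseDN\": \"o=gluu\","
                    + "\"orgInum\": \"@!1111\","
                    + "\"schemaAddAttributeDefinition\": \"removed field, silently ignored\""
                    + "}";
            ApplicationConfiguration conf =
                    new ObjectMapper().readValue(json, ApplicationConfiguration.class);
            System.out.println(conf.getBaseDN());   // prints o=gluu
            System.out.println(conf.getOrgInum());  // prints @!1111
        }
    }

The same mechanism is what makes removing deprecated properties a low-risk change: old configuration documents do not need to be migrated before the new class is deployed.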
Java
mit
549eef78eef80137dfc127e7cede9ce93c1c9384
0
fluttercommunity/flutter_contacts,fluttercommunity/flutter_contacts,fluttercommunity/flutter_contacts,fluttercommunity/flutter_contacts
package flutter.plugins.contactsservice.contactsservice; import io.flutter.plugin.common.MethodChannel; import io.flutter.plugin.common.MethodChannel.MethodCallHandler; import io.flutter.plugin.common.MethodChannel.Result; import io.flutter.plugin.common.MethodCall; import io.flutter.plugin.common.PluginRegistry.Registrar; import android.annotation.TargetApi; import android.content.ContentProviderOperation; import android.content.ContentResolver; import android.content.ContentUris; import android.content.OperationApplicationException; import android.database.Cursor; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.RemoteException; import android.provider.ContactsContract; import android.text.TextUtils; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import static android.provider.ContactsContract.CommonDataKinds; import static android.provider.ContactsContract.CommonDataKinds.Email; import static android.provider.ContactsContract.CommonDataKinds.Organization; import static android.provider.ContactsContract.CommonDataKinds.Phone; import static android.provider.ContactsContract.CommonDataKinds.StructuredName; import static android.provider.ContactsContract.CommonDataKinds.StructuredPostal; @TargetApi(Build.VERSION_CODES.ECLAIR) public class ContactsServicePlugin implements MethodCallHandler { ContactsServicePlugin(ContentResolver contentResolver){ this.contentResolver = contentResolver; } private final ContentResolver contentResolver; public static void registerWith(Registrar registrar) { final MethodChannel channel = new MethodChannel(registrar.messenger(), "github.com/clovisnicolas/flutter_contacts"); channel.setMethodCallHandler(new ContactsServicePlugin(registrar.context().getContentResolver())); } @Override public void onMethodCall(MethodCall call, Result result) { switch(call.method){ case "getContacts": this.getContacts((String)call.argument("query"), (boolean)call.argument("withThumbnails"), result); break; case "addContact": Contact c = Contact.fromMap((HashMap)call.arguments); if(this.addContact(c)) { result.success(null); } else{ result.error(null, "Failed to add the contact", null); } break; case "deleteContact": Contact ct = Contact.fromMap((HashMap)call.arguments); if(this.deleteContact(ct)){ result.success(null); } else{ result.error(null, "Failed to delete the contact, make sure it has a valid identifier", null); } break; case "updateContact": Contact ct1 = Contact.fromMap((HashMap)call.arguments); if(this.updateContact(ct1)) { result.success(null); } else { result.error(null, "Failed to update the contact, make sure it has a valid identifier", null); } break; default: result.notImplemented(); break; } } private static final String[] PROJECTION = { ContactsContract.Data.CONTACT_ID, ContactsContract.Profile.DISPLAY_NAME, ContactsContract.Contacts.Data.MIMETYPE, StructuredName.DISPLAY_NAME, StructuredName.GIVEN_NAME, StructuredName.MIDDLE_NAME, StructuredName.FAMILY_NAME, StructuredName.PREFIX, StructuredName.SUFFIX, Phone.NUMBER, Phone.TYPE, Phone.LABEL, Email.DATA, Email.ADDRESS, Email.TYPE, Email.LABEL, Organization.COMPANY, Organization.TITLE, StructuredPostal.FORMATTED_ADDRESS, StructuredPostal.TYPE, StructuredPostal.LABEL, StructuredPostal.STREET, StructuredPostal.POBOX, StructuredPostal.NEIGHBORHOOD, StructuredPostal.CITY, StructuredPostal.REGION, StructuredPostal.POSTCODE, StructuredPostal.COUNTRY, }; @TargetApi(Build.VERSION_CODES.ECLAIR) private void getContacts(String query, boolean 
withThumbnails, Result result) { new GetContactsTask(result, withThumbnails).execute(new String[] {query}); } @TargetApi(Build.VERSION_CODES.CUPCAKE) private class GetContactsTask extends AsyncTask<String, Void, ArrayList<HashMap>> { private Result getContactResult; private boolean withThumbnails; public GetContactsTask(Result result, boolean withThumbnails){ this.getContactResult = result; this.withThumbnails = withThumbnails; } @TargetApi(Build.VERSION_CODES.ECLAIR) protected ArrayList<HashMap> doInBackground(String... query) { ArrayList<Contact> contacts = getContactsFrom(getCursor(query[0])); if (withThumbnails) { for(Contact c : contacts){ setAvatarDataForContactIfAvailable(c); } } //Transform the list of contacts to a list of Map ArrayList<HashMap> contactMaps = new ArrayList<>(); for(Contact c : contacts){ contactMaps.add(c.toMap()); } return contactMaps; } protected void onPostExecute(ArrayList<HashMap> result) { getContactResult.success(result); } } private Cursor getCursor(String query){ String selection = ContactsContract.Data.MIMETYPE + "=? OR " + ContactsContract.Data.MIMETYPE + "=? OR " + ContactsContract.Data.MIMETYPE + "=? OR " + ContactsContract.Data.MIMETYPE + "=? OR " + ContactsContract.Data.MIMETYPE + "=?"; String[] selectionArgs = new String[]{Email.CONTENT_ITEM_TYPE, Phone.CONTENT_ITEM_TYPE, StructuredName.CONTENT_ITEM_TYPE, Organization.CONTENT_ITEM_TYPE, StructuredPostal.CONTENT_ITEM_TYPE}; if(query != null){ selectionArgs = new String[]{"%" + query + "%"}; selection = ContactsContract.Contacts.DISPLAY_NAME_PRIMARY + " LIKE ?"; } return contentResolver.query(ContactsContract.Data.CONTENT_URI, PROJECTION, selection, selectionArgs, null); } /** * Builds the list of contacts from the cursor * @param cursor * @return the list of contacts */ private ArrayList<Contact> getContactsFrom(Cursor cursor) { HashMap<String, Contact> map = new LinkedHashMap<>(); while (cursor != null && cursor.moveToNext()) { int columnIndex = cursor.getColumnIndex(ContactsContract.Data.CONTACT_ID); String contactId = cursor.getString(columnIndex); if (!map.containsKey(contactId)) { map.put(contactId, new Contact(contactId)); } Contact contact = map.get(contactId); String mimeType = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.MIMETYPE)); contact.displayName = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME)); //NAMES if (mimeType.equals(StructuredName.CONTENT_ITEM_TYPE)) { contact.givenName = cursor.getString(cursor.getColumnIndex(StructuredName.GIVEN_NAME)); contact.middleName = cursor.getString(cursor.getColumnIndex(StructuredName.MIDDLE_NAME)); contact.familyName = cursor.getString(cursor.getColumnIndex(StructuredName.FAMILY_NAME)); contact.prefix = cursor.getString(cursor.getColumnIndex(StructuredName.PREFIX)); contact.suffix = cursor.getString(cursor.getColumnIndex(StructuredName.SUFFIX)); } //PHONES else if (mimeType.equals(Phone.CONTENT_ITEM_TYPE)){ String phoneNumber = cursor.getString(cursor.getColumnIndex(Phone.NUMBER)); int type = cursor.getInt(cursor.getColumnIndex(Phone.TYPE)); if (!TextUtils.isEmpty(phoneNumber)){ contact.phones.add(new Item(Item.getPhoneLabel(type),phoneNumber)); } } //MAILS else if (mimeType.equals(Email.CONTENT_ITEM_TYPE)) { String email = cursor.getString(cursor.getColumnIndex(Email.ADDRESS)); int type = cursor.getInt(cursor.getColumnIndex(Email.TYPE)); if (!TextUtils.isEmpty(email)) { contact.emails.add(new Item(Item.getEmailLabel(type, cursor),email)); } } //ORG else if 
(mimeType.equals(Organization.CONTENT_ITEM_TYPE)) { contact.company = cursor.getString(cursor.getColumnIndex(Organization.COMPANY)); contact.jobTitle = cursor.getString(cursor.getColumnIndex(Organization.TITLE)); } //ADDRESSES else if (mimeType.equals(StructuredPostal.CONTENT_ITEM_TYPE)) { contact.postalAddresses.add(new PostalAddress(cursor)); } } return new ArrayList<>(map.values()); } private void setAvatarDataForContactIfAvailable(Contact contact) { Uri contactUri = ContentUris.withAppendedId(ContactsContract.Contacts.CONTENT_URI, Integer.parseInt(contact.identifier)); Uri photoUri = Uri.withAppendedPath(contactUri, ContactsContract.Contacts.Photo.CONTENT_DIRECTORY); Cursor avatarCursor = contentResolver.query(photoUri, new String[] {ContactsContract.Contacts.Photo.PHOTO}, null, null, null); if (avatarCursor != null && avatarCursor.moveToFirst()) { byte[] avatar = avatarCursor.getBlob(0); contact.avatar = avatar; } if (avatarCursor != null) { avatarCursor.close(); } } private boolean addContact(Contact contact){ ArrayList<ContentProviderOperation> ops = new ArrayList<>(); ContentProviderOperation.Builder op = ContentProviderOperation.newInsert(ContactsContract.RawContacts.CONTENT_URI) .withValue(ContactsContract.RawContacts.ACCOUNT_TYPE, null) .withValue(ContactsContract.RawContacts.ACCOUNT_NAME, null); ops.add(op.build()); op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, StructuredName.CONTENT_ITEM_TYPE) .withValue(StructuredName.GIVEN_NAME, contact.givenName) .withValue(StructuredName.MIDDLE_NAME, contact.middleName) .withValue(StructuredName.FAMILY_NAME, contact.familyName) .withValue(StructuredName.PREFIX, contact.prefix) .withValue(StructuredName.SUFFIX, contact.suffix); ops.add(op.build()); op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, Organization.CONTENT_ITEM_TYPE) .withValue(Organization.COMPANY, contact.company) .withValue(Organization.TITLE, contact.jobTitle); ops.add(op.build()); op.withYieldAllowed(true); //Phones for(Item phone : contact.phones){ op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, CommonDataKinds.Phone.CONTENT_ITEM_TYPE) .withValue(ContactsContract.CommonDataKinds.Phone.NUMBER, phone.value) .withValue(CommonDataKinds.Phone.TYPE, Item.stringToPhoneType(phone.label)); ops.add(op.build()); } //Emails for (Item email : contact.emails) { op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, CommonDataKinds.Email.CONTENT_ITEM_TYPE) .withValue(CommonDataKinds.Email.ADDRESS, email.value) .withValue(CommonDataKinds.Email.TYPE, Item.stringToEmailType(email.label)); ops.add(op.build()); } //Postal addresses for (PostalAddress address : contact.postalAddresses) { op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, CommonDataKinds.StructuredPostal.CONTENT_ITEM_TYPE) .withValue(CommonDataKinds.StructuredPostal.TYPE, PostalAddress.stringToPostalAddressType(address.label)) 
.withValue(CommonDataKinds.StructuredPostal.LABEL, address.label) .withValue(CommonDataKinds.StructuredPostal.STREET, address.street) .withValue(CommonDataKinds.StructuredPostal.CITY, address.city) .withValue(CommonDataKinds.StructuredPostal.REGION, address.region) .withValue(CommonDataKinds.StructuredPostal.POSTCODE, address.postcode) .withValue(CommonDataKinds.StructuredPostal.COUNTRY, address.country); ops.add(op.build()); } try { contentResolver.applyBatch(ContactsContract.AUTHORITY, ops); return true; } catch (Exception e) { return false; } } private boolean deleteContact(Contact contact){ ArrayList<ContentProviderOperation> ops = new ArrayList<>(); ops.add(ContentProviderOperation.newDelete(ContactsContract.Data.CONTENT_URI) .withSelection(ContactsContract.Data.CONTACT_ID + "=?", new String[]{String.valueOf(contact.identifier)}) .build()); try { contentResolver.applyBatch(ContactsContract.AUTHORITY, ops); return true; } catch (Exception e) { return false; } } private boolean updateContact(Contact contact) { ArrayList<ContentProviderOperation> ops = new ArrayList<>(); ContentProviderOperation.Builder operation = ContentProviderOperation.newUpdate(ContactsContract.Data.CONTENT_URI) .withSelection(ContactsContract.Data.CONTACT_ID + "=?" + " AND " + ContactsContract.Data.MIMETYPE + "=?", new String[]{String.valueOf(contact.identifier), ContactsContract.CommonDataKinds.StructuredName.CONTENT_ITEM_TYPE}) .withValue(StructuredName.GIVEN_NAME, contact.givenName) .withValue(StructuredName.MIDDLE_NAME, contact.middleName) .withValue(StructuredName.FAMILY_NAME, contact.familyName) .withValue(StructuredName.PREFIX, contact.prefix) .withValue(StructuredName.SUFFIX, contact.suffix); ops.add(operation.build()); operation = ContentProviderOperation.newUpdate(ContactsContract.Data.CONTENT_URI) .withSelection(ContactsContract.Data.CONTACT_ID + "=?" + " AND " + ContactsContract.Data.MIMETYPE + "=?", new String[]{String.valueOf(contact.identifier), ContactsContract.CommonDataKinds.Organization.CONTENT_ITEM_TYPE}) .withValue(Organization.COMPANY, contact.company) .withValue(Organization.TITLE, contact.jobTitle); ops.add(operation.build()); //Phones for(Item phone : contact.phones){ operation = ContentProviderOperation.newUpdate(ContactsContract.Data.CONTENT_URI) .withSelection(ContactsContract.Data.CONTACT_ID + "=?" + " AND " + ContactsContract.Data.MIMETYPE + "=?" + " AND " + Phone.TYPE + "=?", new String[]{String.valueOf(contact.identifier), Phone.CONTENT_ITEM_TYPE, String.valueOf(Item.stringToPhoneType(phone.label))}) .withValue(ContactsContract.CommonDataKinds.Phone.NUMBER, phone.value); ops.add(operation.build()); } //Emails for (Item email : contact.emails) { operation = ContentProviderOperation.newUpdate(ContactsContract.Data.CONTENT_URI) .withSelection(ContactsContract.Data.CONTACT_ID + "=?" + " AND " + ContactsContract.Data.MIMETYPE + "=?" + " AND " + Email.TYPE + "=?", new String[]{String.valueOf(contact.identifier), Email.CONTENT_ITEM_TYPE, String.valueOf(Item.stringToEmailType(email.label))}) .withValue(CommonDataKinds.Email.ADDRESS, email.value); ops.add(operation.build()); } //Postal addresses for (PostalAddress address : contact.postalAddresses) { operation = ContentProviderOperation.newUpdate(ContactsContract.Data.CONTENT_URI) .withSelection(ContactsContract.Data.CONTACT_ID + "=?" + " AND " + ContactsContract.Data.MIMETYPE + "=?" 
+ " AND " + StructuredPostal.TYPE + "=?", new String[]{String.valueOf(contact.identifier), StructuredPostal.CONTENT_ITEM_TYPE, String.valueOf(PostalAddress.stringToPostalAddressType(address.label))}) .withValue(StructuredPostal.LABEL, address.label) .withValue(StructuredPostal.STREET, address.street) .withValue(StructuredPostal.CITY, address.city) .withValue(StructuredPostal.REGION, address.region) .withValue(StructuredPostal.POSTCODE, address.postcode) .withValue(StructuredPostal.COUNTRY, address.country); ops.add(operation.build()); } try { contentResolver.applyBatch(ContactsContract.AUTHORITY, ops); return true; } catch (Exception e) { return false; } } }
android/src/main/java/flutter/plugins/contactsservice/contactsservice/ContactsServicePlugin.java
package flutter.plugins.contactsservice.contactsservice; import io.flutter.plugin.common.MethodChannel; import io.flutter.plugin.common.MethodChannel.MethodCallHandler; import io.flutter.plugin.common.MethodChannel.Result; import io.flutter.plugin.common.MethodCall; import io.flutter.plugin.common.PluginRegistry.Registrar; import android.annotation.TargetApi; import android.content.ContentProviderOperation; import android.content.ContentResolver; import android.content.ContentUris; import android.content.OperationApplicationException; import android.database.Cursor; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.RemoteException; import android.provider.ContactsContract; import android.text.TextUtils; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import static android.provider.ContactsContract.CommonDataKinds; import static android.provider.ContactsContract.CommonDataKinds.Email; import static android.provider.ContactsContract.CommonDataKinds.Organization; import static android.provider.ContactsContract.CommonDataKinds.Phone; import static android.provider.ContactsContract.CommonDataKinds.StructuredName; import static android.provider.ContactsContract.CommonDataKinds.StructuredPostal; @TargetApi(Build.VERSION_CODES.ECLAIR) public class ContactsServicePlugin implements MethodCallHandler { ContactsServicePlugin(ContentResolver contentResolver){ this.contentResolver = contentResolver; } private final ContentResolver contentResolver; public static void registerWith(Registrar registrar) { final MethodChannel channel = new MethodChannel(registrar.messenger(), "github.com/clovisnicolas/flutter_contacts"); channel.setMethodCallHandler(new ContactsServicePlugin(registrar.context().getContentResolver())); } @Override public void onMethodCall(MethodCall call, Result result) { switch(call.method){ case "getContacts": this.getContacts((String)call.argument("query"), (boolean)call.argument("withThumbnails"), result); break; case "addContact": Contact c = Contact.fromMap((HashMap)call.arguments); if(this.addContact(c)) { result.success(null); } else{ result.error(null, "Failed to add the contact", null); } break; case "deleteContact": Contact ct = Contact.fromMap((HashMap)call.arguments); if(this.deleteContact(ct)){ result.success(null); } else{ result.error(null, "Failed to delete the contact, make sure it has a valid identifier", null); } break; default: result.notImplemented(); } } private static final String[] PROJECTION = { ContactsContract.Data.CONTACT_ID, ContactsContract.Profile.DISPLAY_NAME, ContactsContract.Contacts.Data.MIMETYPE, StructuredName.DISPLAY_NAME, StructuredName.GIVEN_NAME, StructuredName.MIDDLE_NAME, StructuredName.FAMILY_NAME, StructuredName.PREFIX, StructuredName.SUFFIX, Phone.NUMBER, Phone.TYPE, Phone.LABEL, Email.DATA, Email.ADDRESS, Email.TYPE, Email.LABEL, Organization.COMPANY, Organization.TITLE, StructuredPostal.FORMATTED_ADDRESS, StructuredPostal.TYPE, StructuredPostal.LABEL, StructuredPostal.STREET, StructuredPostal.POBOX, StructuredPostal.NEIGHBORHOOD, StructuredPostal.CITY, StructuredPostal.REGION, StructuredPostal.POSTCODE, StructuredPostal.COUNTRY, }; @TargetApi(Build.VERSION_CODES.ECLAIR) private void getContacts(String query, boolean withThumbnails, Result result) { new GetContactsTask(result, withThumbnails).execute(new String[] {query}); } @TargetApi(Build.VERSION_CODES.CUPCAKE) private class GetContactsTask extends AsyncTask<String, Void, ArrayList<HashMap>> { private Result 
getContactResult; private boolean withThumbnails; public GetContactsTask(Result result, boolean withThumbnails){ this.getContactResult = result; this.withThumbnails = withThumbnails; } @TargetApi(Build.VERSION_CODES.ECLAIR) protected ArrayList<HashMap> doInBackground(String... query) { ArrayList<Contact> contacts = getContactsFrom(getCursor(query[0])); if (withThumbnails) { for(Contact c : contacts){ setAvatarDataForContactIfAvailable(c); } } //Transform the list of contacts to a list of Map ArrayList<HashMap> contactMaps = new ArrayList<>(); for(Contact c : contacts){ contactMaps.add(c.toMap()); } return contactMaps; } protected void onPostExecute(ArrayList<HashMap> result) { getContactResult.success(result); } } private Cursor getCursor(String query){ String selection = ContactsContract.Data.MIMETYPE + "=? OR " + ContactsContract.Data.MIMETYPE + "=? OR " + ContactsContract.Data.MIMETYPE + "=? OR " + ContactsContract.Data.MIMETYPE + "=? OR " + ContactsContract.Data.MIMETYPE + "=?"; String[] selectionArgs = new String[]{Email.CONTENT_ITEM_TYPE, Phone.CONTENT_ITEM_TYPE, StructuredName.CONTENT_ITEM_TYPE, Organization.CONTENT_ITEM_TYPE, StructuredPostal.CONTENT_ITEM_TYPE}; if(query != null){ selectionArgs = new String[]{"%" + query + "%"}; selection = ContactsContract.Contacts.DISPLAY_NAME_PRIMARY + " LIKE ?"; } return contentResolver.query(ContactsContract.Data.CONTENT_URI, PROJECTION, selection, selectionArgs, null); } /** * Builds the list of contacts from the cursor * @param cursor * @return the list of contacts */ private ArrayList<Contact> getContactsFrom(Cursor cursor) { HashMap<String, Contact> map = new LinkedHashMap<>(); while (cursor != null && cursor.moveToNext()) { int columnIndex = cursor.getColumnIndex(ContactsContract.Data.CONTACT_ID); String contactId = cursor.getString(columnIndex); if (!map.containsKey(contactId)) { map.put(contactId, new Contact(contactId)); } Contact contact = map.get(contactId); String mimeType = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.MIMETYPE)); contact.displayName = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME)); //NAMES if (mimeType.equals(StructuredName.CONTENT_ITEM_TYPE)) { contact.givenName = cursor.getString(cursor.getColumnIndex(StructuredName.GIVEN_NAME)); contact.middleName = cursor.getString(cursor.getColumnIndex(StructuredName.MIDDLE_NAME)); contact.familyName = cursor.getString(cursor.getColumnIndex(StructuredName.FAMILY_NAME)); contact.prefix = cursor.getString(cursor.getColumnIndex(StructuredName.PREFIX)); contact.suffix = cursor.getString(cursor.getColumnIndex(StructuredName.SUFFIX)); } //PHONES else if (mimeType.equals(Phone.CONTENT_ITEM_TYPE)){ String phoneNumber = cursor.getString(cursor.getColumnIndex(Phone.NUMBER)); int type = cursor.getInt(cursor.getColumnIndex(Phone.TYPE)); if (!TextUtils.isEmpty(phoneNumber)){ contact.phones.add(new Item(Item.getPhoneLabel(type),phoneNumber)); } } //MAILS else if (mimeType.equals(Email.CONTENT_ITEM_TYPE)) { String email = cursor.getString(cursor.getColumnIndex(Email.ADDRESS)); int type = cursor.getInt(cursor.getColumnIndex(Email.TYPE)); if (!TextUtils.isEmpty(email)) { contact.emails.add(new Item(Item.getEmailLabel(type, cursor),email)); } } //ORG else if (mimeType.equals(Organization.CONTENT_ITEM_TYPE)) { contact.company = cursor.getString(cursor.getColumnIndex(Organization.COMPANY)); contact.jobTitle = cursor.getString(cursor.getColumnIndex(Organization.TITLE)); } //ADDRESSES else if (mimeType.equals(StructuredPostal.CONTENT_ITEM_TYPE)) { 
contact.postalAddresses.add(new PostalAddress(cursor)); } } return new ArrayList<>(map.values()); } private void setAvatarDataForContactIfAvailable(Contact contact) { Uri contactUri = ContentUris.withAppendedId(ContactsContract.Contacts.CONTENT_URI, Integer.parseInt(contact.identifier)); Uri photoUri = Uri.withAppendedPath(contactUri, ContactsContract.Contacts.Photo.CONTENT_DIRECTORY); Cursor avatarCursor = contentResolver.query(photoUri, new String[] {ContactsContract.Contacts.Photo.PHOTO}, null, null, null); if (avatarCursor != null && avatarCursor.moveToFirst()) { byte[] avatar = avatarCursor.getBlob(0); contact.avatar = avatar; } if (avatarCursor != null) { avatarCursor.close(); } } private boolean addContact(Contact contact){ ArrayList<ContentProviderOperation> ops = new ArrayList<>(); ContentProviderOperation.Builder op = ContentProviderOperation.newInsert(ContactsContract.RawContacts.CONTENT_URI) .withValue(ContactsContract.RawContacts.ACCOUNT_TYPE, null) .withValue(ContactsContract.RawContacts.ACCOUNT_NAME, null); ops.add(op.build()); op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, StructuredName.CONTENT_ITEM_TYPE) .withValue(StructuredName.GIVEN_NAME, contact.givenName) .withValue(StructuredName.MIDDLE_NAME, contact.middleName) .withValue(StructuredName.FAMILY_NAME, contact.familyName) .withValue(StructuredName.PREFIX, contact.prefix) .withValue(StructuredName.SUFFIX, contact.suffix); ops.add(op.build()); op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, Organization.CONTENT_ITEM_TYPE) .withValue(Organization.COMPANY, contact.company) .withValue(Organization.TITLE, contact.jobTitle); ops.add(op.build()); op.withYieldAllowed(true); //Phones for(Item phone : contact.phones){ op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, CommonDataKinds.Phone.CONTENT_ITEM_TYPE) .withValue(ContactsContract.CommonDataKinds.Phone.NUMBER, phone.value) .withValue(CommonDataKinds.Phone.TYPE, Item.stringToPhoneType(phone.label)); ops.add(op.build()); } //Emails for (Item email : contact.emails) { op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, CommonDataKinds.Email.CONTENT_ITEM_TYPE) .withValue(CommonDataKinds.Email.ADDRESS, email.value) .withValue(CommonDataKinds.Email.TYPE, Item.stringToEmailType(email.label)); ops.add(op.build()); } //Postal addresses for (PostalAddress address : contact.postalAddresses) { op = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI) .withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID, 0) .withValue(ContactsContract.Data.MIMETYPE, CommonDataKinds.StructuredPostal.CONTENT_ITEM_TYPE) .withValue(CommonDataKinds.StructuredPostal.TYPE, PostalAddress.stringToPostalAddressType(address.label)) .withValue(CommonDataKinds.StructuredPostal.STREET, address.street) .withValue(CommonDataKinds.StructuredPostal.CITY, address.city) .withValue(CommonDataKinds.StructuredPostal.REGION, address.region) .withValue(CommonDataKinds.StructuredPostal.POSTCODE, address.postcode) .withValue(CommonDataKinds.StructuredPostal.COUNTRY, 
address.country); ops.add(op.build()); } try { contentResolver.applyBatch(ContactsContract.AUTHORITY, ops); return true; } catch (Exception e) { return false; } } private boolean deleteContact(Contact contact){ ArrayList<ContentProviderOperation> ops = new ArrayList<>(); ops.add(ContentProviderOperation.newDelete(ContactsContract.Data.CONTENT_URI) .withSelection(ContactsContract.Data.CONTACT_ID + "=?", new String[]{String.valueOf(contact.identifier)}) .build()); try { contentResolver.applyBatch(ContactsContract.AUTHORITY, ops); return true; } catch (Exception e) { return false; } } }
Added updateContact method to ContactsServicePlugin.java fin
android/src/main/java/flutter/plugins/contactsservice/contactsservice/ContactsServicePlugin.java
Added updateContact method to ContactsServicePlugin.java
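The record above shows the pre-change ContactsServicePlugin.java, so the updateContact method named in the commit message and subject is not visible in this excerpt. As a rough, hypothetical sketch only (the method name, selection clause and field coverage are assumptions, not the committed code), an update following the same ContentProviderOperation batch pattern already used by addContact and deleteContact could look like this:

// Hypothetical sketch, not the committed updateContact implementation.
private boolean updateContact(Contact contact) {
    ArrayList<ContentProviderOperation> ops = new ArrayList<>();
    // Rewrite the structured-name row of the existing contact
    // (assumed selection by CONTACT_ID + MIMETYPE).
    ops.add(ContentProviderOperation.newUpdate(ContactsContract.Data.CONTENT_URI)
        .withSelection(ContactsContract.Data.CONTACT_ID + "=? AND " + ContactsContract.Data.MIMETYPE + "=?",
            new String[]{contact.identifier, StructuredName.CONTENT_ITEM_TYPE})
        .withValue(StructuredName.GIVEN_NAME, contact.givenName)
        .withValue(StructuredName.MIDDLE_NAME, contact.middleName)
        .withValue(StructuredName.FAMILY_NAME, contact.familyName)
        .build());
    try {
        contentResolver.applyBatch(ContactsContract.AUTHORITY, ops);
        return true;
    } catch (Exception e) {
        return false;
    }
}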
Java
epl-1.0
1972ebe8bb509d5af0939a06678caea4bfdaaaf7
0
rrimmana/birt-1,Charling-Huang/birt,Charling-Huang/birt,sguan-actuate/birt,rrimmana/birt-1,Charling-Huang/birt,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,rrimmana/birt-1,sguan-actuate/birt,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,sguan-actuate/birt
/******************************************************************************* * Copyright (c) 2004, 2007 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.birt.report.engine.emitter.html; import java.util.Stack; import org.eclipse.birt.report.engine.content.ICellContent; import org.eclipse.birt.report.engine.content.IColumn; import org.eclipse.birt.report.engine.content.IContainerContent; import org.eclipse.birt.report.engine.content.IContent; import org.eclipse.birt.report.engine.content.IForeignContent; import org.eclipse.birt.report.engine.content.IImageContent; import org.eclipse.birt.report.engine.content.IPageContent; import org.eclipse.birt.report.engine.content.IRowContent; import org.eclipse.birt.report.engine.content.IStyle; import org.eclipse.birt.report.engine.content.ITableContent; import org.eclipse.birt.report.engine.content.ITextContent; import org.eclipse.birt.report.engine.emitter.HTMLTags; import org.eclipse.birt.report.engine.emitter.HTMLWriter; import org.eclipse.birt.report.engine.emitter.html.util.HTMLEmitterUtil; import org.eclipse.birt.report.engine.ir.DimensionType; import org.eclipse.birt.report.engine.ir.EngineIRConstants; import org.w3c.dom.css.CSSValue; /** * */ public abstract class HTMLEmitter { protected HTMLReportEmitter reportEmitter; protected HTMLWriter writer; protected String layoutPreference; /** * The <code>containerDisplayStack</code> that stores the display value of container. */ protected Stack containerDisplayStack = new Stack( ); public HTMLEmitter( HTMLReportEmitter reportEmitter, HTMLWriter writer, String layoutPreference ) { this.reportEmitter = reportEmitter; this.writer = writer; this.layoutPreference = layoutPreference; } // FIXME: code review: We shouldn��t pass the style directly. We should pass // the element and get the style form the element in the method. public abstract void buildDefaultStyle( StringBuffer styleBuffer, IStyle style ); public abstract void buildStyle( StringBuffer styleBuffer, IStyle style ); public abstract void buildPageBandStyle( StringBuffer styleBuffer, IStyle style ); public abstract void buildTableStyle( ITableContent table, StringBuffer styleBuffer ); public abstract void buildColumnStyle( IColumn column, StringBuffer styleBuffer ); public abstract void handleColumnAlign( IColumn column ); public abstract void buildRowStyle( IRowContent row, StringBuffer styleBuffer ); public abstract void handleRowAlign( IRowContent row ); public abstract void buildCellStyle( ICellContent cell, StringBuffer styleBuffer, boolean isHead ); public abstract void handleCellAlign( ICellContent cell ); public abstract void buildContainerStyle( IContainerContent container, StringBuffer styleBuffer ); public abstract void handleContainerAlign( IContainerContent container ); // FIXME: code review: Because the display has already been calculated in // the HTMLReportEmitter, so we can build the display there too. We needn't // pass the display here. 
public abstract void buildTextStyle( ITextContent text, StringBuffer styleBuffer, int display ); public abstract void buildForeignStyle( IForeignContent foreign, StringBuffer styleBuffer, int display ); public abstract void buildImageStyle( IImageContent image, StringBuffer styleBuffer, int display ); /** * Build the style of the page */ public void buildPageStyle( IPageContent page, StringBuffer styleBuffer, boolean needOutputBackgroundSize ) { // The method getStyle( ) will nevel return a null value; IStyle style = page.getStyle( ); if ( !needOutputBackgroundSize ) { AttributeBuilder .buildBackground( styleBuffer, style, reportEmitter ); } else AttributeBuilder.buildBackgroundColor( styleBuffer, style, reportEmitter ); AttributeBuilder.buildBorders( styleBuffer, style ); } /** * Build size style string say, "width: 10.0mm;". The min-height should be * implemented by sepcial way. * * @param content * The <code>StringBuffer</code> to which the result is output. * @param name * The property name * @param value * The values of the property */ public void buildSize( StringBuffer content, String name, DimensionType value ) { if ( value != null ) { if ( HTMLTags.ATTR_MIN_HEIGHT.equals( name ) ) { //To solve the problem that IE do not support min-height. //Use this way to make Firefox and IE both work well. content.append( " height: auto !important; height: " ); content.append( value.toString( ) ); content.append( "; min-height: " ); content.append( value.toString( ) ); content.append( ';' ); } else { content.append( ' ' ); content.append( name ); content.append( ": " ); content.append( value.toString( ) ); content.append( ';' ); } } } protected IStyle getElementStyle( IContent content ) { IStyle style = content.getInlineStyle( ); if ( style == null || style.isEmpty( ) ) { return null; } return style; } // FIXME: code review: We should remove all the codes about the x and y. // BIRT doesn't supoort the x and y now. /** * Checks whether the element is block, inline or inline-block level. In * BIRT, the absolute positioning model is used and a box is explicitly * offset with respect to its containing block. When an element's x or y is * set, it will be treated as a block level element regardless of the * 'Display' property set in style. When designating width or height value * to an inline element, it will be treated as inline-block. * * @param x * Specifies how far a box's left margin edge is offset to the * right of the left edge of the box's containing block. * @param y * Specifies how far an absolutely positioned box's top margin * edge is offset below the top edge of the box's containing * block. * @param width * The width of the element. * @param height * The height of the element. * @param style * The <code>IStyle</code> object. * @return The display type of the element. */ public CSSValue getElementDisplay( DimensionType x, DimensionType y, DimensionType width, DimensionType height, IStyle style ) { CSSValue display = null; if ( style != null ) { display = style.getProperty( IStyle.STYLE_DISPLAY ); } if ( IStyle.NONE_VALUE == display ) { return IStyle.NONE_VALUE; } if ( x != null || y != null ) { return IStyle.BLOCK_VALUE; } else if( IStyle.INLINE_VALUE == display ) { if ( width != null || height != null ) { return IStyle.INLINE_BLOCK_VALUE; } else { return IStyle.INLINE_VALUE; } } return IStyle.BLOCK_VALUE; } /** * Checks whether the element is block, inline or inline-block level. 
In * BIRT, the absolute positioning model is used and a box is explicitly * offset with respect to its containing block. When an element's x or y is * set, it will be treated as a block level element regardless of the * 'Display' property set in style. When designating width or height value * to an inline element, it will be treated as inline-block. * * @param x * Specifies how far a box's left margin edge is offset to the * right of the left edge of the box's containing block. * @param y * Specifies how far an absolutely positioned box's top margin * edge is offset below the top edge of the box's containing * block. * @param width * The width of the element. * @param height * The height of the element. * @param style * The <code>IStyle</code> object. * @return The display type of the element. */ public int getElementType( DimensionType x, DimensionType y, DimensionType width, DimensionType height, IStyle style ) { int type = 0; String display = null; if ( style != null ) { display = style.getDisplay( ); } if ( EngineIRConstants.DISPLAY_NONE.equalsIgnoreCase( display ) ) { type |= HTMLEmitterUtil.DISPLAY_NONE; } if ( x != null || y != null ) { return type | HTMLEmitterUtil.DISPLAY_BLOCK; } else if ( EngineIRConstants.DISPLAY_INLINE.equalsIgnoreCase( display ) ) { type |= HTMLEmitterUtil.DISPLAY_INLINE; if ( width != null || height != null ) { type |= HTMLEmitterUtil.DISPLAY_INLINE_BLOCK; } return type; } return type | HTMLEmitterUtil.DISPLAY_BLOCK; } /** * Checks whether the element is block, inline or inline-block level. In * BIRT, the absolute positioning model is used and a box is explicitly * offset with respect to its containing block. When an element's x or y is * set, it will be treated as a block level element regardless of the * 'Display' property set in style. When designating width or height value * to an inline element, it will be treated as inline-block. * * @param x * Specifies how far a box's left margin edge is offset to the * right of the left edge of the box's containing block. * @param y * Specifies how far an absolutely positioned box's top margin * edge is offset below the top edge of the box's containing * block. * @param width * The width of the element. * @param height * The height of the element. * @param style * The <code>IStyle</code> object. * @return The display type of the element. */ public int getTextElementType( DimensionType x, DimensionType y, DimensionType width, DimensionType height, IStyle style ) { int type = 0; String display = null; if ( style != null ) { display = style.getDisplay( ); } if ( EngineIRConstants.DISPLAY_NONE.equalsIgnoreCase( display ) ) { type |= HTMLEmitterUtil.DISPLAY_NONE; } if ( x != null || y != null ) { return type | HTMLEmitterUtil.DISPLAY_BLOCK; } else if ( EngineIRConstants.DISPLAY_INLINE.equalsIgnoreCase( display ) ) { type |= HTMLEmitterUtil.DISPLAY_INLINE; //Inline text doesn't support height. 
if ( width != null ) { type |= HTMLEmitterUtil.DISPLAY_INLINE_BLOCK; } return type; } return type | HTMLEmitterUtil.DISPLAY_BLOCK; } /** * adds the default table styles * * @param styleBuffer */ protected void addDefaultTableStyles( StringBuffer styleBuffer ) { styleBuffer.append( "border-collapse: collapse; empty-cells: show;" ); //$NON-NLS-1$ } /** * Checks the 'CanShrink' property and sets the width and height according * to the table below: * <p> * <table border=0 cellspacing=3 cellpadding=0 summary="Chart showing * symbol, location, localized, and meaning."> * <tr bgcolor="#ccccff"> * <th align=left>CanShrink</th> * <th align=left>Element Type</th> * <th align=left>Width</th> * <th align=left>Height</th> * </tr> * <tr valign=middle> * <td rowspan="2"><code>true(by default)</code></td> * <td>in-line</td> * <td>ignor</td> * <td>set</td> * </tr> * <tr valign=top bgcolor="#eeeeff"> * <td><code>block</code></td> * <td>set</td> * <td>ignor</td> * </tr> * <tr valign=middle> * <td rowspan="2" bgcolor="#eeeeff"><code>false</code></td> * <td>in-line</td> * <td>replaced by 'min-width' property</td> * <td>set</td> * </tr> * <tr valign=top bgcolor="#eeeeff"> * <td><code>block</code></td> * <td>set</td> * <td>replaced by 'min-height' property</td> * </tr> * </table> * * @param type * The display type of the element. * @param style * The style of an element. * @param height * The height property. * @param width * The width property. * @param styleBuffer * The <code>StringBuffer</code> object that returns 'style' * content. * @return A <code>boolean</code> value indicating 'Can-Shrink' property * is set to <code>true</code> or not. */ // protected boolean handleShrink( CSSValue display, IStyle style, // DimensionType height, DimensionType width, StringBuffer styleBuffer ) // { // boolean canShrink = style != null // && "true".equalsIgnoreCase( style.getCanShrink( ) ); //$NON-NLS-1$ // // if ( IStyle.BLOCK_VALUE == display ) // { // buildSize( styleBuffer, HTMLTags.ATTR_WIDTH, width ); // if ( !canShrink ) // { // buildSize( styleBuffer, HTMLTags.ATTR_MIN_HEIGHT, height ); // } // } // else if ( IStyle.INLINE_VALUE == display // || IStyle.INLINE_BLOCK_VALUE == display ) // { // buildSize( styleBuffer, HTMLTags.ATTR_HEIGHT, height ); // if ( !canShrink ) // { // buildSize( styleBuffer, HTMLTags.ATTR_MIN_WIDTH, width ); // } // } // // return canShrink; // } /** * Checks the 'CanShrink' property and sets the width and height according * to the table below: * <p> * <table border=0 cellspacing=3 cellpadding=0 summary="Chart showing * symbol, location, localized, and meaning."> * <tr bgcolor="#ccccff"> * <th align=left>CanShrink</th> * <th align=left>Element Type</th> * <th align=left>Width</th> * <th align=left>Height</th> * </tr> * <tr valign=middle> * <td rowspan="2"><code>true(by default)</code></td> * <td>in-line</td> * <td>ignor</td> * <td>set</td> * </tr> * <tr valign=top bgcolor="#eeeeff"> * <td><code>block</code></td> * <td>set</td> * <td>ignor</td> * </tr> * <tr valign=middle> * <td rowspan="2" bgcolor="#eeeeff"><code>false</code></td> * <td>in-line</td> * <td>replaced by 'min-width' property</td> * <td>set</td> * </tr> * <tr valign=top bgcolor="#eeeeff"> * <td><code>block</code></td> * <td>set</td> * <td>replaced by 'min-height' property</td> * </tr> * </table> * * @param type * The display type of the element. * @param style * The style of an element. * @param height * The height property. * @param width * The width property. 
* @param styleBuffer * The <code>StringBuffer</code> object that returns 'style' * content. * @return A <code>boolean</code> value indicating 'Can-Shrink' property * is set to <code>true</code> or not. */ protected boolean handleShrink( int type, IStyle style, DimensionType height, DimensionType width, StringBuffer styleBuffer ) { boolean canShrink = style != null && "true".equalsIgnoreCase( style.getCanShrink( ) ); //$NON-NLS-1$ if ( ( type & HTMLEmitterUtil.DISPLAY_BLOCK ) > 0 ) { buildSize( styleBuffer, HTMLTags.ATTR_WIDTH, width ); if ( !canShrink ) { buildSize( styleBuffer, HTMLTags.ATTR_MIN_HEIGHT, height ); } } else if ( ( type & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) { buildSize( styleBuffer, HTMLTags.ATTR_HEIGHT, height ); if ( !canShrink ) { buildSize( styleBuffer, HTMLTags.ATTR_MIN_WIDTH, width ); } } else { assert false; } return canShrink; } /** * Checks the 'CanShrink' property and sets the width and height according * @param type * The display type of the element. * @param style * The style of an element. * @param height * The height property. * @param width * The width property. * @param styleBuffer * The <code>StringBuffer</code> object that returns 'style' * content. * @return A <code>boolean</code> value indicating 'Can-Shrink' property * is set to <code>true</code> or not. */ protected boolean handleTextShrink( int type, IStyle style, DimensionType height, DimensionType width, StringBuffer styleBuffer ) { boolean canShrink = style != null && "true".equalsIgnoreCase( style.getCanShrink( ) ); //$NON-NLS-1$ if ( ( type & HTMLEmitterUtil.DISPLAY_BLOCK ) > 0 ) { if ( width != null ) { buildSize( styleBuffer, HTMLTags.ATTR_WIDTH, width ); styleBuffer.append( " overflow: hidden;" ); } if ( !canShrink ) { buildSize( styleBuffer, HTMLTags.ATTR_MIN_HEIGHT, height ); } } else if ( ( type & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) { //Inline text doesn't support height. Inline-block text supports height. //The user can use line height to implement the height effect of inline text. if ( ( type & HTMLEmitterUtil.DISPLAY_INLINE_BLOCK ) > 0 ) { buildSize( styleBuffer, HTMLTags.ATTR_HEIGHT, height ); } if ( !canShrink ) { if ( width != null ) { buildSize( styleBuffer, HTMLTags.ATTR_WIDTH, width ); styleBuffer.append( " overflow: hidden;" ); } } } else { assert false; } return canShrink; } // FIXME: code review: implement the openContainerTag and closeContainerTag // in the HTMLReportEmitter directly. /** * Open the container tag. */ public void openContainerTag( IContainerContent container ) { DimensionType x = container.getX( ); DimensionType y = container.getY( ); DimensionType width = container.getWidth( ); DimensionType height = container.getHeight( ); int display = getElementType( x, y, width, height, container.getStyle( ) ); // The display value is pushed in Stack. It will be popped when close the container tag. containerDisplayStack.push( new Integer( display ) ); if ( ( ( display & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) || ( ( display & HTMLEmitterUtil.DISPLAY_INLINE_BLOCK ) > 0 ) ) { // Open the inlineBox tag when implement the inline box. openInlineBoxTag( ); //FIXME: code review: We should implement the shrink here. } writer.openTag( HTMLTags.TAG_DIV ); } /** * Close the container tag. 
*/ public void closeContainerTag( ) { writer.closeTag( HTMLTags.TAG_DIV ); int display = ( (Integer) containerDisplayStack.pop( ) ).intValue( ); if ( ( ( display & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) || ( ( display & HTMLEmitterUtil.DISPLAY_INLINE_BLOCK ) > 0 ) ) { // Close the inlineBox tag when implement the inline box. closeInlineBoxTag( ); } } /** * Open the tag when implement the inline box. */ protected void openInlineBoxTag( ) { writer.openTag( HTMLTags.TAG_DIV ); // For the IE the display value will be "inline", because the IE can't // identify the "!important". For the Firefox 1.5 and 2 the display // value will be "-moz-inline-box", because only the Firefox 3 implement // the "inline-block". For the Firefox 3 the display value will be // "inline-block". writer.attribute( HTMLTags.ATTR_STYLE, " display:-moz-inline-box !important; display:inline-block !important; display:inline;" ); writer.openTag( HTMLTags.TAG_TABLE ); writer.openTag( HTMLTags.TAG_TR ); writer.openTag( HTMLTags.TAG_TD ); } /** * Close the tag when implement the inline box. */ protected void closeInlineBoxTag( ) { writer.closeTag( HTMLTags.TAG_TD ); writer.closeTag( HTMLTags.TAG_TR ); writer.closeTag( HTMLTags.TAG_TABLE ); writer.closeTag( HTMLTags.TAG_DIV ); } /** * Set the display property to style. * * @param display * The display type. * @param mask * The mask. * @param styleBuffer * The <code>StringBuffer</code> object that returns 'style' * content. */ protected void setDisplayProperty( int display, int mask, StringBuffer styleBuffer ) { int flag = display & mask; if ( ( display & HTMLEmitterUtil.DISPLAY_NONE ) > 0 ) { styleBuffer.append( "display: none;" ); //$NON-NLS-1$ } else if ( flag > 0 ) { if ( ( flag & HTMLEmitterUtil.DISPLAY_BLOCK ) > 0 ) { styleBuffer.append( "display: block;" ); //$NON-NLS-1$ } else if ( ( flag & HTMLEmitterUtil.DISPLAY_INLINE_BLOCK ) > 0 ) { styleBuffer.append( "display: inline-block;" ); //$NON-NLS-1$ } else if ( ( flag & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) { styleBuffer.append( "display: inline;" ); //$NON-NLS-1$ } } } /** * Open the vertical-align box tag if the element needs implementing the * vertical-align. */ // FIXME: code review: Because only the text element and foreign element use // this method, so the method name should be changed to // handleTextVerticalAlignBegin // FIXME: code review of text: Inline text doesn't need outputting the // vertical-align. Block and inline-block texts need outputting the // vertical-align. public void handleVerticalAlignBegin( IContent element ) { IStyle style = element.getStyle( ); CSSValue vAlign = style.getProperty( IStyle.STYLE_VERTICAL_ALIGN ); CSSValue canShrink = style.getProperty( IStyle.STYLE_CAN_SHRINK ); DimensionType height = element.getHeight( ); // FIXME: code review: the top value of the vAlign shouldn't be outptted too. if ( vAlign != null && vAlign != IStyle.BASELINE_VALUE && height != null && canShrink != IStyle.TRUE_VALUE ) { // implement vertical align. 
writer.openTag( HTMLTags.TAG_TABLE ); StringBuffer nestingTableStyleBuffer = new StringBuffer( ); nestingTableStyleBuffer.append( " width:100%; height:" ); nestingTableStyleBuffer.append( height.toString( ) ); writer.attribute( HTMLTags.ATTR_STYLE, nestingTableStyleBuffer.toString( ) ); writer.openTag( HTMLTags.TAG_TR ); writer.openTag( HTMLTags.TAG_TD ); StringBuffer textStyleBuffer = new StringBuffer( ); textStyleBuffer.append( " vertical-align:" ); textStyleBuffer.append( vAlign.getCssText( ) ); textStyleBuffer.append( ";" ); writer.attribute( HTMLTags.ATTR_STYLE, textStyleBuffer.toString( ) ); } } /** * Close the vertical-align box tag if the element needs implementing the * vertical-align. */ public void handleVerticalAlignEnd( IContent element ) { IStyle style = element.getStyle( ); CSSValue vAlign = style.getProperty( IStyle.STYLE_VERTICAL_ALIGN ); CSSValue canShrink = style.getProperty( IStyle.STYLE_CAN_SHRINK ); DimensionType height = element.getHeight( ); if ( vAlign != null && vAlign != IStyle.BASELINE_VALUE && height != null && canShrink != IStyle.TRUE_VALUE ) { writer.closeTag( HTMLTags.TAG_TD ); writer.closeTag( HTMLTags.TAG_TR ); writer.closeTag( HTMLTags.TAG_TABLE ); } } }
engine/org.eclipse.birt.report.engine.emitter.html/src/org/eclipse/birt/report/engine/emitter/html/HTMLEmitter.java
/******************************************************************************* * Copyright (c) 2004, 2007 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.birt.report.engine.emitter.html; import java.util.Stack; import org.eclipse.birt.report.engine.content.ICellContent; import org.eclipse.birt.report.engine.content.IColumn; import org.eclipse.birt.report.engine.content.IContainerContent; import org.eclipse.birt.report.engine.content.IContent; import org.eclipse.birt.report.engine.content.IForeignContent; import org.eclipse.birt.report.engine.content.IImageContent; import org.eclipse.birt.report.engine.content.IPageContent; import org.eclipse.birt.report.engine.content.IRowContent; import org.eclipse.birt.report.engine.content.IStyle; import org.eclipse.birt.report.engine.content.ITableContent; import org.eclipse.birt.report.engine.content.ITextContent; import org.eclipse.birt.report.engine.emitter.HTMLTags; import org.eclipse.birt.report.engine.emitter.HTMLWriter; import org.eclipse.birt.report.engine.emitter.html.util.HTMLEmitterUtil; import org.eclipse.birt.report.engine.ir.DimensionType; import org.eclipse.birt.report.engine.ir.EngineIRConstants; import org.w3c.dom.css.CSSValue; /** * */ public abstract class HTMLEmitter { protected HTMLReportEmitter reportEmitter; protected HTMLWriter writer; protected String layoutPreference; /** * The <code>containerDisplayStack</code> that stores the display value of container. */ protected Stack containerDisplayStack = new Stack( ); public HTMLEmitter( HTMLReportEmitter reportEmitter, HTMLWriter writer, String layoutPreference ) { this.reportEmitter = reportEmitter; this.writer = writer; this.layoutPreference = layoutPreference; } // FIXME: code review: We shouldn��t pass the style directly. We should pass // the element and get the style form the element in the method. public abstract void buildDefaultStyle( StringBuffer styleBuffer, IStyle style ); public abstract void buildStyle( StringBuffer styleBuffer, IStyle style ); public abstract void buildPageBandStyle( StringBuffer styleBuffer, IStyle style ); public abstract void buildTableStyle( ITableContent table, StringBuffer styleBuffer ); public abstract void buildColumnStyle( IColumn column, StringBuffer styleBuffer ); public abstract void handleColumnAlign( IColumn column ); public abstract void buildRowStyle( IRowContent row, StringBuffer styleBuffer ); public abstract void handleRowAlign( IRowContent row ); public abstract void buildCellStyle( ICellContent cell, StringBuffer styleBuffer, boolean isHead ); public abstract void handleCellAlign( ICellContent cell ); public abstract void buildContainerStyle( IContainerContent container, StringBuffer styleBuffer ); public abstract void handleContainerAlign( IContainerContent container ); // FIXME: code review: Because the display has already been calculated in // the HTMLReportEmitter, so we can build the display there too. We needn't // pass the display here. 
public abstract void buildTextStyle( ITextContent text, StringBuffer styleBuffer, int display ); public abstract void buildForeignStyle( IForeignContent foreign, StringBuffer styleBuffer, int display ); public abstract void buildImageStyle( IImageContent image, StringBuffer styleBuffer, int display ); /** * Build the style of the page */ public void buildPageStyle( IPageContent page, StringBuffer styleBuffer, boolean needOutputBackgroundSize ) { // The method getStyle( ) will nevel return a null value; IStyle style = page.getStyle( ); if ( !needOutputBackgroundSize ) { AttributeBuilder .buildBackground( styleBuffer, style, reportEmitter ); } else AttributeBuilder.buildBackgroundColor( styleBuffer, style, reportEmitter ); AttributeBuilder.buildBorders( styleBuffer, style ); } /** * Build size style string say, "width: 10.0mm;". The min-height should be * implemented by sepcial way. * * @param content * The <code>StringBuffer</code> to which the result is output. * @param name * The property name * @param value * The values of the property */ public void buildSize( StringBuffer content, String name, DimensionType value ) { if ( value != null ) { if ( HTMLTags.ATTR_MIN_HEIGHT.equals( name ) ) { //To solve the problem that IE do not support min-height. //Use this way to make Firefox and IE both work well. content.append( " height: auto !important; height: " ); content.append( value.toString( ) ); content.append( "; min-height: " ); content.append( value.toString( ) ); content.append( ';' ); } else { content.append( ' ' ); content.append( name ); content.append( ": " ); content.append( value.toString( ) ); content.append( ';' ); } } } protected IStyle getElementStyle( IContent content ) { IStyle style = content.getInlineStyle( ); if ( style == null || style.isEmpty( ) ) { return null; } return style; } // FIXME: code review: We should remove all the codes about the x and y. // BIRT doesn't supoort the x and y now. /** * Checks whether the element is block, inline or inline-block level. In * BIRT, the absolute positioning model is used and a box is explicitly * offset with respect to its containing block. When an element's x or y is * set, it will be treated as a block level element regardless of the * 'Display' property set in style. When designating width or height value * to an inline element, it will be treated as inline-block. * * @param x * Specifies how far a box's left margin edge is offset to the * right of the left edge of the box's containing block. * @param y * Specifies how far an absolutely positioned box's top margin * edge is offset below the top edge of the box's containing * block. * @param width * The width of the element. * @param height * The height of the element. * @param style * The <code>IStyle</code> object. * @return The display type of the element. */ public CSSValue getElementDisplay( DimensionType x, DimensionType y, DimensionType width, DimensionType height, IStyle style ) { CSSValue display = null; if ( style != null ) { display = style.getProperty( IStyle.STYLE_DISPLAY ); } if ( IStyle.NONE_VALUE == display ) { return IStyle.NONE_VALUE; } if ( x != null || y != null ) { return IStyle.BLOCK_VALUE; } else if( IStyle.INLINE_VALUE == display ) { if ( width != null || height != null ) { return IStyle.INLINE_BLOCK_VALUE; } else { return IStyle.INLINE_VALUE; } } return IStyle.BLOCK_VALUE; } /** * Checks whether the element is block, inline or inline-block level. 
In * BIRT, the absolute positioning model is used and a box is explicitly * offset with respect to its containing block. When an element's x or y is * set, it will be treated as a block level element regardless of the * 'Display' property set in style. When designating width or height value * to an inline element, it will be treated as inline-block. * * @param x * Specifies how far a box's left margin edge is offset to the * right of the left edge of the box's containing block. * @param y * Specifies how far an absolutely positioned box's top margin * edge is offset below the top edge of the box's containing * block. * @param width * The width of the element. * @param height * The height of the element. * @param style * The <code>IStyle</code> object. * @return The display type of the element. */ public int getElementType( DimensionType x, DimensionType y, DimensionType width, DimensionType height, IStyle style ) { int type = 0; String display = null; if ( style != null ) { display = style.getDisplay( ); } if ( EngineIRConstants.DISPLAY_NONE.equalsIgnoreCase( display ) ) { type |= HTMLEmitterUtil.DISPLAY_NONE; } if ( x != null || y != null ) { return type | HTMLEmitterUtil.DISPLAY_BLOCK; } else if ( EngineIRConstants.DISPLAY_INLINE.equalsIgnoreCase( display ) ) { type |= HTMLEmitterUtil.DISPLAY_INLINE; if ( width != null || height != null ) { type |= HTMLEmitterUtil.DISPLAY_INLINE_BLOCK; } return type; } return type | HTMLEmitterUtil.DISPLAY_BLOCK; } /** * Checks whether the element is block, inline or inline-block level. In * BIRT, the absolute positioning model is used and a box is explicitly * offset with respect to its containing block. When an element's x or y is * set, it will be treated as a block level element regardless of the * 'Display' property set in style. When designating width or height value * to an inline element, it will be treated as inline-block. * * @param x * Specifies how far a box's left margin edge is offset to the * right of the left edge of the box's containing block. * @param y * Specifies how far an absolutely positioned box's top margin * edge is offset below the top edge of the box's containing * block. * @param width * The width of the element. * @param height * The height of the element. * @param style * The <code>IStyle</code> object. * @return The display type of the element. */ public int getTextElementType( DimensionType x, DimensionType y, DimensionType width, DimensionType height, IStyle style ) { int type = 0; String display = null; if ( style != null ) { display = style.getDisplay( ); } if ( EngineIRConstants.DISPLAY_NONE.equalsIgnoreCase( display ) ) { type |= HTMLEmitterUtil.DISPLAY_NONE; } if ( x != null || y != null ) { return type | HTMLEmitterUtil.DISPLAY_BLOCK; } else if ( EngineIRConstants.DISPLAY_INLINE.equalsIgnoreCase( display ) ) { type |= HTMLEmitterUtil.DISPLAY_INLINE; //Inline text doesn't support height. 
if ( width != null ) { type |= HTMLEmitterUtil.DISPLAY_INLINE_BLOCK; } return type; } return type | HTMLEmitterUtil.DISPLAY_BLOCK; } /** * adds the default table styles * * @param styleBuffer */ protected void addDefaultTableStyles( StringBuffer styleBuffer ) { styleBuffer.append( "border-collapse: collapse; empty-cells: show;" ); //$NON-NLS-1$ } /** * Checks the 'CanShrink' property and sets the width and height according * to the table below: * <p> * <table border=0 cellspacing=3 cellpadding=0 summary="Chart showing * symbol, location, localized, and meaning."> * <tr bgcolor="#ccccff"> * <th align=left>CanShrink</th> * <th align=left>Element Type</th> * <th align=left>Width</th> * <th align=left>Height</th> * </tr> * <tr valign=middle> * <td rowspan="2"><code>true(by default)</code></td> * <td>in-line</td> * <td>ignor</td> * <td>set</td> * </tr> * <tr valign=top bgcolor="#eeeeff"> * <td><code>block</code></td> * <td>set</td> * <td>ignor</td> * </tr> * <tr valign=middle> * <td rowspan="2" bgcolor="#eeeeff"><code>false</code></td> * <td>in-line</td> * <td>replaced by 'min-width' property</td> * <td>set</td> * </tr> * <tr valign=top bgcolor="#eeeeff"> * <td><code>block</code></td> * <td>set</td> * <td>replaced by 'min-height' property</td> * </tr> * </table> * * @param type * The display type of the element. * @param style * The style of an element. * @param height * The height property. * @param width * The width property. * @param styleBuffer * The <code>StringBuffer</code> object that returns 'style' * content. * @return A <code>boolean</code> value indicating 'Can-Shrink' property * is set to <code>true</code> or not. */ // protected boolean handleShrink( CSSValue display, IStyle style, // DimensionType height, DimensionType width, StringBuffer styleBuffer ) // { // boolean canShrink = style != null // && "true".equalsIgnoreCase( style.getCanShrink( ) ); //$NON-NLS-1$ // // if ( IStyle.BLOCK_VALUE == display ) // { // buildSize( styleBuffer, HTMLTags.ATTR_WIDTH, width ); // if ( !canShrink ) // { // buildSize( styleBuffer, HTMLTags.ATTR_MIN_HEIGHT, height ); // } // } // else if ( IStyle.INLINE_VALUE == display // || IStyle.INLINE_BLOCK_VALUE == display ) // { // buildSize( styleBuffer, HTMLTags.ATTR_HEIGHT, height ); // if ( !canShrink ) // { // buildSize( styleBuffer, HTMLTags.ATTR_MIN_WIDTH, width ); // } // } // // return canShrink; // } /** * Checks the 'CanShrink' property and sets the width and height according * to the table below: * <p> * <table border=0 cellspacing=3 cellpadding=0 summary="Chart showing * symbol, location, localized, and meaning."> * <tr bgcolor="#ccccff"> * <th align=left>CanShrink</th> * <th align=left>Element Type</th> * <th align=left>Width</th> * <th align=left>Height</th> * </tr> * <tr valign=middle> * <td rowspan="2"><code>true(by default)</code></td> * <td>in-line</td> * <td>ignor</td> * <td>set</td> * </tr> * <tr valign=top bgcolor="#eeeeff"> * <td><code>block</code></td> * <td>set</td> * <td>ignor</td> * </tr> * <tr valign=middle> * <td rowspan="2" bgcolor="#eeeeff"><code>false</code></td> * <td>in-line</td> * <td>replaced by 'min-width' property</td> * <td>set</td> * </tr> * <tr valign=top bgcolor="#eeeeff"> * <td><code>block</code></td> * <td>set</td> * <td>replaced by 'min-height' property</td> * </tr> * </table> * * @param type * The display type of the element. * @param style * The style of an element. * @param height * The height property. * @param width * The width property. 
* @param styleBuffer * The <code>StringBuffer</code> object that returns 'style' * content. * @return A <code>boolean</code> value indicating 'Can-Shrink' property * is set to <code>true</code> or not. */ protected boolean handleShrink( int type, IStyle style, DimensionType height, DimensionType width, StringBuffer styleBuffer ) { boolean canShrink = style != null && "true".equalsIgnoreCase( style.getCanShrink( ) ); //$NON-NLS-1$ if ( ( type & HTMLEmitterUtil.DISPLAY_BLOCK ) > 0 ) { buildSize( styleBuffer, HTMLTags.ATTR_WIDTH, width ); if ( !canShrink ) { buildSize( styleBuffer, HTMLTags.ATTR_MIN_HEIGHT, height ); } } else if ( ( type & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) { buildSize( styleBuffer, HTMLTags.ATTR_HEIGHT, height ); if ( !canShrink ) { buildSize( styleBuffer, HTMLTags.ATTR_MIN_WIDTH, width ); } } else { assert false; } return canShrink; } /** * Checks the 'CanShrink' property and sets the width and height according * @param type * The display type of the element. * @param style * The style of an element. * @param height * The height property. * @param width * The width property. * @param styleBuffer * The <code>StringBuffer</code> object that returns 'style' * content. * @return A <code>boolean</code> value indicating 'Can-Shrink' property * is set to <code>true</code> or not. */ protected boolean handleTextShrink( int type, IStyle style, DimensionType height, DimensionType width, StringBuffer styleBuffer ) { boolean canShrink = style != null && "true".equalsIgnoreCase( style.getCanShrink( ) ); //$NON-NLS-1$ if ( ( type & HTMLEmitterUtil.DISPLAY_BLOCK ) > 0 ) { buildSize( styleBuffer, HTMLTags.ATTR_WIDTH, width ); if ( !canShrink ) { buildSize( styleBuffer, HTMLTags.ATTR_MIN_HEIGHT, height ); } } else if ( ( type & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) { //Inline text doesn't support height. Inline-block text supports height. //The user can use line height to implement the height effect of inline text. if ( ( type & HTMLEmitterUtil.DISPLAY_INLINE_BLOCK ) > 0 ) { buildSize( styleBuffer, HTMLTags.ATTR_HEIGHT, height ); } if ( !canShrink ) { buildSize( styleBuffer, HTMLTags.ATTR_WIDTH, width ); } } else { assert false; } return canShrink; } // FIXME: code review: implement the openContainerTag and closeContainerTag // in the HTMLReportEmitter directly. /** * Open the container tag. */ public void openContainerTag( IContainerContent container ) { DimensionType x = container.getX( ); DimensionType y = container.getY( ); DimensionType width = container.getWidth( ); DimensionType height = container.getHeight( ); int display = getElementType( x, y, width, height, container.getStyle( ) ); // The display value is pushed in Stack. It will be popped when close the container tag. containerDisplayStack.push( new Integer( display ) ); if ( ( ( display & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) || ( ( display & HTMLEmitterUtil.DISPLAY_INLINE_BLOCK ) > 0 ) ) { // Open the inlineBox tag when implement the inline box. openInlineBoxTag( ); //FIXME: code review: We should implement the shrink here. } writer.openTag( HTMLTags.TAG_DIV ); } /** * Close the container tag. */ public void closeContainerTag( ) { writer.closeTag( HTMLTags.TAG_DIV ); int display = ( (Integer) containerDisplayStack.pop( ) ).intValue( ); if ( ( ( display & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) || ( ( display & HTMLEmitterUtil.DISPLAY_INLINE_BLOCK ) > 0 ) ) { // Close the inlineBox tag when implement the inline box. closeInlineBoxTag( ); } } /** * Open the tag when implement the inline box. 
*/ protected void openInlineBoxTag( ) { writer.openTag( HTMLTags.TAG_DIV ); // For the IE the display value will be "inline", because the IE can't // identify the "!important". For the Firefox 1.5 and 2 the display // value will be "-moz-inline-box", because only the Firefox 3 implement // the "inline-block". For the Firefox 3 the display value will be // "inline-block". writer.attribute( HTMLTags.ATTR_STYLE, " display:-moz-inline-box !important; display:inline-block !important; display:inline;" ); writer.openTag( HTMLTags.TAG_TABLE ); writer.openTag( HTMLTags.TAG_TR ); writer.openTag( HTMLTags.TAG_TD ); } /** * Close the tag when implement the inline box. */ protected void closeInlineBoxTag( ) { writer.closeTag( HTMLTags.TAG_TD ); writer.closeTag( HTMLTags.TAG_TR ); writer.closeTag( HTMLTags.TAG_TABLE ); writer.closeTag( HTMLTags.TAG_DIV ); } /** * Set the display property to style. * * @param display * The display type. * @param mask * The mask. * @param styleBuffer * The <code>StringBuffer</code> object that returns 'style' * content. */ protected void setDisplayProperty( int display, int mask, StringBuffer styleBuffer ) { int flag = display & mask; if ( ( display & HTMLEmitterUtil.DISPLAY_NONE ) > 0 ) { styleBuffer.append( "display: none;" ); //$NON-NLS-1$ } else if ( flag > 0 ) { if ( ( flag & HTMLEmitterUtil.DISPLAY_BLOCK ) > 0 ) { styleBuffer.append( "display: block;" ); //$NON-NLS-1$ } else if ( ( flag & HTMLEmitterUtil.DISPLAY_INLINE_BLOCK ) > 0 ) { styleBuffer.append( "display: inline-block;" ); //$NON-NLS-1$ } else if ( ( flag & HTMLEmitterUtil.DISPLAY_INLINE ) > 0 ) { styleBuffer.append( "display: inline;" ); //$NON-NLS-1$ } } } /** * Open the vertical-align box tag if the element needs implementing the * vertical-align. */ // FIXME: code review: Because only the text element and foreign element use // this method, so the method name should be changed to // handleTextVerticalAlignBegin // FIXME: code review of text: Inline text doesn't need outputting the // vertical-align. Block and inline-block texts need outputting the // vertical-align. public void handleVerticalAlignBegin( IContent element ) { IStyle style = element.getStyle( ); CSSValue vAlign = style.getProperty( IStyle.STYLE_VERTICAL_ALIGN ); CSSValue canShrink = style.getProperty( IStyle.STYLE_CAN_SHRINK ); DimensionType height = element.getHeight( ); // FIXME: code review: the top value of the vAlign shouldn't be outptted too. if ( vAlign != null && vAlign != IStyle.BASELINE_VALUE && height != null && canShrink != IStyle.TRUE_VALUE ) { // implement vertical align. writer.openTag( HTMLTags.TAG_TABLE ); StringBuffer nestingTableStyleBuffer = new StringBuffer( ); nestingTableStyleBuffer.append( " width:100%; height:" ); nestingTableStyleBuffer.append( height.toString( ) ); writer.attribute( HTMLTags.ATTR_STYLE, nestingTableStyleBuffer.toString( ) ); writer.openTag( HTMLTags.TAG_TR ); writer.openTag( HTMLTags.TAG_TD ); StringBuffer textStyleBuffer = new StringBuffer( ); textStyleBuffer.append( " vertical-align:" ); textStyleBuffer.append( vAlign.getCssText( ) ); textStyleBuffer.append( ";" ); writer.attribute( HTMLTags.ATTR_STYLE, textStyleBuffer.toString( ) ); } } /** * Close the vertical-align box tag if the element needs implementing the * vertical-align. 
*/ public void handleVerticalAlignEnd( IContent element ) { IStyle style = element.getStyle( ); CSSValue vAlign = style.getProperty( IStyle.STYLE_VERTICAL_ALIGN ); CSSValue canShrink = style.getProperty( IStyle.STYLE_CAN_SHRINK ); DimensionType height = element.getHeight( ); if ( vAlign != null && vAlign != IStyle.BASELINE_VALUE && height != null && canShrink != IStyle.TRUE_VALUE ) { writer.closeTag( HTMLTags.TAG_TD ); writer.closeTag( HTMLTags.TAG_TR ); writer.closeTag( HTMLTags.TAG_TABLE ); } } }
Fix the bugzilla bug 272280.
engine/org.eclipse.birt.report.engine.emitter.html/src/org/eclipse/birt/report/engine/emitter/html/HTMLEmitter.java
Fix the bugzilla bug 272280.
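The visible difference between the new and old contents in this record (the fix for bug 272280) is in handleTextShrink: in the updated version a text item with an explicit width also gets " overflow: hidden;" appended after the width, and the width size is only written when width is non-null. A small illustration with made-up dimension values (the variables style, width and height are assumed to be pre-built; the output strings follow buildSize as shown in the file above):

StringBuffer styleBuffer = new StringBuffer();
// Illustrative call: block-level text, width 30.0mm, height 10.0mm, CanShrink = false.
handleTextShrink(HTMLEmitterUtil.DISPLAY_BLOCK, style, height, width, styleBuffer);
// Old contents yield: " width: 30.0mm; height: auto !important; height: 10.0mm; min-height: 10.0mm;"
// New contents yield: " width: 30.0mm; overflow: hidden; height: auto !important; height: 10.0mm; min-height: 10.0mm;"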
Java
epl-1.0
8183a531eaf2633120bc62c3e00315e3cb1cb665
0
codenvy/plugin-datasource,codenvy/plugin-datasource
/* * CODENVY CONFIDENTIAL * __________________ * * [2013] - [2014] Codenvy, S.A. * All Rights Reserved. * * NOTICE: All information contained herein is, and remains * the property of Codenvy S.A. and its suppliers, * if any. The intellectual and technical concepts contained * herein are proprietary to Codenvy S.A. * and its suppliers and may be covered by U.S. and Foreign Patents, * patents in process, and are protected by trade secret or copyright law. * Dissemination of this information or reproduction of this material * is strictly forbidden unless prior written permission is obtained * from Codenvy S.A.. */ package com.codenvy.ide.ext.datasource.client.sqllauncher; import com.codenvy.ide.api.editor.EditorPartPresenter; import com.codenvy.ide.api.preferences.PreferencesManager; import com.codenvy.ide.api.ui.workspace.AbstractPartPresenter; import com.codenvy.ide.ext.datasource.client.sqleditor.SqlEditorProvider; import com.codenvy.ide.util.loging.Log; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.client.ui.AcceptsOneWidget; import com.google.inject.Inject; public class SqlRequestLauncherPresenter extends AbstractPartPresenter implements SqlRequestLauncherView.ActionDelegate { /** Preference property name for default result limit. */ private static final String PREFERENCE_KEY_DEFAULT_REQUEST_LIMIT = "SqlEditor_default_request_limit"; /** Default value for request limit (when no pref is set). */ private static final int DEFAULT_REQUEST_LIMIT = 20; /** The matching view. */ private final SqlRequestLauncherView view; /** The i18n-able constants. */ private final SqlRequestLauncherConstants constants; private String selectedDatasourceId = null; private int resultLimit = DEFAULT_REQUEST_LIMIT; private EditorPartPresenter editor; @Inject public SqlRequestLauncherPresenter(final SqlRequestLauncherView view, final SqlRequestLauncherConstants constants, final PreferencesManager preferencesManager, final SqlEditorProvider sqlEditorProvider) { this.view = view; this.view.setDelegate(this); this.constants = constants; this.editor = sqlEditorProvider.getEditor(); final String prefRequestLimit = preferencesManager.getValue(PREFERENCE_KEY_DEFAULT_REQUEST_LIMIT); if (prefRequestLimit != null) { try { int prefValue = Integer.valueOf(prefRequestLimit); if (prefValue > 0) { this.resultLimit = prefValue; } else { Log.warn(SqlRequestLauncherPresenter.class, "negative value stored in preference " + PREFERENCE_KEY_DEFAULT_REQUEST_LIMIT); } } catch (final NumberFormatException e) { StringBuilder sb = new StringBuilder("Preference stored in ") .append(PREFERENCE_KEY_DEFAULT_REQUEST_LIMIT) .append(" is not an integer (") .append(resultLimit) .append(")."); Log.warn(SqlRequestLauncherPresenter.class, sb.toString()); } } // push the request limit value to the view this.view.setResultLimit(this.resultLimit); } @Override public String getTitle() { return this.constants.sqlEditorWindowTitle(); } @Override public ImageResource getTitleImage() { return null; } @Override public String getTitleToolTip() { return null; } @Override public void go(final AcceptsOneWidget container) { container.setWidget(view); editor.go(this.view.getEditorZone()); } @Override public boolean onClose() { return this.editor.onClose(); } @Override public void onOpen() { this.editor.onOpen(); } @Override public void datasourceChanged(final String newDataSourceId) { this.selectedDatasourceId = newDataSourceId; } @Override public void resultLimitChanged(final int newResultLimit) { if (newResultLimit > 0) { 
this.resultLimit = newResultLimit; } else { this.view.setResultLimit(this.resultLimit); } } @Override public void executeRequested(final String request) { } }
codenvy-ext-datasource-core/src/main/java/com/codenvy/ide/ext/datasource/client/sqllauncher/SqlRequestLauncherPresenter.java
/* * CODENVY CONFIDENTIAL * __________________ * * [2013] - [2014] Codenvy, S.A. * All Rights Reserved. * * NOTICE: All information contained herein is, and remains * the property of Codenvy S.A. and its suppliers, * if any. The intellectual and technical concepts contained * herein are proprietary to Codenvy S.A. * and its suppliers and may be covered by U.S. and Foreign Patents, * patents in process, and are protected by trade secret or copyright law. * Dissemination of this information or reproduction of this material * is strictly forbidden unless prior written permission is obtained * from Codenvy S.A.. */ package com.codenvy.ide.ext.datasource.client.sqllauncher; import com.codenvy.ide.api.editor.EditorPartPresenter; import com.codenvy.ide.api.preferences.PreferencesManager; import com.codenvy.ide.api.ui.workspace.AbstractPartPresenter; import com.codenvy.ide.ext.datasource.client.sqleditor.SqlEditorProvider; import com.codenvy.ide.util.loging.Log; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.client.ui.AcceptsOneWidget; import com.google.inject.Inject; public class SqlRequestLauncherPresenter extends AbstractPartPresenter implements SqlRequestLauncherView.ActionDelegate { /** Preference property name for default result limit. */ private static final String PREFERENCE_KEY_DEFAULT_REQUEST_LIMIT = "SqlEditor_default_request_limit"; /** Default value for request limit (when no pref is set). */ private static final int DEFAULT_REQUEST_LIMIT = 20; /** The matching view. */ private final SqlRequestLauncherView view; /** The i18n-able constants. */ private final SqlRequestLauncherConstants constants; private String selectedDatasourceId = null; private int resultLimit = DEFAULT_REQUEST_LIMIT; private EditorPartPresenter editor; @Inject public SqlRequestLauncherPresenter(final SqlRequestLauncherView view, final SqlRequestLauncherConstants constants, final PreferencesManager preferencesManager, final SqlEditorProvider sqlEditorProvider) { this.view = view; this.view.setDelegate(this); this.constants = constants; this.editor = sqlEditorProvider.getEditor(); final String prefRequestLimit = preferencesManager.getValue(PREFERENCE_KEY_DEFAULT_REQUEST_LIMIT); if (prefRequestLimit != null) { try { int prefValue = Integer.valueOf(prefRequestLimit); if (prefValue > 0) { this.resultLimit = prefValue; } else { Log.warn(SqlRequestLauncherPresenter.class, "negative value stored in preference " + PREFERENCE_KEY_DEFAULT_REQUEST_LIMIT); } } catch (final NumberFormatException e) { StringBuilder sb = new StringBuilder("Preference stored in ") .append(PREFERENCE_KEY_DEFAULT_REQUEST_LIMIT) .append(" is not an integer (") .append(resultLimit) .append(")."); Log.warn(SqlRequestLauncherPresenter.class, sb.toString()); } } } @Override public String getTitle() { return this.constants.sqlEditorWindowTitle(); } @Override public ImageResource getTitleImage() { return null; } @Override public String getTitleToolTip() { return null; } @Override public void go(final AcceptsOneWidget container) { container.setWidget(view); editor.go(this.view.getEditorZone()); } @Override public boolean onClose() { return this.editor.onClose(); } @Override public void onOpen() { this.editor.onOpen(); } @Override public void datasourceChanged(final String newDataSourceId) { this.selectedDatasourceId = newDataSourceId; } @Override public void resultLimitChanged(final int newResultLimit) { if (newResultLimit > 0) { this.resultLimit = newResultLimit; } else { this.view.setResultLimit(this.resultLimit); } } 
@Override public void executeRequested(final String request) { } }
Push the request limit value to the view
codenvy-ext-datasource-core/src/main/java/com/codenvy/ide/ext/datasource/client/sqllauncher/SqlRequestLauncherPresenter.java
Push the request limit value to the view
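The only change recorded here is the new this.view.setResultLimit(this.resultLimit) call at the end of the presenter constructor. A minimal test sketch that would pin that behaviour down, assuming plain JUnit 4 plus Mockito (the test name, mocked collaborators and the preference value "50" are assumptions, not part of the record; the project may test GWT client code differently):

// Hypothetical test sketch, not part of the committed change.
// Assumes: import static org.mockito.Mockito.*; import org.junit.Test;
@Test
public void constructorPushesResultLimitToView() {
    SqlRequestLauncherView view = mock(SqlRequestLauncherView.class);
    SqlRequestLauncherConstants constants = mock(SqlRequestLauncherConstants.class);
    PreferencesManager preferences = mock(PreferencesManager.class);
    // Deep stubs so sqlEditorProvider.getEditor() returns a usable mock editor.
    SqlEditorProvider editorProvider = mock(SqlEditorProvider.class, RETURNS_DEEP_STUBS);

    // A positive stored preference should override the default limit of 20.
    when(preferences.getValue("SqlEditor_default_request_limit")).thenReturn("50");

    new SqlRequestLauncherPresenter(view, constants, preferences, editorProvider);

    // The constructor is now expected to forward the resolved limit to the view.
    verify(view).setResultLimit(50);
}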
Java
mpl-2.0
9a8487645b41775627e81c9ac347438f9ad6a8d0
0
milankarunarathne/openmrs-core,ldf92/openmrs-core,milankarunarathne/openmrs-core,kabariyamilind/openMRSDEV,maany/openmrs-core,lilo2k/openmrs-core,sadhanvejella/openmrs,MuhammadSafwan/Stop-Button-Ability,jcantu1988/openmrs-core,macorrales/openmrs-core,dlahn/openmrs-core,jamesfeshner/openmrs-module,naraink/openmrs-core,Negatu/openmrs-core,aj-jaswanth/openmrs-core,jembi/openmrs-core,alexei-grigoriev/openmrs-core,nilusi/Legacy-UI,MitchellBot/openmrs-core,michaelhofer/openmrs-core,aj-jaswanth/openmrs-core,iLoop2/openmrs-core,rbtracker/openmrs-core,aboutdata/openmrs-core,jcantu1988/openmrs-core,kristopherschmidt/openmrs-core,ssmusoke/openmrs-core,Negatu/openmrs-core,sadhanvejella/openmrs,joansmith/openmrs-core,MuhammadSafwan/Stop-Button-Ability,koskedk/openmrs-core,Negatu/openmrs-core,Winbobob/openmrs-core,MitchellBot/openmrs-core,Winbobob/openmrs-core,aboutdata/openmrs-core,preethi29/openmrs-core,maekstr/openmrs-core,chethandeshpande/openmrs-core,shiangree/openmrs-core,dcmul/openmrs-core,jvena1/openmrs-core,nilusi/Legacy-UI,maany/openmrs-core,aboutdata/openmrs-core,trsorsimoII/openmrs-core,jcantu1988/openmrs-core,asifur77/openmrs,MuhammadSafwan/Stop-Button-Ability,hoquangtruong/TestMylyn,Ch3ck/openmrs-core,kckc/openmrs-core,lbl52001/openmrs-core,maany/openmrs-core,sadhanvejella/openmrs,donaldgavis/openmrs-core,dlahn/openmrs-core,Ch3ck/openmrs-core,chethandeshpande/openmrs-core,pselle/openmrs-core,sravanthi17/openmrs-core,kigsmtua/openmrs-core,joansmith/openmrs-core,jamesfeshner/openmrs-module,michaelhofer/openmrs-core,sadhanvejella/openmrs,WANeves/openmrs-core,maekstr/openmrs-core,Openmrs-joel/openmrs-core,trsorsimoII/openmrs-core,vinayvenu/openmrs-core,macorrales/openmrs-core,chethandeshpande/openmrs-core,Openmrs-joel/openmrs-core,lilo2k/openmrs-core,ern2/openmrs-core,kigsmtua/openmrs-core,dcmul/openmrs-core,koskedk/openmrs-core,MuhammadSafwan/Stop-Button-Ability,dcmul/openmrs-core,ldf92/openmrs-core,kigsmtua/openmrs-core,Negatu/openmrs-core,hoquangtruong/TestMylyn,dlahn/openmrs-core,sintjuri/openmrs-core,kckc/openmrs-core,kigsmtua/openmrs-core,kigsmtua/openmrs-core,iLoop2/openmrs-core,AbhijitParate/openmrs-core,kabariyamilind/openMRSDEV,ssmusoke/openmrs-core,maekstr/openmrs-core,jembi/openmrs-core,Ch3ck/openmrs-core,alexei-grigoriev/openmrs-core,ldf92/openmrs-core,jvena1/openmrs-core,Openmrs-joel/openmrs-core,shiangree/openmrs-core,shiangree/openmrs-core,geoff-wasilwa/openmrs-core,siddharthkhabia/openmrs-core,pselle/openmrs-core,milankarunarathne/openmrs-core,ern2/openmrs-core,chethandeshpande/openmrs-core,jvena1/openmrs-core,pselle/openmrs-core,prisamuel/openmrs-core,rbtracker/openmrs-core,prisamuel/openmrs-core,pselle/openmrs-core,Negatu/openmrs-core,Winbobob/openmrs-core,trsorsimoII/openmrs-core,jamesfeshner/openmrs-module,nilusi/Legacy-UI,donaldgavis/openmrs-core,kckc/openmrs-core,aboutdata/openmrs-core,foolchan2556/openmrs-core,dlahn/openmrs-core,Ch3ck/openmrs-core,MuhammadSafwan/Stop-Button-Ability,nilusi/Legacy-UI,lilo2k/openmrs-core,andyvand/OpenMRS,vinayvenu/openmrs-core,kristopherschmidt/openmrs-core,jcantu1988/openmrs-core,macorrales/openmrs-core,lilo2k/openmrs-core,iLoop2/openmrs-core,WANeves/openmrs-core,jvena1/openmrs-core,iLoop2/openmrs-core,ssmusoke/openmrs-core,jvena1/openmrs-core,foolchan2556/openmrs-core,maany/openmrs-core,asifur77/openmrs,koskedk/openmrs-core,koskedk/openmrs-core,dcmul/openmrs-core,preethi29/openmrs-core,asifur77/openmrs,MuhammadSafwan/Stop-Button-Ability,naraink/openmrs-core,prisamuel/openmrs-core,joansmith/openmrs-core,vinayvenu/openmrs-core,michaelhofer/
openmrs-core,ldf92/openmrs-core,lilo2k/openmrs-core,shiangree/openmrs-core,alexwind26/openmrs-core,andyvand/OpenMRS,prisamuel/openmrs-core,macorrales/openmrs-core,maekstr/openmrs-core,iLoop2/openmrs-core,WANeves/openmrs-core,prisamuel/openmrs-core,kabariyamilind/openMRSDEV,Ch3ck/openmrs-core,Winbobob/openmrs-core,alexwind26/openmrs-core,kristopherschmidt/openmrs-core,spereverziev/openmrs-core,sravanthi17/openmrs-core,Negatu/openmrs-core,donaldgavis/openmrs-core,lbl52001/openmrs-core,siddharthkhabia/openmrs-core,jembi/openmrs-core,jamesfeshner/openmrs-module,preethi29/openmrs-core,rbtracker/openmrs-core,hoquangtruong/TestMylyn,chethandeshpande/openmrs-core,WANeves/openmrs-core,donaldgavis/openmrs-core,andyvand/OpenMRS,ssmusoke/openmrs-core,jembi/openmrs-core,dcmul/openmrs-core,WANeves/openmrs-core,macorrales/openmrs-core,siddharthkhabia/openmrs-core,naraink/openmrs-core,preethi29/openmrs-core,jembi/openmrs-core,sravanthi17/openmrs-core,kristopherschmidt/openmrs-core,shiangree/openmrs-core,lbl52001/openmrs-core,joansmith/openmrs-core,ssmusoke/openmrs-core,sintjuri/openmrs-core,milankarunarathne/openmrs-core,foolchan2556/openmrs-core,AbhijitParate/openmrs-core,aboutdata/openmrs-core,koskedk/openmrs-core,iLoop2/openmrs-core,aj-jaswanth/openmrs-core,sintjuri/openmrs-core,nilusi/Legacy-UI,nilusi/Legacy-UI,AbhijitParate/openmrs-core,trsorsimoII/openmrs-core,milankarunarathne/openmrs-core,foolchan2556/openmrs-core,michaelhofer/openmrs-core,naraink/openmrs-core,spereverziev/openmrs-core,donaldgavis/openmrs-core,sadhanvejella/openmrs,geoff-wasilwa/openmrs-core,koskedk/openmrs-core,kabariyamilind/openMRSDEV,spereverziev/openmrs-core,lilo2k/openmrs-core,Winbobob/openmrs-core,hoquangtruong/TestMylyn,andyvand/OpenMRS,aj-jaswanth/openmrs-core,alexei-grigoriev/openmrs-core,andyvand/OpenMRS,foolchan2556/openmrs-core,siddharthkhabia/openmrs-core,AbhijitParate/openmrs-core,sintjuri/openmrs-core,geoff-wasilwa/openmrs-core,alexei-grigoriev/openmrs-core,geoff-wasilwa/openmrs-core,trsorsimoII/openmrs-core,alexwind26/openmrs-core,prisamuel/openmrs-core,AbhijitParate/openmrs-core,alexei-grigoriev/openmrs-core,lbl52001/openmrs-core,michaelhofer/openmrs-core,dcmul/openmrs-core,sadhanvejella/openmrs,kabariyamilind/openMRSDEV,sravanthi17/openmrs-core,milankarunarathne/openmrs-core,spereverziev/openmrs-core,alexei-grigoriev/openmrs-core,hoquangtruong/TestMylyn,asifur77/openmrs,rbtracker/openmrs-core,Winbobob/openmrs-core,Openmrs-joel/openmrs-core,vinayvenu/openmrs-core,sintjuri/openmrs-core,AbhijitParate/openmrs-core,jamesfeshner/openmrs-module,alexwind26/openmrs-core,vinayvenu/openmrs-core,MitchellBot/openmrs-core,alexwind26/openmrs-core,siddharthkhabia/openmrs-core,lbl52001/openmrs-core,siddharthkhabia/openmrs-core,pselle/openmrs-core,foolchan2556/openmrs-core,sravanthi17/openmrs-core,geoff-wasilwa/openmrs-core,ern2/openmrs-core,aboutdata/openmrs-core,aj-jaswanth/openmrs-core,jcantu1988/openmrs-core,Openmrs-joel/openmrs-core,spereverziev/openmrs-core,kckc/openmrs-core,shiangree/openmrs-core,kckc/openmrs-core,maekstr/openmrs-core,kckc/openmrs-core,andyvand/OpenMRS,maekstr/openmrs-core,asifur77/openmrs,ldf92/openmrs-core,WANeves/openmrs-core,preethi29/openmrs-core,maany/openmrs-core,joansmith/openmrs-core,MitchellBot/openmrs-core,MitchellBot/openmrs-core,pselle/openmrs-core,hoquangtruong/TestMylyn,spereverziev/openmrs-core,naraink/openmrs-core,jembi/openmrs-core,lbl52001/openmrs-core,kristopherschmidt/openmrs-core,rbtracker/openmrs-core,kigsmtua/openmrs-core,dlahn/openmrs-core,ern2/openmrs-core,naraink/openmrs-core,ern2
/openmrs-core,sintjuri/openmrs-core
/** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.web.controller.patient; import java.util.ArrayList; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.Concept; import org.openmrs.Location; import org.openmrs.Obs; import org.openmrs.Patient; import org.openmrs.PatientIdentifier; import org.openmrs.PatientIdentifierType; import org.openmrs.PatientIdentifierType.LocationBehavior; import org.openmrs.Person; import org.openmrs.PersonAddress; import org.openmrs.PersonAttribute; import org.openmrs.PersonName; import org.openmrs.Relationship; import org.openmrs.RelationshipType; import org.openmrs.api.APIException; import org.openmrs.api.context.Context; import org.openmrs.util.LocationUtility; import org.openmrs.util.OpenmrsConstants; import org.openmrs.util.OpenmrsUtil; import org.openmrs.validator.PatientValidator; import org.openmrs.web.WebConstants; import org.openmrs.web.controller.person.PersonFormController; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.ModelMap; import org.springframework.validation.BindException; import org.springframework.validation.BindingResult; import org.springframework.validation.Errors; import org.springframework.validation.ObjectError; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.context.request.WebRequest; /** * This controller is used for the "mini"/"new"/"short" patient form. 
Only key/important attributes * for the patient are displayed and allowed to be edited * * @see org.openmrs.web.controller.patient.PatientFormController */ @Controller public class ShortPatientFormController { private static final Log log = LogFactory.getLog(ShortPatientFormController.class); private static final String SHORT_PATIENT_FORM_URL = "/admin/patients/shortPatientForm"; private static final String FIND_PATIENT_PAGE = "findPatient"; private static final String PATIENT_DASHBOARD_URL = "/patientDashboard.form"; @Autowired PatientValidator patientValidator; /** * @return */ @RequestMapping(method = RequestMethod.GET, value = SHORT_PATIENT_FORM_URL) public void showForm() { } @ModelAttribute("patientModel") public ShortPatientModel getPatientModel(@RequestParam(value = "patientId", required = false) Integer patientId, ModelMap model, WebRequest request) { Patient patient; if (patientId != null) { patient = Context.getPatientService().getPatientOrPromotePerson(patientId); if (patient == null) { throw new IllegalArgumentException("No patient or person with the given id"); } } else { // we may have some details to add to a blank patient patient = new Patient(); String name = request.getParameter("addName"); if (!StringUtils.isBlank(name)) { String gender = request.getParameter("addGender"); String date = request.getParameter("addBirthdate"); String age = request.getParameter("addAge"); PersonFormController.getMiniPerson(patient, name, gender, date, age); } } // if we have an existing personName, cache the original name so that we // can use it to // track changes in givenName, middleName, familyName, will also use // it to restore the original values if (patient.getPersonName() != null && patient.getPersonName().getId() != null) model.addAttribute("personNameCache", PersonName.newInstance(patient.getPersonName())); else model.addAttribute("personNameCache", new PersonName()); // cache a copy of the person address for comparison in case the name is // edited if (patient.getPersonAddress() != null && patient.getPersonAddress().getId() != null) model.addAttribute("personAddressCache", patient.getPersonAddress().clone()); else model.addAttribute("personAddressCache", new PersonAddress()); String propCause = Context.getAdministrationService().getGlobalProperty("concept.causeOfDeath"); Concept conceptCause = Context.getConceptService().getConcept(propCause); String causeOfDeathOther = ""; if (conceptCause != null && patient.getPatientId() != null) { List<Obs> obssDeath = Context.getObsService().getObservationsByPersonAndConcept(patient, conceptCause); if (obssDeath.size() == 1) { Obs obsDeath = obssDeath.iterator().next(); causeOfDeathOther = obsDeath.getValueText(); if (causeOfDeathOther == null) { log.debug("cod is null, so setting to empty string"); causeOfDeathOther = ""; } else { log.debug("cod is valid: " + causeOfDeathOther); } } else { log.debug("obssDeath is wrong size: " + obssDeath.size()); } } else { log.debug("No concept cause found"); } // end get 'other' cause of death model.addAttribute("causeOfDeathOther", causeOfDeathOther); return new ShortPatientModel(patient); } @ModelAttribute("locations") public List<Location> getLocations() { return Context.getLocationService().getAllLocations(); } @ModelAttribute("defaultLocation") public Location getDefaultLocation() { return (LocationUtility.getUserDefaultLocation() != null) ? 
LocationUtility.getUserDefaultLocation() : LocationUtility.getDefaultLocation(); } @ModelAttribute("identifierTypes") public List<PatientIdentifierType> getIdentifierTypes() { return Context.getPatientService().getAllPatientIdentifierTypes(); } @ModelAttribute("identifierLocationUsed") public boolean getIdentifierLocationUsed() { List<PatientIdentifierType> pits = Context.getPatientService().getAllPatientIdentifierTypes(); boolean identifierLocationUsed = false; for (PatientIdentifierType pit : pits) { if (pit.getLocationBehavior() == null || pit.getLocationBehavior() == LocationBehavior.REQUIRED) { identifierLocationUsed = true; } } return identifierLocationUsed; } /** * Handles the form submission by validating the form fields and saving it to the DB * * @param request the webRequest object * @param patientModel the modelObject containing the patient info collected from the form * fields * @param result * @param status * @return the view to forward to * @should pass if all the form data is valid * @should create a new patient * @should send the user back to the form in case of validation errors * @should void a name and replace it with a new one if it is changed to a unique value * @should void an address and replace it with a new one if it is changed to a unique value * @should add a new name if the person had no names * @should add a new address if the person had none * @should ignore a new address that was added and voided at same time * @should set the cause of death as none a coded concept * @should set the cause of death as a none coded concept * @should void the cause of death obs that is none coded * @should add a new person attribute with a non empty value * @should not add a new person attribute with an empty value * @should void an existing person attribute with an empty value * @should should replace an existing attribute with a new one when edited */ @RequestMapping(method = RequestMethod.POST, value = SHORT_PATIENT_FORM_URL) public String saveShortPatient(WebRequest request, @ModelAttribute("personNameCache") PersonName personNameCache, @ModelAttribute("personAddressCache") PersonAddress personAddressCache, @ModelAttribute("patientModel") ShortPatientModel patientModel, BindingResult result) { if (Context.isAuthenticated()) { // First do form validation so that we can easily bind errors to // fields new ShortPatientFormValidator().validate(patientModel, result); if (result.hasErrors()) return SHORT_PATIENT_FORM_URL; Patient patient = null; patient = getPatientFromFormData(patientModel); Errors patientErrors = new BindException(patient, "patient"); patientValidator.validate(patient, patientErrors); if (patientErrors.hasErrors()) { // bind the errors to the patientModel object by adding them to // result since this is not a patient object // so that spring doesn't try to look for getters/setters for // Patient in ShortPatientModel for (ObjectError error : patientErrors.getAllErrors()) result.reject(error.getCode(), error.getArguments(), "Validation errors found"); return SHORT_PATIENT_FORM_URL; } // check if name/address were edited, void them and replace them boolean foundChanges = hasPersonNameOrAddressChanged(patient, personNameCache, personAddressCache); try { patient = Context.getPatientService().savePatient(patient); request.setAttribute(WebConstants.OPENMRS_MSG_ATTR, Context.getMessageSourceService().getMessage( "Patient.saved"), WebRequest.SCOPE_SESSION); // TODO do we really still need this, besides ensuring that the // cause of death is provided? 
// process and save the death info saveDeathInfo(patientModel, request); if (!patient.getVoided()) { // save the relationships to the database Map<String, Relationship> relationships = getRelationshipsMap(patientModel, request); for (Relationship relationship : relationships.values()) { // if the user added a person to this relationship, save // it if (relationship.getPersonA() != null && relationship.getPersonB() != null) Context.getPersonService().saveRelationship(relationship); } } } catch (APIException e) { log.error("Error occurred while attempting to save patient", e); request.setAttribute(WebConstants.OPENMRS_ERROR_ATTR, Context.getMessageSourceService().getMessage( "Patient.save.error"), WebRequest.SCOPE_SESSION); // TODO revert the changes and send them back to the form // don't send the user back to the form because the created // person name/addresses // will be recreated over again if the user attempts to resubmit if (!foundChanges) return SHORT_PATIENT_FORM_URL; } return "redirect:" + PATIENT_DASHBOARD_URL + "?patientId=" + patient.getPatientId(); } return FIND_PATIENT_PAGE; } /** * Convenience method that gets the data from the patientModel * * @param patientModel the modelObject holding the form data * @return the patient object that has been populated with input from the form */ private Patient getPatientFromFormData(ShortPatientModel patientModel) { Patient patient = patientModel.getPatient(); PersonName personName = patientModel.getPersonName(); if (personName != null) { personName.setPreferred(true); patient.addName(personName); } PersonAddress personAddress = patientModel.getPersonAddress(); if (personAddress != null) { if (personAddress.isVoided() && StringUtils.isBlank(personAddress.getVoidReason())) { personAddress.setVoidReason(Context.getMessageSourceService().getMessage("general.default.voidReason")); } // don't add an address that is being created and at the // same time being removed else if (!(personAddress.isVoided() && personAddress.getPersonAddressId() == null)) { personAddress.setPreferred(true); patient.addAddress(personAddress); } } // add all the existing identifiers and any new ones. 
if (patientModel.getIdentifiers() != null) { for (PatientIdentifier id : patientModel.getIdentifiers()) { // skip past the new ones removed from the user interface(may be // they were invalid // and the user changed their mind about adding them and they // removed them) if (id.getPatientIdentifierId() == null && id.isVoided()) continue; patient.addIdentifier(id); } } // add the person attributes if (patientModel.getPersonAttributes() != null) { for (PersonAttribute formAttribute : patientModel.getPersonAttributes()) { //skip past new attributes with no values, because the user left them blank if (formAttribute.getPersonAttributeId() == null && StringUtils.isBlank(formAttribute.getValue())) continue; //if the value has been changed for an existing attribute, void it and create a new one if (formAttribute.getPersonAttributeId() != null && !OpenmrsUtil.nullSafeEquals(formAttribute.getValue(), patient.getAttribute( formAttribute.getAttributeType()).getValue())) { //As per the logic in Person.addAttribute, the old edited attribute will get voided //as this new one is getting added formAttribute = new PersonAttribute(formAttribute.getAttributeType(), formAttribute.getValue()); //AOP is failing to set these in unit tests, just set them here for the tests to pass formAttribute.setDateCreated(new Date()); formAttribute.setCreator(Context.getAuthenticatedUser()); } patient.addAttribute(formAttribute); } } return patient; } /** * Creates a map of string of the form 3b, 3a and the actual person Relationships * * @param person the patient/person whose relationships to return * @param request the webRequest Object * @return map of strings matched against actual relationships */ @ModelAttribute("relationshipsMap") private Map<String, Relationship> getRelationshipsMap(@ModelAttribute("patientModel") ShortPatientModel patientModel, WebRequest request) { Person person = patientModel.getPatient(); Map<String, Relationship> relationshipMap = new LinkedHashMap<String, Relationship>(); // gp is in the form "3a, 7b, 4a" String relationshipsString = Context.getAdministrationService().getGlobalProperty( OpenmrsConstants.GLOBAL_PROPERTY_NEWPATIENTFORM_RELATIONSHIPS, ""); relationshipsString = relationshipsString.trim(); if (relationshipsString.length() > 0) { String[] showRelations = relationshipsString.split(","); // iterate over strings like "3a" for (String showRelation : showRelations) { showRelation = showRelation.trim(); boolean aIsToB = true; if (showRelation.endsWith("b")) { aIsToB = false; } // trim out the trailing a or b char String showRelationId = showRelation.replace("a", ""); showRelationId = showRelationId.replace("b", ""); RelationshipType relationshipType = Context.getPersonService().getRelationshipType( Integer.valueOf(showRelationId)); // flag to know if we need to create a stub relationship boolean relationshipFound = false; if (person != null && person.getPersonId() != null) { if (aIsToB) { List<Relationship> relationships = Context.getPersonService().getRelationships(null, person, relationshipType); if (relationships.size() > 0) { relationshipMap.put(showRelation, relationships.get(0)); relationshipFound = true; } } else { List<Relationship> relationships = Context.getPersonService().getRelationships(person, null, relationshipType); if (relationships.size() > 0) { relationshipMap.put(showRelation, relationships.get(0)); relationshipFound = true; } } } // if no relationship was found, create a stub one now if (relationshipFound == false) { Relationship relationshipStub = new Relationship(); 
relationshipStub.setRelationshipType(relationshipType); if (aIsToB) relationshipStub.setPersonB(person); else relationshipStub.setPersonA(person); relationshipMap.put(showRelation, relationshipStub); } // check the request to see if a parameter exists in there // that matches to the user desired relation. Overwrite // any previous data if found String submittedPersonId = request.getParameter(showRelation); if (submittedPersonId != null && submittedPersonId.length() > 0) { Person submittedPerson = Context.getPersonService().getPerson(Integer.valueOf(submittedPersonId)); if (aIsToB) relationshipMap.get(showRelation).setPersonA(submittedPerson); else relationshipMap.get(showRelation).setPersonB(submittedPerson); } } } return relationshipMap; } /** * Processes the death information for a deceased patient and save it to the database * * @param patientModel the modelObject containing the patient info collected from the form * fields * @param request webRequest object */ private void saveDeathInfo(ShortPatientModel patientModel, WebRequest request) { // update the death reason if (patientModel.getPatient().getDead()) { log.debug("Patient is dead, so let's make sure there's an Obs for it"); // need to make sure there is an Obs that represents the // patient's cause of death, if applicable String codProp = Context.getAdministrationService().getGlobalProperty("concept.causeOfDeath"); Concept causeOfDeath = Context.getConceptService().getConcept(codProp); if (causeOfDeath != null) { List<Obs> obssDeath = Context.getObsService().getObservationsByPersonAndConcept(patientModel.getPatient(), causeOfDeath); if (obssDeath != null) { if (obssDeath.size() > 1) { log.warn("Multiple causes of death (" + obssDeath.size() + ")? Shouldn't be..."); } else { Obs obsDeath = null; if (obssDeath.size() == 1) { // already has a cause of death - let's edit // it. 
log.debug("Already has a cause of death, so changing it"); obsDeath = obssDeath.iterator().next(); } else { // no cause of death obs yet, so let's make // one log.debug("No cause of death yet, let's create one."); obsDeath = new Obs(); obsDeath.setPerson(patientModel.getPatient()); obsDeath.setConcept(causeOfDeath); } // put the right concept and (maybe) text in this obs Concept currCause = patientModel.getPatient().getCauseOfDeath(); if (currCause == null) { // set to NONE log.debug("Current cause is null, attempting to set to NONE"); String noneConcept = Context.getAdministrationService().getGlobalProperty("concept.none"); currCause = Context.getConceptService().getConcept(noneConcept); } if (currCause != null) { log.debug("Current cause is not null, setting to value_coded"); obsDeath.setValueCoded(currCause); obsDeath.setValueCodedName(currCause.getName()); Date dateDeath = patientModel.getPatient().getDeathDate(); if (dateDeath == null) dateDeath = new Date(); obsDeath.setObsDatetime(dateDeath); // check if this is an "other" concept - if // so, then we need to add value_text String otherConcept = Context.getAdministrationService().getGlobalProperty( "concept.otherNonCoded"); Concept conceptOther = Context.getConceptService().getConcept(otherConcept); if (conceptOther != null) { if (conceptOther.equals(currCause)) { // seems like this is an other // concept - let's try to get the // "other" field info String otherInfo = request.getParameter("patient.causeOfDeath_other"); if (otherInfo == null) otherInfo = ""; log.debug("Setting value_text as " + otherInfo); obsDeath.setValueText(otherInfo); } else { log.debug("New concept is NOT the OTHER concept, so setting to blank"); obsDeath.setValueText(""); } } else { log.debug("Don't seem to know about an OTHER concept, so deleting value_text"); obsDeath.setValueText(""); } if (StringUtils.isBlank(obsDeath.getVoidReason())) obsDeath.setVoidReason(Context.getMessageSourceService().getMessage( "general.default.changeReason")); Context.getObsService().saveObs(obsDeath, obsDeath.getVoidReason()); } else { log.debug("Current cause is still null - aborting mission"); } } } } else { log.debug("Cause of death is null - should not have gotten here without throwing an error on the form."); } } } /** * Convenience method that checks if the person name or person address have been changed, should * void the old person name/address and create a new one with the changes. 
* * @param patient the patient * @param personNameCache the cached copy of the person name * @param personAddressCache the cached copy of the person address * @return true if the personName or personAddress was edited otherwise false */ private boolean hasPersonNameOrAddressChanged(Patient patient, PersonName personNameCache, PersonAddress personAddressCache) { boolean foundChanges = false; PersonName personName = patient.getPersonName(); if (personNameCache.getId() != null) { // if the existing persoName has been edited if (!getPersonNameString(personName).equalsIgnoreCase(getPersonNameString(personNameCache))) { if (log.isDebugEnabled()) log.debug("Voiding person name with id: " + personName.getId() + " and replacing it with a new one: " + personName.toString()); foundChanges = true; // create a new one and copy the changes to it PersonName newName = PersonName.newInstance(personName); newName.setPersonNameId(null); newName.setUuid(null); newName.setChangedBy(null);// just in case it had a value newName.setDateChanged(null); newName.setCreator(Context.getAuthenticatedUser()); newName.setDateCreated(new Date()); // restore the given,middle and familyName, then void the old // name personName.setGivenName(personNameCache.getGivenName()); personName.setMiddleName(personNameCache.getMiddleName()); personName.setFamilyName(personNameCache.getFamilyName()); personName.setPreferred(false); personName.setVoided(true); personName.setVoidReason(Context.getMessageSourceService().getMessage("general.voidReasonWithArgument", new Object[] { newName.toString() }, "Voided because it was edited to: " + newName.toString(), Context.getLocale())); // add the created name patient.addName(newName); } } PersonAddress personAddress = patient.getPersonAddress(); if (personAddress != null) { if (personAddressCache.getId() != null) { // if the existing personAddress has been edited if (!personAddress.isBlank() && !personAddressCache.isBlank() && !personAddress.equalsContent(personAddressCache)) { if (log.isDebugEnabled()) log.debug("Voiding person address with id: " + personAddress.getId() + " and replacing it with a new one: " + personAddress.toString()); foundChanges = true; // create a new one and copy the changes to it PersonAddress newAddress = (PersonAddress) personAddress.clone(); newAddress.setPersonAddressId(null); newAddress.setUuid(null); newAddress.setChangedBy(null);// just in case it had a value newAddress.setDateChanged(null); newAddress.setCreator(Context.getAuthenticatedUser()); newAddress.setDateCreated(new Date()); // restore address fields that are checked for changes and // void the address personAddress.setAddress1(personAddressCache.getAddress1()); personAddress.setAddress2(personAddressCache.getAddress2()); personAddress.setAddress3(personAddressCache.getAddress3()); personAddress.setCityVillage(personAddressCache.getCityVillage()); personAddress.setCountry(personAddressCache.getCountry()); personAddress.setCountyDistrict(personAddressCache.getCountyDistrict()); personAddress.setStateProvince(personAddressCache.getStateProvince()); personAddress.setPostalCode(personAddressCache.getPostalCode()); personAddress.setLatitude(personAddressCache.getLatitude()); personAddress.setLongitude(personAddressCache.getLongitude()); personAddress.setPreferred(false); personAddress.setVoided(true); personAddress.setVoidReason(Context.getMessageSourceService().getMessage( "general.voidReasonWithArgument", new Object[] { newAddress.toString() }, "Voided because it was edited to: " + newAddress.toString(), 
Context.getLocale())); // Add the created one patient.addAddress(newAddress); } } } return foundChanges; } /** * Convenience method that transforms a person name to a string while ignoring null and blank * values, the returned string only contains the givenName, middleName and familyName * * @param name the person name to transform * @return the transformed string ignoring blanks and nulls */ public static String getPersonNameString(PersonName name) { ArrayList<String> tempName = new ArrayList<String>(); if (StringUtils.isNotBlank(name.getGivenName())) tempName.add(name.getGivenName().trim()); if (StringUtils.isNotBlank(name.getMiddleName())) tempName.add(name.getMiddleName().trim()); if (StringUtils.isNotBlank(name.getFamilyName())) tempName.add(name.getFamilyName().trim()); return StringUtils.join(tempName, " "); } }
web/src/main/java/org/openmrs/web/controller/patient/ShortPatientFormController.java
/** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.web.controller.patient; import java.util.ArrayList; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.Concept; import org.openmrs.Location; import org.openmrs.Obs; import org.openmrs.Patient; import org.openmrs.PatientIdentifier; import org.openmrs.PatientIdentifierType; import org.openmrs.PatientIdentifierType.LocationBehavior; import org.openmrs.Person; import org.openmrs.PersonAddress; import org.openmrs.PersonAttribute; import org.openmrs.PersonName; import org.openmrs.Relationship; import org.openmrs.RelationshipType; import org.openmrs.api.APIException; import org.openmrs.api.context.Context; import org.openmrs.util.LocationUtility; import org.openmrs.util.OpenmrsConstants; import org.openmrs.util.OpenmrsUtil; import org.openmrs.validator.PatientValidator; import org.openmrs.web.WebConstants; import org.openmrs.web.controller.person.PersonFormController; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.ModelMap; import org.springframework.validation.BindException; import org.springframework.validation.BindingResult; import org.springframework.validation.Errors; import org.springframework.validation.ObjectError; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.context.request.WebRequest; /** * This controller is used for the "mini"/"new"/"short" patient form. 
Only key/important attributes * for the patient are displayed and allowed to be edited * * @see org.openmrs.web.controller.patient.PatientFormController */ @Controller public class ShortPatientFormController { private static final Log log = LogFactory.getLog(ShortPatientFormController.class); private static final String SHORT_PATIENT_FORM_URL = "/admin/patients/shortPatientForm"; private static final String FIND_PATIENT_PAGE = "findPatient"; private static final String PATIENT_DASHBOARD_URL = "/patientDashboard.form"; @Autowired PatientValidator patientValidator; /** * @return */ @RequestMapping(method = RequestMethod.GET, value = SHORT_PATIENT_FORM_URL) public void showForm() { } @ModelAttribute("patientModel") public ShortPatientModel getPatientModel(@RequestParam(value = "patientId", required = false) Integer patientId, ModelMap model, WebRequest request) { Patient patient; if (patientId != null) { patient = Context.getPatientService().getPatientOrPromotePerson(patientId); if (patient == null) { throw new IllegalArgumentException("No patient or person with the given id"); } } else { // we may have some details to add to a blank patient patient = new Patient(); String name = request.getParameter("addName"); if (!StringUtils.isBlank(name)) { String gender = request.getParameter("addGender"); String date = request.getParameter("addBirthdate"); String age = request.getParameter("addAge"); PersonFormController.getMiniPerson(patient, name, gender, date, age); } } // if we have an existing personName, cache the original name so that we // can use it to // track changes in givenName, middleName, familyName, will also use // it to restore the original values if (patient.getPersonName() != null && patient.getPersonName().getId() != null) model.addAttribute("personNameCache", PersonName.newInstance(patient.getPersonName())); else model.addAttribute("personNameCache", new PersonName()); // cache a copy of the person address for comparison in case the name is // edited if (patient.getPersonAddress() != null && patient.getPersonAddress().getId() != null) model.addAttribute("personAddressCache", patient.getPersonAddress().clone()); else model.addAttribute("personAddressCache", new PersonAddress()); String propCause = Context.getAdministrationService().getGlobalProperty("concept.causeOfDeath"); Concept conceptCause = Context.getConceptService().getConcept(propCause); String causeOfDeathOther = ""; if (conceptCause != null && patient.getPatientId() != null) { List<Obs> obssDeath = Context.getObsService().getObservationsByPersonAndConcept(patient, conceptCause); if (obssDeath.size() == 1) { Obs obsDeath = obssDeath.iterator().next(); causeOfDeathOther = obsDeath.getValueText(); if (causeOfDeathOther == null) { log.debug("cod is null, so setting to empty string"); causeOfDeathOther = ""; } else { log.debug("cod is valid: " + causeOfDeathOther); } } else { log.debug("obssDeath is wrong size: " + obssDeath.size()); } } else { log.debug("No concept cause found"); } // end get 'other' cause of death model.addAttribute("causeOfDeathOther", causeOfDeathOther); return new ShortPatientModel(patient); } @ModelAttribute("locations") public List<Location> getLocations() { return Context.getLocationService().getAllLocations(); } @ModelAttribute("defaultLocation") public Location getDefaultLocation() { return (LocationUtility.getUserDefaultLocation() != null) ? 
LocationUtility.getUserDefaultLocation() : LocationUtility.getDefaultLocation(); } @ModelAttribute("identifierTypes") public List<PatientIdentifierType> getIdentifierTypes() { return Context.getPatientService().getAllPatientIdentifierTypes(); } @ModelAttribute("identifierLocationUsed") public boolean getIdentifierLocationUsed() { List<PatientIdentifierType> pits = Context.getPatientService().getAllPatientIdentifierTypes(); boolean identifierLocationUsed = false; for (PatientIdentifierType pit : pits) { if (pit.getLocationBehavior() == null || pit.getLocationBehavior() == LocationBehavior.REQUIRED) { identifierLocationUsed = true; } } return identifierLocationUsed; } /** * Handles the form submission by validating the form fields and saving it to the DB * * @param request the webRequest object * @param patientModel the modelObject containing the patient info collected from the form * fields * @param result * @param status * @return the view to forward to * @should pass if all the form data is valid * @should create a new patient * @should send the user back to the form in case of validation errors * @should void a name and replace it with a new one if it is changed to a unique value * @should void an address and replace it with a new one if it is changed to a unique value * @should add a new name if the person had no names * @should add a new address if the person had none * @should ignore a new address that was added and voided at same time * @should set the cause of death as none a coded concept * @should set the cause of death as a none coded concept * @should void the cause of death obs that is none coded * @should add a new person attribute with a non empty value * @should not add a new person attribute with an empty value * @should void an existing person attribute with an empty value * @should should replace an existing attribute with a new one when edited */ @RequestMapping(method = RequestMethod.POST, value = SHORT_PATIENT_FORM_URL) public String saveShortPatient(WebRequest request, @ModelAttribute("personNameCache") PersonName personNameCache, @ModelAttribute("personAddressCache") PersonAddress personAddressCache, @ModelAttribute("patientModel") ShortPatientModel patientModel, BindingResult result) { if (Context.isAuthenticated()) { // First do form validation so that we can easily bind errors to // fields new ShortPatientFormValidator().validate(patientModel, result); if (result.hasErrors()) return SHORT_PATIENT_FORM_URL; Patient patient = null; patient = getPatientFromFormData(patientModel); Errors patientErrors = new BindException(patient, "patient"); patientValidator.validate(patient, patientErrors); if (patientErrors.hasErrors()) { // bind the errors to the patientModel object by adding them to // result since this is not a patient object // so that spring doesn't try to look for getters/setters for // Patient in ShortPatientModel for (ObjectError error : patientErrors.getAllErrors()) result.reject(error.getCode(), error.getArguments(), "Validation errors found"); return SHORT_PATIENT_FORM_URL; } // check if name/address were edited, void them and replace them boolean foundChanges = hasPersonNameOrAddressChanged(patient, personNameCache, personAddressCache); try { patient = Context.getPatientService().savePatient(patient); request.setAttribute(WebConstants.OPENMRS_MSG_ATTR, Context.getMessageSourceService().getMessage( "Patient.saved"), WebRequest.SCOPE_SESSION); // TODO do we really still need this, besides ensuring that the // cause of death is provided? 
// process and save the death info saveDeathInfo(patientModel, request); if (!patient.getVoided()) { // save the relationships to the database Map<String, Relationship> relationships = getRelationshipsMap(patientModel, request); for (Relationship relationship : relationships.values()) { // if the user added a person to this relationship, save // it if (relationship.getPersonA() != null && relationship.getPersonB() != null) Context.getPersonService().saveRelationship(relationship); } } } catch (APIException e) { log.error("Error occurred while attempting to save patient", e); request.setAttribute(WebConstants.OPENMRS_ERROR_ATTR, Context.getMessageSourceService().getMessage( "Patient.save.error"), WebRequest.SCOPE_SESSION); // TODO revert the changes and send them back to the form // don't send the user back to the form because the created // person name/addresses // will be recreated over again if the user attempts to resubmit if (!foundChanges) return SHORT_PATIENT_FORM_URL; } return "redirect:" + PATIENT_DASHBOARD_URL + "?patientId=" + patient.getPatientId(); } return FIND_PATIENT_PAGE; } /** * Convenience method that gets the data from the patientModel * * @param patientModel the modelObject holding the form data * @return the patient object that has been populated with input from the form */ private Patient getPatientFromFormData(ShortPatientModel patientModel) { Patient patient = patientModel.getPatient(); PersonName personName = patientModel.getPersonName(); if (personName != null) { personName.setPreferred(true); patient.addName(personName); } PersonAddress personAddress = patientModel.getPersonAddress(); if (personAddress != null) { if (personAddress.isVoided() && StringUtils.isBlank(personAddress.getVoidReason())) { personAddress.setVoidReason(Context.getMessageSourceService().getMessage("general.default.voidReason")); } // don't add an address that is being created and at the // same time being removed else if (!(personAddress.isVoided() && personAddress.getPersonAddressId() == null)) { personAddress.setPreferred(true); patient.addAddress(personAddress); } } // add all the existing identifiers and any new ones. 
if (patientModel.getIdentifiers() != null) { for (PatientIdentifier id : patientModel.getIdentifiers()) { // skip past the new ones removed from the user interface(may be // they were invalid // and the user changed their mind about adding them and they // removed them) if (id.getPatientIdentifierId() == null && id.isVoided()) continue; patient.addIdentifier(id); } } // add the person attributes if (patientModel.getPersonAttributes() != null) { for (PersonAttribute formAttribute : patientModel.getPersonAttributes()) { //skip past new attributes with no values, because the user left them blank if (formAttribute.getPersonAttributeId() == null && StringUtils.isBlank(formAttribute.getValue())) continue; //if the value has been changed for an existing attribute, void it and create a new one if (formAttribute.getPersonAttributeId() != null && !OpenmrsUtil.nullSafeEquals(formAttribute.getValue(), patient.getAttribute( formAttribute.getAttributeType()).getValue())) { //As per the logic in Person.addAttribute, the old edited attribute will get voided //as this new one is getting added formAttribute = new PersonAttribute(formAttribute.getAttributeType(), formAttribute.getValue()); //AOP is failing to set these in unit tests, just set them here for the tests to pass formAttribute.setDateCreated(new Date()); formAttribute.setCreator(Context.getAuthenticatedUser()); } patient.addAttribute(formAttribute); } } return patient; } /** * Creates a map of string of the form 3b, 3a and the actual person Relationships * * @param person the patient/person whose relationships to return * @param request the webRequest Object * @return map of strings matched against actual relationships */ @ModelAttribute("relationshipsMap") private Map<String, Relationship> getRelationshipsMap(@ModelAttribute("patientModel") ShortPatientModel patientModel, WebRequest request) { Person person = patientModel.getPatient(); Map<String, Relationship> relationshipMap = new LinkedHashMap<String, Relationship>(); // gp is in the form "3a, 7b, 4a" String relationshipsString = Context.getAdministrationService().getGlobalProperty( OpenmrsConstants.GLOBAL_PROPERTY_NEWPATIENTFORM_RELATIONSHIPS, ""); relationshipsString = relationshipsString.trim(); if (relationshipsString.length() > 0) { String[] showRelations = relationshipsString.split(","); // iterate over strings like "3a" for (String showRelation : showRelations) { showRelation = showRelation.trim(); boolean aIsToB = true; if (showRelation.endsWith("b")) { aIsToB = false; } // trim out the trailing a or b char String showRelationId = showRelation.replace("a", ""); showRelationId = showRelationId.replace("b", ""); RelationshipType relationshipType = Context.getPersonService().getRelationshipType( Integer.valueOf(showRelationId)); // flag to know if we need to create a stub relationship boolean relationshipFound = false; if (person != null && person.getPersonId() != null) { if (aIsToB) { List<Relationship> relationships = Context.getPersonService().getRelationships(null, person, relationshipType); if (relationships.size() > 0) { relationshipMap.put(showRelation, relationships.get(0)); relationshipFound = true; } } else { List<Relationship> relationships = Context.getPersonService().getRelationships(person, null, relationshipType); if (relationships.size() > 0) { relationshipMap.put(showRelation, relationships.get(0)); relationshipFound = true; } } } // if no relationship was found, create a stub one now if (relationshipFound == false) { Relationship relationshipStub = new Relationship(); 
relationshipStub.setRelationshipType(relationshipType); if (aIsToB) relationshipStub.setPersonB(person); else relationshipStub.setPersonA(person); relationshipMap.put(showRelation, relationshipStub); } // check the request to see if a parameter exists in there // that matches to the user desired relation. Overwrite // any previous data if found String submittedPersonId = request.getParameter(showRelation); if (submittedPersonId != null && submittedPersonId.length() > 0) { Person submittedPerson = Context.getPersonService().getPerson(Integer.valueOf(submittedPersonId)); if (aIsToB) relationshipMap.get(showRelation).setPersonA(submittedPerson); else relationshipMap.get(showRelation).setPersonB(submittedPerson); } } } return relationshipMap; } /** * Processes the death information for a deceased patient and save it to the database * * @param patientModel the modelObject containing the patient info collected from the form * fields * @param request webRequest object */ private void saveDeathInfo(ShortPatientModel patientModel, WebRequest request) { // update the death reason if (patientModel.getPatient().getDead()) { log.debug("Patient is dead, so let's make sure there's an Obs for it"); // need to make sure there is an Obs that represents the // patient's cause of death, if applicable String codProp = Context.getAdministrationService().getGlobalProperty("concept.causeOfDeath"); Concept causeOfDeath = Context.getConceptService().getConcept(codProp); if (causeOfDeath != null) { List<Obs> obssDeath = Context.getObsService().getObservationsByPersonAndConcept(patientModel.getPatient(), causeOfDeath); if (obssDeath != null) { if (obssDeath.size() > 1) { log.warn("Multiple causes of death (" + obssDeath.size() + ")? Shouldn't be..."); } else { Obs obsDeath = null; if (obssDeath.size() == 1) { // already has a cause of death - let's edit // it. 
log.debug("Already has a cause of death, so changing it"); obsDeath = obssDeath.iterator().next(); } else { // no cause of death obs yet, so let's make // one log.debug("No cause of death yet, let's create one."); obsDeath = new Obs(); obsDeath.setPerson(patientModel.getPatient()); obsDeath.setConcept(causeOfDeath); } // put the right concept and (maybe) text in this obs Concept currCause = patientModel.getPatient().getCauseOfDeath(); if (currCause == null) { // set to NONE log.debug("Current cause is null, attempting to set to NONE"); String noneConcept = Context.getAdministrationService().getGlobalProperty("concept.none"); currCause = Context.getConceptService().getConcept(noneConcept); } if (currCause != null) { log.debug("Current cause is not null, setting to value_coded"); obsDeath.setValueCoded(currCause); obsDeath.setValueCodedName(currCause.getName()); Date dateDeath = patientModel.getPatient().getDeathDate(); if (dateDeath == null) dateDeath = new Date(); obsDeath.setObsDatetime(dateDeath); // check if this is an "other" concept - if // so, then we need to add value_text String otherConcept = Context.getAdministrationService().getGlobalProperty( "concept.otherNonCoded"); Concept conceptOther = Context.getConceptService().getConcept(otherConcept); if (conceptOther != null) { if (conceptOther.equals(currCause)) { // seems like this is an other // concept - let's try to get the // "other" field info String otherInfo = request.getParameter("patient.causeOfDeath_other"); if (otherInfo == null) otherInfo = ""; log.debug("Setting value_text as " + otherInfo); obsDeath.setValueText(otherInfo); } else { log.debug("New concept is NOT the OTHER concept, so setting to blank"); obsDeath.setValueText(""); } } else { log.debug("Don't seem to know about an OTHER concept, so deleting value_text"); obsDeath.setValueText(""); } if (StringUtils.isBlank(obsDeath.getVoidReason())) obsDeath.setVoidReason(Context.getMessageSourceService().getMessage( "general.default.changeReason")); Context.getObsService().saveObs(obsDeath, obsDeath.getVoidReason()); } else { log.debug("Current cause is still null - aborting mission"); } } } } else { log.debug("Cause of death is null - should not have gotten here without throwing an error on the form."); } } } /** * Convenience method that checks if the person name or person address have been changed, should * void the old person name/address and create a new one with the changes. 
* * @param patient the patient * @param personNameCache the cached copy of the person name * @param personAddressCache the cached copy of the person address * @return true if the personName or personAddress was edited otherwise false */ private boolean hasPersonNameOrAddressChanged(Patient patient, PersonName personNameCache, PersonAddress personAddressCache) { boolean foundChanges = false; PersonName personName = patient.getPersonName(); if (personNameCache.getId() != null) { // if the existing persoName has been edited if (!getPersonNameString(personName).equalsIgnoreCase(getPersonNameString(personNameCache))) { if (log.isDebugEnabled()) log.debug("Voiding person name with id: " + personName.getId() + " and replacing it with a new one: " + personName.toString()); foundChanges = true; // create a new one and copy the changes to it PersonName newName = PersonName.newInstance(personName); newName.setPersonNameId(null); newName.setUuid(null); newName.setChangedBy(null);// just in case it had a value newName.setDateChanged(null); newName.setCreator(Context.getAuthenticatedUser()); newName.setDateCreated(new Date()); // restore the given,middle and familyName, then void the old // name personName.setGivenName(personNameCache.getGivenName()); personName.setMiddleName(personNameCache.getMiddleName()); personName.setFamilyName(personNameCache.getFamilyName()); personName.setPreferred(false); personName.setVoided(true); personName.setVoidReason(Context.getMessageSourceService().getMessage("general.voidReasonWithArgument", new Object[] { newName.toString() }, "Voided because it was edited to: " + newName.toString(), Context.getLocale())); // add the created name patient.addName(newName); } } PersonAddress personAddress = patient.getPersonAddress(); if (personAddress != null) { if (personAddressCache.getId() != null) { // if the existing personAddress has been edited if (!personAddress.isBlank() && !personAddressCache.isBlank() && !personAddress.toString().equalsIgnoreCase(personAddressCache.toString())) { if (log.isDebugEnabled()) log.debug("Voiding person address with id: " + personAddress.getId() + " and replacing it with a new one: " + personAddress.toString()); foundChanges = true; // create a new one and copy the changes to it PersonAddress newAddress = (PersonAddress) personAddress.clone(); newAddress.setPersonAddressId(null); newAddress.setUuid(null); newAddress.setChangedBy(null);// just in case it had a value newAddress.setDateChanged(null); newAddress.setCreator(Context.getAuthenticatedUser()); newAddress.setDateCreated(new Date()); // restore address fields that are checked for changes and // void the address personAddress.setAddress1(personAddressCache.getAddress1()); personAddress.setAddress2(personAddressCache.getAddress2()); personAddress.setAddress3(personAddressCache.getAddress3()); personAddress.setCityVillage(personAddressCache.getCityVillage()); personAddress.setCountry(personAddressCache.getCountry()); personAddress.setCountyDistrict(personAddressCache.getCountyDistrict()); personAddress.setStateProvince(personAddressCache.getStateProvince()); personAddress.setPostalCode(personAddressCache.getPostalCode()); personAddress.setLatitude(personAddressCache.getLatitude()); personAddress.setLongitude(personAddressCache.getLongitude()); personAddress.setPreferred(false); personAddress.setVoided(true); personAddress.setVoidReason(Context.getMessageSourceService().getMessage( "general.voidReasonWithArgument", new Object[] { newAddress.toString() }, "Voided because it was edited to: " + 
newAddress.toString(), Context.getLocale())); // Add the created one patient.addAddress(newAddress); } } } return foundChanges; } /** * Convenience method that transforms a person name to a string while ignoring null and blank * values, the returned string only contains the givenName, middleName and familyName * * @param name the person name to transform * @return the transformed string ignoring blanks and nulls */ public static String getPersonNameString(PersonName name) { ArrayList<String> tempName = new ArrayList<String>(); if (StringUtils.isNotBlank(name.getGivenName())) tempName.add(name.getGivenName().trim()); if (StringUtils.isNotBlank(name.getMiddleName())) tempName.add(name.getMiddleName().trim()); if (StringUtils.isNotBlank(name.getFamilyName())) tempName.add(name.getFamilyName().trim()); return StringUtils.join(tempName, " "); } }
Back porting TRUNK-3241: PersonAddresses get voided and recreated on form submission git-svn-id: 4718f6e240ca7210205b40b658d48b65419e2f31@26785 5bac5841-c719-aa4e-b3fe-cce5062f897a
web/src/main/java/org/openmrs/web/controller/patient/ShortPatientFormController.java
Back porting TRUNK-3241: PersonAddresses get voided and recreated on form submission
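For the record above, the only functional change between old_contents and new_contents is in hasPersonNameOrAddressChanged: the edited-address check moves from comparing personAddress.toString() strings to calling PersonAddress.equalsContent(...), so an address is voided and recreated only when its address fields actually differ. The sketch below is not part of the commit; it is a minimal, hypothetical illustration of that difference, assuming org.openmrs.PersonAddress from openmrs-api is on the classpath.

// Hypothetical sketch, not from the commit: contrasts the old string-based
// change check with the new field-based check used in hasPersonNameOrAddressChanged.
import org.openmrs.PersonAddress;

public class AddressChangeCheckSketch {

    public static void main(String[] args) {
        PersonAddress current = new PersonAddress();
        current.setCityVillage("Kampala");
        current.setCountry("Uganda");

        PersonAddress cached = new PersonAddress();
        cached.setCityVillage("Kampala");
        cached.setCountry("Uganda");

        // Old check: compares the rendered strings; per TRUNK-3241 this could flag
        // unchanged addresses as edited, causing them to be voided and recreated
        // on every form submission.
        boolean changedOldWay = !current.toString().equalsIgnoreCase(cached.toString());

        // New check: equalsContent() compares only the address fields themselves.
        boolean changedNewWay = !current.equalsContent(cached);

        System.out.println("old check reports change: " + changedOldWay);
        System.out.println("new check reports change: " + changedNewWay);
    }
}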
Java
agpl-3.0
5e4fe7f9b019f65c61f9500a170299969f2fef3d
0
KinshipSoftware/KinOathKinshipArchiver,PeterWithers/temp-to-delete1,KinshipSoftware/KinOathKinshipArchiver,PeterWithers/temp-to-delete1
package nl.mpi.kinnate.gedcomimport;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import javax.swing.JProgressBar;
import javax.swing.JTextArea;
import junit.framework.TestCase;
import nl.mpi.arbil.util.ApplicationVersionManager;
import nl.mpi.arbil.util.ArbilBugCatcher;
import nl.mpi.kinnate.KinOathVersion;
import nl.mpi.kinnate.userstorage.KinSessionStorage;

/**
 * Document : CsvImporterTest Created on : Dec 30, 2011, 10:11:44
 *
 * @author petwit
 */
public class CsvImporterTest extends TestCase {

    /**
     * Test of getFieldsForLine method, of class CsvImporter.
     */
    public void testFieldsForLine() {
        runFieldsForLine("one,two,three", new String[]{"one", "two", "three"}, ',');
        runFieldsForLine("one\ttwo\tthree", new String[]{"one", "two", "three"}, '\t');
        runFieldsForLine("one,two,three\n", new String[]{"one", "two", "three"}, ',');
        runFieldsForLine("one,two,three\r", new String[]{"one", "two", "three"}, ',');
        runFieldsForLine("one,\"two\",three\r", new String[]{"one", "two", "three"}, ',');
        runFieldsForLine("one,\"two\",three\r", new String[]{"one", "two", "three"}, ',');
        runFieldsForLine("one,\"two\nextra\",three\r", new String[]{"one", "two\nextra", "three"}, ',');
        runFieldsForLine("one,\"two,extra\",three\r", new String[]{"one", "two,extra", "three"}, ',');
        runFieldsForLine("one,\"two\textra\",three\r", new String[]{"one", "two\textra", "three"}, ',');
        runFieldsForLine("one\t\"two,extra\"\tthree\r", new String[]{"one", "two,extra", "three"}, '\t');
        runFieldsForLine("one\t\"two\textra\"\tthree\r", new String[]{"one", "two\textra", "three"}, '\t');
        runFieldsForLine("* comments \n* more comments\r\r\n\n\rone\t\"two\textra\"\tthree\r", new String[]{"one", "two\textra", "three"}, '\t');
    }

    private void runFieldsForLine(String csvInputString, String[] expectedResult, char fieldSeparator) {
        try {
            BufferedReader bufferedReader = new BufferedReader(new StringReader(csvInputString));
            final ApplicationVersionManager applicationVersionManager = new ApplicationVersionManager(new KinOathVersion());
            final KinSessionStorage kinSessionStorage = new KinSessionStorage(applicationVersionManager);
            final ArbilBugCatcher bugCatcher = new ArbilBugCatcher(kinSessionStorage, new ApplicationVersionManager(new KinOathVersion()));
            ArrayList<String> arrayList = new CsvImporter(new JProgressBar(), new JTextArea(), true, new KinSessionStorage(applicationVersionManager)).getFieldsForLineExcludingComments(bufferedReader, fieldSeparator);
            assertTrue("Incorrect number of fields found", expectedResult.length == arrayList.size());
            for (int arrayCounter = 0; arrayCounter < expectedResult.length; arrayCounter++) {
                assertEquals(csvInputString, expectedResult[arrayCounter], arrayList.get(arrayCounter));
            }
        } catch (IOException exception) {
            fail(exception.getMessage());
        }
    }
}
desktop/src/test/java/nl/mpi/kinnate/gedcomimport/CsvImporterTest.java
package nl.mpi.kinnate.gedcomimport;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import javax.swing.JProgressBar;
import javax.swing.JTextArea;
import junit.framework.TestCase;
import nl.mpi.arbil.util.ApplicationVersionManager;
import nl.mpi.arbil.util.ArbilBugCatcher;
import nl.mpi.kinnate.KinOathVersion;
import nl.mpi.kinnate.userstorage.KinSessionStorage;

/**
 * Document : CsvImporterTest
 * Created on : Dec 30, 2011, 10:11:44
 * @author petwit
 */
public class CsvImporterTest extends TestCase {

    /**
     * Test of getFieldsForLine method, of class CsvImporter.
     */
    public void testFieldsForLine() {
        runFieldsForLine("one,two,three", new String[]{"one", "two", "three"});
        runFieldsForLine("one\ttwo\tthree", new String[]{"one", "two", "three"});
        runFieldsForLine("one,two,three\n", new String[]{"one", "two", "three"});
        runFieldsForLine("one,two,three\r", new String[]{"one", "two", "three"});
        runFieldsForLine("one,\"two\",three\r", new String[]{"one", "two", "three"});
        runFieldsForLine("one,\"two\",three\r", new String[]{"one", "two", "three"});
        runFieldsForLine("one,\"two\nextra\",three\r", new String[]{"one", "two\nextra", "three"});
        runFieldsForLine("one,\"two,extra\",three\r", new String[]{"one", "two,extra", "three"});
        runFieldsForLine("one,\"two\textra\",three\r", new String[]{"one", "two\textra", "three"});
        runFieldsForLine("one\t\"two,extra\"\tthree\r", new String[]{"one", "two,extra", "three"});
        runFieldsForLine("one\t\"two\textra\"\tthree\r", new String[]{"one", "two\textra", "three"});
    }

    private void runFieldsForLine(String csvInputString, String[] expectedResult) {
        try {
            BufferedReader bufferedReader = new BufferedReader(new StringReader(csvInputString));
            final ApplicationVersionManager applicationVersionManager = new ApplicationVersionManager(new KinOathVersion());
            final KinSessionStorage kinSessionStorage = new KinSessionStorage(applicationVersionManager);
            final ArbilBugCatcher bugCatcher = new ArbilBugCatcher(kinSessionStorage, new ApplicationVersionManager(new KinOathVersion()));
            ArrayList<String> arrayList = new CsvImporter(new JProgressBar(), new JTextArea(), true, new KinSessionStorage(applicationVersionManager)).getFieldsForLine(bufferedReader);
            assertTrue("Incorrect number of fields found", expectedResult.length == arrayList.size());
            for (int arrayCounter = 0; arrayCounter < expectedResult.length; arrayCounter++) {
                assertEquals(csvInputString, expectedResult[arrayCounter], arrayList.get(arrayCounter));
            }
        } catch (IOException exception) {
            fail(exception.getMessage());
        }
    }
}
refs #2394 #2400 In CSV import, added support for an ID column in any location. Also added info text to explain the use of this in the CSV text output. Corrected some graph sorting issues found after importing a CSV from Halle. Added CSV import for the French gender words. Added support for PUCK txt tab-delimited files which have new headers midway through the file. Added further PUCK support where, if the first record does not start at zero, all relations to the ID of zero will be ignored. Added an extra test case for the CSV importer.
desktop/src/test/java/nl/mpi/kinnate/gedcomimport/CsvImporterTest.java
refs #2394 #2400 In CSV import, added support for an ID column in any location. Also added info text to explain the use of this in the CSV text output. Corrected some graph sorting issues found after importing a CSV from Halle. Added CSV import for the French gender words. Added support for PUCK txt tab-delimited files which have new headers midway through the file. Added further PUCK support where, if the first record does not start at zero, all relations to the ID of zero will be ignored. Added an extra test case for the CSV importer.
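The CsvImporter class exercised by this test is not part of this record, so the implementation of getFieldsForLineExcludingComments is not shown. The following is a minimal, hypothetical sketch of quote-aware, comment-skipping field splitting consistent with the expectations in the test above (a configurable separator, double-quoted fields that may contain the separator or line breaks, and leading '*' comment lines and blank lines that are skipped). Class and method names are illustrative, not KinOath's API.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;

public class CsvFieldSplitterSketch {

    /**
     * Reads one logical record from the reader and splits it into fields.
     * Lines starting with '*' and blank lines before the record are skipped;
     * a double-quoted field may span line breaks and contain the separator.
     */
    public static ArrayList<String> getFields(BufferedReader reader, char fieldSeparator) throws IOException {
        String line = reader.readLine();
        // skip '*' comment lines and blank lines preceding the record
        while (line != null && (line.trim().length() == 0 || line.trim().startsWith("*"))) {
            line = reader.readLine();
        }
        ArrayList<String> fields = new ArrayList<String>();
        if (line == null) {
            return fields;
        }
        StringBuilder currentField = new StringBuilder();
        boolean insideQuotes = false;
        int position = 0;
        while (line != null) {
            if (position >= line.length()) {
                if (insideQuotes) {
                    // the quoted field continues on the next physical line
                    currentField.append('\n');
                    line = reader.readLine();
                    position = 0;
                    continue;
                }
                break; // end of the record
            }
            char character = line.charAt(position++);
            if (character == '"') {
                insideQuotes = !insideQuotes;
            } else if (character == fieldSeparator && !insideQuotes) {
                fields.add(currentField.toString());
                currentField.setLength(0);
            } else {
                currentField.append(character);
            }
        }
        fields.add(currentField.toString());
        return fields;
    }

    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new StringReader("* a comment line\none\t\"two\textra\"\tthree"));
        // yields three fields: "one", "two\textra", "three"
        System.out.println(getFields(reader, '\t'));
    }
}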
Java
lgpl-2.1
d46aede2c46cfbc83a392738d66931d99808e75a
0
xwiki/xwiki-enterprise,xwiki/xwiki-enterprise
/*
 * See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.xwiki.test.ui.scheduler;

import junit.framework.Assert;
import org.junit.Test;
import org.xwiki.test.ui.framework.AbstractAdminAuthenticatedTest;
import org.xwiki.test.ui.framework.elements.ViewPage;
import org.xwiki.test.ui.scheduler.elements.SchedulerJobInlinePage;

/**
 * Tests Scheduler application features.
 *
 * @since 2.3.1
 * @since 2.4M1
 * @version $Id$
 */
public class SchedulerTest extends AbstractAdminAuthenticatedTest {

    /**
     * Tests that a scheduler job page default edit mode is "inline"
     */
    @Test
    public void testSchedulerJobDefaultEditMode() {
        getUtil().gotoPage("Scheduler", "WatchListDailyNotifier");
        ViewPage page = new ViewPage();
        Assert.assertTrue(page.exists());
        page.clickEdit();
        SchedulerJobInlinePage inlineJob = new SchedulerJobInlinePage();
        // The edit sheet of scheduler jobs points to Quartz documentation.
        // Make sure this documentation is referenced to prove we are indeed in inline edit mode.
        // Assert.assertTrue(inlineJob.isQuartzDocumentationReferenced());
        String source = getDriver().getPageSource();
        try {
            inlineJob.isQuartzDocumentationReferenced();
        } catch (Exception e) {
            Assert.assertEquals("fail", source);
        }
    }
}
distribution-test/ui-tests/src/test/it/org/xwiki/test/ui/scheduler/SchedulerTest.java
/*
 * See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.xwiki.test.ui.scheduler;

import junit.framework.Assert;
import org.junit.Test;
import org.xwiki.test.ui.scheduler.elements.SchedulerJobInlinePage;
import org.xwiki.test.ui.framework.elements.ViewPage;
import org.xwiki.test.ui.framework.AbstractAdminAuthenticatedTest;

/**
 * Tests Scheduler application features.
 *
 * @since 2.3.1
 * @since 2.4M1
 * @version $Id$
 */
public class SchedulerTest extends AbstractAdminAuthenticatedTest {

    /**
     * Tests that a scheduler job page default edit mode is "inline"
     */
    @Test
    public void testSchedulerJobDefaultEditMode() {
        getUtil().gotoPage("Scheduler", "WatchListDailyNotifier");
        ViewPage page = new ViewPage();
        Assert.assertTrue(page.exists());
        page.clickEdit();
        SchedulerJobInlinePage inlineJob = new SchedulerJobInlinePage();
        // The edit sheet of scheduler jobs points to Quartz documentation/
        // Make sure this documentation is referenced to prove we are indeed in inline edit mode.
        Assert.assertTrue(inlineJob.isQuartzDocumentationReferenced());
    }
}
Debug SchedulerTest on Hudson git-svn-id: cf27bad30c6b7316185bdac65b014e8c16cd40b6@34474 f329d543-caf0-0310-9063-dda96c69346f
distribution-test/ui-tests/src/test/it/org/xwiki/test/ui/scheduler/SchedulerTest.java
Debug SchedulerTest on Hudson
Java
lgpl-2.1
2172859e8d3a72d3b46f30bc06ab1c00527b5ab4
0
vladimir-bukhtoyarov/jagger8,anton-antonenko/jagger,Nmishin/jagger,vladimir-bukhtoyarov/jagger,SokolAndrey/jagger,vladimir-bukhtoyarov/jagger,anton-antonenko/jagger,anton-antonenko/jagger,griddynamics/jagger,SokolAndrey/jagger,griddynamics/jagger,SokolAndrey/jagger,Nmishin/jagger,vladimir-bukhtoyarov/jagger8,vladimir-bukhtoyarov/jagger,vladimir-bukhtoyarov/jagger8,Nmishin/jagger,griddynamics/jagger
/* * Copyright (c) 2010-2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved * http://www.griddynamics.com * * This library is free software; you can redistribute it and/or modify it under the terms of * the GNU Lesser General Public License as published by the Free Software Foundation; either * version 2.1 of the License, or any later version. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.griddynamics.jagger.engine.e1.reporting; import com.griddynamics.jagger.agent.model.MonitoringParameter; import com.griddynamics.jagger.monitoring.MonitoringParameterBean; import com.griddynamics.jagger.monitoring.reporting.GroupKey; import com.griddynamics.jagger.monitoring.reporting.SystemUnderTestPlotsGeneralProvider; import com.griddynamics.jagger.reporting.AbstractReportProvider; import net.sf.jasperreports.engine.JRDataSource; import net.sf.jasperreports.engine.data.JRBeanCollectionDataSource; import java.util.ArrayList; import java.util.List; import java.util.Set; /** * User: dkotlyarov */ public class TestGeneralReporter extends AbstractReportProvider { private boolean sessionScopeAvailable; public static class TestDetailsDTO { private String testId; private String testName; public String getTestId() { return testId; } public void setTestId(String testId) { this.testId = testId; } public String getTestName() { return testName; } public void setTestName(String testName) { this.testName = testName; } } public boolean isSessionScopeAvailable() { return sessionScopeAvailable; } public void setSessionScopeAvailable(boolean sessionScopeAvailable) { this.sessionScopeAvailable = sessionScopeAvailable; } @Override public JRDataSource getDataSource() { List<TestDetailsDTO> result = new ArrayList<TestDetailsDTO>(); //if a session scope is disable we return a bean with an empty collection if (!sessionScopeAvailable){ return new JRBeanCollectionDataSource(result); } SystemUnderTestPlotsGeneralProvider plotsGeneralProvider = (SystemUnderTestPlotsGeneralProvider) getContext().getMappedProvider("sysUTPlotsGeneral"); Set<String> boxIdentifiers = plotsGeneralProvider.getStatistics().findBoxIdentifiers(); Set<String> sutUrls = plotsGeneralProvider.getStatistics().findSutUrls(); for (GroupKey groupName : plotsGeneralProvider.getPlotGroups().getPlotGroups().keySet()) { if (hasGlobalStatistics(plotsGeneralProvider, groupName)) { TestDetailsDTO testDetailsDTO = new TestDetailsDTO(); testDetailsDTO.setTestId(groupName.getUpperName()); testDetailsDTO.setTestName(groupName.getUpperName()); result.add(testDetailsDTO); } for (String boxIdentifier : boxIdentifiers) { if (hasBoxStatistics(plotsGeneralProvider, boxIdentifier, groupName)) { String name = groupName.getUpperName() + " on " + boxIdentifier; TestDetailsDTO testDetailsDTO = new TestDetailsDTO(); 
testDetailsDTO.setTestId(name); testDetailsDTO.setTestName(name); result.add(testDetailsDTO); } } for (String sutUrl : sutUrls) { if (hasSutStatistics(plotsGeneralProvider, sutUrl, groupName)) { String name = groupName.getUpperName() + " on " + sutUrl; TestDetailsDTO testDetailsDTO = new TestDetailsDTO(); testDetailsDTO.setTestId(name); testDetailsDTO.setTestName(name); result.add(testDetailsDTO); } } } return new JRBeanCollectionDataSource(result); } private boolean hasGlobalStatistics(SystemUnderTestPlotsGeneralProvider plotsGeneralProvider, GroupKey groupName) { for (MonitoringParameter parameterId : plotsGeneralProvider.getPlotGroups().getPlotGroups().get(groupName)) { MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId); if (plotsGeneralProvider.getStatistics().hasGlobalStatistics(param)) { return true; } } return false; } private boolean hasBoxStatistics(SystemUnderTestPlotsGeneralProvider plotsGeneralProvider, String boxIdentifier, GroupKey groupName) { for (MonitoringParameter parameterId : plotsGeneralProvider.getPlotGroups().getPlotGroups().get(groupName)) { MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId); if (plotsGeneralProvider.getStatistics().hasBoxStatistics(param, boxIdentifier)) { return true; } } return false; } private boolean hasSutStatistics(SystemUnderTestPlotsGeneralProvider plotsGeneralProvider, String sutUrl, GroupKey groupName) { for (MonitoringParameter parameterId : plotsGeneralProvider.getPlotGroups().getPlotGroups().get(groupName)) { MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId); if (plotsGeneralProvider.getStatistics().hasSutStatistics(param, sutUrl)) { return true; } } return false; } }
chassis/core/src/main/java/com/griddynamics/jagger/engine/e1/reporting/TestGeneralReporter.java
/* * Copyright (c) 2010-2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved * http://www.griddynamics.com * * This library is free software; you can redistribute it and/or modify it under the terms of * the GNU Lesser General Public License as published by the Free Software Foundation; either * version 2.1 of the License, or any later version. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.griddynamics.jagger.engine.e1.reporting; import com.griddynamics.jagger.agent.model.MonitoringParameter; import com.griddynamics.jagger.monitoring.MonitoringParameterBean; import com.griddynamics.jagger.monitoring.reporting.GroupKey; import com.griddynamics.jagger.monitoring.reporting.SystemUnderTestPlotsGeneralProvider; import com.griddynamics.jagger.reporting.AbstractReportProvider; import net.sf.jasperreports.engine.JRDataSource; import net.sf.jasperreports.engine.data.JRBeanCollectionDataSource; import java.util.ArrayList; import java.util.List; import java.util.Set; /** * User: dkotlyarov */ public class TestGeneralReporter extends AbstractReportProvider { private boolean sessionScopeAvailable; public static class TestDetailsDTO { private String testId; private String testName; public String getTestId() { return testId; } public void setTestId(String testId) { this.testId = testId; } public String getTestName() { return testName; } public void setTestName(String testName) { this.testName = testName; } } public boolean isSessionScopeAvailable() { return sessionScopeAvailable; } public void setSessionScopeAvailable(boolean sessionScopeAvailable) { this.sessionScopeAvailable = sessionScopeAvailable; } @Override public JRDataSource getDataSource() { List<TestDetailsDTO> result = new ArrayList<TestDetailsDTO>(); //if a session scope is disable we return a bean with an empty collection if (!sessionScopeAvailable) return new JRBeanCollectionDataSource(result); SystemUnderTestPlotsGeneralProvider plotsGeneralProvider = (SystemUnderTestPlotsGeneralProvider) getContext().getMappedProvider("sysUTPlotsGeneral"); Set<String> boxIdentifiers = plotsGeneralProvider.getStatistics().findBoxIdentifiers(); Set<String> sutUrls = plotsGeneralProvider.getStatistics().findSutUrls(); for (GroupKey groupName : plotsGeneralProvider.getPlotGroups().getPlotGroups().keySet()) { if (hasGlobalStatistics(plotsGeneralProvider, groupName)) { TestDetailsDTO testDetailsDTO = new TestDetailsDTO(); testDetailsDTO.setTestId(groupName.getUpperName()); testDetailsDTO.setTestName(groupName.getUpperName()); result.add(testDetailsDTO); } for (String boxIdentifier : boxIdentifiers) { if (hasBoxStatistics(plotsGeneralProvider, boxIdentifier, groupName)) { String name = groupName.getUpperName() + " on " + boxIdentifier; TestDetailsDTO testDetailsDTO = new TestDetailsDTO(); 
testDetailsDTO.setTestId(name); testDetailsDTO.setTestName(name); result.add(testDetailsDTO); } } for (String sutUrl : sutUrls) { if (hasSutStatistics(plotsGeneralProvider, sutUrl, groupName)) { String name = groupName.getUpperName() + " on " + sutUrl; TestDetailsDTO testDetailsDTO = new TestDetailsDTO(); testDetailsDTO.setTestId(name); testDetailsDTO.setTestName(name); result.add(testDetailsDTO); } } } return new JRBeanCollectionDataSource(result); } private boolean hasGlobalStatistics(SystemUnderTestPlotsGeneralProvider plotsGeneralProvider, GroupKey groupName) { for (MonitoringParameter parameterId : plotsGeneralProvider.getPlotGroups().getPlotGroups().get(groupName)) { MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId); if (plotsGeneralProvider.getStatistics().hasGlobalStatistics(param)) { return true; } } return false; } private boolean hasBoxStatistics(SystemUnderTestPlotsGeneralProvider plotsGeneralProvider, String boxIdentifier, GroupKey groupName) { for (MonitoringParameter parameterId : plotsGeneralProvider.getPlotGroups().getPlotGroups().get(groupName)) { MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId); if (plotsGeneralProvider.getStatistics().hasBoxStatistics(param, boxIdentifier)) { return true; } } return false; } private boolean hasSutStatistics(SystemUnderTestPlotsGeneralProvider plotsGeneralProvider, String sutUrl, GroupKey groupName) { for (MonitoringParameter parameterId : plotsGeneralProvider.getPlotGroups().getPlotGroups().get(groupName)) { MonitoringParameterBean param = MonitoringParameterBean.copyOf(parameterId); if (plotsGeneralProvider.getStatistics().hasSutStatistics(param, sutUrl)) { return true; } } return false; } }
JFG-652 delete session scope plots
chassis/core/src/main/java/com/griddynamics/jagger/engine/e1/reporting/TestGeneralReporter.java
JFG-652 delete session scope plots
Java
lgpl-2.1
89905d9cf91c17760f2f7332466c3af584173b25
0
levants/lightmare
package org.lightmare.deploy.fs; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Path; import java.nio.file.StandardWatchEventKinds; import java.nio.file.WatchEvent; import java.nio.file.WatchEvent.Kind; import java.nio.file.WatchKey; import java.nio.file.WatchService; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.RestContainer; import org.lightmare.config.Configuration; import org.lightmare.jpa.datasource.FileParsers; import org.lightmare.jpa.datasource.Initializer; import org.lightmare.rest.providers.RestProvider; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.LogUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.concurrent.ThreadFactoryUtil; import org.lightmare.utils.fs.WatchUtils; /** * Deployment manager, {@link Watcher#deployFile(URL)}, * {@link Watcher#undeployFile(URL)}, {@link Watcher#listDeployments()} and * {@link File} modification event handler for deployments if java version is * 1.7 or above * * @author levan * @since 0.0.45-SNAPSHOT */ public class Watcher implements Runnable { private static final String DEPLOY_THREAD_NAME = "watch_thread"; private static final int DEPLOY_POOL_PRIORITY = Thread.MAX_PRIORITY - 5; private static final long SLEEP_TIME = 5500L; private static final ExecutorService DEPLOY_POOL = Executors .newSingleThreadExecutor(new ThreadFactoryUtil(DEPLOY_THREAD_NAME, DEPLOY_POOL_PRIORITY)); private Set<DeploymentDirectory> deployments; private Set<String> dataSources; private static final Logger LOG = Logger.getLogger(Watcher.class); /** * Defines file types for watch service * * @author Levan * */ private static enum WatchFileType { DATA_SOURCE, DEPLOYMENT, NONE; } /** * To filter only deployed sub files from directory * * @author levan * @since 0.0.45-SNAPSHOT */ private static class DeployFiletr implements FileFilter { @Override public boolean accept(File file) { boolean accept; try { URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); accept = MetaContainer.chackDeployment(url); } catch (MalformedURLException ex) { LOG.error(ex.getMessage(), ex); accept = false; } catch (IOException ex) { LOG.error(ex.getMessage(), ex); accept = false; } return accept; } } private Watcher() { deployments = getDeployDirectories(); dataSources = getDataSourcePaths(); } private static URL getAppropriateURL(String fileName) throws IOException { File file = new File(fileName); URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); return url; } private static Set<DeploymentDirectory> getDeployDirectories() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (config.isWatchStatus() && CollectionUtils.valid(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } return deploymetDirss; } private static 
Set<String> getDataSourcePaths() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (config.isWatchStatus() && CollectionUtils.valid(pathsCurrent)) { paths.addAll(pathsCurrent); } } return paths; } private static WatchFileType checkType(String fileName) { WatchFileType type; File file = new File(fileName); String path = file.getPath(); String filePath = WatchUtils.clearPath(path); path = file.getParent(); String parentPath = WatchUtils.clearPath(path); Set<DeploymentDirectory> apps = getDeployDirectories(); Set<String> dss = getDataSourcePaths(); if (CollectionUtils.valid(apps)) { String deploymantPath; Iterator<DeploymentDirectory> iterator = apps.iterator(); boolean notDeployment = Boolean.TRUE; DeploymentDirectory deployment; while (iterator.hasNext() && notDeployment) { deployment = iterator.next(); deploymantPath = deployment.getPath(); notDeployment = ObjectUtils.notEquals(deploymantPath, parentPath); } if (notDeployment) { type = WatchFileType.NONE; } else { type = WatchFileType.DEPLOYMENT; } } else if (CollectionUtils.valid(dss) && dss.contains(filePath)) { type = WatchFileType.DATA_SOURCE; } else { type = WatchFileType.NONE; } return type; } private static void fillFileList(File[] files, List<File> list) { if (CollectionUtils.valid(files)) { for (File file : files) { list.add(file); } } } /** * Lists all deployed {@link File}s * * @return {@link List}<File> */ public static List<File> listDeployments() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (CollectionUtils.valid(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } File[] files; List<File> list = new ArrayList<File>(); if (CollectionUtils.valid(deploymetDirss)) { String path; DeployFiletr filter = new DeployFiletr(); for (DeploymentDirectory deployment : deploymetDirss) { path = deployment.getPath(); files = new File(path).listFiles(filter); fillFileList(files, list); } } return list; } /** * Lists all data source {@link File}s * * @return {@link List}<File> */ public static List<File> listDataSources() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (CollectionUtils.valid(pathsCurrent)) { paths.addAll(pathsCurrent); } } File file; List<File> list = new ArrayList<File>(); if (CollectionUtils.valid(paths)) { for (String path : paths) { file = new File(path); list.add(file); } } return list; } public static void deployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { FileParsers fileParsers = new FileParsers(); fileParsers.parseStandaloneXml(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); deployFile(url); } } public static void deployFile(URL url) throws IOException { URL[] archives = { url }; MetaContainer.getCreator().scanForBeans(archives); } public static void undeployFile(URL url) throws IOException { boolean valid = MetaContainer.undeploy(url); if (valid && RestContainer.hasRest()) { 
RestProvider.reload(); } } public static void undeployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { Initializer.undeploy(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); undeployFile(url); } } public static void redeployFile(String fileName) throws IOException { undeployFile(fileName); deployFile(fileName); } private void handleEvent(Path dir, WatchEvent<Path> currentEvent) throws IOException { if (currentEvent == null) { return; } Path prePath = currentEvent.context(); Path path = dir.resolve(prePath); String fileName = path.toString(); int count = currentEvent.count(); Kind<?> kind = currentEvent.kind(); if (kind == StandardWatchEventKinds.ENTRY_MODIFY) { LogUtils.info(LOG, "Modify: %s, count: %s\n", fileName, count); redeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_DELETE) { LogUtils.info(LOG, "Delete: %s, count: %s\n", fileName, count); undeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_CREATE) { LogUtils.info(LOG, "Create: %s, count: %s\n", fileName, count); redeployFile(fileName); } } private void runService(WatchService watch) throws IOException { Path dir; boolean toRun = true; boolean valid; while (toRun) { try { WatchKey key; key = watch.take(); List<WatchEvent<?>> events = key.pollEvents(); WatchEvent<?> currentEvent = null; WatchEvent<Path> typedCurrentEvent; int times = 0; dir = (Path) key.watchable(); for (WatchEvent<?> event : events) { if (event.kind() == StandardWatchEventKinds.OVERFLOW) { continue; } if (times == 0 || event.count() > currentEvent.count()) { currentEvent = event; } times++; valid = key.reset(); toRun = valid && key.isValid(); if (toRun) { Thread.sleep(SLEEP_TIME); typedCurrentEvent = ObjectUtils.cast(currentEvent); handleEvent(dir, typedCurrentEvent); } } } catch (InterruptedException ex) { throw new IOException(ex); } } } private void registerPath(FileSystem fs, String path, WatchService watch) throws IOException { Path deployPath = fs.getPath(path); deployPath.register(watch, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.OVERFLOW, StandardWatchEventKinds.ENTRY_DELETE); runService(watch); } private void registerPaths(File[] files, FileSystem fs, WatchService watch) throws IOException { String path; for (File file : files) { path = file.getPath(); registerPath(fs, path, watch); } } private void registerPaths(Collection<DeploymentDirectory> deploymentDirss, FileSystem fs, WatchService watch) throws IOException { String path; boolean scan; File directory; File[] files; for (DeploymentDirectory deployment : deploymentDirss) { path = deployment.getPath(); scan = deployment.isScan(); if (scan) { directory = new File(path); files = directory.listFiles(); if (CollectionUtils.valid(files)) { registerPaths(files, fs, watch); } } else { registerPath(fs, path, watch); } } } private void registerDsPaths(Collection<String> paths, FileSystem fs, WatchService watch) throws IOException { for (String path : paths) { registerPath(fs, path, watch); } } @Override public void run() { try { FileSystem fs = FileSystems.getDefault(); WatchService watch = null; try { watch = fs.newWatchService(); } catch (IOException ex) { LOG.error(ex.getMessage(), ex); throw ex; } if (CollectionUtils.valid(deployments)) { registerPaths(deployments, fs, watch); } if (CollectionUtils.valid(dataSources)) { registerDsPaths(dataSources, fs, watch); } } catch 
(IOException ex) { LOG.fatal(ex.getMessage(), ex); LOG.fatal("system going to shut down cause of hot deployment"); try { ConnectionContainer.closeConnections(); } catch (IOException iex) { LOG.fatal(iex.getMessage(), iex); } System.exit(-1); } finally { DEPLOY_POOL.shutdown(); } } public static void startWatch() { Watcher watcher = new Watcher(); DEPLOY_POOL.submit(watcher); } }
src/main/java/org/lightmare/deploy/fs/Watcher.java
package org.lightmare.deploy.fs; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Path; import java.nio.file.StandardWatchEventKinds; import java.nio.file.WatchEvent; import java.nio.file.WatchEvent.Kind; import java.nio.file.WatchKey; import java.nio.file.WatchService; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.cache.MetaContainer; import org.lightmare.cache.RestContainer; import org.lightmare.config.Configuration; import org.lightmare.jpa.datasource.FileParsers; import org.lightmare.jpa.datasource.Initializer; import org.lightmare.rest.providers.RestProvider; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.LogUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.concurrent.ThreadFactoryUtil; import org.lightmare.utils.fs.WatchUtils; /** * Deployment manager, {@link Watcher#deployFile(URL)}, * {@link Watcher#undeployFile(URL)}, {@link Watcher#listDeployments()} and * {@link File} modification event handler for deployments if java version is * 1.7 or above * * @author levan * @since 0.0.45-SNAPSHOT */ public class Watcher implements Runnable { private static final String DEPLOY_THREAD_NAME = "watch_thread"; private static final int DEPLOY_POOL_PRIORITY = Thread.MAX_PRIORITY - 5; private static final long SLEEP_TIME = 5500L; private static final ExecutorService DEPLOY_POOL = Executors .newSingleThreadExecutor(new ThreadFactoryUtil(DEPLOY_THREAD_NAME, DEPLOY_POOL_PRIORITY)); private Set<DeploymentDirectory> deployments; private Set<String> dataSources; private static final Logger LOG = Logger.getLogger(Watcher.class); /** * Defines file types for watch service * * @author Levan * */ private static enum WatchFileType { DATA_SOURCE, DEPLOYMENT, NONE; } /** * To filter only deployed sub files from directory * * @author levan * */ private static class DeployFiletr implements FileFilter { @Override public boolean accept(File file) { boolean accept; try { URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); accept = MetaContainer.chackDeployment(url); } catch (MalformedURLException ex) { LOG.error(ex.getMessage(), ex); accept = false; } catch (IOException ex) { LOG.error(ex.getMessage(), ex); accept = false; } return accept; } } private Watcher() { deployments = getDeployDirectories(); dataSources = getDataSourcePaths(); } private static URL getAppropriateURL(String fileName) throws IOException { File file = new File(fileName); URL url = file.toURI().toURL(); url = WatchUtils.clearURL(url); return url; } private static Set<DeploymentDirectory> getDeployDirectories() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (config.isWatchStatus() && CollectionUtils.valid(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } return deploymetDirss; } private static Set<String> 
getDataSourcePaths() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (config.isWatchStatus() && CollectionUtils.valid(pathsCurrent)) { paths.addAll(pathsCurrent); } } return paths; } private static WatchFileType checkType(String fileName) { WatchFileType type; File file = new File(fileName); String path = file.getPath(); String filePath = WatchUtils.clearPath(path); path = file.getParent(); String parentPath = WatchUtils.clearPath(path); Set<DeploymentDirectory> apps = getDeployDirectories(); Set<String> dss = getDataSourcePaths(); if (CollectionUtils.valid(apps)) { String deploymantPath; Iterator<DeploymentDirectory> iterator = apps.iterator(); boolean notDeployment = Boolean.TRUE; DeploymentDirectory deployment; while (iterator.hasNext() && notDeployment) { deployment = iterator.next(); deploymantPath = deployment.getPath(); notDeployment = ObjectUtils.notEquals(deploymantPath, parentPath); } if (notDeployment) { type = WatchFileType.NONE; } else { type = WatchFileType.DEPLOYMENT; } } else if (CollectionUtils.valid(dss) && dss.contains(filePath)) { type = WatchFileType.DATA_SOURCE; } else { type = WatchFileType.NONE; } return type; } private static void fillFileList(File[] files, List<File> list) { if (CollectionUtils.valid(files)) { for (File file : files) { list.add(file); } } } /** * Lists all deployed {@link File}s * * @return {@link List}<File> */ public static List<File> listDeployments() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<DeploymentDirectory> deploymetDirss = new HashSet<DeploymentDirectory>(); Set<DeploymentDirectory> deploymetDirssCurrent; for (Configuration config : configs) { deploymetDirssCurrent = config.getDeploymentPath(); if (CollectionUtils.valid(deploymetDirssCurrent)) { deploymetDirss.addAll(deploymetDirssCurrent); } } File[] files; List<File> list = new ArrayList<File>(); if (CollectionUtils.valid(deploymetDirss)) { String path; DeployFiletr filter = new DeployFiletr(); for (DeploymentDirectory deployment : deploymetDirss) { path = deployment.getPath(); files = new File(path).listFiles(filter); fillFileList(files, list); } } return list; } /** * Lists all data source {@link File}s * * @return {@link List}<File> */ public static List<File> listDataSources() { Collection<Configuration> configs = MetaContainer.CONFIGS.values(); Set<String> paths = new HashSet<String>(); Set<String> pathsCurrent; for (Configuration config : configs) { pathsCurrent = config.getDataSourcePath(); if (CollectionUtils.valid(pathsCurrent)) { paths.addAll(pathsCurrent); } } File file; List<File> list = new ArrayList<File>(); if (CollectionUtils.valid(paths)) { for (String path : paths) { file = new File(path); list.add(file); } } return list; } public static void deployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { FileParsers fileParsers = new FileParsers(); fileParsers.parseStandaloneXml(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); deployFile(url); } } public static void deployFile(URL url) throws IOException { URL[] archives = { url }; MetaContainer.getCreator().scanForBeans(archives); } public static void undeployFile(URL url) throws IOException { boolean valid = MetaContainer.undeploy(url); if (valid && RestContainer.hasRest()) { 
RestProvider.reload(); } } public static void undeployFile(String fileName) throws IOException { WatchFileType type = checkType(fileName); if (type.equals(WatchFileType.DATA_SOURCE)) { Initializer.undeploy(fileName); } else if (type.equals(WatchFileType.DEPLOYMENT)) { URL url = getAppropriateURL(fileName); undeployFile(url); } } public static void redeployFile(String fileName) throws IOException { undeployFile(fileName); deployFile(fileName); } private void handleEvent(Path dir, WatchEvent<Path> currentEvent) throws IOException { if (currentEvent == null) { return; } Path prePath = currentEvent.context(); Path path = dir.resolve(prePath); String fileName = path.toString(); int count = currentEvent.count(); Kind<?> kind = currentEvent.kind(); if (kind == StandardWatchEventKinds.ENTRY_MODIFY) { LogUtils.info(LOG, "Modify: %s, count: %s\n", fileName, count); redeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_DELETE) { LogUtils.info(LOG, "Delete: %s, count: %s\n", fileName, count); undeployFile(fileName); } else if (kind == StandardWatchEventKinds.ENTRY_CREATE) { LogUtils.info(LOG, "Create: %s, count: %s\n", fileName, count); redeployFile(fileName); } } private void runService(WatchService watch) throws IOException { Path dir; boolean toRun = true; boolean valid; while (toRun) { try { WatchKey key; key = watch.take(); List<WatchEvent<?>> events = key.pollEvents(); WatchEvent<?> currentEvent = null; WatchEvent<Path> typedCurrentEvent; int times = 0; dir = (Path) key.watchable(); for (WatchEvent<?> event : events) { if (event.kind() == StandardWatchEventKinds.OVERFLOW) { continue; } if (times == 0 || event.count() > currentEvent.count()) { currentEvent = event; } times++; valid = key.reset(); toRun = valid && key.isValid(); if (toRun) { Thread.sleep(SLEEP_TIME); typedCurrentEvent = ObjectUtils.cast(currentEvent); handleEvent(dir, typedCurrentEvent); } } } catch (InterruptedException ex) { throw new IOException(ex); } } } private void registerPath(FileSystem fs, String path, WatchService watch) throws IOException { Path deployPath = fs.getPath(path); deployPath.register(watch, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.OVERFLOW, StandardWatchEventKinds.ENTRY_DELETE); runService(watch); } private void registerPaths(File[] files, FileSystem fs, WatchService watch) throws IOException { String path; for (File file : files) { path = file.getPath(); registerPath(fs, path, watch); } } private void registerPaths(Collection<DeploymentDirectory> deploymentDirss, FileSystem fs, WatchService watch) throws IOException { String path; boolean scan; File directory; File[] files; for (DeploymentDirectory deployment : deploymentDirss) { path = deployment.getPath(); scan = deployment.isScan(); if (scan) { directory = new File(path); files = directory.listFiles(); if (CollectionUtils.valid(files)) { registerPaths(files, fs, watch); } } else { registerPath(fs, path, watch); } } } private void registerDsPaths(Collection<String> paths, FileSystem fs, WatchService watch) throws IOException { for (String path : paths) { registerPath(fs, path, watch); } } @Override public void run() { try { FileSystem fs = FileSystems.getDefault(); WatchService watch = null; try { watch = fs.newWatchService(); } catch (IOException ex) { LOG.error(ex.getMessage(), ex); throw ex; } if (CollectionUtils.valid(deployments)) { registerPaths(deployments, fs, watch); } if (CollectionUtils.valid(dataSources)) { registerDsPaths(dataSources, fs, watch); } } catch 
(IOException ex) { LOG.fatal(ex.getMessage(), ex); LOG.fatal("system going to shut down cause of hot deployment"); try { ConnectionContainer.closeConnections(); } catch (IOException iex) { LOG.fatal(iex.getMessage(), iex); } System.exit(-1); } finally { DEPLOY_POOL.shutdown(); } } public static void startWatch() { Watcher watcher = new Watcher(); DEPLOY_POOL.submit(watcher); } }
improved code / comments in utility classes
src/main/java/org/lightmare/deploy/fs/Watcher.java
improved code / comments in utility classes
Java
apache-2.0
48b93e4829bd7a30b9adf059f8a445ee1e418af6
0
ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma
/* * The Gemma project. * * Copyright (c) 2006 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.apps; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.lang.StringUtils; import ubic.gemma.model.association.Gene2GOAssociationService; import ubic.gemma.model.common.Describable; import ubic.gemma.model.common.auditAndSecurity.eventType.ArrayDesignAnnotationFileEvent; import ubic.gemma.model.common.auditAndSecurity.eventType.AuditEventType; import ubic.gemma.model.common.description.VocabCharacteristic; import ubic.gemma.model.expression.arrayDesign.ArrayDesign; import ubic.gemma.model.expression.designElement.CompositeSequence; import ubic.gemma.model.expression.designElement.CompositeSequenceService; import ubic.gemma.model.genome.Gene; import ubic.gemma.model.genome.PredictedGene; import ubic.gemma.model.genome.ProbeAlignedRegion; import ubic.gemma.model.genome.gene.GeneService; import ubic.gemma.ontology.GeneOntologyService; import ubic.gemma.ontology.OntologyTerm; import ubic.gemma.util.ConfigUtils; /** * Given an array design creates a Gene Ontology Annotation file * Given a batch file creates all the Annotation files for the AD's specified in the batch file * Given nothing creates annotation files for every AD that isn't subsumed or mergedInto another AD. * * TODO: Make files created zip file not plain text files. * I tried to do this unsuccessfully. The zip file that was created kept being corrupt when i tried to open it. * My attempt involved wrapping the output stream in a GZIPOutputStream before creating a writer. * I tried adding a call to GZIPOutputStream.finish() before closing the file but that didn't work either. 
* * @author klc * @versio $Id: ArrayDesignGOAnnotationGeneratorCli.java,v 1.23 2007/10/27 * 19:46:42 paul Exp $ */ public class ArrayDesignGOAnnotationGeneratorCli extends ArrayDesignSequenceManipulatingCli { // constants final String SHORT = "short"; final String LONG = "long"; final String BIOPROCESS = "biologicalprocess"; final String BIOLOGICAL_PROCESS = "biological_process"; private static final String ANNOT_DATA_DIR = ConfigUtils .getString("gemma.appdata.home") + "/microAnnots/"; // services Gene2GOAssociationService gene2GoAssociationService; GeneService geneService; CompositeSequenceService compositeSequenceService; GeneOntologyService goService; // file info String batchFileName; //GZIPOutputStream gzipOutputStream; boolean processAllADs = false; String fileName = null; // types boolean shortAnnotations; boolean longAnnotations; boolean biologicalProcessAnnotations; boolean includeGemmaGenes; boolean overWrite = false; /* * (non-Javadoc) * * @see ubic.gemma.util.AbstractCLI#buildOptions() */ @SuppressWarnings("static-access") @Override protected void buildOptions() { super.buildOptions(); Option annotationFileOption = OptionBuilder .hasArg() .withArgName("Annotation file name") .withDescription( "The name of the Annotation file to be generated [Default = Accession number]") .withLongOpt("annotation").create('f'); Option genesIncludedOption = OptionBuilder .hasArg() .withArgName("Genes to include") .withDescription( "The type of genes that will be included: all or standard." + " All includes predicted genes and probe aligned regions. " + "Standard mode only includes known genes [Default = standard]") .withLongOpt("genes").create('g'); Option annotationType = OptionBuilder .hasArg() .withArgName("Type of annotation file") .withDescription( "Which GO terms to add to the annotation file: short, long, biologicalprocess " + "[Default=short (no parents)]. If you select biologialprocess, parents are not included.") .withLongOpt("type").create('t'); Option fileLoading = OptionBuilder .hasArg() .withArgName("Batch Generating of annotation files") .withDescription( "Use specified file for batch generating annotation files. " + "specified File format (per line): GPL,outputFileName,[short|long|biologicalprocess] Note: Overrides -a,-t,-f command line options ") .withLongOpt("load").create('l'); Option batchLoading = OptionBuilder .withArgName("Generating all annotation files") .withDescription( "Generates annotation files for all Array Designs (omits ones that are subsumed or merged) uses accession as annotation file name." + "Creates 3 zip files for each AD, no parents, parents, biological process. 
Overrides all other settings.") .withLongOpt("batch").create('b'); Option overWrite = OptionBuilder .withArgName("Overwrites existing files") .withDescription("If set will overwrite existing annotation files in the output directory") .withLongOpt("overwrite").create('o'); addOption(annotationFileOption); addOption(annotationType); addOption(fileLoading); addOption(genesIncludedOption); addOption(batchLoading); addOption(overWrite); } public static void main(String[] args) { ArrayDesignGOAnnotationGeneratorCli p = new ArrayDesignGOAnnotationGeneratorCli(); try { Exception ex = p.doWork(args); if (ex != null) { ex.printStackTrace(); } } catch (Exception e) { throw new RuntimeException(e); } } /* * (non-Javadoc) * * @see ubic.gemma.util.AbstractCLI#doWork(java.lang.String[]) */ @Override protected Exception doWork(String[] args) { Exception err = processCommandLine( "Array design probe ontology annotation ", args); if (err != null) return err; int n = 0; try { while (!goService.isReady()) { Thread.sleep(500); if ((n++ % 100) == 0) { log.debug("Waiting for ontologies to load"); } } if (processAllADs) { processAllADs(); } else if (batchFileName != null) { processBatchFile(this.batchFileName); } else { ArrayDesign arrayDesign = locateArrayDesign(arrayDesignName); processAD(arrayDesign, this.fileName); } } catch (Exception e) { return e; } return null; } /** * Goes over all the AD's in the database and creates annotation 3 * annotation files for each AD that is not merged into or subsumed by * another AD. Uses the Accession ID (GPL???) for the name of the annotation * file. Appends noparents, bioProcess, allParents to the file name. * * FIXME: This could be spead up dramatically. The same AD is being thawed 3 times. * the same genes are being loaded 3 times. The same go terms are loaded 3 times... etc... * * Would make sense to paralize this process as more than 1 AD could be processed at once. * @throws IOException */ @SuppressWarnings("unchecked") protected void processAllADs() throws IOException { Collection<ArrayDesign> allADs = this.arrayDesignService.loadAll(); this.includeGemmaGenes = false; this.shortAnnotations = false; this.longAnnotations = false; this.biologicalProcessAnnotations = false; for (ArrayDesign ad : allADs) { if (ad.getSubsumingArrayDesign() != null) { log.info("Skipping " + ad.getName() + " because it is subsumed by " + ad.getSubsumingArrayDesign().getName()); continue; } if (ad.getMergedInto() != null) { log.info("Skipping " + ad.getName() + " because it was merged into " + ad.getMergedInto().getName()); continue; } log.info("Processing AD: " + ad.getName()); this.shortAnnotations = true; processAD(ad, ad.getShortName() + "_NoParents"); this.shortAnnotations = false; this.biologicalProcessAnnotations = true; processAD(ad, ad.getShortName() + "_bioProcess"); this.biologicalProcessAnnotations = false; this.longAnnotations = true; processAD(ad, ad.getShortName() + "_allParents"); this.longAnnotations = false; } } /** * @throws IOException * process the current AD */ protected void processAD(ArrayDesign arrayDesign, String fileName) throws IOException { Writer writer = initOutputFile(fileName); //if no writer then we should abort (this could happen in case where we don't want to overwrite files) if (writer == null) { log.info(arrayDesign.getName() + " annotation file already exits. Skipping. 
"); return; } unlazifyArrayDesign(arrayDesign); Collection<CompositeSequence> compositeSequences = arrayDesign .getCompositeSequences(); log.info(arrayDesign.getName() + " has " + compositeSequences.size() + " composite sequences"); int numProcessed = generateAnnotationFile(writer, compositeSequences); writer.flush(); //gzipOutputStream.finish(); // Not nice but need to call finish on the // gzoutput stream or else the zip file will // be corrupt. writer.close(); log.info("Finished processing platform: " + arrayDesign.getName()); successObjects.add( String.format( "%s (%s)", arrayDesign.getName(), arrayDesign.getShortName() )); if (StringUtils.isBlank(fileName)) { log.info("Processed " + numProcessed + " composite sequences"); audit(arrayDesign, "Processed " + numProcessed + " composite sequences"); } else { log.info("Created file: " + fileName + " with " + numProcessed + " values"); audit(arrayDesign, "Created file: " + fileName + " with " + numProcessed + " values"); } } /** * @param arrayDesign */ private void audit(ArrayDesign arrayDesign, String note) { AuditEventType eventType = ArrayDesignAnnotationFileEvent.Factory .newInstance(); auditTrailService.addUpdateEvent(arrayDesign, eventType, note); } /** * @param fileName * @throws IOException * used for batch processing */ protected void processBatchFile(String fileName) throws IOException { log.info("Loading platforms to annotate from " + fileName); InputStream is = new FileInputStream(fileName); BufferedReader br = new BufferedReader(new InputStreamReader(is)); String line = null; int lineNumber = 0; while ((line = br.readLine()) != null) { lineNumber++; if (StringUtils.isBlank(line)) { continue; } String[] arguments = StringUtils.split(line, ','); String gpl = arguments[0]; String annotationFileName = arguments[1]; String type = arguments[2]; // Check the syntax of the given line if ((gpl == null) || StringUtils.isBlank(gpl)) { log .warn("Incorrect line format in Batch Annotation file: Line " + lineNumber + "Platform is required: " + line); log.warn("Unable to process that line. Skipping to next."); continue; } if ((annotationFileName == null) || StringUtils.isBlank(annotationFileName)) { annotationFileName = gpl; log .warn("No annotation file name specified on line: " + lineNumber + " Using platform name as default annotation file name"); } if ((type == null) || StringUtils.isBlank(type)) { type = SHORT; log.warn("No type specifed for line: " + lineNumber + " Defaulting to short"); } // need to set these so processing ad works correctly (todo: make // processtype take all 3 parameter) this.arrayDesignName = gpl; processType(type); ArrayDesign arrayDesign = locateArrayDesign(arrayDesignName); try { processAD(arrayDesign, annotationFileName); } catch (Exception e) { log.error("**** Exception while processing " + arrayDesignName + ": " + e.getMessage() + " ********"); log.error(e, e); cacheException(e); errorObjects.add(arrayDesignName + ": " + e.getMessage()); continue; } } summarizeProcessing(); } /** * Opens a file for writing and adds the header. * * @param fileName * if Null, output will be written to standard output. 
* @throws IOException */ protected Writer initOutputFile(String fileName) throws IOException { Writer writer; if (StringUtils.isBlank(fileName)) { log.info("Output to stdout"); writer = new PrintWriter(System.out); } else { log.info("Attempting to create new annotation file " + fileName + " \n"); File f = new File(ANNOT_DATA_DIR + fileName + ".an.txt"); if (f.exists()) { if (this.overWrite) { log.warn("Will overwrite existing file " + f); f.delete(); } else return null; } File parentDir = f.getParentFile(); if (!parentDir.exists()) parentDir.mkdirs(); f.createNewFile(); //gzipOutputStream = new GZIPOutputStream(new FileOutputStream(f)); writer = new OutputStreamWriter(new FileOutputStream(f)); } writer.write("Probe ID \t Gene \t Description \t GO Terms \n"); return writer; } /** * @param compositeSequences * @throws IOException * Gets the file ready for printing */ @SuppressWarnings("unchecked") protected int generateAnnotationFile(Writer writer, Collection<CompositeSequence> compositeSequences) throws IOException { int compositeSequencesProcessed = 0; for (CompositeSequence sequence : compositeSequences) { Collection<Gene> genes = compositeSequenceService .getGenes(sequence); ++compositeSequencesProcessed; if ((genes == null) || (genes.isEmpty())) { writeAnnotationLine(writer, sequence.getName(), "", "", null); continue; } // actually the collection gotten back is a collection of proxies // which causes issues. Need to reload the // genes from the db. Collection<Long> geneIds = new ArrayList<Long>(); for (Gene g : genes) { geneIds.add(g.getId()); } genes = geneService.loadMultiple(geneIds); String geneNames = null; String geneDescriptions = null; Collection<OntologyTerm> goTerms = new ArrayList<OntologyTerm>(); // Might be mulitple genes for a given cs. Need to hash it into one. for (Gene gene : genes) { if (gene == null) continue; // Add PARs or predicted gene info to annotation file? 
if ((!includeGemmaGenes) && ((gene instanceof ProbeAlignedRegion) || (gene instanceof PredictedGene))) { log .debug("Gene: " + gene.getOfficialSymbol() + " not included in annotations because it is a probeAligedRegion or predictedGene"); continue; } if (log.isDebugEnabled()) log.debug("Adding gene: " + gene.getOfficialSymbol() + " of type: " + gene.getClass()); addGoTerms(goTerms, gene); geneNames = addGeneSymbol(geneNames, gene); geneDescriptions = addGeneName(geneDescriptions, gene); } writeAnnotationLine(writer, sequence.getName(), geneNames, geneDescriptions, goTerms); if (compositeSequencesProcessed % 500 == 0 && log.isInfoEnabled()) { log.info("Processed " + compositeSequencesProcessed + "/" + compositeSequences.size() + " compositeSequences "); } } return compositeSequencesProcessed; } private Collection<OntologyTerm> addGoTerms( Collection<OntologyTerm> goTerms, Gene gene) { Collection<OntologyTerm> terms = getGoTerms(gene); goTerms.addAll(terms); return terms; } private String addGeneName(String geneDescriptions, Gene gene) { if (gene.getOfficialName() != null) { if (geneDescriptions == null) geneDescriptions = gene.getOfficialName(); else geneDescriptions += "|" + gene.getOfficialName(); } return geneDescriptions; } private String addGeneSymbol(String geneNames, Gene gene) { if (gene.getOfficialSymbol() != null) { if (geneNames == null) geneNames = gene.getOfficialSymbol(); else geneNames += "|" + gene.getOfficialSymbol(); } return geneNames; } /** * @param probeId * @param gene * @param description * @param goTerms * @throws IOException * Adds one line at a time to the annotation file */ protected void writeAnnotationLine(Writer writer, String probeId, String gene, String description, Collection<OntologyTerm> goTerms) throws IOException { if (log.isDebugEnabled()) log.debug("Generating line for annotation file \n"); if (gene == null) gene = ""; if (description == null) description = ""; writer.write(probeId + "\t" + gene + "\t" + description + "\t"); if ((goTerms == null) || goTerms.isEmpty()) { writer.write("\n"); writer.flush(); return; } boolean wrote = false; for (OntologyTerm oe : goTerms) { if (oe == null) continue; if (wrote) writer.write("|" + GeneOntologyService.asRegularGoId(oe)); else writer.write(GeneOntologyService.asRegularGoId(oe)); wrote = true; } writer.write("\n"); writer.flush(); } /** * @param gene * @return the goTerms for a given gene, as configured */ @SuppressWarnings("unchecked") protected Collection<OntologyTerm> getGoTerms(Gene gene) { Collection<VocabCharacteristic> ontos = new HashSet<VocabCharacteristic>( gene2GoAssociationService.findByGene(gene)); Collection<OntologyTerm> results = new HashSet<OntologyTerm>(); for (VocabCharacteristic vc : ontos) { results.add(GeneOntologyService.getTermForId(vc.getValue())); } if ((ontos == null) || (ontos.size() == 0)) return results; if (this.shortAnnotations) return results; if (this.longAnnotations) { Collection<OntologyTerm> oes = goService.getAllParents(results); results.addAll(oes); } else if (this.biologicalProcessAnnotations) { Collection<OntologyTerm> toRemove = new HashSet<OntologyTerm>(); for (OntologyTerm ont : results) { if ((ont == null)) continue; // / shouldn't happen! 
if (!goService.isBiologicalProcess(ont)) toRemove.add(ont); } for (OntologyTerm toRemoveOnto : toRemove) { results.remove(toRemoveOnto); } } return results; } /** * @param type * Intilizes variables depending on they type for file that is * needed */ private void processType(String type) { shortAnnotations = false; longAnnotations = false; biologicalProcessAnnotations = false; if (type.equalsIgnoreCase(LONG)) longAnnotations = true; else if (type.equalsIgnoreCase(BIOPROCESS)) biologicalProcessAnnotations = true; else // ( type.equalsIgnoreCase( SHORT ) ) shortAnnotations = true; } /** * @param genesToInclude */ private void processGenesIncluded(String genesToInclude) { includeGemmaGenes = false; if (genesToInclude.equalsIgnoreCase("all")) includeGemmaGenes = true; } @Override protected void processOptions() { super.processOptions(); if (this.hasOption('f')) { this.fileName = this.getOptionValue('f'); } if (this.hasOption('t')) { processType(this.getOptionValue('t')); } if (this.hasOption('l')) { this.batchFileName = this.getOptionValue('l'); } if (this.hasOption('b')) { this.processAllADs = true; } if (this.hasOption('g')) processGenesIncluded(this.getOptionValue('g')); if (this.hasOption('o')) this.overWrite = true; gene2GoAssociationService = (Gene2GOAssociationService) this .getBean("gene2GOAssociationService"); compositeSequenceService = (CompositeSequenceService) this .getBean("compositeSequenceService"); geneService = (GeneService) this.getBean("geneService"); goService = (GeneOntologyService) this.getBean("geneOntologyService"); } }
gemma-core/src/main/java/ubic/gemma/apps/ArrayDesignGOAnnotationGeneratorCli.java
/* * The Gemma project. * * Copyright (c) 2006 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.apps; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.lang.StringUtils; import ubic.gemma.model.association.Gene2GOAssociationService; import ubic.gemma.model.common.Describable; import ubic.gemma.model.common.auditAndSecurity.eventType.ArrayDesignAnnotationFileEvent; import ubic.gemma.model.common.auditAndSecurity.eventType.AuditEventType; import ubic.gemma.model.common.description.VocabCharacteristic; import ubic.gemma.model.expression.arrayDesign.ArrayDesign; import ubic.gemma.model.expression.designElement.CompositeSequence; import ubic.gemma.model.expression.designElement.CompositeSequenceService; import ubic.gemma.model.genome.Gene; import ubic.gemma.model.genome.PredictedGene; import ubic.gemma.model.genome.ProbeAlignedRegion; import ubic.gemma.model.genome.gene.GeneService; import ubic.gemma.ontology.GeneOntologyService; import ubic.gemma.ontology.OntologyTerm; import ubic.gemma.util.ConfigUtils; /** * Given an array design creates a Gene Ontology Annotation file * Given a batch file creates all the Annotation files for the AD's specified in the batch file * Given nothing creates annotation files for every AD that isn't subsumed or mergedInto another AD. * * TODO: Make files created zip file not plain text files. * I tried to do this unsuccessfully. The zip file that was created kept being corrupt when i tried to open it. * My attempt involved wrapping the output stream in a GZIPOutputStream before creating a writer. * I tried adding a call to GZIPOutputStream.finish() before closing the file but that didn't work either. 
* * @author klc * @versio $Id: ArrayDesignGOAnnotationGeneratorCli.java,v 1.23 2007/10/27 * 19:46:42 paul Exp $ */ public class ArrayDesignGOAnnotationGeneratorCli extends ArrayDesignSequenceManipulatingCli { // constants final String SHORT = "short"; final String LONG = "long"; final String BIOPROCESS = "biologicalprocess"; final String BIOLOGICAL_PROCESS = "biological_process"; private static final String ANNOT_DATA_DIR = ConfigUtils .getString("gemma.appdata.home") + "/microAnnots/"; // services Gene2GOAssociationService gene2GoAssociationService; GeneService geneService; CompositeSequenceService compositeSequenceService; GeneOntologyService goService; // file info String batchFileName; //GZIPOutputStream gzipOutputStream; boolean processAllADs = false; String fileName = null; // types boolean shortAnnotations; boolean longAnnotations; boolean biologicalProcessAnnotations; boolean includeGemmaGenes; /* * (non-Javadoc) * * @see ubic.gemma.util.AbstractCLI#buildOptions() */ @SuppressWarnings("static-access") @Override protected void buildOptions() { super.buildOptions(); Option annotationFileOption = OptionBuilder .hasArg() .withArgName("Annotation file name") .withDescription( "The name of the Annotation file to be generated [Default = Accession number]") .withLongOpt("annotation").create('f'); Option genesIncludedOption = OptionBuilder .hasArg() .withArgName("Genes to include") .withDescription( "The type of genes that will be included: all or standard." + " All includes predicted genes and probe aligned regions. " + "Standard mode only includes known genes [Default = standard]") .withLongOpt("genes").create('g'); Option annotationType = OptionBuilder .hasArg() .withArgName("Type of annotation file") .withDescription( "Which GO terms to add to the annotation file: short, long, biologicalprocess " + "[Default=short (no parents)]. If you select biologialprocess, parents are not included.") .withLongOpt("type").create('t'); Option fileLoading = OptionBuilder .hasArg() .withArgName("Batch Generating of annotation files") .withDescription( "Use specified file for batch generating annotation files. " + "specified File format (per line): GPL,outputFileName,[short|long|biologicalprocess] Note: Overrides -a,-t,-f command line options ") .withLongOpt("load").create('l'); Option batchLoading = OptionBuilder .withArgName("Generating all annotation files") .withDescription( "Generates annotation files for all Array Designs (omits ones that are subsumed or merged) uses accession as annotation file name." + "Creates 3 zip files for each AD, no parents, parents, biological process. 
Overrides all other settings.") .withLongOpt("batch").create('b'); addOption(annotationFileOption); addOption(annotationType); addOption(fileLoading); addOption(genesIncludedOption); addOption(batchLoading); } public static void main(String[] args) { ArrayDesignGOAnnotationGeneratorCli p = new ArrayDesignGOAnnotationGeneratorCli(); try { Exception ex = p.doWork(args); if (ex != null) { ex.printStackTrace(); } } catch (Exception e) { throw new RuntimeException(e); } } /* * (non-Javadoc) * * @see ubic.gemma.util.AbstractCLI#doWork(java.lang.String[]) */ @Override protected Exception doWork(String[] args) { Exception err = processCommandLine( "Array design probe ontology annotation ", args); if (err != null) return err; int n = 0; try { while (!goService.isReady()) { Thread.sleep(500); if ((n++ % 100) == 0) { log.debug("Waiting for ontologies to load"); } } if (processAllADs) { processAllADs(); } else if (batchFileName != null) { processBatchFile(this.batchFileName); } else { ArrayDesign arrayDesign = locateArrayDesign(arrayDesignName); processAD(arrayDesign, this.fileName); } } catch (Exception e) { return e; } return null; } /** * Goes over all the AD's in the database and creates annotation 3 * annotation files for each AD that is not merged into or subsumed by * another AD. Uses the Accession ID (GPL???) for the name of the annotation * file. Appends noparents, bioProcess, allParents to the file name. * * FIXME: This could be spead up dramatically. The same AD is being thawed 3 times. * the same genes are being loaded 3 times. The same go terms are loaded 3 times... etc... * * Would make sense to paralize this process as more than 1 AD could be processed at once. * @throws IOException */ @SuppressWarnings("unchecked") protected void processAllADs() throws IOException { Collection<ArrayDesign> allADs = this.arrayDesignService.loadAll(); this.includeGemmaGenes = false; this.shortAnnotations = false; this.longAnnotations = false; this.biologicalProcessAnnotations = false; for (ArrayDesign ad : allADs) { if (ad.getSubsumingArrayDesign() != null) { log.info("Skipping " + ad.getName() + " because it is subsumed by " + ad.getSubsumingArrayDesign().getName()); continue; } if (ad.getMergedInto() != null) { log.info("Skipping " + ad.getName() + " because it was merged into " + ad.getMergedInto().getName()); continue; } log.info("Processing AD: " + ad.getName()); this.shortAnnotations = true; processAD(ad, ad.getShortName() + "_NoParents"); this.shortAnnotations = false; this.biologicalProcessAnnotations = true; processAD(ad, ad.getShortName() + "_bioProcess"); this.biologicalProcessAnnotations = false; this.longAnnotations = true; processAD(ad, ad.getShortName() + "_allParents"); this.longAnnotations = false; } } /** * @throws IOException * process the current AD */ protected void processAD(ArrayDesign arrayDesign, String fileName) throws IOException { unlazifyArrayDesign(arrayDesign); Collection<CompositeSequence> compositeSequences = arrayDesign .getCompositeSequences(); log.info(arrayDesign.getName() + " has " + compositeSequences.size() + " composite sequences"); Writer writer = initOutputFile(fileName); int numProcessed = generateAnnotationFile(writer, compositeSequences); writer.flush(); //gzipOutputStream.finish(); // Not nice but need to call finish on the // gzoutput stream or else the zip file will // be corrupt. 
writer.close(); log.info("Finished processing platform: " + arrayDesignName); successObjects.add(((Describable) arrayDesign).getName() + " (" + (arrayDesign).getExternalReferences().iterator().next() .getAccession() + ")"); if (StringUtils.isBlank(fileName)) { log.info("Processed " + numProcessed + " composite sequences"); audit(arrayDesign, "Processed " + numProcessed + " composite sequences"); } else { log.info("Created file: " + fileName + " with " + numProcessed + " values"); audit(arrayDesign, "Created file: " + fileName + " with " + numProcessed + " values"); } } /** * @param arrayDesign */ private void audit(ArrayDesign arrayDesign, String note) { AuditEventType eventType = ArrayDesignAnnotationFileEvent.Factory .newInstance(); auditTrailService.addUpdateEvent(arrayDesign, eventType, note); } /** * @param fileName * @throws IOException * used for batch processing */ protected void processBatchFile(String fileName) throws IOException { log.info("Loading platforms to annotate from " + fileName); InputStream is = new FileInputStream(fileName); BufferedReader br = new BufferedReader(new InputStreamReader(is)); String line = null; int lineNumber = 0; while ((line = br.readLine()) != null) { lineNumber++; if (StringUtils.isBlank(line)) { continue; } String[] arguments = StringUtils.split(line, ','); String gpl = arguments[0]; String annotationFileName = arguments[1]; String type = arguments[2]; // Check the syntax of the given line if ((gpl == null) || StringUtils.isBlank(gpl)) { log .warn("Incorrect line format in Batch Annotation file: Line " + lineNumber + "Platform is required: " + line); log.warn("Unable to process that line. Skipping to next."); continue; } if ((annotationFileName == null) || StringUtils.isBlank(annotationFileName)) { annotationFileName = gpl; log .warn("No annotation file name specified on line: " + lineNumber + " Using platform name as default annotation file name"); } if ((type == null) || StringUtils.isBlank(type)) { type = SHORT; log.warn("No type specifed for line: " + lineNumber + " Defaulting to short"); } // need to set these so processing ad works correctly (todo: make // processtype take all 3 parameter) this.arrayDesignName = gpl; processType(type); ArrayDesign arrayDesign = locateArrayDesign(arrayDesignName); try { processAD(arrayDesign, annotationFileName); } catch (Exception e) { log.error("**** Exception while processing " + arrayDesignName + ": " + e.getMessage() + " ********"); log.error(e, e); cacheException(e); errorObjects.add(arrayDesignName + ": " + e.getMessage()); continue; } } summarizeProcessing(); } /** * Opens a file for writing and adds the header. * * @param fileName * if Null, output will be written to standard output. 
* @throws IOException */ protected Writer initOutputFile(String fileName) throws IOException { Writer writer; if (StringUtils.isBlank(fileName)) { log.info("Output to stdout"); writer = new PrintWriter(System.out); } else { // write into file log.info("Creating new annotation file " + fileName + " \n"); File f = new File(ANNOT_DATA_DIR + fileName + ".an.txt"); if (f.exists()) { log.warn("Will overwrite existing file " + f); f.delete(); } File parentDir = f.getParentFile(); if (!parentDir.exists()) parentDir.mkdirs(); f.createNewFile(); //gzipOutputStream = new GZIPOutputStream(new FileOutputStream(f)); writer = new OutputStreamWriter(new FileOutputStream(f)); } writer.write("Probe ID \t Gene \t Description \t GO Terms \n"); return writer; } /** * @param compositeSequences * @throws IOException * Gets the file ready for printing */ @SuppressWarnings("unchecked") protected int generateAnnotationFile(Writer writer, Collection<CompositeSequence> compositeSequences) throws IOException { int compositeSequencesProcessed = 0; for (CompositeSequence sequence : compositeSequences) { Collection<Gene> genes = compositeSequenceService .getGenes(sequence); ++compositeSequencesProcessed; if ((genes == null) || (genes.isEmpty())) { writeAnnotationLine(writer, sequence.getName(), "", "", null); continue; } // actually the collection gotten back is a collection of proxies // which causes issues. Need to reload the // genes from the db. Collection<Long> geneIds = new ArrayList<Long>(); for (Gene g : genes) { geneIds.add(g.getId()); } genes = geneService.loadMultiple(geneIds); String geneNames = null; String geneDescriptions = null; Collection<OntologyTerm> goTerms = new ArrayList<OntologyTerm>(); // Might be mulitple genes for a given cs. Need to hash it into one. for (Gene gene : genes) { if (gene == null) continue; // Add PARs or predicted gene info to annotation file? 
if ((!includeGemmaGenes) && ((gene instanceof ProbeAlignedRegion) || (gene instanceof PredictedGene))) { log .debug("Gene: " + gene.getOfficialSymbol() + " not included in annotations because it is a probeAligedRegion or predictedGene"); continue; } if (log.isDebugEnabled()) log.debug("Adding gene: " + gene.getOfficialSymbol() + " of type: " + gene.getClass()); addGoTerms(goTerms, gene); geneNames = addGeneSymbol(geneNames, gene); geneDescriptions = addGeneName(geneDescriptions, gene); } writeAnnotationLine(writer, sequence.getName(), geneNames, geneDescriptions, goTerms); if (compositeSequencesProcessed % 500 == 0 && log.isInfoEnabled()) { log.info("Processed " + compositeSequencesProcessed + "/" + compositeSequences.size() + " compositeSequences "); } } return compositeSequencesProcessed; } private Collection<OntologyTerm> addGoTerms( Collection<OntologyTerm> goTerms, Gene gene) { Collection<OntologyTerm> terms = getGoTerms(gene); goTerms.addAll(terms); return terms; } private String addGeneName(String geneDescriptions, Gene gene) { if (gene.getOfficialName() != null) { if (geneDescriptions == null) geneDescriptions = gene.getOfficialName(); else geneDescriptions += "|" + gene.getOfficialName(); } return geneDescriptions; } private String addGeneSymbol(String geneNames, Gene gene) { if (gene.getOfficialSymbol() != null) { if (geneNames == null) geneNames = gene.getOfficialSymbol(); else geneNames += "|" + gene.getOfficialSymbol(); } return geneNames; } /** * @param probeId * @param gene * @param description * @param goTerms * @throws IOException * Adds one line at a time to the annotation file */ protected void writeAnnotationLine(Writer writer, String probeId, String gene, String description, Collection<OntologyTerm> goTerms) throws IOException { if (log.isDebugEnabled()) log.debug("Generating line for annotation file \n"); if (gene == null) gene = ""; if (description == null) description = ""; writer.write(probeId + "\t" + gene + "\t" + description + "\t"); if ((goTerms == null) || goTerms.isEmpty()) { writer.write("\n"); writer.flush(); return; } boolean wrote = false; for (OntologyTerm oe : goTerms) { if (oe == null) continue; if (wrote) writer.write("|" + GeneOntologyService.asRegularGoId(oe)); else writer.write(GeneOntologyService.asRegularGoId(oe)); wrote = true; } writer.write("\n"); writer.flush(); } /** * @param gene * @return the goTerms for a given gene, as configured */ @SuppressWarnings("unchecked") protected Collection<OntologyTerm> getGoTerms(Gene gene) { Collection<VocabCharacteristic> ontos = new HashSet<VocabCharacteristic>( gene2GoAssociationService.findByGene(gene)); Collection<OntologyTerm> results = new HashSet<OntologyTerm>(); for (VocabCharacteristic vc : ontos) { results.add(GeneOntologyService.getTermForId(vc.getValue())); } if ((ontos == null) || (ontos.size() == 0)) return results; if (this.shortAnnotations) return results; if (this.longAnnotations) { Collection<OntologyTerm> oes = goService.getAllParents(results); results.addAll(oes); } else if (this.biologicalProcessAnnotations) { Collection<OntologyTerm> toRemove = new HashSet<OntologyTerm>(); for (OntologyTerm ont : results) { if ((ont == null)) continue; // / shouldn't happen! 
if (!goService.isBiologicalProcess(ont)) toRemove.add(ont); } for (OntologyTerm toRemoveOnto : toRemove) { results.remove(toRemoveOnto); } } return results; } /** * @param type * Intilizes variables depending on they type for file that is * needed */ private void processType(String type) { shortAnnotations = false; longAnnotations = false; biologicalProcessAnnotations = false; if (type.equalsIgnoreCase(LONG)) longAnnotations = true; else if (type.equalsIgnoreCase(BIOPROCESS)) biologicalProcessAnnotations = true; else // ( type.equalsIgnoreCase( SHORT ) ) shortAnnotations = true; } /** * @param genesToInclude */ private void processGenesIncluded(String genesToInclude) { includeGemmaGenes = false; if (genesToInclude.equalsIgnoreCase("all")) includeGemmaGenes = true; } @Override protected void processOptions() { super.processOptions(); if (this.hasOption('f')) { this.fileName = this.getOptionValue('f'); } if (this.hasOption('t')) { processType(this.getOptionValue('t')); } if (this.hasOption('l')) { this.batchFileName = this.getOptionValue('l'); } if (this.hasOption('b')) { this.processAllADs = true; } if (this.hasOption('g')) processGenesIncluded(this.getOptionValue('g')); gene2GoAssociationService = (Gene2GOAssociationService) this .getBean("gene2GOAssociationService"); compositeSequenceService = (CompositeSequenceService) this .getBean("compositeSequenceService"); geneService = (GeneService) this.getBean("geneService"); goService = (GeneOntologyService) this.getBean("geneOntologyService"); } }
added option to control overwriting of existing annotation files.
gemma-core/src/main/java/ubic/gemma/apps/ArrayDesignGOAnnotationGeneratorCli.java
added option to control overwriting of existing annotation files.
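The commit in the record above adds a '-o' command-line option so that existing annotation files are only overwritten when explicitly requested. A minimal, self-contained sketch of such an overwrite guard follows; the directory handling, method name, and exception choice are illustrative assumptions, not the project's actual implementation.

import java.io.File;
import java.io.IOException;

// Illustrative sketch only: one way an "overwrite existing annotation file"
// option could gate output-file creation. Names (annotDir, overWrite,
// prepareOutputFile) are hypothetical.
public class OverwriteGuardSketch {

    public static File prepareOutputFile(String annotDir, String fileName, boolean overWrite)
            throws IOException {
        File f = new File(annotDir, fileName + ".an.txt");
        if (f.exists()) {
            if (!overWrite) {
                // Without the overwrite flag, refuse to clobber an existing file.
                throw new IOException("Annotation file already exists: " + f
                        + " (enable overwrite to replace it)");
            }
            f.delete();
        }
        File parentDir = f.getParentFile();
        if (parentDir != null && !parentDir.exists()) {
            parentDir.mkdirs();
        }
        f.createNewFile();
        return f;
    }
}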
Java
apache-2.0
c373a971ad1e4e66a22c41188a744fde22e7da24
0
jenkinsci/gmaven,kurtharriger/gmaven
/* * Copyright (C) 2006-2007 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codehaus.gmaven.runtime.v1_7; import groovy.lang.GroovyClassLoader; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.ast.ModuleNode; import org.codehaus.groovy.classgen.GeneratorContext; import org.codehaus.groovy.control.CompilationFailedException; import org.codehaus.groovy.control.CompilationUnit; import org.codehaus.groovy.control.CompilerConfiguration; import org.codehaus.groovy.control.Phases; import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.tools.javac.JavaAwareResolveVisitor; import org.codehaus.groovy.tools.javac.JavaCompiler; import org.codehaus.groovy.tools.javac.JavaStubGenerator; import java.io.File; import java.io.FileNotFoundException; import java.net.URL; import java.util.Iterator; import java.util.Map; /** * Java-stub-only compilation unit. * * @version $Id$ * @author <a href="mailto:[email protected]">Jason Dillon</a> * * @since 1.1 */ public class JavaStubCompilationUnit extends CompilationUnit { private static final String DOT_GROOVY = ".groovy"; private final JavaStubGenerator stubGenerator; private int stubCount; public JavaStubCompilationUnit(final CompilerConfiguration config, final GroovyClassLoader gcl, File destDir) { super(config,null,gcl); assert config != null; Map options = config.getJointCompilationOptions(); if (destDir == null) { destDir = (File) options.get("stubDir"); } boolean useJava5 = config.getTargetBytecode().equals(CompilerConfiguration.POST_JDK5); stubGenerator = new JavaStubGenerator(destDir, false, useJava5); addPhaseOperation(new PrimaryClassNodeOperation() { public void call(final SourceUnit source, final GeneratorContext context, final ClassNode node) throws CompilationFailedException { new JavaAwareResolveVisitor(JavaStubCompilationUnit.this).startResolving(node, source); } },Phases.CONVERSION); addPhaseOperation(new PrimaryClassNodeOperation() { @Override public void call(final SourceUnit source, final GeneratorContext context, final ClassNode node) throws CompilationFailedException { try { stubGenerator.generateClass(node); stubCount++; } catch (FileNotFoundException e) { source.addException(e); } } },Phases.CONVERSION); } public JavaStubCompilationUnit(final CompilerConfiguration config, final GroovyClassLoader gcl) { this(config, gcl, null); } public void gotoPhase(final int phase) throws CompilationFailedException { super.gotoPhase(phase); if (phase==Phases.SEMANTIC_ANALYSIS) { // This appears to be needed to avoid missing imports Iterator modules = getAST().getModules().iterator(); while (modules.hasNext()) { ModuleNode module = (ModuleNode) modules.next(); module.setImportsResolved(false); } } } public int getStubCount() { return stubCount; } @Override public void compile() throws CompilationFailedException { stubCount = 0; super.compile(Phases.CONVERSION); } @Override public void configure(final CompilerConfiguration config) { super.configure(config); // GroovyClassLoader should be 
able to find classes compiled from java sources File targetDir = config.getTargetDirectory(); if (targetDir != null) { final String classOutput = targetDir.getAbsolutePath(); getClassLoader().addClasspath(classOutput); } } @Override public SourceUnit addSource(final File file) { if (file.getName().toLowerCase().endsWith(DOT_GROOVY)) { return super.addSource(file); } return null; } @Override public SourceUnit addSource(URL url) { if (url.getPath().toLowerCase().endsWith(DOT_GROOVY)) { return super.addSource(url); } return null; } }
gmaven-runtime/gmaven-runtime-1.7/src/main/java/org/codehaus/gmaven/runtime/v1_7/JavaStubCompilationUnit.java
/* * Copyright (C) 2006-2007 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codehaus.gmaven.runtime.v1_7; import groovy.lang.GroovyClassLoader; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.ast.ModuleNode; import org.codehaus.groovy.classgen.GeneratorContext; import org.codehaus.groovy.control.CompilationFailedException; import org.codehaus.groovy.control.CompilationUnit; import org.codehaus.groovy.control.CompilerConfiguration; import org.codehaus.groovy.control.Phases; import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.tools.javac.JavaStubGenerator; import java.io.File; import java.io.FileNotFoundException; import java.net.URL; import java.util.Iterator; import java.util.Map; /** * Java-stub-only compilation unit. * * @version $Id$ * @author <a href="mailto:[email protected]">Jason Dillon</a> * * @since 1.1 */ public class JavaStubCompilationUnit extends CompilationUnit { private static final String DOT_GROOVY = ".groovy"; private final JavaStubGenerator stubGenerator; private int stubCount; public JavaStubCompilationUnit(final CompilerConfiguration config, final GroovyClassLoader gcl, File destDir) { super(config,null,gcl); assert config != null; Map options = config.getJointCompilationOptions(); if (destDir == null) { destDir = (File) options.get("stubDir"); } boolean useJava5 = config.getTargetBytecode().equals(CompilerConfiguration.POST_JDK5); stubGenerator = new JavaStubGenerator(destDir, false, useJava5); addPhaseOperation(new PrimaryClassNodeOperation() { @Override public void call(final SourceUnit source, final GeneratorContext context, final ClassNode node) throws CompilationFailedException { try { stubGenerator.generateClass(node); stubCount++; } catch (FileNotFoundException e) { source.addException(e); } } },Phases.CONVERSION); } public JavaStubCompilationUnit(final CompilerConfiguration config, final GroovyClassLoader gcl) { this(config, gcl, null); } public void gotoPhase(final int phase) throws CompilationFailedException { super.gotoPhase(phase); if (phase==Phases.SEMANTIC_ANALYSIS) { // This appears to be needed to avoid missing imports Iterator modules = getAST().getModules().iterator(); while (modules.hasNext()) { ModuleNode module = (ModuleNode) modules.next(); module.setImportsResolved(false); } } } public int getStubCount() { return stubCount; } @Override public void compile() throws CompilationFailedException { stubCount = 0; super.compile(Phases.CONVERSION); } @Override public void configure(final CompilerConfiguration config) { super.configure(config); // GroovyClassLoader should be able to find classes compiled from java sources File targetDir = config.getTargetDirectory(); if (targetDir != null) { final String classOutput = targetDir.getAbsolutePath(); getClassLoader().addClasspath(classOutput); } } @Override public SourceUnit addSource(final File file) { if (file.getName().toLowerCase().endsWith(DOT_GROOVY)) { return super.addSource(file); } return null; } @Override public SourceUnit 
addSource(URL url) { if (url.getPath().toLowerCase().endsWith(DOT_GROOVY)) { return super.addSource(url); } return null; } }
This also seems needed for import/resolve to work for stubgen
gmaven-runtime/gmaven-runtime-1.7/src/main/java/org/codehaus/gmaven/runtime/v1_7/JavaStubCompilationUnit.java
This also seems needed for import/resolve to work for stubgen
Java
apache-2.0
77db82d3c2915c76d44ada8cfdf4c315f4f20539
0
b2ihealthcare/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl
/* * Copyright 2017-2018 B2i Healthcare Pte Ltd, http://b2i.sg * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.b2international.snowowl.core.validation; import static com.google.common.collect.Lists.newArrayList; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.concurrent.BlockingQueue; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.b2international.commons.CompareUtils; import com.b2international.index.Writer; import com.b2international.snowowl.core.ComponentIdentifier; import com.b2international.snowowl.core.api.SnowowlRuntimeException; import com.b2international.snowowl.core.domain.BranchContext; import com.b2international.snowowl.core.events.Request; import com.b2international.snowowl.core.events.util.Promise; import com.b2international.snowowl.core.internal.validation.ValidationRepository; import com.b2international.snowowl.core.internal.validation.ValidationThreadPool; import com.b2international.snowowl.core.validation.eval.ValidationRuleEvaluator; import com.b2international.snowowl.core.validation.issue.ValidationIssue; import com.b2international.snowowl.core.validation.issue.ValidationIssueDetailExtension; import com.b2international.snowowl.core.validation.issue.ValidationIssueDetailExtensionProvider; import com.b2international.snowowl.core.validation.rule.ValidationRule; import com.b2international.snowowl.core.validation.rule.ValidationRuleSearchRequestBuilder; import com.b2international.snowowl.core.validation.rule.ValidationRules; import com.b2international.snowowl.core.validation.whitelist.ValidationWhiteListSearchRequestBuilder; import com.google.common.base.Stopwatch; import com.google.common.collect.HashMultimap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.google.common.collect.Queues; /** * @since 6.0 */ final class ValidateRequest implements Request<BranchContext, ValidationResult> { private static final Logger LOG = LoggerFactory.getLogger("validation"); Collection<String> ruleIds; ValidateRequest() {} @Override public ValidationResult execute(BranchContext context) { return context.service(ValidationRepository.class).write(writer -> doValidate(context, writer)); } private ValidationResult doValidate(BranchContext context, Writer index) throws IOException { final String branchPath = context.branchPath(); ValidationRuleSearchRequestBuilder req = ValidationRequests.rules().prepareSearch(); if (!CompareUtils.isEmpty(ruleIds)) { req.filterByIds(ruleIds); } final ValidationRules rules = req .all() .build() .execute(context); final ValidationThreadPool pool = context.service(ValidationThreadPool.class); final BlockingQueue<IssuesToPersist> issuesToPersistQueue = Queues.newLinkedBlockingDeque(); // evaluate selected rules final List<Promise<Object>> validationPromises = Lists.newArrayList(); for (ValidationRule 
rule : rules) { final ValidationRuleEvaluator evaluator = ValidationRuleEvaluator.Registry.get(rule.getType()); if (evaluator != null) { validationPromises.add(pool.submit(rule.getCheckType(), () -> { Stopwatch w = Stopwatch.createStarted(); try { LOG.info("Executing rule '{}'...", rule.getId()); List<ComponentIdentifier> componentIdentifiers = evaluator.eval(context, rule); issuesToPersistQueue.offer(new IssuesToPersist(rule.getId(), componentIdentifiers)); LOG.info("Execution of rule '{}' successfully completed in '{}'.", rule.getId(), w); // TODO report successfully executed validation rule } catch (Exception e) { // TODO report failed validation rule LOG.info("Execution of rule '{}' failed after '{}'.", rule.getId(), w, e); } })); } } final Set<String> ruleIds = rules.stream().map(ValidationRule::getId).collect(Collectors.toSet()); final Multimap<String, ComponentIdentifier> whiteListedEntries = fetchWhiteListEntries(context, ruleIds); final Promise<List<Object>> promise = Promise.all(validationPromises); while (!promise.isDone() || !issuesToPersistQueue.isEmpty()) { if (!issuesToPersistQueue.isEmpty()) { final Collection<IssuesToPersist> issuesToPersist = newArrayList(); issuesToPersistQueue.drainTo(issuesToPersist); if (!issuesToPersist.isEmpty()) { final List<String> rulesToPersist = issuesToPersist.stream().map(itp -> itp.ruleId).collect(Collectors.toList()); LOG.info("Persisting issues generated by rules '{}'...", rulesToPersist); // persist new issues generated by rules so far, extending them using the Issue Extension API int persistedIssues = 0; final Multimap<String, ValidationIssue> issuesToExtendWithDetailsByToolingId = HashMultimap.create(); for (IssuesToPersist ruleIssues : Iterables.consumingIterable(issuesToPersist)) { final String ruleId = ruleIssues.ruleId; final List<ValidationIssue> existingRuleIssues = ValidationRequests.issues().prepareSearch() .all() .filterByBranchPath(branchPath) .filterByRule(ruleId) .build() .execute(context) .getItems(); final Set<ComponentIdentifier> existingComponentIdentifiers = existingRuleIssues.stream().map(ValidationIssue::getAffectedComponent).collect(Collectors.toSet()); // remove all processed whitelist entries final Collection<ComponentIdentifier> ruleWhiteListEntries = whiteListedEntries.removeAll(ruleId); final String toolingId = rules.stream().filter(rule -> ruleId.equals(rule.getId())).findFirst().get().getToolingId(); for (ComponentIdentifier componentIdentifier : ruleIssues.affectedComponentIds) { if (!existingComponentIdentifiers.remove(componentIdentifier)) { final ValidationIssue validationIssue = new ValidationIssue( UUID.randomUUID().toString(), ruleId, branchPath, componentIdentifier, ruleWhiteListEntries.contains(componentIdentifier)); issuesToExtendWithDetailsByToolingId.put(toolingId, validationIssue); persistedIssues++; } } final Set<String> issueIdsToDelete = existingRuleIssues .stream() .filter(issue -> existingComponentIdentifiers.contains(issue.getAffectedComponent())) .map(ValidationIssue::getId) .collect(Collectors.toSet()); if (!issueIdsToDelete.isEmpty()) { index.removeAll(Collections.singletonMap(ValidationIssue.class, issueIdsToDelete)); } } for (String toolingId : issuesToExtendWithDetailsByToolingId.keySet()) { final ValidationIssueDetailExtension extensions = ValidationIssueDetailExtensionProvider.INSTANCE.getExtensions(toolingId); final Collection<ValidationIssue> issues = issuesToExtendWithDetailsByToolingId.removeAll(toolingId); extensions.extendIssuesWithDetails(context, issues); for (ValidationIssue 
issue : issues) { index.put(issue.getId(), issue); } } index.commit(); LOG.info("Persisted '{}' issues generated by rules '{}'.", persistedIssues, rulesToPersist); } } try { Thread.sleep(1000L); } catch (InterruptedException e) { throw new SnowowlRuntimeException(e); } } // TODO return ValidationResult object with status and new issue IDs as set return new ValidationResult(context.id(), context.branchPath()); } private Multimap<String, ComponentIdentifier> fetchWhiteListEntries(BranchContext context, final Set<String> ruleIds) { // fetch all white list entries to determine whether an issue is whitelisted already or not final Multimap<String, ComponentIdentifier> whiteListedEntries = HashMultimap.create(); ValidationWhiteListSearchRequestBuilder whiteListReq = ValidationRequests.whiteList().prepareSearch(); // fetch whitelist entries associated with the defined rules if (!CompareUtils.isEmpty(ruleIds)) { whiteListReq.filterByRuleIds(ruleIds); } whiteListReq .all() .build() .execute(context) .stream() .forEach(whitelist -> whiteListedEntries.put(whitelist.getRuleId(), whitelist.getComponentIdentifier())); return whiteListedEntries; } public void setRuleIds(Collection<String> ruleIds) { this.ruleIds = ruleIds; } private static final class IssuesToPersist { public final String ruleId; public final Collection<ComponentIdentifier> affectedComponentIds; public IssuesToPersist(String ruleId, Collection<ComponentIdentifier> affectedComponentIds) { this.ruleId = ruleId; this.affectedComponentIds = affectedComponentIds; } } }
core/com.b2international.snowowl.datastore/src/com/b2international/snowowl/core/validation/ValidateRequest.java
/* * Copyright 2017-2018 B2i Healthcare Pte Ltd, http://b2i.sg * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.b2international.snowowl.core.validation; import static com.google.common.collect.Lists.newArrayList; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.concurrent.BlockingQueue; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.b2international.commons.CompareUtils; import com.b2international.index.Writer; import com.b2international.snowowl.core.ComponentIdentifier; import com.b2international.snowowl.core.api.SnowowlRuntimeException; import com.b2international.snowowl.core.domain.BranchContext; import com.b2international.snowowl.core.events.Request; import com.b2international.snowowl.core.events.util.Promise; import com.b2international.snowowl.core.internal.validation.ValidationRepository; import com.b2international.snowowl.core.internal.validation.ValidationThreadPool; import com.b2international.snowowl.core.validation.eval.ValidationRuleEvaluator; import com.b2international.snowowl.core.validation.issue.ValidationIssue; import com.b2international.snowowl.core.validation.issue.ValidationIssueDetailExtension; import com.b2international.snowowl.core.validation.issue.ValidationIssueDetailExtensionProvider; import com.b2international.snowowl.core.validation.rule.ValidationRule; import com.b2international.snowowl.core.validation.rule.ValidationRuleSearchRequestBuilder; import com.b2international.snowowl.core.validation.rule.ValidationRules; import com.b2international.snowowl.core.validation.whitelist.ValidationWhiteListSearchRequestBuilder; import com.google.common.collect.HashMultimap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.google.common.collect.Queues; /** * @since 6.0 */ final class ValidateRequest implements Request<BranchContext, ValidationResult> { private static final Logger LOG = LoggerFactory.getLogger("validation"); Collection<String> ruleIds; ValidateRequest() {} @Override public ValidationResult execute(BranchContext context) { return context.service(ValidationRepository.class).write(writer -> doValidate(context, writer)); } private ValidationResult doValidate(BranchContext context, Writer index) throws IOException { final String branchPath = context.branchPath(); ValidationRuleSearchRequestBuilder req = ValidationRequests.rules().prepareSearch(); if (!CompareUtils.isEmpty(ruleIds)) { req.filterByIds(ruleIds); } final ValidationRules rules = req .all() .build() .execute(context); final ValidationThreadPool pool = context.service(ValidationThreadPool.class); final BlockingQueue<IssuesToPersist> issuesToPersistQueue = Queues.newLinkedBlockingDeque(); // evaluate selected rules final List<Promise<Object>> validationPromises = Lists.newArrayList(); for (ValidationRule rule : rules) { final 
ValidationRuleEvaluator evaluator = ValidationRuleEvaluator.Registry.get(rule.getType()); if (evaluator != null) { validationPromises.add(pool.submit(rule.getCheckType(), () -> { long startTime = System.nanoTime(); try { LOG.info("Executing rule '{}'...", rule.getId()); List<ComponentIdentifier> componentIdentifiers = evaluator.eval(context, rule); issuesToPersistQueue.offer(new IssuesToPersist(rule.getId(), componentIdentifiers)); long endTime = System.nanoTime(); LOG.info("Execution of rule '{}' successfully completed took {} miliseconds", rule.getId(), (endTime - startTime) / 1000000); // TODO report successfully executed validation rule } catch (Exception e) { // TODO report failed validation rule LOG.info("Execution of rule '{}' failed", rule.getId(), e); } })); } } final Set<String> ruleIds = rules.stream().map(ValidationRule::getId).collect(Collectors.toSet()); final Multimap<String, ComponentIdentifier> whiteListedEntries = fetchWhiteListEntries(context, ruleIds); final Promise<List<Object>> promise = Promise.all(validationPromises); while (!promise.isDone() || !issuesToPersistQueue.isEmpty()) { if (!issuesToPersistQueue.isEmpty()) { final Collection<IssuesToPersist> issuesToPersist = newArrayList(); issuesToPersistQueue.drainTo(issuesToPersist); if (!issuesToPersist.isEmpty()) { final List<String> rulesToPersist = issuesToPersist.stream().map(itp -> itp.ruleId).collect(Collectors.toList()); LOG.info("Persisting issues generated by rules '{}'...", rulesToPersist); // persist new issues generated by rules so far, extending them using the Issue Extension API int persistedIssues = 0; final Multimap<String, ValidationIssue> issuesToExtendWithDetailsByToolingId = HashMultimap.create(); for (IssuesToPersist ruleIssues : Iterables.consumingIterable(issuesToPersist)) { final String ruleId = ruleIssues.ruleId; final List<ValidationIssue> existingRuleIssues = ValidationRequests.issues().prepareSearch() .all() .filterByBranchPath(branchPath) .filterByRule(ruleId) .build() .execute(context) .getItems(); final Set<ComponentIdentifier> existingComponentIdentifiers = existingRuleIssues.stream().map(ValidationIssue::getAffectedComponent).collect(Collectors.toSet()); // remove all processed whitelist entries final Collection<ComponentIdentifier> ruleWhiteListEntries = whiteListedEntries.removeAll(ruleId); final String toolingId = rules.stream().filter(rule -> ruleId.equals(rule.getId())).findFirst().get().getToolingId(); for (ComponentIdentifier componentIdentifier : ruleIssues.affectedComponentIds) { if (!existingComponentIdentifiers.remove(componentIdentifier)) { final ValidationIssue validationIssue = new ValidationIssue( UUID.randomUUID().toString(), ruleId, branchPath, componentIdentifier, ruleWhiteListEntries.contains(componentIdentifier)); issuesToExtendWithDetailsByToolingId.put(toolingId, validationIssue); persistedIssues++; } } final Set<String> issueIdsToDelete = existingRuleIssues .stream() .filter(issue -> existingComponentIdentifiers.contains(issue.getAffectedComponent())) .map(ValidationIssue::getId) .collect(Collectors.toSet()); if (!issueIdsToDelete.isEmpty()) { index.removeAll(Collections.singletonMap(ValidationIssue.class, issueIdsToDelete)); } } for (String toolingId : issuesToExtendWithDetailsByToolingId.keySet()) { final ValidationIssueDetailExtension extensions = ValidationIssueDetailExtensionProvider.INSTANCE.getExtensions(toolingId); final Collection<ValidationIssue> issues = issuesToExtendWithDetailsByToolingId.removeAll(toolingId); 
extensions.extendIssuesWithDetails(context, issues); for (ValidationIssue issue : issues) { index.put(issue.getId(), issue); } } index.commit(); LOG.info("Persisted '{}' issues generated by rules '{}'.", persistedIssues, rulesToPersist); } } try { Thread.sleep(1000L); } catch (InterruptedException e) { throw new SnowowlRuntimeException(e); } } // TODO return ValidationResult object with status and new issue IDs as set return new ValidationResult(context.id(), context.branchPath()); } private Multimap<String, ComponentIdentifier> fetchWhiteListEntries(BranchContext context, final Set<String> ruleIds) { // fetch all white list entries to determine whether an issue is whitelisted already or not final Multimap<String, ComponentIdentifier> whiteListedEntries = HashMultimap.create(); ValidationWhiteListSearchRequestBuilder whiteListReq = ValidationRequests.whiteList().prepareSearch(); // fetch whitelist entries associated with the defined rules if (!CompareUtils.isEmpty(ruleIds)) { whiteListReq.filterByRuleIds(ruleIds); } whiteListReq .all() .build() .execute(context) .stream() .forEach(whitelist -> whiteListedEntries.put(whitelist.getRuleId(), whitelist.getComponentIdentifier())); return whiteListedEntries; } public void setRuleIds(Collection<String> ruleIds) { this.ruleIds = ruleIds; } private static final class IssuesToPersist { public final String ruleId; public final Collection<ComponentIdentifier> affectedComponentIds; public IssuesToPersist(String ruleId, Collection<ComponentIdentifier> affectedComponentIds) { this.ruleId = ruleId; this.affectedComponentIds = affectedComponentIds; } } }
SO-3125: use Stopwatch instead of custom elapsed time tracking
core/com.b2international.snowowl.datastore/src/com/b2international/snowowl/core/validation/ValidateRequest.java
SO-3125: use Stopwatch instead of custom elapsed time tracking
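The change in the record above replaces manual System.nanoTime() arithmetic with Guava's Stopwatch when timing validation-rule execution. A minimal sketch of the difference is shown below, assuming Guava is on the classpath; the sleep calls and printouts are only illustrative.

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

// Minimal sketch: Stopwatch avoids hand-rolled nanosecond arithmetic and its
// toString() already formats the elapsed time in a readable unit.
public class StopwatchSketch {

    public static void main(String[] args) throws InterruptedException {
        // Before: manual elapsed-time tracking.
        long startTime = System.nanoTime();
        Thread.sleep(50);
        long elapsedMs = (System.nanoTime() - startTime) / 1_000_000;
        System.out.println("manual: " + elapsedMs + " ms");

        // After: Guava Stopwatch, as used in the ValidateRequest change above.
        Stopwatch w = Stopwatch.createStarted();
        Thread.sleep(50);
        System.out.println("stopwatch: " + w.elapsed(TimeUnit.MILLISECONDS) + " ms (" + w + ")");
    }
}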
Java
apache-2.0
8816f85709417f447f50693c70647395971aa834
0
OpenUniversity/ovirt-engine,halober/ovirt-engine,halober/ovirt-engine,walteryang47/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,OpenUniversity/ovirt-engine,yapengsong/ovirt-engine,walteryang47/ovirt-engine,eayun/ovirt-engine,halober/ovirt-engine,yapengsong/ovirt-engine,yingyun001/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,eayun/ovirt-engine,walteryang47/ovirt-engine,halober/ovirt-engine,yingyun001/ovirt-engine,yingyun001/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,zerodengxinchao/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,OpenUniversity/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,eayun/ovirt-engine
package org.ovirt.engine.ui.uicommonweb.models.clusters; import java.util.ArrayList; import org.ovirt.engine.core.common.businessentities.MigrateOnErrorOptions; import org.ovirt.engine.core.common.businessentities.ServerCpu; import org.ovirt.engine.core.common.businessentities.StorageType; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.storage_pool; import org.ovirt.engine.core.common.mode.ApplicationMode; import org.ovirt.engine.core.compat.Event; import org.ovirt.engine.core.compat.EventArgs; import org.ovirt.engine.core.compat.IEventListener; import org.ovirt.engine.core.compat.NGuid; import org.ovirt.engine.core.compat.PropertyChangedEventArgs; import org.ovirt.engine.core.compat.StringHelper; import org.ovirt.engine.core.compat.Version; import org.ovirt.engine.ui.frontend.AsyncQuery; import org.ovirt.engine.ui.frontend.INewAsyncCallback; import org.ovirt.engine.ui.uicommonweb.DataProvider; import org.ovirt.engine.ui.uicommonweb.Linq; import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider; import org.ovirt.engine.ui.uicommonweb.models.ApplicationModeHelper; import org.ovirt.engine.ui.uicommonweb.models.EntityModel; import org.ovirt.engine.ui.uicommonweb.models.ListModel; import org.ovirt.engine.ui.uicommonweb.models.Model; import org.ovirt.engine.ui.uicommonweb.validation.I18NNameValidation; import org.ovirt.engine.ui.uicommonweb.validation.IValidation; import org.ovirt.engine.ui.uicommonweb.validation.LengthValidation; import org.ovirt.engine.ui.uicommonweb.validation.NotEmptyValidation; import org.ovirt.engine.ui.uicompat.ConstantsManager; public class ClusterModel extends Model { private int privateServerOverCommit; public int getServerOverCommit() { return privateServerOverCommit; } public void setServerOverCommit(int value) { privateServerOverCommit = value; } private int privateDesktopOverCommit; public int getDesktopOverCommit() { return privateDesktopOverCommit; } public void setDesktopOverCommit(int value) { privateDesktopOverCommit = value; } private int privateDefaultOverCommit; public int getDefaultOverCommit() { return privateDefaultOverCommit; } public void setDefaultOverCommit(int value) { privateDefaultOverCommit = value; } private VDSGroup privateEntity; public VDSGroup getEntity() { return privateEntity; } public void setEntity(VDSGroup value) { privateEntity = value; } private boolean privateIsEdit; public boolean getIsEdit() { return privateIsEdit; } public void setIsEdit(boolean value) { privateIsEdit = value; } private boolean isCPUinitialized = false; private boolean privateIsNew; public boolean getIsNew() { return privateIsNew; } public void setIsNew(boolean value) { privateIsNew = value; } private String privateOriginalName; public String getOriginalName() { return privateOriginalName; } public void setOriginalName(String value) { privateOriginalName = value; } private NGuid privateClusterId; public NGuid getClusterId() { return privateClusterId; } public void setClusterId(NGuid value) { privateClusterId = value; } private EntityModel privateName; public EntityModel getName() { return privateName; } public void setName(EntityModel value) { privateName = value; } private EntityModel privateDescription; public EntityModel getDescription() { return privateDescription; } public void setDescription(EntityModel value) { privateDescription = value; } private ListModel privateDataCenter; public ListModel getDataCenter() { return privateDataCenter; } public void setDataCenter(ListModel 
value) { privateDataCenter = value; } private ListModel privateCPU; public ListModel getCPU() { return privateCPU; } public void setCPU(ListModel value) { privateCPU = value; } private ListModel privateVersion; public ListModel getVersion() { return privateVersion; } public void setVersion(ListModel value) { privateVersion = value; } private EntityModel privateEnableOvirtService; public EntityModel getEnableOvirtService() { return privateEnableOvirtService; } public void setEnableOvirtService(EntityModel value) { this.privateEnableOvirtService = value; } private EntityModel privateEnableGlusterService; public EntityModel getEnableGlusterService() { return privateEnableGlusterService; } public void setEnableGlusterService(EntityModel value) { this.privateEnableGlusterService = value; } private EntityModel isImportGlusterConfiguration; public EntityModel getIsImportGlusterConfiguration() { return isImportGlusterConfiguration; } public void setIsImportGlusterConfiguration(EntityModel value) { this.isImportGlusterConfiguration = value; } private EntityModel glusterHostAddress; public EntityModel getGlusterHostAddress() { return glusterHostAddress; } public void setGlusterHostAddress(EntityModel glusterHostAddress) { this.glusterHostAddress = glusterHostAddress; } private EntityModel glusterHostFingerprint; public EntityModel getGlusterHostFingerprint() { return glusterHostFingerprint; } public void setGlusterHostFingerprint(EntityModel glusterHostFingerprint) { this.glusterHostFingerprint = glusterHostFingerprint; } private Boolean isFingerprintVerified; public Boolean isFingerprintVerified() { return isFingerprintVerified; } public void setIsFingerprintVerified(Boolean value) { this.isFingerprintVerified = value; } private EntityModel glusterHostPassword; public EntityModel getGlusterHostPassword() { return glusterHostPassword; } public void setGlusterHostPassword(EntityModel glusterHostPassword) { this.glusterHostPassword = glusterHostPassword; } private EntityModel privateOptimizationNone; public EntityModel getOptimizationNone() { return privateOptimizationNone; } public void setOptimizationNone(EntityModel value) { privateOptimizationNone = value; } private EntityModel privateOptimizationForServer; public EntityModel getOptimizationForServer() { return privateOptimizationForServer; } public void setOptimizationForServer(EntityModel value) { privateOptimizationForServer = value; } private EntityModel privateOptimizationForDesktop; public EntityModel getOptimizationForDesktop() { return privateOptimizationForDesktop; } public void setOptimizationForDesktop(EntityModel value) { privateOptimizationForDesktop = value; } private EntityModel privateOptimizationCustom; public EntityModel getOptimizationCustom() { return privateOptimizationCustom; } public void setOptimizationCustom(EntityModel value) { privateOptimizationCustom = value; } private EntityModel privateOptimizationNone_IsSelected; public EntityModel getOptimizationNone_IsSelected() { return privateOptimizationNone_IsSelected; } public void setOptimizationNone_IsSelected(EntityModel value) { privateOptimizationNone_IsSelected = value; } private EntityModel privateOptimizationForServer_IsSelected; public EntityModel getOptimizationForServer_IsSelected() { return privateOptimizationForServer_IsSelected; } public void setOptimizationForServer_IsSelected(EntityModel value) { privateOptimizationForServer_IsSelected = value; } private EntityModel privateOptimizationForDesktop_IsSelected; public EntityModel 
getOptimizationForDesktop_IsSelected() { return privateOptimizationForDesktop_IsSelected; } public void setOptimizationForDesktop_IsSelected(EntityModel value) { privateOptimizationForDesktop_IsSelected = value; } private EntityModel privateOptimizationCustom_IsSelected; public EntityModel getOptimizationCustom_IsSelected() { return privateOptimizationCustom_IsSelected; } public void setOptimizationCustom_IsSelected(EntityModel value) { privateOptimizationCustom_IsSelected = value; } private EntityModel privateMigrateOnErrorOption_NO; public EntityModel getMigrateOnErrorOption_NO() { return privateMigrateOnErrorOption_NO; } public void setMigrateOnErrorOption_NO(EntityModel value) { privateMigrateOnErrorOption_NO = value; } private EntityModel privateMigrateOnErrorOption_YES; public EntityModel getMigrateOnErrorOption_YES() { return privateMigrateOnErrorOption_YES; } public void setMigrateOnErrorOption_YES(EntityModel value) { privateMigrateOnErrorOption_YES = value; } private EntityModel privateMigrateOnErrorOption_HA_ONLY; public EntityModel getMigrateOnErrorOption_HA_ONLY() { return privateMigrateOnErrorOption_HA_ONLY; } public void setMigrateOnErrorOption_HA_ONLY(EntityModel value) { privateMigrateOnErrorOption_HA_ONLY = value; } private boolean isGeneralTabValid; public boolean getIsGeneralTabValid() { return isGeneralTabValid; } public void setIsGeneralTabValid(boolean value) { if (isGeneralTabValid != value) { isGeneralTabValid = value; OnPropertyChanged(new PropertyChangedEventArgs("IsGeneralTabValid")); //$NON-NLS-1$ } } private MigrateOnErrorOptions migrateOnErrorOption = MigrateOnErrorOptions.values()[0]; public MigrateOnErrorOptions getMigrateOnErrorOption() { if ((Boolean) getMigrateOnErrorOption_NO().getEntity() == true) { return MigrateOnErrorOptions.NO; } else if ((Boolean) getMigrateOnErrorOption_YES().getEntity() == true) { return MigrateOnErrorOptions.YES; } else if ((Boolean) getMigrateOnErrorOption_HA_ONLY().getEntity() == true) { return MigrateOnErrorOptions.HA_ONLY; } return MigrateOnErrorOptions.YES; } public void setMigrateOnErrorOption(MigrateOnErrorOptions value) { if (migrateOnErrorOption != value) { migrateOnErrorOption = value; // webadmin use. 
switch (migrateOnErrorOption) { case NO: getMigrateOnErrorOption_NO().setEntity(true); getMigrateOnErrorOption_YES().setEntity(false); getMigrateOnErrorOption_HA_ONLY().setEntity(false); break; case YES: getMigrateOnErrorOption_NO().setEntity(false); getMigrateOnErrorOption_YES().setEntity(true); getMigrateOnErrorOption_HA_ONLY().setEntity(false); break; case HA_ONLY: getMigrateOnErrorOption_NO().setEntity(false); getMigrateOnErrorOption_YES().setEntity(false); getMigrateOnErrorOption_HA_ONLY().setEntity(true); break; default: break; } OnPropertyChanged(new PropertyChangedEventArgs("MigrateOnErrorOption")); //$NON-NLS-1$ } } private boolean privateisResiliencePolicyTabAvailable; public boolean getisResiliencePolicyTabAvailable() { return privateisResiliencePolicyTabAvailable; } public void setisResiliencePolicyTabAvailable(boolean value) { privateisResiliencePolicyTabAvailable = value; } public boolean getIsResiliencePolicyTabAvailable() { return getisResiliencePolicyTabAvailable(); } public void setIsResiliencePolicyTabAvailable(boolean value) { if (getisResiliencePolicyTabAvailable() != value) { setisResiliencePolicyTabAvailable(value); OnPropertyChanged(new PropertyChangedEventArgs("IsResiliencePolicyTabAvailable")); //$NON-NLS-1$ } } public int getMemoryOverCommit() { if ((Boolean) getOptimizationNone_IsSelected().getEntity()) { return (Integer) getOptimizationNone().getEntity(); } if ((Boolean) getOptimizationForServer_IsSelected().getEntity()) { return (Integer) getOptimizationForServer().getEntity(); } if ((Boolean) getOptimizationForDesktop_IsSelected().getEntity()) { return (Integer) getOptimizationForDesktop().getEntity(); } if ((Boolean) getOptimizationCustom_IsSelected().getEntity()) { return (Integer) getOptimizationCustom().getEntity(); } return DataProvider.GetClusterDefaultMemoryOverCommit(); } public void setMemoryOverCommit(int value) { getOptimizationNone_IsSelected().setEntity(value == (Integer) getOptimizationNone().getEntity()); getOptimizationForServer_IsSelected().setEntity(value == (Integer) getOptimizationForServer().getEntity()); getOptimizationForDesktop_IsSelected().setEntity(value == (Integer) getOptimizationForDesktop().getEntity()); if (!(Boolean) getOptimizationNone_IsSelected().getEntity() && !(Boolean) getOptimizationForServer_IsSelected().getEntity() && !(Boolean) getOptimizationForDesktop_IsSelected().getEntity()) { getOptimizationCustom().setIsAvailable(true); getOptimizationCustom().setEntity(value); getOptimizationCustom_IsSelected().setIsAvailable(true); getOptimizationCustom_IsSelected().setEntity(true); } } public ClusterModel() { } public void Init(final boolean isEdit) { setIsEdit(isEdit); setName(new EntityModel()); setDescription(new EntityModel()); setEnableOvirtService(new EntityModel()); getEnableOvirtService().setEntity(ApplicationModeHelper.isModeSupported(ApplicationMode.VirtOnly)); getEnableOvirtService().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.VirtOnly && ApplicationModeHelper.isModeSupported(ApplicationMode.VirtOnly)); initImportCluster(isEdit); setEnableGlusterService(new EntityModel()); getEnableGlusterService().getEntityChangedEvent().addListener(new IEventListener() { @Override public void eventRaised(Event ev, Object sender, EventArgs args) { if (!isEdit && getEnableGlusterService().getEntity() != null && (Boolean) getEnableGlusterService().getEntity()) { getIsImportGlusterConfiguration().setIsAvailable(true); getGlusterHostAddress().setIsAvailable(true); 
getGlusterHostFingerprint().setIsAvailable(true); getGlusterHostPassword().setIsAvailable(true); } else { getIsImportGlusterConfiguration().setIsAvailable(false); getIsImportGlusterConfiguration().setEntity(false); getGlusterHostAddress().setIsAvailable(false); getGlusterHostFingerprint().setIsAvailable(false); getGlusterHostPassword().setIsAvailable(false); } } }); getEnableGlusterService().setEntity(ApplicationModeHelper.getUiMode() == ApplicationMode.GlusterOnly); getEnableGlusterService().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly && ApplicationModeHelper.isModeSupported(ApplicationMode.GlusterOnly)); setOptimizationNone(new EntityModel()); setOptimizationForServer(new EntityModel()); setOptimizationForDesktop(new EntityModel()); setOptimizationCustom(new EntityModel()); EntityModel tempVar = new EntityModel(); tempVar.setEntity(false); setOptimizationNone_IsSelected(tempVar); getOptimizationNone_IsSelected().getEntityChangedEvent().addListener(this); EntityModel tempVar2 = new EntityModel(); tempVar2.setEntity(false); setOptimizationForServer_IsSelected(tempVar2); getOptimizationForServer_IsSelected().getEntityChangedEvent().addListener(this); EntityModel tempVar3 = new EntityModel(); tempVar3.setEntity(false); setOptimizationForDesktop_IsSelected(tempVar3); getOptimizationForDesktop_IsSelected().getEntityChangedEvent().addListener(this); EntityModel tempVar4 = new EntityModel(); tempVar4.setEntity(false); tempVar4.setIsAvailable(false); setOptimizationCustom_IsSelected(tempVar4); getOptimizationCustom_IsSelected().getEntityChangedEvent().addListener(this); EntityModel tempVar5 = new EntityModel(); tempVar5.setEntity(false); setMigrateOnErrorOption_YES(tempVar5); getMigrateOnErrorOption_YES().getEntityChangedEvent().addListener(this); EntityModel tempVar6 = new EntityModel(); tempVar6.setEntity(false); setMigrateOnErrorOption_NO(tempVar6); getMigrateOnErrorOption_NO().getEntityChangedEvent().addListener(this); EntityModel tempVar7 = new EntityModel(); tempVar7.setEntity(false); setMigrateOnErrorOption_HA_ONLY(tempVar7); getMigrateOnErrorOption_HA_ONLY().getEntityChangedEvent().addListener(this); // Optimization methods: // default value =100; setDefaultOverCommit(DataProvider.GetClusterDefaultMemoryOverCommit()); AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { ClusterModel clusterModel = (ClusterModel) model; clusterModel.setDesktopOverCommit((Integer) result); AsyncQuery _asyncQuery1 = new AsyncQuery(); _asyncQuery1.setModel(clusterModel); _asyncQuery1.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model1, Object result1) { ClusterModel clusterModel1 = (ClusterModel) model1; clusterModel1.setServerOverCommit((Integer) result1); // temp is used for conversion purposes EntityModel temp; temp = clusterModel1.getOptimizationNone(); temp.setEntity(clusterModel1.getDefaultOverCommit()); // res1, res2 is used for conversion purposes. 
boolean res1 = clusterModel1.getDesktopOverCommit() != clusterModel1.getDefaultOverCommit(); boolean res2 = clusterModel1.getServerOverCommit() != clusterModel1.getDefaultOverCommit(); temp = clusterModel1.getOptimizationNone_IsSelected(); setIsSelected(res1 && res2); temp.setEntity(getIsSelected()); temp = clusterModel1.getOptimizationForServer(); temp.setEntity(clusterModel1.getServerOverCommit()); temp = clusterModel1.getOptimizationForServer_IsSelected(); temp.setEntity(clusterModel1.getServerOverCommit() == clusterModel1.getDefaultOverCommit()); temp = clusterModel1.getOptimizationForDesktop(); temp.setEntity(clusterModel1.getDesktopOverCommit()); temp = temp = clusterModel1.getOptimizationForDesktop_IsSelected(); temp.setEntity(clusterModel1.getDesktopOverCommit() == clusterModel1.getDefaultOverCommit()); temp = clusterModel1.getOptimizationCustom(); temp.setIsAvailable(false); temp.setIsChangable(false); if (clusterModel1.getIsEdit()) { clusterModel1.postInit(); } } }; AsyncDataProvider.GetClusterServerMemoryOverCommit(_asyncQuery1); } }; AsyncDataProvider.GetClusterDesktopMemoryOverCommit(_asyncQuery); setDataCenter(new ListModel()); getDataCenter().getSelectedItemChangedEvent().addListener(this); getDataCenter().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly); setCPU(new ListModel()); getCPU().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly); setVersion(new ListModel()); getVersion().getSelectedItemChangedEvent().addListener(this); getVersion().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly); setMigrateOnErrorOption(MigrateOnErrorOptions.YES); setIsGeneralTabValid(true); setIsResiliencePolicyTabAvailable(true); } private void initImportCluster(boolean isEdit) { setGlusterHostAddress(new EntityModel()); getGlusterHostAddress().getEntityChangedEvent().addListener(new IEventListener() { @Override public void eventRaised(Event ev, Object sender, EventArgs args) { setIsFingerprintVerified(false); fetchFingerprint((String) getGlusterHostAddress().getEntity()); } }); setGlusterHostFingerprint(new EntityModel()); getGlusterHostFingerprint().setEntity(""); //$NON-NLS-1$ setIsFingerprintVerified(false); setGlusterHostPassword(new EntityModel()); setIsImportGlusterConfiguration(new EntityModel()); getIsImportGlusterConfiguration().getEntityChangedEvent().addListener(new IEventListener() { @Override public void eventRaised(Event ev, Object sender, EventArgs args) { if (getIsImportGlusterConfiguration().getEntity() != null && (Boolean) getIsImportGlusterConfiguration().getEntity()) { getGlusterHostAddress().setIsChangable(true); getGlusterHostPassword().setIsChangable(true); } else { getGlusterHostAddress().setIsChangable(false); getGlusterHostPassword().setIsChangable(false); } } }); getIsImportGlusterConfiguration().setEntity(false); } private void fetchFingerprint(String hostAddress) { AsyncQuery aQuery = new AsyncQuery(); aQuery.setModel(this); aQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { String fingerprint = (String) result; if (fingerprint != null && fingerprint.length() > 0) { getGlusterHostFingerprint().setEntity(result); setIsFingerprintVerified(true); } else { getGlusterHostFingerprint().setEntity(ConstantsManager.getInstance() .getConstants() .errorLoadingFingerprint()); setIsFingerprintVerified(false); } } }; AsyncDataProvider.GetHostFingerprint(aQuery, hostAddress); 
getGlusterHostFingerprint().setEntity(ConstantsManager.getInstance().getConstants().loadingFingerprint()); } private void postInit() { getDescription().setEntity(getEntity().getdescription()); setMemoryOverCommit(getEntity().getmax_vds_memory_over_commit()); AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { ClusterModel clusterModel = (ClusterModel) model; ArrayList<storage_pool> dataCenters = (ArrayList<storage_pool>) result; clusterModel.getDataCenter().setItems(dataCenters); clusterModel.getDataCenter().setSelectedItem(null); for (storage_pool a : dataCenters) { if (clusterModel.getEntity().getstorage_pool_id() != null && a.getId().equals(clusterModel.getEntity().getstorage_pool_id())) { clusterModel.getDataCenter().setSelectedItem(a); break; } } clusterModel.getDataCenter().setIsChangable(clusterModel.getDataCenter().getSelectedItem() == null); clusterModel.setMigrateOnErrorOption(clusterModel.getEntity().getMigrateOnError()); } }; AsyncDataProvider.GetDataCenterList(_asyncQuery); } @Override public void eventRaised(Event ev, Object sender, EventArgs args) { super.eventRaised(ev, sender, args); if (ev.equals(ListModel.SelectedItemChangedEventDefinition)) { if (sender == getDataCenter()) { StoragePool_SelectedItemChanged(args); } else if (sender == getVersion()) { Version_SelectedItemChanged(args); } } else if (ev.equals(EntityModel.EntityChangedEventDefinition)) { EntityModel senderEntityModel = (EntityModel) sender; if ((Boolean) senderEntityModel.getEntity()) { if (senderEntityModel == getOptimizationNone_IsSelected()) { getOptimizationForServer_IsSelected().setEntity(false); getOptimizationForDesktop_IsSelected().setEntity(false); getOptimizationCustom_IsSelected().setEntity(false); } else if (senderEntityModel == getOptimizationForServer_IsSelected()) { getOptimizationNone_IsSelected().setEntity(false); getOptimizationForDesktop_IsSelected().setEntity(false); getOptimizationCustom_IsSelected().setEntity(false); } else if (senderEntityModel == getOptimizationForDesktop_IsSelected()) { getOptimizationNone_IsSelected().setEntity(false); getOptimizationForServer_IsSelected().setEntity(false); getOptimizationCustom_IsSelected().setEntity(false); } else if (senderEntityModel == getOptimizationCustom_IsSelected()) { getOptimizationNone_IsSelected().setEntity(false); getOptimizationForServer_IsSelected().setEntity(false); getOptimizationForDesktop_IsSelected().setEntity(false); } else if (senderEntityModel == getMigrateOnErrorOption_YES()) { getMigrateOnErrorOption_NO().setEntity(false); getMigrateOnErrorOption_HA_ONLY().setEntity(false); } else if (senderEntityModel == getMigrateOnErrorOption_NO()) { getMigrateOnErrorOption_YES().setEntity(false); getMigrateOnErrorOption_HA_ONLY().setEntity(false); } else if (senderEntityModel == getMigrateOnErrorOption_HA_ONLY()) { getMigrateOnErrorOption_YES().setEntity(false); getMigrateOnErrorOption_NO().setEntity(false); } } } } private void Version_SelectedItemChanged(EventArgs e) { Version version; if (getVersion().getSelectedItem() != null) { version = (Version) getVersion().getSelectedItem(); } else { version = ((storage_pool) getDataCenter().getSelectedItem()).getcompatibility_version(); } AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { ClusterModel clusterModel = 
(ClusterModel) model; ArrayList<ServerCpu> cpus = (ArrayList<ServerCpu>) result; ServerCpu oldSelectedCpu = (ServerCpu) clusterModel.getCPU().getSelectedItem(); clusterModel.getCPU().setItems(cpus); clusterModel.getCPU().setSelectedItem(oldSelectedCpu != null ? Linq.FirstOrDefault(cpus, new Linq.ServerCpuPredicate(oldSelectedCpu.getCpuName())) : null); if (clusterModel.getCPU().getSelectedItem() == null || !isCPUinitialized) { InitCPU(); } } }; AsyncDataProvider.GetCPUList(_asyncQuery, version); } private void InitCPU() { if (!isCPUinitialized && getIsEdit()) { isCPUinitialized = true; getCPU().setSelectedItem(null); for (ServerCpu a : (ArrayList<ServerCpu>) getCPU().getItems()) { if (StringHelper.stringsEqual(a.getCpuName(), getEntity().getcpu_name())) { getCPU().setSelectedItem(a); break; } } } } private void StoragePool_SelectedItemChanged(EventArgs e) { // possible versions for new cluster (when editing cluster, this event won't occur) // are actually the possible versions for the data-center that the cluster is going // to be attached to. storage_pool selectedDataCenter = (storage_pool) getDataCenter().getSelectedItem(); if (selectedDataCenter == null) { return; } if (selectedDataCenter.getstorage_pool_type() == StorageType.LOCALFS) { setIsResiliencePolicyTabAvailable(false); } else { setIsResiliencePolicyTabAvailable(true); } AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { ClusterModel clusterModel = (ClusterModel) model; ArrayList<Version> versions = (ArrayList<Version>) result; clusterModel.getVersion().setItems(versions); if (!versions.contains(clusterModel.getVersion().getSelectedItem())) { if (versions.contains(((storage_pool) clusterModel.getDataCenter().getSelectedItem()).getcompatibility_version())) { clusterModel.getVersion().setSelectedItem(((storage_pool) clusterModel.getDataCenter() .getSelectedItem()).getcompatibility_version()); } else { clusterModel.getVersion().setSelectedItem(Linq.SelectHighestVersion(versions)); } } else if (clusterModel.getIsEdit()) { clusterModel.getVersion().setSelectedItem(Linq.FirstOrDefault(versions, new Linq.VersionPredicate(((VDSGroup) clusterModel.getEntity()).getcompatibility_version()))); } } }; AsyncDataProvider.GetDataCenterVersions(_asyncQuery, selectedDataCenter.getId()); } public boolean Validate(boolean validateCpu) { return Validate(true, validateCpu); } public boolean Validate(boolean validateStoragePool, boolean validateCpu) { getName().ValidateEntity(new IValidation[] { new NotEmptyValidation(), new LengthValidation(40), new I18NNameValidation() }); if (validateStoragePool) { getDataCenter().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() }); } if (validateCpu) { getCPU().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() }); } else { getCPU().ValidateSelectedItem(new IValidation[] {}); } getVersion().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() }); // TODO: async validation for webadmin // string name = (string)Name.Entity; // //Check name unicitate. 
// if (String.Compare(name, OriginalName, true) != 0 && !DataProvider.IsClusterNameUnique(name)) // { // Name.IsValid = false; // Name.InvalidityReasons.Add("Name must be unique."); // } boolean validService = true; if (getEnableOvirtService().getIsAvailable() && getEnableGlusterService().getIsAvailable()) { validService = ((Boolean) getEnableOvirtService().getEntity()) || ((Boolean) getEnableGlusterService().getEntity()); } if (!validService) { setMessage(ConstantsManager.getInstance().getConstants().clusterServiceValidationMsg()); } else if (((Boolean) getIsImportGlusterConfiguration().getEntity()) && !isFingerprintVerified()) { setMessage(ConstantsManager.getInstance().getConstants().fingerprintNotVerified()); } else { setMessage(null); } getGlusterHostAddress().ValidateEntity(new IValidation[] { new NotEmptyValidation() }); getGlusterHostPassword().ValidateEntity(new IValidation[] { new NotEmptyValidation() }); setIsGeneralTabValid(getName().getIsValid() && getDataCenter().getIsValid() && getCPU().getIsValid() && getVersion().getIsValid() && validService && getGlusterHostAddress().getIsValid() && getGlusterHostPassword().getIsValid() && ((Boolean) getIsImportGlusterConfiguration().getEntity() ? isFingerprintVerified() : true)); return getName().getIsValid() && getDataCenter().getIsValid() && getCPU().getIsValid() && getVersion().getIsValid() && validService && getGlusterHostAddress().getIsValid() && getGlusterHostPassword().getIsValid() && ((Boolean) getIsImportGlusterConfiguration().getEntity() ? isFingerprintVerified() : true); } }
frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/clusters/ClusterModel.java
package org.ovirt.engine.ui.uicommonweb.models.clusters; import java.util.ArrayList; import org.ovirt.engine.core.common.businessentities.MigrateOnErrorOptions; import org.ovirt.engine.core.common.businessentities.ServerCpu; import org.ovirt.engine.core.common.businessentities.StorageType; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.storage_pool; import org.ovirt.engine.core.common.mode.ApplicationMode; import org.ovirt.engine.core.compat.Event; import org.ovirt.engine.core.compat.EventArgs; import org.ovirt.engine.core.compat.IEventListener; import org.ovirt.engine.core.compat.NGuid; import org.ovirt.engine.core.compat.PropertyChangedEventArgs; import org.ovirt.engine.core.compat.StringHelper; import org.ovirt.engine.core.compat.Version; import org.ovirt.engine.ui.frontend.AsyncQuery; import org.ovirt.engine.ui.frontend.INewAsyncCallback; import org.ovirt.engine.ui.uicommonweb.DataProvider; import org.ovirt.engine.ui.uicommonweb.Linq; import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider; import org.ovirt.engine.ui.uicommonweb.models.ApplicationModeHelper; import org.ovirt.engine.ui.uicommonweb.models.EntityModel; import org.ovirt.engine.ui.uicommonweb.models.ListModel; import org.ovirt.engine.ui.uicommonweb.models.Model; import org.ovirt.engine.ui.uicommonweb.validation.I18NNameValidation; import org.ovirt.engine.ui.uicommonweb.validation.IValidation; import org.ovirt.engine.ui.uicommonweb.validation.LengthValidation; import org.ovirt.engine.ui.uicommonweb.validation.NotEmptyValidation; import org.ovirt.engine.ui.uicompat.ConstantsManager; public class ClusterModel extends Model { private int privateServerOverCommit; public int getServerOverCommit() { return privateServerOverCommit; } public void setServerOverCommit(int value) { privateServerOverCommit = value; } private int privateDesktopOverCommit; public int getDesktopOverCommit() { return privateDesktopOverCommit; } public void setDesktopOverCommit(int value) { privateDesktopOverCommit = value; } private int privateDefaultOverCommit; public int getDefaultOverCommit() { return privateDefaultOverCommit; } public void setDefaultOverCommit(int value) { privateDefaultOverCommit = value; } private VDSGroup privateEntity; public VDSGroup getEntity() { return privateEntity; } public void setEntity(VDSGroup value) { privateEntity = value; } private boolean privateIsEdit; public boolean getIsEdit() { return privateIsEdit; } public void setIsEdit(boolean value) { privateIsEdit = value; } private boolean isCPUinitialized = false; private boolean privateIsNew; public boolean getIsNew() { return privateIsNew; } public void setIsNew(boolean value) { privateIsNew = value; } private String privateOriginalName; public String getOriginalName() { return privateOriginalName; } public void setOriginalName(String value) { privateOriginalName = value; } private NGuid privateClusterId; public NGuid getClusterId() { return privateClusterId; } public void setClusterId(NGuid value) { privateClusterId = value; } private EntityModel privateName; public EntityModel getName() { return privateName; } public void setName(EntityModel value) { privateName = value; } private EntityModel privateDescription; public EntityModel getDescription() { return privateDescription; } public void setDescription(EntityModel value) { privateDescription = value; } private ListModel privateDataCenter; public ListModel getDataCenter() { return privateDataCenter; } public void setDataCenter(ListModel 
value) { privateDataCenter = value; } private ListModel privateCPU; public ListModel getCPU() { return privateCPU; } public void setCPU(ListModel value) { privateCPU = value; } private ListModel privateVersion; public ListModel getVersion() { return privateVersion; } public void setVersion(ListModel value) { privateVersion = value; } private EntityModel privateEnableOvirtService; public EntityModel getEnableOvirtService() { return privateEnableOvirtService; } public void setEnableOvirtService(EntityModel value) { this.privateEnableOvirtService = value; } private EntityModel privateEnableGlusterService; public EntityModel getEnableGlusterService() { return privateEnableGlusterService; } public void setEnableGlusterService(EntityModel value) { this.privateEnableGlusterService = value; } private EntityModel isImportGlusterConfiguration; public EntityModel getIsImportGlusterConfiguration() { return isImportGlusterConfiguration; } public void setIsImportGlusterConfiguration(EntityModel value) { this.isImportGlusterConfiguration = value; } private EntityModel glusterHostAddress; public EntityModel getGlusterHostAddress() { return glusterHostAddress; } public void setGlusterHostAddress(EntityModel glusterHostAddress) { this.glusterHostAddress = glusterHostAddress; } private EntityModel glusterHostFingerprint; public EntityModel getGlusterHostFingerprint() { return glusterHostFingerprint; } public void setGlusterHostFingerprint(EntityModel glusterHostFingerprint) { this.glusterHostFingerprint = glusterHostFingerprint; } private Boolean isFingerprintVerified; public Boolean isFingerprintVerified() { return isFingerprintVerified; } public void setIsFingerprintVerified(Boolean value) { this.isFingerprintVerified = value; } private EntityModel glusterHostPassword; public EntityModel getGlusterHostPassword() { return glusterHostPassword; } public void setGlusterHostPassword(EntityModel glusterHostPassword) { this.glusterHostPassword = glusterHostPassword; } private EntityModel privateOptimizationNone; public EntityModel getOptimizationNone() { return privateOptimizationNone; } public void setOptimizationNone(EntityModel value) { privateOptimizationNone = value; } private EntityModel privateOptimizationForServer; public EntityModel getOptimizationForServer() { return privateOptimizationForServer; } public void setOptimizationForServer(EntityModel value) { privateOptimizationForServer = value; } private EntityModel privateOptimizationForDesktop; public EntityModel getOptimizationForDesktop() { return privateOptimizationForDesktop; } public void setOptimizationForDesktop(EntityModel value) { privateOptimizationForDesktop = value; } private EntityModel privateOptimizationCustom; public EntityModel getOptimizationCustom() { return privateOptimizationCustom; } public void setOptimizationCustom(EntityModel value) { privateOptimizationCustom = value; } private EntityModel privateOptimizationNone_IsSelected; public EntityModel getOptimizationNone_IsSelected() { return privateOptimizationNone_IsSelected; } public void setOptimizationNone_IsSelected(EntityModel value) { privateOptimizationNone_IsSelected = value; } private EntityModel privateOptimizationForServer_IsSelected; public EntityModel getOptimizationForServer_IsSelected() { return privateOptimizationForServer_IsSelected; } public void setOptimizationForServer_IsSelected(EntityModel value) { privateOptimizationForServer_IsSelected = value; } private EntityModel privateOptimizationForDesktop_IsSelected; public EntityModel 
getOptimizationForDesktop_IsSelected() { return privateOptimizationForDesktop_IsSelected; } public void setOptimizationForDesktop_IsSelected(EntityModel value) { privateOptimizationForDesktop_IsSelected = value; } private EntityModel privateOptimizationCustom_IsSelected; public EntityModel getOptimizationCustom_IsSelected() { return privateOptimizationCustom_IsSelected; } public void setOptimizationCustom_IsSelected(EntityModel value) { privateOptimizationCustom_IsSelected = value; } private EntityModel privateMigrateOnErrorOption_NO; public EntityModel getMigrateOnErrorOption_NO() { return privateMigrateOnErrorOption_NO; } public void setMigrateOnErrorOption_NO(EntityModel value) { privateMigrateOnErrorOption_NO = value; } private EntityModel privateMigrateOnErrorOption_YES; public EntityModel getMigrateOnErrorOption_YES() { return privateMigrateOnErrorOption_YES; } public void setMigrateOnErrorOption_YES(EntityModel value) { privateMigrateOnErrorOption_YES = value; } private EntityModel privateMigrateOnErrorOption_HA_ONLY; public EntityModel getMigrateOnErrorOption_HA_ONLY() { return privateMigrateOnErrorOption_HA_ONLY; } public void setMigrateOnErrorOption_HA_ONLY(EntityModel value) { privateMigrateOnErrorOption_HA_ONLY = value; } private boolean isGeneralTabValid; public boolean getIsGeneralTabValid() { return isGeneralTabValid; } public void setIsGeneralTabValid(boolean value) { if (isGeneralTabValid != value) { isGeneralTabValid = value; OnPropertyChanged(new PropertyChangedEventArgs("IsGeneralTabValid")); //$NON-NLS-1$ } } private MigrateOnErrorOptions migrateOnErrorOption = MigrateOnErrorOptions.values()[0]; public MigrateOnErrorOptions getMigrateOnErrorOption() { if ((Boolean) getMigrateOnErrorOption_NO().getEntity() == true) { return MigrateOnErrorOptions.NO; } else if ((Boolean) getMigrateOnErrorOption_YES().getEntity() == true) { return MigrateOnErrorOptions.YES; } else if ((Boolean) getMigrateOnErrorOption_HA_ONLY().getEntity() == true) { return MigrateOnErrorOptions.HA_ONLY; } return MigrateOnErrorOptions.YES; } public void setMigrateOnErrorOption(MigrateOnErrorOptions value) { if (migrateOnErrorOption != value) { migrateOnErrorOption = value; // webadmin use. 
switch (migrateOnErrorOption) { case NO: getMigrateOnErrorOption_NO().setEntity(true); getMigrateOnErrorOption_YES().setEntity(false); getMigrateOnErrorOption_HA_ONLY().setEntity(false); break; case YES: getMigrateOnErrorOption_NO().setEntity(false); getMigrateOnErrorOption_YES().setEntity(true); getMigrateOnErrorOption_HA_ONLY().setEntity(false); break; case HA_ONLY: getMigrateOnErrorOption_NO().setEntity(false); getMigrateOnErrorOption_YES().setEntity(false); getMigrateOnErrorOption_HA_ONLY().setEntity(true); break; default: break; } OnPropertyChanged(new PropertyChangedEventArgs("MigrateOnErrorOption")); //$NON-NLS-1$ } } private boolean privateisResiliencePolicyTabAvailable; public boolean getisResiliencePolicyTabAvailable() { return privateisResiliencePolicyTabAvailable; } public void setisResiliencePolicyTabAvailable(boolean value) { privateisResiliencePolicyTabAvailable = value; } public boolean getIsResiliencePolicyTabAvailable() { return getisResiliencePolicyTabAvailable(); } public void setIsResiliencePolicyTabAvailable(boolean value) { if (getisResiliencePolicyTabAvailable() != value) { setisResiliencePolicyTabAvailable(value); OnPropertyChanged(new PropertyChangedEventArgs("IsResiliencePolicyTabAvailable")); //$NON-NLS-1$ } } public int getMemoryOverCommit() { if ((Boolean) getOptimizationNone_IsSelected().getEntity()) { return (Integer) getOptimizationNone().getEntity(); } if ((Boolean) getOptimizationForServer_IsSelected().getEntity()) { return (Integer) getOptimizationForServer().getEntity(); } if ((Boolean) getOptimizationForDesktop_IsSelected().getEntity()) { return (Integer) getOptimizationForDesktop().getEntity(); } if ((Boolean) getOptimizationCustom_IsSelected().getEntity()) { return (Integer) getOptimizationCustom().getEntity(); } return DataProvider.GetClusterDefaultMemoryOverCommit(); } public void setMemoryOverCommit(int value) { getOptimizationNone_IsSelected().setEntity(value == (Integer) getOptimizationNone().getEntity()); getOptimizationForServer_IsSelected().setEntity(value == (Integer) getOptimizationForServer().getEntity()); getOptimizationForDesktop_IsSelected().setEntity(value == (Integer) getOptimizationForDesktop().getEntity()); if (!(Boolean) getOptimizationNone_IsSelected().getEntity() && !(Boolean) getOptimizationForServer_IsSelected().getEntity() && !(Boolean) getOptimizationForDesktop_IsSelected().getEntity()) { getOptimizationCustom().setIsAvailable(true); getOptimizationCustom().setEntity(value); getOptimizationCustom_IsSelected().setIsAvailable(true); getOptimizationCustom_IsSelected().setEntity(true); } } public ClusterModel() { } public void Init(final boolean isEdit) { setIsEdit(isEdit); setName(new EntityModel()); setDescription(new EntityModel()); setEnableOvirtService(new EntityModel()); getEnableOvirtService().setEntity(ApplicationModeHelper.isModeSupported(ApplicationMode.VirtOnly)); getEnableOvirtService().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.VirtOnly && ApplicationModeHelper.isModeSupported(ApplicationMode.VirtOnly)); initImportCluster(isEdit); setEnableGlusterService(new EntityModel()); getEnableGlusterService().getEntityChangedEvent().addListener(new IEventListener() { @Override public void eventRaised(Event ev, Object sender, EventArgs args) { if (!isEdit && getEnableGlusterService().getEntity() != null && (Boolean) getEnableGlusterService().getEntity()) { getIsImportGlusterConfiguration().setIsAvailable(true); getGlusterHostAddress().setIsAvailable(true); 
getGlusterHostFingerprint().setIsAvailable(true); getGlusterHostPassword().setIsAvailable(true); } else { getIsImportGlusterConfiguration().setIsAvailable(false); getIsImportGlusterConfiguration().setEntity(false); getGlusterHostAddress().setIsAvailable(false); getGlusterHostFingerprint().setIsAvailable(false); getGlusterHostPassword().setIsAvailable(false); } } }); getEnableGlusterService().setEntity(ApplicationModeHelper.getUiMode() == ApplicationMode.GlusterOnly); getEnableGlusterService().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly && ApplicationModeHelper.isModeSupported(ApplicationMode.GlusterOnly)); setOptimizationNone(new EntityModel()); setOptimizationForServer(new EntityModel()); setOptimizationForDesktop(new EntityModel()); setOptimizationCustom(new EntityModel()); EntityModel tempVar = new EntityModel(); tempVar.setEntity(false); setOptimizationNone_IsSelected(tempVar); getOptimizationNone_IsSelected().getEntityChangedEvent().addListener(this); EntityModel tempVar2 = new EntityModel(); tempVar2.setEntity(false); setOptimizationForServer_IsSelected(tempVar2); getOptimizationForServer_IsSelected().getEntityChangedEvent().addListener(this); EntityModel tempVar3 = new EntityModel(); tempVar3.setEntity(false); setOptimizationForDesktop_IsSelected(tempVar3); getOptimizationForDesktop_IsSelected().getEntityChangedEvent().addListener(this); EntityModel tempVar4 = new EntityModel(); tempVar4.setEntity(false); tempVar4.setIsAvailable(false); setOptimizationCustom_IsSelected(tempVar4); getOptimizationCustom_IsSelected().getEntityChangedEvent().addListener(this); EntityModel tempVar5 = new EntityModel(); tempVar5.setEntity(false); setMigrateOnErrorOption_YES(tempVar5); getMigrateOnErrorOption_YES().getEntityChangedEvent().addListener(this); EntityModel tempVar6 = new EntityModel(); tempVar6.setEntity(false); setMigrateOnErrorOption_NO(tempVar6); getMigrateOnErrorOption_NO().getEntityChangedEvent().addListener(this); EntityModel tempVar7 = new EntityModel(); tempVar7.setEntity(false); setMigrateOnErrorOption_HA_ONLY(tempVar7); getMigrateOnErrorOption_HA_ONLY().getEntityChangedEvent().addListener(this); // Optimization methods: // default value =100; setDefaultOverCommit(DataProvider.GetClusterDefaultMemoryOverCommit()); AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { ClusterModel clusterModel = (ClusterModel) model; clusterModel.setDesktopOverCommit((Integer) result); AsyncQuery _asyncQuery1 = new AsyncQuery(); _asyncQuery1.setModel(clusterModel); _asyncQuery1.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model1, Object result1) { ClusterModel clusterModel1 = (ClusterModel) model1; clusterModel1.setServerOverCommit((Integer) result1); // temp is used for conversion purposes EntityModel temp; temp = clusterModel1.getOptimizationNone(); temp.setEntity(clusterModel1.getDefaultOverCommit()); // res1, res2 is used for conversion purposes. 
boolean res1 = clusterModel1.getDesktopOverCommit() != clusterModel1.getDefaultOverCommit(); boolean res2 = clusterModel1.getServerOverCommit() != clusterModel1.getDefaultOverCommit(); temp = clusterModel1.getOptimizationNone_IsSelected(); setIsSelected(res1 && res2); temp.setEntity(getIsSelected()); temp = clusterModel1.getOptimizationForServer(); temp.setEntity(clusterModel1.getServerOverCommit()); temp = clusterModel1.getOptimizationForServer_IsSelected(); temp.setEntity(clusterModel1.getServerOverCommit() == clusterModel1.getDefaultOverCommit()); temp = clusterModel1.getOptimizationForDesktop(); temp.setEntity(clusterModel1.getDesktopOverCommit()); temp = temp = clusterModel1.getOptimizationForDesktop_IsSelected(); temp.setEntity(clusterModel1.getDesktopOverCommit() == clusterModel1.getDefaultOverCommit()); temp = clusterModel1.getOptimizationCustom(); temp.setIsAvailable(false); temp.setIsChangable(false); if (clusterModel1.getIsEdit()) { clusterModel1.postInit(); } } }; AsyncDataProvider.GetClusterServerMemoryOverCommit(_asyncQuery1); } }; AsyncDataProvider.GetClusterDesktopMemoryOverCommit(_asyncQuery); setDataCenter(new ListModel()); getDataCenter().getSelectedItemChangedEvent().addListener(this); getDataCenter().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly); setCPU(new ListModel()); getCPU().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly); setVersion(new ListModel()); getVersion().getSelectedItemChangedEvent().addListener(this); getVersion().setIsAvailable(ApplicationModeHelper.getUiMode() != ApplicationMode.GlusterOnly); setMigrateOnErrorOption(MigrateOnErrorOptions.YES); setIsGeneralTabValid(true); setIsResiliencePolicyTabAvailable(true); } private void initImportCluster(boolean isEdit) { setGlusterHostAddress(new EntityModel()); getGlusterHostAddress().getEntityChangedEvent().addListener(new IEventListener() { @Override public void eventRaised(Event ev, Object sender, EventArgs args) { setIsFingerprintVerified(false); fetchFingerprint((String) getGlusterHostAddress().getEntity()); } }); setGlusterHostFingerprint(new EntityModel()); getGlusterHostFingerprint().setEntity(""); //$NON-NLS-1$ setIsFingerprintVerified(false); setGlusterHostPassword(new EntityModel()); setIsImportGlusterConfiguration(new EntityModel()); getIsImportGlusterConfiguration().getEntityChangedEvent().addListener(new IEventListener() { @Override public void eventRaised(Event ev, Object sender, EventArgs args) { if (getIsImportGlusterConfiguration().getEntity() != null && (Boolean) getIsImportGlusterConfiguration().getEntity()) { getGlusterHostAddress().setIsChangable(true); getGlusterHostPassword().setIsChangable(true); } else { getGlusterHostAddress().setIsChangable(false); getGlusterHostPassword().setIsChangable(false); } } }); getIsImportGlusterConfiguration().setEntity(false); } private void fetchFingerprint(String hostAddress) { AsyncQuery aQuery = new AsyncQuery(); aQuery.setModel(this); aQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { String fingerprint = (String) result; if (fingerprint != null && fingerprint.length() > 0) { getGlusterHostFingerprint().setEntity(result); setIsFingerprintVerified(true); } else { getGlusterHostFingerprint().setEntity(ConstantsManager.getInstance() .getConstants() .errorLoadingFingerprint()); setIsFingerprintVerified(false); } } }; AsyncDataProvider.GetHostFingerprint(aQuery, hostAddress); 
getGlusterHostFingerprint().setEntity(ConstantsManager.getInstance().getConstants().loadingFingerprint()); } private void postInit() { getDescription().setEntity(getEntity().getdescription()); setMemoryOverCommit(getEntity().getmax_vds_memory_over_commit()); AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { ClusterModel clusterModel = (ClusterModel) model; ArrayList<storage_pool> dataCenters = (ArrayList<storage_pool>) result; clusterModel.getDataCenter().setItems(dataCenters); clusterModel.getDataCenter().setSelectedItem(null); for (storage_pool a : dataCenters) { if (clusterModel.getEntity().getstorage_pool_id() != null && a.getId().equals(clusterModel.getEntity().getstorage_pool_id())) { clusterModel.getDataCenter().setSelectedItem(a); break; } } clusterModel.getDataCenter().setIsChangable(clusterModel.getDataCenter().getSelectedItem() == null); clusterModel.setMigrateOnErrorOption(clusterModel.getEntity().getMigrateOnError()); } }; AsyncDataProvider.GetDataCenterList(_asyncQuery); } @Override public void eventRaised(Event ev, Object sender, EventArgs args) { super.eventRaised(ev, sender, args); if (ev.equals(ListModel.SelectedItemChangedEventDefinition)) { if (sender == getDataCenter()) { StoragePool_SelectedItemChanged(args); } else if (sender == getVersion()) { Version_SelectedItemChanged(args); } } else if (ev.equals(EntityModel.EntityChangedEventDefinition)) { EntityModel senderEntityModel = (EntityModel) sender; if ((Boolean) senderEntityModel.getEntity()) { if (senderEntityModel == getOptimizationNone_IsSelected()) { getOptimizationForServer_IsSelected().setEntity(false); getOptimizationForDesktop_IsSelected().setEntity(false); getOptimizationCustom_IsSelected().setEntity(false); } else if (senderEntityModel == getOptimizationForServer_IsSelected()) { getOptimizationNone_IsSelected().setEntity(false); getOptimizationForDesktop_IsSelected().setEntity(false); getOptimizationCustom_IsSelected().setEntity(false); } else if (senderEntityModel == getOptimizationForDesktop_IsSelected()) { getOptimizationNone_IsSelected().setEntity(false); getOptimizationForServer_IsSelected().setEntity(false); getOptimizationCustom_IsSelected().setEntity(false); } else if (senderEntityModel == getOptimizationCustom_IsSelected()) { getOptimizationNone_IsSelected().setEntity(false); getOptimizationForServer_IsSelected().setEntity(false); getOptimizationForDesktop_IsSelected().setEntity(false); } else if (senderEntityModel == getMigrateOnErrorOption_YES()) { getMigrateOnErrorOption_NO().setEntity(false); getMigrateOnErrorOption_HA_ONLY().setEntity(false); } else if (senderEntityModel == getMigrateOnErrorOption_NO()) { getMigrateOnErrorOption_YES().setEntity(false); getMigrateOnErrorOption_HA_ONLY().setEntity(false); } else if (senderEntityModel == getMigrateOnErrorOption_HA_ONLY()) { getMigrateOnErrorOption_YES().setEntity(false); getMigrateOnErrorOption_NO().setEntity(false); } } } } private void Version_SelectedItemChanged(EventArgs e) { Version version; if (getVersion().getSelectedItem() != null) { version = (Version) getVersion().getSelectedItem(); } else { version = ((storage_pool) getDataCenter().getSelectedItem()).getcompatibility_version(); } AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { ClusterModel clusterModel = 
(ClusterModel) model; ArrayList<ServerCpu> cpus = (ArrayList<ServerCpu>) result; ServerCpu oldSelectedCpu = (ServerCpu) clusterModel.getCPU().getSelectedItem(); clusterModel.getCPU().setItems(cpus); clusterModel.getCPU().setSelectedItem(oldSelectedCpu != null ? Linq.FirstOrDefault(cpus, new Linq.ServerCpuPredicate(oldSelectedCpu.getCpuName())) : null); if (clusterModel.getCPU().getSelectedItem() == null || !isCPUinitialized) { InitCPU(); } } }; AsyncDataProvider.GetCPUList(_asyncQuery, version); } private void InitCPU() { if (!isCPUinitialized && getIsEdit()) { isCPUinitialized = true; getCPU().setSelectedItem(null); for (ServerCpu a : (ArrayList<ServerCpu>) getCPU().getItems()) { if (StringHelper.stringsEqual(a.getCpuName(), getEntity().getcpu_name())) { getCPU().setSelectedItem(a); break; } } } } private void StoragePool_SelectedItemChanged(EventArgs e) { // possible versions for new cluster (when editing cluster, this event won't occur) // are actually the possible versions for the data-center that the cluster is going // to be attached to. storage_pool selectedDataCenter = (storage_pool) getDataCenter().getSelectedItem(); if (selectedDataCenter == null) { return; } if (selectedDataCenter.getstorage_pool_type() == StorageType.LOCALFS) { setIsResiliencePolicyTabAvailable(false); } else { setIsResiliencePolicyTabAvailable(true); } AsyncQuery _asyncQuery = new AsyncQuery(); _asyncQuery.setModel(this); _asyncQuery.asyncCallback = new INewAsyncCallback() { @Override public void OnSuccess(Object model, Object result) { ClusterModel clusterModel = (ClusterModel) model; ArrayList<Version> versions = (ArrayList<Version>) result; clusterModel.getVersion().setItems(versions); if (!versions.contains(clusterModel.getVersion().getSelectedItem())) { if (versions.contains(((storage_pool) clusterModel.getDataCenter().getSelectedItem()).getcompatibility_version())) { clusterModel.getVersion().setSelectedItem(((storage_pool) clusterModel.getDataCenter() .getSelectedItem()).getcompatibility_version()); } else { clusterModel.getVersion().setSelectedItem(Linq.SelectHighestVersion(versions)); } } else if (clusterModel.getIsEdit()) { clusterModel.getVersion().setSelectedItem(Linq.FirstOrDefault(versions, new Linq.VersionPredicate(((VDSGroup) clusterModel.getEntity()).getcompatibility_version()))); } } }; AsyncDataProvider.GetDataCenterVersions(_asyncQuery, selectedDataCenter == null ? null : (NGuid) (selectedDataCenter.getId())); } public boolean Validate(boolean validateCpu) { return Validate(true, validateCpu); } public boolean Validate(boolean validateStoragePool, boolean validateCpu) { getName().ValidateEntity(new IValidation[] { new NotEmptyValidation(), new LengthValidation(40), new I18NNameValidation() }); if (validateStoragePool) { getDataCenter().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() }); } if (validateCpu) { getCPU().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() }); } else { getCPU().ValidateSelectedItem(new IValidation[] {}); } getVersion().ValidateSelectedItem(new IValidation[] { new NotEmptyValidation() }); // TODO: async validation for webadmin // string name = (string)Name.Entity; // //Check name unicitate. 
// if (String.Compare(name, OriginalName, true) != 0 && !DataProvider.IsClusterNameUnique(name)) // { // Name.IsValid = false; // Name.InvalidityReasons.Add("Name must be unique."); // } boolean validService = true; if (getEnableOvirtService().getIsAvailable() && getEnableGlusterService().getIsAvailable()) { validService = ((Boolean) getEnableOvirtService().getEntity()) || ((Boolean) getEnableGlusterService().getEntity()); } if (!validService) { setMessage(ConstantsManager.getInstance().getConstants().clusterServiceValidationMsg()); } else if (((Boolean) getIsImportGlusterConfiguration().getEntity()) && !isFingerprintVerified()) { setMessage(ConstantsManager.getInstance().getConstants().fingerprintNotVerified()); } else { setMessage(null); } getGlusterHostAddress().ValidateEntity(new IValidation[] { new NotEmptyValidation() }); getGlusterHostPassword().ValidateEntity(new IValidation[] { new NotEmptyValidation() }); setIsGeneralTabValid(getName().getIsValid() && getDataCenter().getIsValid() && getCPU().getIsValid() && getVersion().getIsValid() && validService && getGlusterHostAddress().getIsValid() && getGlusterHostPassword().getIsValid() && ((Boolean) getIsImportGlusterConfiguration().getEntity() ? isFingerprintVerified() : true)); return getName().getIsValid() && getDataCenter().getIsValid() && getCPU().getIsValid() && getVersion().getIsValid() && validService && getGlusterHostAddress().getIsValid() && getGlusterHostPassword().getIsValid() && ((Boolean) getIsImportGlusterConfiguration().getEntity() ? isFingerprintVerified() : true); } }
userportal,webadmin: ClusterModel FindBugs

In StoragePool_SelectedItemChanged, selectedDataCenter is validated to be non-null in the beginning of the method, so further checks are redundant, and can be safely removed.

Change-Id: Id3d2dbe60db232d07f463af96a3b05b9513a40e6
Signed-off-by: Allon Mureinik <[email protected]>
frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/clusters/ClusterModel.java
userportal,webadmin: ClusterModel FindBugs
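A minimal, hypothetical Java sketch of the pattern described in the commit message of the record above (the class and method names below are assumed for illustration and do not come from the oVirt sources): once a reference has been null-checked at the top of a method, a later inline "x == null ? ... : ..." guard on the same reference can never take its null branch. That is what FindBugs reports and what the commit removes from the GetDataCenterVersions call (compare the call in new_contents and old_contents).

// Illustrative sketch only; names are hypothetical, not from the dataset record.
public final class RedundantNullCheckSketch {

    // Before the fix: "selected" is already guarded at the top of the method,
    // so the inline "selected == null ? null : ..." re-check is dead code.
    static String resolveIdBefore(Object selected) {
        if (selected == null) {
            return null;
        }
        return selected == null ? null : selected.toString(); // redundant re-check
    }

    // After the fix: the early guard is the single null check.
    static String resolveIdAfter(Object selected) {
        if (selected == null) {
            return null;
        }
        return selected.toString();
    }

    public static void main(String[] args) {
        System.out.println(resolveIdBefore("dc-1")); // prints dc-1
        System.out.println(resolveIdAfter("dc-1"));  // prints dc-1
        System.out.println(resolveIdAfter(null));    // prints null
    }
}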
Java
apache-2.0
3fb5eafeacc9626f663584732fc64aa3f4da8574
0
subutai-io/Subutai,subutai-io/Subutai,subutai-io/base,subutai-io/base,subutai-io/Subutai,subutai-io/Subutai,subutai-io/Subutai,subutai-io/base,subutai-io/Subutai,subutai-io/base
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package org.safehaus.kiskis.mgmt.server.ui.modules.mongo.exec; import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import org.safehaus.kiskis.mgmt.shared.protocol.Command; import org.safehaus.kiskis.mgmt.shared.protocol.Response; import org.safehaus.kiskis.mgmt.shared.protocol.ServiceLocator; import org.safehaus.kiskis.mgmt.shared.protocol.Task; import org.safehaus.kiskis.mgmt.shared.protocol.Util; import org.safehaus.kiskis.mgmt.shared.protocol.api.CommandManagerInterface; import org.safehaus.kiskis.mgmt.shared.protocol.api.ResponseListener; import org.safehaus.kiskis.mgmt.shared.protocol.enums.ResponseType; import org.safehaus.kiskis.mgmt.shared.protocol.enums.TaskStatus; /** * * @author dilshat */ public abstract class Operation implements ResponseListener { private static final Logger LOG = Logger.getLogger(Operation.class.getName()); protected final CommandManagerInterface commandManager; private final List<Task> tasks = new ArrayList<Task>(); private final String description; private volatile int currentTaskIdx = -1; private volatile boolean stopped = true; private volatile boolean failed = false; private volatile boolean succeeded = false; private final StringBuilder output = new StringBuilder(); private final StringBuilder log = new StringBuilder(); private int commandCount = 0, okCommandCount = 0; public Operation(final String description) { this.description = description; this.commandManager = ServiceLocator.getService(CommandManagerInterface.class); } public boolean isStopped() { return stopped; } public boolean isFailed() { return failed; } public boolean isSucceeded() { return succeeded; } public boolean isCompleted() { return currentTaskIdx >= tasks.size() - 1; } public boolean start() { try { if (!(succeeded || isCompleted())) { if (!failed) { if (stopped) { stopped = false; onOperationStarted(); executeNextTask(); return true; } else { appendOutput("Operation is already running"); } } else { appendOutput("Operation failed. Please start from the beginning."); } } else { appendOutput("Operation completed. 
No more tasks to run."); } } catch (Exception e) { LOG.log(Level.SEVERE, "Error in start", e); } return false; } public void stop() { stopped = true; } private boolean executeNextTask() { boolean result = false; try { if (!(stopped || failed || succeeded || isCompleted())) { Task currentTask = tasks.get(++currentTaskIdx); onBeforeTaskRun(currentTask); if (currentTask != null && currentTask.getCommands() != null) { if (!currentTask.getCommands().isEmpty()) { commandCount = 0; okCommandCount = 0; for (Command cmd : currentTask.getCommands()) { commandManager.executeCommand(cmd); } result = true; } else { appendOutput(MessageFormat.format( "Task {0} has no commands", getCurrentTask().getDescription())); } } else { appendOutput("Malformed task"); } onAfterTaskRun(currentTask); } } catch (Exception e) { LOG.log(Level.SEVERE, "Error in executeNextTask", e); } return result; } protected void addTask(Task task) { if (task != null) { tasks.add(task); } } public Task getCurrentTask() { Task task = null; if (currentTaskIdx > -1 && currentTaskIdx < tasks.size()) { task = tasks.get(currentTaskIdx); } return task; } public Task getPreviousTask() { Task task = null; if (currentTaskIdx > 0) { task = tasks.get(currentTaskIdx - 1); } return task; } public Task getNextTask() { Task task = null; if (currentTaskIdx < tasks.size() - 1) { task = tasks.get(currentTaskIdx + 1); } return task; } public String getDescription() { return description; } public int getOverallTimeout() { int timeout = 0; try { for (Task task : tasks) { int taskTimeout = 0; for (Command command : task.getCommands()) { taskTimeout += command.getRequest().getTimeout(); } taskTimeout /= task.getCommands().size(); timeout += taskTimeout; } } catch (Exception e) { LOG.log(Level.SEVERE, "Error in getOverallTimeout", e); } return timeout; } protected void beforeResponseProcessed(Response response) { if (!Util.isStringEmpty(response.getStdOut())) { appendLog("StdOut:"); appendLog(response.getStdOut()); } if (!Util.isStringEmpty(response.getStdErr())) { appendLog("StdErr:"); appendLog(response.getStdErr()); } if (response.getType() == ResponseType.EXECUTE_RESPONSE_DONE) { appendLog("Exit Code: " + response.getExitCode()); } if (response.getType() == ResponseType.EXECUTE_TIMEOUTED) { appendLog("Command timeouted"); } } protected void processResponse(Response response) { Task task = getCurrentTask(); if (response.getType() == ResponseType.EXECUTE_RESPONSE_DONE || response.getType() == ResponseType.EXECUTE_TIMEOUTED) { commandCount++; if ((response.getType() == ResponseType.EXECUTE_RESPONSE_DONE && response.getExitCode() == 0) || task.isIgnoreExitCode()) { okCommandCount++; } task.setCompleted(commandCount == task.getCommands().size()); if (task.isCompleted()) { if (commandCount == okCommandCount) { task.setTaskStatus(TaskStatus.SUCCESS); Util.saveTask(task); } else { task.setTaskStatus(TaskStatus.FAIL); Util.saveTask(task); } } } } @Override public void onResponse(Response response) { System.out.println("PROCESSING RESPONSE " + response); clearOutput(); clearLog(); try { Task task = getCurrentTask(); if (task != null && response != null && task.getTaskStatus() == TaskStatus.NEW && response.getType() != null && task.getUuid() != null && response.getTaskUuid() != null && task.getUuid().compareTo(response.getTaskUuid()) == 0) { beforeResponseProcessed(response); processResponse(response); if (task.isCompleted()) { //task completed onTaskCompleted(task); if (task.getTaskStatus() == TaskStatus.SUCCESS) { //task succeeded or ignoreExitCode is true 
onTaskSucceeded(task); if (!isCompleted()) { //operation is not done yet if (!isStopped()) { //execute next task executeNextTask(); } else { // operation is stopped by use onOperationStopped(); } } else { //operation is done succeeded = true; onOperationEnded(); } } else { //task failed -> operation failed failed = true; onTaskFailed(task); onOperationFailed(); } } } } catch (Exception e) { LOG.log(Level.SEVERE, "Error in onResponse", e); } } protected void onTaskCompleted(Task task) { } protected void onTaskSucceeded(Task task) { appendOutput(MessageFormat.format( "Task {0} succeeded.", task.getDescription())); } protected void onTaskFailed(Task task) { appendOutput(MessageFormat.format( "Task {0} failed.", task.getDescription())); } protected void onOperationEnded() { appendOutput(MessageFormat.format( "Operation \"{0}\" completed successfully.", getDescription())); } protected void onOperationStarted() { appendOutput(MessageFormat.format( "Operation \"{0}\" started.", getDescription())); } protected void onOperationStopped() { appendOutput(MessageFormat.format( "Stopped execution before task {0}.", getNextTask().getDescription())); appendOutput(MessageFormat.format( "Operation \"{0}\" stopped.", getDescription())); } protected void onOperationFailed() { appendOutput(MessageFormat.format( "Operation \"{0}\" aborted.", getDescription())); } protected void onBeforeTaskRun(Task task) { appendOutput(MessageFormat.format( "Running task {0}...", task.getDescription())); appendLog(MessageFormat.format( "======= {0} =======", task.getDescription())); if (task.isIgnoreExitCode()) { appendLog("======= Ignore ExitCode = TRUE ======="); } } protected void onAfterTaskRun(Task task) { } public String getOutput() { return output.toString(); } public void appendOutput(String s) { if (output.length() == 0) { output.append(s); } else { output.append("\n\n").append(s); } } // public void setOutput(String s) { // clearOutput(); // appendOutput(s); // } // public void clearOutput() { output.setLength(0); } public String getLog() { return log.toString(); } public void appendLog(String s) { if (log.length() == 0) { log.append(s); } else { log.append("\n\n").append(s); } } // public void setLog(String s) { // clearLog(); // appendLog(s); // } // public void clearLog() { log.setLength(0); } @Override public String getSource() { return getClass().getName(); } }
management/server/ui-modules/mongodb/src/main/java/org/safehaus/kiskis/mgmt/server/ui/modules/mongo/exec/Operation.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package org.safehaus.kiskis.mgmt.server.ui.modules.mongo.exec;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.safehaus.kiskis.mgmt.shared.protocol.Command;
import org.safehaus.kiskis.mgmt.shared.protocol.Response;
import org.safehaus.kiskis.mgmt.shared.protocol.ServiceLocator;
import org.safehaus.kiskis.mgmt.shared.protocol.Task;
import org.safehaus.kiskis.mgmt.shared.protocol.Util;
import org.safehaus.kiskis.mgmt.shared.protocol.api.CommandManagerInterface;
import org.safehaus.kiskis.mgmt.shared.protocol.api.ResponseListener;
import org.safehaus.kiskis.mgmt.shared.protocol.enums.ResponseType;
import org.safehaus.kiskis.mgmt.shared.protocol.enums.TaskStatus;

/**
 *
 * @author dilshat
 */
public abstract class Operation implements ResponseListener {

    private static final Logger LOG = Logger.getLogger(Operation.class.getName());

    protected final CommandManagerInterface commandManager;
    private final List<Task> tasks = new ArrayList<Task>();
    private final String description;
    private volatile int currentTaskIdx = -1;
    private volatile boolean stopped = true;
    private volatile boolean failed = false;
    private volatile boolean succeeded = false;
    private final StringBuilder output = new StringBuilder();
    private final StringBuilder log = new StringBuilder();
    private int commandCount = 0, okCommandCount = 0;

    public Operation(final String description) {
        this.description = description;
        this.commandManager = ServiceLocator.getService(CommandManagerInterface.class);
    }

    public boolean isStopped() {
        return stopped;
    }

    public boolean isFailed() {
        return failed;
    }

    public boolean isSucceeded() {
        return succeeded;
    }

    public boolean isCompleted() {
        return currentTaskIdx >= tasks.size() - 1;
    }

    public boolean start() {
        try {
            if (!(succeeded || isCompleted())) {
                if (!failed) {
                    if (stopped) {
                        stopped = false;
                        onOperationStarted();
                        executeNextTask();
                        return true;
                    } else {
                        appendOutput("Operation is already running");
                    }
                } else {
                    appendOutput("Operation failed. Please start from the beginning.");
                }
            } else {
                appendOutput("Operation completed. No more tasks to run.");
            }
        } catch (Exception e) {
            LOG.log(Level.SEVERE, "Error in start", e);
        }
        return false;
    }

    public void stop() {
        stopped = true;
    }

    private boolean executeNextTask() {
        boolean result = false;
        try {
            if (!(stopped || failed || succeeded || isCompleted())) {
                Task currentTask = tasks.get(++currentTaskIdx);
                onBeforeTaskRun(currentTask);
                if (currentTask != null && currentTask.getCommands() != null) {
                    if (!currentTask.getCommands().isEmpty()) {
                        commandCount = 0;
                        okCommandCount = 0;
                        for (Command cmd : currentTask.getCommands()) {
                            commandManager.executeCommand(cmd);
                        }
                        result = true;
                    } else {
                        appendOutput(MessageFormat.format(
                                "Task {0} has no commands", getCurrentTask().getDescription()));
                    }
                } else {
                    appendOutput("Malformed task");
                }
                onAfterTaskRun(currentTask);
            }
        } catch (Exception e) {
            LOG.log(Level.SEVERE, "Error in executeNextTask", e);
        }
        return result;
    }

    protected void addTask(Task task) {
        if (task != null) {
            tasks.add(task);
        }
    }

    public Task getCurrentTask() {
        Task task = null;
        if (currentTaskIdx > -1 && currentTaskIdx < tasks.size()) {
            task = tasks.get(currentTaskIdx);
        }
        return task;
    }

    public Task getPreviousTask() {
        Task task = null;
        if (currentTaskIdx > 0) {
            task = tasks.get(currentTaskIdx - 1);
        }
        return task;
    }

    public Task getNextTask() {
        Task task = null;
        if (currentTaskIdx < tasks.size() - 1) {
            task = tasks.get(currentTaskIdx + 1);
        }
        return task;
    }

    public String getDescription() {
        return description;
    }

    public int getOverallTimeout() {
        int timeout = 0;
        try {
            for (Task task : tasks) {
                int taskTimeout = 0;
                for (Command command : task.getCommands()) {
                    taskTimeout += command.getRequest().getTimeout();
                }
                taskTimeout /= task.getCommands().size();
                timeout += taskTimeout;
            }
        } catch (Exception e) {
            LOG.log(Level.SEVERE, "Error in getOverallTimeout", e);
        }
        return timeout;
    }

    protected void beforeResponseProcessed(Response response) {
        if (!Util.isStringEmpty(response.getStdOut())) {
            appendLog("StdOut:");
            appendLog(response.getStdOut());
        }
        if (!Util.isStringEmpty(response.getStdErr())) {
            appendLog("StdErr:");
            appendLog(response.getStdErr());
        }
        if (response.getType() == ResponseType.EXECUTE_RESPONSE_DONE) {
            appendLog("Exit Code: " + response.getExitCode());
        }
        if (response.getType() == ResponseType.EXECUTE_TIMEOUTED) {
            appendLog("Command timeouted");
        }
    }

    protected void processResponse(Response response) {
        Task task = getCurrentTask();
        if (response.getType() == ResponseType.EXECUTE_RESPONSE_DONE
                || response.getType() == ResponseType.EXECUTE_TIMEOUTED) {
            commandCount++;
            if ((response.getType() == ResponseType.EXECUTE_RESPONSE_DONE && response.getExitCode() == 0)
                    || task.isIgnoreExitCode()) {
                okCommandCount++;
            }
            task.setCompleted(commandCount == task.getCommands().size());
            if (task.isCompleted()) {
                if (commandCount == okCommandCount) {
                    task.setTaskStatus(TaskStatus.SUCCESS);
                    Util.saveTask(task);
                } else {
                    task.setTaskStatus(TaskStatus.FAIL);
                    Util.saveTask(task);
                }
            }
        }
    }

    @Override
    public void onResponse(Response response) {
        clearOutput();
        clearLog();
        try {
            Task task = getCurrentTask();
            if (task != null && response != null
                    && task.getTaskStatus() == TaskStatus.NEW
                    && response.getType() != null
                    && task.getUuid() != null
                    && response.getTaskUuid() != null
                    && task.getUuid().compareTo(response.getTaskUuid()) == 0) {
                beforeResponseProcessed(response);
                processResponse(response);
                if (task.isCompleted()) {
                    //task completed
                    onTaskCompleted(task);
                    if (task.getTaskStatus() == TaskStatus.SUCCESS) {
                        //task succeeded or ignoreExitCode is true
                        onTaskSucceeded(task);
                        if (!isCompleted()) {
                            //operation is not done yet
                            if (!isStopped()) {
                                //execute next task
                                executeNextTask();
                            } else {
                                //operation is stopped by user
                                onOperationStopped();
                            }
                        } else {
                            //operation is done
                            succeeded = true;
                            onOperationEnded();
                        }
                    } else {
                        //task failed -> operation failed
                        failed = true;
                        onTaskFailed(task);
                        onOperationFailed();
                    }
                }
            }
        } catch (Exception e) {
            LOG.log(Level.SEVERE, "Error in onResponse", e);
        }
    }

    protected void onTaskCompleted(Task task) {
    }

    protected void onTaskSucceeded(Task task) {
        appendOutput(MessageFormat.format(
                "Task {0} succeeded.", task.getDescription()));
    }

    protected void onTaskFailed(Task task) {
        appendOutput(MessageFormat.format(
                "Task {0} failed.", task.getDescription()));
    }

    protected void onOperationEnded() {
        appendOutput(MessageFormat.format(
                "Operation \"{0}\" completed successfully.", getDescription()));
    }

    protected void onOperationStarted() {
        appendOutput(MessageFormat.format(
                "Operation \"{0}\" started.", getDescription()));
    }

    protected void onOperationStopped() {
        appendOutput(MessageFormat.format(
                "Stopped execution before task {0}.", getNextTask().getDescription()));
        appendOutput(MessageFormat.format(
                "Operation \"{0}\" stopped.", getDescription()));
    }

    protected void onOperationFailed() {
        appendOutput(MessageFormat.format(
                "Operation \"{0}\" aborted.", getDescription()));
    }

    protected void onBeforeTaskRun(Task task) {
        appendOutput(MessageFormat.format(
                "Running task {0}...", task.getDescription()));
        appendLog(MessageFormat.format(
                "======= {0} =======", task.getDescription()));
        if (task.isIgnoreExitCode()) {
            appendLog("======= Ignore ExitCode = TRUE =======");
        }
    }

    protected void onAfterTaskRun(Task task) {
    }

    public String getOutput() {
        return output.toString();
    }

    public void appendOutput(String s) {
        if (output.length() == 0) {
            output.append(s);
        } else {
            output.append("\n\n").append(s);
        }
    }

//    public void setOutput(String s) {
//        clearOutput();
//        appendOutput(s);
//    }
//
    public void clearOutput() {
        output.setLength(0);
    }

    public String getLog() {
        return log.toString();
    }

    public void appendLog(String s) {
        if (log.length() == 0) {
            log.append(s);
        } else {
            log.append("\n\n").append(s);
        }
    }

//    public void setLog(String s) {
//        clearLog();
//        appendLog(s);
//    }
//
    public void clearLog() {
        log.setLength(0);
    }

    @Override
    public String getSource() {
        return getClass().getName();
    }
}
Mongo in progress Former-commit-id: ea1ebec5788294826e324e93b2a40085b4f53a33
management/server/ui-modules/mongodb/src/main/java/org/safehaus/kiskis/mgmt/server/ui/modules/mongo/exec/Operation.java
Mongo in progress
Java
apache-2.0
48e85b4dee8d1b8a08a886826116e49f1ba1bce2
0
softwaremill/softwaremill-common,softwaremill/softwaremill-common
package pl.softwaremill.common.test.web.selenium;

import org.testng.Assert;

import static pl.softwaremill.common.test.web.selenium.AbstractSeleniumTest.fail;
import static pl.softwaremill.common.test.web.selenium.AbstractSeleniumTest.selenium;

/**
 * @author Pawel Wrzeszcz (pawel . wrzeszcz [at] gmail . com)
 * @author Jaroslaw Kijanowski (kijanowski [at] gmail . com)
 */
public class SeleniumCommands {

    private static final String WAIT_FOR_LOAD = "60000";
    private static final int TIME_OUT = 30;

    public static void waitForPageToLoad() {
        selenium.waitForPageToLoad(WAIT_FOR_LOAD);
    }

    public static void waitFor(String xpath) throws Exception {
        for (int second = 0; ; second++) {
            if (second >= TIME_OUT) {
                Assert.fail("Timed out on xpath: " + xpath);
            }
            if (selenium.isElementPresent(xpath)) {
                break;
            }
            Thread.sleep(1000);
        }
    }

    public static void waitForElementPresent(String element) throws Exception {
        waitForElement(element, TIME_OUT, true);
    }

    public static void waitForElementNotPresent(String element) throws Exception {
        waitForElement(element, TIME_OUT, false);
    }

    public static void waitForElement(String element, boolean isPresent) throws Exception {
        waitForElement(element, TIME_OUT, isPresent);
    }

    public static void waitForElementPresent(String xpath, int timeout) throws Exception {
        waitForElement(xpath, timeout, true);
    }

    public static void waitForElement(String element, int timeout, boolean isPresent) throws Exception {
        for (int second = 0; ; second++) {
            if (second >= timeout) Assert.fail("Timed out on xpath: " + element);
            try {
                if (selenium.isElementPresent(element) == isPresent) {
                    break;
                }
            } catch (Exception e) {
                // Empty
            }
            Thread.sleep(1000);
        }
    }

    public static void clickAndWait(String locator, String timeout) {
        selenium.click(locator);
        selenium.waitForPageToLoad(timeout);
    }

    public static void clickAndWait(String locator) {
        clickAndWait(locator, String.valueOf(TIME_OUT));
    }

    public static void waitForElementVisible(String locator) throws Exception {
        waitForElementVisible(locator, true);
    }

    public static void waitForElementVisible(String locator, boolean isVisible) throws Exception {
        for (int second = 0; ; second++) {
            if (second >= TIME_OUT) Assert.fail("timeout");
            try {
                if (selenium.isVisible(locator) == isVisible) {
                    break;
                }
            } catch (Exception e) {
                // Empty
            }
            Thread.sleep(1000);
        }
    }

    public static void waitForTextPresent(String text) throws Exception {
        waitForTextPresent(text, TIME_OUT);
    }

    public static void waitForTextPresent(String text, int timeout) throws Exception {
        for (int second = 0; ; second++) {
            if (second >= timeout) Assert.fail("Timed out waiting for text: " + text);
            try {
                if (selenium.isTextPresent(text)) {
                    break;
                }
            } catch (Exception e) {
                // Empty
            }
            Thread.sleep(1000);
        }
    }

    public static void clickOncePresent(String xpath) throws Exception {
        clickOncePresent(xpath, TIME_OUT);
    }

    public static void clickOncePresent(String xpath, int timeout) throws Exception {
        waitForElementPresent(xpath, timeout);
        selenium.click(xpath);
    }

    public static void typeOncePresent(String xpath, String text) throws Exception {
        typeOncePresent(xpath, text, TIME_OUT);
    }

    public static void typeOncePresent(String xpath, String text, int timeout) throws Exception {
        waitForElementPresent(xpath, timeout);
        selenium.type(xpath, text);
    }
}
softwaremill-test/softwaremill-test-ui-web/src/main/java/pl/softwaremill/common/test/web/selenium/SeleniumCommands.java
package pl.softwaremill.common.test.web.selenium;

import org.testng.Assert;

import static pl.softwaremill.common.test.web.selenium.AbstractSeleniumTest.fail;
import static pl.softwaremill.common.test.web.selenium.AbstractSeleniumTest.selenium;

/**
 * @author Pawel Wrzeszcz (pawel . wrzeszcz [at] gmail . com)
 */
public class SeleniumCommands {

    public static void waitForPageToLoad() {
        selenium.waitForPageToLoad("60000");
    }

    public static void waitFor(String xpath) throws Exception {
        for (int second = 0; ; second++) {
            if (second >= 30) {
                fail("timeout on xpath: " + xpath);
            }
            if (selenium.isElementPresent(xpath)) {
                break;
            }
            Thread.sleep(1000);
        }
    }

    public static void waitForElementPresent(String element) throws Exception {
        waitForElement(element, true);
    }

    public static void waitForElementNotPresent(String element) throws Exception {
        waitForElement(element, false);
    }

    public static void waitForElement(String element, boolean isPresent) throws Exception {
        for (int second = 0; ; second++) {
            if (second >= 30) Assert.fail("timeout");
            try {
                if (selenium.isElementPresent(element) == isPresent) {
                    break;
                }
            } catch (Exception e) {
                // Empty
            }
            Thread.sleep(1000);
        }
    }

    public static void clickAndWait(String locator, String timeout) {
        selenium.click(locator);
        selenium.waitForPageToLoad(timeout);
    }

    public static void waitForElementVisible(String locator) throws Exception {
        waitForElementVisible(locator, true);
    }

    public static void waitForElementVisible(String locator, boolean isVisible) throws Exception {
        for (int second = 0; ; second++) {
            if (second >= 30) Assert.fail("timeout");
            try {
                if (selenium.isVisible(locator) == isVisible) {
                    break;
                }
            } catch (Exception e) {
                // Empty
            }
            Thread.sleep(1000);
        }
    }
}
#14 add methods to SeleniumCommands
softwaremill-test/softwaremill-test-ui-web/src/main/java/pl/softwaremill/common/test/web/selenium/SeleniumCommands.java
#14 add methods to SeleniumCommands