Dataset columns:

| column       | dtype         | values      |
|--------------|---------------|-------------|
| lang         | stringclasses | 1 value     |
| license      | stringclasses | 13 values   |
| stderr       | stringlengths | 0–350       |
| commit       | stringlengths | 40–40       |
| returncode   | int64         | 0–128       |
| repos        | stringlengths | 7–45.1k     |
| new_contents | stringlengths | 0–1.87M     |
| new_file     | stringlengths | 6–292       |
| old_contents | stringlengths | 0–1.87M     |
| message      | stringlengths | 6–9.26k     |
| old_file     | stringlengths | 6–292       |
| subject      | stringlengths | 0–4.45k     |
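The example rows below list their non-empty fields in the schema order above (lang, license, commit, returncode, repos, new_contents, new_file, old_contents, message, old_file, subject); stderr appears to be empty in these rows and is omitted. Purely as a sketch, one row could be modelled in Java like this — the record name and the comments are assumptions for illustration, not part of the dataset:

```java
// Illustrative model of a single dataset row; field names mirror the schema above.
public record CommitRow(
        String lang,          // stringclasses, 1 value (here: "Java")
        String license,       // stringclasses, 13 values
        String stderr,        // may be empty
        String commit,        // 40-character commit hash
        long returncode,      // int64 in the range 0–128
        String repos,         // comma-separated list of repositories/forks
        String newContents,   // file contents after the commit
        String newFile,       // path of the changed file after the commit
        String oldContents,   // file contents before the commit
        String message,       // commit message
        String oldFile,       // path of the changed file before the commit
        String subject        // subject line of the commit message
) {}
```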
lang: Java
license: mit
commit: 9723054eac19106cccd44adb7e60a42aaeb03284
returncode: 0
repos: sanaehirotaka/logbook-kai,Sdk0815/logbook-kai,m-seikou/logbook-kai,Sdk0815/logbook-kai,sanaehirotaka/logbook-kai,sanaehirotaka/logbook-kai,Sdk0815/logbook-kai,m-seikou/logbook-kai,m-seikou/logbook-kai
new_contents:
package logbook.internal.gui; import java.util.StringJoiner; import javafx.beans.property.SimpleStringProperty; import javafx.beans.property.StringProperty; /** * ่ณ‡ๆใƒญใ‚ฐใฎใƒ†ใƒผใƒ–ใƒซ่กŒ * */ public class ResourceTable { /** ๆ—ฅไป˜ */ private StringProperty date = new SimpleStringProperty(); /** ็‡ƒๆ–™ */ private StringProperty fuel = new SimpleStringProperty(); /** ๅผพ่–ฌ */ private StringProperty ammo = new SimpleStringProperty(); /** ้‹ผๆ */ private StringProperty metal = new SimpleStringProperty(); /** ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆ */ private StringProperty bauxite = new SimpleStringProperty(); /** ้ซ˜้€Ÿไฟฎๅพฉๆ */ private StringProperty bucket = new SimpleStringProperty(); /** ้ซ˜้€Ÿๅปบ้€ ๆ */ private StringProperty burner = new SimpleStringProperty(); /** ้–‹็™บ่ณ‡ๆ */ private StringProperty research = new SimpleStringProperty(); /** ๆ”นไฟฎ่ณ‡ๆ */ private StringProperty improve = new SimpleStringProperty(); /**ๆ—ฅไป˜ใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๆ—ฅไป˜ */ public StringProperty dateProperty() { return this.date; } /** * ๆ—ฅไป˜ใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๆ—ฅไป˜ */ public String getDate() { return this.date.get(); } /** * ๆ—ฅไป˜ใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param date ๆ—ฅไป˜ */ public void setDate(String date) { this.date.set(date); } /** * ็‡ƒๆ–™ใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ็‡ƒๆ–™ */ public StringProperty fuelProperty() { return this.fuel; } /** * ็‡ƒๆ–™ใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ็‡ƒๆ–™ */ public String getFuel() { return this.fuel.get(); } /** * ็‡ƒๆ–™ใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param fuel ็‡ƒๆ–™ */ public void setFuel(String fuel) { this.fuel.set(fuel); } /** * ๅผพ่–ฌใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๅผพ่–ฌ */ public StringProperty ammoProperty() { return this.ammo; } /** * ๅผพ่–ฌใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๅผพ่–ฌ */ public String getAmmo() { return this.ammo.get(); } /** * ๅผพ่–ฌใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param ammo ๅผพ่–ฌ */ public void setAmmo(String ammo) { this.ammo.set(ammo); } /** * ้‹ผๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้‹ผๆ */ public StringProperty metalProperty() { return this.metal; } /** * ้‹ผๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้‹ผๆ */ public String getMetal() { return this.metal.get(); } /** * ้‹ผๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param metal ้‹ผๆ */ public void setMetal(String metal) { this.metal.set(metal); } /** * ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆ */ public StringProperty bauxiteProperty() { return this.bauxite; } /** * ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆ */ public String getBauxite() { return this.bauxite.get(); } /** * ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param bauxite ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆ */ public void setBauxite(String bauxite) { this.bauxite.set(bauxite); } /** * ้ซ˜้€Ÿไฟฎๅพฉๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้ซ˜้€Ÿไฟฎๅพฉๆ */ public StringProperty bucketProperty() { return this.bucket; } /** * ้ซ˜้€Ÿไฟฎๅพฉๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้ซ˜้€Ÿไฟฎๅพฉๆ */ public String getBucket() { return this.bucket.get(); } /** * ้ซ˜้€Ÿไฟฎๅพฉๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param bucket ้ซ˜้€Ÿไฟฎๅพฉๆ */ public void setBucket(String bucket) { this.bucket.set(bucket); } /** * ้ซ˜้€Ÿๅปบ้€ ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้ซ˜้€Ÿๅปบ้€ ๆ */ public StringProperty burnerProperty() { return this.burner; } /** * ้ซ˜้€Ÿๅปบ้€ ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้ซ˜้€Ÿๅปบ้€ ๆ */ public String getBurner() { return this.burner.get(); } /** * ้ซ˜้€Ÿๅปบ้€ ๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param burner ้ซ˜้€Ÿๅปบ้€ ๆ */ public void setBurner(String burner) { this.burner.set(burner); } /** * 
้–‹็™บ่ณ‡ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้–‹็™บ่ณ‡ๆ */ public StringProperty researchProperty() { return this.research; } /** * ้–‹็™บ่ณ‡ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้–‹็™บ่ณ‡ๆ */ public String getResearch() { return this.research.get(); } /** * ้–‹็™บ่ณ‡ๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param research ้–‹็™บ่ณ‡ๆ */ public void setResearch(String research) { this.research.set(research); } /** * ๆ”นไฟฎ่ณ‡ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๆ”นไฟฎ่ณ‡ๆ */ public StringProperty improveProperty() { return this.improve; } /** * ๆ”นไฟฎ่ณ‡ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๆ”นไฟฎ่ณ‡ๆ */ public String getImprove() { return this.improve.get(); } /** * ๆ”นไฟฎ่ณ‡ๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param improve ๆ”นไฟฎ่ณ‡ๆ */ public void setImprove(String improve) { this.improve.set(improve); } @Override public String toString() { return new StringJoiner("\t") .add(this.date.get()) .add(this.fuel.get()) .add(this.ammo.get()) .add(this.metal.get()) .add(this.bauxite.get()) .add(this.bucket.get()) .add(this.burner.get()) .add(this.research.get()) .add(this.improve.get()) .toString(); } }
new_file: src/main/java/logbook/internal/gui/ResourceTable.java
old_contents:
package logbook.internal.gui; import javafx.beans.property.SimpleStringProperty; import javafx.beans.property.StringProperty; /** * ่ณ‡ๆใƒญใ‚ฐใฎใƒ†ใƒผใƒ–ใƒซ่กŒ * */ public class ResourceTable { /** ๆ—ฅไป˜ */ private StringProperty date = new SimpleStringProperty(); /** ็‡ƒๆ–™ */ private StringProperty fuel = new SimpleStringProperty(); /** ๅผพ่–ฌ */ private StringProperty ammo = new SimpleStringProperty(); /** ้‹ผๆ */ private StringProperty metal = new SimpleStringProperty(); /** ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆ */ private StringProperty bauxite = new SimpleStringProperty(); /** ้ซ˜้€Ÿไฟฎๅพฉๆ */ private StringProperty bucket = new SimpleStringProperty(); /** ้ซ˜้€Ÿๅปบ้€ ๆ */ private StringProperty burner = new SimpleStringProperty(); /** ้–‹็™บ่ณ‡ๆ */ private StringProperty research = new SimpleStringProperty(); /** ๆ”นไฟฎ่ณ‡ๆ */ private StringProperty improve = new SimpleStringProperty(); /**ๆ—ฅไป˜ใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๆ—ฅไป˜ */ public StringProperty dateProperty() { return this.date; } /** * ๆ—ฅไป˜ใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๆ—ฅไป˜ */ public String getDate() { return this.date.get(); } /** * ๆ—ฅไป˜ใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param date ๆ—ฅไป˜ */ public void setDate(String date) { this.date.set(date); } /** * ็‡ƒๆ–™ใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ็‡ƒๆ–™ */ public StringProperty fuelProperty() { return this.fuel; } /** * ็‡ƒๆ–™ใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ็‡ƒๆ–™ */ public String getFuel() { return this.fuel.get(); } /** * ็‡ƒๆ–™ใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param fuel ็‡ƒๆ–™ */ public void setFuel(String fuel) { this.fuel.set(fuel); } /** * ๅผพ่–ฌใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๅผพ่–ฌ */ public StringProperty ammoProperty() { return this.ammo; } /** * ๅผพ่–ฌใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๅผพ่–ฌ */ public String getAmmo() { return this.ammo.get(); } /** * ๅผพ่–ฌใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param ammo ๅผพ่–ฌ */ public void setAmmo(String ammo) { this.ammo.set(ammo); } /** * ้‹ผๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้‹ผๆ */ public StringProperty metalProperty() { return this.metal; } /** * ้‹ผๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้‹ผๆ */ public String getMetal() { return this.metal.get(); } /** * ้‹ผๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param metal ้‹ผๆ */ public void setMetal(String metal) { this.metal.set(metal); } /** * ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆ */ public StringProperty bauxiteProperty() { return this.bauxite; } /** * ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆ */ public String getBauxite() { return this.bauxite.get(); } /** * ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param bauxite ใƒœใƒผใ‚ญใ‚ตใ‚คใƒˆ */ public void setBauxite(String bauxite) { this.bauxite.set(bauxite); } /** * ้ซ˜้€Ÿไฟฎๅพฉๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้ซ˜้€Ÿไฟฎๅพฉๆ */ public StringProperty bucketProperty() { return this.bucket; } /** * ้ซ˜้€Ÿไฟฎๅพฉๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้ซ˜้€Ÿไฟฎๅพฉๆ */ public String getBucket() { return this.bucket.get(); } /** * ้ซ˜้€Ÿไฟฎๅพฉๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param bucket ้ซ˜้€Ÿไฟฎๅพฉๆ */ public void setBucket(String bucket) { this.bucket.set(bucket); } /** * ้ซ˜้€Ÿๅปบ้€ ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้ซ˜้€Ÿๅปบ้€ ๆ */ public StringProperty burnerProperty() { return this.burner; } /** * ้ซ˜้€Ÿๅปบ้€ ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้ซ˜้€Ÿๅปบ้€ ๆ */ public String getBurner() { return this.burner.get(); } /** * ้ซ˜้€Ÿๅปบ้€ ๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param burner ้ซ˜้€Ÿๅปบ้€ ๆ */ public void setBurner(String burner) { this.burner.set(burner); } /** * ้–‹็™บ่ณ‡ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return 
้–‹็™บ่ณ‡ๆ */ public StringProperty researchProperty() { return this.research; } /** * ้–‹็™บ่ณ‡ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ้–‹็™บ่ณ‡ๆ */ public String getResearch() { return this.research.get(); } /** * ้–‹็™บ่ณ‡ๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param research ้–‹็™บ่ณ‡ๆ */ public void setResearch(String research) { this.research.set(research); } /** * ๆ”นไฟฎ่ณ‡ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๆ”นไฟฎ่ณ‡ๆ */ public StringProperty improveProperty() { return this.improve; } /** * ๆ”นไฟฎ่ณ‡ๆใ‚’ๅ–ๅพ—ใ—ใพใ™ใ€‚ * @return ๆ”นไฟฎ่ณ‡ๆ */ public String getImprove() { return this.improve.get(); } /** * ๆ”นไฟฎ่ณ‡ๆใ‚’่จญๅฎšใ—ใพใ™ใ€‚ * @param improve ๆ”นไฟฎ่ณ‡ๆ */ public void setImprove(String improve) { this.improve.set(improve); } }
่ณ‡ๆใƒญใ‚ฐใฎใƒ†ใƒผใƒ–ใƒซ่กŒใ‚’ใ‚ฏใƒชใƒƒใƒ—ใƒœใƒผใƒ‰ใซใ‚ณใƒ”ใƒผใ—ใŸๆ™‚ใฎๅ†…ๅฎนใ‚’ไฟฎๆญฃ
src/main/java/logbook/internal/gui/ResourceTable.java
่ณ‡ๆใƒญใ‚ฐใฎใƒ†ใƒผใƒ–ใƒซ่กŒใ‚’ใ‚ฏใƒชใƒƒใƒ—ใƒœใƒผใƒ‰ใซใ‚ณใƒ”ใƒผใ—ใŸๆ™‚ใฎๅ†…ๅฎนใ‚’ไฟฎๆญฃ
lang: Java
license: mit
commit: 7c7d55eb4652804e71d85e1354533b3c842e0012
returncode: 0
repos: Eduardoveras94/url-Shortener-spark,Eduardoveras94/url-Shortener-spark
new_contents:
/** * Created by Siclait on 18/7/16. */ import Entity.URL; import Entity.User; import JSONTools.GeoLocation; import JSONTools.ResponseError; import eu.bitwalker.useragentutils.UserAgent; import static JSONTools.JSONUtil.json; import static spark.Spark.after; import static spark.Spark.get; public class JSONServiceController { public JSONServiceController() { // GETS // Fetch All Urls get("/json/allurls", (req, res) -> DatabaseManager.FetchAllURL(), json()); // Fetch Original of a Specific Short Url get("/json/original/:short", (req, res) -> { System.out.println("\n\nUsing Jason Service"); String shortURL = req.params(":short"); String url = DatabaseManager.FetchOriginalURL(shortURL); if(url != null) return url; res.status(400); return new ResponseError("No url with id %s found", shortURL); }, json()); // Fetch a Specific Short Url get("/json/url/:short", (req, res) -> { System.out.println("\n\nUsing Jason Service"); String shortURL = req.params(":short"); URL url = DatabaseManager.FetchURL(shortURL); if(url != null) return url; res.status(400); return new ResponseError("No url with id %s found", shortURL); }, json()); // Fetch All Users get("/json/allusers", (req, res) -> DatabaseManager.FetchAllUsers(), json()); // Fetch Specific User get("/json/user/:username", (req, res) -> { System.out.println("\n\nUsing JSON Service"); String username = req.params(":username"); User user = DatabaseManager.FetchUser(username); if(user != null) return user; res.status(400); return new ResponseError("No user with id %s found", username); }, json()); // Fetch Urls of a Specific User get("/json/:user/urls", (req, res) -> { System.out.println("\n\nUsing JSON Service"); String username = req.params(":user"); return DatabaseManager.FetchAllURLForUser(username); }, json()); // Create a new User get("/json/newuser", (req, res) -> { System.out.println("\n\nUsing JSON Service"); DatabaseManager.CreateNewUser(req.queryParams("username"), req.queryParams("firstname"), req.queryParams("lastname"), req.queryParams("password")); res.redirect("/"); return "Creating New User"; }, json()); //DatabaseManager.CreateNewShortURL(); get("/json/newurl", (req, res) -> { System.out.println("\n\nUsing JSON Service"); UserAgent userAgent = UserAgent.parseUserAgentString(req.userAgent()); //GeoLocation geo = ResourceFetcher.GetCoordinates(req.ip()); //System.out.println("Lon: " + geo.getLongitude() + " Lat: " + geo.getLatitude()); DatabaseManager.CreateNewShortURL(req.queryParams("url"), req.queryParams("username"), userAgent.getBrowser().getName(), userAgent.getOperatingSystem().getName(), req.ip(), "0", "0"); res.redirect("/"); return "Creating New User"; }, json()); after("/json/*", (req, res) -> res.type("application/json")); } }
new_file: src/main/java/JSONServiceController.java
old_contents:
/** * Created by Siclait on 18/7/16. */ import Entity.URL; import Entity.User; import JSONTools.GeoLocation; import JSONTools.ResponseError; import eu.bitwalker.useragentutils.UserAgent; import static JSONTools.JSONUtil.json; import static spark.Spark.after; import static spark.Spark.get; public class JSONServiceController { public JSONServiceController() { // GETS // Fetch All Urls get("/json/allurls", (req, res) -> DatabaseManager.FetchAllURL(), json()); // Fetch Original of a Specific Short Url get("/json/original/:short", (req, res) -> { System.out.println("\n\nUsing Jason Service"); String shortURL = req.params(":short"); String url = DatabaseManager.FetchOriginalURL(shortURL); if(url != null) return url; res.status(400); return new ResponseError("No url with id %s found", shortURL); }, json()); // Fetch a Specific Short Url get("/json/url/:short", (req, res) -> { System.out.println("\n\nUsing Jason Service"); String shortURL = req.params(":short"); URL url = DatabaseManager.FetchURL(shortURL); if(url != null) return url; res.status(400); return new ResponseError("No url with id %s found", shortURL); }, json()); // Fetch All Users get("/json/allusers", (req, res) -> DatabaseManager.FetchAllUsers(), json()); // Fetch Specific User get("/json/user/:username", (req, res) -> { System.out.println("\n\nUsing JSON Service"); String username = req.params(":username"); User user = DatabaseManager.FetchUser(username); if(user != null) return user; res.status(400); return new ResponseError("No user with id %s found", username); }, json()); // Fetch Urls of a Specific User get("/json/:user/urls", (req, res) -> { System.out.println("\n\nUsing JSON Service"); String username = req.params(":user"); return DatabaseManager.FetchAllURLForUser(username); }, json()); // Create a new User get("/json/newuser", (req, res) -> { System.out.println("\n\nUsing JSON Service"); DatabaseManager.CreateNewUser(req.queryParams("username"), req.queryParams("firstname"), req.queryParams("lastname"), req.queryParams("password")); res.redirect("/"); return "Creating New User"; }, json()); //DatabaseManager.CreateNewShortURL(); get("/json/newurl", (req, res) -> { System.out.println("\n\nUsing JSON Service"); UserAgent userAgent = UserAgent.parseUserAgentString(req.userAgent()); GeoLocation geo = ResourceFetcher.GetCoordinates(req.ip()); System.out.println("Lon: " + geo.getLongitude() + " Lat: " + geo.getLatitude()); DatabaseManager.CreateNewShortURL(req.queryParams("url"), req.params("username"), userAgent.getBrowser().getName(), userAgent.getOperatingSystem().getName(), req.ip(), geo.getLongitude(), geo.getLatitude()); res.redirect("/"); return "Creating New User"; }, json()); //DatabaseManager.CheckUserCredentials(); after("/json/*", (req, res) -> res.type("application/json")); } }
message: Could not implement Geocoding algorithim: Ip used were private
old_file: src/main/java/JSONServiceController.java
subject: Could not implement Geocoding algorithim: Ip used were private
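The commit message for this row says the geolocation lookup was dropped because the incoming IPs were private; the diff simply hard-codes "0"/"0" for the coordinates. Not part of the commit, but as an illustration of how such addresses can be detected before attempting a lookup, java.net.InetAddress distinguishes loopback, link-local, and site-local addresses (the helper below is hypothetical):

```java
import java.net.InetAddress;
import java.net.UnknownHostException;

public class PrivateIpCheck {
    // True for loopback, link-local, or site-local (RFC 1918) addresses,
    // which a public geolocation service cannot resolve to coordinates.
    static boolean isPrivate(String ip) throws UnknownHostException {
        InetAddress addr = InetAddress.getByName(ip);
        return addr.isLoopbackAddress()
                || addr.isLinkLocalAddress()
                || addr.isSiteLocalAddress();
    }

    public static void main(String[] args) throws UnknownHostException {
        System.out.println(isPrivate("192.168.1.10")); // true
        System.out.println(isPrivate("8.8.8.8"));      // false
    }
}
```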
lang: Java
license: epl-1.0
commit: e378f30f299e7d9f3197fc3e05097a2c0a12e634
returncode: 0
repos: ControlSystemStudio/cs-studio,css-iter/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio
new_contents:
/** * */ package org.csstudio.diag.epics.pvtree; import java.util.ArrayList; import org.csstudio.platform.model.IProcessVariable; import org.csstudio.utility.pv.PV; import org.csstudio.utility.pv.PVListener; import org.csstudio.utility.pv.epics.EPICS_V3_PV; import org.csstudio.value.Severity; import org.csstudio.value.Value; import org.csstudio.value.ValueUtil; import org.eclipse.core.runtime.PlatformObject; import org.eclipse.swt.widgets.Display; /** One item in the PV tree model. * <p> * Since an 'item' is a PV, possibly for a record * which has inputs, and those inputs is what we * want to drill down, this class currently includes * almost all the logic behind the tree creation. * * @author Kay Kasemir */ class PVTreeItem extends PlatformObject implements IProcessVariable { static final boolean debug = false; @SuppressWarnings("nls") private static final String input_types[] = { "ai", "aai", "bi","mbbiDirect", "mbbi", "mbboDirect","longin", "waveform", "subArray", "stringin", }; @SuppressWarnings("nls") private static final String output_types[] = { "ao", "aao", "bo", "mbbo", "longout", "stringout", "fanout" }; // TODO: Handle "sub", "genSub", "compress", // "event", "histogram", "permissive", "sel", "seq", "state", /** The model to which this whole tree belongs. */ private final PVTreeModel model; /** The parent of this item, or <code>null</code>. */ private final PVTreeItem parent; /** The info provided by the parent or creator ("PV", "INPA", ...) */ private final String info; /** The name of this PV tree item as shown in the tree. */ private final String pv_name; /** The name of the record. * <p> * For example, the 'name' could be 'fred.SEVR', then 'fred' * would be the record name. */ private final String record_name; /** The PV used for getting the current value. */ private PV pv; /** Most recent value. */ private volatile String value = null; /** Most recent severity. */ private volatile Severity severity = null; private PVListener pv_listener = new PVListener() { public void pvDisconnected(PV pv) { value = "<disconnected>"; //$NON-NLS-1$ severity = null; updateValue(); } @SuppressWarnings("nls") public void pvValueUpdate(PV pv) { try { Value pv_value = pv.getValue(); value = ValueUtil.formatValueAndSeverity(pv_value); severity = pv_value.getSeverity(); updateValue(); } catch (Exception e) { e.printStackTrace(); } } }; /** The PV used for getting the record type. */ private PV type_pv; private String type; private PVListener type_pv_listener = new PVListener() { public void pvDisconnected(PV pv) {} public void pvValueUpdate(PV pv) { try { type = pv.getValue().format(); updateType(); } catch (Exception e) { e.printStackTrace(); } } }; /** Used to read the links of this pv. */ private int input_index; private PV link_pv = null; private String link_value; private PVListener link_pv_listener = new PVListener() { public void pvDisconnected(PV pv) {} public void pvValueUpdate(PV pv) { try { link_value = pv.getValue().format(); // The value could be // a) a record name followed by "... NPP NMS". Remove that. // b) a hardware input/output "@... " or "#...". Keep that. if (link_value.length() > 1 && link_value.charAt(0) != '@' && link_value.charAt(0) != '#') { int i = link_value.indexOf(' '); if (i > 0) link_value = link_value.substring(0, i); } updateInput(); } catch (Exception e) { e.printStackTrace(); } } }; /** Tree item children, populated with info from the input links. */ private ArrayList<PVTreeItem> links = new ArrayList<PVTreeItem>(); /** Create a new PV tree item. 
* @param model The model to which this whole tree belongs. * @param parent The parent of this item, or <code>null</code>. * @param info The info provided by the parent or creator ("PV", "INPA", ...) * @param pv_name The name of this PV entry. */ public PVTreeItem(PVTreeModel model, PVTreeItem parent, String info, String pv_name) { this.model = model; this.parent = parent; this.info = info; this.pv_name = pv_name; this.type = null; // In case this is "record.field", get the record name. int sep = pv_name.lastIndexOf('.'); if (sep > 0) record_name = pv_name.substring(0, sep); else record_name = pv_name; if (debug) { System.out.print("New Tree item '" + pv_name + "'"); //$NON-NLS-1$ //$NON-NLS-2$ System.out.println(", record name '" + record_name + "'"); //$NON-NLS-1$ //$NON-NLS-2$ } // Avoid loops. // If the model already contains an entry with this name, // we simply display this new item, but we won't // follow its input links. PVTreeItem other = model.findPV(pv_name); // Now add this one, otherwise the previous call would have found 'this'. if (parent != null) parent.links.add(this); try { pv = new EPICS_V3_PV(pv_name); pv.addListener(pv_listener); pv.start(); } catch (Exception e) { e.printStackTrace(); } // Get type from 'other', previously used PV or via CA if (other != null) { type = other.type; if (debug) System.out.println("Known item, not traversing inputs (again)"); //$NON-NLS-1$ } else { try { type_pv = new EPICS_V3_PV(record_name + ".RTYP", true); //$NON-NLS-1$ type_pv.addListener(type_pv_listener); type_pv.start(); } catch (Exception e) { e.printStackTrace(); } } } /** Dispose this and all child entries. */ public void dispose() { for (PVTreeItem item : links) item.dispose(); pv.removeListener(pv_listener); pv.stop(); pv = null; disposeLinkPV(); disposeTypePV(); } private void disposeTypePV() { if (type_pv != null) { type_pv.removeListener(type_pv_listener); type_pv.stop(); type_pv = null; } } private void disposeLinkPV() { if (link_pv != null) { link_pv.removeListener(link_pv_listener); link_pv.stop(); link_pv = null; } } /** @return Returns the name of this PV. */ public String getName() { return pv_name; } /** @return Severity of current value. May be <code>null</code>. */ public Severity getSeverity() { return severity; } // @see IProcessVariable public String getTypeId() { return IProcessVariable.TYPE_ID; } /** @return Returns the record type of this item or <code>null</code>. */ public String getType() { return type; } /** @return Returns the parent or <code>null</code>. */ public PVTreeItem getParent() { return parent; } /** @return Returns the first link or <code>null</code>. */ public PVTreeItem getFirstLink() { if (links.size() > 0) return links.get(0); return null; } /** @return Returns the all links. */ public PVTreeItem[] getLinks() { return (PVTreeItem[]) links.toArray(new PVTreeItem[links.size()]); } /** @return Returns <code>true</code> if this item has any links. */ public boolean hasLinks() { return links.size() > 0; } /** @return Returns a String. No really, it does! */ @SuppressWarnings("nls") public String toString() { StringBuffer b = new StringBuffer(); b.append(info); b.append(" '"); b.append(pv_name); b.append("'"); if (type != null) { b.append(" ("); b.append(type); b.append(")"); } if (value != null) { b.append(" = "); b.append(value); } return b.toString(); } /** Thread-save handling of the 'value' update. */ private void updateValue() { Display.getDefault().asyncExec(new Runnable() { public void run() { // Display the received type of this record. 
model.itemUpdated(PVTreeItem.this); } }); } /** Thread-save handling of the 'type' update. */ @SuppressWarnings("nls") private void updateType() { if (debug) System.out.println(pv_name + " received type '" + type + "'"); Display.getDefault().asyncExec(new Runnable() { public void run() { // Already disposed? if (type_pv == null) return; // We got the type, so close the connection. disposeTypePV(); // Display the received type of this record. model.itemChanged(PVTreeItem.this); if (type.startsWith("calc")) // Read the calc or calcout's first input getCalcInput(0); else { // read INP? for (String typ : input_types) if (type.equals(typ)) { getLink(record_name + ".INP"); return; } // read DOL? for (String typ : output_types) if (type.equals(typ)) { getLink(record_name + ".DOL"); return; } // Give up Plugin.logError("Unknown record type '" + type + "'"); } } }); } /** Helper for reading a calc record's input link. */ @SuppressWarnings("nls") private void getCalcInput(int i) { input_index = i; String link_name = record_name + ".INP" + Character.toString((char)('A' + input_index)); getLink(link_name); } /** Helper for reading any link by PV name. */ private void getLink(String link_name) { disposeLinkPV(); try { link_pv = new EPICS_V3_PV(link_name); link_pv.addListener(link_pv_listener); link_pv.start(); } catch (Exception e) { e.printStackTrace(); } } /** Thread-save handling of the 'input_value' update. */ @SuppressWarnings("nls") private void updateInput() { if (debug) System.out.println(link_pv.getName() + " received '" + link_value + "'"); Display.getDefault().asyncExec(new Runnable() { public void run() { if (link_pv == null) { if (debug) System.out.println(pv_name + " already disposed"); return; } boolean is_output = link_pv.getName().endsWith("DOL"); disposeLinkPV(); boolean is_calc = type.startsWith("calc"); String info; if (is_output) info = "DOL"; else if (is_calc) info = "INP" + Character.toString((char)('A' + input_index)); else info = "INP"; if (link_value.length() > 0) { new PVTreeItem(model, PVTreeItem.this, info, link_value); model.itemChanged(PVTreeItem.this); } if (is_calc && input_index < 11) // get INPB...INPL getCalcInput(input_index + 1); } }); } }
new_file: applications/plugins/org.csstudio.diag.epics.pvtree/src/org/csstudio/diag/epics/pvtree/PVTreeItem.java
old_contents:
/** * */ package org.csstudio.diag.epics.pvtree; import java.util.ArrayList; import org.csstudio.platform.model.IProcessVariable; import org.csstudio.utility.pv.PV; import org.csstudio.utility.pv.PVListener; import org.csstudio.utility.pv.epics.EPICS_V3_PV; import org.csstudio.value.Severity; import org.csstudio.value.ValueUtil; import org.eclipse.core.runtime.PlatformObject; import org.eclipse.swt.widgets.Display; /** One item in the PV tree model. * <p> * Since an 'item' is a PV, possibly for a record * which has inputs, and those inputs is what we * want to drill down, this class currently includes * almost all the logic behind the tree creation. * * @author Kay Kasemir */ class PVTreeItem extends PlatformObject implements IProcessVariable { static final boolean debug = false; @SuppressWarnings("nls") private static final String input_types[] = { "ai", "aai", "bi","mbbiDirect", "mbbi", "mbboDirect","longin", "waveform", "subArray", "stringin", }; @SuppressWarnings("nls") private static final String output_types[] = { "ao", "aao", "bo", "mbbo", "longout", "stringout", "fanout" }; // TODO: Handle "sub", "genSub", "compress", // "event", "histogram", "permissive", "sel", "seq", "state", /** The model to which this whole tree belongs. */ private final PVTreeModel model; /** The parent of this item, or <code>null</code>. */ private final PVTreeItem parent; /** The info provided by the parent or creator ("PV", "INPA", ...) */ private final String info; /** The name of this PV tree item as shown in the tree. */ private final String pv_name; /** The name of the record. * <p> * For example, the 'name' could be 'fred.SEVR', then 'fred' * would be the record name. */ private final String record_name; /** The PV used for getting the current value. */ private PV pv; /** Most recent value. */ private String value = null; /** Most recent severity. */ private Severity severity = null; private PVListener pv_listener = new PVListener() { public void pvDisconnected(PV pv) { value = "<disconnected>"; //$NON-NLS-1$ severity = null; updateValue(); } @SuppressWarnings("nls") public void pvValueUpdate(PV pv) { try { value = ValueUtil.formatValueAndSeverity(pv.getValue()); updateValue(); } catch (Exception e) { e.printStackTrace(); } } }; /** The PV used for getting the record type. */ private PV type_pv; private String type; private PVListener type_pv_listener = new PVListener() { public void pvDisconnected(PV pv) {} public void pvValueUpdate(PV pv) { try { type = pv.getValue().format(); updateType(); } catch (Exception e) { e.printStackTrace(); } } }; /** Used to read the links of this pv. */ private int input_index; private PV link_pv = null; private String link_value; private PVListener link_pv_listener = new PVListener() { public void pvDisconnected(PV pv) {} public void pvValueUpdate(PV pv) { try { link_value = pv.getValue().format(); // The value could be // a) a record name followed by "... NPP NMS". Remove that. // b) a hardware input/output "@... " or "#...". Keep that. if (link_value.length() > 1 && link_value.charAt(0) != '@' && link_value.charAt(0) != '#') { int i = link_value.indexOf(' '); if (i > 0) link_value = link_value.substring(0, i); } updateInput(); } catch (Exception e) { e.printStackTrace(); } } }; /** Tree item children, populated with info from the input links. */ private ArrayList<PVTreeItem> links = new ArrayList<PVTreeItem>(); /** Create a new PV tree item. * @param model The model to which this whole tree belongs. * @param parent The parent of this item, or <code>null</code>. 
* @param info The info provided by the parent or creator ("PV", "INPA", ...) * @param pv_name The name of this PV entry. */ public PVTreeItem(PVTreeModel model, PVTreeItem parent, String info, String pv_name) { this.model = model; this.parent = parent; this.info = info; this.pv_name = pv_name; this.type = null; // In case this is "record.field", get the record name. int sep = pv_name.lastIndexOf('.'); if (sep > 0) record_name = pv_name.substring(0, sep); else record_name = pv_name; if (debug) { System.out.print("New Tree item '" + pv_name + "'"); //$NON-NLS-1$ //$NON-NLS-2$ System.out.println(", record name '" + record_name + "'"); //$NON-NLS-1$ //$NON-NLS-2$ } // Avoid loops. // If the model already contains an entry with this name, // we simply display this new item, but we won't // follow its input links. PVTreeItem other = model.findPV(pv_name); // Now add this one, otherwise the previous call would have found 'this'. if (parent != null) parent.links.add(this); try { pv = new EPICS_V3_PV(pv_name); pv.addListener(pv_listener); pv.start(); } catch (Exception e) { e.printStackTrace(); } // Get type from 'other', previously used PV or via CA if (other != null) { type = other.type; if (debug) System.out.println("Known item, not traversing inputs (again)"); //$NON-NLS-1$ } else { try { type_pv = new EPICS_V3_PV(record_name + ".RTYP", true); //$NON-NLS-1$ type_pv.addListener(type_pv_listener); type_pv.start(); } catch (Exception e) { e.printStackTrace(); } } } /** Dispose this and all child entries. */ public void dispose() { for (PVTreeItem item : links) item.dispose(); pv.removeListener(pv_listener); pv.stop(); pv = null; disposeLinkPV(); disposeTypePV(); } private void disposeTypePV() { if (type_pv != null) { type_pv.removeListener(type_pv_listener); type_pv.stop(); type_pv = null; } } private void disposeLinkPV() { if (link_pv != null) { link_pv.removeListener(link_pv_listener); link_pv.stop(); link_pv = null; } } /** @return Returns the name of this PV. */ public String getName() { return pv_name; } /** @return Severity of current value. May be <code>null</code>. */ public Severity getSeverity() { return severity; } // @see IProcessVariable public String getTypeId() { return IProcessVariable.TYPE_ID; } /** @return Returns the record type of this item or <code>null</code>. */ public String getType() { return type; } /** @return Returns the parent or <code>null</code>. */ public PVTreeItem getParent() { return parent; } /** @return Returns the first link or <code>null</code>. */ public PVTreeItem getFirstLink() { if (links.size() > 0) return links.get(0); return null; } /** @return Returns the all links. */ public PVTreeItem[] getLinks() { return (PVTreeItem[]) links.toArray(new PVTreeItem[links.size()]); } /** @return Returns <code>true</code> if this item has any links. */ public boolean hasLinks() { return links.size() > 0; } /** @return Returns a String. No really, it does! */ @SuppressWarnings("nls") public String toString() { StringBuffer b = new StringBuffer(); b.append(info); b.append(" '"); b.append(pv_name); b.append("'"); if (type != null) { b.append(" ("); b.append(type); b.append(")"); } if (value != null) { b.append(" = "); b.append(value); } return b.toString(); } /** Thread-save handling of the 'value' update. */ private void updateValue() { Display.getDefault().asyncExec(new Runnable() { public void run() { // Display the received type of this record. model.itemUpdated(PVTreeItem.this); } }); } /** Thread-save handling of the 'type' update. 
*/ @SuppressWarnings("nls") private void updateType() { if (debug) System.out.println(pv_name + " received type '" + type + "'"); Display.getDefault().asyncExec(new Runnable() { public void run() { // Already disposed? if (type_pv == null) return; // We got the type, so close the connection. disposeTypePV(); // Display the received type of this record. model.itemChanged(PVTreeItem.this); if (type.startsWith("calc")) // Read the calc or calcout's first input getCalcInput(0); else { // read INP? for (String typ : input_types) if (type.equals(typ)) { getLink(record_name + ".INP"); return; } // read DOL? for (String typ : output_types) if (type.equals(typ)) { getLink(record_name + ".DOL"); return; } // Give up Plugin.logError("Unknown record type '" + type + "'"); } } }); } /** Helper for reading a calc record's input link. */ @SuppressWarnings("nls") private void getCalcInput(int i) { input_index = i; String link_name = record_name + ".INP" + Character.toString((char)('A' + input_index)); getLink(link_name); } /** Helper for reading any link by PV name. */ private void getLink(String link_name) { disposeLinkPV(); try { link_pv = new EPICS_V3_PV(link_name); link_pv.addListener(link_pv_listener); link_pv.start(); } catch (Exception e) { e.printStackTrace(); } } /** Thread-save handling of the 'input_value' update. */ @SuppressWarnings("nls") private void updateInput() { if (debug) System.out.println(link_pv.getName() + " received '" + link_value + "'"); Display.getDefault().asyncExec(new Runnable() { public void run() { if (link_pv == null) { if (debug) System.out.println(pv_name + " already disposed"); return; } boolean is_output = link_pv.getName().endsWith("DOL"); disposeLinkPV(); boolean is_calc = type.startsWith("calc"); String info; if (is_output) info = "DOL"; else if (is_calc) info = "INP" + Character.toString((char)('A' + input_index)); else info = "INP"; if (link_value.length() > 0) { new PVTreeItem(model, PVTreeItem.this, info, link_value); model.itemChanged(PVTreeItem.this); } if (is_calc && input_index < 11) // get INPB...INPL getCalcInput(input_index + 1); } }); } }
message: tree correctly reflects severity via color (again)
old_file: applications/plugins/org.csstudio.diag.epics.pvtree/src/org/csstudio/diag/epics/pvtree/PVTreeItem.java
subject: tree correctly reflects severity via color (again)
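Compared with old_contents, the new_contents in this row stores the PV's severity in the listener callback and marks the value and severity fields as volatile, so writes made on the PV listener thread are visible to the UI thread that colors the tree. A minimal illustration of that publication pattern (the class below is illustrative, not taken from cs-studio):

```java
public class LatestReading {
    // Written from a background listener thread, read from the UI thread;
    // volatile ensures the reader always observes the most recent write.
    private volatile String value;
    private volatile String severity;

    // Called by the update/listener thread.
    void onUpdate(String newValue, String newSeverity) {
        this.value = newValue;
        this.severity = newSeverity;
    }

    // Called by the UI thread when rendering the item.
    String describe() {
        return value + " [" + severity + "]";
    }
}
```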
lang: Java
license: epl-1.0
commit: 896c8975b8aa39eddda67d17d3baabd693f40608
returncode: 0
repos: ESSICS/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,css-iter/cs-studio,ESSICS/cs-studio
new_contents:
/******************************************************************************* * Copyright (c) 2011 Oak Ridge National Laboratory. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html ******************************************************************************/ package org.csstudio.diag.epics.pvtree; import org.csstudio.csdata.ProcessVariable; import org.eclipse.core.runtime.IAdapterFactory; /** Adapter from PV Tree model to {@link ProcessVariable} * @author Kay Kasemir */ public class PVTreeItemAdapter implements IAdapterFactory { final private Class<?>[] targets = new Class<?>[] { String.class, ProcessVariable.class }; @Override public Class<?>[] getAdapterList() { return targets; } @Override public <T> T getAdapter(final Object adaptableObject, final Class<T> adapterType) { final String pv_name = ((PVTreeItem)adaptableObject).getPVName(); if (adapterType == String.class) return adapterType.cast(pv_name); else if (adapterType == ProcessVariable.class) return adapterType.cast(new ProcessVariable(pv_name)); else return null; } }
new_file: applications/diag/diag-plugins/org.csstudio.diag.epics.pvtree/src/org/csstudio/diag/epics/pvtree/PVTreeItemAdapter.java
old_contents:
/******************************************************************************* * Copyright (c) 2011 Oak Ridge National Laboratory. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html ******************************************************************************/ package org.csstudio.diag.epics.pvtree; import org.csstudio.csdata.ProcessVariable; import org.eclipse.core.runtime.IAdapterFactory; /** Adapter from PV Tree model to {@link ProcessVariable} * @author Kay Kasemir */ public class PVTreeItemAdapter implements IAdapterFactory { final private Class<?>[] targets = new Class<?>[] { String.class, ProcessVariable.class }; @Override public Class<?>[] getAdapterList() { return targets; } @SuppressWarnings("rawtypes") @Override public Object getAdapter(final Object adaptableObject, final Class adapterType) { final String pv_name = ((PVTreeItem)adaptableObject).getPVName(); if (adapterType == String.class) return pv_name; else if (adapterType == ProcessVariable.class) return new ProcessVariable(pv_name); else return null; } }
message: pvtree: Fix cast warning
old_file: applications/diag/diag-plugins/org.csstudio.diag.epics.pvtree/src/org/csstudio/diag/epics/pvtree/PVTreeItemAdapter.java
subject: pvtree: Fix cast warning
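The fix in this row replaces the raw-typed getAdapter(Object, Class) override with the generic <T> getAdapter(Object, Class<T>) form and uses Class.cast, which removes the rawtypes/unchecked-cast warnings. A small standalone sketch of that idiom (the adapt method below is illustrative):

```java
public class CheckedCastDemo {
    // Class<T>.cast performs a runtime-checked cast and lets the compiler
    // tie the return type to the Class token, so no unchecked cast is needed.
    static <T> T adapt(Object source, Class<T> targetType) {
        if (targetType == String.class) {
            return targetType.cast(String.valueOf(source));
        }
        return null; // no adapter available for other target types
    }

    public static void main(String[] args) {
        String s = adapt(42, String.class);
        System.out.println(s); // "42"
    }
}
```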
lang: Java
license: agpl-3.0
commit: 9a8968e3ea2902ab462ab302bfa66004fb3b7ecd
returncode: 0
repos: elki-project/elki,elki-project/elki,elki-project/elki
new_contents:
package de.lmu.ifi.dbs.evaluation; import de.lmu.ifi.dbs.data.ClassLabel; import de.lmu.ifi.dbs.data.MetricalObject; import de.lmu.ifi.dbs.database.Database; /** * Wrapper to hold a pair of training and test data sets. * The labels of both, training and test set, are provided in labels. * * @author Arthur Zimek (<a href="mailto:[email protected]">[email protected]</a>) */ public class TrainingAndTestSet<M extends MetricalObject> { /** * The overall labels. */ private ClassLabel[] labels; /** * The training data. */ private Database<M> training; /** * The test data. */ private Database<M> test; /** * Provides a pair of training and test data sets * out of the given two databases. */ public TrainingAndTestSet(Database<M> training, Database<M> test, ClassLabel[] labels) { this.training = training; this.test = test; this.labels = labels; } /** * Returns the test data set. * * * @return the test data set */ public Database<M> getTest() { return test; } /** * Returns the training data set. * * * @return the training data set */ public Database<M> getTraining() { return training; } /** * Returns the overall labels. * * * @return the overall labels */ public ClassLabel[] getLabels() { return labels; } }
new_file: src/de/lmu/ifi/dbs/evaluation/TrainingAndTestSet.java
old_contents:
package de.lmu.ifi.dbs.evaluation; import de.lmu.ifi.dbs.data.ClassLabel; import de.lmu.ifi.dbs.data.MetricalObject; import de.lmu.ifi.dbs.database.Database; /** * Wrapper to hold a pair of training and test data sets. * * @author Arthur Zimek (<a href="mailto:[email protected]">[email protected]</a>) */ public class TrainingAndTestSet<M extends MetricalObject> { // TODO alle klassen beachten private ClassLabel[] labels; /** * The training data. */ private Database<M> training; /** * The test data. */ private Database<M> test; /** * Provides a pair of training and test data sets * out of the given two databases. */ public TrainingAndTestSet(Database<M> training, Database<M> test, ClassLabel[] labels) { this.training = training; this.test = test; this.labels = labels; } /** * Returns the test data set. * * * @return the test data set */ public Database<M> getTest() { return test; } /** * Returns the training data set. * * * @return the training data set */ public Database<M> getTraining() { return training; } }
message: labels
old_file: src/de/lmu/ifi/dbs/evaluation/TrainingAndTestSet.java
subject: labels
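This row's change is small: a labels array plus its getter is added to the training/test wrapper. For context only, a generic holder with the same shape could look like the hypothetical sketch below (names and types are not from ELKI):

```java
import java.util.List;

// Hypothetical generic holder pairing training and test partitions with
// the overall class labels, mirroring the shape of the change in this row.
public class LabeledSplit<T> {
    private final List<T> training;
    private final List<T> test;
    private final String[] labels;

    public LabeledSplit(List<T> training, List<T> test, String[] labels) {
        this.training = training;
        this.test = test;
        this.labels = labels;
    }

    public List<T> getTraining() { return training; }
    public List<T> getTest()     { return test; }
    public String[] getLabels()  { return labels; }
}
```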
lang: Java
license: lgpl-2.1
commit: 427587220199e9008b6f42e6cb3884cedd8791df
returncode: 0
repos: levants/lightmare
new_contents:
package org.lightmare.config; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Properties; import java.util.Set; import org.apache.log4j.Logger; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.jpa.datasource.PoolConfig; import org.lightmare.jpa.datasource.PoolConfig.PoolProviderType; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.IOUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.StringUtils; import org.yaml.snakeyaml.Yaml; /** * Retrieves and caches configuration properties from configuration file or from * {@link org.lightmare.deploy.MetaCreator.Builder} instance * * @author levan * @since 0.0.21-SNAPSHOT */ public class Configuration implements Cloneable { // Cache for all configuration passed from API or read from file private final Map<Object, Object> config = new HashMap<Object, Object>(); // Instance of pool configuration private static final PoolConfig POOL_CONFIG = new PoolConfig(); // Runtime to get available processors private static final Runtime RUNTIME = Runtime.getRuntime(); // Resource path (META-INF) private static final String META_INF_PATH = "META-INF/"; // Error messages private static final String COULD_NOT_LOAD_CONFIG_ERROR = "Could not load configuration"; private static final String COULD_NOT_OPEN_FILE_ERROR = "Could not open config file"; private static final String RESOURCE_NOT_EXISTS_ERROR = "Configuration resource doesn't exist"; private static final Logger LOG = Logger.getLogger(Configuration.class); public Configuration() { } /** * Gets value on passed generic key K of passed {@link Map} as {@link Map} * of generic key values * * @param key * @param from * @return {@link Map}<code><K, V></code> */ private <K, V> Map<K, V> getAsMap(Object key, Map<Object, Object> from) { if (from == null) { from = config; } Map<K, V> value = ObjectUtils.cast(CollectionUtils.getAsMap(key, from)); return value; } /** * Gets value on passed generic key K of cached configuration as {@link Map} * of generic key values * * @param key * @return {@link Map}<code><K, V></code> */ private <K, V> Map<K, V> getAsMap(Object key) { return getAsMap(key, null); } /** * Sets value of sub {@link Map} on passed sub key contained in cached * configuration on passed key * * @param key * @param subKey * @param value */ private <K, V> void setSubConfigValue(Object key, K subKey, V value) { Map<K, V> subConfig = getAsMap(key); if (subConfig == null) { subConfig = new HashMap<K, V>(); config.put(key, subConfig); } subConfig.put(subKey, value); } /** * Gets value of sub {@link Map} on passed sub key contained in cached * configuration on passed key * * @param key * @param subKey * @param defaultValue * @return V */ private <K, V> V getSubConfigValue(Object key, K subKey, V defaultValue) { V def; Map<K, V> subConfig = getAsMap(key); if (CollectionUtils.valid(subConfig)) { def = subConfig.get(subKey); if (def == null) { def = defaultValue; } } else { def = defaultValue; } return def; } /** * Check if sub {@link Map} contains passed sub key contained in cached * configuration on passed key * * @param key * @param subKey * @return <code>boolean</code> */ private <K> boolean containsSubConfigKey(Object key, K subKey) { boolean valid; Map<K, ?> subConfig = getAsMap(key); valid = CollectionUtils.valid(subConfig); if (valid) { valid = subConfig.containsKey(subKey); } return valid; } private <K> 
boolean containsConfigKey(K key) { return containsSubConfigKey(ConfigKeys.DEPLOY_CONFIG.key, key); } private <K, V> V getSubConfigValue(Object key, K subKey) { return getSubConfigValue(key, subKey, null); } private <K, V> void setConfigValue(K subKey, V value) { setSubConfigValue(ConfigKeys.DEPLOY_CONFIG.key, subKey, value); } private <K, V> V getConfigValue(K subKey, V defaultValue) { return getSubConfigValue(ConfigKeys.DEPLOY_CONFIG.key, subKey, defaultValue); } private <K, V> V getConfigValue(K subKey) { return getSubConfigValue(ConfigKeys.DEPLOY_CONFIG.key, subKey); } private <K, V> Map<K, V> getWithInitialization(Object key) { Map<K, V> result = getConfigValue(key); if (result == null) { result = new HashMap<K, V>(); setConfigValue(key, result); } return result; } private <K, V> void setWithInitialization(Object key, K subKey, V value) { Map<K, V> result = getWithInitialization(key); result.put(subKey, value); } /** * Gets value for specific key from connection persistence sub {@link Map} * of configuration if value is null then returns passed default value * * @param key * @return <code>V</code> */ public <V> V getPersistenceConfigValue(Object key, V defaultValue) { V value = CollectionUtils.getSubValue(config, ConfigKeys.DEPLOY_CONFIG.key, ConfigKeys.PERSISTENCE_CONFIG.key, key); if (value == null) { value = defaultValue; } return value; } /** * Gets value for specific key from connection persistence sub {@link Map} * of configuration * * @param key * @return <code>V</code> */ public <V> V getPersistenceConfigValue(Object key) { return getPersistenceConfigValue(key, null); } /** * Sets specific value for appropriated key in persistence configuration sub * {@link Map} of configuration * * @param key * @param value */ public void setPersistenceConfigValue(Object key, Object value) { setWithInitialization(ConfigKeys.PERSISTENCE_CONFIG.key, key, value); } /** * Gets value for specific key from connection pool configuration sub * {@link Map} of configuration if value is null then returns passed default * value * * @param key * @return <code>V</code> */ public <V> V getPoolConfigValue(Object key, V defaultValue) { V value = CollectionUtils.getSubValue(config, ConfigKeys.DEPLOY_CONFIG.key, ConfigKeys.POOL_CONFIG.key, key); if (value == null) { value = defaultValue; } return value; } /** * Gets value for specific key from connection pool configuration sub * {@link Map} of configuration * * @param key * @return <code>V</code> */ public <V> V getPoolConfigValue(Object key) { V value = getPoolConfigValue(key, null); return value; } /** * Sets specific value for appropriated key in connection pool configuration * sub {@link Map} of configuration * * @param key * @param value */ public void setPoolConfigValue(Object key, Object value) { setWithInitialization(ConfigKeys.POOL_CONFIG.key, key, value); } /** * Configuration for {@link PoolConfig} instance */ private void configurePool() { Map<Object, Object> poolProperties = getPoolConfigValue(ConfigKeys.POOL_PROPERTIES.key); if (CollectionUtils.valid(poolProperties)) { setPoolProperties(poolProperties); } String type = getPoolConfigValue(ConfigKeys.POOL_PROVIDER_TYPE.key); if (StringUtils.valid(type)) { getPoolConfig().setPoolProviderType(type); } String path = getPoolConfigValue(ConfigKeys.POOL_PROPERTIES_PATH.key); if (StringUtils.valid(path)) { setPoolPropertiesPath(path); } } private <K, V> void setIfContains(K key, V value) { boolean contains = containsConfigKey(key); if (ObjectUtils.notTrue(contains)) { setConfigValue(key, value); } } /** * 
Configures server from properties and default values */ private void configureServer() { // Sets default values to remote server configuration setIfContains(ConfigKeys.IP_ADDRESS.key, ConfigKeys.IP_ADDRESS.value); setIfContains(ConfigKeys.PORT.key, ConfigKeys.PORT.value); setIfContains(ConfigKeys.BOSS_POOL.key, ConfigKeys.BOSS_POOL.value); boolean contains = containsConfigKey(ConfigKeys.WORKER_POOL.key); if (ObjectUtils.notTrue(contains)) { int defaultWorkers = ConfigKeys.WORKER_POOL.getValue(); int workers = (RUNTIME.availableProcessors() * defaultWorkers); String workerProperty = String.valueOf(workers); setConfigValue(ConfigKeys.WORKER_POOL.key, workerProperty); } setIfContains(ConfigKeys.CONNECTION_TIMEOUT.key, ConfigKeys.CONNECTION_TIMEOUT.value); } /** * Merges configuration with default properties */ public void configureDeployments() { // Checks if application run in hot deployment mode Boolean hotDeployment = getConfigValue(ConfigKeys.HOT_DEPLOYMENT.key); if (hotDeployment == null) { setConfigValue(ConfigKeys.HOT_DEPLOYMENT.key, Boolean.FALSE); hotDeployment = getConfigValue(ConfigKeys.HOT_DEPLOYMENT.key); } // Check if application needs directory watch service boolean watchStatus; if (ObjectUtils.notTrue(hotDeployment)) { watchStatus = Boolean.TRUE; } else { watchStatus = Boolean.FALSE; } setConfigValue(ConfigKeys.WATCH_STATUS.key, watchStatus); // Sets deployments directories Set<DeploymentDirectory> deploymentPaths = getConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key); if (deploymentPaths == null) { deploymentPaths = ConfigKeys.DEMPLOYMENT_PATH.getValue(); setConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key, deploymentPaths); } // Sets remote control check Boolean remoteControl = getConfigValue(ConfigKeys.REMOTE_CONTROL.key); if (ObjectUtils.notNull(remoteControl)) { setRemoteControl(remoteControl); } } /** * Configures server and connection pooling */ public void configure() { configureServer(); configureDeployments(); configurePool(); } /** * Merges two {@link Map}s and if second {@link Map}'s value is instance of * {@link Map} merges this value with first {@link Map}'s value recursively * * @param map1 * @param map2 * @return <code>{@link Map}<Object, Object></code> */ protected Map<Object, Object> deepMerge(Map<Object, Object> map1, Map<Object, Object> map2) { if (map1 == null) { map1 = map2; } else { Set<Map.Entry<Object, Object>> entries2 = map2.entrySet(); Object key; Map<Object, Object> value1; Object value2; Map<Object, Object> mapValue2; Object mergedValue; for (Map.Entry<Object, Object> entry2 : entries2) { key = entry2.getKey(); value2 = entry2.getValue(); if (value2 instanceof Map) { value1 = CollectionUtils.getAsMap(key, map1); mapValue2 = ObjectUtils.cast(value2); mergedValue = deepMerge(value1, mapValue2); } else { mergedValue = value2; } if (ObjectUtils.notNull(mergedValue)) { map1.put(key, mergedValue); } } } return map1; } /** * Reads configuration from passed properties * * @param configuration */ public void configure(Map<Object, Object> configuration) { deepMerge(config, configuration); } /** * Reads configuration from passed file path * * @param configuration */ public void configure(String path) throws IOException { File yamlFile = new File(path); if (yamlFile.exists()) { InputStream stream = new FileInputStream(yamlFile); try { Yaml yaml = new Yaml(); Object configuration = yaml.load(stream); if (configuration instanceof Map) { Map<Object, Object> innerConfig = ObjectUtils .cast(configuration); configure(innerConfig); } } finally { IOUtils.close(stream); } } } 
/** * Gets value associated with particular key as {@link String} instance * * @param key * @return {@link String} */ public String getStringValue(String key) { String textValue; Object value = config.get(key); if (value == null) { textValue = null; } else { textValue = value.toString(); } return textValue; } /** * Gets value associated with particular key as <code>int</code> instance * * @param key * @return {@link String} */ public int getIntValue(String key) { String value = getStringValue(key); return Integer.parseInt(value); } /** * Gets value associated with particular key as <code>long</code> instance * * @param key * @return {@link String} */ public long getLongValue(String key) { String value = getStringValue(key); return Long.parseLong(value); } /** * Gets value associated with particular key as <code>boolean</code> * instance * * @param key * @return {@link String} */ public boolean getBooleanValue(String key) { String value = getStringValue(key); return Boolean.parseBoolean(value); } public void putValue(String key, String value) { config.put(key, value); } /** * Load {@link Configuration} in memory as {@link Map} of parameters * * @throws IOException */ public void loadFromStream(InputStream propertiesStream) throws IOException { try { Properties props = new Properties(); props.load(propertiesStream); for (String propertyName : props.stringPropertyNames()) { config.put(propertyName, props.getProperty(propertyName)); } } catch (IOException ex) { LOG.error(COULD_NOT_LOAD_CONFIG_ERROR, ex); } finally { IOUtils.close(propertiesStream); } } /** * Loads configuration form file * * @throws IOException */ public void loadFromFile() throws IOException { String configFilePath = ConfigKeys.CONFIG_FILE.getValue(); try { File configFile = new File(configFilePath); if (configFile.exists()) { InputStream propertiesStream = new FileInputStream(configFile); loadFromStream(propertiesStream); } else { configFile.mkdirs(); } } catch (IOException ex) { LOG.error(COULD_NOT_OPEN_FILE_ERROR, ex); } } /** * Loads configuration form file by passed file path * * @param configFilename * @throws IOException */ public void loadFromFile(String configFilename) throws IOException { try { InputStream propertiesStream = new FileInputStream(new File( configFilename)); loadFromStream(propertiesStream); } catch (IOException ex) { LOG.error(COULD_NOT_OPEN_FILE_ERROR, ex); } } /** * Loads configuration from file contained in classpath * * @param resourceName * @param loader */ public void loadFromResource(String resourceName, ClassLoader loader) throws IOException { InputStream resourceStream = loader.getResourceAsStream(StringUtils .concat(META_INF_PATH, resourceName)); if (resourceStream == null) { LOG.error(RESOURCE_NOT_EXISTS_ERROR); } else { loadFromStream(resourceStream); } } public static String getAdminUsersPath() { return ConfigKeys.ADMIN_USERS_PATH.getValue(); } public static void setAdminUsersPath(String adminUsersPath) { ConfigKeys.ADMIN_USERS_PATH.value = adminUsersPath; } public static void setRemoteControl(boolean remoteControl) { ConfigKeys.REMOTE_CONTROL.value = remoteControl; } public static boolean getRemoteControl() { return ConfigKeys.REMOTE_CONTROL.getValue(); } public boolean isRemote() { return ConfigKeys.REMOTE.getValue(); } public void setRemote(boolean remote) { ConfigKeys.REMOTE.value = remote; } public static boolean isServer() { return ConfigKeys.SERVER.getValue(); } public static void setServer(boolean server) { ConfigKeys.SERVER.value = server; } public boolean isClient() { return 
getConfigValue(ConfigKeys.CLIENT.key, Boolean.FALSE); } public void setClient(boolean client) { setConfigValue(ConfigKeys.CLIENT.key, client); } /** * Adds path for deployments file or directory * * @param path * @param scan */ public void addDeploymentPath(String path, boolean scan) { Set<DeploymentDirectory> deploymentPaths = getConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key); if (deploymentPaths == null) { deploymentPaths = new HashSet<DeploymentDirectory>(); setConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key, deploymentPaths); } deploymentPaths.add(new DeploymentDirectory(path, scan)); } /** * Adds path for data source file * * @param path */ public void addDataSourcePath(String path) { Set<String> dataSourcePaths = getConfigValue(ConfigKeys.DATA_SOURCE_PATH.key); if (dataSourcePaths == null) { dataSourcePaths = new HashSet<String>(); setConfigValue(ConfigKeys.DATA_SOURCE_PATH.key, dataSourcePaths); } dataSourcePaths.add(path); } public Set<DeploymentDirectory> getDeploymentPath() { return getConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key); } public Set<String> getDataSourcePath() { return getConfigValue(ConfigKeys.DATA_SOURCE_PATH.key); } public String[] getLibraryPaths() { return getConfigValue(ConfigKeys.LIBRARY_PATH.key); } public void setLibraryPaths(String[] libraryPaths) { setConfigValue(ConfigKeys.LIBRARY_PATH.key, libraryPaths); } public boolean isHotDeployment() { return getConfigValue(ConfigKeys.HOT_DEPLOYMENT.key, Boolean.FALSE); } public void setHotDeployment(boolean hotDeployment) { setConfigValue(ConfigKeys.HOT_DEPLOYMENT.key, hotDeployment); } public boolean isWatchStatus() { return getConfigValue(ConfigKeys.WATCH_STATUS.key, Boolean.FALSE); } public void setWatchStatus(boolean watchStatus) { setConfigValue(ConfigKeys.WATCH_STATUS.key, watchStatus); } /** * Property for persistence configuration * * @return <code>boolean</code> */ public boolean isScanForEntities() { return getPersistenceConfigValue(ConfigKeys.SCAN_FOR_ENTITIES.key, Boolean.FALSE); } public void setScanForEntities(boolean scanForEntities) { setPersistenceConfigValue(ConfigKeys.SCAN_FOR_ENTITIES.key, scanForEntities); } public String getAnnotatedUnitName() { return getPersistenceConfigValue(ConfigKeys.ANNOTATED_UNIT_NAME.key); } public void setAnnotatedUnitName(String annotatedUnitName) { setPersistenceConfigValue(ConfigKeys.ANNOTATED_UNIT_NAME.key, annotatedUnitName); } public String getPersXmlPath() { return getPersistenceConfigValue(ConfigKeys.PERSISTENCE_XML_PATH.key); } public void setPersXmlPath(String persXmlPath) { setPersistenceConfigValue(ConfigKeys.PERSISTENCE_XML_PATH.key, persXmlPath); } public boolean isPersXmlFromJar() { return getPersistenceConfigValue( ConfigKeys.PERSISTENCE_XML_FROM_JAR.key, Boolean.FALSE); } public void setPersXmlFromJar(boolean persXmlFromJar) { setPersistenceConfigValue(ConfigKeys.PERSISTENCE_XML_FROM_JAR.key, persXmlFromJar); } public boolean isSwapDataSource() { return getPersistenceConfigValue(ConfigKeys.SWAP_DATASOURCE.key, Boolean.FALSE); } public void setSwapDataSource(boolean swapDataSource) { setPersistenceConfigValue(ConfigKeys.SWAP_DATASOURCE.key, swapDataSource); } public boolean isScanArchives() { return getPersistenceConfigValue(ConfigKeys.SCAN_ARCHIVES.key, Boolean.FALSE); } public void setScanArchives(boolean scanArchives) { setPersistenceConfigValue(ConfigKeys.SCAN_ARCHIVES.key, scanArchives); } public boolean isPooledDataSource() { return getPersistenceConfigValue(ConfigKeys.POOLED_DATA_SOURCE.key, Boolean.FALSE); } public void setPooledDataSource(boolean 
pooledDataSource) { setPersistenceConfigValue(ConfigKeys.POOLED_DATA_SOURCE.key, pooledDataSource); } public Map<Object, Object> getPersistenceProperties() { return getPersistenceConfigValue(ConfigKeys.PERSISTENCE_PROPERTIES.key); } public void setPersistenceProperties( Map<Object, Object> persistenceProperties) { setPersistenceConfigValue(ConfigKeys.PERSISTENCE_PROPERTIES.key, persistenceProperties); } /** * Gets cached {@link PoolConfig} instance a connection pool configuration * * @return {@link PoolConfig} */ public static PoolConfig getPoolConfig() { return POOL_CONFIG; } public void setDataSourcePooledType(boolean dsPooledType) { PoolConfig poolConfig = getPoolConfig(); poolConfig.setPooledDataSource(dsPooledType); } public void setPoolPropertiesPath(String path) { PoolConfig poolConfig = getPoolConfig(); poolConfig.setPoolPath(path); } public void setPoolProperties( Map<? extends Object, ? extends Object> properties) { PoolConfig poolConfig = getPoolConfig(); poolConfig.getPoolProperties().putAll(properties); } public void addPoolProperty(Object key, Object value) { PoolConfig poolConfig = getPoolConfig(); poolConfig.getPoolProperties().put(key, value); } public void setPoolProviderType(PoolProviderType poolProviderType) { PoolConfig poolConfig = getPoolConfig(); poolConfig.setPoolProviderType(poolProviderType); } @Override public Object clone() throws CloneNotSupportedException { // Deep clone for configuration Configuration cloneConfig = (Configuration) super.clone(); cloneConfig.config.clear(); cloneConfig.configure(this.config); return cloneConfig; } }
src/main/java/org/lightmare/config/Configuration.java
package org.lightmare.config; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Properties; import java.util.Set; import org.apache.log4j.Logger; import org.lightmare.cache.DeploymentDirectory; import org.lightmare.jpa.datasource.PoolConfig; import org.lightmare.jpa.datasource.PoolConfig.PoolProviderType; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.IOUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.StringUtils; import org.yaml.snakeyaml.Yaml; /** * Retrieves and caches configuration properties from configuration file or from * {@link org.lightmare.deploy.MetaCreator.Builder} instance * * @author levan * @since 0.0.21-SNAPSHOT */ public class Configuration implements Cloneable { // Cache for all configuration passed from API or read from file private final Map<Object, Object> config = new HashMap<Object, Object>(); // Instance of pool configuration private static final PoolConfig POOL_CONFIG = new PoolConfig(); // Runtime to get available processors private static final Runtime RUNTIME = Runtime.getRuntime(); // Resource path (META-INF) private static final String META_INF_PATH = "META-INF/"; // Error messages private static final String COULD_NOT_LOAD_CONFIG_ERROR = "Could not load configuration"; private static final String COULD_NOT_OPEN_FILE_ERROR = "Could not open config file"; private static final String RESOURCE_NOT_EXISTS_ERROR = "Configuration resource doesn't exist"; private static final Logger LOG = Logger.getLogger(Configuration.class); public Configuration() { } /** * Gets value on passed generic key K of passed {@link Map} as {@link Map} * of generic key values * * @param key * @param from * @return {@link Map}<code><K, V></code> */ private <K, V> Map<K, V> getAsMap(Object key, Map<Object, Object> from) { if (from == null) { from = config; } Map<K, V> value = ObjectUtils.cast(CollectionUtils.getAsMap(key, from)); return value; } /** * Gets value on passed generic key K of cached configuration as {@link Map} * of generic key values * * @param key * @return {@link Map}<code><K, V></code> */ private <K, V> Map<K, V> getAsMap(Object key) { return getAsMap(key, null); } /** * Sets value of sub {@link Map} on passed sub key contained in cached * configuration on passed key * * @param key * @param subKey * @param value */ private <K, V> void setSubConfigValue(Object key, K subKey, V value) { Map<K, V> subConfig = getAsMap(key); if (subConfig == null) { subConfig = new HashMap<K, V>(); config.put(key, subConfig); } subConfig.put(subKey, value); } /** * Gets value of sub {@link Map} on passed sub key contained in cached * configuration on passed key * * @param key * @param subKey * @param defaultValue * @return V */ private <K, V> V getSubConfigValue(Object key, K subKey, V defaultValue) { V def; Map<K, V> subConfig = getAsMap(key); if (CollectionUtils.valid(subConfig)) { def = subConfig.get(subKey); if (def == null) { def = defaultValue; } } else { def = defaultValue; } return def; } /** * Check if sub {@link Map} contains passed sub key contained in cached * configuration on passed key * * @param key * @param subKey * @return <code>boolean</code> */ private <K> boolean containsSubConfigKey(Object key, K subKey) { boolean valid; Map<K, ?> subConfig = getAsMap(key); valid = CollectionUtils.valid(subConfig); if (valid) { valid = subConfig.containsKey(subKey); } return valid; } private <K> 
boolean containsConfigKey(K key) { return containsSubConfigKey(ConfigKeys.DEPLOY_CONFIG.key, key); } private <K, V> V getSubConfigValue(Object key, K subKey) { return getSubConfigValue(key, subKey, null); } private <K, V> void setConfigValue(K subKey, V value) { setSubConfigValue(ConfigKeys.DEPLOY_CONFIG.key, subKey, value); } private <K, V> V getConfigValue(K subKey, V defaultValue) { return getSubConfigValue(ConfigKeys.DEPLOY_CONFIG.key, subKey, defaultValue); } private <K, V> V getConfigValue(K subKey) { return getSubConfigValue(ConfigKeys.DEPLOY_CONFIG.key, subKey); } private <K, V> Map<K, V> getWithInitialization(Object key) { Map<K, V> result = getConfigValue(key); if (result == null) { result = new HashMap<K, V>(); setConfigValue(key, result); } return result; } private <K, V> void setWithInitialization(Object key, K subKey, V value) { Map<K, V> result = getWithInitialization(key); result.put(subKey, value); } /** * Gets value for specific key from connection persistence sub {@link Map} * of configuration if value is null then returns passed default value * * @param key * @return <code>V</code> */ public <V> V getPersistenceConfigValue(Object key, V defaultValue) { V value = CollectionUtils.getSubValue(config, ConfigKeys.DEPLOY_CONFIG.key, ConfigKeys.PERSISTENCE_CONFIG.key, key); if (value == null) { value = defaultValue; } return value; } /** * Gets value for specific key from connection persistence sub {@link Map} * of configuration * * @param key * @return <code>V</code> */ public <V> V getPersistenceConfigValue(Object key) { return getPersistenceConfigValue(key, null); } /** * Sets specific value for appropriated key in persistence configuration sub * {@link Map} of configuration * * @param key * @param value */ public void setPersistenceConfigValue(Object key, Object value) { setWithInitialization(ConfigKeys.PERSISTENCE_CONFIG.key, key, value); } /** * Gets value for specific key from connection pool configuration sub * {@link Map} of configuration if value is null then returns passed default * value * * @param key * @return <code>V</code> */ public <V> V getPoolConfigValue(Object key, V defaultValue) { V value = CollectionUtils.getSubValue(config, ConfigKeys.DEPLOY_CONFIG.key, ConfigKeys.POOL_CONFIG.key, key); if (value == null) { value = defaultValue; } return value; } /** * Gets value for specific key from connection pool configuration sub * {@link Map} of configuration * * @param key * @return <code>V</code> */ public <V> V getPoolConfigValue(Object key) { V value = getPoolConfigValue(key, null); return value; } /** * Sets specific value for appropriated key in connection pool configuration * sub {@link Map} of configuration * * @param key * @param value */ public void setPoolConfigValue(Object key, Object value) { setWithInitialization(ConfigKeys.POOL_CONFIG.key, key, value); } /** * Configuration for {@link PoolConfig} instance */ private void configurePool() { Map<Object, Object> poolProperties = getPoolConfigValue(ConfigKeys.POOL_PROPERTIES.key); if (CollectionUtils.valid(poolProperties)) { setPoolProperties(poolProperties); } String type = getPoolConfigValue(ConfigKeys.POOL_PROVIDER_TYPE.key); if (StringUtils.valid(type)) { getPoolConfig().setPoolProviderType(type); } String path = getPoolConfigValue(ConfigKeys.POOL_PROPERTIES_PATH.key); if (StringUtils.valid(path)) { setPoolPropertiesPath(path); } } private <K, V> void setIfContains(K key, V value) { boolean contains = containsConfigKey(key); if (ObjectUtils.notTrue(contains)) { setConfigValue(key, value); } } /** * 
Configures server from properties and default values */ private void configureServer() { // Sets default values to remote server configuration setIfContains(ConfigKeys.IP_ADDRESS.key, ConfigKeys.IP_ADDRESS.value); setIfContains(ConfigKeys.PORT.key, ConfigKeys.PORT.value); setIfContains(ConfigKeys.BOSS_POOL.key, ConfigKeys.BOSS_POOL.value); boolean contains = containsConfigKey(ConfigKeys.WORKER_POOL.key); if (ObjectUtils.notTrue(contains)) { int defaultWorkers = ConfigKeys.WORKER_POOL.getValue(); int workers = (RUNTIME.availableProcessors() * defaultWorkers); String workerProperty = String.valueOf(workers); setConfigValue(ConfigKeys.WORKER_POOL.key, workerProperty); } setIfContains(ConfigKeys.CONNECTION_TIMEOUT.key, ConfigKeys.CONNECTION_TIMEOUT.value); } /** * Merges configuration with default properties */ public void configureDeployments() { // Checks if application run in hot deployment mode Boolean hotDeployment = getConfigValue(ConfigKeys.HOT_DEPLOYMENT.key); if (hotDeployment == null) { setConfigValue(ConfigKeys.HOT_DEPLOYMENT.key, Boolean.FALSE); hotDeployment = getConfigValue(ConfigKeys.HOT_DEPLOYMENT.key); } // Check if application needs directory watch service boolean watchStatus; if (ObjectUtils.notTrue(hotDeployment)) { watchStatus = Boolean.TRUE; } else { watchStatus = Boolean.FALSE; } setConfigValue(ConfigKeys.WATCH_STATUS.key, watchStatus); // Sets deployments directories Set<DeploymentDirectory> deploymentPaths = getConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key); if (deploymentPaths == null) { deploymentPaths = ConfigKeys.DEMPLOYMENT_PATH.getValue(); setConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key, deploymentPaths); } // Sets remote control check Boolean remoteControl = getConfigValue(ConfigKeys.REMOTE_CONTROL.key); if (ObjectUtils.notNull(remoteControl)) { setRemoteControl(remoteControl); } } /** * Configures server and connection pooling */ public void configure() { configureServer(); configureDeployments(); configurePool(); } /** * Merges two {@link Map}s and if second {@link Map}'s value is instance of * {@link Map} merges this value with first {@link Map}'s value recursively * * @param map1 * @param map2 * @return <code>{@link Map}<Object, Object></code> */ protected Map<Object, Object> deepMerge(Map<Object, Object> map1, Map<Object, Object> map2) { if (map1 == null) { map1 = map2; } else { Set<Map.Entry<Object, Object>> entries2 = map2.entrySet(); Object key; Map<Object, Object> value1; Object value2; Map<Object, Object> mapValue2; Object mergedValue; for (Map.Entry<Object, Object> entry2 : entries2) { key = entry2.getKey(); value2 = entry2.getValue(); if (value2 instanceof Map) { value1 = CollectionUtils.getAsMap(key, map1); mapValue2 = ObjectUtils.cast(value2); mergedValue = deepMerge(value1, mapValue2); } else { mergedValue = value2; } if (ObjectUtils.notNull(mergedValue)) { map1.put(key, mergedValue); } } } return map1; } /** * Reads configuration from passed properties * * @param configuration */ public void configure(Map<Object, Object> configuration) { deepMerge(config, configuration); } /** * Reads configuration from passed file path * * @param configuration */ public void configure(String path) throws IOException { File yamlFile = new File(path); if (yamlFile.exists()) { InputStream stream = new FileInputStream(yamlFile); try { Yaml yaml = new Yaml(); Object configuration = yaml.load(stream); if (configuration instanceof Map) { Map<Object, Object> innerConfig = ObjectUtils .cast(configuration); configure(innerConfig); } } finally { IOUtils.close(stream); } } } 
/** * Gets value associated with particular key as {@link String} instance * * @param key * @return {@link String} */ public String getStringValue(String key) { String textValue; Object value = config.get(key); if (value == null) { textValue = null; } else { textValue = value.toString(); } return textValue; } /** * Gets value associated with particular key as <code>int</code> instance * * @param key * @return {@link String} */ public int getIntValue(String key) { String value = getStringValue(key); return Integer.parseInt(value); } /** * Gets value associated with particular key as <code>long</code> instance * * @param key * @return {@link String} */ public long getLongValue(String key) { String value = getStringValue(key); return Long.parseLong(value); } /** * Gets value associated with particular key as <code>boolean</code> * instance * * @param key * @return {@link String} */ public boolean getBooleanValue(String key) { String value = getStringValue(key); return Boolean.parseBoolean(value); } public void putValue(String key, String value) { config.put(key, value); } /** * Load {@link Configuration} in memory as {@link Map} of parameters * * @throws IOException */ public void loadFromStream(InputStream propertiesStream) throws IOException { try { Properties props = new Properties(); props.load(propertiesStream); for (String propertyName : props.stringPropertyNames()) { config.put(propertyName, props.getProperty(propertyName)); } } catch (IOException ex) { LOG.error(COULD_NOT_LOAD_CONFIG_ERROR, ex); } finally { IOUtils.close(propertiesStream); } } /** * Loads configuration form file * * @throws IOException */ public void loadFromFile() throws IOException { String configFilePath = ConfigKeys.CONFIG_FILE.getValue(); try { File configFile = new File(configFilePath); if (configFile.exists()) { InputStream propertiesStream = new FileInputStream(configFile); loadFromStream(propertiesStream); } else { configFile.mkdirs(); } } catch (IOException ex) { LOG.error(COULD_NOT_OPEN_FILE_ERROR, ex); } } /** * Loads configuration form file by passed file path * * @param configFilename * @throws IOException */ public void loadFromFile(String configFilename) throws IOException { try { InputStream propertiesStream = new FileInputStream(new File( configFilename)); loadFromStream(propertiesStream); } catch (IOException ex) { LOG.error(COULD_NOT_OPEN_FILE_ERROR, ex); } } /** * Loads configuration from file contained in classpath * * @param resourceName * @param loader */ public void loadFromResource(String resourceName, ClassLoader loader) throws IOException { InputStream resourceStream = loader.getResourceAsStream(StringUtils .concat(META_INF_PATH, resourceName)); if (resourceStream == null) { LOG.error(RESOURCE_NOT_EXISTS_ERROR); } else { loadFromStream(resourceStream); } } public static String getAdminUsersPath() { return ConfigKeys.ADMIN_USERS_PATH.getValue(); } public static void setAdminUsersPath(String adminUsersPath) { ConfigKeys.ADMIN_USERS_PATH.value = adminUsersPath; } public static void setRemoteControl(boolean remoteControl) { ConfigKeys.REMOTE_CONTROL.value = remoteControl; } public static boolean getRemoteControl() { return ConfigKeys.REMOTE_CONTROL.getValue(); } public boolean isRemote() { return ConfigKeys.REMOTE.getValue(); } public void setRemote(boolean remote) { ConfigKeys.REMOTE.value = remote; } public static boolean isServer() { return ConfigKeys.SERVER.getValue(); } public static void setServer(boolean server) { ConfigKeys.SERVER.value = server; } public boolean isClient() { return 
getConfigValue(ConfigKeys.CLIENT.key, Boolean.FALSE); } public void setClient(boolean client) { setConfigValue(ConfigKeys.CLIENT.key, client); } /** * Adds path for deployments file or directory * * @param path * @param scan */ public void addDeploymentPath(String path, boolean scan) { Set<DeploymentDirectory> deploymentPaths = getConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key); if (deploymentPaths == null) { deploymentPaths = new HashSet<DeploymentDirectory>(); setConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key, deploymentPaths); } deploymentPaths.add(new DeploymentDirectory(path, scan)); } /** * Adds path for data source file * * @param path */ public void addDataSourcePath(String path) { Set<String> dataSourcePaths = getConfigValue(ConfigKeys.DATA_SOURCE_PATH.key); if (dataSourcePaths == null) { dataSourcePaths = new HashSet<String>(); setConfigValue(ConfigKeys.DATA_SOURCE_PATH.key, dataSourcePaths); } dataSourcePaths.add(path); } public Set<DeploymentDirectory> getDeploymentPath() { return getConfigValue(ConfigKeys.DEMPLOYMENT_PATH.key); } public Set<String> getDataSourcePath() { return getConfigValue(ConfigKeys.DATA_SOURCE_PATH.key); } public String[] getLibraryPaths() { return getConfigValue(ConfigKeys.LIBRARY_PATH.key); } public void setLibraryPaths(String[] libraryPaths) { setConfigValue(ConfigKeys.LIBRARY_PATH.key, libraryPaths); } public boolean isHotDeployment() { return getConfigValue(ConfigKeys.HOT_DEPLOYMENT.key, Boolean.FALSE); } public void setHotDeployment(boolean hotDeployment) { setConfigValue(ConfigKeys.HOT_DEPLOYMENT.key, hotDeployment); } public boolean isWatchStatus() { return getConfigValue(ConfigKeys.WATCH_STATUS.key, Boolean.FALSE); } public void setWatchStatus(boolean watchStatus) { setConfigValue(ConfigKeys.WATCH_STATUS.key, watchStatus); } /** * Property for persistence configuration * * @return <code>boolean</code> */ public boolean isScanForEntities() { return getPersistenceConfigValue(ConfigKeys.SCAN_FOR_ENTITIES.key, Boolean.FALSE); } public void setScanForEntities(boolean scanForEntities) { setPersistenceConfigValue(ConfigKeys.SCAN_FOR_ENTITIES.key, scanForEntities); } public String getAnnotatedUnitName() { return getPersistenceConfigValue(ConfigKeys.ANNOTATED_UNIT_NAME.key); } public void setAnnotatedUnitName(String annotatedUnitName) { setPersistenceConfigValue(ConfigKeys.ANNOTATED_UNIT_NAME.key, annotatedUnitName); } public String getPersXmlPath() { return getPersistenceConfigValue(ConfigKeys.PERSISTENCE_XML_PATH.key); } public void setPersXmlPath(String persXmlPath) { setPersistenceConfigValue(ConfigKeys.PERSISTENCE_XML_PATH.key, persXmlPath); } public boolean isPersXmlFromJar() { return getPersistenceConfigValue( ConfigKeys.PERSISTENCE_XML_FROM_JAR.key, Boolean.FALSE); } public void setPersXmlFromJar(boolean persXmlFromJar) { setPersistenceConfigValue(ConfigKeys.PERSISTENCE_XML_FROM_JAR.key, persXmlFromJar); } public boolean isSwapDataSource() { return getPersistenceConfigValue(ConfigKeys.SWAP_DATASOURCE.key, Boolean.FALSE); } public void setSwapDataSource(boolean swapDataSource) { setPersistenceConfigValue(ConfigKeys.SWAP_DATASOURCE.key, swapDataSource); } public boolean isScanArchives() { return getPersistenceConfigValue(ConfigKeys.SCAN_ARCHIVES.key, Boolean.FALSE); } public void setScanArchives(boolean scanArchives) { setPersistenceConfigValue(ConfigKeys.SCAN_ARCHIVES.key, scanArchives); } public boolean isPooledDataSource() { return getPersistenceConfigValue(ConfigKeys.POOLED_DATA_SOURCE.key, Boolean.FALSE); } public void setPooledDataSource(boolean 
pooledDataSource) { setPersistenceConfigValue(ConfigKeys.POOLED_DATA_SOURCE.key, pooledDataSource); } public Map<Object, Object> getPersistenceProperties() { return getPersistenceConfigValue(ConfigKeys.PERSISTENCE_PROPERTIES.key); } public void setPersistenceProperties( Map<Object, Object> persistenceProperties) { setPersistenceConfigValue(ConfigKeys.PERSISTENCE_PROPERTIES.key, persistenceProperties); } /** * Gets cached {@link PoolConfig} instance a connection pool configuration * * @return {@link PoolConfig} */ public static PoolConfig getPoolConfig() { return POOL_CONFIG; } public void setDataSourcePooledType(boolean dsPooledType) { PoolConfig poolConfig = getPoolConfig(); poolConfig.setPooledDataSource(dsPooledType); } public void setPoolPropertiesPath(String path) { PoolConfig poolConfig = getPoolConfig(); poolConfig.setPoolPath(path); } public void setPoolProperties( Map<? extends Object, ? extends Object> properties) { PoolConfig poolConfig = getPoolConfig(); poolConfig.getPoolProperties().putAll(properties); } public void addPoolProperty(Object key, Object value) { PoolConfig poolConfig = getPoolConfig(); poolConfig.getPoolProperties().put(key, value); } public void setPoolProviderType(PoolProviderType poolProviderType) { PoolConfig poolConfig = getPoolConfig(); poolConfig.setPoolProviderType(poolProviderType); } @Override public Object clone() throws CloneNotSupportedException { // Deep clone for configuration Configuration cloneConfig = (Configuration) super.clone(); cloneConfig.config.clear(); cloneConfig.configure(this.config); return cloneConfig; } }
improved / commented code utility classes
src/main/java/org/lightmare/config/Configuration.java
improved / commented code utility classes
Java
unlicense
c31434db9a4aa2845c68a9105ff19ebc9879ddef
0
gguzman89/G4-CarmenSanDiego,gguzman89/G4-CarmenSanDiego,gguzman89/G4-CarmenSanDiego
package com.example.nigthkids.carmenmobile;

import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;

import java.util.ArrayList;

public class ViajarActivity extends AppCompatActivity {

    ListView lvPaises;
    String[] items = {"Argentina", "Bolivia", "Paraguay", "Chile"};
    TextView tvPaisesVisitados;
    Button orden;
    Button pistas;
    ArrayList<String> paisesVisitados;
    ArrayList<String> mini_conexiones;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_viajar);

        lvPaises = (ListView) findViewById(R.id.lvPaises);
        tvPaisesVisitados = (TextView) findViewById(R.id.tvPaisesVisitados);
        orden = (Button) findViewById(R.id.btnVolverOrden);
        pistas = (Button) findViewById(R.id.btnVolverViajar);

        Bundle bundle = getIntent().getExtras();
        if (bundle != null) {
            String nombrePaisActual = bundle.getString("nombrePaisActual").toString();
            getSupportActionBar().setTitle("Estas en: " + nombrePaisActual);
            paisesVisitados = (ArrayList<String>) bundle.getSerializable("Paises Visitados");
            String paisesVisited = "";
            for (String nombrePais : items) { // replace with the original list: paisesVisitados
                paisesVisited = paisesVisited + " -> " + nombrePais;
            }
            mini_conexiones = (ArrayList<String>) bundle.getSerializable("Paises Conexiones");
            tvPaisesVisitados.setText(paisesVisited);
        }

        ArrayAdapter<String> adapter = new ArrayAdapter<String>(getBaseContext(),
                android.R.layout.simple_list_item_1, mini_conexiones);
        lvPaises.setAdapter(adapter);

        orden.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent ordenView = new Intent(ViajarActivity.this, OrdenDeArrestoActivity.class);
                startActivity(ordenView);
            }
        });

        pistas.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent lugaresView = new Intent(ViajarActivity.this, PedirPistasActivity.class);
                startActivity(lugaresView);
            }
        });
    }
}
CarmenMobile/app/src/main/java/com/example/nigthkids/carmenmobile/ViajarActivity.java
package com.example.nigthkids.carmenmobile;

import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;

import java.util.ArrayList;

public class ViajarActivity extends AppCompatActivity {

    ListView lvPaises;
    String[] items = {"Argentina", "Bolivia", "Paraguay", "Chile"};
    TextView tvPaisesVisitados;
    Button orden;
    Button pistas;
    ArrayList<String> paisesVisitados;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_viajar);

        lvPaises = (ListView) findViewById(R.id.lvPaises);
        tvPaisesVisitados = (TextView) findViewById(R.id.tvPaisesVisitados);
        orden = (Button) findViewById(R.id.btnVolverOrden);
        pistas = (Button) findViewById(R.id.btnVolverViajar);

        Bundle bundle = getIntent().getExtras();
        if (bundle != null) {
            String nombrePaisActual = bundle.getString("nombrePaisActual").toString();
            getSupportActionBar().setTitle("Estas en: " + nombrePaisActual);
            paisesVisitados = (ArrayList<String>) bundle.getSerializable("Paises Visitados");
            String paisesVisited = "";
            for (String nombrePais : items) { // replace with the original list: paisesVisitados
                paisesVisited = paisesVisited + " -> " + nombrePais;
            }
            tvPaisesVisitados.setText(paisesVisited);
        }

        ArrayAdapter<String> adapter = new ArrayAdapter<String>(getBaseContext(),
                android.R.layout.simple_list_item_1, items);
        lvPaises.setAdapter(adapter);

        orden.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent ordenView = new Intent(ViajarActivity.this, OrdenDeArrestoActivity.class);
                startActivity(ordenView);
            }
        });

        pistas.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent lugaresView = new Intent(ViajarActivity.this, PedirPistasActivity.class);
                startActivity(lugaresView);
            }
        });
    }
}
miniConexiones True ListView
CarmenMobile/app/src/main/java/com/example/nigthkids/carmenmobile/ViajarActivity.java
miniConexiones True ListView
Java
apache-2.0
863f3f91f914e32d47f543e7d87a5d729a32ec7d
0
kef/hieos,kef/hieos,kef/hieos
/* * This code is subject to the HIEOS License, Version 1.0 * * Copyright(c) 2008-2009 Vangent, Inc. All rights reserved. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package com.vangent.hieos.xutil.metadata.structure; import com.vangent.hieos.xutil.exception.MetadataException; import com.vangent.hieos.xutil.exception.MetadataValidationException; import com.vangent.hieos.xutil.exception.NoMetadataException; import com.vangent.hieos.xutil.exception.NoSubmissionSetException; import com.vangent.hieos.xutil.exception.XdsInternalException; import com.vangent.hieos.xutil.xml.Util; import com.vangent.hieos.xutil.hl7.date.Hl7Date; import com.vangent.hieos.xutil.uuid.UuidAllocator; import com.vangent.hieos.xutil.xml.XMLParser; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.xml.namespace.QName; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMAttribute; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.OMNamespace; import org.apache.log4j.Logger; /** * * @author NIST (Adapted for HIEOS). */ public class Metadata { /** * */ protected OMFactory fac; private boolean grok_metadata = true; private final static Logger logger = Logger.getLogger(Metadata.class); /** * List of IHE association types: */ //public static final List<String> iheAssocTypes = new ArrayList<String>() { // // { // add("APND"); // add("XFRM"); // add("RPLC"); // add("XFRM_RPLC"); // add("signs"); // } // }; // Valid document->document association types. public static final List<String> validDocumentAssocTypes = new ArrayList<String>() { { add(MetadataSupport.xdsB_ihe_assoc_type_apnd); add(MetadataSupport.xdsB_ihe_assoc_type_xfrm); add(MetadataSupport.xdsB_ihe_assoc_type_rplc); add(MetadataSupport.xdsB_ihe_assoc_type_xfrm_rplc); add(MetadataSupport.xdsB_ihe_assoc_type_signs); } }; // Valid submission set -> registry object assocation types. private static final List<String> validSubmissionSetAssocTypes = new ArrayList<String>() { { add(MetadataSupport.xdsB_eb_assoc_type_has_member); add(MetadataSupport.xdsB_ihe_assoc_type_submit_association); add(MetadataSupport.xdsB_ihe_assoc_type_update_availability_status); } }; // Valid folder -> document association types. 
private static final List<String> validFolderAssocTypes = new ArrayList<String>() { { add(MetadataSupport.xdsB_eb_assoc_type_has_member); } }; private OMElement metadata; // wrapper private OMElement wrapper; // current metadata document being parsed private List<OMElement> wrappers; // the collection of all metadata documents included in current tables private List<OMElement> extrinsicObjects = null; private List<OMElement> folders = null; private OMElement submissionSet = null; private List<OMElement> submissionSets = null; private List<OMElement> registryPackages = null; private List<OMElement> associations = null; private List<OMElement> objectRefs = null; private List<OMElement> classifications = null; private List<OMElement> allObjects = null; private List<String> objectsToDeprecate = null; private List<String> objectsReferenced = null; private Map<String, List> classificationsOfId = null; private boolean version2; //private OMElement metadataDup = null; //private OMElement wrapperDup = null; //boolean mustDup = false; //private int idAllocation = 0; private IdIndex idIndex = null; /** * * @param metadata * @throws MetadataException * @throws MetadataValidationException */ public Metadata(OMElement metadata) throws MetadataException, MetadataValidationException { this.metadata = metadata; runParser(); } /** * * @param metadataFile * @param parse * @throws XdsInternalException * @throws MetadataException * @throws MetadataValidationException */ public Metadata(File metadataFile, boolean parse) throws XdsInternalException, MetadataException, MetadataValidationException { metadata = XMLParser.fileToOM(metadataFile); wrapper = null; wrappers = new ArrayList<OMElement>(); if (parse) { wrapper = find_metadata_wrapper(); wrappers.add(wrapper); parse(false); } else { init(); } } /** * */ public Metadata() { init(); this.setGrokMetadata(false); } /** * * @param assocType * @return */ public static boolean isValidDocumentAssociationType(String assocType) { return validDocumentAssocTypes.contains(assocType); } /** * * @return */ public static List<String> getValidDocumentAssociationTypes() { return validDocumentAssocTypes; } /** * * @param assocType * @return */ public static boolean isValidFolderAssociationType(String assocType) { return validFolderAssocTypes.contains(assocType); } /** * * @return */ public static List<String> getValidFolderAssociationTypes() { return validFolderAssocTypes; } /** * * @param assocType * @return */ public static boolean isValidSubmissionSetAssociationType(String assocType) { return validSubmissionSetAssocTypes.contains(assocType); } /** * * @return */ public static List<String> getValidSubmissionSetAssociationTypes() { return validSubmissionSetAssocTypes; } /** * * @param registryObject * @param previousVersion */ // FIXME: May want to rework and not require passing "previousVersion". public static void updateRegistryObjectVersion(OMElement registryObject, String previousVersion) { // Get version //<rim:VersionInfo versionName="1" /> OMElement versionInfoEle = MetadataSupport.firstChildWithLocalName(registryObject, "VersionInfo"); if (versionInfoEle == null) { // No version info exists, create one. versionInfoEle = MetadataSupport.om_factory.createOMElement("VersionInfo", MetadataSupport.ebRIMns3); // Attach to the version info object (before first Classification). 
OMElement classificationEle = MetadataSupport.firstChildWithLocalName(registryObject, "Classification"); classificationEle.insertSiblingBefore(versionInfoEle); //targetObject.addChild(versionInfoEle); } Integer nextVersion = new Integer(previousVersion) + 1; OMAttribute versionNameAttr = versionInfoEle.getAttribute(new QName("versionName")); if (versionNameAttr == null) { versionInfoEle.addAttribute("versionName", nextVersion.toString(), null); } else { versionNameAttr.setAttributeValue(nextVersion.toString()); } } /** * * @param registryObject * @return */ public static Double getRegistryObjectVersion(OMElement registryObject) { // Get version OMElement versionInfoEle = MetadataSupport.firstChildWithLocalName(registryObject, "VersionInfo"); if (versionInfoEle == null) { return 1.0; // Default. } OMAttribute versionNameAttr = versionInfoEle.getAttribute(new QName("versionName")); if (versionNameAttr == null) { return 1.0; // Default. } else { String versionNameText = versionNameAttr.getAttributeValue(); return new Double(versionNameText); } } /** * * @param registryObject * @param statusValue */ public static void setStatusOnRegistryObject(OMElement registryObject, String statusValue) { OMAttribute statusAttr = registryObject.getAttribute(new QName("status")); if (statusAttr == null) { registryObject.addAttribute("status", statusValue, null); } else { statusAttr.setAttributeValue(statusValue); } } /** * * @throws MetadataException */ public void setStatusOnApprovableObjects() throws MetadataException { List<OMElement> approvableObjects = this.getApprovableObjects(); for (OMElement approvableObject : approvableObjects) { Metadata.setStatusOnRegistryObject(approvableObject, MetadataSupport.status_type_approved); } } /** * * @return */ private String allocate_id() { //idAllocation += 1; //return ("ID_" + String.valueOf(this.hashCode()) + "_" + idAllocation); return UuidAllocator.allocate(); } /** * */ public void removeDuplicates() { removeDuplicates(extrinsicObjects); removeDuplicates(folders); removeDuplicates(submissionSets); removeDuplicates(registryPackages); removeDuplicates(associations); removeDuplicates(objectRefs); removeDuplicates(classifications); removeFromObjectRefs(extrinsicObjects); removeFromObjectRefs(registryPackages); removeFromObjectRefs(associations); removeFromObjectRefs(classifications); allObjects = new ArrayList<OMElement>(); allObjects.addAll(extrinsicObjects); allObjects.addAll(registryPackages); allObjects.addAll(associations); allObjects.addAll(classifications); allObjects.addAll(objectRefs); } /** * * @param set */ private void removeDuplicates(List<OMElement> set) { boolean running = true; while (running) { running = false; for (int targetI = 0; targetI < set.size(); targetI++) { OMElement target = set.get(targetI); String targetId = id(target); for (int i = targetI + 1; i < set.size(); i++) { OMElement it = set.get(i); if (targetId.equals(id(it))) { set.remove(i); running = true; break; } } } } } /** * * @param set */ private void removeFromObjectRefs(List<OMElement> set) { for (int i = 0; i < set.size(); i++) { String id = id(set.get(i)); boolean restart = true; while (restart) { restart = false; for (int j = 0; j < objectRefs.size(); j++) { if (id.equals(id(objectRefs.get(j)))) { objectRefs.remove(j); restart = true; break; } } } } } /** * Return true if the metadata only includes object references. Otherwise, return false. * * @return Boolean result. 
*/ public boolean isObjectRefsOnly() { return submissionSets.isEmpty() && extrinsicObjects.isEmpty() && folders.isEmpty() && associations.isEmpty() && classifications.isEmpty() && !objectRefs.isEmpty(); } /** * Return list of approvable object (ExtrinsicObjects, RegistryPackages and Associations) identifiers. * * @return The list of approvable object identifiers. */ public List<String> getApprovableObjectIds() { List<OMElement> o = new ArrayList<OMElement>(); o.addAll(this.extrinsicObjects); o.addAll(this.registryPackages); o.addAll(this.associations); return this.getObjectIds(o); } /** * Return list of approvable object (ExtrinsicObjects, RegistryPackages and Associations) identifiers. * * @return The list of approvable objects. */ public List<OMElement> getApprovableObjects() { List<OMElement> o = new ArrayList<OMElement>(); o.addAll(this.extrinsicObjects); o.addAll(this.registryPackages); o.addAll(this.associations); return o; } /** * Go through all objects in metadata and only maintain those objects that contain * an id in the passed in list. * * @param ids The list of ids to maintain in the metadata. */ public void filter(List<String> ids) { submissionSets = filter(submissionSets, ids); extrinsicObjects = filter(extrinsicObjects, ids); folders = filter(folders, ids); associations = filter(associations, ids); allObjects = filter(allObjects, ids); } /** * * @param objects * @param ids * @return */ private List<OMElement> filter(List<OMElement> objects, List<String> ids) { List<OMElement> out = new ArrayList<OMElement>(); for (OMElement object : objects) { String id = id(object); if (ids.contains(id)) { out.add(object); } } return out; } /** * * @return */ public List<OMElement> getNonObjectRefs() { List<OMElement> objs = new ArrayList<OMElement>(); objs.addAll(this.submissionSets); objs.addAll(this.folders); objs.addAll(this.extrinsicObjects); objs.addAll(this.associations); objs.addAll(this.classifications); return objs; } /** * * @return */ /* private List<List<OMElement>> getMetadataContainers() { List<List<OMElement>> containers = new ArrayList<List<OMElement>>(); containers.add(extrinsicObjects); containers.add(folders); containers.add(submissionSets); containers.add(associations); containers.add(objectRefs); containers.add(classifications); containers.add(allObjects); containers.add(registryPackages); return containers; }*/ /** * * @return */ private OMNamespace getCurrentNamespace() { if (version2) { return MetadataSupport.ebRIMns2; } return MetadataSupport.ebRIMns3; } /** * * @param x */ public void setGrokMetadata(boolean x) { grok_metadata = x; } /** * * @param metadata */ public void setMetadata(OMElement metadata) { this.metadata = metadata; init(); } /** * * @throws MetadataException * @throws MetadataValidationException */ public void runParser() throws MetadataException, MetadataValidationException { wrapper = find_metadata_wrapper(); if (wrappers == null) { wrappers = new ArrayList<OMElement>(); } wrappers.add(wrapper); parse(false); } /** * * @param m * @throws MetadataException * @throws MetadataValidationException */ public void addMetadata(Metadata m) throws MetadataException, MetadataValidationException { if (m.getRoot() != null) { addMetadata(m.getRoot(), false); } } /** * * @param m * @param discard_duplicates * @throws MetadataException * @throws MetadataValidationException */ public void addMetadata(Metadata m, boolean discard_duplicates) throws MetadataException, MetadataValidationException { addMetadata(m.getRoot(), discard_duplicates); } /** * * @param 
metadata * @throws MetadataException * @throws MetadataValidationException */ public void addMetadata(OMElement metadata) throws MetadataException, MetadataValidationException { addMetadata(metadata, false); } /** * * @param metadata * @param discard_duplicates * @throws MetadataException * @throws MetadataValidationException */ public void addMetadata(OMElement metadata, boolean discard_duplicates) throws MetadataException, MetadataValidationException { init(); if (wrappers == null) { wrappers = new ArrayList<OMElement>(); } this.metadata = metadata; wrapper = find_metadata_wrapper(); wrappers.add(wrapper); reindex(); parse(discard_duplicates); } /** * Add to metadata collection. If collection empty then initialize it. * @param metadata - a collection of metadata objects. Will be wrapped internally (made into * single XML document) * @param discard_duplicates * @return * @throws XdsInternalException * @throws MetadataValidationException * @throws MetadataException */ public Metadata addToMetadata(List<OMElement> metadata, boolean discard_duplicates) throws XdsInternalException, MetadataException, MetadataValidationException { for (OMElement ele : metadata) { addToMetadata(ele, discard_duplicates, false); } parse(discard_duplicates); return this; } /** * * @param new_metadata * @param discard_duplicates * @param run_parse * @return * @throws XdsInternalException * @throws MetadataException * @throws MetadataValidationException */ private Metadata addToMetadata(OMElement new_metadata, boolean discard_duplicates, boolean run_parse) throws XdsInternalException, MetadataException, MetadataValidationException { boolean hasExistingData = false; if (wrapper == null) { wrapper = makeWrapper(); metadata = new_metadata; if (wrappers == null) { wrappers = new ArrayList<OMElement>(); } wrappers.add(wrapper); } else { hasExistingData = true; } wrapper.addChild(Util.deep_copy(new_metadata)); if (run_parse) { if (hasExistingData) { reparse(discard_duplicates); } else { parse(discard_duplicates); } } return this; } /** * * @return */ private OMElement makeWrapper() { return MetadataSupport.om_factory.createOMElement("root", MetadataSupport.ebRIMns3); } /** * */ public void clearLeafClassObjects() { registryPackages.clear(); submissionSet = null; submissionSets.clear(); extrinsicObjects.clear(); associations.clear(); reindex(); } /** * */ public void clearObjectRefs() { objectRefs = new ArrayList<OMElement>(); } /** * */ private void reindex() { this.idIndex = null; // lazy } /** * * @param eos */ public void addExtrinsicObjects(List<OMElement> eos) { extrinsicObjects.addAll(eos); allObjects.addAll(eos); } /** * * @param object_refs_or_ids * @throws MetadataException */ @SuppressWarnings("unchecked") public void addObjectRefs(List<?> object_refs_or_ids) throws MetadataException { if (object_refs_or_ids.isEmpty()) { return; } Object ele = object_refs_or_ids.get(0); if (ele instanceof OMElement) { objectRefs.addAll((List<OMElement>) object_refs_or_ids); } else if (ele instanceof String) { this.makeObjectRefs((List<String>) object_refs_or_ids); } else { throw new MetadataException("Don't understand format " + ele.getClass().getName()); } } /** * * @param ids */ public void makeObjectRefs(List<String> ids) { for (String id : ids) { makeObjectRef(id); } } /** * * @param id */ private void makeObjectRef(String id) { OMElement objRef = MetadataSupport.om_factory.createOMElement(MetadataSupport.object_ref_qname); objRef.addAttribute("id", id, null); objectRefs.add(objRef); } /** * * @return * @throws 
XdsInternalException * @throws MetadataException * @throws MetadataValidationException */ public Metadata makeClone() throws XdsInternalException, MetadataException, MetadataValidationException { Metadata m = new Metadata(); if (wrappers != null) { if (m.wrappers == null) { m.wrappers = new ArrayList<OMElement>(); } for (OMElement ele : wrappers) { m.wrappers.add(ele); } } m.extrinsicObjects.addAll(extrinsicObjects); m.folders.addAll(folders); m.submissionSets.addAll(submissionSets); m.registryPackages.addAll(registryPackages); m.associations.addAll(associations); m.objectRefs.addAll(objectRefs); m.classifications.addAll(classifications); m.allObjects.addAll(allObjects); m.submissionSet = submissionSet; return m; } /** * * @return */ public List<OMElement> getLeafClassObjects() { List<OMElement> objs = new ArrayList<OMElement>(); objs.addAll(registryPackages); objs.addAll(extrinsicObjects); objs.addAll(associations); return objs; } /** * * @return */ public boolean isVersion2() { return version2; } /** * * @return */ public OMFactory om_factory() { if (fac == null) { fac = OMAbstractFactory.getOMFactory(); } return fac; } /** * * @return */ public OMElement getRoot() { return metadata; } /** * Return a string that can be used for debugging purposes. It lists the size * of each metadata element. * * @return String representing structure of the metadata. */ public String structure() { return this.getSubmissionSetIds().size() + " SS + " + this.extrinsicObjects.size() + " EO + " + this.folders.size() + " Fol + " + this.associations.size() + " A + " + this.objectRefs.size() + " OR"; } /** * * @param registryObject * @param idScheme * @return */ public List<OMElement> getExternalIdentifiers(OMElement registryObject, String idScheme) { List<OMElement> results = new ArrayList<OMElement>(); QName idSchemeQName = MetadataSupport.identificationscheme_qname; for (Iterator<OMElement> it = registryObject.getChildElements(); it.hasNext();) { OMElement ele = it.next(); if (!ele.getLocalName().equals("ExternalIdentifier")) { continue; } String elementIdScheme = ele.getAttributeValue(idSchemeQName); if (idScheme == null || idScheme.equals(elementIdScheme)) { results.add(ele); } } return results; } /** * * @param registryObject * @param idScheme * @return */ private boolean hasExternalIdentifier(OMElement registryObject, String idScheme) { QName idSchemeQName = new QName("identificationScheme"); for (Iterator<OMElement> it = registryObject.getChildElements(); it.hasNext();) { OMElement ele = it.next(); if (!ele.getLocalName().equals("ExternalIdentifier")) { continue; } String elementIdScheme = ele.getAttributeValue(idSchemeQName); if (idScheme.equals(elementIdScheme)) { return true; } } return false; } /** * Return true if the slot name exists for the given object. Otherwise, return false. * * @param registryObject The registry object in question. * @param slotName The name of the slot. * @return true if the slot name exists for the object, otherwise false. 
*/ public boolean hasSlot(OMElement registryObject, String slotName) { if (registryObject == null) { return false; } for (OMElement slot : MetadataSupport.childrenWithLocalName(registryObject, "Slot")) { String name = slot.getAttributeValue(MetadataSupport.slot_name_qname); if (name.equals(slotName)) { return true; } } return false; } /** * * @param registryObject * @param slotName * @param valueIndex * @return */ public String getSlotValue(OMElement registryObject, String slotName, int valueIndex) { if (registryObject == null) { return null; } for (OMElement slot : MetadataSupport.childrenWithLocalName(registryObject, "Slot")) { String name = slot.getAttributeValue(MetadataSupport.slot_name_qname); if (!name.equals(slotName)) { continue; } OMElement valueList = MetadataSupport.firstChildWithLocalName(slot, "ValueList"); if (valueList == null) { continue; } int valueCount = 0; for (OMElement valueElement : MetadataSupport.childrenWithLocalName(valueList, "Value")) { if (valueCount != valueIndex) { valueCount++; continue; } return valueElement.getText(); } } return null; } /** * * @param id * @param slot_name * @param value_index * @return * @throws MetadataException */ public String getSlotValue(String id, String slot_name, int value_index) throws MetadataException { return getSlotValue(getObjectById(id), slot_name, value_index); } /** * * @param obj * @param slotName * @param valueIndex * @param value */ public void setSlotValue(OMElement obj, String slotName, int valueIndex, String value) { if (obj == null) { return; } for (OMElement slot : MetadataSupport.childrenWithLocalName(obj, "Slot")) { String name = slot.getAttributeValue(MetadataSupport.slot_name_qname); if (!name.equals(slotName)) { continue; } OMElement valueList = MetadataSupport.firstChildWithLocalName(slot, "ValueList"); if (valueList == null) { continue; } int valueCount = 0; for (OMElement valueEle : MetadataSupport.childrenWithLocalName(valueList, "Value")) { if (valueCount != valueIndex) { valueCount++; continue; } valueEle.setText(value); } } } /** * * @param ele * @return */ public String getStatus(OMElement ele) { return ele.getAttributeValue(MetadataSupport.status_qname); } /** * * @return */ public List<OMElement> getMajorObjects() { //return getMajorObjects(null); return this.allObjects; } /** * * @param type * @return */ public List<OMElement> getMajorObjects(String type) { List<OMElement> objs = new ArrayList<OMElement>(); if (wrapper != null) { for (Iterator<OMElement> it = wrapper.getChildElements(); it.hasNext();) { OMElement obj = it.next(); if (type == null || type.equals(obj.getLocalName())) { objs.add(obj); } } } return objs; } /** * * @param ele * @return */ public String getId(OMElement ele) { return ele.getAttributeValue(MetadataSupport.id_qname); } /** * * @param assoc * @return */ public String getSourceObject(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.source_object_qname); } /** * * @param assoc * @return */ public String getTargetObject(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.target_object_qname); } /** * * @param assoc * @return */ public String getAssocType(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.association_type_qname); } /** * * @param assoc * @return */ public String getAssocSource(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.source_object_qname); } /** * * @param assoc * @return */ public String getAssocTarget(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.target_object_qname); } 
/** * * @return */ public List<OMElement> getAllObjects() { // probably the same as Major Objects return allObjects; } /** * * @param objects * @return */ /* public List<String> getIdsForObjects(List<OMElement> objects) { List<String> ids = new ArrayList<String>(); for (OMElement object : objects) { ids.add(object.getAttributeValue(MetadataSupport.id_qname)); } return ids; }*/ /** * * @param id * @return * @throws MetadataException */ public String type(String id) throws MetadataException { OMElement ele = this.getObjectById(id); if (ele == null) { return null; } return ele.getLocalName(); } /** * * @param ids * @param ele */ private void addIds(List<String> ids, OMElement ele) { if (!ele.getLocalName().equals("ObjectRef")) { String id = ele.getAttributeValue(MetadataSupport.id_qname); if (id != null && !ids.contains(id)) { ids.add(id); } } for (Iterator<OMElement> it = ele.getChildElements(); it.hasNext();) { OMElement ele2 = it.next(); addIds(ids, ele2); // Recurse. } } /** * * @return */ public List<String> getAllDefinedIds() { List<String> ids = new ArrayList<String>(); List<OMElement> objects = getAllObjects(); for (OMElement object : objects) { addIds(ids, object); } return ids; } /** * * @return */ public List<OMElement> getRegistryPackages() { return this.registryPackages; } /** * * @return */ public List<OMElement> getSubmissionSets() { return this.submissionSets; } /** * * @return */ public List<OMElement> getExtrinsicObjects() { return extrinsicObjects; } /** * * @return */ public List<OMElement> getObjectRefs() { return objectRefs; } /** * * @return */ public List<String> getObjectRefIds() { return this.getObjectIds(this.getObjectRefs()); } /** * * @param i * @return */ public OMElement getExtrinsicObject(int i) { return getExtrinsicObjects().get(i); } /** * * @return */ public List<String> getExtrinsicObjectIds() { return this.getObjectIds(this.getExtrinsicObjects()); } /** * * @return * @throws MetadataException */ public Map<String, OMElement> getDocumentUidMap() throws MetadataException { Map<String, OMElement> map = new HashMap<String, OMElement>(); List<String> ids = this.getExtrinsicObjectIds(); for (String id : ids) { String uid = this.getUniqueIdValue(id); map.put(uid, this.getObjectById(id)); } return map; } /** * * @param m * @return * @throws MetadataException */ public List<String> getExtrinsicObjectUniqueIds() throws MetadataException { List<String> list = new ArrayList<String>(); List<String> ids = this.getExtrinsicObjectIds(); for (String id : ids) { OMElement registry_object = this.getObjectById(id); List<OMElement> eis = this.getExternalIdentifiers(id); List<OMElement> eid_eles = this.getExternalIdentifiers(registry_object, MetadataSupport.XDSDocumentEntry_uniqueid_uuid); String uid; if (eid_eles.size() > 0) { uid = eid_eles.get(0).getAttributeValue(MetadataSupport.value_qname); } else { throw new MetadataException("Document " + id + " has no uniqueId\nfound " + eis.size() + " external identifiers"); } list.add(uid); } return list; } /** * Return the "mime type" for the extrinsic object. * * @param eo An Extrinsic Object. * @return A string representing the mime type for the extrinsic object. 
*/ public String getMimeType(OMElement eo) { return eo.getAttributeValue(MetadataSupport.mime_type_qname); } /** * * @param ro * @return */ public String getHome(OMElement ro) { return ro.getAttributeValue(MetadataSupport.home_qname); } /** * * @param ro * @return */ public String getLID(OMElement ro) { return ro.getAttributeValue(MetadataSupport.lid_qname); } /** * * @return */ public List<OMElement> getAssociations() { return associations; } /** * * @param i * @return */ public OMElement getAssociation(int i) { return getAssociations().get(i); } /** * * @return */ public List<String> getAssociationIds() { return this.getObjectIds(this.getAssociations()); } /** * * @return */ public List<String> getAssocReferences() { List<String> ids = new ArrayList<String>(); for (OMElement assoc : associations) { String obj1_id = assoc.getAttributeValue(MetadataSupport.source_object_qname); String obj2_id = assoc.getAttributeValue(MetadataSupport.target_object_qname); if (!listContains(ids, obj1_id)) { ids.add(obj1_id); } if (!listContains(ids, obj2_id)) { ids.add(obj2_id); } } return ids; } /** * * @param list * @param value * @return */ private boolean listContains(List<String> list, String value) { for (String val : list) { if (value.equals(val)) { return true; } } return false; } /** * * @param ele * @param slotName * @param slotValue */ private void addSlot(OMElement ele, String slotName, String slotValue) { OMElement slot = this.om_factory().createOMElement("Slot", getCurrentNamespace()); slot.addAttribute("name", slotName, null); OMElement valueList = this.om_factory().createOMElement("ValueList", getCurrentNamespace()); slot.addChild(valueList); OMElement value = this.om_factory().createOMElement("Value", getCurrentNamespace()); valueList.addChild(value); value.setText(slotValue); OMElement firstChild = ele.getFirstElement(); firstChild.insertSiblingBefore(slot); //ele.addChild(slot); //mustDup = true; } /** * * @param ele * @param slotName * @return */ private OMElement addSlot(OMElement ele, String slotName) { OMElement slot = this.om_factory().createOMElement("Slot", null); slot.addAttribute("name", slotName, null); OMElement valueList = this.om_factory().createOMElement("ValueList", null); slot.addChild(valueList); OMElement firstChild = ele.getFirstElement(); firstChild.insertSiblingBefore(slot); //ele.addChild(slot); //mustDup = true; return slot; } /** * * @param slot * @param value * @return */ private OMElement addSlotValue(OMElement slot, String value) { OMElement valueList = MetadataSupport.firstChildWithLocalName(slot, "ValueList"); OMElement valueEle = this.om_factory().createOMElement("Value", null); valueEle.setText(value); valueList.addChild(valueEle); //mustDup = true; return slot; } /** * * @param ele * @return */ private String id(OMElement ele) { return ele.getAttributeValue(MetadataSupport.id_qname); } /** * * @param ele * @param slotName * @param slotValue * @throws MetadataException */ public void setSlot(OMElement ele, String slotName, String slotValue) throws MetadataException { OMElement slot = getSlot(id(ele), slotName); if (slot == null) { addSlot(ele, slotName, slotValue); } else { OMElement valueList = MetadataSupport.firstChildWithLocalName(slot, "ValueList"); if (valueList == null) { throw new MetadataException("Slot without ValueList - slot name is " + slotName + " of object " + id(ele)); } for (Iterator<OMElement> it = valueList.getChildElements(); it.hasNext();) { OMElement v = it.next(); v.detach(); } OMElement value = 
MetadataSupport.om_factory.createOMElement("Value", getCurrentNamespace()); valueList.addChild(value); value.addChild(MetadataSupport.om_factory.createOMText(slotValue)); } } /** * * @param removeDups * @throws MetadataException * @throws MetadataValidationException */ private void reparse(boolean removeDups) throws MetadataException, MetadataValidationException { reinit(); reindex(); parse(removeDups); } /** * */ private void reinit() { registryPackages = null; objectsReferenced = null; objectsToDeprecate = null; wrappers = null; init(); } /** * */ private void init() { if (registryPackages == null) { associations = new ArrayList<OMElement>(); extrinsicObjects = new ArrayList<OMElement>(); registryPackages = new ArrayList<OMElement>(); submissionSets = new ArrayList<OMElement>(); objectRefs = new ArrayList<OMElement>(); folders = new ArrayList<OMElement>(); submissionSet = null; classifications = new ArrayList<OMElement>(); allObjects = new ArrayList<OMElement>(); classificationsOfId = new HashMap<String, List>(); //objects_to_deprecate = new ArrayList(); //objects_referenced = new ArrayList(); } } // referencedObjects are: // RegistryObjects referenced by Associations (sourceObject or targetObject) // That are not contained in the package that was parsed to create this instance of Metadata /** * * @return * @throws MetadataValidationException * @throws MetadataException */ public List<String> getReferencedObjects() throws MetadataValidationException, MetadataException { if (objectsReferenced == null) { this.objectsReferenced = new ArrayList<String>(); this.objectsToDeprecate = new ArrayList<String>(); for (OMElement association : associations) { //OMElement association = (OMElement) it.next(); String associationType = association.getAttributeValue(MetadataSupport.association_type_qname); String targetObject = association.getAttributeValue(MetadataSupport.target_object_qname); String sourceObject = association.getAttributeValue(MetadataSupport.source_object_qname); if (associationType == null) { throw new MetadataValidationException("Association has no associationType attribute"); } if (sourceObject == null) { throw new MetadataValidationException(associationType + " Association has no sourceObject attribute"); } if (targetObject == null) { throw new MetadataValidationException(associationType + " Association has no targetObject attribute"); } if (isUUID(sourceObject) && !containsObject(sourceObject)) { objectsReferenced.add(sourceObject); } if (isUUID(targetObject) && !containsObject(targetObject)) { objectsReferenced.add(targetObject); } if (MetadataSupport.xdsB_ihe_assoc_type_rplc.equals(associationType) || MetadataSupport.xdsB_ihe_assoc_type_xfrm_rplc.equals(associationType)) { if (!targetObject.startsWith("urn:uuid:")) { throw new MetadataValidationException("RPLC association has targetObject attribute which is not a UUID: " + targetObject); } this.objectsToDeprecate.add(targetObject); } } } return objectsReferenced; } /** * Return the patient identifier for the given OMElement. 
* * @param ele * @return * @throws MetadataException */ public String getPatientId(OMElement ele) throws MetadataException { if (ele == null) { return null; } String id = getId(ele); if (isDocument(id)) { return getExternalIdentifierValue(id, MetadataSupport.XDSDocumentEntry_patientid_uuid); } if (isSubmissionSet(id)) { return getExternalIdentifierValue(id, MetadataSupport.XDSSubmissionSet_patientid_uuid); } if (isFolder(id)) { return getExternalIdentifierValue(id, MetadataSupport.XDSFolder_patientid_uuid); } return null; } /** * * @param id * @return * @throws MetadataException */ public boolean containsObject(String id) throws MetadataException { OMElement ele = id_index().getObjectById(id); if (ele == null) { return false; } if (ele.getLocalName().equals("ObjectRef")) { return false; } return true; } /** * * @param objects * @return */ /* public List<String> idsForObjects(List<OMElement> objects) { List<String> ids = new ArrayList<String>(); for (OMElement ele : objects) { ids.add(ele.getAttributeValue(MetadataSupport.id_qname)); } return ids; }*/ /** * * @param id * @return */ private boolean isUUID(String id) { return id.startsWith("urn:uuid:"); } /** * * @return * @throws MetadataValidationException * @throws MetadataException */ public List<String> getReferencedObjectsThatMustHaveSamePatientId() throws MetadataValidationException, MetadataException { List<String> objects = new ArrayList<String>(); for (OMElement association : associations) { //OMElement association = (OMElement) it.next(); String associationType = association.getAttributeValue(MetadataSupport.association_type_qname); String targetObject = association.getAttributeValue(MetadataSupport.target_object_qname); String sourceObject = association.getAttributeValue(MetadataSupport.source_object_qname); if (associationType == null) { throw new MetadataValidationException("Association has no associationType attribute"); } if (sourceObject == null) { throw new MetadataValidationException(associationType + " Association has no sourceObject attribute"); } if (targetObject == null) { throw new MetadataValidationException(associationType + " Association has no targetObject attribute"); } if (MetadataSupport.xdsB_eb_assoc_type_has_member.equals(associationType) && "Reference".equals(getSlotValue(association, "SubmissionSetStatus", 0))) { continue; } if (sourceObject.startsWith("urn:uuid:") && id_index().getObjectById(sourceObject) == null) { objects.add(sourceObject); } if (targetObject.startsWith("urn:uuid:")) { OMElement o = id_index().getObjectById(targetObject); if (o == null) { objects.add(targetObject); } else if (o.getLocalName().equals("ObjectRef")) { objects.add(targetObject); } } } return objects; } /** * * @param id * @return * @throws MetadataValidationException * @throws MetadataException */ public boolean isReferencedObject(String id) throws MetadataValidationException, MetadataException { if (!id.startsWith("urn:uuid:")) { return false; } return getReferencedObjects().contains(id); } /** * * @return * @throws MetadataValidationException * @throws MetadataException */ public List<String> getDeprecatableObjectIds() throws MetadataValidationException, MetadataException { this.getReferencedObjects(); return objectsToDeprecate; } /** * * @param classifications */ private void add_to_classifications_of_id(List<OMElement> classifications) { for (OMElement classification : classifications) { add_to_classifications_of_id(classification); } } /** * * @param classification */ private void add_to_classifications_of_id(OMElement 
classification) { String id = classification.getAttributeValue(MetadataSupport.id_qname); List<OMElement> old = this.classificationsOfId.get(id); if (old == null) { old = new ArrayList<OMElement>(); classificationsOfId.put(id, old); } old.add(classification); } /** * * @param a * @return */ public OMElement addAssociation(OMElement a) { this.associations.add(a); this.allObjects.add(a); return a; } /** * * @param type * @param sourceUuid * @param targetUuid * @return */ public OMElement makeAssociation(String type, String sourceUuid, String targetUuid) { OMElement assoc = MetadataSupport.om_factory.createOMElement("Association", MetadataSupport.ebRIMns3); assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("associationType", null, type)); assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("sourceObject", null, sourceUuid)); assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("targetObject", null, targetUuid)); assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("id", null, allocate_id())); // Include this? assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("status", null, MetadataSupport.status_type_approved)); return assoc; } /** * * @param discard_duplicates * @throws MetadataException * @throws MetadataValidationException */ private void parse(boolean discard_duplicates) throws MetadataException, MetadataValidationException { init(); OMNamespace namespace = wrapper.getNamespace(); String namespace_uri = (namespace != null) ? namespace.getNamespaceURI() : ""; detect_metadata_version(namespace_uri); for (Iterator<OMElement> it = wrapper.getChildElements(); it.hasNext();) { OMElement obj = it.next(); String type = obj.getLocalName(); OMAttribute id_att = obj.getAttribute(MetadataSupport.id_qname); // obj has no id attribute - assign it one if (id_att == null) { String id = allocate_id(); id_att = obj.addAttribute("id", id, null); } else { String id = id_att.getAttributeValue(); if (id == null || id.equals("")) { id_att.setAttributeValue(allocate_id()); } } if (!discard_duplicates || !getObjectIds(allObjects).contains(id(obj))) { allObjects.add(obj); } add_to_classifications_of_id(findClassifications(obj)); if (type.equals("RegistryPackage")) { if (hasExternalIdentifier(obj, MetadataSupport.XDSSubmissionSet_uniqueid_uuid)) { if (!discard_duplicates || !getObjectIds(submissionSets).contains(id(obj))) { submissionSets.add(obj); } if (submissionSet != null && this.grok_metadata == true) { throw new MetadataException("Metadata: Submission has multiple SubmissionSets"); } submissionSet = obj; } else if (hasExternalIdentifier(obj, MetadataSupport.XDSFolder_uniqueid_uuid)) { if (!discard_duplicates || !getObjectIds(folders).contains(id(obj))) { folders.add(obj); } } if (!discard_duplicates || !getObjectIds(registryPackages).contains(id(obj))) { registryPackages.add(obj); } } else if (type.equals("ExtrinsicObject")) { if (!discard_duplicates || !getObjectIds(extrinsicObjects).contains(id(obj))) { extrinsicObjects.add(obj); } } else if (type.equals("ObjectRef")) { if (!discard_duplicates || !getObjectIds(objectRefs).contains(id(obj))) { objectRefs.add(obj); } } else if (type.equals("Classification")) { if (!discard_duplicates || !getObjectIds(classifications).contains(id(obj))) { classifications.add(obj); } add_to_classifications_of_id(obj); } else if (type.equals("Association")) { if (!discard_duplicates || !getObjectIds(associations).contains(id(obj))) { associations.add(obj); } } else { throw new MetadataException("Metadata: parse(): 
did not expect a " + type + " object at the top level"); } for (Iterator<OMElement> it1 = obj.getChildElements(); it1.hasNext();) { OMElement obj_i = it1.next(); String type_i = obj_i.getLocalName(); if (type_i.equals("Classification")) { if (!discard_duplicates || !getObjectIds(classifications).contains(id(obj_i))) { classifications.add(obj_i); } } } } if (grok_metadata && submissionSet == null) { throw new NoSubmissionSetException("Metadata: No Submission Set found"); } } /** * * @param namespace_uri * @throws MetadataException */ private void detect_metadata_version(String namespace_uri) throws MetadataException { // if this class later accepts v3 metadata as well we may have to worry about intermixing v2 and v3 if (namespace_uri.equals("urn:oasis:names:tc:ebxml-regrep:rim:xsd:2.1")) { version2 = true; } else if (namespace_uri.equals("urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1")) { version2 = true; } else if (namespace_uri.equals("urn:oasis:names:tc:ebxml-regrep:xsd:rim:3.0")) { version2 = false; } else if (namespace_uri.equals("urn:oasis:names:tc:ebxml-regrep:xsd:rs:3.0")) { version2 = false; } else { throw new MetadataException("Metadata.parse(): Cannot identify version of metadata from namespace " + namespace_uri); } } /** * * @param ele * @return */ public String getNameValue(OMElement ele) { OMElement name_ele = MetadataSupport.firstChildWithLocalName(ele, "Name"); if (name_ele == null) { return null; } OMElement loc_st = MetadataSupport.firstChildWithLocalName(name_ele, "LocalizedString"); if (loc_st == null) { return null; } return loc_st.getAttributeValue(MetadataSupport.value_qname); } /** * * @return */ public OMElement getSubmissionSet() { return submissionSet; } /** * * @return */ public String getSubmissionSetId() { OMElement ss = getSubmissionSet(); if (ss == null) { return ""; } return ss.getAttributeValue(MetadataSupport.id_qname); } /** * * @param ids * @return */ public List<OMElement> getAssociationsInclusive(List<String> ids) { List<OMElement> assocs = new ArrayList<OMElement>(); for (OMElement a : this.getAssociations()) { if (ids.contains(getAssocSource(a)) && ids.contains(getAssocTarget(a))) { assocs.add(a); } } return assocs; } /** * * @return */ public List<String> getSubmissionSetIds() { return this.getObjectIds(this.getSubmissionSets()); } /** * * @param id * @return */ public boolean isSubmissionSet(String id) { return getSubmissionSetIds().contains(id); } /** * * @param id * @return */ public boolean isFolder(String id) { return getFolderIds().contains(id); } /** * * @param id * @return */ public boolean isDocument(String id) { return this.getExtrinsicObjectIds().contains(id); } /** * Return true if all patient ids in metadata are equivalent. Otherwise return false. * @return Boolean value indicating result of verification. * @throws MetadataException */ public boolean isPatientIdConsistent() throws MetadataException { String patientID = null; for (OMElement ele : allObjects) { String pid = this.getPatientId(ele); if (patientID == null) { patientID = pid; // First go around. continue; } if (pid == null) { // No patient id on object. 
continue; } if (!patientID.equals(pid)) { return false; } } return true; } /** * * @return */ public List<OMElement> getFolders() { return folders; } /** * * @param parts * @return */ /* public List<String> getIds(List<OMElement> parts) { List<String> ids = new ArrayList<String>(); for (OMElement part : parts) { String id = part.getAttributeValue(MetadataSupport.id_qname); ids.add(id); } return ids; }*/ /** * * @return */ public List<String> getFolderIds() { return this.getObjectIds(this.getFolders()); } /** * * @param i * @return */ public OMElement getFolder(int i) { return getFolders().get(i); } /** * * @return * @throws MetadataException */ public List<String> getFolderUniqueIds() throws MetadataException { List<String> list = new ArrayList<String>(); List<String> ids = this.getFolderIds(); for (String id : ids) { String uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSFolder_uniqueid_uuid); if (uid == null || uid.equals("")) { throw new MetadataException("Folder " + id + " has no uniqueId"); } list.add(uid); } return list; } /** * * @return */ public List<String> getRegistryPackageIds() { return this.getObjectIds(this.getRegistryPackages()); } /** * * @param ele * @return */ public String getIdentifyingString(OMElement ele) { StringBuilder b = new StringBuilder(); b.append(ele.getLocalName()); OMElement name_ele = MetadataSupport.firstChildWithLocalName(ele, "Name"); if (name_ele != null) { OMElement loc = MetadataSupport.firstChildWithLocalName(name_ele, "LocalizedString"); if (loc != null) { String name = loc.getAttributeValue(new QName("value")); b.append(" Name=\"").append(name).append("\""); } } b.append(" id=\"").append(ele.getAttributeValue(MetadataSupport.id_qname)).append("\""); return "<" + b.toString() + ">"; } /** * * @param objects * @return */ public List<String> getObjectNames(List<OMElement> objects) { List<String> names = new ArrayList<String>(); for (OMElement obj : objects) { //OMElement obj = (OMElement) objects.get(i); names.add(obj.getLocalName()); } return names; } /** * * @return * @throws MetadataException */ private OMElement find_metadata_wrapper() throws MetadataException { if (metadata == null || metadata.getLocalName() == null) { throw new NoMetadataException("find_metadata_wrapper: Cannot find a wrapper element, top element is NULL" + ". 
A wrapper is one of the XML elements that holds metadata (ExtrinsicObject, RegistryPackage, Association etc.)"); } if (metadata.getLocalName().equals("TestResults")) { OMElement test_step = MetadataSupport.firstChildWithLocalName(metadata, "TestStep"); if (test_step != null) { OMElement sqt = MetadataSupport.firstChildWithLocalName(test_step, "StoredQueryTransaction"); if (sqt != null) { OMElement result = MetadataSupport.firstChildWithLocalName(sqt, "Result"); if (result != null) { OMElement ahqr = MetadataSupport.firstChildWithLocalName(result, "AdhocQueryResponse"); if (ahqr != null) { OMElement rol = MetadataSupport.firstChildWithLocalName(ahqr, "RegistryObjectList"); if (rol != null) { return rol; } } } } } } if (metadata.getLocalName().equals("LeafRegistryObjectList")) { return metadata; } if (metadata.getLocalName().equals("ProvideAndRegisterDocumentSetRequest")) { OMElement sor = MetadataSupport.firstChildWithLocalName(metadata, "SubmitObjectsRequest"); if (sor != null) { return MetadataSupport.firstChildWithLocalName(sor, "RegistryObjectList"); } } for (Iterator<OMElement> it = metadata.getChildElements(); it.hasNext();) { OMElement child = it.next(); if (child.getLocalName().equals("RegistryObjectList")) { return child; } if (child.getLocalName().equals("AdhocQueryResponse")) { OMElement achild = MetadataSupport.firstChildWithLocalName(child, "SQLQueryResult"); if (achild != null) { return achild; } } if (child.getLocalName().equals("LeafRegistryObjectList")) { return child; } } OMElement ele2 = find_metadata_wrapper2(metadata); if (ele2 != null) { return ele2; } throw new NoMetadataException("find_metadata_wrapper: Cannot find a wrapper element, top element is " + metadata.getLocalName() + ". A wrapper is one of the XML elements that holds metadata (ExtrinsicObject, RegistryPackage, Association etc.)"); } /** * * @param ele * @return * @throws MetadataException */ private OMElement find_metadata_wrapper2(OMElement ele) throws MetadataException { for (Iterator<OMElement> it = ele.getChildElements(); it.hasNext();) { OMElement e = it.next(); String name = e.getLocalName(); if (name == null) { continue; } if (name.equals("ObjectRef") || name.equals("ExtrinsicObject") || name.equals("RegistryPackage") || name.equals("Association") || name.equals("Classification")) { return ele; } OMElement e2 = find_metadata_wrapper2(e); if (e2 != null) { return e2; } } return null; } /** * * @return */ public OMElement getWrapper() { return wrapper; } /** * * @param registry_object * @param slot_name * @return */ public OMElement findSlot(OMElement registry_object, String slot_name) { for (Iterator<OMElement> it = registry_object.getChildElements(); it.hasNext();) { OMElement s = it.next(); if (!s.getLocalName().equals("Slot")) { continue; } String val = s.getAttributeValue(MetadataSupport.slot_name_qname); if (val != null && val.equals(slot_name)) { return s; } } return null; } /** * * @param registry_object * @param classificationScheme * @return */ public List<OMElement> findClassifications(OMElement registry_object, String classificationScheme) { List<OMElement> cl = new ArrayList<OMElement>(); for (Iterator<OMElement> it = registry_object.getChildElements(); it.hasNext();) { OMElement s = it.next(); if (!s.getLocalName().equals("Classification")) { continue; } String val = s.getAttributeValue(MetadataSupport.classificationscheme_qname); if (val != null && val.equals(classificationScheme)) { cl.add(s); } } return cl; } /** * * @param registry_object * @return */ public List<OMElement> 
findClassifications(OMElement registry_object) { List<OMElement> cl = new ArrayList<OMElement>(); for (Iterator<OMElement> it = registry_object.getChildElements(); it.hasNext();) { OMElement s = it.next(); if (!s.getLocalName().equals("Classification")) { continue; } cl.add(s); } return cl; } /** * * @param registry_object * @param element_name * @return */ public List<OMElement> findChildElements(OMElement registry_object, String element_name) { List<OMElement> al = new ArrayList<OMElement>(); for (Iterator<OMElement> it = registry_object.getChildElements(); it.hasNext();) { OMElement s = it.next(); if (s.getLocalName().equals(element_name)) { al.add(s); } } return al; } /** * * @param registryObjects * @return */ public List<String> getObjectIds(List<OMElement> registryObjects) { List<String> ids = new ArrayList<String>(); for (OMElement registryObject : registryObjects) { ids.add(registryObject.getAttributeValue(MetadataSupport.id_qname)); } return ids; } /** * * @param registryObjects * @param version2 * @return */ public List<OMElement> getObjectRefs(List<OMElement> registryObjects, boolean version2) { List<OMElement> ors = new ArrayList<OMElement>(); for (OMElement ele : registryObjects) { //OMElement ele = registryObjects.get(i); String id = ele.getAttributeValue(MetadataSupport.id_qname); OMElement or = MetadataSupport.om_factory.createOMElement("ObjectRef", (version2) ? MetadataSupport.ebRIMns2 : MetadataSupport.ebRIMns3); or.addAttribute("id", id, null); or.addAttribute("home", "", null); ors.add(or); } return ors; } /** * * @return */ public List<OMElement> getClassifications() { return classifications; } /* * by ID */ private IdIndex id_index() throws MetadataException { if (idIndex == null) { idIndex = new IdIndex(this); // System.out.println("Metadata indexed: \n" + id_index.toString()); } return idIndex; } /** * * @param log_message * @return * @throws MetadataException */ /* private IdIndex id_index(XLogMessage log_message) throws MetadataException { if (idIndex == null) { idIndex = new IdIndex(); idIndex.setLogMessage(log_message); idIndex.setMetadata(this); } return idIndex; }*/ /** * * @param object_id * @return * @throws MetadataException */ public String getNameValue(String object_id) throws MetadataException { return id_index().getNameValue(object_id); } /** * * @param object_id * @return * @throws MetadataException */ public String getDescriptionValue(String object_id) throws MetadataException { return id_index().getDescriptionValue(object_id); } /** * * @param object_id * @return * @throws MetadataException */ public List<OMElement> getSlots(String object_id) throws MetadataException { return id_index().getSlots(object_id); } /** * * @param object_id * @param name * @return * @throws MetadataException */ public OMElement getSlot(String object_id, String name) throws MetadataException { return id_index().getSlot(object_id, name); } /** * * @param object_id * @param name * @throws MetadataException */ public void removeSlot(String object_id, String name) throws MetadataException { OMElement slot = getSlot(object_id, name); if (slot != null) { slot.detach(); reindex(); } } /** * * @return */ public String getMetadataDescription() { StringBuilder buf = new StringBuilder(); buf.append(this.getSubmissionSets().size()).append(" SubmissionSets\n"); buf.append(this.getExtrinsicObjects().size()).append(" DocumentEntries\n"); buf.append(this.getFolders().size()).append(" Folders\n"); buf.append(this.getAssociations().size()).append(" Associations\n"); 
buf.append(this.getObjectRefs().size()).append(" ObjectRefs\n"); return buf.toString(); } /** * * @throws MetadataException */ public void fixClassifications() throws MetadataException { List<String> rpIds = getRegistryPackageIds(); for (String id : rpIds) { //String id = rpIds.get(i); List<OMElement> classifications = getClassifications(id); for (OMElement classification : classifications) { if (classification.getAttribute(MetadataSupport.noderepresentation_qname) != null) { continue; } // not a code - must be classification of RP as SS or Fol, make sure // classificationNode is present if (classification.getAttribute(MetadataSupport.classificationnode_qname) == null) { // add classification, first figure out if this is SS or Fol if (isSubmissionSet(id)) { classification.addAttribute("classificationNode", MetadataSupport.XDSSubmissionSet_classification_uuid, null); } else { classification.addAttribute("classificationNode", MetadataSupport.XDSFolder_classification_uuid, null); } } } } } /** * * @param object_id * @return * @throws MetadataException */ public List<OMElement> getClassifications(String object_id) throws MetadataException { return id_index().getClassifications(object_id); } /** * * @param object * @return * @throws MetadataException */ public List<OMElement> getClassifications(OMElement object) throws MetadataException { return id_index().getClassifications(this.getId(object)); } /** * * @param object * @param classification_scheme * @return * @throws MetadataException */ public List<OMElement> getClassifications(OMElement object, String classification_scheme) throws MetadataException { return getClassifications(this.getId(object), classification_scheme); } /** * * @param classification * @return */ public String getClassificationValue(OMElement classification) { return classification.getAttributeValue(MetadataSupport.noderepresentation_qname); } /** * * @param classification * @return */ public String getClassificationScheme(OMElement classification) { return getSlotValue(classification, "codingScheme", 0); } /** * * @param object * @param classification_scheme * @return * @throws MetadataException */ public List<String> getClassificationsValues(OMElement object, String classification_scheme) throws MetadataException { List<OMElement> classes = getClassifications(object, classification_scheme); List<String> values = new ArrayList<String>(); for (OMElement e : classes) { values.add(e.getAttributeValue(MetadataSupport.noderepresentation_qname)); } return values; } /** * * @param id * @param classification_scheme * @return * @throws MetadataException */ public List<String> getClassificationsValues(String id, String classification_scheme) throws MetadataException { return getClassificationsValues(this.getObjectById(id), classification_scheme); } /** * * @param id * @param classification_scheme * @return * @throws MetadataException */ public List<OMElement> getClassifications(String id, String classification_scheme) throws MetadataException { List<OMElement> cls = getClassifications(id); List<OMElement> cls_2 = new ArrayList<OMElement>(); for (OMElement cl : cls) { String cl_scheme = cl.getAttributeValue(MetadataSupport.classificationscheme_qname); if (cl_scheme != null && cl_scheme.equals(classification_scheme)) { cls_2.add(cl); } } return cls_2; } /** * * @param object_id * @return * @throws MetadataException */ public List<OMElement> getExternalIdentifiers(String object_id) throws MetadataException { return id_index().getExternalIdentifiers(object_id); } /** * * @param 
object_id * @param identifier_scheme * @return * @throws MetadataException */ public String getExternalIdentifierValue(String object_id, String identifier_scheme) throws MetadataException { return id_index().getExternalIdentifierValue(object_id, identifier_scheme); } /** * Updates all folders "lastUpdateTime" slot with the current time. * * @throws MetadataException */ public void updateFoldersLastUpdateTimeSlot() throws MetadataException { List<OMElement> folderList = this.folders; // Set XDSFolder.lastUpdateTime if ((folderList != null) && (!folderList.isEmpty())) { String timestamp = Hl7Date.now(); for (OMElement fol : folderList) { this.setSlot(fol, "lastUpdateTime", timestamp); } } } /** * * @param id * @return * @throws MetadataException */ public String getUniqueIdValue(String id) throws MetadataException { String uid; uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSDocumentEntry_uniqueid_uuid); if (uid != null && !uid.equals("")) { return uid; } uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSSubmissionSet_uniqueid_uuid); if (uid != null && !uid.equals("")) { return uid; } uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSFolder_uniqueid_uuid); if (uid != null && !uid.equals("")) { return uid; } return null; } /** * * @return * @throws MetadataException */ public List<String> getAllUids() throws MetadataException { List<String> all_ids = this.getAllDefinedIds(); List<String> all_uids = new ArrayList<String>(); for (String id : all_ids) { String uid = getUniqueIdValue(id); if (uid != null) { all_uids.add(uid); } } return all_uids; } /** * * @return */ public List<OMElement> getAllLeafClasses() { List<OMElement> lc = new ArrayList<OMElement>(); lc.addAll(extrinsicObjects); lc.addAll(registryPackages); lc.addAll(associations); return lc; } /** * * @param map * @param uid * @param hash */ private void addToUidHashMap(Map<String, List<String>> map, String uid, String hash) { if (uid == null) { return; } List<String> hash_list = map.get(uid); if (hash_list == null) { hash_list = new ArrayList<String>(); map.put(uid, hash_list); } hash_list.add(hash); } // get map of uid ==> ArrayList of hashes // for folder and ss, hash is null // Some docs may not have a hash either, depending on where this use used /** * * @return * @throws MetadataException */ public Map<String, List<String>> getUidHashMap() throws MetadataException { Map<String, List<String>> hm = new HashMap<String, List<String>>(); List<String> ids; ids = this.getExtrinsicObjectIds(); for (String id : ids) { OMElement registry_object = this.getObjectById(id); String uid; List<OMElement> eis = this.getExternalIdentifiers(id); List<OMElement> eid_eles = this.getExternalIdentifiers(registry_object, MetadataSupport.XDSDocumentEntry_uniqueid_uuid); if (eid_eles.size() > 0) { uid = eid_eles.get(0).getAttributeValue(MetadataSupport.value_qname); } else { throw new MetadataException("Metadata.getUidHashMap(): Doc " + id + " has no uniqueId\nfound " + eis.size() + " external identifiers"); } String hash = this.getSlotValue(id, "hash", 0); if (hash != null && hash.equals("")) { hash = null; } addToUidHashMap(hm, uid, hash); } ids = this.getSubmissionSetIds(); for (String id : ids) { String uid; uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSSubmissionSet_uniqueid_uuid); if (uid == null || uid.equals("")) { throw new MetadataException("Metadata.getUidHashMap(): SS " + id + " has no uniqueId"); } addToUidHashMap(hm, uid, null); } ids = this.getFolderIds(); for (String 
id : ids) { String uid; uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSFolder_uniqueid_uuid); if (uid == null || uid.equals("")) { throw new MetadataException("Metadata.getUidHashMap(): Fol " + id + " has no uniqueId"); } addToUidHashMap(hm, uid, null); } return hm; } /** * * @return * @throws MetadataException */ public String getSubmissionSetUniqueId() throws MetadataException { return id_index().getSubmissionSetUniqueId(); } /** * * @param m * @return * @throws MetadataException */ public List<String> getSubmissionSetUniqueIds() throws MetadataException { List<String> list = new ArrayList<String>(); List<String> ids = this.getSubmissionSetIds(); for (String id : ids) { String uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSSubmissionSet_uniqueid_uuid); if (uid == null || uid.equals("")) { throw new MetadataException("Submission Set " + id + " has no uniqueId"); } list.add(uid); } return list; } /** * * @return * @throws MetadataException */ public String getSubmissionSetPatientId() throws MetadataException { return id_index().getSubmissionSetPatientId(); } /** * * @param id * @return * @throws MetadataException */ public OMElement getObjectById(String id) throws MetadataException { return id_index().getObjectById(id); } /** * * @param id * @return * @throws MetadataException */ public String getIdentifyingString(String id) throws MetadataException { return id_index().getIdentifyingString(id); } /** * * @param id * @return * @throws MetadataException */ public String getObjectTypeById(String id) throws MetadataException { return id_index().getObjectTypeById(id); } /** * * @return * @throws MetadataException */ public String getSubmissionSetSourceId() throws MetadataException { return id_index().getSubmissionSetSourceId(); } /** * * @return * @throws XdsInternalException */ public OMElement getV3SubmitObjectsRequest() throws XdsInternalException { //OMNamespace rs = MetadataSupport.ebRSns3; OMNamespace lcm = MetadataSupport.ebLcm3; OMNamespace rim = MetadataSupport.ebRIMns3; OMElement sor = this.om_factory().createOMElement("SubmitObjectsRequest", lcm); OMElement lrol = this.om_factory().createOMElement("RegistryObjectList", rim); sor.addChild(lrol); for (OMElement obj : allObjects) { lrol.addChild(obj); } /* ArrayList objects = this.getV3(); for (int i = 0; i < objects.size(); i++) { OMElement ele = (OMElement) objects.get(i); lrol.addChild(ele); }*/ return sor; } /** * * @param value * @return */ public String stripNamespace(String value) { if (value == null) { return null; } if (value.indexOf(":") == -1) { return value; } String[] parts = value.split(":"); return parts[parts.length - 1]; } /** * * @param value * @return */ public boolean hasNamespace(String value) { if (value.indexOf(":") == -1) { return false; } return true; } /** * * @param value * @param namespace * @return */ public String addNamespace(String value, String namespace) { if (hasNamespace(value)) { return value; } if (namespace.endsWith(":")) { return namespace + value; } return namespace + ":" + value; } /** * * @param objects * @return * @throws MetadataException */ public Map<String, OMElement> getUidMap(List<OMElement> objects) throws MetadataException { Map<String, OMElement> map = new HashMap<String, OMElement>(); // uid -> OMElement for (OMElement non_ref : objects) { String non_ref_id = this.getId(non_ref); String a_uid = this.getUniqueIdValue(non_ref_id); if (a_uid != null) { map.put(a_uid, non_ref); } } return map; } /** * * @return * @throws MetadataException */ public 
Map<String, OMElement> getUidMap() throws MetadataException { return getUidMap(this.getNonObjectRefs()); } /** * * @param id * @return * @throws MetadataException */ public boolean isRetrievable_a(String id) throws MetadataException { String uri = this.getSlotValue(id, "URI", 0); return uri != null; } /** * * @param id * @return * @throws MetadataException */ public boolean isRetrievable_b(String id) throws MetadataException { String uid = this.getSlotValue(id, "repositoryUniqueId", 0); return uid != null; } /** * * @param eo * @return */ private boolean isURIExtendedFormat(OMElement eo) { String uri = getSlotValue(eo, "URI", 0); String uri2 = getSlotValue(eo, "URI", 1); if (uri == null) { return false; } if (uri2 != null) { return true; } String[] parts = uri.split("\\|"); return (parts.length >= 2); } /** * * @param eo * @return * @throws MetadataException */ public String getURIAttribute(OMElement eo) throws MetadataException { String eoId = getId(eo); String value = null; if (!isURIExtendedFormat(eo)) { value = getSlotValue(eo, "URI", 0); } else { HashMap<String, String> map = new HashMap<String, String>(); for (int i = 0; i < 1000; i++) { String slotValue = getSlotValue(eoId, "URI", i); if (slotValue == null) { break; } String[] parts = slotValue.split("\\|"); if (parts.length != 2 || parts[0].length() == 0) { throw new MetadataException("URI value does not parse: " + slotValue + " must be num|string format"); } map.put(parts[0], parts[1]); } StringBuilder buf = new StringBuilder(); int i = 1; for (;; i++) { String iStr = String.valueOf(i); String part = map.get(iStr); if (part == null) { break; } buf.append(part); } if (map.size() != i - 1) { throw new MetadataException("URI value does not parse: index " + i + " not found but Slot has " + map.size() + " values. Slot is\n" + getSlot(eoId, "URI").toString()); } value = buf.toString(); } if (value == null) { return null; } if (!value.startsWith("http://") && !value.startsWith("https://")) { throw new MetadataException("URI must have http:// or https:// prefix. URI was calculated to be\n" + value + "\nand original slot is\n" + getSlot(eoId, "URI").toString()); } return value; } int uriChunkSize = 100; /** * * @param size */ /** public void setUriChunkSize(int size) { uriChunkSize = size; }*/ /** * * @param a * @param b * @return */ private int min(int a, int b) { if (a < b) { return a; } return b; } /** * * @param eo * @param uri */ public void setURIAttribute(OMElement eo, String uri) { try { removeSlot(this.getId(eo), "URI"); } catch (MetadataException e) { } if (uri.length() < uriChunkSize) { addSlot(eo, "URI", uri); return; } OMElement slot = addSlot(eo, "URI"); StringBuilder buf = new StringBuilder(); int chunkIndex = 1; int uriSize = uri.length(); int strStart = 0; int strEnd = min(uriChunkSize, uriSize); while (true) { buf.setLength(0); buf.append(String.valueOf(chunkIndex++)).append("|").append(uri.substring(strStart, strEnd)); addSlotValue(slot, buf.toString()); if (strEnd == uriSize) { break; } strStart = strEnd; strEnd = min(strStart + uriChunkSize, uriSize); } } }
src/xutil/src/com/vangent/hieos/xutil/metadata/structure/Metadata.java
/* * This code is subject to the HIEOS License, Version 1.0 * * Copyright(c) 2008-2009 Vangent, Inc. All rights reserved. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package com.vangent.hieos.xutil.metadata.structure; import com.vangent.hieos.xutil.exception.MetadataException; import com.vangent.hieos.xutil.exception.MetadataValidationException; import com.vangent.hieos.xutil.exception.NoMetadataException; import com.vangent.hieos.xutil.exception.NoSubmissionSetException; import com.vangent.hieos.xutil.exception.XdsInternalException; import com.vangent.hieos.xutil.xml.Util; import com.vangent.hieos.xutil.xlog.client.XLogMessage; import com.vangent.hieos.xutil.hl7.date.Hl7Date; import com.vangent.hieos.xutil.xml.XMLParser; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import javax.xml.namespace.QName; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMAttribute; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.OMNamespace; import org.apache.log4j.Logger; /** * * @author thumbe */ public class Metadata { /** * */ protected OMFactory fac; boolean grok_metadata = true; private final static Logger logger = Logger.getLogger(Metadata.class); /** * List of IHE association types: */ public static final List<String> iheAssocTypes = new ArrayList<String>() { { add("APND"); add("XFRM"); add("RPLC"); add("XFRM_RPLC"); add("signs"); } }; OMElement metadata; // wrapper OMElement wrapper; // current metadata document being parsed ArrayList<OMElement> wrappers; // the collection of all metadata documents included in current tables ArrayList<OMElement> extrinsicObjects = null; ArrayList<OMElement> folders = null; OMElement submissionSet = null; ArrayList<OMElement> submissionSets = null; ArrayList<OMElement> registryPackages = null; ArrayList<OMElement> associations = null; ArrayList<OMElement> objectRefs = null; ArrayList<OMElement> classifications = null; ArrayList<OMElement> allObjects = null; ArrayList objectsToDeprecate = null; ArrayList objectsReferenced = null; HashMap<String, ArrayList> classificationsOfId = null; boolean version2; OMElement metadataDup = null; // both of these are set by dup_wrapper which is used by metadata_copy OMElement wrapperDup = null; //boolean mustDup = false; int idAllocation = 0; IdIndex idIndex = null; /** * * @param metadata * @throws MetadataException * @throws MetadataValidationException */ public Metadata(OMElement metadata) throws MetadataException, MetadataValidationException { this.metadata = metadata; runParser(); } /** * * @param metadata_file * @param parse * @throws XdsInternalException * @throws MetadataException * @throws MetadataValidationException */ public Metadata(File metadata_file, boolean parse) throws XdsInternalException, MetadataException, MetadataValidationException { metadata = XMLParser.fileToOM(metadata_file); wrapper = null; wrappers = new ArrayList(); if (parse) { wrapper = find_metadata_wrapper(); wrappers.add(wrapper); parse(false); } else { init(); } } /** * */ public Metadata() { init(); this.setGrokMetadata(false); } /** * * @param obj * @param previousVersion */ // FIXME: May want to rework and not require 
passing "previousVersion". public static void updateRegistryObjectVersion(OMElement obj, String previousVersion) { // Get version //<rim:VersionInfo versionName="1" /> OMElement versionInfoEle = MetadataSupport.firstChildWithLocalName(obj, "VersionInfo"); if (versionInfoEle == null) { // No version info exists, create one. versionInfoEle = MetadataSupport.om_factory.createOMElement("VersionInfo", MetadataSupport.ebRIMns3); // Attach to the version info object (before first Classification). OMElement classificationEle = MetadataSupport.firstChildWithLocalName(obj, "Classification"); classificationEle.insertSiblingBefore(versionInfoEle); //targetObject.addChild(versionInfoEle); } Double nextVersion = new Double(previousVersion) + 1.0; OMAttribute versionNameAttr = versionInfoEle.getAttribute(new QName("versionName")); if (versionNameAttr == null) { versionInfoEle.addAttribute("versionName", nextVersion.toString(), null); } else { versionNameAttr.setAttributeValue(nextVersion.toString()); } } /** * * @param obj * @return */ public static Double getRegistryObjectVersion(OMElement obj) { // Get version OMElement versionInfoEle = MetadataSupport.firstChildWithLocalName(obj, "VersionInfo"); if (versionInfoEle == null) { return 1.0; // Default. } OMAttribute versionNameAttr = versionInfoEle.getAttribute(new QName("versionName")); if (versionNameAttr == null) { return 1.0; // Default. } else { String versionNameText = versionNameAttr.getAttributeValue(); return new Double(versionNameText); } } /** * * @return */ private String allocate_id() { idAllocation += 1; return ("ID_" + String.valueOf(this.hashCode()) + "_" + idAllocation); } /** * */ public void removeDuplicates() { removeDuplicates(extrinsicObjects); removeDuplicates(folders); removeDuplicates(submissionSets); removeDuplicates(registryPackages); removeDuplicates(associations); removeDuplicates(objectRefs); removeDuplicates(classifications); removeFromObjectRefs(extrinsicObjects); removeFromObjectRefs(registryPackages); removeFromObjectRefs(associations); removeFromObjectRefs(classifications); allObjects = new ArrayList<OMElement>(); allObjects.addAll(extrinsicObjects); allObjects.addAll(registryPackages); allObjects.addAll(associations); allObjects.addAll(classifications); allObjects.addAll(objectRefs); } /** * * @param set */ private void removeDuplicates(ArrayList<OMElement> set) { boolean running = true; while (running) { running = false; for (int targetI = 0; targetI < set.size(); targetI++) { OMElement target = set.get(targetI); String targetId = id(target); for (int i = targetI + 1; i < set.size(); i++) { OMElement it = set.get(i); if (targetId.equals(id(it))) { set.remove(i); running = true; break; } } } } } /** * * @param set */ private void removeFromObjectRefs(ArrayList<OMElement> set) { for (int i = 0; i < set.size(); i++) { String id = id(set.get(i)); boolean restart = true; while (restart) { restart = false; for (int j = 0; j < objectRefs.size(); j++) { if (id.equals(id(objectRefs.get(j)))) { objectRefs.remove(j); restart = true; break; } } } } } /** * Return true if the metadata only includes object references. Otherwise, return false. * * @return Boolean result. */ public boolean isObjectRefsOnly() { return submissionSets.isEmpty() && extrinsicObjects.isEmpty() && folders.isEmpty() && associations.isEmpty() && classifications.isEmpty() && !objectRefs.isEmpty(); } /** * Return list of approvable object (extrinsicObjects and registryPackages) identifiers. * * @return The list of approvable objects. 
*/ public ArrayList getApprovableObjectIds() { ArrayList o = new ArrayList(); o.addAll(this.extrinsicObjects); o.addAll(this.registryPackages); return this.getObjectIds(o); } /** * Go through all objects in metadata and only maintain those objects that contain * an id in the passed in list. * * @param ids The list of ids to maintain in the metadata. */ public void filter(ArrayList<String> ids) { submissionSets = filter(submissionSets, ids); extrinsicObjects = filter(extrinsicObjects, ids); folders = filter(folders, ids); associations = filter(associations, ids); allObjects = filter(allObjects, ids); } /** * * @param objects * @param ids * @return */ private ArrayList<OMElement> filter(ArrayList<OMElement> objects, ArrayList<String> ids) { ArrayList<OMElement> out = new ArrayList<OMElement>(); for (OMElement object : objects) { String id = id(object); if (ids.contains(id)) { out.add(object); } } return out; } /** * * @return */ public ArrayList<OMElement> getNonObjectRefs() { ArrayList<OMElement> objs = new ArrayList<OMElement>(); objs.addAll(this.submissionSets); objs.addAll(this.folders); objs.addAll(this.extrinsicObjects); objs.addAll(this.associations); objs.addAll(this.classifications); return objs; } /** * * @return */ private ArrayList<ArrayList<OMElement>> getMetadataContainers() { ArrayList<ArrayList<OMElement>> containers = new ArrayList<ArrayList<OMElement>>(); containers.add(extrinsicObjects); containers.add(folders); containers.add(submissionSets); containers.add(associations); containers.add(objectRefs); containers.add(classifications); containers.add(allObjects); containers.add(registryPackages); return containers; } /** * * @return */ private OMNamespace getCurrentNamespace() { if (version2) { return MetadataSupport.ebRIMns2; } return MetadataSupport.ebRIMns3; } /** * * @param x */ public void setGrokMetadata(boolean x) { grok_metadata = x; } /** * * @param metadata */ public void setMetadata(OMElement metadata) { this.metadata = metadata; init(); } /** * * @throws MetadataException * @throws MetadataValidationException */ public void runParser() throws MetadataException, MetadataValidationException { wrapper = find_metadata_wrapper(); if (wrappers == null) { wrappers = new ArrayList<OMElement>(); } wrappers.add(wrapper); parse(false); } /** * * @param m * @throws MetadataException * @throws MetadataValidationException */ public void addMetadata(Metadata m) throws MetadataException, MetadataValidationException { if (m.getRoot() != null) { addMetadata(m.getRoot(), false); } } /** * * @param m * @param discard_duplicates * @throws MetadataException * @throws MetadataValidationException */ public void addMetadata(Metadata m, boolean discard_duplicates) throws MetadataException, MetadataValidationException { addMetadata(m.getRoot(), discard_duplicates); } /** * * @param metadata * @throws MetadataException * @throws MetadataValidationException */ public void addMetadata(OMElement metadata) throws MetadataException, MetadataValidationException { addMetadata(metadata, false); } /** * * @param metadata * @param discard_duplicates * @throws MetadataException * @throws MetadataValidationException */ public void addMetadata(OMElement metadata, boolean discard_duplicates) throws MetadataException, MetadataValidationException { init(); if (wrappers == null) { wrappers = new ArrayList(); } this.metadata = metadata; wrapper = find_metadata_wrapper(); wrappers.add(wrapper); reindex(); parse(discard_duplicates); } /** * Add to metadata collection. If collection empty then initialize it. 
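* <p>Illustrative sketch, not part of the original source (the list variable is an
* assumption; exception handling is elided):
* <pre>
*   Metadata m = new Metadata();
*   m.addToMetadata(registryObjects, true);  // true: discard objects with duplicate ids
* </pre>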
* @param metadata - a collection of metadata objects. Will be wrapped internally (made into * single XML document) * @param discard_duplicates * @return * @throws XdsInternalException * @throws MetadataValidationException * @throws MetadataException */ public Metadata addToMetadata(List<OMElement> metadata, boolean discard_duplicates) throws XdsInternalException, MetadataException, MetadataValidationException { for (OMElement ele : metadata) { addToMetadata(ele, discard_duplicates, false); } parse(discard_duplicates); return this; } /** * * @param new_metadata * @param discard_duplicates * @param run_parse * @return * @throws XdsInternalException * @throws MetadataException * @throws MetadataValidationException */ private Metadata addToMetadata(OMElement new_metadata, boolean discard_duplicates, boolean run_parse) throws XdsInternalException, MetadataException, MetadataValidationException { boolean hasExistingData = false; if (wrapper == null) { wrapper = makeWrapper(); metadata = new_metadata; if (wrappers == null) { wrappers = new ArrayList<OMElement>(); } wrappers.add(wrapper); } else { hasExistingData = true; } wrapper.addChild(Util.deep_copy(new_metadata)); if (run_parse) { if (hasExistingData) { reparse(discard_duplicates); } else { parse(discard_duplicates); } } return this; } /** * * @return */ private OMElement makeWrapper() { return MetadataSupport.om_factory.createOMElement("root", MetadataSupport.ebRIMns3); } /** * */ public void clearLeafClassObjects() { registryPackages.clear(); submissionSet = null; submissionSets.clear(); extrinsicObjects.clear(); associations.clear(); reindex(); } /** * */ public void clearObjectRefs() { objectRefs = new ArrayList<OMElement>(); } /** * */ private void reindex() { this.idIndex = null; // lazy } /** * * @param eos */ public void addExtrinsicObjects(List<OMElement> eos) { extrinsicObjects.addAll(eos); allObjects.addAll(eos); } /** * * @param object_refs_or_ids * @throws MetadataException */ @SuppressWarnings("unchecked") public void addObjectRefs(List<?> object_refs_or_ids) throws MetadataException { if (object_refs_or_ids.isEmpty()) { return; } Object ele = object_refs_or_ids.get(0); if (ele instanceof OMElement) { objectRefs.addAll((List<OMElement>) object_refs_or_ids); } else if (ele instanceof String) { this.makeObjectRefs((List<String>) object_refs_or_ids); } else { throw new MetadataException("Don't understand format " + ele.getClass().getName()); } } /** * * @param ids */ public void makeObjectRefs(List<String> ids) { for (String id : ids) { makeObjectRef(id); } } /** * * @param id */ private void makeObjectRef(String id) { OMElement objRef = MetadataSupport.om_factory.createOMElement(MetadataSupport.object_ref_qname); objRef.addAttribute("id", id, null); objectRefs.add(objRef); } /** * * @return * @throws XdsInternalException * @throws MetadataException * @throws MetadataValidationException */ public Metadata makeClone() throws XdsInternalException, MetadataException, MetadataValidationException { Metadata m = new Metadata(); if (wrappers != null) { if (m.wrappers == null) { m.wrappers = new ArrayList<OMElement>(); } for (OMElement ele : wrappers) { m.wrappers.add(ele); } } m.extrinsicObjects.addAll(extrinsicObjects); m.folders.addAll(folders); m.submissionSets.addAll(submissionSets); m.registryPackages.addAll(registryPackages); m.associations.addAll(associations); m.objectRefs.addAll(objectRefs); m.classifications.addAll(classifications); m.allObjects.addAll(allObjects); m.submissionSet = submissionSet; return m; } /** * * @return */ 
public List<OMElement> getLeafClassObjects() { List<OMElement> objs = new ArrayList<OMElement>(); objs.addAll(registryPackages); objs.addAll(extrinsicObjects); objs.addAll(associations); return objs; } /** * * @return */ public boolean isVersion2() { return version2; } /** * * @return */ public OMFactory om_factory() { if (fac == null) { fac = OMAbstractFactory.getOMFactory(); } return fac; } /** * * @return */ public OMElement getRoot() { return metadata; } /** * Return a string that can be used for debugging purposes. It lists the size * of each metadata element. * * @return String representing structure of the metadata. */ public String structure() { return this.getSubmissionSetIds().size() + " SS + " + this.extrinsicObjects.size() + " EO + " + this.folders.size() + " Fol + " + this.associations.size() + " A + " + this.objectRefs.size() + " OR"; } /** * * @param registryObject * @param idScheme * @return */ public ArrayList<OMElement> getExternalIdentifiers(OMElement registryObject, String idScheme) { ArrayList<OMElement> results = new ArrayList<OMElement>(); QName idSchemeQName = MetadataSupport.identificationscheme_qname; for (Iterator it = registryObject.getChildElements(); it.hasNext();) { OMElement ele = (OMElement) it.next(); if (!ele.getLocalName().equals("ExternalIdentifier")) { continue; } String elementIdScheme = ele.getAttributeValue(idSchemeQName); if (idScheme == null || idScheme.equals(elementIdScheme)) { results.add(ele); } } return results; } /** * * @param registryObject * @param idScheme * @return */ private boolean hasExternalIdentifier(OMElement registryObject, String idScheme) { QName idSchemeQName = new QName("identificationScheme"); for (Iterator it = registryObject.getChildElements(); it.hasNext();) { OMElement ele = (OMElement) it.next(); if (!ele.getLocalName().equals("ExternalIdentifier")) { continue; } String elementIdScheme = ele.getAttributeValue(idSchemeQName); if (idScheme.equals(elementIdScheme)) { return true; } } return false; } /** * Return true if the slot name exists for the given object. Otherwise, return false. * * @param registryObject The registry object in question. * @param slotName The name of the slot. * @return true if the slot name exists for the object, otherwise false. 
*/ public boolean hasSlot(OMElement registryObject, String slotName) { if (registryObject == null) { return false; } for (OMElement slot : MetadataSupport.childrenWithLocalName(registryObject, "Slot")) { String name = slot.getAttributeValue(MetadataSupport.slot_name_qname); if (name.equals(slotName)) { return true; } } return false; } /** * * @param registryObject * @param slotName * @param valueIndex * @return */ public String getSlotValue(OMElement registryObject, String slotName, int valueIndex) { if (registryObject == null) { return null; } for (OMElement slot : MetadataSupport.childrenWithLocalName(registryObject, "Slot")) { String name = slot.getAttributeValue(MetadataSupport.slot_name_qname); if (!name.equals(slotName)) { continue; } OMElement valueList = MetadataSupport.firstChildWithLocalName(slot, "ValueList"); if (valueList == null) { continue; } int valueCount = 0; for (OMElement valueElement : MetadataSupport.childrenWithLocalName(valueList, "Value")) { if (valueCount != valueIndex) { valueCount++; continue; } return valueElement.getText(); } } return null; } /** * * @param id * @param slot_name * @param value_index * @return * @throws MetadataException */ public String getSlotValue(String id, String slot_name, int value_index) throws MetadataException { return getSlotValue(getObjectById(id), slot_name, value_index); } /** * * @param obj * @param slot_name * @param value_index * @param value */ public void setSlotValue(OMElement obj, String slot_name, int value_index, String value) { if (obj == null) { return; } for (OMElement slot : MetadataSupport.childrenWithLocalName(obj, "Slot")) { String name = slot.getAttributeValue(MetadataSupport.slot_name_qname); if (!name.equals(slot_name)) { continue; } OMElement value_list = MetadataSupport.firstChildWithLocalName(slot, "ValueList"); if (value_list == null) { continue; } int value_count = 0; for (OMElement value_ele : MetadataSupport.childrenWithLocalName(value_list, "Value")) { if (value_count != value_index) { value_count++; continue; } value_ele.setText(value); } } } /** * * @param ele * @return */ public String getStatus(OMElement ele) { return ele.getAttributeValue(MetadataSupport.status_qname); } /** * * @return */ public ArrayList<OMElement> getMajorObjects() { //return getMajorObjects(null); return this.allObjects; } /** * * @param type * @return */ public ArrayList<OMElement> getMajorObjects(String type) { ArrayList<OMElement> objs = new ArrayList<OMElement>(); if (wrapper != null) { for (Iterator it = wrapper.getChildElements(); it.hasNext();) { OMElement obj = (OMElement) it.next(); if (type == null || type.equals(obj.getLocalName())) { objs.add(obj); } } } return objs; } /** * * @param ele * @return */ public String getId(OMElement ele) { return ele.getAttributeValue(MetadataSupport.id_qname); } /** * * @param assoc * @return */ public String getSourceObject(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.source_object_qname); } /** * * @param assoc * @return */ public String getTargetObject(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.target_object_qname); } /** * * @param assoc * @return */ public String getAssocType(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.association_type_qname); } /** * * @param assoc * @return */ public String getAssocSource(OMElement assoc) { return assoc.getAttributeValue(MetadataSupport.source_object_qname); } /** * * @param assoc * @return */ public String getAssocTarget(OMElement assoc) { return 
assoc.getAttributeValue(MetadataSupport.target_object_qname); } /** * * @return */ public ArrayList<OMElement> getAllObjects() { // probably the same as Major Objects return allObjects; } /** * * @param objects * @return */ public ArrayList<String> getIdsForObjects(List<OMElement> objects) { ArrayList<String> ids = new ArrayList<String>(); for (int i = 0; i < objects.size(); i++) { OMElement object = (OMElement) objects.get(i); String id = object.getAttributeValue(MetadataSupport.id_qname); ids.add(id); } return ids; } /** * * @param id * @return * @throws MetadataException */ public String type(String id) throws MetadataException { OMElement ele = this.getObjectById(id); if (ele == null) { return null; } return ele.getLocalName(); } /** * * @param ids * @param ele */ private void addIds(ArrayList<String> ids, OMElement ele) { if (!ele.getLocalName().equals("ObjectRef")) { String id = ele.getAttributeValue(MetadataSupport.id_qname); if (id != null && !ids.contains(id)) { ids.add(id); } } for (Iterator it = ele.getChildElements(); it.hasNext();) { OMElement ele2 = (OMElement) it.next(); addIds(ids, ele2); // Recurse. } } /** * * @return */ public ArrayList<String> getAllDefinedIds() { ArrayList<String> ids = new ArrayList<String>(); ArrayList<OMElement> objects = getAllObjects(); for (int i = 0; i < objects.size(); i++) { OMElement object = (OMElement) objects.get(i); addIds(ids, object); } return ids; } /** * * @return */ public ArrayList<OMElement> getRegistryPackages() { return this.registryPackages; } /** * * @return */ public ArrayList<OMElement> getSubmissionSets() { return this.submissionSets; } /** * * @return */ public ArrayList<OMElement> getExtrinsicObjects() { return extrinsicObjects; } /** * * @return */ public ArrayList<OMElement> getObjectRefs() { return objectRefs; } /** * * @return */ public List<String> getObjectRefIds() { List<String> ids = new ArrayList<String>(); for (Iterator<OMElement> it = getObjectRefs().iterator(); it.hasNext();) { OMElement ele = it.next(); ids.add(ele.getAttributeValue(MetadataSupport.id_qname)); } return ids; } /** * * @param i * @return */ public OMElement getExtrinsicObject(int i) { return (OMElement) getExtrinsicObjects().get(i); } /** * * @return */ public ArrayList<String> getExtrinsicObjectIds() { ArrayList<String> ids = new ArrayList<String>(); for (Iterator<OMElement> it = getExtrinsicObjects().iterator(); it.hasNext();) { OMElement ele = it.next(); ids.add(ele.getAttributeValue(MetadataSupport.id_qname)); } return ids; } /** * * @return * @throws MetadataException */ public HashMap<String, OMElement> getDocumentUidMap() throws MetadataException { HashMap<String, OMElement> map = new HashMap<String, OMElement>(); for (Iterator<String> it = getExtrinsicObjectIds().iterator(); it.hasNext();) { String id = it.next(); String uid = this.getUniqueIdValue(id); map.put(uid, this.getObjectById(id)); } return map; } /** * Return the "mime type" for the extrinsic object. * * @param eo An Extrinsic Object. * @return A string representing the mime type for the extrinsic object. 
*/ public String getMimeType(OMElement eo) { return eo.getAttributeValue(MetadataSupport.mime_type_qname); } /** * * @param ro * @return */ public String getHome(OMElement ro) { return ro.getAttributeValue(MetadataSupport.home_qname); } /** * * @return */ public ArrayList<OMElement> getAssociations() { return associations; } /** * * @param i * @return */ public OMElement getAssociation(int i) { return (OMElement) getAssociations().get(i); } /** * * @return */ public ArrayList<String> getAssociationIds() { ArrayList<String> ids = new ArrayList<String>(); for (Iterator<OMElement> it = getAssociations().iterator(); it.hasNext();) { OMElement ele = it.next(); ids.add(ele.getAttributeValue(MetadataSupport.id_qname)); } return ids; } /** * * @return */ public List<String> getAssocReferences() { List<String> ids = new ArrayList<String>(); for (OMElement assoc : associations) { String obj1_id = assoc.getAttributeValue(MetadataSupport.source_object_qname); String obj2_id = assoc.getAttributeValue(MetadataSupport.target_object_qname); if (!listContains(ids, obj1_id)) { ids.add(obj1_id); } if (!listContains(ids, obj2_id)) { ids.add(obj2_id); } } return ids; } /** * * @param list * @param value * @return */ private boolean listContains(List<String> list, String value) { for (Iterator<String> it = list.iterator(); it.hasNext();) { String val = it.next(); if (value.equals(val)) { return true; } } return false; } /** * * @param ele * @param slot_name * @param slot_value */ private void addSlot(OMElement ele, String slot_name, String slot_value) { OMElement slot = this.om_factory().createOMElement("Slot", getCurrentNamespace()); slot.addAttribute("name", slot_name, null); OMElement value_list = this.om_factory().createOMElement("ValueList", getCurrentNamespace()); slot.addChild(value_list); OMElement value = this.om_factory().createOMElement("Value", getCurrentNamespace()); value_list.addChild(value); value.setText(slot_value); OMElement firstChild = ele.getFirstElement(); firstChild.insertSiblingBefore(slot); //ele.addChild(slot); //mustDup = true; } /** * * @param ele * @param slot_name * @return */ private OMElement addSlot(OMElement ele, String slot_name) { OMElement slot = this.om_factory().createOMElement("Slot", null); slot.addAttribute("name", slot_name, null); OMElement value_list = this.om_factory().createOMElement("ValueList", null); slot.addChild(value_list); OMElement firstChild = ele.getFirstElement(); firstChild.insertSiblingBefore(slot); //ele.addChild(slot); //mustDup = true; return slot; } /** * * @param slot * @param value * @return */ private OMElement addSlotValue(OMElement slot, String value) { OMElement value_list = MetadataSupport.firstChildWithLocalName(slot, "ValueList"); OMElement valueEle = this.om_factory().createOMElement("Value", null); valueEle.setText(value); value_list.addChild(valueEle); //mustDup = true; return slot; } /** * * @param ele * @return */ private String id(OMElement ele) { return ele.getAttributeValue(MetadataSupport.id_qname); } /** * * @param ele * @param slot_name * @param slot_value * @throws MetadataException */ public void setSlot(OMElement ele, String slot_name, String slot_value) throws MetadataException { OMElement slot = getSlot(id(ele), slot_name); if (slot == null) { addSlot(ele, slot_name, slot_value); } else { OMElement value_list = MetadataSupport.firstChildWithLocalName(slot, "ValueList"); if (value_list == null) { throw new MetadataException("Slot without ValueList - slot name is " + slot_name + " of object " + id(ele)); } for (Iterator it = 
value_list.getChildElements(); it.hasNext();) { OMElement v = (OMElement) it.next(); v.detach(); } OMElement value = MetadataSupport.om_factory.createOMElement("Value", getCurrentNamespace()); value_list.addChild(value); value.addChild(MetadataSupport.om_factory.createOMText(slot_value)); } } /** * * @param rm_dups * @throws MetadataException * @throws MetadataValidationException */ private void reparse(boolean rm_dups) throws MetadataException, MetadataValidationException { reinit(); reindex(); parse(rm_dups); } /** * */ private void reinit() { registryPackages = null; objectsReferenced = null; objectsToDeprecate = null; wrappers = null; init(); } /** * */ private void init() { if (registryPackages == null) { associations = new ArrayList(); extrinsicObjects = new ArrayList(); registryPackages = new ArrayList(); submissionSets = new ArrayList(); objectRefs = new ArrayList(); folders = new ArrayList(); submissionSet = null; classifications = new ArrayList(); allObjects = new ArrayList(); classificationsOfId = new HashMap<String, ArrayList>(); //objects_to_deprecate = new ArrayList(); //objects_referenced = new ArrayList(); } } // referencedObjects are: // RegistryObjects referenced by Associations (sourceObject or targetObject) // That are not contained in the package that was parsed to create this instance of Metadata /** * * @return * @throws MetadataValidationException * @throws MetadataException */ public ArrayList getReferencedObjects() throws MetadataValidationException, MetadataException { if (objectsReferenced == null) { this.objectsReferenced = new ArrayList(); this.objectsToDeprecate = new ArrayList(); for (Iterator it = associations.iterator(); it.hasNext();) { OMElement association = (OMElement) it.next(); String association_type = association.getAttributeValue(MetadataSupport.association_type_qname); String target_object = association.getAttributeValue(MetadataSupport.target_object_qname); String source_object = association.getAttributeValue(MetadataSupport.source_object_qname); if (association_type == null) { throw new MetadataValidationException("Association has no associationType attribute"); } if (source_object == null) { throw new MetadataValidationException(association_type + " Association has no sourceObject attribute"); } if (target_object == null) { throw new MetadataValidationException(association_type + " Association has no targetObject attribute"); } if (isUUID(source_object) && !containsObject(source_object)) { objectsReferenced.add(source_object); } if (isUUID(target_object) && !containsObject(target_object)) { objectsReferenced.add(target_object); } if (MetadataSupport.xdsB_ihe_assoc_type_rplc.equals(association_type) || MetadataSupport.xdsB_ihe_assoc_type_xfrm_rplc.equals(association_type)) { if (!target_object.startsWith("urn:uuid:")) { throw new MetadataValidationException("RPLC association has targetObject attribute which is not a UUID: " + target_object); } this.objectsToDeprecate.add(target_object); } } } return objectsReferenced; } /** * Return the patient identifier for the given OMElement. 
* * @param ele * @return * @throws MetadataException */ private String getPatientId(OMElement ele) throws MetadataException { if (ele == null) { return null; } String id = getId(ele); if (isDocument(id)) { return getExternalIdentifierValue(id, MetadataSupport.XDSDocumentEntry_patientid_uuid); } if (isSubmissionSet(id)) { return getExternalIdentifierValue(id, MetadataSupport.XDSSubmissionSet_patientid_uuid); } if (isFolder(id)) { return getExternalIdentifierValue(id, MetadataSupport.XDSFolder_patientid_uuid); } return null; } /** * * @param id * @return * @throws MetadataException */ public boolean containsObject(String id) throws MetadataException { OMElement ele = id_index().getObjectById(id); if (ele == null) { return false; } if (ele.getLocalName().equals("ObjectRef")) { return false; } return true; } /** * * @param objects * @return */ public ArrayList<String> idsForObjects(ArrayList<OMElement> objects) { ArrayList<String> ids = new ArrayList<String>(); for (OMElement ele : objects) { ids.add(ele.getAttributeValue(MetadataSupport.id_qname)); } return ids; } /** * * @param id * @return */ private boolean isUUID(String id) { return id.startsWith("urn:uuid:"); } /** * * @return * @throws MetadataValidationException * @throws MetadataException */ public ArrayList getReferencedObjectsThatMustHaveSamePatientId() throws MetadataValidationException, MetadataException { ArrayList<String> objects = new ArrayList<String>(); for (Iterator it = associations.iterator(); it.hasNext();) { OMElement association = (OMElement) it.next(); String association_type = association.getAttributeValue(MetadataSupport.association_type_qname); String target_object = association.getAttributeValue(MetadataSupport.target_object_qname); String source_object = association.getAttributeValue(MetadataSupport.source_object_qname); if (association_type == null) { throw new MetadataValidationException("Association has no associationType attribute"); } if (source_object == null) { throw new MetadataValidationException(association_type + " Association has no sourceObject attribute"); } if (target_object == null) { throw new MetadataValidationException(association_type + " Association has no targetObject attribute"); } if (MetadataSupport.xdsB_eb_assoc_type_has_member.equals(association_type) && "Reference".equals(getSlotValue(association, "SubmissionSetStatus", 0))) { continue; } if (source_object.startsWith("urn:uuid:") && id_index().getObjectById(source_object) == null) { objects.add(source_object); } if (target_object.startsWith("urn:uuid:")) { OMElement o = id_index().getObjectById(target_object); if (o == null) { objects.add(target_object); } else if (o.getLocalName().equals("ObjectRef")) { objects.add(target_object); } } } return objects; } /** * * @param id * @return * @throws MetadataValidationException * @throws MetadataException */ public boolean isReferencedObject(String id) throws MetadataValidationException, MetadataException { if (!id.startsWith("urn:uuid:")) { return false; } return getReferencedObjects().contains(id); } /** * * @return * @throws MetadataValidationException * @throws MetadataException */ public ArrayList getDeprecatableObjectIds() throws MetadataValidationException, MetadataException { this.getReferencedObjects(); return objectsToDeprecate; } /** * * @param classifications */ private void add_to_classifications_of_id(ArrayList<OMElement> classifications) { for (OMElement classification : classifications) { add_to_classifications_of_id(classification); } } /** * * @param classification */ private 
void add_to_classifications_of_id(OMElement classification) { String id = classification.getAttributeValue(MetadataSupport.id_qname); ArrayList old = this.classificationsOfId.get(id); if (old == null) { old = new ArrayList(); classificationsOfId.put(id, old); } old.add(classification); } /** * * @param a * @return */ public OMElement addAssociation(OMElement a) { this.associations.add(a); this.allObjects.add(a); return a; } /** * * @param type * @param sourceUuid * @param targetUuid * @return */ public OMElement makeAssociation(String type, String sourceUuid, String targetUuid) { OMElement assoc = MetadataSupport.om_factory.createOMElement("Association", MetadataSupport.ebRIMns3); assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("associationType", null, type)); assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("sourceObject", null, sourceUuid)); assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("targetObject", null, targetUuid)); assoc.addAttribute(MetadataSupport.om_factory.createOMAttribute("id", null, allocate_id())); return assoc; } /** * * @param discard_duplicates * @throws MetadataException * @throws MetadataValidationException */ private void parse(boolean discard_duplicates) throws MetadataException, MetadataValidationException { init(); OMNamespace namespace = wrapper.getNamespace(); String namespace_uri = (namespace != null) ? namespace.getNamespaceURI() : ""; detect_metadata_version(namespace_uri); for (Iterator it = wrapper.getChildElements(); it.hasNext();) { OMElement obj = (OMElement) it.next(); String type = obj.getLocalName(); OMAttribute id_att = obj.getAttribute(MetadataSupport.id_qname); // obj has no id attribute - assign it one if (id_att == null) { String id = allocate_id(); id_att = obj.addAttribute("id", id, null); } else { String id = id_att.getAttributeValue(); if (id == null || id.equals("")) { id_att.setAttributeValue(allocate_id()); } } if (!discard_duplicates || !getIds(allObjects).contains(id(obj))) { allObjects.add(obj); } add_to_classifications_of_id(findClassifications(obj)); if (type.equals("RegistryPackage")) { if (hasExternalIdentifier(obj, MetadataSupport.XDSSubmissionSet_uniqueid_uuid)) { if (!discard_duplicates || !getIds(submissionSets).contains(id(obj))) { submissionSets.add(obj); } if (submissionSet != null && this.grok_metadata == true) { throw new MetadataException("Metadata: Submission has multiple SubmissionSets"); } submissionSet = obj; } else if (hasExternalIdentifier(obj, MetadataSupport.XDSFolder_uniqueid_uuid)) { if (!discard_duplicates || !getIds(folders).contains(id(obj))) { folders.add(obj); } } if (!discard_duplicates || !getIds(registryPackages).contains(id(obj))) { registryPackages.add(obj); } } else if (type.equals("ExtrinsicObject")) { if (!discard_duplicates || !getIds(extrinsicObjects).contains(id(obj))) { extrinsicObjects.add(obj); } } else if (type.equals("ObjectRef")) { if (!discard_duplicates || !getIds(objectRefs).contains(id(obj))) { objectRefs.add(obj); } } else if (type.equals("Classification")) { if (!discard_duplicates || !getIds(classifications).contains(id(obj))) { classifications.add(obj); } add_to_classifications_of_id(obj); } else if (type.equals("Association")) { if (!discard_duplicates || !getIds(associations).contains(id(obj))) { associations.add(obj); } } else { throw new MetadataException("Metadata: parse(): did not expect a " + type + " object at the top level"); } for (Iterator it1 = obj.getChildElements(); it1.hasNext();) { OMElement obj_i = (OMElement) 
it1.next(); String type_i = obj_i.getLocalName(); if (type_i.equals("Classification")) { if (!discard_duplicates || !getIds(classifications).contains(id(obj_i))) { classifications.add(obj_i); } } } } if (grok_metadata && submissionSet == null) { throw new NoSubmissionSetException("Metadata: No Submission Set found"); } } /** * * @param namespace_uri * @throws MetadataException */ private void detect_metadata_version(String namespace_uri) throws MetadataException { // if this class later accepts v3 metadata as well we may have to worry about intermixing v2 and v3 if (namespace_uri.equals("urn:oasis:names:tc:ebxml-regrep:rim:xsd:2.1")) { version2 = true; } else if (namespace_uri.equals("urn:oasis:names:tc:ebxml-regrep:query:xsd:2.1")) { version2 = true; } else if (namespace_uri.equals("urn:oasis:names:tc:ebxml-regrep:xsd:rim:3.0")) { version2 = false; } else if (namespace_uri.equals("urn:oasis:names:tc:ebxml-regrep:xsd:rs:3.0")) { version2 = false; } else { throw new MetadataException("Metadata.parse(): Cannot identify version of metadata from namespace " + namespace_uri); } } /** * * @param ele * @return */ public String getNameValue(OMElement ele) { OMElement name_ele = MetadataSupport.firstChildWithLocalName(ele, "Name"); if (name_ele == null) { return null; } OMElement loc_st = MetadataSupport.firstChildWithLocalName(name_ele, "LocalizedString"); if (loc_st == null) { return null; } return loc_st.getAttributeValue(MetadataSupport.value_qname); } /** * * @return */ public OMElement getSubmissionSet() { return submissionSet; } /** * * @return */ public String getSubmissionSetId() { OMElement ss = getSubmissionSet(); if (ss == null) { return ""; } return ss.getAttributeValue(MetadataSupport.id_qname); } /** * * @param ids * @return */ public ArrayList<OMElement> getAssociationsInclusive(ArrayList<String> ids) { ArrayList<OMElement> assocs = new ArrayList<OMElement>(); for (OMElement a : this.getAssociations()) { if (ids.contains(getAssocSource(a)) && ids.contains(getAssocTarget(a))) { assocs.add(a); } } return assocs; } /** * * @return */ public ArrayList<String> getSubmissionSetIds() { ArrayList ids = new ArrayList(); ArrayList sss = getSubmissionSets(); for (int i = 0; i < sss.size(); i++) { OMElement ss = (OMElement) sss.get(i); String f_id = ss.getAttributeValue(MetadataSupport.id_qname); ids.add(f_id); } return ids; } /** * * @param id * @return */ public boolean isSubmissionSet(String id) { return getSubmissionSetIds().contains(id); } /** * * @param id * @return */ public boolean isFolder(String id) { return getFolderIds().contains(id); } /** * * @param id * @return */ public boolean isDocument(String id) { return this.getExtrinsicObjectIds().contains(id); } /** * Return true if all patient ids in metadata are equivalent. Otherwise return false. * @return Boolean value indicating result of verification. 
* @throws MetadataException */ public boolean isPatientIdConsistent() throws MetadataException { String patientID = null; for (OMElement ele : allObjects) { String pid = getPatientId(ele); if (patientID == null) { patientID = pid; continue; } if (pid == null) { continue; } if (!patientID.equals(pid)) { return false; } } return true; } /** * * @return */ public ArrayList<OMElement> getFolders() { return folders; } /** * * @param parts * @return */ public ArrayList<String> getIds(ArrayList<OMElement> parts) { ArrayList<String> ids = new ArrayList<String>(); for (int i = 0; i < parts.size(); i++) { OMElement part = (OMElement) parts.get(i); String f_id = part.getAttributeValue(MetadataSupport.id_qname); ids.add(f_id); } return ids; } /** * * @return */ public ArrayList<String> getFolderIds() { ArrayList ids = new ArrayList(); ArrayList folders = getFolders(); for (int i = 0; i < folders.size(); i++) { OMElement folder = (OMElement) folders.get(i); String f_id = folder.getAttributeValue(MetadataSupport.id_qname); ids.add(f_id); } return ids; } /** * * @param i * @return */ public OMElement getFolder(int i) { return (OMElement) getFolders().get(i); } /** * * @return */ public ArrayList getRegistryPackageIds() { ArrayList ids = new ArrayList(); ArrayList rps = this.getRegistryPackages(); for (int i = 0; i < rps.size(); i++) { OMElement rp = (OMElement) rps.get(i); String f_id = rp.getAttributeValue(MetadataSupport.id_qname); ids.add(f_id); } return ids; } /** * * @param ele * @return */ public String getIdentifyingString(OMElement ele) { StringBuffer b = new StringBuffer(); b.append(ele.getLocalName()); OMElement name_ele = MetadataSupport.firstChildWithLocalName(ele, "Name"); if (name_ele != null) { OMElement loc = MetadataSupport.firstChildWithLocalName(name_ele, "LocalizedString"); if (loc != null) { String name = loc.getAttributeValue(new QName("value")); b.append(" Name=\"" + name + "\""); } } b.append(" id=\"" + ele.getAttributeValue(MetadataSupport.id_qname) + "\""); return "<" + b.toString() + ">"; } /** * * @param objects * @return */ public ArrayList getObjectNames(ArrayList objects) { ArrayList names = new ArrayList(); for (int i = 0; i < objects.size(); i++) { OMElement obj = (OMElement) objects.get(i); names.add(obj.getLocalName()); } return names; } /** * * @return * @throws MetadataException */ private OMElement find_metadata_wrapper() throws MetadataException { if (metadata == null || metadata.getLocalName() == null) { throw new NoMetadataException("find_metadata_wrapper: Cannot find a wrapper element, top element is NULL" + ". 
A wrapper is one of the XML elements that holds metadata (ExtrinsicObject, RegistryPackage, Association etc.)"); } if (metadata.getLocalName().equals("TestResults")) { OMElement test_step = MetadataSupport.firstChildWithLocalName(metadata, "TestStep"); if (test_step != null) { OMElement sqt = MetadataSupport.firstChildWithLocalName(test_step, "StoredQueryTransaction"); if (sqt != null) { OMElement result = MetadataSupport.firstChildWithLocalName(sqt, "Result"); if (result != null) { OMElement ahqr = MetadataSupport.firstChildWithLocalName(result, "AdhocQueryResponse"); if (ahqr != null) { OMElement rol = MetadataSupport.firstChildWithLocalName(ahqr, "RegistryObjectList"); if (rol != null) { return rol; } } } } } } if (metadata.getLocalName().equals("LeafRegistryObjectList")) { return metadata; } if (metadata.getLocalName().equals("ProvideAndRegisterDocumentSetRequest")) { OMElement sor = MetadataSupport.firstChildWithLocalName(metadata, "SubmitObjectsRequest"); if (sor != null) { return MetadataSupport.firstChildWithLocalName(sor, "RegistryObjectList"); } } for (Iterator it = metadata.getChildElements(); it.hasNext();) { OMElement child = (OMElement) it.next(); if (child.getLocalName().equals("RegistryObjectList")) { return child; } if (child.getLocalName().equals("AdhocQueryResponse")) { OMElement achild = MetadataSupport.firstChildWithLocalName(child, "SQLQueryResult"); if (achild != null) { return achild; } } if (child.getLocalName().equals("LeafRegistryObjectList")) { return child; } } OMElement ele2 = find_metadata_wrapper2(metadata); if (ele2 != null) { return ele2; } throw new NoMetadataException("find_metadata_wrapper: Cannot find a wrapper element, top element is " + metadata.getLocalName() + ". A wrapper is one of the XML elements that holds metadata (ExtrinsicObject, RegistryPackage, Association etc.)"); } /** * * @param ele * @return * @throws MetadataException */ private OMElement find_metadata_wrapper2(OMElement ele) throws MetadataException { for (Iterator<OMElement> it = ele.getChildElements(); it.hasNext();) { OMElement e = it.next(); String name = e.getLocalName(); if (name == null) { continue; } if (name.equals("ObjectRef") || name.equals("ExtrinsicObject") || name.equals("RegistryPackage") || name.equals("Association") || name.equals("Classification")) { return ele; } OMElement e2 = find_metadata_wrapper2(e); if (e2 != null) { return e2; } } return null; } /** * * @return */ public OMElement getWrapper() { return wrapper; } /** * * @param registry_object * @param slot_name * @return */ public OMElement findSlot(OMElement registry_object, String slot_name) { for (Iterator it = registry_object.getChildElements(); it.hasNext();) { OMElement s = (OMElement) it.next(); if (!s.getLocalName().equals("Slot")) { continue; } String val = s.getAttributeValue(MetadataSupport.slot_name_qname); if (val != null && val.equals(slot_name)) { return s; } } return null; } /** * * @param registry_object * @param classificationScheme * @return */ public ArrayList findClassifications(OMElement registry_object, String classificationScheme) { ArrayList cl = new ArrayList(); for (Iterator it = registry_object.getChildElements(); it.hasNext();) { OMElement s = (OMElement) it.next(); if (!s.getLocalName().equals("Classification")) { continue; } String val = s.getAttributeValue(MetadataSupport.classificationscheme_qname); if (val != null && val.equals(classificationScheme)) { cl.add(s); } } return cl; } /** * * @param registry_object * @return */ public ArrayList findClassifications(OMElement 
registry_object) { ArrayList cl = new ArrayList(); for (Iterator it = registry_object.getChildElements(); it.hasNext();) { OMElement s = (OMElement) it.next(); if (!s.getLocalName().equals("Classification")) { continue; } cl.add(s); } return cl; } /** * * @param registry_object * @param element_name * @return */ public ArrayList findChildElements(OMElement registry_object, String element_name) { ArrayList al = new ArrayList(); for (Iterator it = registry_object.getChildElements(); it.hasNext();) { OMElement s = (OMElement) it.next(); if (s.getLocalName().equals(element_name)) { al.add(s); } } return al; } /** * * @param registry_objects * @return */ public ArrayList<String> getObjectIds(ArrayList<OMElement> registry_objects) { ArrayList ids = new ArrayList(); for (int i = 0; i < registry_objects.size(); i++) { OMElement ele = registry_objects.get(i); String id = ele.getAttributeValue(MetadataSupport.id_qname); ids.add(id); } return ids; } /** * * @param registry_objects * @param version2 * @return */ public ArrayList<OMElement> getObjectRefs(ArrayList registry_objects, boolean version2) { ArrayList<OMElement> ors = new ArrayList<OMElement>(); for (int i = 0; i < registry_objects.size(); i++) { OMElement ele = (OMElement) registry_objects.get(i); String id = ele.getAttributeValue(MetadataSupport.id_qname); OMElement or = MetadataSupport.om_factory.createOMElement("ObjectRef", (version2) ? MetadataSupport.ebRIMns2 : MetadataSupport.ebRIMns3); or.addAttribute("id", id, null); or.addAttribute("home", "", null); ors.add(or); } return ors; } /** * * @return */ public ArrayList<OMElement> getClassifications() { return classifications; } /* * by ID */ private IdIndex id_index() throws MetadataException { if (idIndex == null) { idIndex = new IdIndex(this); // System.out.println("Metadata indexed: \n" + id_index.toString()); } return idIndex; } /** * * @param log_message * @return * @throws MetadataException */ private IdIndex id_index(XLogMessage log_message) throws MetadataException { if (idIndex == null) { idIndex = new IdIndex(); idIndex.setLogMessage(log_message); idIndex.setMetadata(this); } return idIndex; } /** * * @param object_id * @return * @throws MetadataException */ public String getNameValue(String object_id) throws MetadataException { return id_index().getNameValue(object_id); } /** * * @param object_id * @return * @throws MetadataException */ public String getDescriptionValue(String object_id) throws MetadataException { return id_index().getDescriptionValue(object_id); } /** * * @param object_id * @return * @throws MetadataException */ public ArrayList getSlots(String object_id) throws MetadataException { return id_index().getSlots(object_id); } /** * * @param object_id * @param name * @return * @throws MetadataException */ public OMElement getSlot(String object_id, String name) throws MetadataException { return id_index().getSlot(object_id, name); } /** * * @param object_id * @param name * @throws MetadataException */ public void removeSlot(String object_id, String name) throws MetadataException { OMElement slot = getSlot(object_id, name); if (slot != null) { slot.detach(); reindex(); } } /** * * @return */ public String getMetadataDescription() { StringBuffer buf = new StringBuffer(); buf.append(this.getSubmissionSets().size() + " SubmissionSets\n"); buf.append(this.getExtrinsicObjects().size() + " DocumentEntries\n"); buf.append(this.getFolders().size() + " Folders\n"); buf.append(this.getAssociations().size() + " Associations\n"); buf.append(this.getObjectRefs().size() + " 
ObjectRefs\n"); return buf.toString(); } /** * * @throws MetadataException */ public void fixClassifications() throws MetadataException { ArrayList<String> rpIds = getRegistryPackageIds(); for (int i = 0; i < rpIds.size(); i++) { String id = rpIds.get(i); ArrayList<OMElement> classifications = getClassifications(id); for (OMElement classification : classifications) { if (classification.getAttribute(MetadataSupport.noderepresentation_qname) != null) { continue; } // not a code - must be classification of RP as SS or Fol, make sure // classificationNode is present if (classification.getAttribute(MetadataSupport.classificationnode_qname) == null) { // add classification, first figure out if this is SS or Fol if (isSubmissionSet(id)) { classification.addAttribute("classificationNode", MetadataSupport.XDSSubmissionSet_classification_uuid, null); } else { classification.addAttribute("classificationNode", MetadataSupport.XDSFolder_classification_uuid, null); } } } } } /** * * @param object_id * @return * @throws MetadataException */ public ArrayList<OMElement> getClassifications(String object_id) throws MetadataException { return id_index().getClassifications(object_id); } /** * * @param object * @return * @throws MetadataException */ public ArrayList<OMElement> getClassifications(OMElement object) throws MetadataException { return id_index().getClassifications(this.getId(object)); } /** * * @param object * @param classification_scheme * @return * @throws MetadataException */ public ArrayList<OMElement> getClassifications(OMElement object, String classification_scheme) throws MetadataException { return getClassifications(this.getId(object), classification_scheme); } /** * * @param classification * @return */ public String getClassificationValue(OMElement classification) { return classification.getAttributeValue(MetadataSupport.noderepresentation_qname); } /** * * @param classification * @return */ public String getClassificationScheme(OMElement classification) { return getSlotValue(classification, "codingScheme", 0); } /** * * @param object * @param classification_scheme * @return * @throws MetadataException */ public ArrayList<String> getClassificationsValues(OMElement object, String classification_scheme) throws MetadataException { ArrayList<OMElement> classes = getClassifications(object, classification_scheme); ArrayList<String> values = new ArrayList<String>(); for (OMElement e : classes) { values.add(e.getAttributeValue(MetadataSupport.noderepresentation_qname)); } return values; } /** * * @param id * @param classification_scheme * @return * @throws MetadataException */ public ArrayList<String> getClassificationsValues(String id, String classification_scheme) throws MetadataException { return getClassificationsValues(this.getObjectById(id), classification_scheme); } /** * * @param id * @param classification_scheme * @return * @throws MetadataException */ public ArrayList<OMElement> getClassifications(String id, String classification_scheme) throws MetadataException { ArrayList<OMElement> cls = getClassifications(id); ArrayList<OMElement> cls_2 = new ArrayList<OMElement>(); for (OMElement cl : cls) { String cl_scheme = cl.getAttributeValue(MetadataSupport.classificationscheme_qname); if (cl_scheme != null && cl_scheme.equals(classification_scheme)) { cls_2.add(cl); } } return cls_2; } /** * * @param object_id * @return * @throws MetadataException */ public ArrayList getExternalIdentifiers(String object_id) throws MetadataException { return id_index().getExternalIdentifiers(object_id); } /** * 
* @param object_id * @param identifier_scheme * @return * @throws MetadataException */ public String getExternalIdentifierValue(String object_id, String identifier_scheme) throws MetadataException { return id_index().getExternalIdentifierValue(object_id, identifier_scheme); } /** * Updates all folders "lastUpdateTime" slot with the current time. * * @throws MetadataException */ public void updateFoldersLastUpdateTimeSlot() throws MetadataException { ArrayList<OMElement> folderList = this.folders; // Set XDSFolder.lastUpdateTime if ((folderList != null) && (!folderList.isEmpty())) { String timestamp = Hl7Date.now(); for (OMElement fol : folderList) { this.setSlot(fol, "lastUpdateTime", timestamp); } } } /** * * @param id * @return * @throws MetadataException */ public String getUniqueIdValue(String id) throws MetadataException { String uid; uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSDocumentEntry_uniqueid_uuid); if (uid != null && !uid.equals("")) { return uid; } uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSSubmissionSet_uniqueid_uuid); if (uid != null && !uid.equals("")) { return uid; } uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSFolder_uniqueid_uuid); if (uid != null && !uid.equals("")) { return uid; } return null; } /** * * @return * @throws MetadataException */ public ArrayList<String> getAllUids() throws MetadataException { ArrayList<String> all_ids = this.getAllDefinedIds(); ArrayList<String> all_uids = new ArrayList<String>(); for (String id : all_ids) { String uid = getUniqueIdValue(id); if (uid != null) { all_uids.add(uid); } } return all_uids; } /** * * @return */ public List<OMElement> getAllLeafClasses() { List<OMElement> lc = new ArrayList<OMElement>(); lc.addAll(extrinsicObjects); lc.addAll(registryPackages); lc.addAll(associations); return lc; } /** * * @param map * @param uid * @param hash */ private void addToUidHashMap(HashMap<String, ArrayList<String>> map, String uid, String hash) { if (uid == null) { return; } ArrayList<String> hash_list = map.get(uid); if (hash_list == null) { hash_list = new ArrayList<String>(); map.put(uid, hash_list); } hash_list.add(hash); } // get map of uid ==> ArrayList of hashes // for folder and ss, hash is null // Some docs may not have a hash either, depending on where this use used /** * * @return * @throws MetadataException */ public HashMap<String, ArrayList<String>> getUidHashMap() throws MetadataException { HashMap<String, ArrayList<String>> hm = new HashMap<String, ArrayList<String>>(); ArrayList<String> ids; ids = this.getExtrinsicObjectIds(); for (String id : ids) { OMElement registry_object = this.getObjectById(id); String uid; ArrayList<OMElement> eis = this.getExternalIdentifiers(id); ArrayList<OMElement> eid_eles = this.getExternalIdentifiers(registry_object, MetadataSupport.XDSDocumentEntry_uniqueid_uuid); if (eid_eles.size() > 0) { uid = eid_eles.get(0).getAttributeValue(MetadataSupport.value_qname); } else { throw new MetadataException("Metadata.getUidHashMap(): Doc " + id + " has no uniqueId\nfound " + eis.size() + " external identifiers"); } String hash = this.getSlotValue(id, "hash", 0); if (hash != null && hash.equals("")) { hash = null; } addToUidHashMap(hm, uid, hash); } ids = this.getSubmissionSetIds(); for (String id : ids) { String uid; uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSSubmissionSet_uniqueid_uuid); if (uid == null || uid.equals("")) { throw new MetadataException("Metadata.getUidHashMap(): SS " + id + " has no 
uniqueId"); } addToUidHashMap(hm, uid, null); } ids = this.getFolderIds(); for (String id : ids) { String uid; uid = id_index().getExternalIdentifierValue(id, MetadataSupport.XDSFolder_uniqueid_uuid); if (uid == null || uid.equals("")) { throw new MetadataException("Metadata.getUidHashMap(): Fol " + id + " has no uniqueId"); } addToUidHashMap(hm, uid, null); } return hm; } /** * * @return * @throws MetadataException */ public String getSubmissionSetUniqueId() throws MetadataException { return id_index().getSubmissionSetUniqueId(); } /** * * @return * @throws MetadataException */ public String getSubmissionSetPatientId() throws MetadataException { return id_index().getSubmissionSetPatientId(); } /** * * @param id * @return * @throws MetadataException */ public OMElement getObjectById(String id) throws MetadataException { return id_index().getObjectById(id); } /** * * @param id * @return * @throws MetadataException */ public String getIdentifyingString(String id) throws MetadataException { return id_index().getIdentifyingString(id); } /** * * @param id * @return * @throws MetadataException */ public String getObjectTypeById(String id) throws MetadataException { return id_index().getObjectTypeById(id); } /** * * @return * @throws MetadataException */ public String getSubmissionSetSourceId() throws MetadataException { return id_index().getSubmissionSetSourceId(); } /** * * @return * @throws XdsInternalException */ public OMElement getV3SubmitObjectsRequest() throws XdsInternalException { //OMNamespace rs = MetadataSupport.ebRSns3; OMNamespace lcm = MetadataSupport.ebLcm3; OMNamespace rim = MetadataSupport.ebRIMns3; OMElement sor = this.om_factory().createOMElement("SubmitObjectsRequest", lcm); OMElement lrol = this.om_factory().createOMElement("RegistryObjectList", rim); sor.addChild(lrol); for (int i = 0; i < allObjects.size(); i++) { lrol.addChild(allObjects.get(i)); } /* ArrayList objects = this.getV3(); for (int i = 0; i < objects.size(); i++) { OMElement ele = (OMElement) objects.get(i); lrol.addChild(ele); }*/ return sor; } /** * * @param value * @return */ public String stripNamespace(String value) { if (value == null) { return null; } if (value.indexOf(":") == -1) { return value; } String[] parts = value.split(":"); return parts[parts.length - 1]; } /** * * @param value * @return */ public boolean hasNamespace(String value) { if (value.indexOf(":") == -1) { return false; } return true; } /** * * @param value * @param namespace * @return */ public String addNamespace(String value, String namespace) { if (hasNamespace(value)) { return value; } if (namespace.endsWith(":")) { return namespace + value; } return namespace + ":" + value; } /** * * @param objects * @return * @throws MetadataException */ public HashMap<String, OMElement> getUidMap(ArrayList<OMElement> objects) throws MetadataException { HashMap<String, OMElement> map = new HashMap<String, OMElement>(); // uid -> OMElement for (OMElement non_ref : objects) { String non_ref_id = this.getId(non_ref); String a_uid = this.getUniqueIdValue(non_ref_id); if (a_uid != null) { map.put(a_uid, non_ref); } } return map; } /** * * @return * @throws MetadataException */ public HashMap<String, OMElement> getUidMap() throws MetadataException { return getUidMap(this.getNonObjectRefs()); } /** * * @param id * @return * @throws MetadataException */ public boolean isRetrievable_a(String id) throws MetadataException { String uri = this.getSlotValue(id, "URI", 0); return uri != null; } /** * * @param id * @return * @throws MetadataException */ public 
boolean isRetrievable_b(String id) throws MetadataException { String uid = this.getSlotValue(id, "repositoryUniqueId", 0); return uid != null; } /** * * @param simpleAssociationType * @return */ public String v3AssociationNamespace(String simpleAssociationType) { if (Metadata.iheAssocTypes.contains(simpleAssociationType)) { return MetadataSupport.xdsB_ihe_assoc_namespace_uri; } else { return MetadataSupport.xdsB_eb_assoc_namespace_uri; } } /** * * @param eo * @return */ private boolean isURIExtendedFormat(OMElement eo) { String uri = getSlotValue(eo, "URI", 0); String uri2 = getSlotValue(eo, "URI", 1); if (uri == null) { return false; } if (uri2 != null) { return true; } String[] parts = uri.split("\\|"); return (parts.length >= 2); } /** * * @param eo * @return * @throws MetadataException */ public String getURIAttribute(OMElement eo) throws MetadataException { String eoId = getId(eo); String value = null; if (!isURIExtendedFormat(eo)) { value = getSlotValue(eo, "URI", 0); } else { HashMap<String, String> map = new HashMap<String, String>(); for (int i = 0; i < 1000; i++) { String slotValue = getSlotValue(eoId, "URI", i); if (slotValue == null) { break; } String[] parts = slotValue.split("\\|"); if (parts.length != 2 || parts[0].length() == 0) { throw new MetadataException("URI value does not parse: " + slotValue + " must be num|string format"); } map.put(parts[0], parts[1]); } StringBuffer buf = new StringBuffer(); int i = 1; for (;; i++) { String iStr = String.valueOf(i); String part = map.get(iStr); if (part == null) { break; } buf.append(part); } if (map.size() != i - 1) { throw new MetadataException("URI value does not parse: index " + i + " not found but Slot has " + map.size() + " values. Slot is\n" + getSlot(eoId, "URI").toString()); } value = buf.toString(); } if (value == null) { return null; } if (!value.startsWith("http://") && !value.startsWith("https://")) { throw new MetadataException("URI must have http:// or https:// prefix. URI was calculated to be\n" + value + "\nand original slot is\n" + getSlot(eoId, "URI").toString()); } return value; } int uriChunkSize = 100; /** * * @param size */ /** public void setUriChunkSize(int size) { uriChunkSize = size; }*/ /** * * @param a * @param b * @return */ private int min(int a, int b) { if (a < b) { return a; } return b; } /** * * @param eo * @param uri */ public void setURIAttribute(OMElement eo, String uri) { try { removeSlot(this.getId(eo), "URI"); } catch (MetadataException e) { } if (uri.length() < uriChunkSize) { addSlot(eo, "URI", uri); return; } OMElement slot = addSlot(eo, "URI"); StringBuffer buf = new StringBuffer(); int chunkIndex = 1; int uriSize = uri.length(); int strStart = 0; int strEnd = min(uriChunkSize, uriSize); while (true) { buf.setLength(0); buf.append(String.valueOf(chunkIndex++)).append("|").append(uri.substring(strStart, strEnd)); addSlotValue(slot, buf.toString()); if (strEnd == uriSize) { break; } strStart = strEnd; strEnd = min(strStart + uriChunkSize, uriSize); } } }
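The URI handling at the end of the class above stores long URI values across several Slot values in an "index|chunk" format (chunks of uriChunkSize characters, 1-based indexes) and reassembles them in getURIAttribute. Below is a minimal, self-contained sketch of that convention using plain strings instead of ebRIM Slot elements; the class and method names are hypothetical, and unlike the real setURIAttribute it always uses the indexed form (the real method stores URIs shorter than the chunk size as a single plain value).

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class UriChunkingSketch {

    static final int CHUNK_SIZE = 100; // mirrors uriChunkSize in the Metadata class above

    // Split a long URI into Slot-style values: "1|<first chunk>", "2|<second chunk>", ...
    static List<String> chunk(String uri) {
        List<String> values = new ArrayList<String>();
        int index = 1;
        for (int start = 0; start < uri.length(); start += CHUNK_SIZE) {
            int end = Math.min(start + CHUNK_SIZE, uri.length());
            values.add(index++ + "|" + uri.substring(start, end));
        }
        return values;
    }

    // Reassemble the URI from (possibly unordered) "num|string" values.
    static String reassemble(List<String> values) {
        Map<Integer, String> parts = new HashMap<Integer, String>();
        for (String value : values) {
            String[] split = value.split("\\|", 2);
            parts.put(Integer.valueOf(split[0]), split[1]);
        }
        StringBuilder buf = new StringBuilder();
        for (int i = 1; parts.containsKey(i); i++) {
            buf.append(parts.get(i));
        }
        return buf.toString();
    }

    public static void main(String[] args) {
        StringBuilder longUri = new StringBuilder("http://example.org/repository/doc?id=");
        for (int i = 0; i < 200; i++) {
            longUri.append('x');
        }
        List<String> slotValues = chunk(longUri.toString());
        System.out.println(slotValues.size());                                 // 3 values for this input
        System.out.println(longUri.toString().equals(reassemble(slotValues))); // true
    }
}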
Changes to support Metadata Update profile implementation.
src/xutil/src/com/vangent/hieos/xutil/metadata/structure/Metadata.java
Changes to support Metadata Update profile implementation.
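In getReferencedObjects() above, UUID-form sourceObject/targetObject ids that are not contained in the submission are collected as referenced objects, and targets of RPLC-style associations are additionally queued for deprecation. The following is a rough standalone sketch of that bookkeeping, with a hypothetical Assoc record standing in for ebRIM Association elements; the RPLC type URN shown is an assumption, the real code compares against MetadataSupport.xdsB_ihe_assoc_type_rplc.

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

public class ReferencedObjectsSketch {

    // Stand-in for an ebRIM Association element (associationType, sourceObject, targetObject).
    record Assoc(String type, String sourceObject, String targetObject) {}

    // Assumed value; the real code uses MetadataSupport.xdsB_ihe_assoc_type_rplc.
    static final String RPLC = "urn:ihe:iti:2007:AssociationType:RPLC";

    static boolean isUuid(String id) {
        return id.startsWith("urn:uuid:");
    }

    public static void main(String[] args) {
        // Ids of the objects contained in the submission itself (symbolic ids, not UUIDs).
        Set<String> containedIds = Set.of("Document01", "SubmissionSet01");

        List<Assoc> associations = List.of(
                new Assoc("HasMember", "SubmissionSet01", "Document01"),
                new Assoc(RPLC, "Document01", "urn:uuid:11111111-1111-1111-1111-111111111111"));

        List<String> referenced = new ArrayList<>();   // objects that must already exist in the registry
        List<String> toDeprecate = new ArrayList<>();  // targets of RPLC associations

        for (Assoc a : associations) {
            if (isUuid(a.sourceObject()) && !containedIds.contains(a.sourceObject())) {
                referenced.add(a.sourceObject());
            }
            if (isUuid(a.targetObject()) && !containedIds.contains(a.targetObject())) {
                referenced.add(a.targetObject());
            }
            if (RPLC.equals(a.type())) {
                // The real code also rejects non-UUID targets here with a MetadataValidationException.
                toDeprecate.add(a.targetObject());
            }
        }

        System.out.println(referenced);   // [urn:uuid:11111111-...]
        System.out.println(toDeprecate);  // [urn:uuid:11111111-...]
    }
}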
Java
apache-2.0
a175db7542c33717402063bf4216ea71ffdf59dd
0
ivanr/qlue
/* * Qlue Web Application Framework * Copyright 2009-2012 Ivan Ristic <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.webkreator.qlue; import com.webkreator.qlue.editors.*; import com.webkreator.qlue.exceptions.*; import com.webkreator.qlue.router.QlueRouteManager; import com.webkreator.qlue.router.RouteFactory; import com.webkreator.qlue.util.*; import com.webkreator.qlue.view.*; import com.webkreator.qlue.view.velocity.ClasspathVelocityViewFactory; import com.webkreator.qlue.view.velocity.DefaultVelocityTool; import it.sauronsoftware.cron4j.InvalidPatternException; import it.sauronsoftware.cron4j.Scheduler; import org.apache.commons.mail.Email; import org.apache.commons.mail.EmailException; import org.apache.commons.mail.SimpleEmail; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; import javax.servlet.ServletException; import javax.servlet.http.*; import java.io.*; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.file.FileSystems; import java.nio.file.Path; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; /** * This class represents one Qlue application. Very simple applications might * use it directly, but most will need to subclass in order to support complex * configuration (page resolver, view resolver, etc). 
*/ public class QlueApplication { public static final String PROPERTIES_FILENAME = "qlue.properties"; public static final String ROUTES_FILENAME = "routes.conf"; public static final String REQUEST_ACTUAL_PAGE_KEY = "QLUE_ACTUAL_PAGE"; public static final String PROPERTY_CONF_PATH = "qlue.confPath"; public static final int FRONTEND_ENCRYPTION_NO = 0; public static final int FRONTEND_ENCRYPTION_CONTAINER = 1; public static final int FRONTEND_ENCRYPTION_FORCE_YES = 2; public static final int FRONTEND_ENCRYPTION_TRUSTED_HEADER = 3; private static final String PROPERTY_CHARACTER_ENCODING = "qlue.characterEncoding"; private static final String PROPERTY_DEVMODE_ENABLED = "qlue.devmode.active"; private static final String PROPERTY_DEVMODE_RANGES = "qlue.devmode.subnets"; private static final String PROPERTY_DEVMODE_PASSWORD = "qlue.devmode.password"; private static final String PROPERTY_TRUSTED_PROXIES = "qlue.trustedProxies"; private static final String PROPERTY_FRONTEND_ENCRYPTION = "qlue.frontendEncryption"; private static final String PROPERTY_ADMIN_EMAIL = "qlue.adminEmail"; private static final String PROPERTY_URGENT_EMAIL = "qlue.urgentEmail"; private String messagesFilename = "com/webkreator/qlue/messages"; private Properties properties = new Properties(); private String appPrefix = "QlueApp"; private HttpServlet servlet; private Logger log = LoggerFactory.getLogger(QlueApplication.class); private QlueRouteManager routeManager = new QlueRouteManager(this); private ViewResolver viewResolver = new ViewResolver(); private ViewFactory viewFactory = new ClasspathVelocityViewFactory(); private HashMap<Class, PropertyEditor> editors = new HashMap<>(); private String characterEncoding = "UTF-8"; private int developmentMode = QlueConstants.DEVMODE_DISABLED; private String developmentModePassword = null; private List<CIDRUtils> developmentSubnets = null; private List<CIDRUtils> trustedProxies = null; private String adminEmail; private String urgentEmail; private int urgentCounter = -1; private SmtpEmailSender smtpEmailSender; private SmtpEmailSender asyncSmtpEmailSender; private HashMap<Locale, MessageSource> messageSources = new HashMap<>(); private String confPath; private int frontendEncryptionCheck = FRONTEND_ENCRYPTION_CONTAINER; private Timer timer; private String priorityTemplatePath; private Scheduler scheduler; /** * This is the default constructor. The idea is that a subclass will * override it and supplement with its own configuration. */ protected QlueApplication() { initPropertyEditors(); } /** * This constructor is intended for use by very simple web applications that * consist of only one package. */ public QlueApplication(String pagesHome) { initPropertyEditors(); // These are the default routes for a simple application; we use them // to avoid having to provide routing configuration. routeManager.add(RouteFactory.create(routeManager, "/_qlue/{} package:com.webkreator.qlue.pages")); routeManager.add(RouteFactory.create(routeManager, "/{} package:" + pagesHome)); } protected void determineConfigPath() { // First, try a system property. confPath = System.getProperty(PROPERTY_CONF_PATH); if (confPath != null) { return; } // Assume the configuration is in the WEB-INF folder. confPath = servlet.getServletContext().getRealPath("/WEB-INF/"); } /** * Initialize QlueApp instance. Qlue applications are designed to be used by * servlets to delegate both initialization and request processing. 
*/ public void init(HttpServlet servlet) throws Exception { qlueInit(servlet); appInit(servlet); qluePostInit(); } protected void qlueInit(HttpServlet servlet) throws Exception { this.servlet = servlet; determineConfigPath(); loadProperties(); initRouteManagers(); if (viewResolver == null) { throw new Exception("View resolver not configured"); } if (viewFactory == null) { throw new Exception("View factory not configured"); } viewFactory.init(this); } protected void appInit(HttpServlet servlet) throws Exception { // Left for applications to override. } protected void qluePostInit() throws Exception { Calendar nextHour = Calendar.getInstance(); nextHour.set(Calendar.HOUR_OF_DAY, nextHour.get(Calendar.HOUR_OF_DAY) + 1); nextHour.set(Calendar.MINUTE, 0); nextHour.set(Calendar.SECOND, 0); scheduleTask(new SendUrgentRemindersTask(), nextHour.getTime(), 60 * 60 * 1000); scheduleApplicationJobs(); } protected void initRouteManagers() throws Exception { File routesFile = new File(confPath, ROUTES_FILENAME); if (routesFile.exists()) { routeManager.load(routesFile); } } void loadProperties() throws Exception { File propsFile; String filename = System.getProperty("qlue.properties"); if (filename == null) { filename = PROPERTIES_FILENAME; } if (filename.charAt(0) == '/') { propsFile = new File(filename); } else { propsFile = new File(confPath, filename); } if (propsFile.exists() == false) { throw new QlueException("Unable to find file: " + propsFile.getAbsolutePath()); } properties.load(new FileReader(propsFile)); properties.setProperty("confPath", confPath); properties.setProperty("webRoot", servlet.getServletContext().getRealPath("/")); if (getProperty(PROPERTY_CHARACTER_ENCODING) != null) { setCharacterEncoding(getProperty(PROPERTY_CHARACTER_ENCODING)); } if (getProperty(PROPERTY_DEVMODE_ENABLED) != null) { setApplicationDevelopmentMode(getProperty(PROPERTY_DEVMODE_ENABLED)); } if (getProperty(PROPERTY_DEVMODE_RANGES) != null) { setDevelopmentSubnets(getProperty(PROPERTY_DEVMODE_RANGES)); } if (getProperty(PROPERTY_TRUSTED_PROXIES) != null) { setTrustedProxies(getProperty(PROPERTY_TRUSTED_PROXIES)); } if (getProperty(PROPERTY_FRONTEND_ENCRYPTION) != null) { configureFrontendEncryption(getProperty(PROPERTY_FRONTEND_ENCRYPTION)); } developmentModePassword = getProperty(PROPERTY_DEVMODE_PASSWORD); adminEmail = getProperty(PROPERTY_ADMIN_EMAIL); urgentEmail = getProperty(PROPERTY_URGENT_EMAIL); // Configure the SMTP email senders smtpEmailSender = new SmtpEmailSender(); if (getBooleanProperty("qlue.smtp.async", "false")) { AsyncSmtpEmailSender myAsyncSmtpEmailSender = new AsyncSmtpEmailSender(smtpEmailSender); // Start a new daemon thread to send email in the background. Thread thread = new Thread(myAsyncSmtpEmailSender); thread.setDaemon(true); thread.start(); asyncSmtpEmailSender = myAsyncSmtpEmailSender; } else { // All email sending is synchronous. 
asyncSmtpEmailSender = smtpEmailSender; } smtpEmailSender.setSmtpServer(getProperty("qlue.smtp.server")); if (getProperty("qlue.smtp.port") != null) { smtpEmailSender.setSmtpPort(Integer.valueOf(getProperty("qlue.smtp.port"))); } if (getProperty("qlue.smtp.protocol") != null) { smtpEmailSender.setSmtpProtocol(getProperty("qlue.smtp.protocol")); } if (getProperty("qlue.smtp.username") != null) { smtpEmailSender.setSmtpUsername(getProperty("qlue.smtp.username")); smtpEmailSender.setSmtpPassword(getProperty("qlue.smtp.password")); } priorityTemplatePath = getProperty("qlue.velocity.priorityTemplatePath"); if (priorityTemplatePath != null) { Path p = FileSystems.getDefault().getPath(priorityTemplatePath); if (!p.isAbsolute()) { priorityTemplatePath = getApplicationRoot() + "/" + priorityTemplatePath; } File f = new File(priorityTemplatePath); if (!f.exists()) { throw new QlueException("Priority template path doesn't exist: " + priorityTemplatePath); } if (!f.isDirectory()) { throw new QlueException("Priority template path is not a directory: " + priorityTemplatePath); } } } private void configureFrontendEncryption(String value) { if ("no".equals(value)) { frontendEncryptionCheck = FRONTEND_ENCRYPTION_NO; } else if ("forceYes".equals(value)) { frontendEncryptionCheck = FRONTEND_ENCRYPTION_FORCE_YES; } else if ("container".equals(value)) { frontendEncryptionCheck = FRONTEND_ENCRYPTION_CONTAINER; } else if ("trustedHeader".equals(value)) { frontendEncryptionCheck = FRONTEND_ENCRYPTION_TRUSTED_HEADER; } else { throw new RuntimeException("Invalid value for the " + PROPERTY_FRONTEND_ENCRYPTION + " parameter:" + value); } } /** * Destroys the application. Invoked when the backing servlet is destroyed. */ public void destroy() { } /** * This method is the main entry point for request processing. */ protected void service(HttpServlet servlet, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Remember when processing began. long startTime = System.currentTimeMillis(); // Set the default character encoding. request.setCharacterEncoding(characterEncoding); response.setCharacterEncoding(characterEncoding); // Create a new application session object if one does not exist. HttpSession session = request.getSession(true); synchronized (session) { if (session.getAttribute(QlueConstants.QLUE_SESSION_OBJECT) == null) { session.setAttribute(QlueConstants.QLUE_SESSION_OBJECT, createNewSessionObject()); } } // Create a new context. TransactionContext context = new TransactionContext( this, servlet.getServletConfig(), servlet.getServletContext(), request, response); // Expose transaction information to the logging subsystem. MDC.put("txId", context.getTxId()); MDC.put("remoteAddr", context.getEffectiveRemoteAddr()); MDC.put("sessionId", context.getSession().getId()); // Proceed to the second stage of request processing try { log.debug("Processing request: " + request.getRequestURI()); serviceInternal(context); log.debug("Processed request in " + (System.currentTimeMillis() - startTime)); } finally { MDC.clear(); } } protected Object route(TransactionContext context) { return routeManager.route(context); } protected View processPage(Page page) throws Exception { View view = null; // Initialize backend. This is a handy place to do things needed for later // on, for example, configure database access. view = page.initBackend(); if (view != null) { return view; } // Check access. 
The idea with this hook is to run it as early as possible, // before any parameters are accessed, thus minimising the executed code. view = page.checkAccess(); if (view != null) { return view; } // For persistent pages, we clear errors only on POSTs; that // means that a subsequent GET can access the errors to show // them to the user. if (!page.isPersistent() || page.context.isPost()) { page.getErrors().clear(); } bindParameters(page); // Custom parameter validation. view = page.validateParameters(); if (view != null) { return view; } // Custom error handling. if (page.hasErrors()) { view = page.handleValidationError(); if (view != null) { return view; } } // Initialize the page. This really only makes sense for persistent pages, where you // want to run some code only once. With non-persistent pages, it's better to have // all the code in the same method. if (page.getState().equals(Page.STATE_INIT)) { view = page.init(); if (view != null) { return view; } } // Early call to prepare the page for the main thing. view = page.prepareForService(); if (view != null) { return view; } // Finally, run the main processing entry point. return page.service(); } /** * Request processing entry point. */ protected void serviceInternal(TransactionContext context) throws IOException { Page page = null; try { // First check if this is a request for a persistent page. We can // honour such requests only when we're not handling errors. if (context.isErrorHandler() == false) { // Persistent pages are identified via the "_pid" parameter. If we have // one such parameter, we look for the corresponding page in session storage. String pids[] = context.getParameterValues("_pid"); if ((pids != null) && (pids.length != 0)) { // Only one _pid parameter is allowed. if (pids.length != 1) { throw new RuntimeException("Request contains multiple _pid parameters"); } // Find the page using the requested page ID. PersistentPageRecord pageRecord = context.findPersistentPageRecord(pids[0]); if (pageRecord == null) { throw new PersistentPageNotFoundException("Persistent page not found: " + pids[0]); } // If the replacementUri is set that means that the page no longer // exist and that we need to forward all further request to it. if (pageRecord.getReplacementUri() != null) { context.getResponse().sendRedirect(pageRecord.getReplacementUri()); return; } // Otherwise, let's use this page. page = pageRecord.getPage(); if (page == null) { throw new RuntimeException("Page record doesn't contain page"); } } } // If we don't have a persistent page we'll create a new one by routing this request. if (page == null) { Object routeObject = route(context); if (routeObject == null) { throw new PageNotFoundException(); } else if (routeObject instanceof View) { page = new DirectViewPage((View) routeObject); } else if (routeObject instanceof Page) { page = (Page) routeObject; } else { throw new RuntimeException("Qlue: Unexpected router response: " + routeObject); } } // Run the page. Access to the page is synchronised, which means that only one // HTTP request can handle it at any given time. synchronized (page) { page.setApp(this); page.determineDefaultViewName(viewResolver); page.setContext(context); page.determineCommandObject(); if (page.isPersistent()) { context.persistPage(page); } // Set content type now, before any output happens. context.response.setContentType(page.getContentType()); View view = processPage(page); if (view != null) { renderView(view, context, page); } // Execute page commit. 
This is what it sounds like, // an opportunity to use a simple approach to transaction // management for simple applications. page.commit(); // Automatic page state transition. if (!page.isPersistent()) { // Non-persistent pages automatically transition to FINISHED so that cleanup can be invoked. page.setState(Page.STATE_FINISHED); } else { // For persistent pages, we change their state only if they're left as NEW // after execution. We change to POSTED in order to prevent multiple calls to init(). if (page.getState().equals(Page.STATE_INIT)) { page.setState(Page.STATE_WORKING); } } } } catch (PersistentPageNotFoundException ppnfe) { // When we encounter an unknown process reference, we // redirect back to the site home page. Showing errors // is probably not going to be helpful, and may actually compel the // user to go back and try again (and that's not going to work). context.getResponse().sendRedirect("/"); } catch (RequestMethodException rme) { if (page != null) { if (page.isDevelopmentMode()) { log.error(rme.getMessage(), rme); } page.rollback(); } // Convert RequestMethodException into a 405 response. context.getResponse().sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); } catch (PageNotFoundException pnfe) { if (page != null) { if (page.isDevelopmentMode()) { log.error(pnfe.getMessage(), pnfe); } page.rollback(); } // Convert PageNotFoundException into a 404 response. context.getResponse().sendError(HttpServletResponse.SC_NOT_FOUND); } catch (ValidationException ve) { if (!page.isDevelopmentMode()) { if (page != null) { page.rollback(); } // Respond to validation errors with a 400 response. context.getResponse().sendError(HttpServletResponse.SC_BAD_REQUEST); } else { // In development mode, we let the exception propagate to that it // can be handled by our generic exception handler, which will show // the error information on the screen. throw ve; } } catch (QlueSecurityException se) { if (page != null) { page.rollback(); } log.error("Security exception: " + context.getRequestUriWithQueryString(), se); // Respond to security exceptions with a 400 response. context.getResponse().sendError(HttpServletResponse.SC_BAD_REQUEST); } catch (Exception e) { if (page != null) { page.rollback(); // Because we are about to throw an exception, which may cause // another page to handle this request, we need to remember // the current page (which is useful for debugging information, etc). setActualPage(page); } // Don't process the exception further if the problem is caused // by the client going away (e.g., interrupted file download). if (!e.getClass().getName().contains("ClientAbortException")) { // Handle application exception, which will record full context // data and, optionally, notify the administrator via email. handleApplicationException(context, page, e); // We do not wish to propagate the exception further, but, if it's not too late // (the response not committed), we should use a meaningful status code. if (context.getResponse().isCommitted() == false) { if (e instanceof ServiceUnavailableException) { context.getResponse().sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE); } else { context.getResponse().sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); } } } } finally { // In development mode, append debugging information to the end of the page. masterWriteRequestDevelopmentInformation(context, page); // Invoke cleanup on finished pages. if ((page != null) && (page.isFinished()) && (!page.isCleanupInvoked())) { page.cleanup(); } } } /** * Handle application exception. 
We dump debugging information into the * application activity log and, if the admin email address is configured, * we send the same via email. */ protected void handleApplicationException(TransactionContext tx, Page page, Throwable t) { String debugInfo = null; if (tx != null) { // Dump debugging information into a String StringWriter sw = new StringWriter(); sw.append("Debugging information follows:"); try { _masterWriteRequestDevelopmentInformation(tx, page, new PrintWriter(sw)); } catch (IOException e) { // Ignore (but log, in case we do get something) log.error("Exception while preparing debugging information", e); } // Qlue formats debugging information using HTML markup, and here // we want to log it to text files, which means we need to strip // out the markup and convert entity references. HtmlToText htt = new HtmlToText(); try { htt.parse(new StringReader(sw.getBuffer().toString())); debugInfo = htt.toString(); } catch (IOException e) { log.error("Error while converting HTML", e); } } // Record message to the activity log log.error("Qlue: Unhandled application exception", t); // Send email notification try { Email email = new SimpleEmail(); email.setCharset("UTF-8"); if (t.getMessage() != null) { email.setSubject("Application Exception: " + t.getMessage()); } else { email.setSubject("Application Exception"); } StringWriter msgBody = new StringWriter(); PrintWriter pw = new PrintWriter(msgBody); t.printStackTrace(pw); pw.println(); if (debugInfo != null) { pw.print(debugInfo); } email.setMsg(msgBody.toString()); sendAdminEmail(email, true /* fatalError */); } catch (Exception e) { log.error("Failed sending admin email: ", e); } } public synchronized void sendAdminEmail(Email email) { sendAdminEmail(email, false); } public synchronized void sendAdminEmail(Email email, boolean fatalError) { if (adminEmail == null) { return; } // Configure the correct email address. try { email.setFrom(adminEmail); // If this is a fatal error and we have an // email address for emergencies, treat it // as an emergency. if ((fatalError) && (urgentEmail != null)) { email.addTo(urgentEmail); } else { email.addTo(adminEmail); } } catch (EmailException e) { log.error("Invalid admin email address", e); } // Update the email subject to include the application prefix. email.setSubject("[" + getAppPrefix() + "] " + email.getSubject()); // If the email is about a fatal problem, determine // if we want to urgently notify the administrators; we // want to send only one urgent email per time period. if ((fatalError) && (urgentEmail != null)) { // When the counter is at -1 that means we didn't // send any emails in the previous time period. In // other words, we can send one now. if (urgentCounter == -1) { urgentCounter = 0; } else { // Alternatively, just increment the counter // and send nothing. urgentCounter++; log.info("Suppressing fatal error email (" + urgentCounter + "): " + email.getSubject()); return; } } // Send the email now. try { getEmailSender().send(email); } catch (Exception e) { log.error("Failed to send email", e); } } public void renderView(View view, TransactionContext tx, Page page) throws Exception { // For persistent pages, we clear errors only on POSTs; that // means that a subsequent GET can access the errors to show // them to the user. 
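// The branch below rebuilds the shadow input (the original text of bound parameters):
// from the command object, overwritten by any values present in the request, for
// non-persistent pages and for POSTs to persistent pages; from the command object
// alone on the first (INIT) render of a persistent page.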
if (!page.isPersistent() || page.context.isPost()) { createShadowInput(page, /* fromRequest */ true); } else { if (page.getState() == Page.STATE_INIT) { createShadowInput(page, /* fromRequest */ false); } } // NullView only indicates that no further output is needed. if (view instanceof NullView) { return; } // If we get a DefaultView or NamedView instance // we have to replace them with a real view, using // the name of the page in the view resolution process. if (view instanceof DefaultView) { view = viewFactory.constructView(page, page.getViewName()); } else if (view instanceof NamedView) { view = viewFactory.constructView(page, ((NamedView) view).getViewName()); } else if (view instanceof ClasspathView) { view = viewFactory.constructView(((ClasspathView) view).getViewName()); } else if (view instanceof FinalRedirectView) { page.setState(Page.STATE_FINISHED); if (((RedirectView) view).getPage() == null) { page.context.replacePage(page, (FinalRedirectView) view); } } if (view == null) { throw new RuntimeException("Qlue: Unable to resolve view"); } view.render(tx, page); } /** * Invoked to store the original text values for parameters. The text is * needed in the cases where it cannot be converted to the intended type. */ private void createShadowInput(Page page, boolean fromRequest) throws Exception { page.clearShadowInput(); // Ask the page to provide a command object, which can be // a custom object or the page itself. Object commandObject = page.getCommandObject(); if (commandObject == null) { throw new RuntimeException("Qlue: Command object cannot be null"); } // Loop through the command object fields in order to determine // if any are annotated as parameters. Remember the original // text values of parameters. Set<Field> fields = getClassPublicFields(commandObject.getClass()); for (Field f : fields) { if (f.isAnnotationPresent(QlueParameter.class)) { if (QlueFile.class.isAssignableFrom(f.getType())) { continue; } if (!Modifier.isPublic(f.getModifiers())) { throw new QlueException("QlueParameter used on a non-public field"); } // Update missing shadow input fields if (page.getShadowInput().get(f.getName()) == null) { if (f.getType().isArray()) { createShadowInputArrayParam(page, f, fromRequest); } else { createShadowInputNonArrayParam(page, f, fromRequest); } } } } } private void createShadowInputArrayParam(Page page, Field f, boolean fromRequest) throws Exception { // Find the property editor PropertyEditor pe = editors.get(f.getType().getComponentType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to handle type: " + f.getType().getComponentType()); } // If there is any data in the command object use it to populate shadow input if (f.get(page.getCommandObject()) != null) { Object[] originalValues = (Object[]) f.get(page.getCommandObject()); String[] textValues = new String[originalValues.length]; for (int i = 0; i < originalValues.length; i++) { textValues[i] = pe.toText(originalValues[i]); } page.getShadowInput().set(f.getName(), textValues); } if (fromRequest) { // Overwrite with the value in the request, if present String[] requestParamValues = page.context.getParameterValues(f.getName()); if (requestParamValues != null) { page.getShadowInput().set(f.getName(), requestParamValues); } } } private void createShadowInputNonArrayParam(Page page, Field f, boolean fromRequest) throws Exception { // Find the property editor PropertyEditor pe = editors.get(f.getType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to 
handle type: " + f.getType()); } // If the object exists in the command object, convert it to text using the property editor Object o = f.get(page.getCommandObject()); if (o != null) { page.getShadowInput().set(f.getName(), pe.toText(o)); } // Overwrite with the value in the request, if present if (fromRequest) { String requestParamValue = page.context.getParameter(f.getName()); if (requestParamValue != null) { page.getShadowInput().set(f.getName(), requestParamValue); } } } /** * Appends debugging information to the view, but only if the development mode is active. */ protected void masterWriteRequestDevelopmentInformation(TransactionContext context, Page page) throws IOException { if (page == null) { return; } // Check development mode if (page.isDevelopmentMode() == false) { return; } // We might be in an error handler, in which case we want to display // the state of the actual (original) page and not this one. Page actualPage = getActualPage(page); if (actualPage != null) { // Use the actual page and context page = actualPage; context = page.getContext(); } // Ignore redirections; RedirectView knows to display development // information before redirects, which is why we don't need // to worry here. int status = context.response.getStatus(); if ((status >= 300) && (status <= 399)) { return; } // Ignore responses other than text/html; we don't want to // corrupt images and other resources that are not pages. String contentType = context.response.getContentType(); if (contentType != null) { int i = contentType.indexOf(';'); if (i != -1) { contentType = contentType.substring(0, i); } if (contentType.compareToIgnoreCase("text/html") != 0) { return; } } // Append output _masterWriteRequestDevelopmentInformation(context, page, context.response.getWriter()); } protected void _masterWriteRequestDevelopmentInformation(TransactionContext context, Page page, PrintWriter out) throws IOException { if (page == null) { return; } out.println("<hr><div align=left><pre>"); out.println("<b>Request</b>\n"); context.writeRequestDevelopmentInformation(out); out.println(""); out.println("<b>Page</b>\n"); page.writeDevelopmentInformation(out); out.println(""); out.println("<b>Session</b>\n"); QlueSession qlueSession = page.getQlueSession(); if (qlueSession != null) { qlueSession.writeDevelopmentInformation(out); out.println(""); } out.println("<b>Application</b>\n"); this.writeDevelopmentInformation(out); out.println("</pre></div>"); } /** * Write application-specific debugging output. */ protected void writeDevelopmentInformation(PrintWriter out) { out.println(" Prefix: " + HtmlEncoder.html(appPrefix)); out.println(" Development mode: " + developmentMode); } protected Set<Field> getClassPublicFields(Class klass) { Set<Field> fields = new HashSet<>(); for (; ; ) { Field[] fs = klass.getDeclaredFields(); for (Field f : fs) { fields.add(f); } klass = klass.getSuperclass(); if (klass == null) { break; } if (klass.getCanonicalName().equals(Page.class.getCanonicalName())) { break; } } return fields; } public boolean shouldBindParameter(QlueParameter qp, Page page) { String state = qp.state(); // Always bind. if (state.equals(Page.STATE_ANY)) { return true; } // Bind if the parameter state matches page state. if (state.equals(page.getState())) { return true; } // Special state STATE_DEFAULT: if the page is not persistent, // bind always. Otherwise, bind only on POST. 
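        // Illustrative example: a parameter left in the default state on a persistent
        // page is bound when the form is POSTed, but not on the GET that renders the
        // form, so previously bound values survive the re-render.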
if (state.equals(Page.STATE_DEFAULT)) { if (!page.isPersistent() || page.context.isPost()) { return true; } else { return false; } } // Bind on GET requests. if (state.equals(Page.STATE_GET) && page.context.isGet()) { return true; } // Bind on POST requests. if (state.equals(Page.STATE_POST) && page.context.isPost()) { return true; } return false; } /** * Bind request parameters to the command object provided by the page. */ private void bindParameters(Page page) throws Exception { // Ask the page to provide a command object we can bind to. Simpler pages // might see themselves as the command objects; more complex might use more than one. Object commandObject = page.getCommandObject(); if (commandObject == null) { throw new RuntimeException("Qlue: Command object cannot be null"); } // Loop through the command object fields in order to determine if any are annotated as // parameters. Validate those that are, then bind them. Set<Field> fields = getClassPublicFields(commandObject.getClass()); for (Field f : fields) { // We bind command object fields that have the QlueParameter annotation. if (f.isAnnotationPresent(QlueParameter.class) == false) { continue; } // We bind only to public fields, but it commonly happens that the QlueParameter // annotation is used on other field types, leading to frustration because it's // not obvious why binding is not working. For this reason, we detect that problem // here and force an error to inform the developer. if (!Modifier.isPublic(f.getModifiers())) { throw new QlueException("QlueParameter used on a non-public field"); } try { QlueParameter qp = f.getAnnotation(QlueParameter.class); // Bind parameter when appropriate. if (shouldBindParameter(qp, page)) { if (qp.source().equals(ParamSource.URL)) { // Bind parameters transported in URL. For this to work there needs // to exist a route that parses out the parameter out of the URL. bindParameterFromString(commandObject, f, page, page.context.getUrlParameter(f.getName())); } else { if (qp.source().equals(ParamSource.GET_POST) || (qp.source().equals(ParamSource.GET) && page.context.isGet()) || (qp.source().equals(ParamSource.POST) && page.context.isPost())) { if (f.getType().isArray()) { bindArrayParameter(commandObject, f, page); } else { bindNonArrayParameter(commandObject, f, page); } } } } } catch (IllegalArgumentException e) { // Transform editor exception into a validation error. page.addError(f.getName(), e.getMessage()); } } } /** * Bind an array parameter. */ private void bindArrayParameter(Object commandObject, Field f, Page page) throws Exception { // Get the annotation QlueParameter qp = f.getAnnotation(QlueParameter.class); // Look for a property editor, which will know how // to convert text into a proper native type PropertyEditor pe = editors.get(f.getType().getComponentType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to handle type: " + f.getType().getComponentType()); } String[] values = page.context.getParameterValues(f.getName()); if ((values == null) || (values.length == 0)) { // Parameter not in input; create an empty array and set it on the command object. 
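            // e.g. a hypothetical "public String[] tags" parameter becomes an empty
            // String[0] rather than null, so views can iterate it without a null check.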
f.set(commandObject, Array.newInstance(f.getType().getComponentType(), 0)); return; } // Parameter in input boolean hasErrors = false; Object[] convertedValues = (Object[]) Array.newInstance(f.getType().getComponentType(), values.length); for (int i = 0; i < values.length; i++) { String newValue = validateParameter(page, f, qp, values[i]); if (newValue != null) { values[i] = newValue; convertedValues[i] = pe.fromText(f, values[i], f.get(commandObject)); } else { hasErrors = true; } } if (hasErrors == false) { f.set(commandObject, convertedValues); } } /** * Validate one parameter. */ protected String validateParameter(Page page, Field f, QlueParameter qp, String value) { // Transform value according to the list // of transformation functions supplied String tfn = qp.tfn(); if (tfn.length() != 0) { StringTokenizer st = new StringTokenizer(tfn, " ,"); while (st.hasMoreTokens()) { String t = st.nextToken(); if (t.compareTo("trim") == 0) { value = value.trim(); } else if (t.compareTo("lowercase") == 0) { value = value.toLowerCase(); } else { throw new RuntimeException("Qlue: Invalid parameter transformation function: " + t); } } } // If the parameter is mandatory, check that is // not empty or that it does not consist only // of whitespace characters. if (qp.mandatory()) { if (TextUtil.isEmptyOrWhitespace(value)) { page.addError(f.getName(), getFieldMissingMessage(qp)); return null; } } // Check size if (qp.maxSize() != -1) { if ((value.length() > qp.maxSize())) { if (qp.ignoreInvalid() == false) { page.addError(f.getName(), "qlue.validation.maxSize"); return null; } else { return null; } } } // Check that it conforms to the supplied regular expression if (qp.pattern().length() != 0) { Pattern p = null; // Compile the pattern first try { p = Pattern.compile(qp.pattern(), Pattern.DOTALL); } catch (PatternSyntaxException e) { throw new RuntimeException("Qlue: Invalid parameter validation pattern: " + qp.pattern()); } // Try to match Matcher m = p.matcher(value); if ((m.matches() == false)) { if (qp.ignoreInvalid() == false) { page.addError(f.getName(), "qlue.validation.pattern"); return null; } else { return null; } } } return value; } /** * Bind a parameter that is not an array. 
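	 * <p>Illustrative target field on a page or command object (hypothetical name):
	 * <pre>{@code
	 * @QlueParameter(mandatory = true, tfn = "trim")
	 * public String username;
	 * }</pre>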
*/ private void bindNonArrayParameter(Object commandObject, Field f, Page page) throws Exception { // Get the annotation QlueParameter qp = f.getAnnotation(QlueParameter.class); // First check if the parameter is a file if (QlueFile.class.isAssignableFrom(f.getType())) { bindFileParameter(commandObject, f, page); return; } // Look for a property editor, which will know how // to convert text into a native type PropertyEditor pe = editors.get(f.getType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to handle type: " + f.getType()); } // If the parameter is present in request, validate it and set on the command object String value = page.context.getParameter(f.getName()); if (value != null) { String newValue = validateParameter(page, f, qp, value); if (newValue != null) { value = newValue; f.set(commandObject, pe.fromText(f, value, f.get(commandObject))); } } else { f.set(commandObject, pe.fromText(f, value, f.get(commandObject))); // We are here if the parameter is not in the request, in which // case we need to check of the parameter is mandatory if (qp.mandatory()) { page.addError(f.getName(), getFieldMissingMessage(qp)); } } } private void bindParameterFromString(Object commandObject, Field f, Page page, String value) throws Exception { // Get the annotation QlueParameter qp = f.getAnnotation(QlueParameter.class); // First check if the parameter is a file if (QlueFile.class.isAssignableFrom(f.getType())) { throw new RuntimeException("Qlue: Unable to bind a string to file parameter"); } // Look for a property editor, which will know how // to convert text into a native type PropertyEditor pe = editors.get(f.getType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to handle type: " + f.getType()); } // If the parameter is present in request, validate it // and set on the command object if (value != null) { String newValue = validateParameter(page, f, qp, value); if (newValue != null) { value = newValue; f.set(commandObject, pe.fromText(f, value, f.get(commandObject))); } } else { f.set(commandObject, pe.fromText(f, value, f.get(commandObject))); // We are here if the parameter is not in request, in which // case we need to check of the parameter is mandatory if (qp.mandatory()) { page.addError(f.getName(), getFieldMissingMessage(qp)); } } } /** * Retrieve field message that we need to emit when a mandatory parameter is * missing. */ private String getFieldMissingMessage(QlueParameter qp) { return (qp.fieldMissingMessage().length() > 0) ? qp.fieldMissingMessage() : "qlue.validation.mandatory"; } /** * Bind file parameter. */ private void bindFileParameter(Object commandObject, Field f, Page page) throws Exception { QlueParameter qp = f.getAnnotation(QlueParameter.class); Part p = null; try { p = page.context.getPart(f.getName()); } catch (ServletException e) { } if ((p == null) || (p.getSize() == 0)) { if (qp.mandatory()) { page.addError(f.getName(), getFieldMissingMessage(qp)); } return; } File file = File.createTempFile("qlue-", ".tmp"); p.write(file.getAbsolutePath()); p.delete(); QlueFile qf = new QlueFile(file.getAbsolutePath()); qf.setContentType(p.getContentType()); qf.setSubmittedFilename(p.getSubmittedFileName()); f.set(commandObject, qf); } /** * Register a new property editor. */ private void registerPropertyEditor(PropertyEditor editor) { editors.put(editor.getEditorClass(), editor); } /** * Register the built-in property editors. 
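	 * By default these cover Integer, Long, String, Boolean and Date fields.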
*/ protected void initPropertyEditors() { registerPropertyEditor(new IntegerEditor()); registerPropertyEditor(new LongEditor()); registerPropertyEditor(new StringEditor()); registerPropertyEditor(new BooleanEditor()); registerPropertyEditor(new DateEditor()); } /** * Retrieve view resolver. */ public ViewResolver getViewResolver() { return viewResolver; } /** * Set view resolver. */ protected void setViewResolver(ViewResolver viewResolver) { this.viewResolver = viewResolver; } /** * Retrieve view factory. */ public ViewFactory getViewFactory() { return viewFactory; } /** * Set view factory. */ protected void setViewFactory(ViewFactory viewFactory) { this.viewFactory = viewFactory; } /** * Get application root directory. */ public String getApplicationRoot() { return servlet.getServletContext().getRealPath("/"); } /** * Get application prefix. */ public String getAppPrefix() { return appPrefix; } /** * Set application prefix. */ protected void setAppPrefix(String appPrefix) { this.appPrefix = appPrefix; } /** * Retrieve this application's format tool, which is used in templates to * format output (but _not_ for output encoding). By default, that's an * instance of DefaultVelocityTool, but subclasses can use something else. */ public Object getVelocityTool() { return new DefaultVelocityTool(); } /** * Retrieve an encoding tool the application can use to write directly to HTML. */ public Object getEncodingTool() { return new HtmlEncoder(); } /** * This method is invoked to create a new session object. A QlueSession * instance is returned by default, but most applications will want to * override this method and provide their own session objects. */ protected QlueSession createNewSessionObject() { return new QlueSession(); } /** * Returns the session object associated with the current HTTP session. */ public QlueSession getQlueSession(HttpServletRequest request) { HttpSession httpSession = request.getSession(false); if (httpSession == null) { return null; } return (QlueSession) request.getSession().getAttribute(QlueConstants.QLUE_SESSION_OBJECT); } /** * Invalidates the existing session and creates a new one, preserving the * QlueSession object in the process. This method should be invoked * immediately after a user is authenticated to prevent session fixation * attacks. */ public void regenerateSession(HttpServletRequest request) { HttpSession existingHttpSession = request.getSession(false); if (existingHttpSession == null) { throw new IllegalStateException("Unable to regenerate session: No HTTP session"); } QlueSession qlueSession = getQlueSession(request); if (qlueSession == null) { throw new IllegalStateException("Unable to regenerate session: No Qlue session"); } QluePageManager pageManager = (QluePageManager) existingHttpSession.getAttribute(QlueConstants.QLUE_SESSION_PAGE_MANAGER); if (pageManager == null) { throw new IllegalStateException("Unable to regenerate session: No page manager"); } existingHttpSession.invalidate(); HttpSession newHttpSession = request.getSession(true); newHttpSession.setAttribute(QlueConstants.QLUE_SESSION_OBJECT, qlueSession); newHttpSession.setAttribute(QlueConstants.QLUE_SESSION_PAGE_MANAGER, pageManager); } /** * Set application prefix, which is used in logging as part of the unique transaction identifier. */ protected void setPrefix(String prefix) { this.appPrefix = prefix; } /** * Whether direct output (in which the programmer is expected to manually * encode data) is allowed. We do not allow direct output by default. 
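	 * Direct output means page code writing markup straight to the response, where
	 * nothing encodes the data automatically.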
* Override this method to change the behaviour. */ public boolean allowDirectOutput() { return false; } /** * Configure character encoding. */ protected void setCharacterEncoding(String characterEncoding) { this.characterEncoding = characterEncoding; } /** * Retrieves application's character encoding. */ public String getCharacterEncoding() { return characterEncoding; } /** * Configure development mode. */ protected void setApplicationDevelopmentMode(String input) { if (input.compareToIgnoreCase("on") == 0) { developmentMode = QlueConstants.DEVMODE_ENABLED; return; } else if (input.compareToIgnoreCase("off") == 0) { developmentMode = QlueConstants.DEVMODE_DISABLED; return; } else if (input.compareToIgnoreCase("ondemand") == 0) { developmentMode = QlueConstants.DEVMODE_ONDEMAND; return; } throw new IllegalArgumentException("Invalid value for development mode: " + input); } /** * Get the development mode setting. */ public int getApplicationDevelopmentMode() { return developmentMode; } /** * Set development mode password. */ public void setDevelopmentModePassword(String developmentModePassword) { this.developmentModePassword = developmentModePassword; } private void setTrustedProxies(String combinedSubnets) throws Exception { if (TextUtil.isEmpty(combinedSubnets)) { return; } String[] subnets = combinedSubnets.split("[;,\\x20]"); trustedProxies = new ArrayList<>(); for (String s : subnets) { if (TextUtil.isEmpty(s)) { continue; } if ((!s.contains("/")) && (!s.contains(":"))) { s = s + "/32"; } try { trustedProxies.add(new CIDRUtils(s)); } catch (IllegalArgumentException iae) { throw new RuntimeException("Qlue: Invalid proxy subnet: " + s); } } } public boolean isTrustedProxyRequest(TransactionContext context) { if (trustedProxies == null) { return false; } try { InetAddress remoteAddr = InetAddress.getByName(context.request.getRemoteAddr()); for (CIDRUtils su : trustedProxies) { if (su.isInRange(remoteAddr)) { return true; } } } catch (UnknownHostException e) { // Shouldn't happen. e.printStackTrace(System.err); return false; } return false; } /** * Configure the set of IP addresses that are allowed to use development mode. */ protected void setDevelopmentSubnets(String combinedSubnets) throws Exception { if (TextUtil.isEmpty(combinedSubnets)) { return; } String[] subnets = combinedSubnets.split("[;,\\x20]"); developmentSubnets = new ArrayList<>(); for (String s : subnets) { if (TextUtil.isEmpty(s)) { continue; } if ((!s.contains("/")) && (!s.contains(":"))) { s = s + "/32"; } try { developmentSubnets.add(new CIDRUtils(s)); } catch (IllegalArgumentException iae) { throw new RuntimeException("Qlue: Invalid development subnet: " + s); } } } /** * Check if the current transaction comes from an IP address that is allowed * to use development mode. */ public boolean isDeveloperRequestIpAddress(TransactionContext context) { if (developmentSubnets == null) { return false; } try { InetAddress remoteAddr = InetAddress.getByName(context.getEffectiveRemoteAddr()); for (CIDRUtils su : developmentSubnets) { if (su.isInRange(remoteAddr)) { return true; } } } catch (UnknownHostException e) { // Shouldn't happen. e.printStackTrace(System.err); return false; } return false; } /** * Check if the current transaction comes from a developer. 
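	 * The request must originate from a configured development subnet; beyond that,
	 * an explicit per-session setting takes precedence over the application-wide mode.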
*/ public boolean isDevelopmentMode(TransactionContext context) { if (isDeveloperRequestIpAddress(context) == false) { return false; } QlueSession qlueSession = getQlueSession(context.getRequest()); if (qlueSession == null) { return false; } // Check session development mode (explicitly enabled) if (qlueSession.getDevelopmentMode() == QlueConstants.DEVMODE_ENABLED) { return true; } // Check session development mode (explicitly disabled) if (qlueSession.getDevelopmentMode() == QlueConstants.DEVMODE_DISABLED) { return false; } // Check application development mode if (getApplicationDevelopmentMode() == QlueConstants.DEVMODE_ENABLED) { return true; } return false; } /** * Check given password against the current development password. */ public boolean checkDeveloperPassword(String password) { if ((password == null) || (developmentModePassword == null)) { return false; } if (password.compareTo(developmentModePassword) == 0) { return true; } return false; } /** * Get the current development password. */ public String getDeveloperPassword() { return developmentModePassword; } /** * Retrieve this application's properties. */ public Properties getProperties() { return properties; } /** * Retrieve a single named property as text. */ public String getProperty(String key) { return VariableExpander.expand(properties.getProperty(key), properties); } /** * Retrieve a single named property as text, using the supplied default * value if the property is not set. */ public String getProperty(String key, String defaultValue) { String value = getProperty(key); if (value != null) { return value; } else { return defaultValue; } } public Boolean getBooleanProperty(String key) { String value = getProperty(key); if (value == null) { return null; } return Boolean.parseBoolean(value); } public Boolean getBooleanProperty(String key, String defaultValue) { String value = getProperty(key); if (value == null) { return Boolean.parseBoolean(defaultValue); } return Boolean.parseBoolean(value); } /** * Retrieve a single integer property. */ public Integer getIntProperty(String key) { String value = getProperty(key); if (value == null) { return null; } return Integer.parseInt(value); } /** * Retrieve a single integer property, using the supplied default value if * the property is not set. */ public Integer getIntProperty(String key, int defaultValue) { String value = getProperty(key); if (value == null) { return defaultValue; } return Integer.parseInt(value); } /** * Configure the path to the file that contains localized messages. */ protected void setMessagesFilename(String messagesFilename) { this.messagesFilename = messagesFilename; } /** * Retrieve this application's message source. */ public MessageSource getMessageSource(Locale locale) { MessageSource source = messageSources.get(locale); if (source == null) { source = new MessageSource((PropertyResourceBundle) ResourceBundle.getBundle(messagesFilename, locale), locale); messageSources.put(locale, source); } return source; } /** * Remember the current page for later use (e.g., in an error handler). */ void setActualPage(Page page) { page.context.request.setAttribute(REQUEST_ACTUAL_PAGE_KEY, page); } /** * Retrieve the actual page that tried to handle the current transaction and * failed. */ Page getActualPage(Page currentPage) { return (Page) currentPage.context.request.getAttribute(REQUEST_ACTUAL_PAGE_KEY); } /** * Allocates a new page ID. 
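	 * (In practice this is a random UUID string, used as the transaction identifier.)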
*/ synchronized String generateTransactionId() { return UUID.randomUUID().toString(); } public EmailSender getEmailSender() { return asyncSmtpEmailSender; } public EmailSender getAsyncEmailSender() { return asyncSmtpEmailSender; } public EmailSender getSyncEmailSender() { return smtpEmailSender; } public String getConfPath() { return confPath; } public int getFrontendEncryptionCheck() { return frontendEncryptionCheck; } public String getAdminEmail() { return adminEmail; } protected void scheduleTask(Runnable maintenanceTask, Date firstTime, long period) { if (timer == null) { timer = new Timer(); } timer.scheduleAtFixedRate(new RunnableTaskWrapper(maintenanceTask), firstTime, period); } private class SendUrgentRemindersTask implements Runnable { @Override public void run() { try { if ((adminEmail == null) || (urgentEmail == null) || (urgentCounter < 0)) { return; } log.info("Sending urgent reminder: urgentCounter=" + urgentCounter); if (urgentCounter == 0) { // Nothing has happened in the last period; setting // the counter to -1 means that the next exception // will send an urgent email immediately. urgentCounter = -1; } else { // There were a number of exceptions in the last period, // which means that we should send a reminder email. Email email = new SimpleEmail(); email.setCharset("UTF-8"); email.setFrom(adminEmail); email.addTo(urgentEmail); email.setSubject("[" + getAppPrefix() + "] " + "Suppressed " + urgentCounter + " exception(s) in the last period"); try { getEmailSender().send(email); urgentCounter = 0; } catch (Exception e) { log.error("Failed to send email", e); } } } catch (Exception e) { log.error("SendUrgentRemindersTask exception", e); } } } private class RunnableTaskWrapper extends TimerTask { private Runnable task; RunnableTaskWrapper(Runnable task) { this.task = task; } @Override public void run() { task.run(); } } public String getPriorityTemplatePath() { return priorityTemplatePath; } /** * Returns class given its name. * * @param name * @return */ public static Class classForName(String name) { try { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); return Class.forName(name, true /* initialize */, classLoader); } catch (ClassNotFoundException e) { return null; } catch (NoClassDefFoundError e) { // NoClassDefFoundError is thrown when there is a class // that matches the name when ignoring case differences. // We do not care about that. return null; } } private void scheduleApplicationJobs() { // Create scheduler scheduler = new Scheduler(); scheduler.setDaemon(true); scheduler.start(); // Enumerate all application methods and look // for the QlueSchedule annotation Method[] methods = this.getClass().getMethods(); for (Method m : methods) { if (m.isAnnotationPresent(QlueSchedule.class)) { if (Modifier.isPublic(m.getModifiers()) || (Modifier.isProtected(m.getModifiers()))) { QlueSchedule qs = m.getAnnotation(QlueSchedule.class); try { scheduler.schedule(qs.value(), new QlueScheduleMethodTaskWrapper(this, this, m)); log.info("Scheduled method: " + m.getName()); } catch (InvalidPatternException ipe) { log.error("QlueSchedule: Invalid schedule pattern: " + qs.value()); } } else { log.error("QlueSchedule: Scheduled methods must be public or protected: " + m.getName()); } } } } }
src/com/webkreator/qlue/QlueApplication.java
/* * Qlue Web Application Framework * Copyright 2009-2012 Ivan Ristic <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.webkreator.qlue; import com.webkreator.qlue.editors.*; import com.webkreator.qlue.exceptions.*; import com.webkreator.qlue.router.QlueRouteManager; import com.webkreator.qlue.router.RouteFactory; import com.webkreator.qlue.util.*; import com.webkreator.qlue.view.*; import com.webkreator.qlue.view.velocity.ClasspathVelocityViewFactory; import com.webkreator.qlue.view.velocity.DefaultVelocityTool; import it.sauronsoftware.cron4j.InvalidPatternException; import it.sauronsoftware.cron4j.Scheduler; import org.apache.commons.mail.Email; import org.apache.commons.mail.EmailException; import org.apache.commons.mail.SimpleEmail; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; import javax.servlet.ServletException; import javax.servlet.http.*; import java.io.*; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.file.FileSystems; import java.nio.file.Path; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; /** * This class represents one Qlue application. Very simple applications might * use it directly, but most will need to subclass in order to support complex * configuration (page resolver, view resolver, etc). 
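 * <p>A minimal single-package application might subclass it like this (sketch,
 * hypothetical names):
 * <pre>{@code
 * public class MyApp extends QlueApplication {
 *     public MyApp() {
 *         super("com.example.myapp.pages");
 *     }
 * }
 * }</pre>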
*/ public class QlueApplication { public static final String PROPERTIES_FILENAME = "qlue.properties"; public static final String ROUTES_FILENAME = "routes.conf"; public static final String REQUEST_ACTUAL_PAGE_KEY = "QLUE_ACTUAL_PAGE"; public static final String PROPERTY_CONF_PATH = "qlue.confPath"; public static final int FRONTEND_ENCRYPTION_NO = 0; public static final int FRONTEND_ENCRYPTION_CONTAINER = 1; public static final int FRONTEND_ENCRYPTION_FORCE_YES = 2; public static final int FRONTEND_ENCRYPTION_TRUSTED_HEADER = 3; private static final String PROPERTY_CHARACTER_ENCODING = "qlue.characterEncoding"; private static final String PROPERTY_DEVMODE_ENABLED = "qlue.devmode.active"; private static final String PROPERTY_DEVMODE_RANGES = "qlue.devmode.subnets"; private static final String PROPERTY_DEVMODE_PASSWORD = "qlue.devmode.password"; private static final String PROPERTY_TRUSTED_PROXIES = "qlue.trustedProxies"; private static final String PROPERTY_FRONTEND_ENCRYPTION = "qlue.frontendEncryption"; private static final String PROPERTY_ADMIN_EMAIL = "qlue.adminEmail"; private static final String PROPERTY_URGENT_EMAIL = "qlue.urgentEmail"; private String messagesFilename = "com/webkreator/qlue/messages"; private Properties properties = new Properties(); private String appPrefix = "QlueApp"; private HttpServlet servlet; private Logger log = LoggerFactory.getLogger(QlueApplication.class); private QlueRouteManager routeManager = new QlueRouteManager(this); private ViewResolver viewResolver = new ViewResolver(); private ViewFactory viewFactory = new ClasspathVelocityViewFactory(); private HashMap<Class, PropertyEditor> editors = new HashMap<>(); private String characterEncoding = "UTF-8"; private int developmentMode = QlueConstants.DEVMODE_DISABLED; private String developmentModePassword = null; private List<CIDRUtils> developmentSubnets = null; private List<CIDRUtils> trustedProxies = null; private String adminEmail; private String urgentEmail; private int urgentCounter = -1; private SmtpEmailSender smtpEmailSender; private SmtpEmailSender asyncSmtpEmailSender; private HashMap<Locale, MessageSource> messageSources = new HashMap<>(); private String confPath; private int frontendEncryptionCheck = FRONTEND_ENCRYPTION_CONTAINER; private Timer timer; private String priorityTemplatePath; private Scheduler scheduler; /** * This is the default constructor. The idea is that a subclass will * override it and supplement with its own configuration. */ protected QlueApplication() { initPropertyEditors(); } /** * This constructor is intended for use by very simple web applications that * consist of only one package. */ public QlueApplication(String pagesHome) { initPropertyEditors(); // These are the default routes for a simple application; we use them // to avoid having to provide routing configuration. routeManager.add(RouteFactory.create(routeManager, "/_qlue/{} package:com.webkreator.qlue.pages")); routeManager.add(RouteFactory.create(routeManager, "/{} package:" + pagesHome)); } protected void determineConfigPath() { // First, try a system property. confPath = System.getProperty(PROPERTY_CONF_PATH); if (confPath != null) { return; } // Assume the configuration is in the WEB-INF folder. confPath = servlet.getServletContext().getRealPath("/WEB-INF/"); } /** * Initialize QlueApp instance. Qlue applications are designed to be used by * servlets to delegate both initialization and request processing. 
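	 * <p>A delegating servlet typically calls this from its own {@code init()}; a
	 * minimal sketch, assuming a hypothetical {@code MyApp} subclass held in an
	 * {@code app} field:
	 * <pre>{@code
	 * @Override
	 * public void init() throws ServletException {
	 *     try {
	 *         app = new MyApp();
	 *         app.init(this);
	 *     } catch (Exception e) {
	 *         throw new ServletException(e);
	 *     }
	 * }
	 * }</pre>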
*/ public void init(HttpServlet servlet) throws Exception { this.servlet = servlet; determineConfigPath(); loadProperties(); initRouteManagers(); if (viewResolver == null) { throw new Exception("View resolver not configured"); } if (viewFactory == null) { throw new Exception("View factory not configured"); } viewFactory.init(this); Calendar nextHour = Calendar.getInstance(); nextHour.set(Calendar.HOUR_OF_DAY, nextHour.get(Calendar.HOUR_OF_DAY) + 1); nextHour.set(Calendar.MINUTE, 0); nextHour.set(Calendar.SECOND, 0); scheduleTask(new SendUrgentRemindersTask(), nextHour.getTime(), 60 * 60 * 1000); scheduleApplicationJobs(); } protected void initRouteManagers() throws Exception { File routesFile = new File(confPath, ROUTES_FILENAME); if (routesFile.exists()) { routeManager.load(routesFile); } } void loadProperties() throws Exception { File propsFile; String filename = System.getProperty("qlue.properties"); if (filename == null) { filename = PROPERTIES_FILENAME; } if (filename.charAt(0) == '/') { propsFile = new File(filename); } else { propsFile = new File(confPath, filename); } if (propsFile.exists() == false) { throw new QlueException("Unable to find file: " + propsFile.getAbsolutePath()); } properties.load(new FileReader(propsFile)); properties.setProperty("confPath", confPath); properties.setProperty("webRoot", servlet.getServletContext().getRealPath("/")); if (getProperty(PROPERTY_CHARACTER_ENCODING) != null) { setCharacterEncoding(getProperty(PROPERTY_CHARACTER_ENCODING)); } if (getProperty(PROPERTY_DEVMODE_ENABLED) != null) { setApplicationDevelopmentMode(getProperty(PROPERTY_DEVMODE_ENABLED)); } if (getProperty(PROPERTY_DEVMODE_RANGES) != null) { setDevelopmentSubnets(getProperty(PROPERTY_DEVMODE_RANGES)); } if (getProperty(PROPERTY_TRUSTED_PROXIES) != null) { setTrustedProxies(getProperty(PROPERTY_TRUSTED_PROXIES)); } if (getProperty(PROPERTY_FRONTEND_ENCRYPTION) != null) { configureFrontendEncryption(getProperty(PROPERTY_FRONTEND_ENCRYPTION)); } developmentModePassword = getProperty(PROPERTY_DEVMODE_PASSWORD); adminEmail = getProperty(PROPERTY_ADMIN_EMAIL); urgentEmail = getProperty(PROPERTY_URGENT_EMAIL); // Configure the SMTP email senders smtpEmailSender = new SmtpEmailSender(); if (getBooleanProperty("qlue.smtp.async", "false")) { AsyncSmtpEmailSender myAsyncSmtpEmailSender = new AsyncSmtpEmailSender(smtpEmailSender); // Start a new daemon thread to send email in the background. Thread thread = new Thread(myAsyncSmtpEmailSender); thread.setDaemon(true); thread.start(); asyncSmtpEmailSender = myAsyncSmtpEmailSender; } else { // All email sending is synchronous. 
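            // The "async" reference simply aliases the synchronous sender, so both
            // getAsyncEmailSender() and getSyncEmailSender() remain safe to call.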
asyncSmtpEmailSender = smtpEmailSender; } smtpEmailSender.setSmtpServer(getProperty("qlue.smtp.server")); if (getProperty("qlue.smtp.port") != null) { smtpEmailSender.setSmtpPort(Integer.valueOf(getProperty("qlue.smtp.port"))); } if (getProperty("qlue.smtp.protocol") != null) { smtpEmailSender.setSmtpProtocol(getProperty("qlue.smtp.protocol")); } if (getProperty("qlue.smtp.username") != null) { smtpEmailSender.setSmtpUsername(getProperty("qlue.smtp.username")); smtpEmailSender.setSmtpPassword(getProperty("qlue.smtp.password")); } priorityTemplatePath = getProperty("qlue.velocity.priorityTemplatePath"); if (priorityTemplatePath != null) { Path p = FileSystems.getDefault().getPath(priorityTemplatePath); if (!p.isAbsolute()) { priorityTemplatePath = getApplicationRoot() + "/" + priorityTemplatePath; } File f = new File(priorityTemplatePath); if (!f.exists()) { throw new QlueException("Priority template path doesn't exist: " + priorityTemplatePath); } if (!f.isDirectory()) { throw new QlueException("Priority template path is not a directory: " + priorityTemplatePath); } } } private void configureFrontendEncryption(String value) { if ("no".equals(value)) { frontendEncryptionCheck = FRONTEND_ENCRYPTION_NO; } else if ("forceYes".equals(value)) { frontendEncryptionCheck = FRONTEND_ENCRYPTION_FORCE_YES; } else if ("container".equals(value)) { frontendEncryptionCheck = FRONTEND_ENCRYPTION_CONTAINER; } else if ("trustedHeader".equals(value)) { frontendEncryptionCheck = FRONTEND_ENCRYPTION_TRUSTED_HEADER; } else { throw new RuntimeException("Invalid value for the " + PROPERTY_FRONTEND_ENCRYPTION + " parameter:" + value); } } /** * Destroys the application. Invoked when the backing servlet is destroyed. */ public void destroy() { } /** * This method is the main entry point for request processing. */ protected void service(HttpServlet servlet, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Remember when processing began. long startTime = System.currentTimeMillis(); // Set the default character encoding. request.setCharacterEncoding(characterEncoding); response.setCharacterEncoding(characterEncoding); // Create a new application session object if one does not exist. HttpSession session = request.getSession(true); synchronized (session) { if (session.getAttribute(QlueConstants.QLUE_SESSION_OBJECT) == null) { session.setAttribute(QlueConstants.QLUE_SESSION_OBJECT, createNewSessionObject()); } } // Create a new context. TransactionContext context = new TransactionContext( this, servlet.getServletConfig(), servlet.getServletContext(), request, response); // Expose transaction information to the logging subsystem. MDC.put("txId", context.getTxId()); MDC.put("remoteAddr", context.getEffectiveRemoteAddr()); MDC.put("sessionId", context.getSession().getId()); // Proceed to the second stage of request processing try { log.debug("Processing request: " + request.getRequestURI()); serviceInternal(context); log.debug("Processed request in " + (System.currentTimeMillis() - startTime)); } finally { MDC.clear(); } } protected Object route(TransactionContext context) { return routeManager.route(context); } protected View processPage(Page page) throws Exception { View view = null; // Initialize backend. This is a handy place to do things needed for later // on, for example, configure database access. view = page.initBackend(); if (view != null) { return view; } // Check access. 
The idea with this hook is to run it as early as possible, // before any parameters are accessed, thus minimising the executed code. view = page.checkAccess(); if (view != null) { return view; } // For persistent pages, we clear errors only on POSTs; that // means that a subsequent GET can access the errors to show // them to the user. if (!page.isPersistent() || page.context.isPost()) { page.getErrors().clear(); } bindParameters(page); // Custom parameter validation. view = page.validateParameters(); if (view != null) { return view; } // Custom error handling. if (page.hasErrors()) { view = page.handleValidationError(); if (view != null) { return view; } } // Initialize the page. This really only makes sense for persistent pages, where you // want to run some code only once. With non-persistent pages, it's better to have // all the code in the same method. if (page.getState().equals(Page.STATE_INIT)) { view = page.init(); if (view != null) { return view; } } // Early call to prepare the page for the main thing. view = page.prepareForService(); if (view != null) { return view; } // Finally, run the main processing entry point. return page.service(); } /** * Request processing entry point. */ protected void serviceInternal(TransactionContext context) throws IOException { Page page = null; try { // First check if this is a request for a persistent page. We can // honour such requests only when we're not handling errors. if (context.isErrorHandler() == false) { // Persistent pages are identified via the "_pid" parameter. If we have // one such parameter, we look for the corresponding page in session storage. String pids[] = context.getParameterValues("_pid"); if ((pids != null) && (pids.length != 0)) { // Only one _pid parameter is allowed. if (pids.length != 1) { throw new RuntimeException("Request contains multiple _pid parameters"); } // Find the page using the requested page ID. PersistentPageRecord pageRecord = context.findPersistentPageRecord(pids[0]); if (pageRecord == null) { throw new PersistentPageNotFoundException("Persistent page not found: " + pids[0]); } // If the replacementUri is set that means that the page no longer // exist and that we need to forward all further request to it. if (pageRecord.getReplacementUri() != null) { context.getResponse().sendRedirect(pageRecord.getReplacementUri()); return; } // Otherwise, let's use this page. page = pageRecord.getPage(); if (page == null) { throw new RuntimeException("Page record doesn't contain page"); } } } // If we don't have a persistent page we'll create a new one by routing this request. if (page == null) { Object routeObject = route(context); if (routeObject == null) { throw new PageNotFoundException(); } else if (routeObject instanceof View) { page = new DirectViewPage((View) routeObject); } else if (routeObject instanceof Page) { page = (Page) routeObject; } else { throw new RuntimeException("Qlue: Unexpected router response: " + routeObject); } } // Run the page. Access to the page is synchronised, which means that only one // HTTP request can handle it at any given time. synchronized (page) { page.setApp(this); page.determineDefaultViewName(viewResolver); page.setContext(context); page.determineCommandObject(); if (page.isPersistent()) { context.persistPage(page); } // Set content type now, before any output happens. context.response.setContentType(page.getContentType()); View view = processPage(page); if (view != null) { renderView(view, context, page); } // Execute page commit. 
This is what it sounds like, // an opportunity to use a simple approach to transaction // management for simple applications. page.commit(); // Automatic page state transition. if (!page.isPersistent()) { // Non-persistent pages automatically transition to FINISHED so that cleanup can be invoked. page.setState(Page.STATE_FINISHED); } else { // For persistent pages, we change their state only if they're left as NEW // after execution. We change to POSTED in order to prevent multiple calls to init(). if (page.getState().equals(Page.STATE_INIT)) { page.setState(Page.STATE_WORKING); } } } } catch (PersistentPageNotFoundException ppnfe) { // When we encounter an unknown process reference, we // redirect back to the site home page. Showing errors // is probably not going to be helpful, and may actually compel the // user to go back and try again (and that's not going to work). context.getResponse().sendRedirect("/"); } catch (RequestMethodException rme) { if (page != null) { if (page.isDevelopmentMode()) { log.error(rme.getMessage(), rme); } page.rollback(); } // Convert RequestMethodException into a 405 response. context.getResponse().sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); } catch (PageNotFoundException pnfe) { if (page != null) { if (page.isDevelopmentMode()) { log.error(pnfe.getMessage(), pnfe); } page.rollback(); } // Convert PageNotFoundException into a 404 response. context.getResponse().sendError(HttpServletResponse.SC_NOT_FOUND); } catch (ValidationException ve) { if (!page.isDevelopmentMode()) { if (page != null) { page.rollback(); } // Respond to validation errors with a 400 response. context.getResponse().sendError(HttpServletResponse.SC_BAD_REQUEST); } else { // In development mode, we let the exception propagate to that it // can be handled by our generic exception handler, which will show // the error information on the screen. throw ve; } } catch (QlueSecurityException se) { if (page != null) { page.rollback(); } log.error("Security exception: " + context.getRequestUriWithQueryString(), se); // Respond to security exceptions with a 400 response. context.getResponse().sendError(HttpServletResponse.SC_BAD_REQUEST); } catch (Exception e) { if (page != null) { page.rollback(); // Because we are about to throw an exception, which may cause // another page to handle this request, we need to remember // the current page (which is useful for debugging information, etc). setActualPage(page); } // Don't process the exception further if the problem is caused // by the client going away (e.g., interrupted file download). if (!e.getClass().getName().contains("ClientAbortException")) { // Handle application exception, which will record full context // data and, optionally, notify the administrator via email. handleApplicationException(context, page, e); // We do not wish to propagate the exception further, but, if it's not too late // (the response not committed), we should use a meaningful status code. if (context.getResponse().isCommitted() == false) { if (e instanceof ServiceUnavailableException) { context.getResponse().sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE); } else { context.getResponse().sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); } } } } finally { // In development mode, append debugging information to the end of the page. masterWriteRequestDevelopmentInformation(context, page); // Invoke cleanup on finished pages. if ((page != null) && (page.isFinished()) && (!page.isCleanupInvoked())) { page.cleanup(); } } } /** * Handle application exception. 
We dump debugging information into the * application activity log and, if the admin email address is configured, * we send the same via email. */ protected void handleApplicationException(TransactionContext tx, Page page, Throwable t) { String debugInfo = null; if (tx != null) { // Dump debugging information into a String StringWriter sw = new StringWriter(); sw.append("Debugging information follows:"); try { _masterWriteRequestDevelopmentInformation(tx, page, new PrintWriter(sw)); } catch (IOException e) { // Ignore (but log, in case we do get something) log.error("Exception while preparing debugging information", e); } // Qlue formats debugging information using HTML markup, and here // we want to log it to text files, which means we need to strip // out the markup and convert entity references. HtmlToText htt = new HtmlToText(); try { htt.parse(new StringReader(sw.getBuffer().toString())); debugInfo = htt.toString(); } catch (IOException e) { log.error("Error while converting HTML", e); } } // Record message to the activity log log.error("Qlue: Unhandled application exception", t); // Send email notification try { Email email = new SimpleEmail(); email.setCharset("UTF-8"); if (t.getMessage() != null) { email.setSubject("Application Exception: " + t.getMessage()); } else { email.setSubject("Application Exception"); } StringWriter msgBody = new StringWriter(); PrintWriter pw = new PrintWriter(msgBody); t.printStackTrace(pw); pw.println(); if (debugInfo != null) { pw.print(debugInfo); } email.setMsg(msgBody.toString()); sendAdminEmail(email, true /* fatalError */); } catch (Exception e) { log.error("Failed sending admin email: ", e); } } public synchronized void sendAdminEmail(Email email) { sendAdminEmail(email, false); } public synchronized void sendAdminEmail(Email email, boolean fatalError) { if (adminEmail == null) { return; } // Configure the correct email address. try { email.setFrom(adminEmail); // If this is a fatal error and we have an // email address for emergencies, treat it // as an emergency. if ((fatalError) && (urgentEmail != null)) { email.addTo(urgentEmail); } else { email.addTo(adminEmail); } } catch (EmailException e) { log.error("Invalid admin email address", e); } // Update the email subject to include the application prefix. email.setSubject("[" + getAppPrefix() + "] " + email.getSubject()); // If the email is about a fatal problem, determine // if we want to urgently notify the administrators; we // want to send only one urgent email per time period. if ((fatalError) && (urgentEmail != null)) { // When the counter is at -1 that means we didn't // send any emails in the previous time period. In // other words, we can send one now. if (urgentCounter == -1) { urgentCounter = 0; } else { // Alternatively, just increment the counter // and send nothing. urgentCounter++; log.info("Suppressing fatal error email (" + urgentCounter + "): " + email.getSubject()); return; } } // Send the email now. try { getEmailSender().send(email); } catch (Exception e) { log.error("Failed to send email", e); } } public void renderView(View view, TransactionContext tx, Page page) throws Exception { // For persistent pages, we clear errors only on POSTs; that // means that a subsequent GET can access the errors to show // them to the user. 
if (!page.isPersistent() || page.context.isPost()) { createShadowInput(page, /* fromRequest */ true); } else { if (page.getState() == Page.STATE_INIT) { createShadowInput(page, /* fromRequest */ false); } } // NullView only indicates that no further output is needed. if (view instanceof NullView) { return; } // If we get a DefaultView or NamedView instance // we have to replace them with a real view, using // the name of the page in the view resolution process. if (view instanceof DefaultView) { view = viewFactory.constructView(page, page.getViewName()); } else if (view instanceof NamedView) { view = viewFactory.constructView(page, ((NamedView) view).getViewName()); } else if (view instanceof ClasspathView) { view = viewFactory.constructView(((ClasspathView) view).getViewName()); } else if (view instanceof FinalRedirectView) { page.setState(Page.STATE_FINISHED); if (((RedirectView) view).getPage() == null) { page.context.replacePage(page, (FinalRedirectView) view); } } if (view == null) { throw new RuntimeException("Qlue: Unable to resolve view"); } view.render(tx, page); } /** * Invoked to store the original text values for parameters. The text is * needed in the cases where it cannot be converted to the intended type. */ private void createShadowInput(Page page, boolean fromRequest) throws Exception { page.clearShadowInput(); // Ask the page to provide a command object, which can be // a custom object or the page itself. Object commandObject = page.getCommandObject(); if (commandObject == null) { throw new RuntimeException("Qlue: Command object cannot be null"); } // Loop through the command object fields in order to determine // if any are annotated as parameters. Remember the original // text values of parameters. Set<Field> fields = getClassPublicFields(commandObject.getClass()); for (Field f : fields) { if (f.isAnnotationPresent(QlueParameter.class)) { if (QlueFile.class.isAssignableFrom(f.getType())) { continue; } if (!Modifier.isPublic(f.getModifiers())) { throw new QlueException("QlueParameter used on a non-public field"); } // Update missing shadow input fields if (page.getShadowInput().get(f.getName()) == null) { if (f.getType().isArray()) { createShadowInputArrayParam(page, f, fromRequest); } else { createShadowInputNonArrayParam(page, f, fromRequest); } } } } } private void createShadowInputArrayParam(Page page, Field f, boolean fromRequest) throws Exception { // Find the property editor PropertyEditor pe = editors.get(f.getType().getComponentType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to handle type: " + f.getType().getComponentType()); } // If there is any data in the command object use it to populate shadow input if (f.get(page.getCommandObject()) != null) { Object[] originalValues = (Object[]) f.get(page.getCommandObject()); String[] textValues = new String[originalValues.length]; for (int i = 0; i < originalValues.length; i++) { textValues[i] = pe.toText(originalValues[i]); } page.getShadowInput().set(f.getName(), textValues); } if (fromRequest) { // Overwrite with the value in the request, if present String[] requestParamValues = page.context.getParameterValues(f.getName()); if (requestParamValues != null) { page.getShadowInput().set(f.getName(), requestParamValues); } } } private void createShadowInputNonArrayParam(Page page, Field f, boolean fromRequest) throws Exception { // Find the property editor PropertyEditor pe = editors.get(f.getType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to 
handle type: " + f.getType()); } // If the object exists in the command object, convert it to text using the property editor Object o = f.get(page.getCommandObject()); if (o != null) { page.getShadowInput().set(f.getName(), pe.toText(o)); } // Overwrite with the value in the request, if present if (fromRequest) { String requestParamValue = page.context.getParameter(f.getName()); if (requestParamValue != null) { page.getShadowInput().set(f.getName(), requestParamValue); } } } /** * Appends debugging information to the view, but only if the development mode is active. */ protected void masterWriteRequestDevelopmentInformation(TransactionContext context, Page page) throws IOException { if (page == null) { return; } // Check development mode if (page.isDevelopmentMode() == false) { return; } // We might be in an error handler, in which case we want to display // the state of the actual (original) page and not this one. Page actualPage = getActualPage(page); if (actualPage != null) { // Use the actual page and context page = actualPage; context = page.getContext(); } // Ignore redirections; RedirectView knows to display development // information before redirects, which is why we don't need // to worry here. int status = context.response.getStatus(); if ((status >= 300) && (status <= 399)) { return; } // Ignore responses other than text/html; we don't want to // corrupt images and other resources that are not pages. String contentType = context.response.getContentType(); if (contentType != null) { int i = contentType.indexOf(';'); if (i != -1) { contentType = contentType.substring(0, i); } if (contentType.compareToIgnoreCase("text/html") != 0) { return; } } // Append output _masterWriteRequestDevelopmentInformation(context, page, context.response.getWriter()); } protected void _masterWriteRequestDevelopmentInformation(TransactionContext context, Page page, PrintWriter out) throws IOException { if (page == null) { return; } out.println("<hr><div align=left><pre>"); out.println("<b>Request</b>\n"); context.writeRequestDevelopmentInformation(out); out.println(""); out.println("<b>Page</b>\n"); page.writeDevelopmentInformation(out); out.println(""); out.println("<b>Session</b>\n"); QlueSession qlueSession = page.getQlueSession(); if (qlueSession != null) { qlueSession.writeDevelopmentInformation(out); out.println(""); } out.println("<b>Application</b>\n"); this.writeDevelopmentInformation(out); out.println("</pre></div>"); } /** * Write application-specific debugging output. */ protected void writeDevelopmentInformation(PrintWriter out) { out.println(" Prefix: " + HtmlEncoder.html(appPrefix)); out.println(" Development mode: " + developmentMode); } protected Set<Field> getClassPublicFields(Class klass) { Set<Field> fields = new HashSet<>(); for (; ; ) { Field[] fs = klass.getDeclaredFields(); for (Field f : fs) { fields.add(f); } klass = klass.getSuperclass(); if (klass == null) { break; } if (klass.getCanonicalName().equals(Page.class.getCanonicalName())) { break; } } return fields; } public boolean shouldBindParameter(QlueParameter qp, Page page) { String state = qp.state(); // Always bind. if (state.equals(Page.STATE_ANY)) { return true; } // Bind if the parameter state matches page state. if (state.equals(page.getState())) { return true; } // Special state STATE_DEFAULT: if the page is not persistent, // bind always. Otherwise, bind only on POST. 
if (state.equals(Page.STATE_DEFAULT)) { if (!page.isPersistent() || page.context.isPost()) { return true; } else { return false; } } // Bind on GET requests. if (state.equals(Page.STATE_GET) && page.context.isGet()) { return true; } // Bind on POST requests. if (state.equals(Page.STATE_POST) && page.context.isPost()) { return true; } return false; } /** * Bind request parameters to the command object provided by the page. */ private void bindParameters(Page page) throws Exception { // Ask the page to provide a command object we can bind to. Simpler pages // might see themselves as the command objects; more complex might use more than one. Object commandObject = page.getCommandObject(); if (commandObject == null) { throw new RuntimeException("Qlue: Command object cannot be null"); } // Loop through the command object fields in order to determine if any are annotated as // parameters. Validate those that are, then bind them. Set<Field> fields = getClassPublicFields(commandObject.getClass()); for (Field f : fields) { // We bind command object fields that have the QlueParameter annotation. if (f.isAnnotationPresent(QlueParameter.class) == false) { continue; } // We bind only to public fields, but it commonly happens that the QlueParameter // annotation is used on other field types, leading to frustration because it's // not obvious why binding is not working. For this reason, we detect that problem // here and force an error to inform the developer. if (!Modifier.isPublic(f.getModifiers())) { throw new QlueException("QlueParameter used on a non-public field"); } try { QlueParameter qp = f.getAnnotation(QlueParameter.class); // Bind parameter when appropriate. if (shouldBindParameter(qp, page)) { if (qp.source().equals(ParamSource.URL)) { // Bind parameters transported in URL. For this to work there needs // to exist a route that parses out the parameter out of the URL. bindParameterFromString(commandObject, f, page, page.context.getUrlParameter(f.getName())); } else { if (qp.source().equals(ParamSource.GET_POST) || (qp.source().equals(ParamSource.GET) && page.context.isGet()) || (qp.source().equals(ParamSource.POST) && page.context.isPost())) { if (f.getType().isArray()) { bindArrayParameter(commandObject, f, page); } else { bindNonArrayParameter(commandObject, f, page); } } } } } catch (IllegalArgumentException e) { // Transform editor exception into a validation error. page.addError(f.getName(), e.getMessage()); } } } /** * Bind an array parameter. */ private void bindArrayParameter(Object commandObject, Field f, Page page) throws Exception { // Get the annotation QlueParameter qp = f.getAnnotation(QlueParameter.class); // Look for a property editor, which will know how // to convert text into a proper native type PropertyEditor pe = editors.get(f.getType().getComponentType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to handle type: " + f.getType().getComponentType()); } String[] values = page.context.getParameterValues(f.getName()); if ((values == null) || (values.length == 0)) { // Parameter not in input; create an empty array and set it on the command object. 
f.set(commandObject, Array.newInstance(f.getType().getComponentType(), 0)); return; } // Parameter in input boolean hasErrors = false; Object[] convertedValues = (Object[]) Array.newInstance(f.getType().getComponentType(), values.length); for (int i = 0; i < values.length; i++) { String newValue = validateParameter(page, f, qp, values[i]); if (newValue != null) { values[i] = newValue; convertedValues[i] = pe.fromText(f, values[i], f.get(commandObject)); } else { hasErrors = true; } } if (hasErrors == false) { f.set(commandObject, convertedValues); } } /** * Validate one parameter. */ protected String validateParameter(Page page, Field f, QlueParameter qp, String value) { // Transform value according to the list // of transformation functions supplied String tfn = qp.tfn(); if (tfn.length() != 0) { StringTokenizer st = new StringTokenizer(tfn, " ,"); while (st.hasMoreTokens()) { String t = st.nextToken(); if (t.compareTo("trim") == 0) { value = value.trim(); } else if (t.compareTo("lowercase") == 0) { value = value.toLowerCase(); } else { throw new RuntimeException("Qlue: Invalid parameter transformation function: " + t); } } } // If the parameter is mandatory, check that is // not empty or that it does not consist only // of whitespace characters. if (qp.mandatory()) { if (TextUtil.isEmptyOrWhitespace(value)) { page.addError(f.getName(), getFieldMissingMessage(qp)); return null; } } // Check size if (qp.maxSize() != -1) { if ((value.length() > qp.maxSize())) { if (qp.ignoreInvalid() == false) { page.addError(f.getName(), "qlue.validation.maxSize"); return null; } else { return null; } } } // Check that it conforms to the supplied regular expression if (qp.pattern().length() != 0) { Pattern p = null; // Compile the pattern first try { p = Pattern.compile(qp.pattern(), Pattern.DOTALL); } catch (PatternSyntaxException e) { throw new RuntimeException("Qlue: Invalid parameter validation pattern: " + qp.pattern()); } // Try to match Matcher m = p.matcher(value); if ((m.matches() == false)) { if (qp.ignoreInvalid() == false) { page.addError(f.getName(), "qlue.validation.pattern"); return null; } else { return null; } } } return value; } /** * Bind a parameter that is not an array. 
*/ private void bindNonArrayParameter(Object commandObject, Field f, Page page) throws Exception { // Get the annotation QlueParameter qp = f.getAnnotation(QlueParameter.class); // First check if the parameter is a file if (QlueFile.class.isAssignableFrom(f.getType())) { bindFileParameter(commandObject, f, page); return; } // Look for a property editor, which will know how // to convert text into a native type PropertyEditor pe = editors.get(f.getType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to handle type: " + f.getType()); } // If the parameter is present in request, validate it and set on the command object String value = page.context.getParameter(f.getName()); if (value != null) { String newValue = validateParameter(page, f, qp, value); if (newValue != null) { value = newValue; f.set(commandObject, pe.fromText(f, value, f.get(commandObject))); } } else { f.set(commandObject, pe.fromText(f, value, f.get(commandObject))); // We are here if the parameter is not in the request, in which // case we need to check of the parameter is mandatory if (qp.mandatory()) { page.addError(f.getName(), getFieldMissingMessage(qp)); } } } private void bindParameterFromString(Object commandObject, Field f, Page page, String value) throws Exception { // Get the annotation QlueParameter qp = f.getAnnotation(QlueParameter.class); // First check if the parameter is a file if (QlueFile.class.isAssignableFrom(f.getType())) { throw new RuntimeException("Qlue: Unable to bind a string to file parameter"); } // Look for a property editor, which will know how // to convert text into a native type PropertyEditor pe = editors.get(f.getType()); if (pe == null) { throw new RuntimeException("Qlue: Binding does not know how to handle type: " + f.getType()); } // If the parameter is present in request, validate it // and set on the command object if (value != null) { String newValue = validateParameter(page, f, qp, value); if (newValue != null) { value = newValue; f.set(commandObject, pe.fromText(f, value, f.get(commandObject))); } } else { f.set(commandObject, pe.fromText(f, value, f.get(commandObject))); // We are here if the parameter is not in request, in which // case we need to check of the parameter is mandatory if (qp.mandatory()) { page.addError(f.getName(), getFieldMissingMessage(qp)); } } } /** * Retrieve field message that we need to emit when a mandatory parameter is * missing. */ private String getFieldMissingMessage(QlueParameter qp) { return (qp.fieldMissingMessage().length() > 0) ? qp.fieldMissingMessage() : "qlue.validation.mandatory"; } /** * Bind file parameter. */ private void bindFileParameter(Object commandObject, Field f, Page page) throws Exception { QlueParameter qp = f.getAnnotation(QlueParameter.class); Part p = null; try { p = page.context.getPart(f.getName()); } catch (ServletException e) { } if ((p == null) || (p.getSize() == 0)) { if (qp.mandatory()) { page.addError(f.getName(), getFieldMissingMessage(qp)); } return; } File file = File.createTempFile("qlue-", ".tmp"); p.write(file.getAbsolutePath()); p.delete(); QlueFile qf = new QlueFile(file.getAbsolutePath()); qf.setContentType(p.getContentType()); qf.setSubmittedFilename(p.getSubmittedFileName()); f.set(commandObject, qf); } /** * Register a new property editor. */ private void registerPropertyEditor(PropertyEditor editor) { editors.put(editor.getEditorClass(), editor); } /** * Register the built-in property editors. 
*/ protected void initPropertyEditors() { registerPropertyEditor(new IntegerEditor()); registerPropertyEditor(new LongEditor()); registerPropertyEditor(new StringEditor()); registerPropertyEditor(new BooleanEditor()); registerPropertyEditor(new DateEditor()); } /** * Retrieve view resolver. */ public ViewResolver getViewResolver() { return viewResolver; } /** * Set view resolver. */ protected void setViewResolver(ViewResolver viewResolver) { this.viewResolver = viewResolver; } /** * Retrieve view factory. */ public ViewFactory getViewFactory() { return viewFactory; } /** * Set view factory. */ protected void setViewFactory(ViewFactory viewFactory) { this.viewFactory = viewFactory; } /** * Get application root directory. */ public String getApplicationRoot() { return servlet.getServletContext().getRealPath("/"); } /** * Get application prefix. */ public String getAppPrefix() { return appPrefix; } /** * Set application prefix. */ protected void setAppPrefix(String appPrefix) { this.appPrefix = appPrefix; } /** * Retrieve this application's format tool, which is used in templates to * format output (but _not_ for output encoding). By default, that's an * instance of DefaultVelocityTool, but subclasses can use something else. */ public Object getVelocityTool() { return new DefaultVelocityTool(); } /** * Retrieve an encoding tool the application can use to write directly to HTML. */ public Object getEncodingTool() { return new HtmlEncoder(); } /** * This method is invoked to create a new session object. A QlueSession * instance is returned by default, but most applications will want to * override this method and provide their own session objects. */ protected QlueSession createNewSessionObject() { return new QlueSession(); } /** * Returns the session object associated with the current HTTP session. */ public QlueSession getQlueSession(HttpServletRequest request) { HttpSession httpSession = request.getSession(false); if (httpSession == null) { return null; } return (QlueSession) request.getSession().getAttribute(QlueConstants.QLUE_SESSION_OBJECT); } /** * Invalidates the existing session and creates a new one, preserving the * QlueSession object in the process. This method should be invoked * immediately after a user is authenticated to prevent session fixation * attacks. */ public void regenerateSession(HttpServletRequest request) { HttpSession existingHttpSession = request.getSession(false); if (existingHttpSession == null) { throw new IllegalStateException("Unable to regenerate session: No HTTP session"); } QlueSession qlueSession = getQlueSession(request); if (qlueSession == null) { throw new IllegalStateException("Unable to regenerate session: No Qlue session"); } QluePageManager pageManager = (QluePageManager) existingHttpSession.getAttribute(QlueConstants.QLUE_SESSION_PAGE_MANAGER); if (pageManager == null) { throw new IllegalStateException("Unable to regenerate session: No page manager"); } existingHttpSession.invalidate(); HttpSession newHttpSession = request.getSession(true); newHttpSession.setAttribute(QlueConstants.QLUE_SESSION_OBJECT, qlueSession); newHttpSession.setAttribute(QlueConstants.QLUE_SESSION_PAGE_MANAGER, pageManager); } /** * Set application prefix, which is used in logging as part of the unique transaction identifier. */ protected void setPrefix(String prefix) { this.appPrefix = prefix; } /** * Whether direct output (in which the programmer is expected to manually * encode data) is allowed. We do not allow direct output by default. 
* Override this method to change the behaviour. */ public boolean allowDirectOutput() { return false; } /** * Configure character encoding. */ protected void setCharacterEncoding(String characterEncoding) { this.characterEncoding = characterEncoding; } /** * Retrieves application's character encoding. */ public String getCharacterEncoding() { return characterEncoding; } /** * Configure development mode. */ protected void setApplicationDevelopmentMode(String input) { if (input.compareToIgnoreCase("on") == 0) { developmentMode = QlueConstants.DEVMODE_ENABLED; return; } else if (input.compareToIgnoreCase("off") == 0) { developmentMode = QlueConstants.DEVMODE_DISABLED; return; } else if (input.compareToIgnoreCase("ondemand") == 0) { developmentMode = QlueConstants.DEVMODE_ONDEMAND; return; } throw new IllegalArgumentException("Invalid value for development mode: " + input); } /** * Get the development mode setting. */ public int getApplicationDevelopmentMode() { return developmentMode; } /** * Set development mode password. */ public void setDevelopmentModePassword(String developmentModePassword) { this.developmentModePassword = developmentModePassword; } private void setTrustedProxies(String combinedSubnets) throws Exception { if (TextUtil.isEmpty(combinedSubnets)) { return; } String[] subnets = combinedSubnets.split("[;,\\x20]"); trustedProxies = new ArrayList<>(); for (String s : subnets) { if (TextUtil.isEmpty(s)) { continue; } if ((!s.contains("/")) && (!s.contains(":"))) { s = s + "/32"; } try { trustedProxies.add(new CIDRUtils(s)); } catch (IllegalArgumentException iae) { throw new RuntimeException("Qlue: Invalid proxy subnet: " + s); } } } public boolean isTrustedProxyRequest(TransactionContext context) { if (trustedProxies == null) { return false; } try { InetAddress remoteAddr = InetAddress.getByName(context.request.getRemoteAddr()); for (CIDRUtils su : trustedProxies) { if (su.isInRange(remoteAddr)) { return true; } } } catch (UnknownHostException e) { // Shouldn't happen. e.printStackTrace(System.err); return false; } return false; } /** * Configure the set of IP addresses that are allowed to use development mode. */ protected void setDevelopmentSubnets(String combinedSubnets) throws Exception { if (TextUtil.isEmpty(combinedSubnets)) { return; } String[] subnets = combinedSubnets.split("[;,\\x20]"); developmentSubnets = new ArrayList<>(); for (String s : subnets) { if (TextUtil.isEmpty(s)) { continue; } if ((!s.contains("/")) && (!s.contains(":"))) { s = s + "/32"; } try { developmentSubnets.add(new CIDRUtils(s)); } catch (IllegalArgumentException iae) { throw new RuntimeException("Qlue: Invalid development subnet: " + s); } } } /** * Check if the current transaction comes from an IP address that is allowed * to use development mode. */ public boolean isDeveloperRequestIpAddress(TransactionContext context) { if (developmentSubnets == null) { return false; } try { InetAddress remoteAddr = InetAddress.getByName(context.getEffectiveRemoteAddr()); for (CIDRUtils su : developmentSubnets) { if (su.isInRange(remoteAddr)) { return true; } } } catch (UnknownHostException e) { // Shouldn't happen. e.printStackTrace(System.err); return false; } return false; } /** * Check if the current transaction comes from a developer. 
*/ public boolean isDevelopmentMode(TransactionContext context) { if (isDeveloperRequestIpAddress(context) == false) { return false; } QlueSession qlueSession = getQlueSession(context.getRequest()); if (qlueSession == null) { return false; } // Check session development mode (explicitly enabled) if (qlueSession.getDevelopmentMode() == QlueConstants.DEVMODE_ENABLED) { return true; } // Check session development mode (explicitly disabled) if (qlueSession.getDevelopmentMode() == QlueConstants.DEVMODE_DISABLED) { return false; } // Check application development mode if (getApplicationDevelopmentMode() == QlueConstants.DEVMODE_ENABLED) { return true; } return false; } /** * Check given password against the current development password. */ public boolean checkDeveloperPassword(String password) { if ((password == null) || (developmentModePassword == null)) { return false; } if (password.compareTo(developmentModePassword) == 0) { return true; } return false; } /** * Get the current development password. */ public String getDeveloperPassword() { return developmentModePassword; } /** * Retrieve this application's properties. */ public Properties getProperties() { return properties; } /** * Retrieve a single named property as text. */ public String getProperty(String key) { return VariableExpander.expand(properties.getProperty(key), properties); } /** * Retrieve a single named property as text, using the supplied default * value if the property is not set. */ public String getProperty(String key, String defaultValue) { String value = getProperty(key); if (value != null) { return value; } else { return defaultValue; } } public Boolean getBooleanProperty(String key) { String value = getProperty(key); if (value == null) { return null; } return Boolean.parseBoolean(value); } public Boolean getBooleanProperty(String key, String defaultValue) { String value = getProperty(key); if (value == null) { return Boolean.parseBoolean(defaultValue); } return Boolean.parseBoolean(value); } /** * Retrieve a single integer property. */ public Integer getIntProperty(String key) { String value = getProperty(key); if (value == null) { return null; } return Integer.parseInt(value); } /** * Retrieve a single integer property, using the supplied default value if * the property is not set. */ public Integer getIntProperty(String key, int defaultValue) { String value = getProperty(key); if (value == null) { return defaultValue; } return Integer.parseInt(value); } /** * Configure the path to the file that contains localized messages. */ protected void setMessagesFilename(String messagesFilename) { this.messagesFilename = messagesFilename; } /** * Retrieve this application's message source. */ public MessageSource getMessageSource(Locale locale) { MessageSource source = messageSources.get(locale); if (source == null) { source = new MessageSource((PropertyResourceBundle) ResourceBundle.getBundle(messagesFilename, locale), locale); messageSources.put(locale, source); } return source; } /** * Remember the current page for later use (e.g., in an error handler). */ void setActualPage(Page page) { page.context.request.setAttribute(REQUEST_ACTUAL_PAGE_KEY, page); } /** * Retrieve the actual page that tried to handle the current transaction and * failed. */ Page getActualPage(Page currentPage) { return (Page) currentPage.context.request.getAttribute(REQUEST_ACTUAL_PAGE_KEY); } /** * Allocates a new page ID. 
*/ synchronized String generateTransactionId() { return UUID.randomUUID().toString(); } public EmailSender getEmailSender() { return asyncSmtpEmailSender; } public EmailSender getAsyncEmailSender() { return asyncSmtpEmailSender; } public EmailSender getSyncEmailSender() { return smtpEmailSender; } public String getConfPath() { return confPath; } public int getFrontendEncryptionCheck() { return frontendEncryptionCheck; } public String getAdminEmail() { return adminEmail; } protected void scheduleTask(Runnable maintenanceTask, Date firstTime, long period) { if (timer == null) { timer = new Timer(); } timer.scheduleAtFixedRate(new RunnableTaskWrapper(maintenanceTask), firstTime, period); } private class SendUrgentRemindersTask implements Runnable { @Override public void run() { try { if ((adminEmail == null) || (urgentEmail == null) || (urgentCounter < 0)) { return; } log.info("Sending urgent reminder: urgentCounter=" + urgentCounter); if (urgentCounter == 0) { // Nothing has happened in the last period; setting // the counter to -1 means that the next exception // will send an urgent email immediately. urgentCounter = -1; } else { // There were a number of exceptions in the last period, // which means that we should send a reminder email. Email email = new SimpleEmail(); email.setCharset("UTF-8"); email.setFrom(adminEmail); email.addTo(urgentEmail); email.setSubject("[" + getAppPrefix() + "] " + "Suppressed " + urgentCounter + " exception(s) in the last period"); try { getEmailSender().send(email); urgentCounter = 0; } catch (Exception e) { log.error("Failed to send email", e); } } } catch (Exception e) { log.error("SendUrgentRemindersTask exception", e); } } } private class RunnableTaskWrapper extends TimerTask { private Runnable task; RunnableTaskWrapper(Runnable task) { this.task = task; } @Override public void run() { task.run(); } } public String getPriorityTemplatePath() { return priorityTemplatePath; } /** * Returns class given its name. * * @param name * @return */ public static Class classForName(String name) { try { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); return Class.forName(name, true /* initialize */, classLoader); } catch (ClassNotFoundException e) { return null; } catch (NoClassDefFoundError e) { // NoClassDefFoundError is thrown when there is a class // that matches the name when ignoring case differences. // We do not care about that. return null; } } private void scheduleApplicationJobs() { // Create scheduler scheduler = new Scheduler(); scheduler.setDaemon(true); scheduler.start(); // Enumerate all application methods and look // for the QlueSchedule annotation Method[] methods = this.getClass().getMethods(); for (Method m : methods) { if (m.isAnnotationPresent(QlueSchedule.class)) { if (Modifier.isPublic(m.getModifiers()) || (Modifier.isProtected(m.getModifiers()))) { QlueSchedule qs = m.getAnnotation(QlueSchedule.class); try { scheduler.schedule(qs.value(), new QlueScheduleMethodTaskWrapper(this, this, m)); log.info("Scheduled method: " + m.getName()); } catch (InvalidPatternException ipe) { log.error("QlueSchedule: Invalid schedule pattern: " + qs.value()); } } else { log.error("QlueSchedule: Scheduled methods must be public or protected: " + m.getName()); } } } } }
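For context on the binding code above, the following is a minimal, hypothetical sketch of a page whose public fields the parameter-binding and validation logic would process. The class name, field names and the import statements are illustrative assumptions (package locations are inferred from the file path below, and any lifecycle methods Page may require are omitted); only Page, QlueParameter and QlueFile come from the sources shown here.

import com.webkreator.qlue.Page;          // package inferred from the file path below
import com.webkreator.qlue.QlueParameter; // assumed to live alongside QlueApplication
import com.webkreator.qlue.QlueFile;

// Hypothetical page acting as its own command object (getCommandObject() may
// return the page itself). Bound fields must be public; otherwise binding
// throws "QlueParameter used on a non-public field".
public class SearchPage extends Page {

    // Bound from GET or POST (the default source); the "tfn" functions trim and
    // lower-case the raw text before validation, and mandatory = true records a
    // "qlue.validation.mandatory" error when the value is empty or whitespace.
    @QlueParameter(mandatory = true, tfn = "trim,lowercase", maxSize = 64)
    public String query;

    // Optional integer; the registered IntegerEditor converts the request text,
    // and a conversion failure becomes a validation error on this field.
    @QlueParameter
    public Integer pageNumber;

    // Array parameter: each request value is validated and converted separately,
    // and an empty array is set when the parameter is absent from the request.
    @QlueParameter
    public String[] tags;

    // File upload, handled by bindFileParameter(): the uploaded part is written
    // to a temporary file and wrapped in a QlueFile.
    @QlueParameter
    public QlueFile attachment;
}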
Restructured how Qlue is initialised to avoid scheduling application tasks to run too soon.
src/com/webkreator/qlue/QlueApplication.java
Restructured how Qlue is initialised to avoid scheduling application tasks to run too soon.
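The commit message above concerns when application tasks get scheduled. For reference, a hedged sketch of the kind of scheduled method that scheduleApplicationJobs() in the listing above discovers; the application class, its constructor and initialisation details, and the cron pattern are illustrative, and the pattern syntax follows the cron4j-style Scheduler that method uses.

import com.webkreator.qlue.QlueApplication; // packages inferred from the file path above
import com.webkreator.qlue.QlueSchedule;    // assumed location of the annotation

// Hypothetical application subclass. scheduleApplicationJobs() enumerates the
// application's methods and registers every public or protected method carrying
// @QlueSchedule with the scheduler, using the annotation value as the pattern.
public class MyApplication extends QlueApplication {

    @QlueSchedule("0 3 * * *") // run every day at 03:00
    public void nightlyMaintenance() {
        // application-specific maintenance work goes here
    }
}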
Java
apache-2.0
064a94b9299f7919ba2f3c29344dee6409ef8078
0
Uni-Sol/batik,apache/batik,apache/batik,Uni-Sol/batik,Uni-Sol/batik,apache/batik,Uni-Sol/batik,Uni-Sol/batik,apache/batik
/***************************************************************************** * Copyright (C) The Apache Software Foundation. All rights reserved. * * ------------------------------------------------------------------------- * * This software is published under the terms of the Apache Software License * * version 1.1, a copy of which has been included with this distribution in * * the LICENSE file. * *****************************************************************************/ package org.apache.batik.swing.gvt; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Cursor; import java.awt.Dimension; import java.awt.EventQueue; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Point; import java.awt.Rectangle; import java.awt.RenderingHints; import java.awt.Shape; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.awt.event.MouseMotionListener; import java.awt.geom.AffineTransform; import java.awt.geom.NoninvertibleTransformException; import java.awt.image.BufferedImage; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import javax.swing.JComponent; import org.apache.batik.gvt.GraphicsNode; import org.apache.batik.gvt.event.AWTEventDispatcher; import org.apache.batik.gvt.renderer.ConcreteImageRendererFactory; import org.apache.batik.gvt.renderer.ImageRenderer; import org.apache.batik.gvt.renderer.ImageRendererFactory; import org.apache.batik.gvt.text.Mark; /** * This class represents a component which can display a GVT tree. * * @author <a href="mailto:[email protected]">Stephane Hillion</a> * @version $Id$ */ public class JGVTComponent extends JComponent { /** * The listener. */ protected Listener listener; /** * The GVT tree renderer. */ protected GVTTreeRenderer gvtTreeRenderer; /** * The GVT tree root. */ protected GraphicsNode gvtRoot; /** * The renderer factory. */ protected ImageRendererFactory rendererFactory = new ConcreteImageRendererFactory(); /** * The current renderer. */ protected ImageRenderer renderer; /** * The GVT tree renderer listeners. */ protected List gvtTreeRendererListeners = Collections.synchronizedList(new LinkedList()); /** * Whether a render was requested. */ protected boolean needRender; /** * Whether to allow progressive paint. */ protected boolean progressivePaint; /** * The progressive paint thread. */ protected Thread progressivePaintThread; /** * The image to paint. */ protected BufferedImage image; /** * The initial rendering transform. */ protected AffineTransform initialTransform; /** * The transform used for rendering. */ protected AffineTransform renderingTransform; /** * The transform used for painting. */ protected AffineTransform paintingTransform; /** * The interactor list. */ protected List interactors = new LinkedList(); /** * The current interactor. */ protected Interactor interactor; /** * The overlays. */ protected List overlays = new LinkedList(); /** * The event dispatcher. */ protected AWTEventDispatcher eventDispatcher; /** * The text selection manager. */ protected TextSelectionManager textSelectionManager; /** * Whether the double buffering is enabled. */ protected boolean doubleBufferedRendering; /** * Whether the GVT tree should be reactive to mouse and key events. 
*/ protected boolean eventsEnabled; /** * Whether the text should be selectable if eventEnabled is false, * this flag is ignored. */ protected boolean selectableText; /** * Whether to suspend interactions. */ protected boolean suspendInteractions; /** * Whether to inconditionally disable interactions. */ protected boolean disableInteractions; /** * Creates a new JGVTComponent. */ public JGVTComponent() { this(false, false); } /** * Creates a new JGVTComponent. * @param eventEnabled Whether the GVT tree should be reactive * to mouse and key events. * @param selectableText Whether the text should be selectable. * if eventEnabled is false, this flag is ignored. */ public JGVTComponent(boolean eventsEnabled, boolean selectableText) { setBackground(Color.white); this.eventsEnabled = eventsEnabled; this.selectableText = selectableText; listener = createListener(); addKeyListener(listener); addMouseListener(listener); addMouseMotionListener(listener); addGVTTreeRendererListener(listener); addComponentListener(new ComponentAdapter() { public void componentResized(ComponentEvent e) { updateRenderingTransform(); scheduleGVTRendering(); } }); } /** * Returns the interactor list. */ public List getInteractors() { return interactors; } /** * Returns the overlay list. */ public List getOverlays() { return overlays; } /** * Returns the off-screen image, if any. */ public BufferedImage getOffScreen() { return image; } /** * Resets the rendering transform to its initial value. */ public void resetRenderingTransform() { setRenderingTransform(initialTransform); } /** * Stops the processing of the current tree. */ public void stopProcessing() { if (gvtTreeRenderer != null) { needRender = false; gvtTreeRenderer.interrupt(); interruptProgressivePaintThread(); } } /** * Returns the root of the GVT tree displayed by this component, if any. */ public GraphicsNode getGraphicsNode() { return gvtRoot; } /** * Sets the GVT tree to display. */ public void setGraphicsNode(GraphicsNode gn) { setGraphicsNode(gn, true); } /** * Sets the GVT tree to display. */ protected void setGraphicsNode(GraphicsNode gn, boolean createDispatcher) { gvtRoot = gn; if (gn != null && createDispatcher) { initializeEventHandling(); } if (eventDispatcher != null) { eventDispatcher.setRootNode(gn); } computeRenderingTransform(); } /** * Initializes the event handling classes. */ protected void initializeEventHandling() { if (eventsEnabled) { eventDispatcher = new AWTEventDispatcher(); if (selectableText) { textSelectionManager = new TextSelectionManager(this, eventDispatcher); } } } //////////////////////////////////////////////////////////////////////// // Selection methods //////////////////////////////////////////////////////////////////////// /** * Sets the color of the selection overlay to the specified color. * * @param color the new color of the selection overlay */ public void setSelectionOverlayColor(Color color) { if (textSelectionManager != null) { textSelectionManager.setSelectionOverlayColor(color); } } /** * Returns the color of the selection overlay. */ public Color getSelectionOverlayColor() { if (textSelectionManager != null) { return textSelectionManager.getSelectionOverlayColor(); } else { return null; } } /** * Sets the color of the outline of the selection overlay to the specified * color. 
* * @param color the new color of the outline of the selection overlay */ public void setSelectionOverlayStrokeColor(Color color) { if (textSelectionManager != null) { textSelectionManager.setSelectionOverlayStrokeColor(color); } } /** * Returns the color of the outline of the selection overlay. */ public Color getSelectionOverlayStrokeColor() { if (textSelectionManager != null) { return textSelectionManager.getSelectionOverlayStrokeColor(); } else { return null; } } /** * Sets whether or not the selection overlay will be painted in XOR mode, * depending on the specified parameter. * * @param state true implies the selection overlay will be in XOR mode */ public void setSelectionOverlayXORMode(boolean state) { if (textSelectionManager != null) { textSelectionManager.setSelectionOverlayXORMode(state); } } /** * Returns true if the selection overlay is painted in XOR mode, false * otherwise. */ public boolean isSelectionOverlayXORMode() { if (textSelectionManager != null) { return textSelectionManager.isSelectionOverlayXORMode(); } else { return false; } } /** * Sets the selection to the specified start and end mark. * * @param start the mark used to define where the selection starts * @param end the mark used to define where the selection ends */ public void select(Mark start, Mark end) { if (textSelectionManager != null) { textSelectionManager.setSelection(start, end); } } /** * Deselects all. */ public void deselectAll() { if (textSelectionManager != null) { textSelectionManager.clearSelection(); } } //////////////////////////////////////////////////////////////////////// // Painting methods //////////////////////////////////////////////////////////////////////// /** * Whether to enable the progressive paint. */ public void setProgressivePaint(boolean b) { if (progressivePaint != b) { progressivePaint = b; interruptProgressivePaintThread(); } } /** * Tells whether the progressive paint is enabled. */ public boolean getProgressivePaint() { return progressivePaint; } /** * Repaints immediately the component. */ public void immediateRepaint() { if (EventQueue.isDispatchThread()) { Dimension dim = getSize(); paintImmediately(0, 0, dim.width, dim.height); } else { try { EventQueue.invokeAndWait(new Runnable() { public void run() { Dimension dim = getSize(); paintImmediately(0, 0, dim.width, dim.height); } }); } catch (Exception e) { } } } /** * Paints this component. */ public void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D g2d = (Graphics2D)g; Dimension d = getSize(); g2d.setComposite(AlphaComposite.SrcOver); g2d.setPaint(getBackground()); g2d.fillRect(0, 0, d.width, d.height); if (image != null) { if (paintingTransform != null) { g2d.transform(paintingTransform); } g2d.drawRenderedImage(image, null); g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); Iterator it = overlays.iterator(); while (it.hasNext()) { ((Overlay)it.next()).paint(g); } } } /** * Sets the painting transform. A null transform is the same as * an identity transform. * The next repaint will use the given transform. */ public void setPaintingTransform(AffineTransform at) { paintingTransform = at; immediateRepaint(); } /** * Returns the current painting transform. */ public AffineTransform getPaintingTransform() { return paintingTransform; } /** * Sets the rendering transform. * Calling this method causes a rendering to be performed. 
*/ public void setRenderingTransform(AffineTransform at) { renderingTransform = at; suspendInteractions = true; if (eventDispatcher != null) { try { eventDispatcher.setBaseTransform (renderingTransform.createInverse()); } catch (NoninvertibleTransformException e) { handleException(e); } } scheduleGVTRendering(); } /** * Returns the initial transform. */ public AffineTransform getInitialTransform() { return initialTransform; } /** * Returns the current rendering transform. */ public AffineTransform getRenderingTransform() { return renderingTransform; } /** * Sets whether this component should use double buffering to render * SVG documents. The change will be effective during the next * rendering. */ public void setDoubleBufferedRendering(boolean b) { doubleBufferedRendering = b; } /** * Tells whether this component use double buffering to render * SVG documents. */ public boolean getDoubleBufferedRendering() { return doubleBufferedRendering; } /** * Adds a GVTTreeRendererListener to this component. */ public void addGVTTreeRendererListener(GVTTreeRendererListener l) { gvtTreeRendererListeners.add(l); } /** * Removes a GVTTreeRendererListener from this component. */ public void removeGVTTreeRendererListener(GVTTreeRendererListener l) { gvtTreeRendererListeners.remove(l); } /** * Flush any cached image data (preliminary interface, * may be removed or modified in the future). */ public void flush() { renderer.flush(); } /** * Flush a rectangle of cached image data (preliminary interface, * may be removed or modified in the future). */ public void flush(Rectangle r) { renderer.flush(r); } /** * Creates a new renderer. */ protected ImageRenderer createImageRenderer() { return rendererFactory.createStaticImageRenderer(); } /** * Renders the GVT tree. */ protected void renderGVTTree() { Dimension d = getSize(); if (gvtRoot == null || d.width <= 0 || d.height <= 0) { return; } // Renderer setup. if (renderer == null || renderer.getTree() != gvtRoot) { renderer = createImageRenderer(); renderer.setTree(gvtRoot); } // Area of interest computation. AffineTransform inv; try { inv = renderingTransform.createInverse(); } catch (NoninvertibleTransformException e) { throw new InternalError(e.getMessage()); } Shape s = inv.createTransformedShape (new Rectangle(0, 0, d.width, d.height)); // Rendering thread setup. gvtTreeRenderer = new GVTTreeRenderer(renderer, renderingTransform, doubleBufferedRendering, s, d.width, d.height); gvtTreeRenderer.setPriority(Thread.MIN_PRIORITY); Iterator it = gvtTreeRendererListeners.iterator(); while (it.hasNext()) { gvtTreeRenderer.addGVTTreeRendererListener ((GVTTreeRendererListener)it.next()); } // Disable the dispatch during the rendering // to avoid concurrent access to the GVT tree. if (eventDispatcher != null) { eventDispatcher.setRootNode(null); } gvtTreeRenderer.start(); } /** * Computes the initial value of the transform used for rendering. */ protected void computeRenderingTransform() { initialTransform = new AffineTransform(); setRenderingTransform(initialTransform); } /** * Updates the value of the transform used for rendering. */ protected void updateRenderingTransform() { // Do nothing. } /** * Handles an exception. */ protected void handleException(Exception e) { // Do nothing. 
} /** * Releases the references to the rendering resources, */ protected void releaseRenderingReferences() { eventDispatcher = null; if (textSelectionManager != null) { overlays.remove(textSelectionManager.getSelectionOverlay()); textSelectionManager = null; } renderer = null; gvtRoot = null; } /** * Schedules a new GVT rendering. */ protected void scheduleGVTRendering() { if (gvtTreeRenderer != null) { needRender = true; gvtTreeRenderer.interrupt(); } else { renderGVTTree(); } } private void interruptProgressivePaintThread() { if (progressivePaintThread != null) { progressivePaintThread.interrupt(); progressivePaintThread = null; } } /** * Creates an instance of Listener. */ protected Listener createListener() { return new Listener(); } /** * To hide the listener methods. */ protected class Listener implements GVTTreeRendererListener, KeyListener, MouseListener, MouseMotionListener { /** * Creates a new Listener. */ protected Listener() { } // GVTTreeRendererListener /////////////////////////////////////////// /** * Called when a rendering is in its preparing phase. */ public void gvtRenderingPrepare(GVTTreeRendererEvent e) { suspendInteractions = true; if (!progressivePaint && !doubleBufferedRendering) { image = null; immediateRepaint(); } } /** * Called when a rendering started. */ public void gvtRenderingStarted(GVTTreeRendererEvent e) { paintingTransform = null; if (progressivePaint && !doubleBufferedRendering) { image = e.getImage(); progressivePaintThread = new Thread() { public void run() { final Thread thisThread = this; try { while (!isInterrupted()) { EventQueue.invokeAndWait(new Runnable() { public void run() { if (progressivePaintThread == thisThread) { Dimension dim = getSize(); paintImmediately(0, 0, dim.width, dim.height); } } }); sleep(200); } } catch (Exception ex) { } } }; progressivePaintThread.setPriority(Thread.MIN_PRIORITY + 1); progressivePaintThread.start(); } if (!doubleBufferedRendering) { suspendInteractions = false; } } /** * Called when a rendering was completed. */ public void gvtRenderingCompleted(GVTTreeRendererEvent e) { interruptProgressivePaintThread(); if (doubleBufferedRendering) { suspendInteractions = false; } gvtTreeRenderer = null; if (needRender) { renderGVTTree(); needRender = false; } else { image = e.getImage(); immediateRepaint(); } if (eventDispatcher != null) { eventDispatcher.setRootNode(gvtRoot); } } /** * Called when a rendering was cancelled. */ public void gvtRenderingCancelled(GVTTreeRendererEvent e) { renderingStopped(); } /** * Called when a rendering failed. */ public void gvtRenderingFailed(GVTTreeRendererEvent e) { renderingStopped(); } /** * The actual implementation of gvtRenderingCancelled() and * gvtRenderingFailed(). */ private void renderingStopped() { interruptProgressivePaintThread(); if (doubleBufferedRendering) { suspendInteractions = false; } gvtTreeRenderer = null; if (needRender) { renderGVTTree(); needRender = false; } else { immediateRepaint(); } } // KeyListener ////////////////////////////////////////////////////// /** * Invoked when a key has been typed. * This event occurs when a key press is followed by a key release. */ public void keyTyped(KeyEvent e) { selectInteractor(e); if (interactor != null) { interactor.keyTyped(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchKeyTyped(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchKeyTyped(KeyEvent e) { eventDispatcher.keyTyped(e); } /** * Invoked when a key has been pressed. 
*/ public void keyPressed(KeyEvent e) { selectInteractor(e); if (interactor != null) { interactor.keyPressed(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchKeyPressed(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchKeyPressed(KeyEvent e) { eventDispatcher.keyPressed(e); } /** * Invoked when a key has been released. */ public void keyReleased(KeyEvent e) { selectInteractor(e); if (interactor != null) { interactor.keyReleased(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchKeyReleased(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchKeyReleased(KeyEvent e) { eventDispatcher.keyReleased(e); } // MouseListener //////////////////////////////////////////////////// /** * Invoked when the mouse has been clicked on a component. */ public void mouseClicked(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseClicked(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseClicked(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseClicked(MouseEvent e) { eventDispatcher.mouseClicked(e); } /** * Invoked when a mouse button has been pressed on a component. */ public void mousePressed(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mousePressed(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMousePressed(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMousePressed(MouseEvent e) { eventDispatcher.mousePressed(e); } /** * Invoked when a mouse button has been released on a component. */ public void mouseReleased(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseReleased(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseReleased(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseReleased(MouseEvent e) { eventDispatcher.mouseReleased(e); } /** * Invoked when the mouse enters a component. */ public void mouseEntered(MouseEvent e) { requestFocus(); selectInteractor(e); if (interactor != null) { interactor.mouseEntered(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseEntered(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseEntered(MouseEvent e) { eventDispatcher.mouseEntered(e); } /** * Invoked when the mouse exits a component. */ public void mouseExited(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseExited(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseExited(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseExited(MouseEvent e) { eventDispatcher.mouseExited(e); } // MouseMotionListener ////////////////////////////////////////////// /** * Invoked when a mouse button is pressed on a component and then * dragged. Mouse drag events will continue to be delivered to * the component where the first originated until the mouse button is * released (regardless of whether the mouse position is within the * bounds of the component). */ public void mouseDragged(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseDragged(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseDragged(e); } } /** * Dispatches the event to the GVT tree. 
*/ protected void dispatchMouseDragged(MouseEvent e) { eventDispatcher.mouseDragged(e); } /** * Invoked when the mouse button has been moved on a component * (with no buttons no down). */ public void mouseMoved(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseMoved(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseMoved(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseMoved(MouseEvent e) { eventDispatcher.mouseMoved(e); } /** * Selects an interactor, given an input event. */ protected void selectInteractor(InputEvent ie) { if (!disableInteractions && !suspendInteractions && interactor == null && gvtRoot != null) { Iterator it = interactors.iterator(); while (it.hasNext()) { Interactor i = (Interactor)it.next(); if (i.startInteraction(ie)) { interactor = i; break; } } } } /** * Deselects an interactor, if the interaction has finished. */ protected void deselectInteractor() { if (interactor.endInteraction()) { interactor = null; } } } }
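To illustrate the component above in use, here is a small sketch that embeds a JGVTComponent in a Swing frame. The GraphicsNode is taken as a parameter because constructing a GVT tree is outside this listing; the viewer class itself is hypothetical, and everything else uses only methods visible in the code above plus standard Swing.

import javax.swing.JFrame;
import org.apache.batik.gvt.GraphicsNode;
import org.apache.batik.swing.gvt.JGVTComponent;

public class GVTViewer {

    /** Shows the given GVT tree in a frame; building the tree is up to the caller. */
    public static void show(GraphicsNode root) {
        // Enable event dispatching and text selection (second constructor above).
        JGVTComponent canvas = new JGVTComponent(true, true);
        canvas.setProgressivePaint(true);          // repaint while the tree is being rendered
        canvas.setDoubleBufferedRendering(false);  // paint progressively to the component itself
        canvas.setGraphicsNode(root);              // schedules a rendering; it runs once the
                                                   // component has a non-zero size

        JFrame frame = new JFrame("GVT viewer");
        frame.getContentPane().add(canvas);
        frame.setSize(600, 400);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setVisible(true);
    }
}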
sources/org/apache/batik/swing/gvt/JGVTComponent.java
/***************************************************************************** * Copyright (C) The Apache Software Foundation. All rights reserved. * * ------------------------------------------------------------------------- * * This software is published under the terms of the Apache Software License * * version 1.1, a copy of which has been included with this distribution in * * the LICENSE file. * *****************************************************************************/ package org.apache.batik.swing.gvt; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Cursor; import java.awt.Dimension; import java.awt.EventQueue; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Point; import java.awt.Rectangle; import java.awt.RenderingHints; import java.awt.Shape; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.awt.event.MouseMotionListener; import java.awt.geom.AffineTransform; import java.awt.geom.NoninvertibleTransformException; import java.awt.image.BufferedImage; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import javax.swing.JComponent; import org.apache.batik.gvt.GraphicsNode; import org.apache.batik.gvt.event.AWTEventDispatcher; import org.apache.batik.gvt.renderer.ConcreteImageRendererFactory; import org.apache.batik.gvt.renderer.ImageRenderer; import org.apache.batik.gvt.renderer.ImageRendererFactory; import org.apache.batik.gvt.text.Mark; /** * This class represents a component which can display a GVT tree. * * @author <a href="mailto:[email protected]">Stephane Hillion</a> * @version $Id$ */ public class JGVTComponent extends JComponent { /** * The listener. */ protected Listener listener; /** * The GVT tree renderer. */ protected GVTTreeRenderer gvtTreeRenderer; /** * The GVT tree root. */ protected GraphicsNode gvtRoot; /** * The renderer factory. */ protected ImageRendererFactory rendererFactory = new ConcreteImageRendererFactory(); /** * The current renderer. */ protected ImageRenderer renderer; /** * The GVT tree renderer listeners. */ protected List gvtTreeRendererListeners = Collections.synchronizedList(new LinkedList()); /** * Whether a render was requested. */ protected boolean needRender; /** * Whether to allow progressive paint. */ protected boolean progressivePaint; /** * The progressive paint thread. */ protected Thread progressivePaintThread; /** * The image to paint. */ protected BufferedImage image; /** * The initial rendering transform. */ protected AffineTransform initialTransform; /** * The transform used for rendering. */ protected AffineTransform renderingTransform; /** * The transform used for painting. */ protected AffineTransform paintingTransform; /** * The interactor list. */ protected List interactors = new LinkedList(); /** * The current interactor. */ protected Interactor interactor; /** * The overlays. */ protected List overlays = new LinkedList(); /** * The event dispatcher. */ protected AWTEventDispatcher eventDispatcher; /** * The text selection manager. */ protected TextSelectionManager textSelectionManager; /** * Whether the double buffering is enabled. */ protected boolean doubleBufferedRendering; /** * Whether the GVT tree should be reactive to mouse and key events. 
*/ protected boolean eventsEnabled; /** * Whether the text should be selectable if eventEnabled is false, * this flag is ignored. */ protected boolean selectableText; /** * Whether to suspend interactions. */ protected boolean suspendInteractions; /** * Whether to inconditionally disable interactions. */ protected boolean disableInteractions; /** * Creates a new JGVTComponent. */ public JGVTComponent() { this(false, false); } /** * Creates a new JGVTComponent. * @param eventEnabled Whether the GVT tree should be reactive * to mouse and key events. * @param selectableText Whether the text should be selectable. * if eventEnabled is false, this flag is ignored. */ public JGVTComponent(boolean eventsEnabled, boolean selectableText) { setBackground(Color.white); this.eventsEnabled = eventsEnabled; this.selectableText = selectableText; listener = createListener(); addKeyListener(listener); addMouseListener(listener); addMouseMotionListener(listener); addGVTTreeRendererListener(listener); addComponentListener(new ComponentAdapter() { public void componentResized(ComponentEvent e) { updateRenderingTransform(); scheduleGVTRendering(); } }); } /** * Returns the interactor list. */ public List getInteractors() { return interactors; } /** * Returns the overlay list. */ public List getOverlays() { return overlays; } /** * Returns the off-screen image, if any. */ public BufferedImage getOffScreen() { return image; } /** * Resets the rendering transform to its initial value. */ public void resetRenderingTransform() { setRenderingTransform(initialTransform); } /** * Stops the processing of the current tree. */ public void stopProcessing() { if (gvtTreeRenderer != null) { needRender = false; gvtTreeRenderer.interrupt(); interruptProgressivePaintThread(); } } /** * Returns the root of the GVT tree displayed by this component, if any. */ public GraphicsNode getGraphicsNode() { return gvtRoot; } /** * Sets the GVT tree to display. */ public void setGraphicsNode(GraphicsNode gn) { setGraphicsNode(gn, true); } /** * Sets the GVT tree to display. */ protected void setGraphicsNode(GraphicsNode gn, boolean createDispatcher) { gvtRoot = gn; if (gn != null && createDispatcher) { initializeEventHandling(); } if (eventDispatcher != null) { eventDispatcher.setRootNode(gn); } computeRenderingTransform(); } /** * Initializes the event handling classes. */ protected void initializeEventHandling() { if (eventsEnabled) { eventDispatcher = new AWTEventDispatcher(); if (selectableText) { textSelectionManager = new TextSelectionManager(this, eventDispatcher); } } } //////////////////////////////////////////////////////////////////////// // Selection methods //////////////////////////////////////////////////////////////////////// /** * Sets the color of the selection overlay to the specified color. * * @param color the new color of the selection overlay */ public void setSelectionOverlayColor(Color color) { if (textSelectionManager != null) { textSelectionManager.setSelectionOverlayColor(color); } } /** * Returns the color of the selection overlay. */ public Color getSelectionOverlayColor() { if (textSelectionManager != null) { return textSelectionManager.getSelectionOverlayColor(); } else { return null; } } /** * Sets the color of the outline of the selection overlay to the specified * color. 
* * @param color the new color of the outline of the selection overlay */ public void setSelectionOverlayStrokeColor(Color color) { if (textSelectionManager != null) { textSelectionManager.setSelectionOverlayStrokeColor(color); } } /** * Returns the color of the outline of the selection overlay. */ public Color getSelectionOverlayStrokeColor() { if (textSelectionManager != null) { return textSelectionManager.getSelectionOverlayStrokeColor(); } else { return null; } } /** * Sets whether or not the selection overlay will be painted in XOR mode, * depending on the specified parameter. * * @param state true implies the selection overlay will be in XOR mode */ public void setSelectionOverlayXORMode(boolean state) { if (textSelectionManager != null) { textSelectionManager.setSelectionOverlayXORMode(state); } } /** * Returns true if the selection overlay is painted in XOR mode, false * otherwise. */ public boolean isSelectionOverlayXORMode() { if (textSelectionManager != null) { return textSelectionManager.isSelectionOverlayXORMode(); } else { return false; } } /** * Sets the selection to the specified start and end mark. * * @param start the mark used to define where the selection starts * @param end the mark used to define where the selection ends */ public void select(Mark start, Mark end) { if (textSelectionManager != null) { textSelectionManager.setSelection(start, end); } } /** * Deselects all. */ public void deselectAll() { if (textSelectionManager != null) { textSelectionManager.clearSelection(); } } //////////////////////////////////////////////////////////////////////// // Painting methods //////////////////////////////////////////////////////////////////////// /** * Whether to enable the progressive paint. */ public void setProgressivePaint(boolean b) { if (progressivePaint != b) { progressivePaint = b; interruptProgressivePaintThread(); } } /** * Tells whether the progressive paint is enabled. */ public boolean getProgressivePaint() { return progressivePaint; } /** * Repaints immediately the component. */ public void immediateRepaint() { if (EventQueue.isDispatchThread()) { Dimension dim = getSize(); paintImmediately(0, 0, dim.width, dim.height); } else { try { EventQueue.invokeAndWait(new Runnable() { public void run() { Dimension dim = getSize(); paintImmediately(0, 0, dim.width, dim.height); } }); } catch (Exception e) { } } } /** * Paints this component. */ public void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D g2d = (Graphics2D)g; Dimension d = getSize(); g2d.setComposite(AlphaComposite.SrcOver); g2d.setPaint(getBackground()); g2d.fillRect(0, 0, d.width, d.height); if (image != null) { if (paintingTransform != null) { g2d.transform(paintingTransform); } g2d.drawRenderedImage(image, null); g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); Iterator it = overlays.iterator(); while (it.hasNext()) { ((Overlay)it.next()).paint(g); } } } /** * Sets the painting transform. A null transform is the same as * an identity transform. * The next repaint will use the given transform. */ public void setPaintingTransform(AffineTransform at) { paintingTransform = at; immediateRepaint(); } /** * Returns the current painting transform. */ public AffineTransform getPaintingTransform() { return paintingTransform; } /** * Sets the rendering transform. * Calling this method causes a rendering to be performed. 
*/ public void setRenderingTransform(AffineTransform at) { renderingTransform = at; suspendInteractions = true; if (eventDispatcher != null) { try { eventDispatcher.setBaseTransform (renderingTransform.createInverse()); } catch (NoninvertibleTransformException e) { handleException(e); } } scheduleGVTRendering(); } /** * Returns the initial transform. */ public AffineTransform getInitialTransform() { return initialTransform; } /** * Returns the current rendering transform. */ public AffineTransform getRenderingTransform() { return renderingTransform; } /** * Sets whether this component should use double buffering to render * SVG documents. The change will be effective during the next * rendering. */ public void setDoubleBufferedRendering(boolean b) { doubleBufferedRendering = b; } /** * Tells whether this component use double buffering to render * SVG documents. */ public boolean getDoubleBufferedRendering() { return doubleBufferedRendering; } /** * Adds a GVTTreeRendererListener to this component. */ public void addGVTTreeRendererListener(GVTTreeRendererListener l) { gvtTreeRendererListeners.add(l); } /** * Removes a GVTTreeRendererListener from this component. */ public void removeGVTTreeRendererListener(GVTTreeRendererListener l) { gvtTreeRendererListeners.remove(l); } /** * Flush any cached image data (preliminary interface, * may be removed or modified in the future). */ public void flush() { renderer.flush(); } /** * Flush a rectangle of cached image data (preliminary interface, * may be removed or modified in the future). */ public void flush(Rectangle r) { renderer.flush(r); } /** * Creates a new renderer. */ protected ImageRenderer createImageRenderer() { return rendererFactory.createStaticImageRenderer(); } /** * Renders the GVT tree. */ protected void renderGVTTree() { Dimension d = getSize(); if (gvtRoot == null || d.width <= 0 || d.height <= 0) { return; } // Renderer setup. if (renderer == null || renderer.getTree() != gvtRoot) { renderer = createImageRenderer(); renderer.setTree(gvtRoot); } // Area of interest computation. AffineTransform inv; try { inv = renderingTransform.createInverse(); } catch (NoninvertibleTransformException e) { throw new InternalError(e.getMessage()); } Shape s = inv.createTransformedShape (new Rectangle(0, 0, d.width, d.height)); // Rendering thread setup. gvtTreeRenderer = new GVTTreeRenderer(renderer, renderingTransform, doubleBufferedRendering, s, d.width, d.height); gvtTreeRenderer.setPriority(Thread.MIN_PRIORITY); Iterator it = gvtTreeRendererListeners.iterator(); while (it.hasNext()) { gvtTreeRenderer.addGVTTreeRendererListener ((GVTTreeRendererListener)it.next()); } // Disable the dispatch during the rendering // to avoid concurrent access to the GVT tree. if (eventDispatcher != null) { eventDispatcher.setRootNode(null); } gvtTreeRenderer.start(); } /** * Computes the initial value of the transform used for rendering. */ protected void computeRenderingTransform() { initialTransform = new AffineTransform(); setRenderingTransform(initialTransform); } /** * Updates the value of the transform used for rendering. */ protected void updateRenderingTransform() { // Do nothing. } /** * Handles an exception. */ protected void handleException(Exception e) { // Do nothing. 
} /** * Releases the references to the rendering resources, */ protected void releaseRenderingReferences() { eventDispatcher = null; if (textSelectionManager != null) { overlays.remove(textSelectionManager.getSelectionOverlay()); textSelectionManager = null; } renderer = null; gvtRoot = null; } /** * Schedules a new GVT rendering. */ protected void scheduleGVTRendering() { if (gvtTreeRenderer != null) { needRender = true; gvtTreeRenderer.interrupt(); } else { renderGVTTree(); } } private void interruptProgressivePaintThread() { if (progressivePaintThread != null) { progressivePaintThread.interrupt(); progressivePaintThread = null; } } /** * Creates an instance of Listener. */ protected Listener createListener() { return new Listener(); } /** * To hide the listener methods. */ protected class Listener implements GVTTreeRendererListener, KeyListener, MouseListener, MouseMotionListener { /** * Creates a new Listener. */ protected Listener() { } // GVTTreeRendererListener /////////////////////////////////////////// /** * Called when a rendering is in its preparing phase. */ public void gvtRenderingPrepare(GVTTreeRendererEvent e) { suspendInteractions = true; if (!progressivePaint && !doubleBufferedRendering) { image = null; immediateRepaint(); } } /** * Called when a rendering started. */ public void gvtRenderingStarted(GVTTreeRendererEvent e) { paintingTransform = null; if (progressivePaint && !doubleBufferedRendering) { image = e.getImage(); progressivePaintThread = new Thread() { public void run() { final Thread thisThread = this; try { while (!isInterrupted()) { EventQueue.invokeAndWait(new Runnable() { public void run() { if (progressivePaintThread == thisThread) { Dimension dim = getSize(); paintImmediately(0, 0, dim.width, dim.height); } } }); sleep(200); } } catch (Exception ex) { } } }; progressivePaintThread.setPriority(Thread.MIN_PRIORITY + 1); progressivePaintThread.start(); } if (!doubleBufferedRendering) { suspendInteractions = false; } } /** * Called when a rendering was completed. */ public void gvtRenderingCompleted(GVTTreeRendererEvent e) { interruptProgressivePaintThread(); if (doubleBufferedRendering) { suspendInteractions = false; } gvtTreeRenderer = null; if (needRender) { renderGVTTree(); needRender = false; } else { image = e.getImage(); immediateRepaint(); } if (eventDispatcher != null) { eventDispatcher.setRootNode(gvtRoot); } } /** * Called when a rendering was cancelled. */ public void gvtRenderingCancelled(GVTTreeRendererEvent e) { renderingStopped(); } /** * Called when a rendering failed. */ public void gvtRenderingFailed(GVTTreeRendererEvent e) { renderingStopped(); } /** * The actual implementation of gvtRenderingCancelled() and * gvtRenderingFailed(). */ private void renderingStopped() { interruptProgressivePaintThread(); if (doubleBufferedRendering) { suspendInteractions = false; } gvtTreeRenderer = null; if (needRender) { renderGVTTree(); needRender = false; } else { immediateRepaint(); } } // KeyListener ////////////////////////////////////////////////////// /** * Invoked when a key has been typed. * This event occurs when a key press is followed by a key release. */ public void keyTyped(KeyEvent e) { selectInteractor(e); if (interactor != null) { interactor.keyTyped(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchKeyTyped(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchKeyTyped(KeyEvent e) { eventDispatcher.keyTyped(e); } /** * Invoked when a key has been pressed. 
*/ public void keyPressed(KeyEvent e) { selectInteractor(e); if (interactor != null) { interactor.keyPressed(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchKeyPressed(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchKeyPressed(KeyEvent e) { eventDispatcher.keyPressed(e); } /** * Invoked when a key has been released. */ public void keyReleased(KeyEvent e) { selectInteractor(e); if (interactor != null) { interactor.keyReleased(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchKeyReleased(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchKeyReleased(KeyEvent e) { eventDispatcher.keyReleased(e); } // MouseListener //////////////////////////////////////////////////// /** * Invoked when the mouse has been clicked on a component. */ public void mouseClicked(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseClicked(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseClicked(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseClicked(MouseEvent e) { eventDispatcher.mouseClicked(e); } /** * Invoked when a mouse button has been pressed on a component. */ public void mousePressed(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mousePressed(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMousePressed(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMousePressed(MouseEvent e) { eventDispatcher.mousePressed(e); } /** * Invoked when a mouse button has been released on a component. */ public void mouseReleased(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseReleased(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseReleased(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseReleased(MouseEvent e) { eventDispatcher.mouseReleased(e); } /** * Invoked when the mouse enters a component. */ public void mouseEntered(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseEntered(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseEntered(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseEntered(MouseEvent e) { eventDispatcher.mouseEntered(e); } /** * Invoked when the mouse exits a component. */ public void mouseExited(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseExited(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseExited(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseExited(MouseEvent e) { eventDispatcher.mouseExited(e); } // MouseMotionListener ////////////////////////////////////////////// /** * Invoked when a mouse button is pressed on a component and then * dragged. Mouse drag events will continue to be delivered to * the component where the first originated until the mouse button is * released (regardless of whether the mouse position is within the * bounds of the component). */ public void mouseDragged(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseDragged(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseDragged(e); } } /** * Dispatches the event to the GVT tree. 
*/ protected void dispatchMouseDragged(MouseEvent e) { eventDispatcher.mouseDragged(e); } /** * Invoked when the mouse button has been moved on a component * (with no buttons no down). */ public void mouseMoved(MouseEvent e) { selectInteractor(e); if (interactor != null) { interactor.mouseMoved(e); deselectInteractor(); } else if (eventDispatcher != null) { dispatchMouseMoved(e); } } /** * Dispatches the event to the GVT tree. */ protected void dispatchMouseMoved(MouseEvent e) { eventDispatcher.mouseMoved(e); } /** * Selects an interactor, given an input event. */ protected void selectInteractor(InputEvent ie) { if (!disableInteractions && !suspendInteractions && interactor == null && gvtRoot != null) { Iterator it = interactors.iterator(); while (it.hasNext()) { Interactor i = (Interactor)it.next(); if (i.startInteraction(ie)) { interactor = i; break; } } } } /** * Deselects an interactor, if the interaction has finished. */ protected void deselectInteractor() { if (interactor.endInteraction()) { interactor = null; } } } }
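The rendering pipeline above is driven through getRenderingTransform()/setRenderingTransform(). A minimal usage sketch follows; the GVTZoomHelper class, the zoom() signature and the choice of component-space coordinates are illustrative assumptions, not part of Batik itself.

import java.awt.geom.AffineTransform;

import org.apache.batik.swing.gvt.JGVTComponent;

/**
 * Minimal sketch: zooms a JGVTComponent around a point given in component
 * coordinates by updating its rendering transform, which schedules an
 * asynchronous re-rendering of the GVT tree (see renderGVTTree() above).
 */
public final class GVTZoomHelper {

    private GVTZoomHelper() {
    }

    public static void zoom(JGVTComponent canvas, double factor, double cx, double cy) {
        // Build the zoom in component (device) space ...
        AffineTransform zoomAt = new AffineTransform();
        zoomAt.translate(cx, cy);
        zoomAt.scale(factor, factor);
        zoomAt.translate(-cx, -cy);

        // ... and prepend it to the transform currently used for rendering.
        AffineTransform at = new AffineTransform(canvas.getRenderingTransform());
        at.preConcatenate(zoomAt);

        canvas.setRenderingTransform(at);
    }
}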
requestFocus is done each time the mouse enters the JGVTComponent so that keyboard events can be dispatched immediately (without an initial mouse click inside the canvas). PR: Obtained from: Submitted by: Reviewed by: git-svn-id: e944db0f7b5c8f0ae3e1ad43ca99b026751ef0c2@200930 13f79535-47bb-0310-9956-ffa450edef68
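The change described by this message amounts to requesting focus from the Listener's mouseEntered handler. A sketch of that handler is shown below; it is an approximation of the described change, not the actual diff.

// Inside JGVTComponent.Listener (sketch only; the real patch may differ).
public void mouseEntered(MouseEvent e) {
    // Grab keyboard focus as soon as the pointer enters the canvas so that
    // key events can be dispatched without an initial mouse click.
    requestFocus();
    selectInteractor(e);
    if (interactor != null) {
        interactor.mouseEntered(e);
        deselectInteractor();
    } else if (eventDispatcher != null) {
        dispatchMouseEntered(e);
    }
}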
sources/org/apache/batik/swing/gvt/JGVTComponent.java
Java
apache-2.0
f2b1f2fe495856c8b6c424ff4e4d0d53ded24c13
0
tony--/nodyn,nodyn/nodyn,dherges/nodyn,dherges/nodyn,nodyn/nodyn,dherges/nodyn,tony--/nodyn,tony--/nodyn,nodyn/nodyn
package org.projectodd.nodej.bindings.os;

import java.util.HashMap;
import java.util.Map;

import org.dynjs.runtime.ExecutionContext;
import org.dynjs.runtime.GlobalObject;
import org.hyperic.sigar.SigarException;
import org.hyperic.sigar.SysInfo;

public class GetOSType extends OsFunctionBinding {

    private static Map<String, String> names = new HashMap<String, String>();
    private SysInfo sysInfo;

    static {
        names.put("MacOSX", "Darwin");
    }

    public GetOSType(GlobalObject globalObject) {
        super(globalObject);
        sysInfo = new SysInfo();
        try {
            sysInfo.gather(sigar);
        } catch (SigarException e) {
            e.printStackTrace();
        }
    }

    @Override
    public Object call(ExecutionContext context, Object self, Object... args) {
        String name = "unknown";
        name = sysInfo.getName();
        if (names.get(name) != null) {
            return names.get(name);
        }
        return name;
    }
}
src/main/java/org/projectodd/nodej/bindings/os/GetOSType.java
package org.projectodd.nodej.bindings.os;

import java.util.HashMap;
import java.util.Map;

import org.dynjs.runtime.ExecutionContext;
import org.dynjs.runtime.GlobalObject;

public class GetOSType extends OsFunctionBinding {

    private static Map<String, String> names = new HashMap<String, String>();

    static {
        names.put("Mac OS X", "Darwin");
    }

    public GetOSType(GlobalObject globalObject) {
        super(globalObject);
    }

    @Override
    public Object call(ExecutionContext context, Object self, Object... args) {
        String name = System.getProperty("os.name");
        if (names.get(name) != null) {
            return names.get(name);
        }
        return name;
    }
}
Use sigar for uname.
src/main/java/org/projectodd/nodej/bindings/os/GetOSType.java
Use sigar for uname.
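The commit replaces System.getProperty("os.name") with Sigar's SysInfo so that the binding reports uname-style names. The sketch below mirrors only the name normalization shared by both versions of GetOSType; the OsName class and its main() method are illustrative, not part of nodyn.

import java.util.HashMap;
import java.util.Map;

public final class OsName {

    private static final Map<String, String> NAMES = new HashMap<String, String>();

    static {
        NAMES.put("MacOSX", "Darwin");   // name reported by Sigar's SysInfo
        NAMES.put("Mac OS X", "Darwin"); // name reported by System.getProperty("os.name")
    }

    private OsName() {
    }

    /** Returns a uname-like OS name for the given raw platform name. */
    public static String normalize(String rawName) {
        String mapped = NAMES.get(rawName);
        return mapped != null ? mapped : rawName;
    }

    public static void main(String[] args) {
        // Prints e.g. "Darwin" on a Mac, "Linux" on Linux.
        System.out.println(normalize(System.getProperty("os.name")));
    }
}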
Java
apache-2.0
cb7b02d4e50e91b9377410e79c3578587a900d94
0
mittop/vaadin,shahrzadmn/vaadin,udayinfy/vaadin,fireflyc/vaadin,shahrzadmn/vaadin,oalles/vaadin,udayinfy/vaadin,oalles/vaadin,travisfw/vaadin,mittop/vaadin,peterl1084/framework,mstahv/framework,shahrzadmn/vaadin,travisfw/vaadin,Legioth/vaadin,sitexa/vaadin,Scarlethue/vaadin,peterl1084/framework,fireflyc/vaadin,synes/vaadin,kironapublic/vaadin,kironapublic/vaadin,udayinfy/vaadin,bmitc/vaadin,sitexa/vaadin,oalles/vaadin,cbmeeks/vaadin,Scarlethue/vaadin,mstahv/framework,jdahlstrom/vaadin.react,fireflyc/vaadin,asashour/framework,travisfw/vaadin,jdahlstrom/vaadin.react,jdahlstrom/vaadin.react,Peppe/vaadin,bmitc/vaadin,carrchang/vaadin,mstahv/framework,sitexa/vaadin,udayinfy/vaadin,Darsstar/framework,shahrzadmn/vaadin,asashour/framework,peterl1084/framework,Legioth/vaadin,Scarlethue/vaadin,Flamenco/vaadin,carrchang/vaadin,kironapublic/vaadin,oalles/vaadin,udayinfy/vaadin,Legioth/vaadin,travisfw/vaadin,fireflyc/vaadin,bmitc/vaadin,travisfw/vaadin,Peppe/vaadin,carrchang/vaadin,Peppe/vaadin,peterl1084/framework,Flamenco/vaadin,shahrzadmn/vaadin,Darsstar/framework,mstahv/framework,synes/vaadin,sitexa/vaadin,synes/vaadin,jdahlstrom/vaadin.react,asashour/framework,peterl1084/framework,Legioth/vaadin,Peppe/vaadin,kironapublic/vaadin,Scarlethue/vaadin,carrchang/vaadin,Darsstar/framework,oalles/vaadin,synes/vaadin,Legioth/vaadin,magi42/vaadin,Flamenco/vaadin,mittop/vaadin,magi42/vaadin,kironapublic/vaadin,cbmeeks/vaadin,synes/vaadin,cbmeeks/vaadin,bmitc/vaadin,Peppe/vaadin,mstahv/framework,asashour/framework,magi42/vaadin,cbmeeks/vaadin,fireflyc/vaadin,mittop/vaadin,asashour/framework,magi42/vaadin,Darsstar/framework,jdahlstrom/vaadin.react,Flamenco/vaadin,Scarlethue/vaadin,sitexa/vaadin,magi42/vaadin,Darsstar/framework
/* * Copyright 2000-2013 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.client; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import com.google.gwt.core.client.Duration; import com.google.gwt.core.client.JsArrayString; import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.Style; import com.google.gwt.dom.client.Style.Overflow; import com.google.gwt.user.client.Timer; import com.vaadin.client.MeasuredSize.MeasureResult; import com.vaadin.client.ui.ManagedLayout; import com.vaadin.client.ui.PostLayoutListener; import com.vaadin.client.ui.SimpleManagedLayout; import com.vaadin.client.ui.VNotification; import com.vaadin.client.ui.layout.ElementResizeEvent; import com.vaadin.client.ui.layout.ElementResizeListener; import com.vaadin.client.ui.layout.LayoutDependencyTree; public class LayoutManager { private static final String LOOP_ABORT_MESSAGE = "Aborting layout after 100 passes. This would probably be an infinite loop."; private static final boolean debugLogging = false; private ApplicationConnection connection; private final Set<Element> measuredNonConnectorElements = new HashSet<Element>(); private final MeasuredSize nullSize = new MeasuredSize(); private LayoutDependencyTree currentDependencyTree; private FastStringSet needsHorizontalLayout = FastStringSet.create(); private FastStringSet needsVerticalLayout = FastStringSet.create(); private FastStringSet needsMeasure = FastStringSet.create(); private FastStringSet pendingOverflowFixes = FastStringSet.create(); private final Map<Element, Collection<ElementResizeListener>> elementResizeListeners = new HashMap<Element, Collection<ElementResizeListener>>(); private final Set<Element> listenersToFire = new HashSet<Element>(); private boolean layoutPending = false; private Timer layoutTimer = new Timer() { @Override public void run() { layoutNow(); } }; private boolean everythingNeedsMeasure = false; public void setConnection(ApplicationConnection connection) { if (this.connection != null) { throw new RuntimeException( "LayoutManager connection can never be changed"); } this.connection = connection; } /** * Gets the layout manager associated with the given * {@link ApplicationConnection}. * * @param connection * the application connection to get a layout manager for * @return the layout manager associated with the provided application * connection */ public static LayoutManager get(ApplicationConnection connection) { return connection.getLayoutManager(); } /** * Registers that a ManagedLayout is depending on the size of an Element. * This causes this layout manager to measure the element in the beginning * of every layout phase and call the appropriate layout method of the * managed layout if the size of the element has changed. 
* * @param owner * the ManagedLayout that depends on an element * @param element * the Element that should be measured */ public void registerDependency(ManagedLayout owner, Element element) { MeasuredSize measuredSize = ensureMeasured(element); setNeedsLayout(owner); measuredSize.addDependent(owner.getConnectorId()); } private MeasuredSize ensureMeasured(Element element) { MeasuredSize measuredSize = getMeasuredSize(element, null); if (measuredSize == null) { measuredSize = new MeasuredSize(); if (ConnectorMap.get(connection).getConnector(element) == null) { measuredNonConnectorElements.add(element); } setMeasuredSize(element, measuredSize); } return measuredSize; } private boolean needsMeasure(Element e) { ComponentConnector connector = connection.getConnectorMap() .getConnector(e); if (connector != null && needsMeasureForManagedLayout(connector)) { return true; } else if (elementResizeListeners.containsKey(e)) { return true; } else if (getMeasuredSize(e, nullSize).hasDependents()) { return true; } else { return false; } } private boolean needsMeasureForManagedLayout(ComponentConnector connector) { if (connector instanceof ManagedLayout) { return true; } else if (connector.getParent() instanceof ManagedLayout) { return true; } else { return false; } } /** * Assigns a measured size to an element. Method defined as protected to * allow separate implementation for IE8. * * @param element * the dom element to attach the measured size to * @param measuredSize * the measured size to attach to the element. If * <code>null</code>, any previous measured size is removed. */ protected native void setMeasuredSize(Element element, MeasuredSize measuredSize) /*-{ if (measuredSize) { element.vMeasuredSize = measuredSize; } else { delete element.vMeasuredSize; } }-*/; /** * Gets the measured size for an element. Method defined as protected to * allow separate implementation for IE8. * * @param element * The element to get measured size for * @param defaultSize * The size to return if no measured size could be found * @return The measured size for the element or {@literal defaultSize} */ protected native MeasuredSize getMeasuredSize(Element element, MeasuredSize defaultSize) /*-{ return element.vMeasuredSize || defaultSize; }-*/; private final MeasuredSize getMeasuredSize(ComponentConnector connector) { Element element = connector.getWidget().getElement(); MeasuredSize measuredSize = getMeasuredSize(element, null); if (measuredSize == null) { measuredSize = new MeasuredSize(); setMeasuredSize(element, measuredSize); } return measuredSize; } /** * Registers that a ManagedLayout is no longer depending on the size of an * Element. 
* * @see #registerDependency(ManagedLayout, Element) * * @param owner * the ManagedLayout no longer depends on an element * @param element * the Element that that no longer needs to be measured */ public void unregisterDependency(ManagedLayout owner, Element element) { MeasuredSize measuredSize = getMeasuredSize(element, null); if (measuredSize == null) { return; } measuredSize.removeDependent(owner.getConnectorId()); stopMeasuringIfUnecessary(element); } public boolean isLayoutRunning() { return currentDependencyTree != null; } private void countLayout(FastStringMap<Integer> layoutCounts, ManagedLayout layout) { Integer count = layoutCounts.get(layout.getConnectorId()); if (count == null) { count = Integer.valueOf(0); } else { count = Integer.valueOf(count.intValue() + 1); } layoutCounts.put(layout.getConnectorId(), count); if (count.intValue() > 2) { VConsole.error(Util.getConnectorString(layout) + " has been layouted " + count.intValue() + " times"); } } public void layoutLater() { if (!layoutPending) { layoutPending = true; layoutTimer.schedule(100); } } public void layoutNow() { if (isLayoutRunning()) { throw new IllegalStateException( "Can't start a new layout phase before the previous layout phase ends."); } layoutPending = false; layoutTimer.cancel(); try { currentDependencyTree = new LayoutDependencyTree(connection); doLayout(); } finally { currentDependencyTree = null; } } /** * Called once per iteration in the layout loop before size calculations so * different browsers quirks can be handled. Mainly this is currently for * the IE8 permutation. */ protected void performBrowserLayoutHacks() { // Permutations implement this } private void doLayout() { VConsole.log("Starting layout phase"); Profiler.enter("LayoutManager phase init"); FastStringMap<Integer> layoutCounts = FastStringMap.create(); int passes = 0; Duration totalDuration = new Duration(); ConnectorMap connectorMap = ConnectorMap.get(connection); JsArrayString dump = needsHorizontalLayout.dump(); int dumpLength = dump.length(); for (int i = 0; i < dumpLength; i++) { String layoutId = dump.get(i); currentDependencyTree.setNeedsHorizontalLayout(layoutId, true); } dump = needsVerticalLayout.dump(); dumpLength = dump.length(); for (int i = 0; i < dumpLength; i++) { String layoutId = dump.get(i); currentDependencyTree.setNeedsVerticalLayout(layoutId, true); } needsHorizontalLayout = FastStringSet.create(); needsVerticalLayout = FastStringSet.create(); dump = needsMeasure.dump(); dumpLength = dump.length(); for (int i = 0; i < dumpLength; i++) { String layoutId = dump.get(i); currentDependencyTree.setNeedsMeasure(layoutId, true); } needsMeasure = FastStringSet.create(); measureNonConnectors(); Profiler.leave("LayoutManager phase init"); while (true) { Profiler.enter("Layout pass"); passes++; performBrowserLayoutHacks(); Profiler.enter("Layout measure connectors"); int measuredConnectorCount = measureConnectors( currentDependencyTree, everythingNeedsMeasure); Profiler.leave("Layout measure connectors"); everythingNeedsMeasure = false; if (measuredConnectorCount == 0) { VConsole.log("No more changes in pass " + passes); Profiler.leave("Layout pass"); break; } int firedListeners = 0; if (!listenersToFire.isEmpty()) { firedListeners = listenersToFire.size(); Profiler.enter("Layout fire resize events"); for (Element element : listenersToFire) { Collection<ElementResizeListener> listeners = elementResizeListeners .get(element); if (listeners != null) { ElementResizeListener[] array = listeners .toArray(new 
ElementResizeListener[listeners .size()]); ElementResizeEvent event = new ElementResizeEvent(this, element); for (ElementResizeListener listener : array) { try { String key = null; if (Profiler.isEnabled()) { key = "ElementReizeListener.onElementReize for " + Util.getSimpleName(listener); Profiler.enter(key); } listener.onElementResize(event); if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } } } } listenersToFire.clear(); Profiler.leave("Layout fire resize events"); } Profiler.enter("LayoutManager handle ManagedLayout"); FastStringSet updatedSet = FastStringSet.create(); int layoutCount = 0; while (currentDependencyTree.hasHorizontalConnectorToLayout() || currentDependencyTree.hasVerticaConnectorToLayout()) { JsArrayString layoutTargets = currentDependencyTree .getHorizontalLayoutTargetsJsArray(); int length = layoutTargets.length(); for (int i = 0; i < length; i++) { ManagedLayout layout = (ManagedLayout) connectorMap .getConnector(layoutTargets.get(i)); if (layout instanceof DirectionalManagedLayout) { currentDependencyTree .markAsHorizontallyLayouted(layout); DirectionalManagedLayout cl = (DirectionalManagedLayout) layout; try { String key = null; if (Profiler.isEnabled()) { key = "layoutHorizontally() for " + Util.getSimpleName(cl); Profiler.enter(key); } cl.layoutHorizontally(); layoutCount++; if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } countLayout(layoutCounts, cl); } else { currentDependencyTree .markAsHorizontallyLayouted(layout); currentDependencyTree.markAsVerticallyLayouted(layout); SimpleManagedLayout rr = (SimpleManagedLayout) layout; try { String key = null; if (Profiler.isEnabled()) { key = "layout() for " + Util.getSimpleName(rr); Profiler.enter(key); } rr.layout(); layoutCount++; if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } countLayout(layoutCounts, rr); } if (debugLogging) { updatedSet.add(layout.getConnectorId()); } } layoutTargets = currentDependencyTree .getVerticalLayoutTargetsJsArray(); length = layoutTargets.length(); for (int i = 0; i < length; i++) { ManagedLayout layout = (ManagedLayout) connectorMap .getConnector(layoutTargets.get(i)); if (layout instanceof DirectionalManagedLayout) { currentDependencyTree.markAsVerticallyLayouted(layout); DirectionalManagedLayout cl = (DirectionalManagedLayout) layout; try { String key = null; if (Profiler.isEnabled()) { key = "layoutHorizontally() for " + Util.getSimpleName(cl); Profiler.enter(key); } cl.layoutVertically(); layoutCount++; if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } countLayout(layoutCounts, cl); } else { currentDependencyTree .markAsHorizontallyLayouted(layout); currentDependencyTree.markAsVerticallyLayouted(layout); SimpleManagedLayout rr = (SimpleManagedLayout) layout; try { String key = null; if (Profiler.isEnabled()) { key = "layout() for " + Util.getSimpleName(rr); Profiler.enter(key); } rr.layout(); layoutCount++; if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } countLayout(layoutCounts, rr); } if (debugLogging) { updatedSet.add(layout.getConnectorId()); } } } Profiler.leave("LayoutManager handle ManagedLayout"); if (debugLogging) { JsArrayString changedCids = updatedSet.dump(); StringBuilder b = new StringBuilder(" "); b.append(changedCids.length()); b.append(" requestLayout invocations "); if (changedCids.length() < 30) { 
for (int i = 0; i < changedCids.length(); i++) { if (i != 0) { b.append(", "); } else { b.append(": "); } String connectorString = changedCids.get(i); if (changedCids.length() < 10) { ServerConnector connector = ConnectorMap.get( connection).getConnector(connectorString); connectorString = Util .getConnectorString(connector); } b.append(connectorString); } } VConsole.log(b.toString()); } Profiler.leave("Layout pass"); VConsole.log("Pass " + passes + " measured " + measuredConnectorCount + " elements, fired " + firedListeners + " listeners and did " + layoutCount + " layouts."); if (passes > 100) { VConsole.log(LOOP_ABORT_MESSAGE); if (ApplicationConfiguration.isDebugMode()) { VNotification.createNotification( VNotification.DELAY_FOREVER, connection.getUIConnector().getWidget()) .show(LOOP_ABORT_MESSAGE, VNotification.CENTERED, "error"); } break; } } Profiler.enter("layout PostLayoutListener"); JsArrayObject<ComponentConnector> componentConnectors = connectorMap .getComponentConnectorsAsJsArray(); int size = componentConnectors.size(); for (int i = 0; i < size; i++) { ComponentConnector connector = componentConnectors.get(i); if (connector instanceof PostLayoutListener) { String key = null; if (Profiler.isEnabled()) { key = "layout PostLayoutListener for " + Util.getSimpleName(connector); Profiler.enter(key); } ((PostLayoutListener) connector).postLayout(); if (Profiler.isEnabled()) { Profiler.leave(key); } } } Profiler.leave("layout PostLayoutListener"); cleanMeasuredSizes(); VConsole.log("Total layout phase time: " + totalDuration.elapsedMillis() + "ms"); } private void logConnectorStatus(int connectorId) { currentDependencyTree .logDependencyStatus((ComponentConnector) ConnectorMap.get( connection).getConnector(Integer.toString(connectorId))); } private int measureConnectors(LayoutDependencyTree layoutDependencyTree, boolean measureAll) { Profiler.enter("Layout overflow fix handling"); JsArrayString pendingOverflowConnectorsIds = pendingOverflowFixes .dump(); int pendingOverflowCount = pendingOverflowConnectorsIds.length(); ConnectorMap connectorMap = ConnectorMap.get(connection); if (pendingOverflowCount > 0) { HashMap<Element, String> originalOverflows = new HashMap<Element, String>(); FastStringSet delayedOverflowFixes = FastStringSet.create(); // First set overflow to hidden (and save previous value so it can // be restored later) for (int i = 0; i < pendingOverflowCount; i++) { String connectorId = pendingOverflowConnectorsIds.get(i); ComponentConnector componentConnector = (ComponentConnector) connectorMap .getConnector(connectorId); // Delay the overflow fix if the involved connectors might still // change boolean connectorChangesExpected = !currentDependencyTree .noMoreChangesExpected(componentConnector); boolean parentChangesExcpected = componentConnector.getParent() instanceof ComponentConnector && !currentDependencyTree .noMoreChangesExpected((ComponentConnector) componentConnector .getParent()); if (connectorChangesExpected || parentChangesExcpected) { delayedOverflowFixes.add(connectorId); continue; } if (debugLogging) { VConsole.log("Doing overflow fix for " + Util.getConnectorString(componentConnector) + " in " + Util.getConnectorString(componentConnector .getParent())); } Profiler.enter("Overflow fix apply"); Element parentElement = componentConnector.getWidget() .getElement().getParentElement(); Style style = parentElement.getStyle(); String originalOverflow = style.getOverflow(); if (originalOverflow != null && !originalOverflows.containsKey(parentElement)) { // Store 
original value for restore, but only the first time // the value is changed originalOverflows.put(parentElement, originalOverflow); } style.setOverflow(Overflow.HIDDEN); Profiler.leave("Overflow fix apply"); } pendingOverflowFixes.removeAll(delayedOverflowFixes); JsArrayString remainingOverflowFixIds = pendingOverflowFixes.dump(); int remainingCount = remainingOverflowFixIds.length(); Profiler.enter("Overflow fix reflow"); // Then ensure all scrolling elements are reflowed by measuring for (int i = 0; i < remainingCount; i++) { ComponentConnector componentConnector = (ComponentConnector) connectorMap .getConnector(remainingOverflowFixIds.get(i)); componentConnector.getWidget().getElement().getParentElement() .getOffsetHeight(); } Profiler.leave("Overflow fix reflow"); Profiler.enter("Overflow fix restore"); // Finally restore old overflow value and update bookkeeping for (int i = 0; i < remainingCount; i++) { String connectorId = remainingOverflowFixIds.get(i); ComponentConnector componentConnector = (ComponentConnector) connectorMap .getConnector(connectorId); Element parentElement = componentConnector.getWidget() .getElement().getParentElement(); parentElement.getStyle().setProperty("overflow", originalOverflows.get(parentElement)); layoutDependencyTree.setNeedsMeasure(connectorId, true); } Profiler.leave("Overflow fix restore"); if (!pendingOverflowFixes.isEmpty()) { VConsole.log("Did overflow fix for " + remainingCount + " elements"); } pendingOverflowFixes = delayedOverflowFixes; } Profiler.leave("Layout overflow fix handling"); int measureCount = 0; if (measureAll) { Profiler.enter("Layout measureAll"); JsArrayObject<ComponentConnector> allConnectors = connectorMap .getComponentConnectorsAsJsArray(); int size = allConnectors.size(); // Find connectors that should actually be measured JsArrayObject<ComponentConnector> connectors = JsArrayObject .createArray().cast(); for (int i = 0; i < size; i++) { ComponentConnector candidate = allConnectors.get(i); if (needsMeasure(candidate.getWidget().getElement())) { connectors.add(candidate); } } int connectorCount = connectors.size(); for (int i = 0; i < connectorCount; i++) { measureConnector(connectors.get(i)); } for (int i = 0; i < connectorCount; i++) { layoutDependencyTree.setNeedsMeasure(connectors.get(i) .getConnectorId(), false); } measureCount += connectorCount; Profiler.leave("Layout measureAll"); } Profiler.enter("Layout measure from tree"); while (layoutDependencyTree.hasConnectorsToMeasure()) { JsArrayString measureTargets = layoutDependencyTree .getMeasureTargetsJsArray(); int length = measureTargets.length(); for (int i = 0; i < length; i++) { ComponentConnector connector = (ComponentConnector) connectorMap .getConnector(measureTargets.get(i)); measureConnector(connector); measureCount++; } for (int i = 0; i < length; i++) { String connectorId = measureTargets.get(i); layoutDependencyTree.setNeedsMeasure(connectorId, false); } } Profiler.leave("Layout measure from tree"); return measureCount; } private void measureConnector(ComponentConnector connector) { Profiler.enter("LayoutManager.measureConnector"); Element element = connector.getWidget().getElement(); MeasuredSize measuredSize = getMeasuredSize(connector); MeasureResult measureResult = measuredAndUpdate(element, measuredSize); if (measureResult.isChanged()) { onConnectorChange(connector, measureResult.isWidthChanged(), measureResult.isHeightChanged()); } Profiler.leave("LayoutManager.measureConnector"); } private void onConnectorChange(ComponentConnector connector, boolean 
widthChanged, boolean heightChanged) { Profiler.enter("LayoutManager.onConnectorChange"); setNeedsOverflowFix(connector); if (heightChanged) { currentDependencyTree.markHeightAsChanged(connector); } if (widthChanged) { currentDependencyTree.markWidthAsChanged(connector); } Profiler.leave("LayoutManager.onConnectorChange"); } private void setNeedsOverflowFix(ComponentConnector connector) { // IE9 doesn't need the original fix, but for some reason it needs this if (BrowserInfo.get().requiresOverflowAutoFix() || BrowserInfo.get().isIE9()) { ComponentConnector scrollingBoundary = currentDependencyTree .getScrollingBoundary(connector); if (scrollingBoundary != null) { pendingOverflowFixes.add(scrollingBoundary.getConnectorId()); } } } private void measureNonConnectors() { Profiler.enter("LayoutManager.measureNonConenctors"); for (Element element : measuredNonConnectorElements) { measuredAndUpdate(element, getMeasuredSize(element, null)); } Profiler.leave("LayoutManager.measureNonConenctors"); VConsole.log("Measured " + measuredNonConnectorElements.size() + " non connector elements"); } private MeasureResult measuredAndUpdate(Element element, MeasuredSize measuredSize) { MeasureResult measureResult = measuredSize.measure(element); if (measureResult.isChanged()) { notifyListenersAndDepdendents(element, measureResult.isWidthChanged(), measureResult.isHeightChanged()); } return measureResult; } private void notifyListenersAndDepdendents(Element element, boolean widthChanged, boolean heightChanged) { assert widthChanged || heightChanged; Profiler.enter("LayoutManager.notifyListenersAndDepdendents"); MeasuredSize measuredSize = getMeasuredSize(element, nullSize); JsArrayString dependents = measuredSize.getDependents(); for (int i = 0; i < dependents.length(); i++) { String pid = dependents.get(i); if (pid != null) { if (heightChanged) { currentDependencyTree.setNeedsVerticalLayout(pid, true); } if (widthChanged) { currentDependencyTree.setNeedsHorizontalLayout(pid, true); } } } if (elementResizeListeners.containsKey(element)) { listenersToFire.add(element); } Profiler.leave("LayoutManager.notifyListenersAndDepdendents"); } private static boolean isManagedLayout(ComponentConnector connector) { return connector instanceof ManagedLayout; } public void forceLayout() { ConnectorMap connectorMap = connection.getConnectorMap(); JsArrayObject<ComponentConnector> componentConnectors = connectorMap .getComponentConnectorsAsJsArray(); int size = componentConnectors.size(); for (int i = 0; i < size; i++) { ComponentConnector connector = componentConnectors.get(i); if (connector instanceof ManagedLayout) { setNeedsLayout((ManagedLayout) connector); } } setEverythingNeedsMeasure(); layoutNow(); } /** * Marks that a ManagedLayout should be layouted in the next layout phase * even if none of the elements managed by the layout have been resized. * * @param layout * the managed layout that should be layouted */ public final void setNeedsLayout(ManagedLayout layout) { setNeedsHorizontalLayout(layout); setNeedsVerticalLayout(layout); } /** * Marks that a ManagedLayout should be layouted horizontally in the next * layout phase even if none of the elements managed by the layout have been * resized horizontally. * * For SimpleManagedLayout which is always layouted in both directions, this * has the same effect as {@link #setNeedsLayout(ManagedLayout)}. 
* * @param layout * the managed layout that should be layouted */ public final void setNeedsHorizontalLayout(ManagedLayout layout) { assert isAttached(layout); needsHorizontalLayout.add(layout.getConnectorId()); } /** * Marks that a ManagedLayout should be layouted vertically in the next * layout phase even if none of the elements managed by the layout have been * resized vertically. * * For SimpleManagedLayout which is always layouted in both directions, this * has the same effect as {@link #setNeedsLayout(ManagedLayout)}. * * @param layout * the managed layout that should be layouted */ public final void setNeedsVerticalLayout(ManagedLayout layout) { assert isAttached(layout); needsVerticalLayout.add(layout.getConnectorId()); } private boolean isAttached(ServerConnector connector) { while (connector != null) { connector = connector.getParent(); if (connector == connection.getUIConnector()) { return true; } } // Reaching null parent before reaching UI connector -> not attached return false; } /** * Gets the outer height (including margins, paddings and borders) of the * given element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * -1 is returned if the element has not been measured. If 0 is returned, it * might indicate that the element is not attached to the DOM. * * @param element * the element to get the measured size for * @return the measured outer height (including margins, paddings and * borders) of the element in pixels. */ public final int getOuterHeight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getOuterHeight(); } /** * Gets the outer width (including margins, paddings and borders) of the * given element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * -1 is returned if the element has not been measured. If 0 is returned, it * might indicate that the element is not attached to the DOM. * * @param element * the element to get the measured size for * @return the measured outer width (including margins, paddings and * borders) of the element in pixels. */ public final int getOuterWidth(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getOuterWidth(); } /** * Gets the inner height (excluding margins, paddings and borders) of the * given element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * -1 is returned if the element has not been measured. If 0 is returned, it * might indicate that the element is not attached to the DOM. * * @param element * the element to get the measured size for * @return the measured inner height (excluding margins, paddings and * borders) of the element in pixels. 
*/ public final int getInnerHeight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getInnerHeight(); } /** * Gets the inner width (excluding margins, paddings and borders) of the * given element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * -1 is returned if the element has not been measured. If 0 is returned, it * might indicate that the element is not attached to the DOM. * * @param element * the element to get the measured size for * @return the measured inner width (excluding margins, paddings and * borders) of the element in pixels. */ public final int getInnerWidth(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getInnerWidth(); } /** * Gets the border height (top border + bottom border) of the given element, * provided that it has been measured. These elements are guaranteed to be * measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured border height (top border + bottom border) of the * element in pixels. */ public final int getBorderHeight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getBorderHeight(); } /** * Gets the padding height (top padding + bottom padding) of the given * element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured padding height (top padding + bottom padding) of the * element in pixels. */ public int getPaddingHeight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingHeight(); } /** * Gets the border width (left border + right border) of the given element, * provided that it has been measured. These elements are guaranteed to be * measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. 
* * @param element * the element to get the measured size for * @return the measured border width (left border + right border) of the * element in pixels. */ public int getBorderWidth(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getBorderWidth(); } /** * Gets the padding width (left padding + right padding) of the given * element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured padding width (left padding + right padding) of the * element in pixels. */ public int getPaddingWidth(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingWidth(); } /** * Gets the top padding of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured top padding of the element in pixels. */ public int getPaddingTop(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingTop(); } /** * Gets the left padding of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured left padding of the element in pixels. */ public int getPaddingLeft(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingLeft(); } /** * Gets the bottom padding of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured bottom padding of the element in pixels. 
*/ public int getPaddingBottom(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingBottom(); } /** * Gets the right padding of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured right padding of the element in pixels. */ public int getPaddingRight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingRight(); } /** * Gets the top margin of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured top margin of the element in pixels. */ public int getMarginTop(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getMarginTop(); } /** * Gets the right margin of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured right margin of the element in pixels. */ public int getMarginRight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getMarginRight(); } /** * Gets the bottom margin of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured bottom margin of the element in pixels. 
*/ public int getMarginBottom(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getMarginBottom(); } /** * Gets the left margin of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured left margin of the element in pixels. */ public int getMarginLeft(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getMarginLeft(); } /** * Gets the combined top & bottom margin of the given element, provided that * they have been measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured margin for * @return the measured top+bottom margin of the element in pixels. */ public int getMarginHeight(Element element) { return getMarginTop(element) + getMarginBottom(element); } /** * Gets the combined left & right margin of the given element, provided that * they have been measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured margin for * @return the measured left+right margin of the element in pixels. */ public int getMarginWidth(Element element) { return getMarginLeft(element) + getMarginRight(element); } /** * Registers the outer height (including margins, borders and paddings) of a * component. This can be used as an optimization by ManagedLayouts; by * informing the LayoutManager about what size a component will have, the * layout propagation can continue directly without first measuring the * potentially resized elements. 
* * @param component * the component for which the size is reported * @param outerHeight * the new outer height (including margins, borders and paddings) * of the component in pixels */ public void reportOuterHeight(ComponentConnector component, int outerHeight) { MeasuredSize measuredSize = getMeasuredSize(component); if (isLayoutRunning()) { boolean heightChanged = measuredSize.setOuterHeight(outerHeight); if (heightChanged) { onConnectorChange(component, false, true); notifyListenersAndDepdendents(component.getWidget() .getElement(), false, true); } currentDependencyTree.setNeedsVerticalMeasure(component, false); } else if (measuredSize.getOuterHeight() != outerHeight) { setNeedsMeasure(component); } } /** * Registers the height reserved for a relatively sized component. This can * be used as an optimization by ManagedLayouts; by informing the * LayoutManager about what size a component will have, the layout * propagation can continue directly without first measuring the potentially * resized elements. * * @param component * the relatively sized component for which the size is reported * @param assignedHeight * the inner height of the relatively sized component's parent * element in pixels */ public void reportHeightAssignedToRelative(ComponentConnector component, int assignedHeight) { assert component.isRelativeHeight(); float percentSize = parsePercent(component.getState().height == null ? "" : component.getState().height); int effectiveHeight = Math.round(assignedHeight * (percentSize / 100)); reportOuterHeight(component, effectiveHeight); } /** * Registers the width reserved for a relatively sized component. This can * be used as an optimization by ManagedLayouts; by informing the * LayoutManager about what size a component will have, the layout * propagation can continue directly without first measuring the potentially * resized elements. * * @param component * the relatively sized component for which the size is reported * @param assignedWidth * the inner width of the relatively sized component's parent * element in pixels */ public void reportWidthAssignedToRelative(ComponentConnector component, int assignedWidth) { assert component.isRelativeWidth(); float percentSize = parsePercent(component.getState().width == null ? "" : component.getState().width); int effectiveWidth = Math.round(assignedWidth * (percentSize / 100)); reportOuterWidth(component, effectiveWidth); } private static float parsePercent(String size) { return Float.parseFloat(size.substring(0, size.length() - 1)); } /** * Registers the outer width (including margins, borders and paddings) of a * component. This can be used as an optimization by ManagedLayouts; by * informing the LayoutManager about what size a component will have, the * layout propagation can continue directly without first measuring the * potentially resized elements. 
* * @param component * the component for which the size is reported * @param outerWidth * the new outer width (including margins, borders and paddings) * of the component in pixels */ public void reportOuterWidth(ComponentConnector component, int outerWidth) { MeasuredSize measuredSize = getMeasuredSize(component); if (isLayoutRunning()) { boolean widthChanged = measuredSize.setOuterWidth(outerWidth); if (widthChanged) { onConnectorChange(component, true, false); notifyListenersAndDepdendents(component.getWidget() .getElement(), true, false); } currentDependencyTree.setNeedsHorizontalMeasure(component, false); } else if (measuredSize.getOuterWidth() != outerWidth) { setNeedsMeasure(component); } } /** * Adds a listener that will be notified whenever the size of a specific * element changes. Adding a listener to an element also ensures that all * sizes for that element will be available starting from the next layout * phase. * * @param element * the element that should be checked for size changes * @param listener * an ElementResizeListener that will be informed whenever the * size of the target element has changed */ public void addElementResizeListener(Element element, ElementResizeListener listener) { Collection<ElementResizeListener> listeners = elementResizeListeners .get(element); if (listeners == null) { listeners = new HashSet<ElementResizeListener>(); elementResizeListeners.put(element, listeners); ensureMeasured(element); } listeners.add(listener); } /** * Removes an element resize listener from the provided element. This might * cause this LayoutManager to stop tracking the size of the element if no * other sources are interested in the size. * * @param element * the element to which the element resize listener was * previously added * @param listener * the ElementResizeListener that should no longer get informed * about size changes to the target element. */ public void removeElementResizeListener(Element element, ElementResizeListener listener) { Collection<ElementResizeListener> listeners = elementResizeListeners .get(element); if (listeners != null) { listeners.remove(listener); if (listeners.isEmpty()) { elementResizeListeners.remove(element); stopMeasuringIfUnecessary(element); } } } private void stopMeasuringIfUnecessary(Element element) { if (!needsMeasure(element)) { measuredNonConnectorElements.remove(element); setMeasuredSize(element, null); } } /** * Informs this LayoutManager that the size of a component might have * changed. If there is no upcoming layout phase, a new layout phase is * scheduled. This method should be used whenever a size might have changed * from outside of Vaadin's normal update phase, e.g. when an icon has been * loaded or when the user resizes some part of the UI using the mouse. * * @param component * the component whose size might have changed. */ public void setNeedsMeasure(ComponentConnector component) { if (isLayoutRunning()) { currentDependencyTree.setNeedsMeasure(component, true); } else { needsMeasure.add(component.getConnectorId()); layoutLater(); } } public void setEverythingNeedsMeasure() { everythingNeedsMeasure = true; } /** * Clean measured sizes which are no longer needed. Only for IE8. */ protected void cleanMeasuredSizes() { } }
client/src/com/vaadin/client/LayoutManager.java
/* * Copyright 2000-2013 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.client; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import com.google.gwt.core.client.Duration; import com.google.gwt.core.client.JsArrayString; import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.Style; import com.google.gwt.dom.client.Style.Overflow; import com.google.gwt.user.client.Timer; import com.vaadin.client.MeasuredSize.MeasureResult; import com.vaadin.client.ui.ManagedLayout; import com.vaadin.client.ui.PostLayoutListener; import com.vaadin.client.ui.SimpleManagedLayout; import com.vaadin.client.ui.VNotification; import com.vaadin.client.ui.layout.ElementResizeEvent; import com.vaadin.client.ui.layout.ElementResizeListener; import com.vaadin.client.ui.layout.LayoutDependencyTree; public class LayoutManager { private static final String LOOP_ABORT_MESSAGE = "Aborting layout after 100 passes. This would probably be an infinite loop."; private static final boolean debugLogging = false; private ApplicationConnection connection; private final Set<Element> measuredNonConnectorElements = new HashSet<Element>(); private final MeasuredSize nullSize = new MeasuredSize(); private LayoutDependencyTree currentDependencyTree; private FastStringSet needsHorizontalLayout = FastStringSet.create(); private FastStringSet needsVerticalLayout = FastStringSet.create(); private FastStringSet needsMeasure = FastStringSet.create(); private FastStringSet pendingOverflowFixes = FastStringSet.create(); private final Map<Element, Collection<ElementResizeListener>> elementResizeListeners = new HashMap<Element, Collection<ElementResizeListener>>(); private final Set<Element> listenersToFire = new HashSet<Element>(); private boolean layoutPending = false; private Timer layoutTimer = new Timer() { @Override public void run() { layoutNow(); } }; private boolean everythingNeedsMeasure = false; public void setConnection(ApplicationConnection connection) { if (this.connection != null) { throw new RuntimeException( "LayoutManager connection can never be changed"); } this.connection = connection; } /** * Gets the layout manager associated with the given * {@link ApplicationConnection}. * * @param connection * the application connection to get a layout manager for * @return the layout manager associated with the provided application * connection */ public static LayoutManager get(ApplicationConnection connection) { return connection.getLayoutManager(); } /** * Registers that a ManagedLayout is depending on the size of an Element. * This causes this layout manager to measure the element in the beginning * of every layout phase and call the appropriate layout method of the * managed layout if the size of the element has changed. 
* * @param owner * the ManagedLayout that depends on an element * @param element * the Element that should be measured */ public void registerDependency(ManagedLayout owner, Element element) { MeasuredSize measuredSize = ensureMeasured(element); setNeedsLayout(owner); measuredSize.addDependent(owner.getConnectorId()); } private MeasuredSize ensureMeasured(Element element) { MeasuredSize measuredSize = getMeasuredSize(element, null); if (measuredSize == null) { measuredSize = new MeasuredSize(); if (ConnectorMap.get(connection).getConnector(element) == null) { measuredNonConnectorElements.add(element); } setMeasuredSize(element, measuredSize); } return measuredSize; } private boolean needsMeasure(Element e) { ComponentConnector connector = connection.getConnectorMap() .getConnector(e); if (connector != null && needsMeasureForManagedLayout(connector)) { return true; } else if (elementResizeListeners.containsKey(e)) { return true; } else if (getMeasuredSize(e, nullSize).hasDependents()) { return true; } else { return false; } } private boolean needsMeasureForManagedLayout(ComponentConnector connector) { if (connector instanceof ManagedLayout) { return true; } else if (connector.getParent() instanceof ManagedLayout) { return true; } else { return false; } } /** * Assigns a measured size to an element. Method defined as protected to * allow separate implementation for IE8. * * @param element * the dom element to attach the measured size to * @param measuredSize * the measured size to attach to the element. If * <code>null</code>, any previous measured size is removed. */ protected native void setMeasuredSize(Element element, MeasuredSize measuredSize) /*-{ if (measuredSize) { element.vMeasuredSize = measuredSize; } else { delete element.vMeasuredSize; } }-*/; /** * Gets the measured size for an element. Method defined as protected to * allow separate implementation for IE8. * * @param element * The element to get measured size for * @param defaultSize * The size to return if no measured size could be found * @return The measured size for the element or {@literal defaultSize} */ protected native MeasuredSize getMeasuredSize(Element element, MeasuredSize defaultSize) /*-{ return element.vMeasuredSize || defaultSize; }-*/; private final MeasuredSize getMeasuredSize(ComponentConnector connector) { Element element = connector.getWidget().getElement(); MeasuredSize measuredSize = getMeasuredSize(element, null); if (measuredSize == null) { measuredSize = new MeasuredSize(); setMeasuredSize(element, measuredSize); } return measuredSize; } /** * Registers that a ManagedLayout is no longer depending on the size of an * Element. 
* * @see #registerDependency(ManagedLayout, Element) * * @param owner * the ManagedLayout no longer depends on an element * @param element * the Element that that no longer needs to be measured */ public void unregisterDependency(ManagedLayout owner, Element element) { MeasuredSize measuredSize = getMeasuredSize(element, null); if (measuredSize == null) { return; } measuredSize.removeDependent(owner.getConnectorId()); stopMeasuringIfUnecessary(element); } public boolean isLayoutRunning() { return currentDependencyTree != null; } private void countLayout(FastStringMap<Integer> layoutCounts, ManagedLayout layout) { Integer count = layoutCounts.get(layout.getConnectorId()); if (count == null) { count = Integer.valueOf(0); } else { count = Integer.valueOf(count.intValue() + 1); } layoutCounts.put(layout.getConnectorId(), count); if (count.intValue() > 2) { VConsole.error(Util.getConnectorString(layout) + " has been layouted " + count.intValue() + " times"); } } public void layoutLater() { if (!layoutPending) { layoutPending = true; layoutTimer.schedule(100); } } public void layoutNow() { if (isLayoutRunning()) { throw new IllegalStateException( "Can't start a new layout phase before the previous layout phase ends."); } layoutPending = false; layoutTimer.cancel(); try { currentDependencyTree = new LayoutDependencyTree(connection); doLayout(); } finally { currentDependencyTree = null; } } /** * Called once per iteration in the layout loop before size calculations so * different browsers quirks can be handled. Mainly this is currently for * the IE8 permutation. */ protected void performBrowserLayoutHacks() { // Permutations implement this } private void doLayout() { VConsole.log("Starting layout phase"); Profiler.enter("LayoutManager phase init"); FastStringMap<Integer> layoutCounts = FastStringMap.create(); int passes = 0; Duration totalDuration = new Duration(); ConnectorMap connectorMap = ConnectorMap.get(connection); JsArrayString dump = needsHorizontalLayout.dump(); int dumpLength = dump.length(); for (int i = 0; i < dumpLength; i++) { String layoutId = dump.get(i); currentDependencyTree.setNeedsHorizontalLayout(layoutId, true); } dump = needsVerticalLayout.dump(); dumpLength = dump.length(); for (int i = 0; i < dumpLength; i++) { String layoutId = dump.get(i); currentDependencyTree.setNeedsVerticalLayout(layoutId, true); } needsHorizontalLayout = FastStringSet.create(); needsVerticalLayout = FastStringSet.create(); dump = needsMeasure.dump(); dumpLength = dump.length(); for (int i = 0; i < dumpLength; i++) { String layoutId = dump.get(i); currentDependencyTree.setNeedsMeasure(layoutId, true); } needsMeasure = FastStringSet.create(); measureNonConnectors(); Profiler.leave("LayoutManager phase init"); while (true) { Profiler.enter("Layout pass"); passes++; performBrowserLayoutHacks(); Profiler.enter("Layout measure connectors"); int measuredConnectorCount = measureConnectors( currentDependencyTree, everythingNeedsMeasure); Profiler.leave("Layout measure connectors"); everythingNeedsMeasure = false; if (measuredConnectorCount == 0) { VConsole.log("No more changes in pass " + passes); Profiler.leave("Layout pass"); break; } int firedListeners = 0; if (!listenersToFire.isEmpty()) { firedListeners = listenersToFire.size(); Profiler.enter("Layout fire resize events"); for (Element element : listenersToFire) { Collection<ElementResizeListener> listeners = elementResizeListeners .get(element); if (listeners != null) { ElementResizeListener[] array = listeners .toArray(new 
ElementResizeListener[listeners .size()]); ElementResizeEvent event = new ElementResizeEvent(this, element); for (ElementResizeListener listener : array) { try { String key = null; if (Profiler.isEnabled()) { key = "ElementReizeListener.onElementReize for " + Util.getSimpleName(listener); Profiler.enter(key); } listener.onElementResize(event); if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } } } } listenersToFire.clear(); Profiler.leave("Layout fire resize events"); } Profiler.enter("LayoutManager handle ManagedLayout"); FastStringSet updatedSet = FastStringSet.create(); int layoutCount = 0; while (currentDependencyTree.hasHorizontalConnectorToLayout() || currentDependencyTree.hasVerticaConnectorToLayout()) { JsArrayString layoutTargets = currentDependencyTree .getHorizontalLayoutTargetsJsArray(); int length = layoutTargets.length(); for (int i = 0; i < length; i++) { ManagedLayout layout = (ManagedLayout) connectorMap .getConnector(layoutTargets.get(i)); if (layout instanceof DirectionalManagedLayout) { currentDependencyTree .markAsHorizontallyLayouted(layout); DirectionalManagedLayout cl = (DirectionalManagedLayout) layout; try { String key = null; if (Profiler.isEnabled()) { key = "layoutHorizontally() for " + Util.getSimpleName(cl); Profiler.enter(key); } cl.layoutHorizontally(); layoutCount++; if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } countLayout(layoutCounts, cl); } else { currentDependencyTree .markAsHorizontallyLayouted(layout); currentDependencyTree.markAsVerticallyLayouted(layout); SimpleManagedLayout rr = (SimpleManagedLayout) layout; try { String key = null; if (Profiler.isEnabled()) { key = "layout() for " + Util.getSimpleName(rr); Profiler.enter(key); } rr.layout(); layoutCount++; if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } countLayout(layoutCounts, rr); } if (debugLogging) { updatedSet.add(layout.getConnectorId()); } } layoutTargets = currentDependencyTree .getVerticalLayoutTargetsJsArray(); length = layoutTargets.length(); for (int i = 0; i < length; i++) { ManagedLayout layout = (ManagedLayout) connectorMap .getConnector(layoutTargets.get(i)); if (layout instanceof DirectionalManagedLayout) { currentDependencyTree.markAsVerticallyLayouted(layout); DirectionalManagedLayout cl = (DirectionalManagedLayout) layout; try { String key = null; if (Profiler.isEnabled()) { key = "layoutHorizontally() for " + Util.getSimpleName(cl); Profiler.enter(key); } cl.layoutVertically(); layoutCount++; if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } countLayout(layoutCounts, cl); } else { currentDependencyTree .markAsHorizontallyLayouted(layout); currentDependencyTree.markAsVerticallyLayouted(layout); SimpleManagedLayout rr = (SimpleManagedLayout) layout; try { String key = null; if (Profiler.isEnabled()) { key = "layout() for " + Util.getSimpleName(rr); Profiler.enter(key); } rr.layout(); layoutCount++; if (Profiler.isEnabled()) { Profiler.leave(key); } } catch (RuntimeException e) { VConsole.error(e); } countLayout(layoutCounts, rr); } if (debugLogging) { updatedSet.add(layout.getConnectorId()); } } } Profiler.leave("LayoutManager handle ManagedLayout"); if (debugLogging) { JsArrayString changedCids = updatedSet.dump(); StringBuilder b = new StringBuilder(" "); b.append(changedCids.length()); b.append(" requestLayout invocations "); if (changedCids.length() < 30) { 
for (int i = 0; i < changedCids.length(); i++) { if (i != 0) { b.append(", "); } else { b.append(": "); } String connectorString = changedCids.get(i); if (changedCids.length() < 10) { ServerConnector connector = ConnectorMap.get( connection).getConnector(connectorString); connectorString = Util .getConnectorString(connector); } b.append(connectorString); } } VConsole.log(b.toString()); } Profiler.leave("Layout pass"); VConsole.log("Pass " + passes + " measured " + measuredConnectorCount + " elements, fired " + firedListeners + " listeners and did " + layoutCount + " layouts."); if (passes > 100) { VConsole.log(LOOP_ABORT_MESSAGE); if (ApplicationConfiguration.isDebugMode()) { VNotification.createNotification( VNotification.DELAY_FOREVER, connection.getUIConnector().getWidget()) .show(LOOP_ABORT_MESSAGE, VNotification.CENTERED, "error"); } break; } } Profiler.enter("layout PostLayoutListener"); JsArrayObject<ComponentConnector> componentConnectors = connectorMap .getComponentConnectorsAsJsArray(); int size = componentConnectors.size(); for (int i = 0; i < size; i++) { ComponentConnector connector = componentConnectors.get(i); if (connector instanceof PostLayoutListener) { String key = null; if (Profiler.isEnabled()) { key = "layout PostLayoutListener for " + Util.getSimpleName(connector); Profiler.enter(key); } ((PostLayoutListener) connector).postLayout(); if (Profiler.isEnabled()) { Profiler.leave(key); } } } Profiler.leave("layout PostLayoutListener"); cleanMeasuredSizes(); VConsole.log("Total layout phase time: " + totalDuration.elapsedMillis() + "ms"); } private void logConnectorStatus(int connectorId) { currentDependencyTree .logDependencyStatus((ComponentConnector) ConnectorMap.get( connection).getConnector(Integer.toString(connectorId))); } private int measureConnectors(LayoutDependencyTree layoutDependencyTree, boolean measureAll) { Profiler.enter("Layout overflow fix handling"); JsArrayString pendingOverflowConnectorsIds = pendingOverflowFixes .dump(); int pendingOverflowCount = pendingOverflowConnectorsIds.length(); ConnectorMap connectorMap = ConnectorMap.get(connection); if (pendingOverflowCount > 0) { HashMap<Element, String> originalOverflows = new HashMap<Element, String>(); FastStringSet delayedOverflowFixes = FastStringSet.create(); // First set overflow to hidden (and save previous value so it can // be restored later) for (int i = 0; i < pendingOverflowCount; i++) { String connectorId = pendingOverflowConnectorsIds.get(i); ComponentConnector componentConnector = (ComponentConnector) connectorMap .getConnector(connectorId); // Delay the overflow fix if the involved connectors might still // change boolean connectorChangesExpected = !currentDependencyTree .noMoreChangesExpected(componentConnector); boolean parentChangesExcpected = componentConnector.getParent() instanceof ComponentConnector && !currentDependencyTree .noMoreChangesExpected((ComponentConnector) componentConnector .getParent()); if (connectorChangesExpected || parentChangesExcpected) { delayedOverflowFixes.add(connectorId); continue; } if (debugLogging) { VConsole.log("Doing overflow fix for " + Util.getConnectorString(componentConnector) + " in " + Util.getConnectorString(componentConnector .getParent())); } Profiler.enter("Overflow fix apply"); Element parentElement = componentConnector.getWidget() .getElement().getParentElement(); Style style = parentElement.getStyle(); String originalOverflow = style.getOverflow(); if (originalOverflow != null && !originalOverflows.containsKey(parentElement)) { // Store 
original value for restore, but only the first time // the value is changed originalOverflows.put(parentElement, originalOverflow); } style.setOverflow(Overflow.HIDDEN); Profiler.leave("Overflow fix apply"); } pendingOverflowFixes.removeAll(delayedOverflowFixes); JsArrayString remainingOverflowFixIds = pendingOverflowFixes.dump(); int remainingCount = remainingOverflowFixIds.length(); Profiler.enter("Overflow fix reflow"); // Then ensure all scrolling elements are reflowed by measuring for (int i = 0; i < remainingCount; i++) { ComponentConnector componentConnector = (ComponentConnector) connectorMap .getConnector(remainingOverflowFixIds.get(i)); componentConnector.getWidget().getElement().getParentElement() .getOffsetHeight(); } Profiler.leave("Overflow fix reflow"); Profiler.enter("Overflow fix restore"); // Finally restore old overflow value and update bookkeeping for (int i = 0; i < remainingCount; i++) { String connectorId = remainingOverflowFixIds.get(i); ComponentConnector componentConnector = (ComponentConnector) connectorMap .getConnector(connectorId); Element parentElement = componentConnector.getWidget() .getElement().getParentElement(); parentElement.getStyle().setProperty("overflow", originalOverflows.get(parentElement)); layoutDependencyTree.setNeedsMeasure(connectorId, true); } Profiler.leave("Overflow fix restore"); if (!pendingOverflowFixes.isEmpty()) { VConsole.log("Did overflow fix for " + remainingCount + " elements"); } pendingOverflowFixes = delayedOverflowFixes; } Profiler.leave("Layout overflow fix handling"); int measureCount = 0; if (measureAll) { Profiler.enter("Layout measureAll"); JsArrayObject<ComponentConnector> allConnectors = connectorMap .getComponentConnectorsAsJsArray(); int size = allConnectors.size(); // Find connectors that should actually be measured JsArrayObject<ComponentConnector> connectors = JsArrayObject .createArray().cast(); for (int i = 0; i < size; i++) { ComponentConnector candidate = allConnectors.get(i); if (needsMeasure(candidate.getWidget().getElement())) { connectors.add(candidate); } } int connectorCount = connectors.size(); for (int i = 0; i < connectorCount; i++) { measureConnector(connectors.get(i)); } for (int i = 0; i < connectorCount; i++) { layoutDependencyTree.setNeedsMeasure(connectors.get(i) .getConnectorId(), false); } measureCount += connectorCount; Profiler.leave("Layout measureAll"); } Profiler.enter("Layout measure from tree"); while (layoutDependencyTree.hasConnectorsToMeasure()) { JsArrayString measureTargets = layoutDependencyTree .getMeasureTargetsJsArray(); int length = measureTargets.length(); for (int i = 0; i < length; i++) { ComponentConnector connector = (ComponentConnector) connectorMap .getConnector(measureTargets.get(i)); measureConnector(connector); measureCount++; } for (int i = 0; i < length; i++) { String connectorId = measureTargets.get(i); layoutDependencyTree.setNeedsMeasure(connectorId, false); } } Profiler.leave("Layout measure from tree"); return measureCount; } private void measureConnector(ComponentConnector connector) { Profiler.enter("LayoutManager.measureConnector"); Element element = connector.getWidget().getElement(); MeasuredSize measuredSize = getMeasuredSize(connector); MeasureResult measureResult = measuredAndUpdate(element, measuredSize); if (measureResult.isChanged()) { onConnectorChange(connector, measureResult.isWidthChanged(), measureResult.isHeightChanged()); } Profiler.leave("LayoutManager.measureConnector"); } private void onConnectorChange(ComponentConnector connector, boolean 
widthChanged, boolean heightChanged) { Profiler.enter("LayoutManager.onConnectorChange"); setNeedsOverflowFix(connector); if (heightChanged) { currentDependencyTree.markHeightAsChanged(connector); } if (widthChanged) { currentDependencyTree.markWidthAsChanged(connector); } Profiler.leave("LayoutManager.onConnectorChange"); } private void setNeedsOverflowFix(ComponentConnector connector) { // IE9 doesn't need the original fix, but for some reason it needs this if (BrowserInfo.get().requiresOverflowAutoFix() || BrowserInfo.get().isIE9()) { ComponentConnector scrollingBoundary = currentDependencyTree .getScrollingBoundary(connector); if (scrollingBoundary != null) { pendingOverflowFixes.add(scrollingBoundary.getConnectorId()); } } } private void measureNonConnectors() { Profiler.enter("LayoutManager.measureNonConenctors"); for (Element element : measuredNonConnectorElements) { measuredAndUpdate(element, getMeasuredSize(element, null)); } Profiler.leave("LayoutManager.measureNonConenctors"); VConsole.log("Measured " + measuredNonConnectorElements.size() + " non connector elements"); } private MeasureResult measuredAndUpdate(Element element, MeasuredSize measuredSize) { MeasureResult measureResult = measuredSize.measure(element); if (measureResult.isChanged()) { notifyListenersAndDepdendents(element, measureResult.isWidthChanged(), measureResult.isHeightChanged()); } return measureResult; } private void notifyListenersAndDepdendents(Element element, boolean widthChanged, boolean heightChanged) { assert widthChanged || heightChanged; Profiler.enter("LayoutManager.notifyListenersAndDepdendents"); MeasuredSize measuredSize = getMeasuredSize(element, nullSize); JsArrayString dependents = measuredSize.getDependents(); for (int i = 0; i < dependents.length(); i++) { String pid = dependents.get(i); if (pid != null) { if (heightChanged) { currentDependencyTree.setNeedsVerticalLayout(pid, true); } if (widthChanged) { currentDependencyTree.setNeedsHorizontalLayout(pid, true); } } } if (elementResizeListeners.containsKey(element)) { listenersToFire.add(element); } Profiler.leave("LayoutManager.notifyListenersAndDepdendents"); } private static boolean isManagedLayout(ComponentConnector connector) { return connector instanceof ManagedLayout; } public void forceLayout() { ConnectorMap connectorMap = connection.getConnectorMap(); JsArrayObject<ComponentConnector> componentConnectors = connectorMap .getComponentConnectorsAsJsArray(); int size = componentConnectors.size(); for (int i = 0; i < size; i++) { ComponentConnector connector = componentConnectors.get(i); if (connector instanceof ManagedLayout) { setNeedsLayout((ManagedLayout) connector); } } setEverythingNeedsMeasure(); layoutNow(); } /** * Marks that a ManagedLayout should be layouted in the next layout phase * even if none of the elements managed by the layout have been resized. * * @param layout * the managed layout that should be layouted */ public final void setNeedsLayout(ManagedLayout layout) { setNeedsHorizontalLayout(layout); setNeedsVerticalLayout(layout); } /** * Marks that a ManagedLayout should be layouted horizontally in the next * layout phase even if none of the elements managed by the layout have been * resized horizontally. * * For SimpleManagedLayout which is always layouted in both directions, this * has the same effect as {@link #setNeedsLayout(ManagedLayout)}. 
* * @param layout * the managed layout that should be layouted */ public final void setNeedsHorizontalLayout(ManagedLayout layout) { needsHorizontalLayout.add(layout.getConnectorId()); } /** * Marks that a ManagedLayout should be layouted vertically in the next * layout phase even if none of the elements managed by the layout have been * resized vertically. * * For SimpleManagedLayout which is always layouted in both directions, this * has the same effect as {@link #setNeedsLayout(ManagedLayout)}. * * @param layout * the managed layout that should be layouted */ public final void setNeedsVerticalLayout(ManagedLayout layout) { needsVerticalLayout.add(layout.getConnectorId()); } /** * Gets the outer height (including margins, paddings and borders) of the * given element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * -1 is returned if the element has not been measured. If 0 is returned, it * might indicate that the element is not attached to the DOM. * * @param element * the element to get the measured size for * @return the measured outer height (including margins, paddings and * borders) of the element in pixels. */ public final int getOuterHeight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getOuterHeight(); } /** * Gets the outer width (including margins, paddings and borders) of the * given element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * -1 is returned if the element has not been measured. If 0 is returned, it * might indicate that the element is not attached to the DOM. * * @param element * the element to get the measured size for * @return the measured outer width (including margins, paddings and * borders) of the element in pixels. */ public final int getOuterWidth(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getOuterWidth(); } /** * Gets the inner height (excluding margins, paddings and borders) of the * given element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * -1 is returned if the element has not been measured. If 0 is returned, it * might indicate that the element is not attached to the DOM. * * @param element * the element to get the measured size for * @return the measured inner height (excluding margins, paddings and * borders) of the element in pixels. */ public final int getInnerHeight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getInnerHeight(); } /** * Gets the inner width (excluding margins, paddings and borders) of the * given element, provided that it has been measured. 
These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * -1 is returned if the element has not been measured. If 0 is returned, it * might indicate that the element is not attached to the DOM. * * @param element * the element to get the measured size for * @return the measured inner width (excluding margins, paddings and * borders) of the element in pixels. */ public final int getInnerWidth(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getInnerWidth(); } /** * Gets the border height (top border + bottom border) of the given element, * provided that it has been measured. These elements are guaranteed to be * measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured border height (top border + bottom border) of the * element in pixels. */ public final int getBorderHeight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getBorderHeight(); } /** * Gets the padding height (top padding + bottom padding) of the given * element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured padding height (top padding + bottom padding) of the * element in pixels. */ public int getPaddingHeight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingHeight(); } /** * Gets the border width (left border + right border) of the given element, * provided that it has been measured. These elements are guaranteed to be * measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured border width (left border + right border) of the * element in pixels. 
*/ public int getBorderWidth(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getBorderWidth(); } /** * Gets the padding width (left padding + right padding) of the given * element, provided that it has been measured. These elements are * guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured padding width (left padding + right padding) of the * element in pixels. */ public int getPaddingWidth(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingWidth(); } /** * Gets the top padding of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured top padding of the element in pixels. */ public int getPaddingTop(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingTop(); } /** * Gets the left padding of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured left padding of the element in pixels. */ public int getPaddingLeft(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingLeft(); } /** * Gets the bottom padding of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured bottom padding of the element in pixels. 
*/ public int getPaddingBottom(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingBottom(); } /** * Gets the right padding of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured right padding of the element in pixels. */ public int getPaddingRight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getPaddingRight(); } /** * Gets the top margin of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured top margin of the element in pixels. */ public int getMarginTop(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getMarginTop(); } /** * Gets the right margin of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured right margin of the element in pixels. */ public int getMarginRight(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getMarginRight(); } /** * Gets the bottom margin of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured bottom margin of the element in pixels. 
*/ public int getMarginBottom(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getMarginBottom(); } /** * Gets the left margin of the given element, provided that it has been * measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured size for * @return the measured left margin of the element in pixels. */ public int getMarginLeft(Element element) { assert needsMeasure(element) : "Getting measurement for element that is not measured"; return getMeasuredSize(element, nullSize).getMarginLeft(); } /** * Gets the combined top & bottom margin of the given element, provided that * they have been measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured margin for * @return the measured top+bottom margin of the element in pixels. */ public int getMarginHeight(Element element) { return getMarginTop(element) + getMarginBottom(element); } /** * Gets the combined left & right margin of the given element, provided that * they have been measured. These elements are guaranteed to be measured: * <ul> * <li>ManagedLayotus and their child Connectors * <li>Elements for which there is at least one ElementResizeListener * <li>Elements for which at least one ManagedLayout has registered a * dependency * </ul> * * A negative number is returned if the element has not been measured. If 0 * is returned, it might indicate that the element is not attached to the * DOM. * * @param element * the element to get the measured margin for * @return the measured left+right margin of the element in pixels. */ public int getMarginWidth(Element element) { return getMarginLeft(element) + getMarginRight(element); } /** * Registers the outer height (including margins, borders and paddings) of a * component. This can be used as an optimization by ManagedLayouts; by * informing the LayoutManager about what size a component will have, the * layout propagation can continue directly without first measuring the * potentially resized elements. 
* * @param component * the component for which the size is reported * @param outerHeight * the new outer height (including margins, borders and paddings) * of the component in pixels */ public void reportOuterHeight(ComponentConnector component, int outerHeight) { MeasuredSize measuredSize = getMeasuredSize(component); if (isLayoutRunning()) { boolean heightChanged = measuredSize.setOuterHeight(outerHeight); if (heightChanged) { onConnectorChange(component, false, true); notifyListenersAndDepdendents(component.getWidget() .getElement(), false, true); } currentDependencyTree.setNeedsVerticalMeasure(component, false); } else if (measuredSize.getOuterHeight() != outerHeight) { setNeedsMeasure(component); } } /** * Registers the height reserved for a relatively sized component. This can * be used as an optimization by ManagedLayouts; by informing the * LayoutManager about what size a component will have, the layout * propagation can continue directly without first measuring the potentially * resized elements. * * @param component * the relatively sized component for which the size is reported * @param assignedHeight * the inner height of the relatively sized component's parent * element in pixels */ public void reportHeightAssignedToRelative(ComponentConnector component, int assignedHeight) { assert component.isRelativeHeight(); float percentSize = parsePercent(component.getState().height == null ? "" : component.getState().height); int effectiveHeight = Math.round(assignedHeight * (percentSize / 100)); reportOuterHeight(component, effectiveHeight); } /** * Registers the width reserved for a relatively sized component. This can * be used as an optimization by ManagedLayouts; by informing the * LayoutManager about what size a component will have, the layout * propagation can continue directly without first measuring the potentially * resized elements. * * @param component * the relatively sized component for which the size is reported * @param assignedWidth * the inner width of the relatively sized component's parent * element in pixels */ public void reportWidthAssignedToRelative(ComponentConnector component, int assignedWidth) { assert component.isRelativeWidth(); float percentSize = parsePercent(component.getState().width == null ? "" : component.getState().width); int effectiveWidth = Math.round(assignedWidth * (percentSize / 100)); reportOuterWidth(component, effectiveWidth); } private static float parsePercent(String size) { return Float.parseFloat(size.substring(0, size.length() - 1)); } /** * Registers the outer width (including margins, borders and paddings) of a * component. This can be used as an optimization by ManagedLayouts; by * informing the LayoutManager about what size a component will have, the * layout propagation can continue directly without first measuring the * potentially resized elements. 
* * @param component * the component for which the size is reported * @param outerWidth * the new outer width (including margins, borders and paddings) * of the component in pixels */ public void reportOuterWidth(ComponentConnector component, int outerWidth) { MeasuredSize measuredSize = getMeasuredSize(component); if (isLayoutRunning()) { boolean widthChanged = measuredSize.setOuterWidth(outerWidth); if (widthChanged) { onConnectorChange(component, true, false); notifyListenersAndDepdendents(component.getWidget() .getElement(), true, false); } currentDependencyTree.setNeedsHorizontalMeasure(component, false); } else if (measuredSize.getOuterWidth() != outerWidth) { setNeedsMeasure(component); } } /** * Adds a listener that will be notified whenever the size of a specific * element changes. Adding a listener to an element also ensures that all * sizes for that element will be available starting from the next layout * phase. * * @param element * the element that should be checked for size changes * @param listener * an ElementResizeListener that will be informed whenever the * size of the target element has changed */ public void addElementResizeListener(Element element, ElementResizeListener listener) { Collection<ElementResizeListener> listeners = elementResizeListeners .get(element); if (listeners == null) { listeners = new HashSet<ElementResizeListener>(); elementResizeListeners.put(element, listeners); ensureMeasured(element); } listeners.add(listener); } /** * Removes an element resize listener from the provided element. This might * cause this LayoutManager to stop tracking the size of the element if no * other sources are interested in the size. * * @param element * the element to which the element resize listener was * previously added * @param listener * the ElementResizeListener that should no longer get informed * about size changes to the target element. */ public void removeElementResizeListener(Element element, ElementResizeListener listener) { Collection<ElementResizeListener> listeners = elementResizeListeners .get(element); if (listeners != null) { listeners.remove(listener); if (listeners.isEmpty()) { elementResizeListeners.remove(element); stopMeasuringIfUnecessary(element); } } } private void stopMeasuringIfUnecessary(Element element) { if (!needsMeasure(element)) { measuredNonConnectorElements.remove(element); setMeasuredSize(element, null); } } /** * Informs this LayoutManager that the size of a component might have * changed. If there is no upcoming layout phase, a new layout phase is * scheduled. This method should be used whenever a size might have changed * from outside of Vaadin's normal update phase, e.g. when an icon has been * loaded or when the user resizes some part of the UI using the mouse. * * @param component * the component whose size might have changed. */ public void setNeedsMeasure(ComponentConnector component) { if (isLayoutRunning()) { currentDependencyTree.setNeedsMeasure(component, true); } else { needsMeasure.add(component.getConnectorId()); layoutLater(); } } public void setEverythingNeedsMeasure() { everythingNeedsMeasure = true; } /** * Clean measured sizes which are no longer needed. Only for IE8. */ protected void cleanMeasuredSizes() { } }
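For the ManagedLayout side of the API (layout callbacks combined with getInnerWidth/getInnerHeight), a schematic connector sketch follows. It is not part of this change: MyConnector and its SimplePanel widget are hypothetical, and the base classes AbstractComponentConnector and SimpleManagedLayout come from the Vaadin framework rather than from this file, so treat this as a sketch under those assumptions.

import com.google.gwt.user.client.ui.SimplePanel;
import com.vaadin.client.LayoutManager;
import com.vaadin.client.ui.AbstractComponentConnector;
import com.vaadin.client.ui.SimpleManagedLayout;

public class MyConnector extends AbstractComponentConnector implements SimpleManagedLayout {

    @Override
    public SimplePanel getWidget() {
        return (SimplePanel) super.getWidget();
    }

    @Override
    public void layout() {
        // Invoked by LayoutManager during the layout phase; because this connector
        // is a ManagedLayout, its own element is always measured.
        LayoutManager lm = LayoutManager.get(getConnection());

        int availableWidth = lm.getInnerWidth(getWidget().getElement());
        int availableHeight = lm.getInnerHeight(getWidget().getElement());

        // Size the single child to fill the space inside borders and paddings.
        if (getWidget().getWidget() != null) {
            getWidget().getWidget().setPixelSize(availableWidth, availableHeight);
        }
    }
}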
Assert that connector needing layout is attached (#11698) Change-Id: Ie2d1ec330b857497c7673f3805e35b452f409f3c
client/src/com/vaadin/client/LayoutManager.java
Assert that connector needing layout is attached (#11698)
Java
apache-2.0
96c339615feede9c86798ccb9df96e986e19882c
0
MatthewTamlin/Spyglass
package com.matthewtamlin.spyglass.processors.annotation_utils; import com.matthewtamlin.java_utilities.testing.Tested; import java.lang.annotation.Annotation; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.Element; import static com.matthewtamlin.java_utilities.checkers.NullChecker.checkNotNull; import static com.matthewtamlin.spyglass.processors.core.AnnotationRegistry.CALL_HANDLER_ANNOTATIONS; @Tested(testMethod = "automated") public class CallHandlerAnnotationUtil { public static boolean hasCallHandlerAnnotation(final Element element) { checkNotNull(element, "Argument \'element\' cannot be null."); for (final Class<? extends Annotation> a : CALL_HANDLER_ANNOTATIONS) { if (element.getAnnotation(a) != null) { return true; } } return false; } private CallHandlerAnnotationUtil() { throw new RuntimeException("Utility class. Do not instantiate."); } }
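A possible usage sketch for the remaining helper: filtering candidate elements inside an annotation-processor round. CallHandlerFilter and the way the candidate list is obtained are illustrative assumptions, not code from this repository.

import java.util.ArrayList;
import java.util.List;

import javax.lang.model.element.Element;

import static com.matthewtamlin.spyglass.processors.annotation_utils.CallHandlerAnnotationUtil.hasCallHandlerAnnotation;

public class CallHandlerFilter {
    // Returns only the elements that carry one of the registered call-handler annotations.
    public static List<Element> filter(final List<? extends Element> candidates) {
        final List<Element> result = new ArrayList<>();

        for (final Element candidate : candidates) {
            if (hasCallHandlerAnnotation(candidate)) {
                result.add(candidate);
            }
        }

        return result;
    }
}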
processors/src/main/java/com/matthewtamlin/spyglass/processors/annotation_utils/CallHandlerAnnotationUtil.java
package com.matthewtamlin.spyglass.processors.annotation_utils; import com.matthewtamlin.java_utilities.testing.Tested; import java.lang.annotation.Annotation; import javax.lang.model.element.Element; import static com.matthewtamlin.java_utilities.checkers.NullChecker.checkNotNull; import static com.matthewtamlin.spyglass.processors.core.AnnotationRegistry.CALL_HANDLER_ANNOTATIONS; @Tested(testMethod = "automated") public class CallHandlerAnnotationUtil { public static Annotation getCallHandlerAnnotation(final Element element) { checkNotNull(element, "Argument \'element \' cannot be null."); for (final Class<? extends Annotation> a : CALL_HANDLER_ANNOTATIONS) { if (element.getAnnotation(a) != null) { return element.getAnnotation(a); } } return null; } public static boolean hasCallHandlerAnnotation(final Element element) { checkNotNull(element, "Argument \'element\' cannot be null."); for (final Class<? extends Annotation> a : CALL_HANDLER_ANNOTATIONS) { if (element.getAnnotation(a) != null) { return true; } } return false; } private CallHandlerAnnotationUtil() { throw new RuntimeException("Utility class. Do not instantiate."); } }
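The version above still contains getCallHandlerAnnotation, which this commit removes. A caller that still needs the concrete annotation instance could inline the same lookup against CALL_HANDLER_ANNOTATIONS; the sketch below is illustrative only, and InlineCallHandlerLookup is not part of the repository.

import java.lang.annotation.Annotation;

import javax.lang.model.element.Element;

import static com.matthewtamlin.spyglass.processors.core.AnnotationRegistry.CALL_HANDLER_ANNOTATIONS;

public class InlineCallHandlerLookup {
    // Returns the first call-handler annotation found on the element, or null if none is present.
    public static Annotation findCallHandlerAnnotation(final Element element) {
        for (final Class<? extends Annotation> annotationClass : CALL_HANDLER_ANNOTATIONS) {
            final Annotation found = element.getAnnotation(annotationClass);

            if (found != null) {
                return found;
            }
        }

        return null;
    }
}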
Deleted method getCallHandlerAnnotation
processors/src/main/java/com/matthewtamlin/spyglass/processors/annotation_utils/CallHandlerAnnotationUtil.java
Deleted method getCallHandlerAnnotation
Java
apache-2.0
6d21bb9ca4dbdc7342829c91e6f8b27c19d80106
0
bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud
package com.planet_ink.coffee_mud.Abilities.Druid; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2002-2020 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class Chant_AnimalSpy extends Chant { @Override public String ID() { return "Chant_AnimalSpy"; } private final static String localizedName = CMLib.lang().L("Animal Spy"); @Override public String name() { return localizedName; } private final static String localizedStaticDisplay = CMLib.lang().L("(Animal Spy)"); @Override public String displayText() { return localizedStaticDisplay; } @Override public int abstractQuality() { return Ability.QUALITY_OK_OTHERS; } @Override public int classificationCode() { return Ability.ACODE_CHANT | Ability.DOMAIN_ANIMALAFFINITY; } protected MOB spy = null; protected boolean disable = false; @Override public boolean tick(final Tickable ticking, final int tickID) { if(!super.tick(ticking,tickID)) return false; if((tickID==Tickable.TICKID_MOB) &&(affected==spy)) { if(spy.amDead() ||(spy.amFollowing()!=invoker) ||(!CMLib.flags().isInTheGame(spy,false))) unInvoke(); } return true; } @Override public void unInvoke() { // undo the affects of this spell if(!(affected instanceof MOB)) return; if(canBeUninvoked()) { if(invoker!=null) { final Ability A=invoker.fetchEffect(this.ID()); if(A!=null) invoker.delEffect(A); invoker.tell(L("Your connection with '@x1' fades.",spy.name())); } } super.unInvoke(); } @Override public void executeMsg(final Environmental myHost, final CMMsg msg) { try { super.executeMsg(myHost,msg); if(spy==null) return; if(invoker==null) return; if((msg.amISource(spy)) &&(affected==spy) &&((msg.sourceMinor()==CMMsg.TYP_LOOK)||(msg.sourceMinor()==CMMsg.TYP_EXAMINE)) &&(msg.target()!=null) &&((invoker.location()!=spy.location())||(!(msg.target() instanceof Room)))) { disable=true; final CMMsg newAffect=CMClass.getMsg(invoker,msg.target(),msg.sourceMinor(),null); msg.target().executeMsg(invoker,newAffect); } else if((!msg.amISource(invoker)) &&(invoker.location()!=spy.location()) &&(affected==spy) &&(msg.source().location()==spy.location()) &&(msg.othersCode()!=CMMsg.NO_EFFECT) &&(msg.othersMessage()!=null) &&(!disable)) { disable=true; invoker.executeMsg(invoker,msg); } else if(msg.amISource(invoker) &&(!disable) &&(affected==invoker) 
&&(msg.sourceMinor()==CMMsg.TYP_SPEAK) &&(msg.sourceMessage()!=null) &&((msg.sourceMajor()&CMMsg.MASK_MAGIC)==0)) { final String msg2=CMStrings.getSayFromMessage(msg.sourceMessage()); if((msg2!=null)&&(msg2.length()>0)) spy.enqueCommand(CMParms.parse(msg2.trim()),MUDCmdProcessor.METAFLAG_FORCED,0); } } finally { disable=false; if((spy!=null)&&((spy.amFollowing()!=invoker) ||(spy.amDead()) ||(!CMLib.flags().isInTheGame(spy,false)) ||(!CMLib.flags().isInTheGame(invoker,true)))) unInvoke(); } } @Override public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { if(commands.size()<1) { mob.tell(L("Chant to whom?")); return false; } final String mobName=CMParms.combine(commands,0).trim().toUpperCase(); final MOB target=getTarget(mob,commands,givenTarget); Room newRoom=mob.location(); if(target!=null) { newRoom=target.location(); if((!CMLib.flags().isAnimalIntelligence(target)) ||(target.amFollowing()!=mob)) { mob.tell(L("You have no animal follower named '@x1' here.",mobName)); return false; } } else { mob.tell(L("You have no animal follower named '@x1' here.",mobName)); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> chant(s) to <T-NAMESELF>, invoking the a mystical connection.^?")); final CMMsg msg2=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),null); if((mob.location().okMessage(mob,msg))&&((newRoom==mob.location())||(newRoom.okMessage(mob,msg2)))) { mob.location().send(mob,msg); if(newRoom!=mob.location()) newRoom.send(target,msg2); spy=target; spy.setAttribute(MOB.Attrib.AUTOEXITS, true); beneficialAffect(mob,spy,asLevel,0); final Ability A=spy.fetchEffect(ID()); if(A!=null) { mob.addNonUninvokableEffect((Ability)A.copyOf()); A.setAffectedOne(spy); } } } else beneficialVisualFizzle(mob,target,L("<S-NAME> chant(s) to <T-NAMESELF>, but the magic fades.")); // return whether it worked return success; } }
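The fix above hinges on the new affected==spy and affected==invoker guards: the chant installs two copies of the same effect, one on the spy and one on the invoker, and before this change both copies could react to the same message, which matches the "dup msg" described in the commit. Below is a plain-Java illustration of that guard pattern; it uses no CoffeeMud classes and all names are hypothetical.

public class TwoHostEffectDemo {

    // Minimal stand-in for an effect that exists as two copies, each attached to a
    // different host ("spy" or "invoker").
    static final class SpyEffect {
        private final String affectedHost;

        SpyEffect(String affectedHost) {
            this.affectedHost = affectedHost;
        }

        void executeMsg(String roomMessage) {
            // Guard: only the copy hosted on the spy relays room messages.
            // Without this check both copies would relay the message once each.
            if (!"spy".equals(affectedHost)) {
                return;
            }
            System.out.println("relayed to invoker: " + roomMessage);
        }
    }

    public static void main(String[] args) {
        SpyEffect copyOnSpy = new SpyEffect("spy");
        SpyEffect copyOnInvoker = new SpyEffect("invoker");

        // Both copies see every message in the spy's room, but only one acts on it,
        // so the invoker receives the text exactly once.
        copyOnSpy.executeMsg("A rabbit hops by.");
        copyOnInvoker.executeMsg("A rabbit hops by.");
    }
}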
com/planet_ink/coffee_mud/Abilities/Druid/Chant_AnimalSpy.java
package com.planet_ink.coffee_mud.Abilities.Druid; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2002-2020 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class Chant_AnimalSpy extends Chant { @Override public String ID() { return "Chant_AnimalSpy"; } private final static String localizedName = CMLib.lang().L("Animal Spy"); @Override public String name() { return localizedName; } private final static String localizedStaticDisplay = CMLib.lang().L("(Animal Spy)"); @Override public String displayText() { return localizedStaticDisplay; } @Override public int abstractQuality() { return Ability.QUALITY_OK_OTHERS; } @Override public int classificationCode() { return Ability.ACODE_CHANT | Ability.DOMAIN_ANIMALAFFINITY; } protected MOB spy = null; protected boolean disable = false; @Override public boolean tick(final Tickable ticking, final int tickID) { if(!super.tick(ticking,tickID)) return false; if((tickID==Tickable.TICKID_MOB) &&(affected==spy)) { if(spy.amDead() ||(spy.amFollowing()!=invoker) ||(!CMLib.flags().isInTheGame(spy,false))) unInvoke(); } return true; } @Override public void unInvoke() { // undo the affects of this spell if(!(affected instanceof MOB)) return; if(canBeUninvoked()) { if(invoker!=null) { final Ability A=invoker.fetchEffect(this.ID()); if(A!=null) invoker.delEffect(A); invoker.tell(L("Your connection with '@x1' fades.",spy.name())); } } super.unInvoke(); } @Override public void executeMsg(final Environmental myHost, final CMMsg msg) { try { super.executeMsg(myHost,msg); if(spy==null) return; if(invoker==null) return; if((msg.amISource(spy)) &&((msg.sourceMinor()==CMMsg.TYP_LOOK)||(msg.sourceMinor()==CMMsg.TYP_EXAMINE)) &&(msg.target()!=null) &&((invoker.location()!=spy.location())||(!(msg.target() instanceof Room)))) { disable=true; final CMMsg newAffect=CMClass.getMsg(invoker,msg.target(),msg.sourceMinor(),null); msg.target().executeMsg(invoker,newAffect); } else if((!msg.amISource(invoker)) &&(invoker.location()!=spy.location()) &&(msg.source().location()==spy.location()) &&(msg.othersCode()!=CMMsg.NO_EFFECT) &&(msg.othersMessage()!=null) &&(!disable)) { disable=true; invoker.executeMsg(invoker,msg); } else if(msg.amISource(invoker) &&(!disable) &&(msg.sourceMinor()==CMMsg.TYP_SPEAK) 
&&(msg.sourceMessage()!=null) &&((msg.sourceMajor()&CMMsg.MASK_MAGIC)==0)) { final String msg2=CMStrings.getSayFromMessage(msg.sourceMessage()); if((msg2!=null)&&(msg2.length()>0)) spy.enqueCommand(CMParms.parse(msg2.trim()),MUDCmdProcessor.METAFLAG_FORCED,0); } } finally { disable=false; if((spy!=null)&&((spy.amFollowing()!=invoker) ||(spy.amDead()) ||(!CMLib.flags().isInTheGame(spy,false)) ||(!CMLib.flags().isInTheGame(invoker,true)))) unInvoke(); } } @Override public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { if(commands.size()<1) { mob.tell(L("Chant to whom?")); return false; } final String mobName=CMParms.combine(commands,0).trim().toUpperCase(); final MOB target=getTarget(mob,commands,givenTarget); Room newRoom=mob.location(); if(target!=null) { newRoom=target.location(); if((!CMLib.flags().isAnimalIntelligence(target)) ||(target.amFollowing()!=mob)) { mob.tell(L("You have no animal follower named '@x1' here.",mobName)); return false; } } else { mob.tell(L("You have no animal follower named '@x1' here.",mobName)); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> chant(s) to <T-NAMESELF>, invoking the a mystical connection.^?")); final CMMsg msg2=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),null); if((mob.location().okMessage(mob,msg))&&((newRoom==mob.location())||(newRoom.okMessage(mob,msg2)))) { mob.location().send(mob,msg); if(newRoom!=mob.location()) newRoom.send(target,msg2); spy=target; spy.setAttribute(MOB.Attrib.AUTOEXITS, true); beneficialAffect(mob,spy,asLevel,0); final Ability A=spy.fetchEffect(ID()); if(A!=null) { mob.addNonUninvokableEffect((Ability)A.copyOf()); A.setAffectedOne(spy); } } } else beneficialVisualFizzle(mob,target,L("<S-NAME> chant(s) to <T-NAMESELF>, but the magic fades.")); // return whether it worked return success; } }
fix to dup msg in animal spy
com/planet_ink/coffee_mud/Abilities/Druid/Chant_AnimalSpy.java
fix to dup msg in animal spy
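The fix recorded above works by adding a `disable` flag to `Chant_AnimalSpy.executeMsg`: the flag is set before a message is mirrored to the invoker and cleared in a `finally` block, so a relayed message cannot be relayed a second time. Below is a minimal, self-contained sketch of that re-entrancy guard; the `Relay` class and its method names are illustrative and not part of CoffeeMud.

// Illustrative sketch of the guard used in the commit above: a boolean flag
// suppresses nested/duplicate relays while a message is being delivered, and
// a finally block guarantees the flag is always cleared, even on exceptions.
public class Relay {
    private boolean relaying = false;          // analogous to the 'disable' field

    /** Deliver a message to a remote listener at most once per call chain. */
    public void onMessage(String msg, java.util.function.Consumer<String> remote) {
        try {
            if (!relaying) {
                relaying = true;               // block re-entrant relays
                remote.accept(msg);            // may indirectly call onMessage() again
            }
        } finally {
            relaying = false;                  // always reset the guard
        }
    }

    public static void main(String[] args) {
        Relay relay = new Relay();
        relay.onMessage("hello", s -> System.out.println("relayed: " + s));
    }
}

The same shape appears in the committed method: the flag is checked before each forwarding branch and reset unconditionally in the finally clause together with the spy liveness checks.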
Java
apache-2.0
37256649f044d23ea585ae40d1867f715e3791c7
0
rspieldenner/servo,fengshao0907/servo,Netflix/servo,entelesis/servo,ccortezb/servo,ccortezb/servo,eonezhang/servo,brharrington/servo,gorcz/servo,fengshao0907/servo,brharrington/servo,sensaid/servo,entelesis/servo,eonezhang/servo,sensaid/servo
/** * Copyright 2013 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.netflix.servo.monitor; import com.google.common.base.Objects; import com.netflix.servo.annotations.DataSourceType; import java.util.concurrent.atomic.AtomicLongArray; import java.util.concurrent.atomic.AtomicReference; /** * A resettable counter. The value is the maximum * count per second within the specified interval until the counter is reset. */ public class PeakRateCounter extends AbstractMonitor<Number> implements Counter, ResettableMonitor<Number> { private final AtomicReference<AtomicLongArray> buckets; private final int numBuckets; public PeakRateCounter(MonitorConfig config, int intervalSeconds) { // This class will reset the value so it is not a monotonically increasing value as // expected for type=COUNTER. This class looks like a counter to the user and a gauge to // the publishing pipeline receiving the value. super(config.withAdditionalTag(DataSourceType.GAUGE)); numBuckets = intervalSeconds; buckets = new AtomicReference<AtomicLongArray>(new AtomicLongArray(numBuckets)); } @Override public Number getValue() { AtomicLongArray counts = buckets.get(); long max = 0; long cnt; for (int i = 0; i < counts.length(); i++) { cnt = counts.get(i); if (cnt > max) { max = cnt; } } return max; } /** * {@inheritDoc} */ @Override public Number getAndResetValue() { Number value = getValue(); buckets.set(new AtomicLongArray(numBuckets)); return value; } /** * {@inheritDoc} */ @Override public boolean equals(Object obj) { if (obj == null || !(obj instanceof PeakRateCounter)) { return false; } PeakRateCounter c = (PeakRateCounter) obj; return config.equals(c.getConfig()) && (this.getValue() == c.getValue()); } /** * {@inheritDoc} */ @Override public int hashCode() { return Objects.hashCode(config, getValue()); } /** * {@inheritDoc} */ @Override public String toString() { return Objects.toStringHelper(this) .add("config", config) .add("max rate per second", getValue()) .toString(); } /** * {@inheritDoc} */ @Override public void increment() { increment(1L); } /** * {@inheritDoc} */ @Override public void increment(long amount) { long now = System.currentTimeMillis() / 1000L; int index = (int) now % numBuckets; buckets.get().addAndGet(index, amount); } }
servo-core/src/main/java/com/netflix/servo/monitor/PeakRateCounter.java
/** * Copyright 2013 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.netflix.servo.monitor; import com.google.common.base.Objects; import com.netflix.servo.annotations.DataSourceType; import java.io.Serializable; import java.util.Collections; import java.util.Comparator; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; /** * A resettable counter implementation backed by an {@link java.util.concurrent.atomic.ConcurrentHashMap}. * The value is the maximum count per second until the counter is reset. */ public class PeakRateCounter extends AbstractMonitor<Long> implements Counter, ResettableMonitor<Long> { private final AtomicReference<TimestampedHashMap> buckets; public PeakRateCounter(MonitorConfig config) { // This class will reset the value so it is not a monotonically increasing value as // expected for type=COUNTER. This class looks like a counter to the user and a gauge to // the publishing pipeline receiving the value. super(config.withAdditionalTag(DataSourceType.GAUGE)); buckets = new AtomicReference<TimestampedHashMap>(new TimestampedHashMap()); } static class TimestampedHashMap extends ConcurrentHashMap<Long, AtomicLong> { private final long timestamp; /** * ConcurrentHashMap is initialized with the goal of reducing memory * and GC load for very large number of counters typically used. * * The initialCapacity is set for a small number of values * before reallocation. 
* The load factor is set high for dense packing * The concurrencyLevel is set low for concurrent writes to support * a sufficient throughput while reducing unnecessary memory loading * */ TimestampedHashMap() { super(8, 0.9f, 1); timestamp = System.currentTimeMillis(); } long getTimestamp() { return timestamp; } } @Override public Long getValue() { return getMaxValue(); } /** * {@inheritDoc} */ @Override public Long getAndResetValue() { Long value = getValue(); buckets.set(new TimestampedHashMap()); return value; } /** * {@inheritDoc} */ @Override public boolean equals(Object obj) { if (obj == null || !(obj instanceof PeakRateCounter)) { return false; } PeakRateCounter c = (PeakRateCounter) obj; long v1 = this.getValue(); long v2 = c.getValue(); return config.equals(c.getConfig()) && (v1 == v2); } /** * {@inheritDoc} */ @Override public int hashCode() { return Objects.hashCode(config, getValue()); } /** * {@inheritDoc} */ @Override public String toString() { return Objects.toStringHelper(this) .add("config", config) .add("max rate per second", getValue()) .toString(); } /** * {@inheritDoc} */ @Override public void increment() { increment(1L); } /** * {@inheritDoc} */ @Override public void increment(long amount) { long now = System.currentTimeMillis(); long elapsedTime = now - buckets.get().getTimestamp(); long currentBucketKey = TimeUnit.SECONDS.convert(elapsedTime, TimeUnit.MILLISECONDS); incrementBucket(currentBucketKey, amount); } void incrementBucket(Long bucketKey, long amount) { AtomicLong count = buckets.get().get(bucketKey); if (count != null) { count.addAndGet(amount); } else { AtomicLong delta = new AtomicLong(amount); count = buckets.get().putIfAbsent(bucketKey, delta); if (count != null) { count.addAndGet(amount); } else { trimBuckets(bucketKey); } } } /** * Remove all but the current and max buckets. */ void trimBuckets(Long currentBucketKey) { Long maxBucketKey = getMaxBucketKey(); Set<Long> keySet = buckets.get().keySet(); for (Long key : keySet) { if ((!key.equals(maxBucketKey)) && (!key.equals(currentBucketKey))) { buckets.get().remove(key); } } } static class MapEntryValueComparator implements Comparator<Map.Entry<Long, AtomicLong>>, Serializable { @Override public int compare(Map.Entry<Long, AtomicLong> o1, Map.Entry<Long, AtomicLong> o2) { long v1 = o1.getValue().get(); long v2 = o2.getValue().get(); return (v1 == v2 ? 0 : v1 > v2 ? 1 : -1); } } private Map.Entry<Long, AtomicLong> getMaxBucket() { Set<Map.Entry<Long, AtomicLong>> entrySet = buckets.get().entrySet(); if (entrySet.isEmpty()) { return null; } Comparator<Map.Entry<Long, AtomicLong>> cmp = new MapEntryValueComparator(); Map.Entry<Long, AtomicLong> max = Collections.max(entrySet, cmp); return max; } Long getMaxBucketKey() { return getMaxBucket().getKey(); } long getMaxValue() { Map.Entry<Long, AtomicLong> bucket = getMaxBucket(); return (bucket == null? 0 : getMaxBucket().getValue().get()); } AtomicLong getBucketValue(Long key) { return buckets.get().get(key); } }
Renamed PeakRateIntervalCounter to PeakRateCounter and incorporated changes from previous review
servo-core/src/main/java/com/netflix/servo/monitor/PeakRateCounter.java
Renamed PeakRateIntervalCounter to PeakRateCounter and incorporated changes from previous review
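The rewrite above swaps the map-based `PeakRateCounter` for a fixed array of per-second buckets: `increment` adds to slot `(epochSeconds % numBuckets)` and `getValue` scans the array for the largest slot. The stand-alone sketch below illustrates that bucketing scheme under the same assumption the committed class makes, namely that the counter is polled and reset at least once per interval; the `PeakPerSecond` class name is made up for illustration and is not the Servo monitor itself.

import java.util.concurrent.atomic.AtomicLongArray;

// Stand-alone sketch of the per-second bucket scheme used above. Each second
// maps onto one slot of a fixed-size array; the peak rate is the largest slot
// value seen since the array was last created (i.e. since the last reset).
public class PeakPerSecond {
    private final AtomicLongArray buckets;

    public PeakPerSecond(int intervalSeconds) {
        this.buckets = new AtomicLongArray(intervalSeconds);
    }

    public void increment(long amount) {
        int index = (int) ((System.currentTimeMillis() / 1000L) % buckets.length());
        buckets.addAndGet(index, amount);    // count events for the current second
    }

    public long peak() {
        long max = 0;
        for (int i = 0; i < buckets.length(); i++) {
            max = Math.max(max, buckets.get(i));
        }
        return max;                          // highest per-second count observed
    }

    public static void main(String[] args) {
        PeakPerSecond counter = new PeakPerSecond(60);
        counter.increment(1);
        counter.increment(2);
        // usually prints 3: both increments normally land in the same one-second bucket
        System.out.println("peak/sec = " + counter.peak());
    }
}

Trade-off worth noting: the array version needs no trimming or map churn, at the cost that stale buckets are only cleared on reset, which is acceptable when a resettable monitor is read on a fixed schedule.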
Java
apache-2.0
485c0c819e292768dfe64ddac26814d5ae7b3894
0
OSEHRA/ISAAC,OSEHRA/ISAAC,OSEHRA/ISAAC
/* * Copyright 2018 Organizations participating in ISAAC, ISAAC's KOMET, and SOLOR development include the US Veterans Health Administration, OSHERA, and the Health Services Platform Consortium.. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sh.komet.gui.importation; import javafx.application.Platform; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleListProperty; import javafx.collections.FXCollections; import javafx.concurrent.Task; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.scene.control.*; import javafx.scene.control.Alert.AlertType; import javafx.scene.control.cell.CheckBoxTreeTableCell; import javafx.scene.control.cell.TreeItemPropertyValueFactory; import javafx.scene.text.Text; import javafx.stage.FileChooser; import javafx.stage.FileChooser.ExtensionFilter; import javafx.stage.Stage; import org.apache.commons.io.IOUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import sh.isaac.api.Get; import sh.isaac.api.LookupService; import sh.isaac.api.util.StringUtils; import sh.isaac.dbConfigBuilder.artifacts.MavenArtifactUtils; import sh.isaac.dbConfigBuilder.artifacts.SDOSourceContent; import sh.isaac.dbConfigBuilder.prefs.StoredPrefs; import sh.isaac.pombuilder.converter.SupportedConverterTypes; import sh.isaac.provider.query.lucene.indexers.DescriptionIndexer; import sh.isaac.solor.ContentProvider; import sh.isaac.solor.direct.ImportType; import sh.komet.gui.manifold.Manifold; import sh.komet.gui.util.FxGet; import sh.komet.gui.util.FxUtils; import java.io.File; import java.io.IOException; import java.net.URL; import java.nio.charset.Charset; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipInputStream; import static sh.komet.gui.importation.ImportItemZipEntry.FILE_PARENT_KEY; public class ImportViewController { protected static final Logger LOG = LogManager.getLogger(); @FXML private ChoiceBox<SelectedImportType> importType; @FXML private ResourceBundle resources; @FXML private URL location; @FXML private Button addButton; @FXML private Button addArtifactButton; @FXML private Button importDataButton; @FXML private TreeTableView<ImportItem> fileTreeTable; @FXML private TreeTableColumn<ImportItem, String> treeColumn; @FXML private TreeTableColumn<ImportItem, String> importColumn; @FXML private Text textImportMessage; Stage importStage; Map<TreeItem<ImportItem>, ConcurrentHashMap<String, TreeItem<ImportItem>>> fileItemsMap = new ConcurrentHashMap<>(); private Manifold manifold; private final StoredPrefs storedPrefs = new StoredPrefs("".toCharArray()); private final SimpleListProperty<File> filesProperty = new SimpleListProperty<>(FXCollections.observableArrayList()); private final SimpleBooleanProperty snomedSelectedProperty = new SimpleBooleanProperty(false); private final SimpleBooleanProperty loincSelectedProperty = new SimpleBooleanProperty(false); private final 
SimpleBooleanProperty collabSelectedProperty = new SimpleBooleanProperty(false); private final String loincSNOMEDCollabRequiredText = "โœ˜ Import Selection Requires LOINC/SNOMED Collaboration (SnomedCT_LOINCRF2_PRODUCTION_20170831T120000Z.zip)"; private final String importIsReadyText = "โœ” Ready to Import (e.g. LOINC, RxNorm, LOINC/SNOMED CT Collaboration, and Deloitte Assemblages)"; private final String snomedCTRequiredText = "โœ˜ Import Selection Requires SNOMED CT (SnomedCT_InternationalRF2_PRODUCTION_20170731T150000Z.zip)"; @FXML void addImportDataLocation(ActionEvent event) { FileChooser fileChooser = new FileChooser(); fileChooser.setTitle("Open Resource File"); fileChooser.getExtensionFilters().addAll( new ExtensionFilter("Zip files", "*.zip")); addFiles(fileChooser.showOpenMultipleDialog(importStage)); } private void addFiles(List<File> files) { if(files != null) this.filesProperty.addAll(files); Task<Void> t = new Task<Void>() { @Override protected Void call() throws Exception { for (File file : files) { try (ZipFile zipFile = new ZipFile(file, Charset.forName("UTF-8"))) { TreeItem<ImportItem> newFileItem = new TreeItem<>(new ImportItemZipFile(file)); ConcurrentHashMap<String, TreeItem<ImportItem>> newTreeItems = new ConcurrentHashMap<>(); fileItemsMap.put(newFileItem, newTreeItems); zipFile.stream().forEach((ZipEntry zipEntry) -> { if (zipEntry.getName().toLowerCase().endsWith(".zip")) { // maven artifact structure with nested zip file for actual content try (ZipFile nestedZipFile = new ZipFile(file, Charset.forName("UTF-8"))) { ZipInputStream zis = new ZipInputStream(zipFile.getInputStream(zipEntry), Charset.forName("UTF-8")); ZipEntry nestedEntry = zis.getNextEntry(); while (nestedEntry != null) { if (!nestedEntry.getName().toUpperCase().contains("__MACOSX") && !nestedEntry.getName().contains("._")) { byte[] itemBytes = null; if (nestedEntry.getSize() < (500 * 1024 * 1024)) { //We have to cache these unzipped bytes, as otherwise, //the import is terribly slow, because the java zip API only provides stream access //to nested files, and when you try to unzip from a stream, it can't jump ahead whe you //call next entry, so you end up re-extracting the entire file for each file, which more //that triples the load times. 
LOG.debug("Caching unzipped content"); itemBytes = IOUtils.toByteArray(zis); } else { LOG.info("content file too large to cache"); } ImportItemZipEntry nestedImportItem = new ImportItemZipEntry(file, zipEntry, nestedEntry, itemBytes); TreeItem<ImportItem> nestedEntryItem = new TreeItem<>(nestedImportItem); newTreeItems.put(zipEntry.getName() + "/" + nestedEntry.getName(), nestedEntryItem); } nestedEntry = zis.getNextEntry(); } } catch (IOException e) { throw new RuntimeException(e); } ImportItemZipEntry importItem = new ImportItemZipEntry(file, zipEntry); TreeItem<ImportItem> entryItem = new TreeItem<>(importItem); newTreeItems.put(zipEntry.getName(), entryItem); } else if (!zipEntry.getName().toUpperCase().contains("__MACOSX") && !zipEntry.getName().contains("._")) { if (file.getName().toLowerCase().startsWith("rxnorm_")) { if (zipEntry.getName().toLowerCase().endsWith(".rrf")) { ImportItemZipEntry importItem = new ImportItemZipEntry(file, zipEntry); TreeItem<ImportItem> entryItem = new TreeItem<>(importItem); if (importItem.nameProperty.get().toUpperCase().endsWith("RXNCONSO.RRF") && !importItem.parentKey.toLowerCase().contains("prescribe")) { importItem.importData.set(true); } else { importItem.importData.set(false); } newTreeItems.put(zipEntry.getName(), entryItem); } } else if (file.getName().toLowerCase().startsWith("loinc_")) { if (zipEntry.getName().toLowerCase().equals("loinc.csv")) { ImportItemZipEntry importItem = new ImportItemZipEntry(file, zipEntry); TreeItem<ImportItem> entryItem = new TreeItem<>(importItem); newTreeItems.put(zipEntry.getName(), entryItem); } } else { ImportItemZipEntry importItem = new ImportItemZipEntry(file, zipEntry); TreeItem<ImportItem> entryItem = new TreeItem<>(importItem); newTreeItems.put(zipEntry.getName(), entryItem); } } }); } catch (IOException ex) { throw new RuntimeException(ex); } } Platform.runLater(() -> setupEntryTree()); return null; } }; Get.workExecutors().getExecutor().execute(t); FxUtils.waitWithProgress("Reading file", "Reading selected file", t, importStage.getOwner()); } protected void setupEntryTree() { // clear all existing links fileTreeTable.getRoot().getChildren().clear(); for (TreeItem<ImportItem> fileItem : fileItemsMap.keySet()) { fileItem.getChildren().clear(); ConcurrentHashMap<String, TreeItem<ImportItem>> treeItems = fileItemsMap.get(fileItem); for (Map.Entry<String, TreeItem<ImportItem>> entry : treeItems.entrySet()) { entry.getValue().getChildren().clear(); } } // hook all up here... for (TreeItem<ImportItem> fileItem : fileItemsMap.keySet()) { SelectedImportType type = importType.getValue(); ConcurrentHashMap<String, TreeItem<ImportItem>> treeItems = fileItemsMap.get(fileItem); for (Map.Entry<String, TreeItem<ImportItem>> entry : treeItems.entrySet()) { TreeItem<ImportItem> treeItem = entry.getValue(); if (treeItem.getValue() instanceof ImportItemZipEntry) { ImportItemZipEntry treeItemValue = (ImportItemZipEntry) treeItem.getValue(); if (treeItemValue.importType == null || treeItemValue.importType == type || (type == SelectedImportType.ACTIVE_ONLY && treeItemValue.importType == SelectedImportType.SNAPSHOT)) { if (treeItemValue.getParentKey().equals(FILE_PARENT_KEY)) { if (!fileTreeTable.getRoot().getChildren().contains(fileItem)) { fileTreeTable.getRoot().getChildren().add(fileItem); } fileItem.getChildren().add(treeItem); } else { String parentKey = treeItemValue.getParentKey(); TreeItem<ImportItem> parentItem = treeItems.get(parentKey); if (parentItem == null) { // Add... 
In some zip files, the directories are not added, just the files. // So we may encounter a need for a parent directory if (!fileTreeTable.getRoot().getChildren().contains(fileItem)) { fileTreeTable.getRoot().getChildren().add(fileItem); } ImportItemDirectory importItemDirectory = new ImportItemDirectory(); importItemDirectory.setName(treeItemValue.getParentKey()); TreeItem<ImportItem> directoryItem = new TreeItem<>(importItemDirectory); fileItem.getChildren().add(directoryItem); importItemDirectory.importData.set(treeItemValue.importData()); treeItems.put(treeItemValue.getParentKey(), directoryItem); directoryItem.getChildren().add(treeItem); } else { if (!fileTreeTable.getRoot().getChildren().contains(fileItem)) { fileTreeTable.getRoot().getChildren().add(fileItem); } parentItem.getValue().importDataProperty().removeListener(treeItemValue); parentItem.getValue().importDataProperty().addListener(treeItemValue); if (!parentItem.getChildren().contains(fileItem)) { parentItem.getChildren().add(treeItem); } if (treeItemValue.importData()) { parentItem.getValue().importDataProperty().set(true); } if (parentKey.indexOf('/') == parentKey.length() - 1) { if (!fileItem.getChildren().contains(parentItem)) { fileItem.getChildren().add(parentItem); } } } } } } } this.fileTreeTable.getRoot().expandedProperty().setValue(Boolean.TRUE); } } @FXML void importData(ActionEvent event) { List<ContentProvider> entriesToImport = new ArrayList<>(); recursiveAddToImport(fileTreeTable.getRoot(), entriesToImport); ImportType directImportType = null; switch (importType.getValue()) { case ACTIVE_ONLY: directImportType = ImportType.ACTIVE_ONLY; break; case FULL: directImportType = ImportType.FULL; break; case SNAPSHOT: directImportType = ImportType.SNAPSHOT; break; case IGNORE: break; case DELTA: default: throw new RuntimeException("oops"); } if (directImportType != null) { ImportSelectedAndTransformTask importer = new ImportSelectedAndTransformTask(manifold, directImportType, entriesToImport); Get.executor().execute(importer); } importStage.close(); } private void recursiveAddToImport(TreeItem<ImportItem> treeItem, List<ContentProvider> entriesToImport) { ImportItem item = treeItem.getValue(); if (item.importData()) { if (item instanceof ImportItemZipEntry) { ImportItemZipEntry zipEntry = (ImportItemZipEntry) item; if (!zipEntry.entry.isDirectory()) { entriesToImport.add(zipEntry.getContent()); } } for (TreeItem<ImportItem> childItem : treeItem.getChildren()) { recursiveAddToImport(childItem, entriesToImport); } } } @FXML void initialize() { assert addButton != null : "fx:id=\"addButton\" was not injected: check your FXML file 'ImportView.fxml'."; assert addArtifactButton != null : "fx:id=\"addArtifactButton\" was not injected: check your FXML file 'ImportView.fxml'."; assert fileTreeTable != null : "fx:id=\"fileTreeTable\" was not injected: check your FXML file 'ImportView.fxml'."; assert treeColumn != null : "fx:id=\"treeColumn\" was not injected: check your FXML file 'ImportView.fxml'."; assert importColumn != null : "fx:id=\"importColumn\" was not injected: check your FXML file 'ImportView.fxml'."; assert importDataButton != null : "fx:id=\"importDataButton\" was not injected: check your FXML file 'ImportView.fxml'."; assert textImportMessage != null : "fx:id=\"textImportMessage\" was not injected: check your FXML file 'ImportView.fxml'."; this.treeColumn.setCellValueFactory(new TreeItemPropertyValueFactory<>("name")); this.importColumn.setCellValueFactory(new TreeItemPropertyValueFactory<>("importData")); 
this.importColumn.setCellFactory(CheckBoxTreeTableCell.forTreeTableColumn( (Integer index) -> this.fileTreeTable.getTreeItem(index).getValue().importDataProperty())); this.importColumn.setEditable(true); this.fileTreeTable.setRoot(new TreeItem<>(new ImportRoot())); this.fileTreeTable.setShowRoot(false); this.fileTreeTable.setEditable(true); this.fileTreeTable.treeColumnProperty().set(treeColumn); if (FxGet.fxConfiguration().isShowBetaFeaturesEnabled()) { this.importType.getItems().addAll(SelectedImportType.ACTIVE_ONLY, SelectedImportType.SNAPSHOT, SelectedImportType.FULL); } else { this.importType.getItems().addAll(SelectedImportType.ACTIVE_ONLY); this.addArtifactButton.setVisible(false); } this.importType.getSelectionModel().select(SelectedImportType.ACTIVE_ONLY); this.importType.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> { this.importTypeChanged(newValue); }); ArrayList<SDOSourceContent> sdoSourceFiles_ = new ArrayList<>(); this.addArtifactButton.setOnAction((action) -> { ListView<SDOSourceContent> sdoPicker = new ListView<>(); FxUtils.waitWithProgress("Reading SDO Files", "Reading available SDO Source Files", MavenArtifactUtils.readAvailableSourceFiles(storedPrefs, (results) -> { sdoSourceFiles_.clear(); //TODO tie this to some sort of dynamic thing about what types are supported by the direct importer... for (SDOSourceContent sdo : results) { SupportedConverterTypes found = SupportedConverterTypes.findBySrcArtifactId(sdo.getArtifactId()); if (SupportedConverterTypes.SCT == found || SupportedConverterTypes.SCT_EXTENSION == found) { sdoSourceFiles_.add(sdo); } } }), importStage.getScene().getWindow()); sdoPicker.setItems(FXCollections.observableArrayList(sdoSourceFiles_)); sdoPicker.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE); sdoPicker.setCellFactory(param -> new ListCell<SDOSourceContent>() { @Override protected void updateItem(SDOSourceContent item, boolean empty) { super.updateItem(item, empty); if (empty || item == null) { setText(null); } else { setText(item.getArtifactId() + (item.hasClassifier() ? " : " + item.getClassifier() : "") + " : " + item.getVersion()); } } }); Alert sdoDialog = new Alert(AlertType.CONFIRMATION); sdoDialog.setTitle("Select Files"); sdoDialog.setHeaderText("Select 1 or more SDO Files to add"); sdoDialog.getDialogPane().setContent(sdoPicker); sdoPicker.setPrefWidth(1024); sdoDialog.initOwner(importStage.getOwner()); if (sdoDialog.showAndWait().orElse(null) == ButtonType.OK) { for (SDOSourceContent sdo : sdoPicker.getSelectionModel().getSelectedItems()) { Optional<File> local = sdo.getLocalPath(storedPrefs); if (local.isPresent()) { addFiles(Arrays.asList(new File[]{local.get()})); } } } }); //TODO tie this to a real StoredPrefs in the GUI. 
For now, just a default, so we can at least read a local .m2 folder //make this system property read go away String temp = System.getProperty("M2_PATH"); if (StringUtils.isNotBlank(temp)) { this.storedPrefs.setLocalM2FolderPath(temp); } //Initial SNOMED CT check this.snomedSelectedProperty.set(LookupService.get().getService(DescriptionIndexer.class) .query("theophobia", 0).size() > 0); //Initial display to UI if SNOMED CT is present from prior import if(this.snomedSelectedProperty.get() == false){ this.textImportMessage.setText(this.snomedCTRequiredText); this.importDataButton.setDisable(true); }else{ this.textImportMessage.setText(this.importIsReadyText); this.importDataButton.setDisable(false); } //Check to see, after each file is added, if dependencies are being met :) this.filesProperty.addListener((observable, oldValue, newValue) -> { observable.getValue().stream() .map(File::getName) .map(String::toLowerCase) .forEach(name ->{ if(!snomedSelectedProperty.get() && name.contains("snomedct_internationalrf2_production_20170731t150000z.zip")){ this.snomedSelectedProperty.set(true); } else if(!loincSelectedProperty.get() && (name.contains("loinc_") && name.contains("_text.zip")) ){ this.loincSelectedProperty.set(true); } else if(!collabSelectedProperty.get() && name.contains("snomedct_loincrf2_production_20170831t120000z.zip")){ this.collabSelectedProperty.set(true); } }); if(!this.snomedSelectedProperty.get()){ this.textImportMessage.setText(this.snomedCTRequiredText); this.importDataButton.setDisable(true); }else{ if(this.loincSelectedProperty.get()){ if(!this.collabSelectedProperty.get()){ this.textImportMessage.setText(this.loincSNOMEDCollabRequiredText); this.importDataButton.setDisable(true); }else{ this.textImportMessage.setText(this.importIsReadyText); this.importDataButton.setDisable(false); } }else { this.textImportMessage.setText(this.importIsReadyText); this.importDataButton.setDisable(false); } } }); } private void importTypeChanged(SelectedImportType importType) { setupEntryTree(); } public Stage getImportStage() { return importStage; } public void setImportStage(Stage importStage) { this.importStage = importStage; } void setManifold(Manifold manifold) { this.manifold = manifold; } }
komet/gui-contracts/src/main/java/sh/komet/gui/importation/ImportViewController.java
/* * Copyright 2018 Organizations participating in ISAAC, ISAAC's KOMET, and SOLOR development include the US Veterans Health Administration, OSHERA, and the Health Services Platform Consortium.. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sh.komet.gui.importation; import javafx.application.Platform; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleListProperty; import javafx.collections.FXCollections; import javafx.concurrent.Task; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.scene.control.*; import javafx.scene.control.Alert.AlertType; import javafx.scene.control.cell.CheckBoxTreeTableCell; import javafx.scene.control.cell.TreeItemPropertyValueFactory; import javafx.scene.text.Text; import javafx.stage.FileChooser; import javafx.stage.FileChooser.ExtensionFilter; import javafx.stage.Stage; import org.apache.commons.io.IOUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import sh.isaac.api.Get; import sh.isaac.api.LookupService; import sh.isaac.api.util.StringUtils; import sh.isaac.dbConfigBuilder.artifacts.MavenArtifactUtils; import sh.isaac.dbConfigBuilder.artifacts.SDOSourceContent; import sh.isaac.dbConfigBuilder.prefs.StoredPrefs; import sh.isaac.pombuilder.converter.SupportedConverterTypes; import sh.isaac.provider.query.lucene.indexers.DescriptionIndexer; import sh.isaac.solor.ContentProvider; import sh.isaac.solor.direct.ImportType; import sh.komet.gui.manifold.Manifold; import sh.komet.gui.util.FxGet; import sh.komet.gui.util.FxUtils; import java.io.File; import java.io.IOException; import java.net.URL; import java.nio.charset.Charset; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipInputStream; import static sh.komet.gui.importation.ImportItemZipEntry.FILE_PARENT_KEY; public class ImportViewController { protected static final Logger LOG = LogManager.getLogger(); @FXML private ChoiceBox<SelectedImportType> importType; @FXML private ResourceBundle resources; @FXML private URL location; @FXML private Button addButton; @FXML private Button addArtifactButton; @FXML private Button importDataButton; @FXML private TreeTableView<ImportItem> fileTreeTable; @FXML private TreeTableColumn<ImportItem, String> treeColumn; @FXML private TreeTableColumn<ImportItem, String> importColumn; @FXML private Text textImportMessage; Stage importStage; Map<TreeItem<ImportItem>, ConcurrentHashMap<String, TreeItem<ImportItem>>> fileItemsMap = new ConcurrentHashMap<>(); private Manifold manifold; private final StoredPrefs storedPrefs = new StoredPrefs("".toCharArray()); private final SimpleListProperty<File> filesProperty = new SimpleListProperty<>(FXCollections.observableArrayList()); private final SimpleBooleanProperty snomedSelectedProperty = new SimpleBooleanProperty(false); private final SimpleBooleanProperty loincSelectedProperty = new SimpleBooleanProperty(); private final SimpleBooleanProperty 
collabSelectedProperty = new SimpleBooleanProperty(); private final String loincSNOMEDCollabRequiredText = "โœ˜ Import Selection Requires LOINC/SNOMED Collaboration (SnomedCT_LOINCRF2_PRODUCTION_20170831T120000Z.zip)"; private final String importIsReadyText = "โœ” Ready to Import (e.g. LOINC, RxNorm, LOINC/SNOMED CT Collaboration, and Deloitte Assemblages)"; private final String snomedCTRequiredText = "โœ˜ Import Selection Requires SNOMED CT (SnomedCT_InternationalRF2_PRODUCTION_20170731T150000Z.zip)"; @FXML void addImportDataLocation(ActionEvent event) { FileChooser fileChooser = new FileChooser(); fileChooser.setTitle("Open Resource File"); fileChooser.getExtensionFilters().addAll( new ExtensionFilter("Zip files", "*.zip")); addFiles(fileChooser.showOpenMultipleDialog(importStage)); } private void addFiles(List<File> files) { this.filesProperty.addAll(files); Task<Void> t = new Task<Void>() { @Override protected Void call() throws Exception { for (File file : files) { try (ZipFile zipFile = new ZipFile(file, Charset.forName("UTF-8"))) { TreeItem<ImportItem> newFileItem = new TreeItem<>(new ImportItemZipFile(file)); ConcurrentHashMap<String, TreeItem<ImportItem>> newTreeItems = new ConcurrentHashMap<>(); fileItemsMap.put(newFileItem, newTreeItems); zipFile.stream().forEach((ZipEntry zipEntry) -> { if (zipEntry.getName().toLowerCase().endsWith(".zip")) { // maven artifact structure with nested zip file for actual content try (ZipFile nestedZipFile = new ZipFile(file, Charset.forName("UTF-8"))) { ZipInputStream zis = new ZipInputStream(zipFile.getInputStream(zipEntry), Charset.forName("UTF-8")); ZipEntry nestedEntry = zis.getNextEntry(); while (nestedEntry != null) { if (!nestedEntry.getName().toUpperCase().contains("__MACOSX") && !nestedEntry.getName().contains("._")) { byte[] itemBytes = null; if (nestedEntry.getSize() < (500 * 1024 * 1024)) { //We have to cache these unzipped bytes, as otherwise, //the import is terribly slow, because the java zip API only provides stream access //to nested files, and when you try to unzip from a stream, it can't jump ahead whe you //call next entry, so you end up re-extracting the entire file for each file, which more //that triples the load times. 
LOG.debug("Caching unzipped content"); itemBytes = IOUtils.toByteArray(zis); } else { LOG.info("content file too large to cache"); } ImportItemZipEntry nestedImportItem = new ImportItemZipEntry(file, zipEntry, nestedEntry, itemBytes); TreeItem<ImportItem> nestedEntryItem = new TreeItem<>(nestedImportItem); newTreeItems.put(zipEntry.getName() + "/" + nestedEntry.getName(), nestedEntryItem); } nestedEntry = zis.getNextEntry(); } } catch (IOException e) { throw new RuntimeException(e); } ImportItemZipEntry importItem = new ImportItemZipEntry(file, zipEntry); TreeItem<ImportItem> entryItem = new TreeItem<>(importItem); newTreeItems.put(zipEntry.getName(), entryItem); } else if (!zipEntry.getName().toUpperCase().contains("__MACOSX") && !zipEntry.getName().contains("._")) { if (file.getName().toLowerCase().startsWith("rxnorm_")) { if (zipEntry.getName().toLowerCase().endsWith(".rrf")) { ImportItemZipEntry importItem = new ImportItemZipEntry(file, zipEntry); TreeItem<ImportItem> entryItem = new TreeItem<>(importItem); if (importItem.nameProperty.get().toUpperCase().endsWith("RXNCONSO.RRF") && !importItem.parentKey.toLowerCase().contains("prescribe")) { importItem.importData.set(true); } else { importItem.importData.set(false); } newTreeItems.put(zipEntry.getName(), entryItem); } } else if (file.getName().toLowerCase().startsWith("loinc_")) { if (zipEntry.getName().toLowerCase().equals("loinc.csv")) { ImportItemZipEntry importItem = new ImportItemZipEntry(file, zipEntry); TreeItem<ImportItem> entryItem = new TreeItem<>(importItem); newTreeItems.put(zipEntry.getName(), entryItem); } } else { ImportItemZipEntry importItem = new ImportItemZipEntry(file, zipEntry); TreeItem<ImportItem> entryItem = new TreeItem<>(importItem); newTreeItems.put(zipEntry.getName(), entryItem); } } }); } catch (IOException ex) { throw new RuntimeException(ex); } } Platform.runLater(() -> setupEntryTree()); return null; } }; Get.workExecutors().getExecutor().execute(t); FxUtils.waitWithProgress("Reading file", "Reading selected file", t, importStage.getOwner()); } protected void setupEntryTree() { // clear all existing links fileTreeTable.getRoot().getChildren().clear(); for (TreeItem<ImportItem> fileItem : fileItemsMap.keySet()) { fileItem.getChildren().clear(); ConcurrentHashMap<String, TreeItem<ImportItem>> treeItems = fileItemsMap.get(fileItem); for (Map.Entry<String, TreeItem<ImportItem>> entry : treeItems.entrySet()) { entry.getValue().getChildren().clear(); } } // hook all up here... for (TreeItem<ImportItem> fileItem : fileItemsMap.keySet()) { SelectedImportType type = importType.getValue(); ConcurrentHashMap<String, TreeItem<ImportItem>> treeItems = fileItemsMap.get(fileItem); for (Map.Entry<String, TreeItem<ImportItem>> entry : treeItems.entrySet()) { TreeItem<ImportItem> treeItem = entry.getValue(); if (treeItem.getValue() instanceof ImportItemZipEntry) { ImportItemZipEntry treeItemValue = (ImportItemZipEntry) treeItem.getValue(); if (treeItemValue.importType == null || treeItemValue.importType == type || (type == SelectedImportType.ACTIVE_ONLY && treeItemValue.importType == SelectedImportType.SNAPSHOT)) { if (treeItemValue.getParentKey().equals(FILE_PARENT_KEY)) { if (!fileTreeTable.getRoot().getChildren().contains(fileItem)) { fileTreeTable.getRoot().getChildren().add(fileItem); } fileItem.getChildren().add(treeItem); } else { String parentKey = treeItemValue.getParentKey(); TreeItem<ImportItem> parentItem = treeItems.get(parentKey); if (parentItem == null) { // Add... 
In some zip files, the directories are not added, just the files. // So we may encounter a need for a parent directory if (!fileTreeTable.getRoot().getChildren().contains(fileItem)) { fileTreeTable.getRoot().getChildren().add(fileItem); } ImportItemDirectory importItemDirectory = new ImportItemDirectory(); importItemDirectory.setName(treeItemValue.getParentKey()); TreeItem<ImportItem> directoryItem = new TreeItem<>(importItemDirectory); fileItem.getChildren().add(directoryItem); importItemDirectory.importData.set(treeItemValue.importData()); treeItems.put(treeItemValue.getParentKey(), directoryItem); directoryItem.getChildren().add(treeItem); } else { if (!fileTreeTable.getRoot().getChildren().contains(fileItem)) { fileTreeTable.getRoot().getChildren().add(fileItem); } parentItem.getValue().importDataProperty().removeListener(treeItemValue); parentItem.getValue().importDataProperty().addListener(treeItemValue); if (!parentItem.getChildren().contains(fileItem)) { parentItem.getChildren().add(treeItem); } if (treeItemValue.importData()) { parentItem.getValue().importDataProperty().set(true); } if (parentKey.indexOf('/') == parentKey.length() - 1) { if (!fileItem.getChildren().contains(parentItem)) { fileItem.getChildren().add(parentItem); } } } } } } } this.fileTreeTable.getRoot().expandedProperty().setValue(Boolean.TRUE); } } @FXML void importData(ActionEvent event) { List<ContentProvider> entriesToImport = new ArrayList<>(); recursiveAddToImport(fileTreeTable.getRoot(), entriesToImport); ImportType directImportType = null; switch (importType.getValue()) { case ACTIVE_ONLY: directImportType = ImportType.ACTIVE_ONLY; break; case FULL: directImportType = ImportType.FULL; break; case SNAPSHOT: directImportType = ImportType.SNAPSHOT; break; case IGNORE: break; case DELTA: default: throw new RuntimeException("oops"); } if (directImportType != null) { ImportSelectedAndTransformTask importer = new ImportSelectedAndTransformTask(manifold, directImportType, entriesToImport); Get.executor().execute(importer); } importStage.close(); } private void recursiveAddToImport(TreeItem<ImportItem> treeItem, List<ContentProvider> entriesToImport) { ImportItem item = treeItem.getValue(); if (item.importData()) { if (item instanceof ImportItemZipEntry) { ImportItemZipEntry zipEntry = (ImportItemZipEntry) item; if (!zipEntry.entry.isDirectory()) { entriesToImport.add(zipEntry.getContent()); } } for (TreeItem<ImportItem> childItem : treeItem.getChildren()) { recursiveAddToImport(childItem, entriesToImport); } } } @FXML void initialize() { assert addButton != null : "fx:id=\"addButton\" was not injected: check your FXML file 'ImportView.fxml'."; assert addArtifactButton != null : "fx:id=\"addArtifactButton\" was not injected: check your FXML file 'ImportView.fxml'."; assert fileTreeTable != null : "fx:id=\"fileTreeTable\" was not injected: check your FXML file 'ImportView.fxml'."; assert treeColumn != null : "fx:id=\"treeColumn\" was not injected: check your FXML file 'ImportView.fxml'."; assert importColumn != null : "fx:id=\"importColumn\" was not injected: check your FXML file 'ImportView.fxml'."; assert importDataButton != null : "fx:id=\"importDataButton\" was not injected: check your FXML file 'ImportView.fxml'."; assert textImportMessage != null : "fx:id=\"textImportMessage\" was not injected: check your FXML file 'ImportView.fxml'."; this.treeColumn.setCellValueFactory(new TreeItemPropertyValueFactory<>("name")); this.importColumn.setCellValueFactory(new TreeItemPropertyValueFactory<>("importData")); 
this.importColumn.setCellFactory(CheckBoxTreeTableCell.forTreeTableColumn( (Integer index) -> this.fileTreeTable.getTreeItem(index).getValue().importDataProperty())); this.importColumn.setEditable(true); this.fileTreeTable.setRoot(new TreeItem<>(new ImportRoot())); this.fileTreeTable.setShowRoot(false); this.fileTreeTable.setEditable(true); this.fileTreeTable.treeColumnProperty().set(treeColumn); if (FxGet.fxConfiguration().isShowBetaFeaturesEnabled()) { this.importType.getItems().addAll(SelectedImportType.ACTIVE_ONLY, SelectedImportType.SNAPSHOT, SelectedImportType.FULL); } else { this.importType.getItems().addAll(SelectedImportType.ACTIVE_ONLY); this.addArtifactButton.setVisible(false); } this.importType.getSelectionModel().select(SelectedImportType.ACTIVE_ONLY); this.importType.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> { this.importTypeChanged(newValue); }); ArrayList<SDOSourceContent> sdoSourceFiles_ = new ArrayList<>(); this.addArtifactButton.setOnAction((action) -> { ListView<SDOSourceContent> sdoPicker = new ListView<>(); FxUtils.waitWithProgress("Reading SDO Files", "Reading available SDO Source Files", MavenArtifactUtils.readAvailableSourceFiles(storedPrefs, (results) -> { sdoSourceFiles_.clear(); //TODO tie this to some sort of dynamic thing about what types are supported by the direct importer... for (SDOSourceContent sdo : results) { SupportedConverterTypes found = SupportedConverterTypes.findBySrcArtifactId(sdo.getArtifactId()); if (SupportedConverterTypes.SCT == found || SupportedConverterTypes.SCT_EXTENSION == found) { sdoSourceFiles_.add(sdo); } } }), importStage.getScene().getWindow()); sdoPicker.setItems(FXCollections.observableArrayList(sdoSourceFiles_)); sdoPicker.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE); sdoPicker.setCellFactory(param -> new ListCell<SDOSourceContent>() { @Override protected void updateItem(SDOSourceContent item, boolean empty) { super.updateItem(item, empty); if (empty || item == null) { setText(null); } else { setText(item.getArtifactId() + (item.hasClassifier() ? " : " + item.getClassifier() : "") + " : " + item.getVersion()); } } }); Alert sdoDialog = new Alert(AlertType.CONFIRMATION); sdoDialog.setTitle("Select Files"); sdoDialog.setHeaderText("Select 1 or more SDO Files to add"); sdoDialog.getDialogPane().setContent(sdoPicker); sdoPicker.setPrefWidth(1024); sdoDialog.initOwner(importStage.getOwner()); if (sdoDialog.showAndWait().orElse(null) == ButtonType.OK) { for (SDOSourceContent sdo : sdoPicker.getSelectionModel().getSelectedItems()) { Optional<File> local = sdo.getLocalPath(storedPrefs); if (local.isPresent()) { addFiles(Arrays.asList(new File[]{local.get()})); } } } }); //TODO tie this to a real StoredPrefs in the GUI. 
For now, just a default, so we can at least read a local .m2 folder //make this system property read go away String temp = System.getProperty("M2_PATH"); if (StringUtils.isNotBlank(temp)) { this.storedPrefs.setLocalM2FolderPath(temp); } //Initial SNOMED CT check this.snomedSelectedProperty.set(LookupService.get().getService(DescriptionIndexer.class) .query("theophobia", 0).size() > 0); this.collabSelectedProperty.set(LookupService.get().getService(DescriptionIndexer.class) .query("O2 Ct RA.high-sCnc", 0).size() > 0); //Initial display to UI if SNOMED CT is present from prior import if(this.snomedSelectedProperty.get() == false){ this.textImportMessage.setText(this.snomedCTRequiredText); this.importDataButton.setDisable(true); }else{ this.textImportMessage.setText(this.importIsReadyText); this.importDataButton.setDisable(false); } //Check to see, after each file is added, if dependencies are being met :) this.filesProperty.addListener((observable, oldValue, newValue) -> { observable.getValue().stream() .map(File::getName) .map(String::toLowerCase) .forEach(name ->{ if(!snomedSelectedProperty.get() && name.contains("snomedct_internationalrf2_production_20170731t150000z.zip")){ this.snomedSelectedProperty.set(true); } else if(!loincSelectedProperty.get() && (name.contains("loinc_") && name.contains("_text.zip")) ){ this.loincSelectedProperty.set(true); } else if(!collabSelectedProperty.get() && name.contains("snomedct_loincrf2_production_20170831t120000z.zip")){ this.collabSelectedProperty.set(true); } }); if(!this.snomedSelectedProperty.get()){ this.textImportMessage.setText(this.snomedCTRequiredText); this.importDataButton.setDisable(true); }else{ if(this.loincSelectedProperty.get()){ if(!this.collabSelectedProperty.get()){ this.textImportMessage.setText(this.loincSNOMEDCollabRequiredText); this.importDataButton.setDisable(true); }else{ this.textImportMessage.setText(this.importIsReadyText); this.importDataButton.setDisable(false); } }else { this.textImportMessage.setText(this.importIsReadyText); this.importDataButton.setDisable(false); } } }); } private void importTypeChanged(SelectedImportType importType) { setupEntryTree(); } public Stage getImportStage() { return importStage; } public void setImportStage(Stage importStage) { this.importStage = importStage; } void setManifold(Manifold manifold) { this.manifold = manifold; } }
Fixed bug with canceling file selection and crashing the viewer (can't add null list to SimpleListProperty). Also removed incorrect attempt at searching for valid Loinc concept.
komet/gui-contracts/src/main/java/sh/komet/gui/importation/ImportViewController.java
Fixed bug with canceling file selection and crashing the viewer (can't add null list to SimpleListProperty). Also removed incorrect attempt at searching for valid Loinc concept.
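The crash described above comes from `FileChooser.showOpenMultipleDialog`, which returns null when the user cancels the dialog, being passed straight to `SimpleListProperty.addAll`; the committed `addFiles` now guards with `if(files != null)`. The short JavaFX sketch below shows the same cancel-safe pattern in isolation; the `FilePicker` class is hypothetical and only the null check mirrors the actual fix.

import javafx.application.Application;
import javafx.application.Platform;
import javafx.beans.property.SimpleListProperty;
import javafx.collections.FXCollections;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import java.io.File;
import java.util.List;

// Minimal sketch of cancel-safe multi-file selection: showOpenMultipleDialog
// returns null on cancel, so the result must be checked before addAll().
public class FilePicker extends Application {
    private final SimpleListProperty<File> files =
            new SimpleListProperty<>(FXCollections.observableArrayList());

    @Override
    public void start(Stage stage) {
        FileChooser chooser = new FileChooser();
        chooser.getExtensionFilters().add(
                new FileChooser.ExtensionFilter("Zip files", "*.zip"));

        List<File> picked = chooser.showOpenMultipleDialog(stage);
        if (picked != null) {          // null means the dialog was canceled
            files.addAll(picked);      // addAll(null) would throw and crash the view
        }
        System.out.println("selected " + files.size() + " file(s)");
        Platform.exit();               // shut the toolkit down for this demo
    }

    public static void main(String[] args) {
        launch(args);
    }
}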
Java
apache-2.0
368e5aaee63b6d581be570dbb7d394413669db71
0
sameerak/jaggery,maheshika/jaggery,rasika90/jaggery,wso2/product-jaggery,hmrajas/jaggery,wso2/product-jaggery,hevayo/jaggery,cnapagoda/jaggery,hmrajas/jaggery,hevayo/jaggery,rasika90/jaggery,DMHP/jaggery,rasika90/jaggery,Niranjan-K/jaggery,Niranjan-K/jaggery,Niranjan-K/jaggery,rasika/jaggery,wso2/jaggery,manuranga/product-jaggery,maheshika/jaggery,hmrajas/jaggery,cnapagoda/jaggery,thusithathilina/jaggery,lalankea/product-jaggery,charithag/product-jaggery,DMHP/jaggery,hevayo/jaggery,wso2/product-jaggery,sameerak/jaggery,rasika/jaggery,lalankea/product-jaggery,thusithathilina/jaggery,rasika/jaggery,charithag/product-jaggery,wso2/jaggery,maheshika/jaggery,manuranga/product-jaggery,wso2/jaggery,thusithathilina/jaggery,manuranga/product-jaggery,wso2/product-jaggery,sameerak/jaggery,charithag/product-jaggery,charithag/product-jaggery,lalankea/product-jaggery,lalankea/product-jaggery,DMHP/jaggery,manuranga/product-jaggery,cnapagoda/jaggery
package org.jaggeryjs.hostobjects.db; import com.google.gson.Gson; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jaggeryjs.scriptengine.engine.RhinoEngine; import org.jaggeryjs.scriptengine.exceptions.ScriptException; import org.jaggeryjs.scriptengine.util.HostObjectUtil; import org.mozilla.javascript.Context; import org.mozilla.javascript.ContextFactory; import org.mozilla.javascript.Function; import org.mozilla.javascript.NativeArray; import org.mozilla.javascript.NativeObject; import org.mozilla.javascript.Scriptable; import org.mozilla.javascript.ScriptableObject; import org.wso2.carbon.ndatasource.common.DataSourceException; import org.wso2.carbon.ndatasource.rdbms.RDBMSConfiguration; import org.wso2.carbon.ndatasource.rdbms.RDBMSDataSource; import org.wso2.carbon.ndatasource.core.CarbonDataSource; import org.wso2.carbon.ndatasource.core.DataSourceManager; import javax.sql.DataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Savepoint; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; public class DatabaseHostObject extends ScriptableObject { private static final Log log = LogFactory.getLog(DatabaseHostObject.class); private static final String hostObjectName = "Database"; public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver"; public static final String ORG_H2_DRIVER = "org.h2.Driver"; public static final String ORACLE_JDBC_ORACLE_DRIVER = "oracle.jdbc.OracleDriver"; public static final String MYSQL = "jdbc:mysql"; public static final String H2 = "jdbc:h2"; public static final String ORACLE = "jdbc:oracle"; private boolean autoCommit = true; private Context context = null; private Connection conn = null; static RDBMSDataSource rdbmsDataSource = null; private Map<String, Savepoint> savePoints = new HashMap<String, Savepoint>(); public DatabaseHostObject() { } @Override public String getClassName() { return hostObjectName; } public static Scriptable jsConstructor(Context cx, Object[] args, Function ctorObj, boolean inNewExpr) throws ScriptException { int argsCount = args.length; DatabaseHostObject db = new DatabaseHostObject(); //args count 1 for dataSource name if (argsCount !=1 && argsCount != 3 && argsCount != 4) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, hostObjectName, argsCount, true); } if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, hostObjectName, "1", "string", args[0], true); } if(argsCount == 1){ String dataSourceName = (String) args[0]; DataSourceManager dataSourceManager = new DataSourceManager(); try { CarbonDataSource carbonDataSource = dataSourceManager.getInstance().getDataSourceRepository().getDataSource(dataSourceName); DataSource dataSource = (DataSource)carbonDataSource.getDSObject(); db.conn = dataSource.getConnection(); db.context = cx; return db; } catch (DataSourceException e) { log.error("Failed to access datasource " + dataSourceName, e); } catch (SQLException e) { log.error("Failed to get connection" ,e); } } if (!(args[1] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, hostObjectName, "2", "string", args[1], true); } if (!(args[2] instanceof String) && 
!(args[2] instanceof Integer)) { HostObjectUtil.invalidArgsError(hostObjectName, hostObjectName, "3", "string", args[2], true); } NativeObject configs = null; if (argsCount == 4) { if (!(args[3] instanceof NativeObject)) { HostObjectUtil.invalidArgsError(hostObjectName, hostObjectName, "4", "object", args[3], true); } configs = (NativeObject) args[3]; } String dbUrl = (String) args[0]; RDBMSConfiguration rdbmsConfig = new RDBMSConfiguration(); try { if (configs != null) { Gson gson = new Gson(); rdbmsConfig = gson.fromJson(HostObjectUtil.serializeJSON(configs), RDBMSConfiguration.class); } if (rdbmsConfig.getDriverClassName() == null || rdbmsConfig.getDriverClassName().equals("")) { rdbmsConfig.setDriverClassName(getDriverClassName(dbUrl)); } rdbmsConfig.setUsername((String) args[1]); rdbmsConfig.setPassword((String) args[2]); rdbmsConfig.setUrl(dbUrl); try { rdbmsDataSource = new RDBMSDataSource(rdbmsConfig); } catch (DataSourceException e) { throw new ScriptException(e); } db.conn = rdbmsDataSource.getDataSource().getConnection(); db.context = cx; return db; } catch (SQLException e) { String msg = "Error connecting to the database : " + dbUrl; log.warn(msg, e); throw new ScriptException(msg, e); } } private static String getDriverClassName(String dburl) { if (dburl.contains(MYSQL)) { return COM_MYSQL_JDBC_DRIVER; } else if (dburl.contains(H2)) { return ORG_H2_DRIVER; } else if (dburl.contains(ORACLE)) { return ORACLE_JDBC_ORACLE_DRIVER; } else { return null; } } public boolean jsGet_autoCommit() throws ScriptException { return this.autoCommit; } public void jsSet_autoCommit(Object object) throws ScriptException { if (!(object instanceof Boolean)) { HostObjectUtil.invalidProperty(hostObjectName, "autoCommit", "boolean", object); } this.autoCommit = (Boolean) object; } public static Object jsFunction_query(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException, SQLException { String functionName = "query"; int argsCount = args.length; if (argsCount == 0) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } DatabaseHostObject db = (DatabaseHostObject) thisObj; String query; if (argsCount == 1) { //query Function callback = null; if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } query = (String) args[0]; PreparedStatement stmt = db.conn.prepareStatement(query); return executeQuery(cx, db, stmt, query, callback, true); } else if (argsCount == 2) { if (!(args[0] instanceof String)) { //batch Function callback = null; if (!(args[0] instanceof NativeArray)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } NativeArray queries = (NativeArray) args[0]; NativeArray values = null; if (args[1] instanceof Function) { callback = (Function) args[1]; } else if (args[1] instanceof NativeArray) { values = (NativeArray) args[1]; } else { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "2", "array | function", args[0], false); } return executeBatch(cx, db, queries, values, callback); } else { //query Function callback = null; query = (String) args[0]; PreparedStatement stmt = db.conn.prepareStatement(query); if (args[1] instanceof Function) { callback = (Function) args[1]; } else if (args[1] instanceof String) { setQueryParams(stmt, args, 1, argsCount); } return executeQuery(cx, db, stmt, query, callback, true); } } else if (argsCount == 3) { if (!(args[0] instanceof String)) { //batch Function 
callback = null; if (!(args[0] instanceof NativeArray)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "array", args[0], false); } if (!(args[1] instanceof NativeArray)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "2", "array", args[1], false); } if (!(args[2] instanceof Function)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "3", "function", args[2], false); } NativeArray queries = (NativeArray) args[0]; NativeArray values = (NativeArray) args[1]; callback = (Function) args[2]; return executeBatch(cx, db, queries, values, callback); } else { //query Function callback = null; query = (String) args[0]; PreparedStatement stmt = db.conn.prepareStatement(query); if (args[2] instanceof Function) { callback = (Function) args[2]; setQueryParams(stmt, args, 1, 1); } else { setQueryParams(stmt, args, 1, 2); } return executeQuery(cx, db, stmt, query, callback, true); } } else { //args count > 3 if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } Function callback = null; query = (String) args[0]; PreparedStatement stmt = db.conn.prepareStatement(query); if (args[argsCount - 1] instanceof Function) { callback = (Function) args[argsCount - 1]; setQueryParams(stmt, args, 1, argsCount - 1); } else { setQueryParams(stmt, args, 1, argsCount); } return executeQuery(cx, db, stmt, query, callback, true); } } public static String jsFunction_savePoint(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException, SQLException { String functionName = "savePoint"; int argsCount = args.length; String savePoint; if (argsCount == 0) { savePoint = UUID.randomUUID().toString(); } else { if (argsCount != 1) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } savePoint = (String) args[0]; } DatabaseHostObject db = (DatabaseHostObject) thisObj; db.savePoints.put(savePoint, db.conn.setSavepoint(savePoint)); return savePoint; } public static void jsFunction_releasePoint(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException { String functionName = "releasePoint"; int argsCount = args.length; if (argsCount != 1) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } String savePoint = (String) args[0]; DatabaseHostObject db = (DatabaseHostObject) thisObj; try { db.conn.releaseSavepoint(db.savePoints.remove(savePoint)); } catch (SQLException e) { String msg = "Error while releasing the savepoint : " + savePoint; log.warn(msg, e); throw new ScriptException(msg, e); } } public static void jsFunction_rollback(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException { String functionName = "rollback"; int argsCount = args.length; if (argsCount > 1) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } String savePoint = null; if (argsCount == 1) { if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } savePoint = (String) args[0]; } DatabaseHostObject db = (DatabaseHostObject) thisObj; if (savePoint != null) { try { db.conn.rollback(db.savePoints.get(savePoint)); } 
catch (SQLException e) { String msg = "Error while rolling back the transaction to savepoint : " + savePoint; log.warn(msg, e); throw new ScriptException(msg, e); } } else { try { db.conn.rollback(); } catch (SQLException e) { String msg = "Error while rolling back the transaction"; log.warn(msg, e); throw new ScriptException(msg, e); } } } public static void jsFunction_commit(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException { String functionName = "commit"; int argsCount = args.length; if (argsCount > 0) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } DatabaseHostObject db = (DatabaseHostObject) thisObj; try { db.conn.commit(); } catch (SQLException e) { String msg = "Error while committing the transaction"; log.warn(msg, e); throw new ScriptException(msg, e); } } public static void jsFunction_close(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException { String functionName = "c"; int argsCount = args.length; if (argsCount > 0) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } DatabaseHostObject db = (DatabaseHostObject) thisObj; try { db.conn.close(); if(rdbmsDataSource!=null){ rdbmsDataSource.getDataSource().close(); } } catch (SQLException e) { String msg = "Error while closing the Database Connection"; log.warn(msg, e); throw new ScriptException(msg, e); } } private static String replaceWildcards(DatabaseHostObject db, String query, NativeArray params) throws SQLException { String openedChar = null; String lastChar = null; StringBuffer newQuery = new StringBuffer(); int paramIndex = 0; for (int i = 0; i < query.length(); i++) { String c = Character.toString(query.charAt(i)); if (lastChar == null) { lastChar = c; if (c.equals("'") || c.equals("\"")) { openedChar = c; } newQuery.append(c); continue; } if (c.equals("'")) { if (openedChar == null) { openedChar = c; } else if (openedChar.equals(c)) { if (!lastChar.equals("\\")) { //closing reached openedChar = null; } } } else if (c.equals("\"")) { if (openedChar == null) { openedChar = c; } else if (openedChar.equals(c)) { if (!lastChar.equals("\\")) { //closing reached openedChar = null; } } } else if (c.equals("?")) { if (openedChar == null) { //replace ? newQuery.append(HostObjectUtil.serializeObject(params.get(paramIndex, db))); paramIndex++; continue; } else if (lastChar.equals("'")) { if (openedChar.equals("'")) { String nextChart = Character.toString(query.charAt(i + 1)); if (nextChart.equals("'")) { //replace '?' newQuery.append(HostObjectUtil.serializeObject(params.get(paramIndex, db))); continue; } } } else if (lastChar.equals("\"")) { if (openedChar.equals("\"")) { String nextChart = Character.toString(query.charAt(i + 1)); if (nextChart.equals("\"")) { //replace '?' 
newQuery.append(HostObjectUtil.serializeObject(params.get(paramIndex, db))); continue; } } } } newQuery.append(c); lastChar = c; } return newQuery.toString(); } private static void setQueryParams(PreparedStatement stmt, Object[] params, int from, int to) throws SQLException { for (int i = from; i < to + 1; i++) { setQueryParam(stmt, params[i], i); } } private static void setQueryParam(PreparedStatement stmt, Object obj, int index) throws SQLException { if (obj instanceof String) { stmt.setString(index, (String) obj); } else if (obj instanceof Integer) { stmt.setInt(index, (Integer) obj); } else if (obj instanceof Double) { stmt.setDouble(index, (Double) obj); } else { stmt.setString(index, HostObjectUtil.serializeObject(obj)); } } private static Object executeQuery(Context cx, final DatabaseHostObject db, final PreparedStatement stmt, String query, final Function callback, final boolean keyed) throws ScriptException { String regex = "^[\\s\\t\\r\\n]*[Ss][Ee][Ll][Ee][Cc][Tt].*";//select final boolean isSelect = query.matches(regex); if (callback != null) { final ContextFactory factory = cx.getFactory(); final ExecutorService es = Executors.newSingleThreadExecutor(); es.submit(new Callable() { public Object call() throws Exception { RhinoEngine.enterContext(factory); try { Object result; if (isSelect) { result = processResults(db, stmt.executeQuery(), keyed); } else { result = stmt.executeUpdate(); } callback.call(db.context, db, db, new Object[]{result}); } catch (SQLException e) { log.warn(e); } finally { es.shutdown(); RhinoEngine.exitContext(); } return null; } }); return null; } else { try { if (isSelect) { return processResults(db, stmt.executeQuery(), keyed); } else { return stmt.executeUpdate(); } } catch (SQLException e) { log.warn(e); throw new ScriptException(e); } } } private static Object executeBatch(Context cx, final DatabaseHostObject db, NativeArray queries, NativeArray params, final Function callback) throws ScriptException, SQLException { if (params != null && (queries.getLength() != params.getLength())) { String msg = "Query array and values array should be in the same size. HostObject : " + hostObjectName + ", Method : query"; log.warn(msg); throw new ScriptException(msg); } final Statement stmt = db.conn.createStatement(); for (int index : (Integer[]) queries.getIds()) { Object obj = queries.get(index, db); if (!(obj instanceof String)) { String msg = "Invalid query type : " + obj.toString() + ". 
Query should be a string"; log.warn(msg); throw new ScriptException(msg); } String query = (String) obj; if (params != null) { Object valObj = params.get(index, db); if (!(valObj instanceof NativeArray)) { String msg = "Invalid value type : " + obj.toString() + " for the query " + query; log.warn(msg); throw new ScriptException(msg); } query = replaceWildcards(db, query, (NativeArray) valObj); } stmt.addBatch(query); } if (callback != null) { final ContextFactory factory = cx.getFactory(); final ExecutorService es = Executors.newSingleThreadExecutor(); es.submit(new Callable() { public Object call() throws Exception { RhinoEngine.enterContext(factory); try { int[] result = stmt.executeBatch(); callback.call(db.context, db, db, new Object[]{result}); } catch (SQLException e) { log.warn(e); } finally { es.shutdown(); RhinoEngine.exitContext(); } return null; } }); return null; } else { return stmt.executeBatch(); } } private static Scriptable processResults(DatabaseHostObject db, ResultSet results, boolean keyed) throws SQLException, ScriptException { List<ScriptableObject> rows = new ArrayList<ScriptableObject>(); while (results.next()) { ScriptableObject row; ResultSetMetaData rsmd = results.getMetaData(); if (keyed) { row = new NativeObject(); for (int i = 0; i < rsmd.getColumnCount(); i++) { String columnName = rsmd.getColumnName(i + 1); Object columnValue = getValue(db, results, i + 1, rsmd.getColumnType(i + 1)); row.put(columnName, row, columnValue); } } else { row = new NativeArray(rsmd.getColumnCount()); for (int i = 0; i < rsmd.getColumnCount(); i++) { Object columnValue = getValue(db, results, i + 1, rsmd.getColumnType(i + 1)); row.put(i + 1, row, columnValue); } } rows.add(row); } return db.context.newArray(db, rows.toArray()); } private static Object getValue(DatabaseHostObject db, ResultSet results, int index, int type) throws SQLException, ScriptException { Context cx = db.context; //TODO : implement for other sql types switch (type) { case Types.ARRAY: return cx.newArray(db, new Object[]{results.getArray(index)}); case Types.BIGINT: return results.getBigDecimal(index).toPlainString(); case Types.BINARY: return HostObjectUtil.streamToString(results.getBinaryStream(index)); default: return Context.javaToJS(results.getObject(index), db); } } }
components/hostobjects/org.jaggeryjs.hostobjects.db/src/main/java/org/jaggeryjs/hostobjects/db/DatabaseHostObject.java
package org.jaggeryjs.hostobjects.db; import com.google.gson.Gson; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jaggeryjs.scriptengine.engine.RhinoEngine; import org.jaggeryjs.scriptengine.exceptions.ScriptException; import org.jaggeryjs.scriptengine.util.HostObjectUtil; import org.mozilla.javascript.Context; import org.mozilla.javascript.ContextFactory; import org.mozilla.javascript.Function; import org.mozilla.javascript.NativeArray; import org.mozilla.javascript.NativeObject; import org.mozilla.javascript.Scriptable; import org.mozilla.javascript.ScriptableObject; import org.wso2.carbon.ndatasource.common.DataSourceException; import org.wso2.carbon.ndatasource.rdbms.RDBMSConfiguration; import org.wso2.carbon.ndatasource.rdbms.RDBMSDataSource; import org.wso2.carbon.ndatasource.core.CarbonDataSource; import org.wso2.carbon.ndatasource.core.DataSourceManager; import javax.sql.DataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Savepoint; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; public class DatabaseHostObject extends ScriptableObject { private static final Log log = LogFactory.getLog(DatabaseHostObject.class); private static final String hostObjectName = "Database"; public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver"; public static final String ORG_H2_DRIVER = "org.h2.Driver"; public static final String ORACLE_JDBC_ORACLE_DRIVER = "oracle.jdbc.OracleDriver"; public static final String MYSQL = "jdbc:mysql"; public static final String H2 = "jdbc:h2"; public static final String ORACLE = "jdbc:oracle"; private boolean autoCommit = true; private Context context = null; private Connection conn = null; static RDBMSDataSource rdbmsDataSource = null; private Map<String, Savepoint> savePoints = new HashMap<String, Savepoint>(); public DatabaseHostObject() { } @Override public String getClassName() { return hostObjectName; } public static Scriptable jsConstructor(Context cx, Object[] args, Function ctorObj, boolean inNewExpr) throws ScriptException { int argsCount = args.length; DatabaseHostObject db = new DatabaseHostObject(); //args count 1 for dataSource name if (argsCount !=1 && argsCount != 3 && argsCount != 4) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, hostObjectName, argsCount, true); } if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, hostObjectName, "1", "string", args[0], true); } if(argsCount == 1){ String dataSourceName = (String) args[0]; DataSourceManager dataSourceManager = new DataSourceManager(); try { CarbonDataSource carbonDataSource = dataSourceManager.getInstance().getDataSourceRepository().getDataSource(dataSourceName); DataSource dataSource = (DataSource)carbonDataSource.getDSObject(); db.conn = dataSource.getConnection(); db.context = cx; return db; } catch (DataSourceException e) { log.error("Failed to access datasource " + dataSourceName, e); } catch (SQLException e) { log.error("Failed to get connection" ,e); } } if (!(args[1] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, hostObjectName, "2", "string", args[1], true); } if (!(args[2] instanceof String) && 
!(args[2] instanceof Integer)) { HostObjectUtil.invalidArgsError(hostObjectName, hostObjectName, "3", "string", args[2], true); } NativeObject configs = null; if (argsCount == 4) { if (!(args[3] instanceof NativeObject)) { HostObjectUtil.invalidArgsError(hostObjectName, hostObjectName, "4", "object", args[3], true); } configs = (NativeObject) args[3]; } String dbUrl = (String) args[0]; RDBMSConfiguration rdbmsConfig = new RDBMSConfiguration(); try { if (configs != null) { Gson gson = new Gson(); rdbmsConfig = gson.fromJson(HostObjectUtil.serializeJSON(configs), RDBMSConfiguration.class); } if (rdbmsConfig.getDriverClassName() == null || rdbmsConfig.getDriverClassName().equals("")) { rdbmsConfig.setDriverClassName(getDriverClassName(dbUrl)); } rdbmsConfig.setUsername((String) args[1]); rdbmsConfig.setPassword((String) args[2]); rdbmsConfig.setUrl(dbUrl); try { rdbmsDataSource = new RDBMSDataSource(rdbmsConfig); } catch (DataSourceException e) { throw new ScriptException(e); } db.conn = rdbmsDataSource.getDataSource().getConnection(); db.context = cx; return db; } catch (SQLException e) { String msg = "Error connecting to the database : " + dbUrl; log.warn(msg, e); throw new ScriptException(msg, e); } } private static String getDriverClassName(String dburl) { if (dburl.contains(MYSQL)) { return COM_MYSQL_JDBC_DRIVER; } else if (dburl.contains(H2)) { return ORG_H2_DRIVER; } else if (dburl.contains(ORACLE)) { return ORACLE_JDBC_ORACLE_DRIVER; } else { return null; } } public boolean jsGet_autoCommit() throws ScriptException { return this.autoCommit; } public void jsSet_autoCommit(Object object) throws ScriptException { if (!(object instanceof Boolean)) { HostObjectUtil.invalidProperty(hostObjectName, "autoCommit", "boolean", object); } this.autoCommit = (Boolean) object; } public static Object jsFunction_query(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException, SQLException { String functionName = "query"; int argsCount = args.length; if (argsCount == 0) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } DatabaseHostObject db = (DatabaseHostObject) thisObj; String query; if (argsCount == 1) { //query Function callback = null; if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } query = (String) args[0]; PreparedStatement stmt = db.conn.prepareStatement(query); return executeQuery(cx, db, stmt, query, callback, true); } else if (argsCount == 2) { if (!(args[0] instanceof String)) { //batch Function callback = null; if (!(args[0] instanceof NativeArray)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } NativeArray queries = (NativeArray) args[0]; NativeArray values = null; if (args[1] instanceof Function) { callback = (Function) args[1]; } else if (args[1] instanceof NativeArray) { values = (NativeArray) args[1]; } else { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "2", "array | function", args[0], false); } return executeBatch(cx, db, queries, values, callback); } else { //query Function callback = null; query = (String) args[0]; PreparedStatement stmt = db.conn.prepareStatement(query); if (args[1] instanceof Function) { callback = (Function) args[1]; } else if (args[1] instanceof String) { setQueryParams(stmt, args, 1, argsCount); } return executeQuery(cx, db, stmt, query, callback, true); } } else if (argsCount == 3) { if (!(args[0] instanceof String)) { //batch Function 
callback = null; if (!(args[0] instanceof NativeArray)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "array", args[0], false); } if (!(args[1] instanceof NativeArray)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "2", "array", args[1], false); } if (!(args[2] instanceof Function)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "3", "function", args[2], false); } NativeArray queries = (NativeArray) args[0]; NativeArray values = (NativeArray) args[1]; callback = (Function) args[2]; return executeBatch(cx, db, queries, values, callback); } else { //query Function callback = null; query = (String) args[0]; PreparedStatement stmt = db.conn.prepareStatement(query); if (args[2] instanceof Function) { callback = (Function) args[2]; setQueryParams(stmt, args, 1, 1); } else { setQueryParams(stmt, args, 1, 2); } return executeQuery(cx, db, stmt, query, callback, true); } } else { //args count > 3 if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } Function callback = null; query = (String) args[0]; PreparedStatement stmt = db.conn.prepareStatement(query); if (args[argsCount - 1] instanceof Function) { callback = (Function) args[argsCount - 1]; setQueryParams(stmt, args, 1, argsCount - 1); } else { setQueryParams(stmt, args, 1, argsCount); } return executeQuery(cx, db, stmt, query, callback, true); } } public static String jsFunction_savePoint(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException, SQLException { String functionName = "savePoint"; int argsCount = args.length; String savePoint; if (argsCount == 0) { savePoint = UUID.randomUUID().toString(); } else { if (argsCount != 1) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } savePoint = (String) args[0]; } DatabaseHostObject db = (DatabaseHostObject) thisObj; db.savePoints.put(savePoint, db.conn.setSavepoint(savePoint)); return savePoint; } public static void jsFunction_releasePoint(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException { String functionName = "releasePoint"; int argsCount = args.length; if (argsCount != 1) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } String savePoint = (String) args[0]; DatabaseHostObject db = (DatabaseHostObject) thisObj; try { db.conn.releaseSavepoint(db.savePoints.remove(savePoint)); } catch (SQLException e) { String msg = "Error while releasing the savepoint : " + savePoint; log.warn(msg, e); throw new ScriptException(msg, e); } } public static void jsFunction_rollback(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException { String functionName = "rollback"; int argsCount = args.length; if (argsCount > 1) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } String savePoint = null; if (argsCount == 1) { if (!(args[0] instanceof String)) { HostObjectUtil.invalidArgsError(hostObjectName, functionName, "1", "string", args[0], false); } savePoint = (String) args[0]; } DatabaseHostObject db = (DatabaseHostObject) thisObj; if (savePoint != null) { try { db.conn.rollback(db.savePoints.get(savePoint)); } 
catch (SQLException e) { String msg = "Error while rolling back the transaction to savepoint : " + savePoint; log.warn(msg, e); throw new ScriptException(msg, e); } } else { try { db.conn.rollback(); } catch (SQLException e) { String msg = "Error while rolling back the transaction"; log.warn(msg, e); throw new ScriptException(msg, e); } } } public static void jsFunction_commit(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException { String functionName = "commit"; int argsCount = args.length; if (argsCount > 0) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } DatabaseHostObject db = (DatabaseHostObject) thisObj; try { db.conn.commit(); } catch (SQLException e) { String msg = "Error while committing the transaction"; log.warn(msg, e); throw new ScriptException(msg, e); } } public static void jsFunction_close(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws ScriptException { String functionName = "c"; int argsCount = args.length; if (argsCount > 0) { HostObjectUtil.invalidNumberOfArgs(hostObjectName, functionName, argsCount, false); } DatabaseHostObject db = (DatabaseHostObject) thisObj; try { db.conn.close(); rdbmsDataSource.getDataSource().close(); } catch (SQLException e) { String msg = "Error while closing the Database Connection"; log.warn(msg, e); throw new ScriptException(msg, e); } } private static String replaceWildcards(DatabaseHostObject db, String query, NativeArray params) throws SQLException { String openedChar = null; String lastChar = null; StringBuffer newQuery = new StringBuffer(); int paramIndex = 0; for (int i = 0; i < query.length(); i++) { String c = Character.toString(query.charAt(i)); if (lastChar == null) { lastChar = c; if (c.equals("'") || c.equals("\"")) { openedChar = c; } newQuery.append(c); continue; } if (c.equals("'")) { if (openedChar == null) { openedChar = c; } else if (openedChar.equals(c)) { if (!lastChar.equals("\\")) { //closing reached openedChar = null; } } } else if (c.equals("\"")) { if (openedChar == null) { openedChar = c; } else if (openedChar.equals(c)) { if (!lastChar.equals("\\")) { //closing reached openedChar = null; } } } else if (c.equals("?")) { if (openedChar == null) { //replace ? newQuery.append(HostObjectUtil.serializeObject(params.get(paramIndex, db))); paramIndex++; continue; } else if (lastChar.equals("'")) { if (openedChar.equals("'")) { String nextChart = Character.toString(query.charAt(i + 1)); if (nextChart.equals("'")) { //replace '?' newQuery.append(HostObjectUtil.serializeObject(params.get(paramIndex, db))); continue; } } } else if (lastChar.equals("\"")) { if (openedChar.equals("\"")) { String nextChart = Character.toString(query.charAt(i + 1)); if (nextChart.equals("\"")) { //replace '?' 
newQuery.append(HostObjectUtil.serializeObject(params.get(paramIndex, db))); continue; } } } } newQuery.append(c); lastChar = c; } return newQuery.toString(); } private static void setQueryParams(PreparedStatement stmt, Object[] params, int from, int to) throws SQLException { for (int i = from; i < to + 1; i++) { setQueryParam(stmt, params[i], i); } } private static void setQueryParam(PreparedStatement stmt, Object obj, int index) throws SQLException { if (obj instanceof String) { stmt.setString(index, (String) obj); } else if (obj instanceof Integer) { stmt.setInt(index, (Integer) obj); } else if (obj instanceof Double) { stmt.setDouble(index, (Double) obj); } else { stmt.setString(index, HostObjectUtil.serializeObject(obj)); } } private static Object executeQuery(Context cx, final DatabaseHostObject db, final PreparedStatement stmt, String query, final Function callback, final boolean keyed) throws ScriptException { String regex = "^[\\s\\t\\r\\n]*[Ss][Ee][Ll][Ee][Cc][Tt].*";//select final boolean isSelect = query.matches(regex); if (callback != null) { final ContextFactory factory = cx.getFactory(); final ExecutorService es = Executors.newSingleThreadExecutor(); es.submit(new Callable() { public Object call() throws Exception { RhinoEngine.enterContext(factory); try { Object result; if (isSelect) { result = processResults(db, stmt.executeQuery(), keyed); } else { result = stmt.executeUpdate(); } callback.call(db.context, db, db, new Object[]{result}); } catch (SQLException e) { log.warn(e); } finally { es.shutdown(); RhinoEngine.exitContext(); } return null; } }); return null; } else { try { if (isSelect) { return processResults(db, stmt.executeQuery(), keyed); } else { return stmt.executeUpdate(); } } catch (SQLException e) { log.warn(e); throw new ScriptException(e); } } } private static Object executeBatch(Context cx, final DatabaseHostObject db, NativeArray queries, NativeArray params, final Function callback) throws ScriptException, SQLException { if (params != null && (queries.getLength() != params.getLength())) { String msg = "Query array and values array should be in the same size. HostObject : " + hostObjectName + ", Method : query"; log.warn(msg); throw new ScriptException(msg); } final Statement stmt = db.conn.createStatement(); for (int index : (Integer[]) queries.getIds()) { Object obj = queries.get(index, db); if (!(obj instanceof String)) { String msg = "Invalid query type : " + obj.toString() + ". 
Query should be a string"; log.warn(msg); throw new ScriptException(msg); } String query = (String) obj; if (params != null) { Object valObj = params.get(index, db); if (!(valObj instanceof NativeArray)) { String msg = "Invalid value type : " + obj.toString() + " for the query " + query; log.warn(msg); throw new ScriptException(msg); } query = replaceWildcards(db, query, (NativeArray) valObj); } stmt.addBatch(query); } if (callback != null) { final ContextFactory factory = cx.getFactory(); final ExecutorService es = Executors.newSingleThreadExecutor(); es.submit(new Callable() { public Object call() throws Exception { RhinoEngine.enterContext(factory); try { int[] result = stmt.executeBatch(); callback.call(db.context, db, db, new Object[]{result}); } catch (SQLException e) { log.warn(e); } finally { es.shutdown(); RhinoEngine.exitContext(); } return null; } }); return null; } else { return stmt.executeBatch(); } } private static Scriptable processResults(DatabaseHostObject db, ResultSet results, boolean keyed) throws SQLException, ScriptException { List<ScriptableObject> rows = new ArrayList<ScriptableObject>(); while (results.next()) { ScriptableObject row; ResultSetMetaData rsmd = results.getMetaData(); if (keyed) { row = new NativeObject(); for (int i = 0; i < rsmd.getColumnCount(); i++) { String columnName = rsmd.getColumnName(i + 1); Object columnValue = getValue(db, results, i + 1, rsmd.getColumnType(i + 1)); row.put(columnName, row, columnValue); } } else { row = new NativeArray(rsmd.getColumnCount()); for (int i = 0; i < rsmd.getColumnCount(); i++) { Object columnValue = getValue(db, results, i + 1, rsmd.getColumnType(i + 1)); row.put(i + 1, row, columnValue); } } rows.add(row); } return db.context.newArray(db, rows.toArray()); } private static Object getValue(DatabaseHostObject db, ResultSet results, int index, int type) throws SQLException, ScriptException { Context cx = db.context; //TODO : implement for other sql types switch (type) { case Types.ARRAY: return cx.newArray(db, new Object[]{results.getArray(index)}); case Types.BIGINT: return results.getBigDecimal(index).toPlainString(); case Types.BINARY: return HostObjectUtil.streamToString(results.getBinaryStream(index)); default: return Context.javaToJS(results.getObject(index), db); } } }
adding null check for db.close()
components/hostobjects/org.jaggeryjs.hostobjects.db/src/main/java/org/jaggeryjs/hostobjects/db/DatabaseHostObject.java
adding null check for db.close()
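Editor's note (added, not part of the record above): the commit fixes jsFunction_close so that the pooled datasource is shut down only when one was actually created; it stays null when the host object is built from a named Carbon datasource, so an unconditional close could throw a NullPointerException. A minimal, self-contained sketch of that guarded-close pattern follows. It uses plain JDBC types instead of the Jaggery/Carbon classes, the class and field names are illustrative, and an H2 driver on the classpath is assumed for the main method.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class GuardedClose {

    private final Connection conn;
    private final AutoCloseable pool; // stays null on the named-datasource path, like rdbmsDataSource above

    public GuardedClose(String url) throws SQLException {
        this.conn = DriverManager.getConnection(url);
        this.pool = null; // no pool was created for this connection
    }

    public void close() {
        try {
            conn.close();
            // The essence of the fix: only shut the pool down if it exists.
            if (pool != null) {
                pool.close();
            }
        } catch (Exception e) {
            throw new RuntimeException("Error while closing the database connection", e);
        }
    }

    public static void main(String[] args) throws SQLException {
        GuardedClose db = new GuardedClose("jdbc:h2:mem:demo");
        db.close(); // no NPE even though no pool was ever created
    }
}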
Java
apache-2.0
b625fce0ffab3336973f438916122babc4c383eb
0
claremontqualitymanagement/TestAutomationFramework,claremontqualitymanagement/TestAutomationFramework,claremontqualitymanagement/TestAutomationFramework
package se.claremont.tools; import java.io.File; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by magnusolsson on 2016-09-21. * * Singelton class for Utils stuff. * */ public class Utils { final static Logger logger = LoggerFactory.getLogger( Utils.class ); private static Utils instance = null; protected Utils() { } public static Utils getInstance() { if( instance == null) instance = new Utils(); return instance; } /** * * @return os for running jvm */ public String getOS() { String os = System.getProperty("os.name"); logger.debug("JVM is running on OS: {}.", os); return os; } /** * * @return gets the separator for the default filesystem */ public String FileSeparator() { return System.getProperty("file.separator"); } /** * @return User home directory path */ public String getUserHomeDirectory() { return System.getProperty("user.home"); } /** * @return User working directory path */ public String getUserWorkingDirectory() { return System.getProperty("user.dir"); } /** * * @return true if jvm is running on Mac OS X, otherwise false */ public boolean amIMacOS() { return getOS().toLowerCase().contains( "mac" ) ? true : false; } /** * * @return root directory */ public String getRootDirectory() { return File.listRoots()[0].getAbsolutePath(); } /** * Checks if pathToFile exists and is a file * @param pathToFile * @return true if file path exists and is file */ public boolean doesFileExists(String pathToFile) { try { File f = new File( pathToFile ); if( f.exists() && f.isFile() ) { return true; } } catch (Exception fe) { //System.err.println("You got problem: " + e.getStackTrace()); logger.debug( fe.getMessage() ); } return false; } public static void main(String[] args) { logger.debug( Utils.getInstance().getRootDirectory() ); logger.debug( Utils.getInstance().getOS() ); logger.debug( Utils.getInstance().getUserWorkingDirectory() ); } }
src/main/java/se/claremont/tools/Utils.java
package se.claremont.tools; import java.io.File; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by magnusolsson on 2016-09-21. * * Singelton class for Utils stuff. * */ public class Utils { final Logger logger = LoggerFactory.getLogger( Utils.class ); private static Utils instance = null; protected Utils() { } public static Utils getInstance() { if( instance == null) instance = new Utils(); return instance; } /** * * @return os for running jvm */ public String getOS() { String os = System.getProperty("os.name"); logger.debug("JVM is running on OS: {}.", os); return os; } /** * * @return gets the separator for the default filesystem */ public String FileSeparator() { return System.getProperty("file.separator"); } /** * @return User home directory path */ public String getUserHomeDirectory() { return System.getProperty("user.home"); } /** * @return User working directory path */ public String getUserWorkingDirectory() { return System.getProperty("user.dir"); } /** * * @return true if jvm is running on Mac OS X, otherwise false */ public boolean amIMacOS() { return getOS().toLowerCase().contains( "mac" ) ? true : false; } /** * * @return root directory */ public String getRootDirectory() { return File.listRoots()[0].getAbsolutePath(); } /** * Checks if pathToFile exists and is a file * @param pathToFile * @return true if file path exists and is file */ public boolean doesFileExists(String pathToFile) { try { File f = new File( pathToFile ); if( f.exists() && f.isFile() ) { return true; } } catch (Exception e) { //System.err.println("You got problem: " + e.getStackTrace()); } return false; } public static void main(String[] args) { System.out.println( Utils.getInstance().getRootDirectory() ); System.out.println( Utils.getInstance().getOS() ); System.out.println( Utils.getInstance().getUserWorkingDirectory() ); } }
latest
src/main/java/se/claremont/tools/Utils.java
latest
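Editor's note (added, not part of the record above): the "latest" commit makes the logger static and routes the former System.out / System.err output through SLF4J, including the previously swallowed exception in doesFileExists. A small stand-alone sketch of that logging idiom follows; the class name is illustrative and slf4j-api plus some binding on the classpath is assumed.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingIdiom {

    // One static logger per class, as in the updated Utils
    private static final Logger logger = LoggerFactory.getLogger(LoggingIdiom.class);

    public static void main(String[] args) {
        // Parameterized message: the string is only built if the debug level is enabled
        logger.debug("JVM is running on OS: {}.", System.getProperty("os.name"));
        try {
            throw new IllegalStateException("demo failure");
        } catch (Exception e) {
            // Passing the throwable keeps the stack trace; the record above logs only e.getMessage()
            logger.debug("You got a problem", e);
        }
    }
}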
Java
apache-2.0
e1f6027d03a79e0d21993a477171eec8c20931c6
0
babble/babble,babble/babble,babble/babble,babble/babble,babble/babble,babble/babble
// AppContextHolder.java /** * Copyright (C) 2008 10gen Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License, version 3, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package ed.appserver; import java.io.*; import java.util.*; import ed.js.*; import ed.net.*; import ed.net.httpserver.*; import ed.log.*; import ed.util.*; import ed.cloud.*; public class AppContextHolder { static boolean D = Boolean.getBoolean( "DEBUG.APP" ); static String OUR_DOMAINS[] = new String[]{ ".latenightcoders.com" , ".local.10gen.com" , ".10gen.com" }; static String CDN_HOST[] = new String[]{ "origin." , "origin-local." , "static." , "static-local." , "secure." }; static final Set<String> CDN_HOSTNAMES; static { Set<String> s = new HashSet<String>(); for ( String d : OUR_DOMAINS ) for ( String h : CDN_HOST ) s.add( (h + d).replaceAll( "\\.+" , "." ) ); CDN_HOSTNAMES = Collections.unmodifiableSet( s ); } private static final String LOCAL_BRANCH_LIST[] = new String[]{ "master" , "test" , "www" }; private static final String WWW_BRANCH_LIST[] = new String[]{ "test" , "master" }; public AppContextHolder( String defaultWebRoot , String root ){ _defaultWebRoot = defaultWebRoot; _root = root; _rootFile = _root == null ? null : new File( _root ); } public Result getContext( HttpRequest request ){ String host = request.getHeader( "X-Host" ); String uri = request.getURI(); if ( host != null ){ // if there is an X-Host, lets see if this is a cdn thing if ( CDN_HOSTNAMES.contains( request.getHost() ) && ! CDN_HOSTNAMES.contains( host ) && ! host.equals( request.getHost() ) // this should never happen, but is a weird case. ){ // X-Host was cleaned by someone else // so we need strip cdn thing from uri. 
int idx = uri.indexOf( "/" , 1 ); if ( idx > 0 ){ uri = uri.substring( idx ); } } } else { // no X-Host host = request.getHeader( "Host" ); } if ( host != null ){ int idx = host.indexOf( ":" ); if ( idx > 0 ) host = host.substring( 0 , idx ); } return getContext( host , uri ); } public Result getContext( String host , String uri ){ if ( host != null ) host = host.trim(); if ( D ) System.out.println( host + uri ); if ( host == null || _root == null || host.length() == 0 ){ if ( D ) System.out.println( "\t using default context for [" + host + "]" ); return new Result( _getDefaultContext() , host , uri ); } Info info = fixBase( host , uri ); host = info.host; uri = info.uri; if ( host.equals( "corejs.com" ) ) return new Result( _getCoreContext() , host , uri ); AppContext ac = _getContextFromMap( host ); if ( ac != null ) return new Result( ac , host , uri ); synchronized ( _contextCreationLock ){ ac = _getContextFromMap( host ); if ( ac != null ) return _finish( ac , host, uri , host ); for ( Info i : getPossibleSiteNames( info ) ){ if ( D ) System.out.println( "\t possible site name [" + i.host + "]" ); File temp = new File( _root , i.host ); if ( temp.exists() ) return _finish( getEnvironmentContext( temp , i , host ) , i.host , info.uri , host ); JSObject site = getSiteFromCloud( i.host ); if ( site != null ){ if ( D ) System.out.println( "\t found site from cloud" ); temp.mkdirs(); return _finish( getEnvironmentContext( temp , i , host ) , i.host , info.uri , host ); } } } return _finish( _getDefaultContext() , info.host , info.uri , host ); } private Result _finish( AppContext context , String host , String uri , String origHost ){ _contextCache.put( origHost , context ); _contextCache.put( host , context ); return new Result( context , host , uri ); } private AppContext getEnvironmentContext( final File siteRoot , final Info info , final String originalHost ){ if ( ! 
siteRoot.exists() ) throw new RuntimeException( "\t trying to map [" + originalHost + "] to " + siteRoot + " which doesn't exist" ); AppContext ac = _getContextFromMap( originalHost ); if ( ac != null ) return ac; if ( D ) System.out.println( "\t mapping directory [" + originalHost + "] to " + siteRoot ); if ( isCodeDir( siteRoot ) ){ ac = new AppContext( siteRoot ); } else { if ( D ) System.out.println( "\t this is a holder for branches" ); final String env = info.getEnvironment( originalHost ); if ( D ) System.out.println( "\t\t env : " + env ); final File envRoot = getBranch( siteRoot , env , info.host ); if ( D ) System.out.println( "\t using full path : " + envRoot ); final String envRootString = envRoot.toString(); ac = _getContextFromMap( envRootString ); if ( ac == null ){ ac = new AppContext( envRootString , envRoot , siteRoot.getName() , env ); _contextCache.put( envRootString , ac ); } } _contextCache.put( info.host , ac ); _contextCache.put( originalHost , ac ); return ac; } void replace( AppContext oldOne , AppContext newOne ){ synchronized ( _contextCreationLock ){ List<String> names = new ArrayList<String>( _contextCache.keySet() ); for ( String s : names ){ AppContext temp = _contextCache.get( s ); if ( temp == oldOne ){ _contextCache.put( s , newOne ); } } _contextCache.put( newOne._root , newOne ); if ( _defaultContext == oldOne ) _defaultContext = newOne; if ( _coreContext == oldOne ) _coreContext = newOne; } } private AppContext _getContextFromMap( String host ){ AppContext ac = _contextCache.get( host ); if (ac != null && ac.isReset()) { _contextCache.put( host , null ); ac = null; } return ac; } File getBranch( File root , String subdomain , String siteName ){ File f = _getBranch( root , subdomain , siteName ); JSObject envConfig = getEnvironmentFromCloud( siteName , subdomain ); if ( envConfig != null ){ GitUtils.fullUpdate( f ); String branch = envConfig.get( "branch" ).toString() ; if ( D ) System.out.println( "\t using branch [" + branch + "]" ); _checkout( f , branch ); } return f; } File _getBranch( File root , String subdomain , String siteName ){ File test = new File( root , subdomain ); if ( test.exists() ) return test; JSObject site = getSiteFromCloud( siteName ); if ( site != null ){ Object gitObject = site.get( "giturl" ); if ( gitObject != null ){ String giturl = gitObject.toString(); JSObject envConfig = getEnvironmentFromCloud( siteName , subdomain ); if ( envConfig != null ){ if ( D ) System.out.println( "\t found an env in grid" ); if ( ! 
GitUtils.clone( giturl , root , subdomain ) ) throw new RuntimeException( "couldn't clone [" + siteName + "] from [" + giturl + "]" ); _checkout( test , envConfig.get( "branch" ).toString() ); return test; } } } if ( subdomain.equals( "dev" ) ){ test = new File( root , "master" ); if ( test.exists() ) return test; } String searchList[] = null; if ( subdomain.equals( "local" ) ) searchList = LOCAL_BRANCH_LIST; else if ( subdomain.equals( "www" ) ) searchList = WWW_BRANCH_LIST; if ( searchList != null ){ for ( int i=0; i<searchList.length; i++ ){ test = new File( root , searchList[i] ); if ( test.exists() ) return test; } } throw new RuntimeException( "can't find environment [" + subdomain + "] in [" + root + "] siteName [" + siteName + "] found site:" + ( site != null ) ); } static void _checkout( File f , String what ){ if ( GitUtils.checkout( f , what ) ) return; if ( GitUtils.checkout( f , "origin/" + what ) ) return; throw new RuntimeException( "couldn't checkout [" + what + "] for [" + f + "]" ); } private synchronized AppContext _getDefaultContext(){ if ( _defaultWebRoot == null ) return null; if ( _defaultContext != null && _defaultContext._reset ) _defaultContext = null; if ( _defaultContext != null ) return _defaultContext; _defaultContext = new AppContext( _defaultWebRoot ); return _defaultContext; } private boolean isCodeDir( final File test ){ File f = new File( test , ".git" ); if ( f.exists() ) return true; f = new File( test , "dot-git" ); if ( f.exists() ) return true; if ( ! test.exists() ) return false; File lst[] = test.listFiles(); for ( int j=0; j<lst.length; j++ ){ f = lst[j]; if ( f.isDirectory() ) continue; final String name = f.getName(); for ( int i=0; i<JSFileLibrary._srcExtensions.length; i++ ) if ( name.endsWith( JSFileLibrary._srcExtensions[i] ) ) return true; } return false; } static JSObject getEnvironmentFromCloud( String siteName , String envName ){ Cloud theCloud = Cloud.getInstanceIfOnGrid(); if ( theCloud == null ) return null; return theCloud.findEnvironment( siteName , envName ); } private static JSObject getSiteFromCloud( String name ){ Cloud theCloud = Cloud.getInstanceIfOnGrid(); if ( theCloud == null ) return null; return theCloud.findSite( name , false ); } static List<Info> getPossibleSiteNames( String host , String uri ){ return getPossibleSiteNames( fixBase( host , uri ) ); } static List<Info> getPossibleSiteNames( Info base ){ List<Info> all = new ArrayList<Info>( 6 ); all.add( base ); final String host = base.host; final String uri = base.uri; String domain = DNSUtil.getDomain( host ); if ( ! domain.equals( host ) ) all.add( new Info( domain , uri ) ); int idx = domain.indexOf( "." ); if ( idx > 0 ) all.add( new Info( domain.substring( 0 , idx ) , uri ) ); return all; } static Info fixBase( String host , String uri ){ { int idx = host.indexOf( ":" ); if ( idx >= 0 ) host = host.substring( 0 , idx ); } if ( uri == null ){ uri = "/"; } else { if ( ! uri.startsWith( "/" ) ) uri = "/" + uri; } if ( CDN_HOSTNAMES.contains( host ) ){ final int idx = uri.indexOf( "/" , 1 ); if ( idx < 0 ) throw new RuntimeException( "static host without valid host:[" + host + "] uri:[" + uri + "]" ); host = uri.substring( 1 , idx ); uri = uri.substring( idx ); } for ( String d : OUR_DOMAINS ){ if ( host.endsWith( d ) ){ host = host.substring( 0 , host.length() - d.length() ); if ( host.indexOf( "." ) < 0 ) host += ".com"; break; } } if ( host.startsWith( "www." 
) ) host = host.substring( 4 ); if ( host.equals( "com" ) ) host = "www.com"; return new Info( host , uri ); } static class Info { Info( String host ){ this( host , "/" ); } Info( String host , String uri ){ this.host = host; this.uri = uri; } String getEnvironment( String big ){ if ( big.equalsIgnoreCase( host ) || host.startsWith( "www." ) || big.startsWith( host + "." ) ) return "www"; int idx = big.indexOf( "." + host ); if ( idx < 0 ){ idx = big.indexOf( host ); if ( idx < 0 ) throw new RuntimeException( "something is wrong host:" + host + " big:" + big ); } return big.substring( 0 , idx ); } public String toString(){ return host + uri; } final String host; final String uri; } private synchronized AppContext _getCoreContext(){ if ( _coreContext == null ) _coreContext = new AppContext( CoreJS.get().getRootFile( null ) ); return _coreContext; } class Result { Result( AppContext context , String host , String uri ){ this.context = context; this.host = host; this.uri = uri; } String getRoot(){ return context.getRoot(); } public String toString(){ return getRoot() + "||" + host + "||" + uri; } final AppContext context; final String uri; final String host; } final String _root; final File _rootFile; private final String _defaultWebRoot; private AppContext _defaultContext; private AppContext _coreContext; private final Map<String,AppContext> _contextCache = Collections.synchronizedMap( new StringMap<AppContext>() ); private final String _contextCreationLock = ( "AppContextHolder-Lock-" + Math.random() ).intern(); }
src/main/ed/appserver/AppContextHolder.java
// AppContextHolder.java /** * Copyright (C) 2008 10gen Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License, version 3, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package ed.appserver; import java.io.*; import java.util.*; import ed.js.*; import ed.net.*; import ed.net.httpserver.*; import ed.log.*; import ed.util.*; import ed.cloud.*; public class AppContextHolder { static boolean D = Boolean.getBoolean( "DEBUG.APP" ); static String OUR_DOMAINS[] = new String[]{ ".latenightcoders.com" , ".local.10gen.com" , ".10gen.com" }; static String CDN_HOST[] = new String[]{ "origin." , "origin-local." , "static." , "static-local." , "secure." }; static final Set<String> CDN_HOSTNAMES; static { Set<String> s = new HashSet<String>(); for ( String d : OUR_DOMAINS ) for ( String h : CDN_HOST ) s.add( (h + d).replaceAll( "\\.+" , "." ) ); CDN_HOSTNAMES = Collections.unmodifiableSet( s ); } private static final String LOCAL_BRANCH_LIST[] = new String[]{ "master" , "test" , "www" }; private static final String WWW_BRANCH_LIST[] = new String[]{ "test" , "master" }; public AppContextHolder( String defaultWebRoot , String root ){ _defaultWebRoot = defaultWebRoot; _root = root; _rootFile = _root == null ? null : new File( _root ); } public Result getContext( HttpRequest request ){ String host = request.getHeader( "X-Host" ); String uri = request.getURI(); if ( host != null ){ // if there is an X-Host, lets see if this is a cdn thing if ( CDN_HOSTNAMES.contains( request.getHost() ) && ! CDN_HOSTNAMES.contains( host ) && ! host.equals( request.getHost() ) // this should never happen, but is a weird case. ){ // X-Host was cleaned by someone else // so we need strip cdn thing from uri. 
int idx = uri.indexOf( "/" , 1 ); if ( idx > 0 ){ uri = uri.substring( idx ); } } } else { // no X-Host host = request.getHeader( "Host" ); } if ( host != null ){ int idx = host.indexOf( ":" ); if ( idx > 0 ) host = host.substring( 0 , idx ); } return getContext( host , uri ); } public Result getContext( String host , String uri ){ if ( host != null ) host = host.trim(); if ( D ) System.out.println( host + uri ); if ( host == null || _root == null || host.length() == 0 ){ if ( D ) System.out.println( "\t using default context for [" + host + "]" ); return new Result( _getDefaultContext() , host , uri ); } Info info = fixBase( host , uri ); host = info.host; uri = info.uri; if ( host.equals( "corejs.com" ) ) return new Result( _getCoreContext() , host , uri ); AppContext ac = _getContextFromMap( host ); if ( ac != null ) return new Result( ac , host , uri ); synchronized ( _contextCreationLock ){ ac = _getContextFromMap( host ); if ( ac != null ) return _finish( ac , host, uri , host ); for ( Info i : getPossibleSiteNames( info ) ){ if ( D ) System.out.println( "\t possible site name [" + i.host + "]" ); File temp = new File( _root , i.host ); if ( temp.exists() ) return _finish( getEnvironmentContext( temp , i , host ) , i.host , info.uri , host ); JSObject site = getSiteFromCloud( i.host ); if ( site != null ){ if ( D ) System.out.println( "\t found site from cloud" ); temp.mkdirs(); return _finish( getEnvironmentContext( temp , i , host ) , i.host , info.uri , host ); } } } return _finish( _getDefaultContext() , info.host , info.uri , host ); } private Result _finish( AppContext context , String host , String uri , String origHost ){ _contextCache.put( origHost , context ); _contextCache.put( host , context ); return new Result( context , host , uri ); } private AppContext getEnvironmentContext( final File siteRoot , final Info info , final String originalHost ){ if ( ! 
siteRoot.exists() ) throw new RuntimeException( "\t trying to map [" + originalHost + "] to " + siteRoot + " which doesn't exist" ); AppContext ac = _getContextFromMap( originalHost ); if ( ac != null ) return ac; if ( D ) System.out.println( "\t mapping directory [" + originalHost + "] to " + siteRoot ); if ( hasGit( siteRoot ) ){ ac = new AppContext( siteRoot ); } else { if ( D ) System.out.println( "\t this is a holder for branches" ); final String env = info.getEnvironment( originalHost ); if ( D ) System.out.println( "\t\t env : " + env ); final File envRoot = getBranch( siteRoot , env , info.host ); if ( D ) System.out.println( "\t using full path : " + envRoot ); final String envRootString = envRoot.toString(); ac = _getContextFromMap( envRootString ); if ( ac == null ){ ac = new AppContext( envRootString , envRoot , siteRoot.getName() , env ); _contextCache.put( envRootString , ac ); } } _contextCache.put( info.host , ac ); _contextCache.put( originalHost , ac ); return ac; } void replace( AppContext oldOne , AppContext newOne ){ synchronized ( _contextCreationLock ){ List<String> names = new ArrayList<String>( _contextCache.keySet() ); for ( String s : names ){ AppContext temp = _contextCache.get( s ); if ( temp == oldOne ){ _contextCache.put( s , newOne ); } } _contextCache.put( newOne._root , newOne ); if ( _defaultContext == oldOne ) _defaultContext = newOne; if ( _coreContext == oldOne ) _coreContext = newOne; } } private AppContext _getContextFromMap( String host ){ AppContext ac = _contextCache.get( host ); if (ac != null && ac.isReset()) { _contextCache.put( host , null ); ac = null; } return ac; } File getBranch( File root , String subdomain , String siteName ){ File f = _getBranch( root , subdomain , siteName ); JSObject envConfig = getEnvironmentFromCloud( siteName , subdomain ); if ( envConfig != null ){ GitUtils.fullUpdate( f ); String branch = envConfig.get( "branch" ).toString() ; if ( D ) System.out.println( "\t using branch [" + branch + "]" ); _checkout( f , branch ); } return f; } File _getBranch( File root , String subdomain , String siteName ){ File test = new File( root , subdomain ); if ( test.exists() ) return test; JSObject site = getSiteFromCloud( siteName ); if ( site != null ){ Object gitObject = site.get( "giturl" ); if ( gitObject != null ){ String giturl = gitObject.toString(); JSObject envConfig = getEnvironmentFromCloud( siteName , subdomain ); if ( envConfig != null ){ if ( D ) System.out.println( "\t found an env in grid" ); if ( ! 
GitUtils.clone( giturl , root , subdomain ) ) throw new RuntimeException( "couldn't clone [" + siteName + "] from [" + giturl + "]" ); _checkout( test , envConfig.get( "branch" ).toString() ); return test; } } } if ( subdomain.equals( "dev" ) ){ test = new File( root , "master" ); if ( test.exists() ) return test; } String searchList[] = null; if ( subdomain.equals( "local" ) ) searchList = LOCAL_BRANCH_LIST; else if ( subdomain.equals( "www" ) ) searchList = WWW_BRANCH_LIST; if ( searchList != null ){ for ( int i=0; i<searchList.length; i++ ){ test = new File( root , searchList[i] ); if ( test.exists() ) return test; } } throw new RuntimeException( "can't find environment [" + subdomain + "] in [" + root + "] siteName [" + siteName + "] found site:" + ( site != null ) ); } static void _checkout( File f , String what ){ if ( GitUtils.checkout( f , what ) ) return; if ( GitUtils.checkout( f , "origin/" + what ) ) return; throw new RuntimeException( "couldn't checkout [" + what + "] for [" + f + "]" ); } private synchronized AppContext _getDefaultContext(){ if ( _defaultWebRoot == null ) return null; if ( _defaultContext != null && _defaultContext._reset ) _defaultContext = null; if ( _defaultContext != null ) return _defaultContext; _defaultContext = new AppContext( _defaultWebRoot ); return _defaultContext; } private boolean hasGit( File test ){ File f = new File( test , ".git" ); if ( f.exists() ) return true; f = new File( test , "dot-git" ); if ( f.exists() ) return true; return false; } static JSObject getEnvironmentFromCloud( String siteName , String envName ){ Cloud theCloud = Cloud.getInstanceIfOnGrid(); if ( theCloud == null ) return null; return theCloud.findEnvironment( siteName , envName ); } private static JSObject getSiteFromCloud( String name ){ Cloud theCloud = Cloud.getInstanceIfOnGrid(); if ( theCloud == null ) return null; return theCloud.findSite( name , false ); } static List<Info> getPossibleSiteNames( String host , String uri ){ return getPossibleSiteNames( fixBase( host , uri ) ); } static List<Info> getPossibleSiteNames( Info base ){ List<Info> all = new ArrayList<Info>( 6 ); all.add( base ); final String host = base.host; final String uri = base.uri; String domain = DNSUtil.getDomain( host ); if ( ! domain.equals( host ) ) all.add( new Info( domain , uri ) ); int idx = domain.indexOf( "." ); if ( idx > 0 ) all.add( new Info( domain.substring( 0 , idx ) , uri ) ); return all; } static Info fixBase( String host , String uri ){ { int idx = host.indexOf( ":" ); if ( idx >= 0 ) host = host.substring( 0 , idx ); } if ( uri == null ){ uri = "/"; } else { if ( ! uri.startsWith( "/" ) ) uri = "/" + uri; } if ( CDN_HOSTNAMES.contains( host ) ){ final int idx = uri.indexOf( "/" , 1 ); if ( idx < 0 ) throw new RuntimeException( "static host without valid host:[" + host + "] uri:[" + uri + "]" ); host = uri.substring( 1 , idx ); uri = uri.substring( idx ); } for ( String d : OUR_DOMAINS ){ if ( host.endsWith( d ) ){ host = host.substring( 0 , host.length() - d.length() ); if ( host.indexOf( "." ) < 0 ) host += ".com"; break; } } if ( host.startsWith( "www." ) ) host = host.substring( 4 ); if ( host.equals( "com" ) ) host = "www.com"; return new Info( host , uri ); } static class Info { Info( String host ){ this( host , "/" ); } Info( String host , String uri ){ this.host = host; this.uri = uri; } String getEnvironment( String big ){ if ( big.equalsIgnoreCase( host ) || host.startsWith( "www." ) || big.startsWith( host + "." ) ) return "www"; int idx = big.indexOf( "." 
+ host ); if ( idx < 0 ){ idx = big.indexOf( host ); if ( idx < 0 ) throw new RuntimeException( "something is wrong host:" + host + " big:" + big ); } return big.substring( 0 , idx ); } public String toString(){ return host + uri; } final String host; final String uri; } private synchronized AppContext _getCoreContext(){ if ( _coreContext == null ) _coreContext = new AppContext( CoreJS.get().getRootFile( null ) ); return _coreContext; } class Result { Result( AppContext context , String host , String uri ){ this.context = context; this.host = host; this.uri = uri; } String getRoot(){ return context.getRoot(); } public String toString(){ return getRoot() + "||" + host + "||" + uri; } final AppContext context; final String uri; final String host; } final String _root; final File _rootFile; private final String _defaultWebRoot; private AppContext _defaultContext; private AppContext _coreContext; private final Map<String,AppContext> _contextCache = Collections.synchronizedMap( new StringMap<AppContext>() ); private final String _contextCreationLock = ( "AppContextHolder-Lock-" + Math.random() ).intern(); }
better at determining if a directory has code or environments
src/main/ed/appserver/AppContextHolder.java
better at determining if a directory has code or environments
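Editor's note (added, not part of the record above): the commit widens the old hasGit() check into isCodeDir(), so a directory counts as code either when it contains a .git / dot-git entry or when any top-level file carries a known source extension; otherwise it is treated as a holder of per-branch checkouts. A self-contained sketch of that heuristic follows; the extension list is an assumption standing in for JSFileLibrary._srcExtensions, not the real list.

import java.io.File;

public class CodeDirCheck {

    // Stand-in for JSFileLibrary._srcExtensions; the real list lives in the app server
    private static final String[] SRC_EXTENSIONS = { ".js", ".jxp", ".html" };

    static boolean isCodeDir(File dir) {
        if (new File(dir, ".git").exists() || new File(dir, "dot-git").exists()) {
            return true; // a checkout itself, not a holder of per-branch checkouts
        }
        File[] entries = dir.listFiles();
        if (entries == null) {
            return false; // directory does not exist or cannot be read
        }
        for (File f : entries) {
            if (f.isDirectory()) {
                continue; // only loose source files at the top level count
            }
            String name = f.getName();
            for (String ext : SRC_EXTENSIONS) {
                if (name.endsWith(ext)) {
                    return true;
                }
            }
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(isCodeDir(new File(".")));
    }
}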
Java
apache-2.0
211921cd33fb74c8ee7a35ea0404af2ddf7cc294
0
Talend/data-prep,Talend/data-prep,Talend/data-prep
// ============================================================================ // // Copyright (C) 2006-2018 Talend Inc. - www.talend.com // // This source code is available under agreement available at // https://github.com/Talend/data-prep/blob/master/LICENSE // // You should have received a copy of the agreement // along with this program; if not, write to Talend SA // 9 rue Pages 92150 Suresnes, France // // ============================================================================ package org.talend.dataprep.quality; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.talend.dataprep.api.dataset.ColumnMetadata; import org.talend.dataprep.api.dataset.row.DataSetRow; import org.talend.dataprep.api.dataset.row.RowMetadataUtils; import org.talend.dataprep.api.dataset.statistics.date.StreamDateHistogramAnalyzer; import org.talend.dataprep.api.dataset.statistics.date.StreamDateHistogramStatistics; import org.talend.dataprep.api.dataset.statistics.number.StreamNumberHistogramAnalyzer; import org.talend.dataprep.api.type.TypeUtils; import org.talend.dataprep.dataset.StatisticsAdapter; import org.talend.dataprep.transformation.actions.date.DateParser; import org.talend.dataprep.transformation.api.transformer.json.NullAnalyzer; import org.talend.dataquality.common.inference.Analyzer; import org.talend.dataquality.common.inference.Analyzers; import org.talend.dataquality.common.inference.Metadata; import org.talend.dataquality.common.inference.ValueQualityStatistics; import org.talend.dataquality.semantic.classifier.SemanticCategoryEnum; import org.talend.dataquality.semantic.snapshot.DictionarySnapshot; import org.talend.dataquality.semantic.snapshot.DictionarySnapshotProvider; import org.talend.dataquality.semantic.snapshot.StandardDictionarySnapshotProvider; import org.talend.dataquality.semantic.statistics.SemanticAnalyzer; import org.talend.dataquality.semantic.statistics.SemanticQualityAnalyzer; import org.talend.dataquality.semantic.statistics.SemanticType; import org.talend.dataquality.statistics.cardinality.CardinalityAnalyzer; import org.talend.dataquality.statistics.cardinality.CardinalityStatistics; import org.talend.dataquality.statistics.frequency.AbstractFrequencyAnalyzer; import org.talend.dataquality.statistics.frequency.DataTypeFrequencyAnalyzer; import org.talend.dataquality.statistics.frequency.DataTypeFrequencyStatistics; import org.talend.dataquality.statistics.frequency.pattern.CompositePatternFrequencyAnalyzer; import org.talend.dataquality.statistics.frequency.pattern.PatternFrequencyStatistics; import org.talend.dataquality.statistics.frequency.recognition.AbstractPatternRecognizer; import org.talend.dataquality.statistics.frequency.recognition.DateTimePatternRecognizer; import org.talend.dataquality.statistics.frequency.recognition.EmptyPatternRecognizer; import org.talend.dataquality.statistics.frequency.recognition.GenericCharPatternRecognizer; import org.talend.dataquality.statistics.numeric.quantile.QuantileAnalyzer; import org.talend.dataquality.statistics.numeric.quantile.QuantileStatistics; import org.talend.dataquality.statistics.numeric.summary.SummaryAnalyzer; import 
org.talend.dataquality.statistics.numeric.summary.SummaryStatistics; import org.talend.dataquality.statistics.quality.DataTypeQualityAnalyzer; import org.talend.dataquality.statistics.quality.ValueQualityAnalyzer; import org.talend.dataquality.statistics.text.TextLengthAnalyzer; import org.talend.dataquality.statistics.text.TextLengthStatistics; import org.talend.dataquality.statistics.type.DataTypeAnalyzer; import org.talend.dataquality.statistics.type.DataTypeEnum; import org.talend.dataquality.statistics.type.DataTypeOccurences; /** * Service in charge of analyzing dataset quality. */ public class AnalyzerService { /** * This class' logger. */ private static final Logger LOGGER = LoggerFactory.getLogger(AnalyzerService.class); private final DateParser dateParser; private final Set<Analyzer> openedAnalyzers = new HashSet<>(); private DictionarySnapshotProvider dictionarySnapshotProvider; public AnalyzerService() { this(new StandardDictionarySnapshotProvider()); } public AnalyzerService(DictionarySnapshotProvider dictionarySnapshotProvider) { // Semantic builder (a single instance to be shared among all analyzers for proper index file management). this.dictionarySnapshotProvider = dictionarySnapshotProvider; this.dateParser = new DateParser(this); } public void setDictionarySnapshotProvider(DictionarySnapshotProvider provider) { this.dictionarySnapshotProvider = provider; } private static AbstractFrequencyAnalyzer buildPatternAnalyzer(List<ColumnMetadata> columns) { // warning, the order is important List<AbstractPatternRecognizer> patternFrequencyAnalyzers = new ArrayList<>(); patternFrequencyAnalyzers.add(new EmptyPatternRecognizer()); patternFrequencyAnalyzers.add(new DateTimePatternRecognizer()); patternFrequencyAnalyzers.add(new GenericCharPatternRecognizer()); return new CompositePatternFrequencyAnalyzer(patternFrequencyAnalyzers, TypeUtils.convert(columns)); } /** * Return the list of most used patterns for dates. * * @param columns the columns to analyze. * @return the list of most used patterns for dates or an empty list if there's none. */ private List<String> getMostUsedDatePatterns(List<ColumnMetadata> columns) { List<String> patterns = new ArrayList<>(columns.size()); for (ColumnMetadata column : columns) { final String pattern = RowMetadataUtils.getMostUsedDatePattern(column); if (StringUtils.isNotBlank(pattern)) { patterns.add(pattern); } } return patterns; } /** * Similarly to {@link #build(List, Analysis...)} but for a single column. * * @param column A column, may be null. * @param settings A varargs with {@link Analysis}. Duplicates are possible in varargs but will be considered only * once. * @return A ready to use {@link Analyzer}. */ public Analyzer<Analyzers.Result> build(ColumnMetadata column, Analysis... settings) { if (column == null) { return build(Collections.emptyList(), settings); } else { return build(Collections.singletonList(column), settings); } } /** * Extract all column name and return them in a {@link List}. * * @param columns columns metadata containing columns names. * @return a {@link List} of column name */ private List<String> extractColumnNames(List<ColumnMetadata> columns) { return columns.stream().map(ColumnMetadata::getName).collect(Collectors.toList()); } /** * Build a {@link Analyzer} to analyze records with columns (in <code>columns</code>). <code>settings</code> give * all the wanted analysis settings for the analyzer. * * @param columns A list of columns, may be null or empty. * @param settings A varargs with {@link Analysis}. 
Duplicates are possible in varargs but will be considered only * once. * @return A ready to use {@link Analyzer}. */ public Analyzer<Analyzers.Result> build(List<ColumnMetadata> columns, Analysis... settings) { if (columns == null || columns.isEmpty()) { return Analyzers.with(NullAnalyzer.INSTANCE); } // Get all needed analysis final Set<Analysis> all = EnumSet.noneOf(Analysis.class); for (Analysis setting : settings) { if (setting != null) { all.add(setting); all.addAll(Arrays.asList(setting.dependencies)); } } if (all.isEmpty()) { return Analyzers.with(NullAnalyzer.INSTANCE); } // Column types DataTypeEnum[] types = TypeUtils.convert(columns); // Semantic domains List<String> domainList = columns .stream() // .map(ColumnMetadata::getDomain) // .map(d -> StringUtils.isBlank(d) ? SemanticCategoryEnum.UNKNOWN.getId() : d) // .collect(Collectors.toList()); final String[] domains = domainList.toArray(new String[domainList.size()]); DictionarySnapshot dictionarySnapshot = dictionarySnapshotProvider.get(); // Build all analyzers List<Analyzer> analyzers = new ArrayList<>(); for (Analysis setting : settings) { switch (setting) { case SEMANTIC: final SemanticAnalyzer semanticAnalyzer = new SemanticAnalyzer(dictionarySnapshot); semanticAnalyzer.setLimit(Integer.MAX_VALUE); semanticAnalyzer.setMetadata(Metadata.HEADER_NAME, extractColumnNames(columns)); analyzers.add(semanticAnalyzer); break; case HISTOGRAM: analyzers.add(new StreamDateHistogramAnalyzer(columns, types, dateParser)); analyzers.add(new StreamNumberHistogramAnalyzer(types)); break; case QUALITY: final DataTypeQualityAnalyzer dataTypeQualityAnalyzer = new DataTypeQualityAnalyzer(types); columns.forEach(c -> dataTypeQualityAnalyzer .addCustomDateTimePattern(RowMetadataUtils.getMostUsedDatePattern(c))); analyzers.add(new ValueQualityAnalyzer(dataTypeQualityAnalyzer, new SemanticQualityAnalyzer(dictionarySnapshot, domains, false), true)); // NOSONAR break; case CARDINALITY: analyzers.add(new CardinalityAnalyzer()); break; case PATTERNS: analyzers.add(buildPatternAnalyzer(columns)); break; case LENGTH: analyzers.add(new TextLengthAnalyzer()); break; case QUANTILES: boolean acceptQuantiles = false; for (DataTypeEnum type : types) { if (type == DataTypeEnum.INTEGER || type == DataTypeEnum.DOUBLE) { acceptQuantiles = true; break; } } if (acceptQuantiles) { analyzers.add(new QuantileAnalyzer(types)); } break; case SUMMARY: analyzers.add(new SummaryAnalyzer(types)); break; case TYPE: boolean shouldUseTypeAnalysis = true; for (Analysis analysis : settings) { if (analysis == Analysis.QUALITY) { shouldUseTypeAnalysis = false; break; } } if (shouldUseTypeAnalysis) { final List<String> mostUsedDatePatterns = getMostUsedDatePatterns(columns); analyzers.add(new DataTypeAnalyzer(mostUsedDatePatterns)); } else { LOGGER.warn("Disabled {} analysis (conflicts with {}).", setting, Analysis.QUALITY); } break; case FREQUENCY: analyzers.add(new DataTypeFrequencyAnalyzer()); break; default: throw new IllegalArgumentException("Missing support for '" + setting + "'."); } } // Merge all analyzers into one final Analyzer<Analyzers.Result> analyzer = Analyzers.with(analyzers.toArray(new Analyzer[analyzers.size()])); analyzer.init(); if (LOGGER.isDebugEnabled()) { // Wrap analyzer for usage monitoring (to diagnose non-closed analyzer issues). 
return new ResourceMonitoredAnalyzer(analyzer); } else { return analyzer; } } public Analyzer<Analyzers.Result> full(final List<ColumnMetadata> columns) { // Configure quality & semantic analysis (if column metadata information is present in stream). return build(columns, Analysis.QUALITY, Analysis.CARDINALITY, Analysis.FREQUENCY, Analysis.PATTERNS, Analysis.LENGTH, Analysis.SEMANTIC, Analysis.QUANTILES, Analysis.SUMMARY, Analysis.HISTOGRAM); } public Analyzer<Analyzers.Result> qualityAnalysis(List<ColumnMetadata> columns) { return build(columns, Analysis.QUALITY, Analysis.SUMMARY, Analysis.SEMANTIC); } /** * <p> * Analyse the... Schema ! * </p> * <ul> * <li>Semantic</li> * <li>DataType</li> * </ul> * * @param columns the columns to analyze. * @return the analyzers to perform for the schema. */ public Analyzer<Analyzers.Result> schemaAnalysis(List<ColumnMetadata> columns) { return build(columns, Analysis.SEMANTIC, Analysis.TYPE); } public void analyzeFull(final Stream<DataSetRow> records, List<ColumnMetadata> columns) { final Analyzer<Analyzers.Result> analyzer = full(columns); analyzer.init(); records.map(r -> r.toArray()).forEach(analyzer::analyze); analyzer.end(); final List<Analyzers.Result> analyzerResult = analyzer.getResult(); final StatisticsAdapter statisticsAdapter = new StatisticsAdapter(40); statisticsAdapter.adapt(columns, analyzerResult); } public enum Analysis { /** * Basic type discovery (integer, string...). */ TYPE(DataTypeOccurences.class), /** * Semantic type discovery (us_code, fr_phone...) */ SEMANTIC(SemanticType.class), /** * Histogram computation. */ HISTOGRAM(StreamDateHistogramStatistics.class), /** * Data quality (empty, invalid, valid...) */ QUALITY(ValueQualityStatistics.class), /** * Cardinality (distinct, duplicates) */ CARDINALITY(CardinalityStatistics.class), /** * String patterns */ PATTERNS(PatternFrequencyStatistics.class), /** * Text length (min / max length) */ LENGTH(TextLengthStatistics.class), /** * Quantiles */ QUANTILES(QuantileStatistics.class), /** * Min / Max / Variance for numeric values */ SUMMARY(SummaryStatistics.class), /** * Value to frequency map */ FREQUENCY(DataTypeFrequencyStatistics.class); private final Class resultClass; private final Analysis[] dependencies; Analysis(Class resultClass, Analysis... dependencies) { this.resultClass = resultClass; this.dependencies = dependencies; } public Class getResultClass() { return resultClass; } } private class ResourceMonitoredAnalyzer implements Analyzer<Analyzers.Result> { private final Analyzer<Analyzers.Result> analyzer; private final Exception caller; private long lastCall; private ResourceMonitoredAnalyzer(Analyzer<Analyzers.Result> analyzer) { caller = new RuntimeException(); // NOSONAR openedAnalyzers.add(this); this.analyzer = analyzer; } @Override public void init() { analyzer.init(); } @Override public boolean analyze(String... 
strings) { lastCall = System.currentTimeMillis(); return analyzer.analyze(strings); } @Override public void end() { analyzer.end(); } @Override public List<Analyzers.Result> getResult() { return analyzer.getResult(); } @Override public void close() throws Exception { analyzer.close(); openedAnalyzers.remove(this); } @Override public String toString() { StringBuilder toStringBuilder = new StringBuilder(); toStringBuilder // .append(analyzer.toString()) .append(' ') // .append(" last used (") .append(System.currentTimeMillis() - lastCall) // .append(" ms ago) "); final StringWriter toStringCaller = new StringWriter(); this.caller.printStackTrace(new PrintWriter(toStringCaller)); // NOSONAR (stacktrace printed in a String) toStringBuilder.append("caller: ").append(toStringCaller.toString()); return toStringBuilder.toString(); } } }
dataprep-backend-common/src/main/java/org/talend/dataprep/quality/AnalyzerService.java
// ============================================================================ // // Copyright (C) 2006-2018 Talend Inc. - www.talend.com // // This source code is available under agreement available at // https://github.com/Talend/data-prep/blob/master/LICENSE // // You should have received a copy of the agreement // along with this program; if not, write to Talend SA // 9 rue Pages 92150 Suresnes, France // // ============================================================================ package org.talend.dataprep.quality; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.talend.dataprep.api.dataset.ColumnMetadata; import org.talend.dataprep.api.dataset.row.DataSetRow; import org.talend.dataprep.api.dataset.row.RowMetadataUtils; import org.talend.dataprep.api.dataset.statistics.date.StreamDateHistogramAnalyzer; import org.talend.dataprep.api.dataset.statistics.date.StreamDateHistogramStatistics; import org.talend.dataprep.api.dataset.statistics.number.StreamNumberHistogramAnalyzer; import org.talend.dataprep.api.type.TypeUtils; import org.talend.dataprep.dataset.StatisticsAdapter; import org.talend.dataprep.transformation.actions.date.DateParser; import org.talend.dataprep.transformation.api.transformer.json.NullAnalyzer; import org.talend.dataquality.common.inference.Analyzer; import org.talend.dataquality.common.inference.Analyzers; import org.talend.dataquality.common.inference.Metadata; import org.talend.dataquality.common.inference.ValueQualityStatistics; import org.talend.dataquality.semantic.classifier.SemanticCategoryEnum; import org.talend.dataquality.semantic.snapshot.DictionarySnapshot; import org.talend.dataquality.semantic.snapshot.DictionarySnapshotProvider; import org.talend.dataquality.semantic.snapshot.StandardDictionarySnapshotProvider; import org.talend.dataquality.semantic.statistics.SemanticAnalyzer; import org.talend.dataquality.semantic.statistics.SemanticQualityAnalyzer; import org.talend.dataquality.semantic.statistics.SemanticType; import org.talend.dataquality.statistics.cardinality.CardinalityAnalyzer; import org.talend.dataquality.statistics.cardinality.CardinalityStatistics; import org.talend.dataquality.statistics.frequency.AbstractFrequencyAnalyzer; import org.talend.dataquality.statistics.frequency.DataTypeFrequencyAnalyzer; import org.talend.dataquality.statistics.frequency.DataTypeFrequencyStatistics; import org.talend.dataquality.statistics.frequency.pattern.CompositePatternFrequencyAnalyzer; import org.talend.dataquality.statistics.frequency.pattern.PatternFrequencyStatistics; import org.talend.dataquality.statistics.frequency.recognition.AbstractPatternRecognizer; import org.talend.dataquality.statistics.frequency.recognition.DateTimePatternRecognizer; import org.talend.dataquality.statistics.frequency.recognition.EmptyPatternRecognizer; import org.talend.dataquality.statistics.frequency.recognition.LatinExtendedCharPatternRecognizer; import org.talend.dataquality.statistics.numeric.quantile.QuantileAnalyzer; import org.talend.dataquality.statistics.numeric.quantile.QuantileStatistics; import org.talend.dataquality.statistics.numeric.summary.SummaryAnalyzer; import org.talend.dataquality.statistics.numeric.summary.SummaryStatistics; import org.talend.dataquality.statistics.quality.DataTypeQualityAnalyzer; import org.talend.dataquality.statistics.quality.ValueQualityAnalyzer; import org.talend.dataquality.statistics.text.TextLengthAnalyzer; import 
org.talend.dataquality.statistics.text.TextLengthStatistics; import org.talend.dataquality.statistics.type.DataTypeAnalyzer; import org.talend.dataquality.statistics.type.DataTypeEnum; import org.talend.dataquality.statistics.type.DataTypeOccurences; import java.io.PrintWriter; import java.io.StringWriter; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; /** * Service in charge of analyzing dataset quality. */ public class AnalyzerService { /** * This class' logger. */ private static final Logger LOGGER = LoggerFactory.getLogger(AnalyzerService.class); private final DateParser dateParser; private final Set<Analyzer> openedAnalyzers = new HashSet<>(); private DictionarySnapshotProvider dictionarySnapshotProvider; public AnalyzerService() { this(new StandardDictionarySnapshotProvider()); } public AnalyzerService(DictionarySnapshotProvider dictionarySnapshotProvider) { // Semantic builder (a single instance to be shared among all analyzers for proper index file management). this.dictionarySnapshotProvider = dictionarySnapshotProvider; this.dateParser = new DateParser(this); } public void setDictionarySnapshotProvider(DictionarySnapshotProvider provider) { this.dictionarySnapshotProvider = provider; } private static AbstractFrequencyAnalyzer buildPatternAnalyzer(List<ColumnMetadata> columns) { // warning, the order is important List<AbstractPatternRecognizer> patternFrequencyAnalyzers = new ArrayList<>(); patternFrequencyAnalyzers.add(new EmptyPatternRecognizer()); patternFrequencyAnalyzers.add(new DateTimePatternRecognizer()); patternFrequencyAnalyzers.add(new LatinExtendedCharPatternRecognizer()); return new CompositePatternFrequencyAnalyzer(patternFrequencyAnalyzers, TypeUtils.convert(columns)); } /** * Return the list of most used patterns for dates. * * @param columns the columns to analyze. * @return the list of most used patterns for dates or an empty list if there's none. */ private List<String> getMostUsedDatePatterns(List<ColumnMetadata> columns) { List<String> patterns = new ArrayList<>(columns.size()); for (ColumnMetadata column : columns) { final String pattern = RowMetadataUtils.getMostUsedDatePattern(column); if (StringUtils.isNotBlank(pattern)) { patterns.add(pattern); } } return patterns; } /** * Similarly to {@link #build(List, Analysis...)} but for a single column. * * @param column A column, may be null. * @param settings A varargs with {@link Analysis}. Duplicates are possible in varargs but will be considered only * once. * @return A ready to use {@link Analyzer}. */ public Analyzer<Analyzers.Result> build(ColumnMetadata column, Analysis... settings) { if (column == null) { return build(Collections.emptyList(), settings); } else { return build(Collections.singletonList(column), settings); } } /** * Extract all column name and return them in a {@link List}. * * @param columns columns metadata containing columns names. * @return a {@link List} of column name */ private List<String> extractColumnNames(List<ColumnMetadata> columns) { return columns.stream().map(ColumnMetadata::getName).collect(Collectors.toList()); } /** * Build a {@link Analyzer} to analyze records with columns (in <code>columns</code>). <code>settings</code> give * all the wanted analysis settings for the analyzer. * * @param columns A list of columns, may be null or empty. * @param settings A varargs with {@link Analysis}. Duplicates are possible in varargs but will be considered only * once. * @return A ready to use {@link Analyzer}. 
*/ public Analyzer<Analyzers.Result> build(List<ColumnMetadata> columns, Analysis... settings) { if (columns == null || columns.isEmpty()) { return Analyzers.with(NullAnalyzer.INSTANCE); } // Get all needed analysis final Set<Analysis> all = EnumSet.noneOf(Analysis.class); for (Analysis setting : settings) { if (setting != null) { all.add(setting); all.addAll(Arrays.asList(setting.dependencies)); } } if (all.isEmpty()) { return Analyzers.with(NullAnalyzer.INSTANCE); } // Column types DataTypeEnum[] types = TypeUtils.convert(columns); // Semantic domains List<String> domainList = columns .stream() // .map(ColumnMetadata::getDomain) // .map(d -> StringUtils.isBlank(d) ? SemanticCategoryEnum.UNKNOWN.getId() : d) // .collect(Collectors.toList()); final String[] domains = domainList.toArray(new String[domainList.size()]); DictionarySnapshot dictionarySnapshot = dictionarySnapshotProvider.get(); // Build all analyzers List<Analyzer> analyzers = new ArrayList<>(); for (Analysis setting : settings) { switch (setting) { case SEMANTIC: final SemanticAnalyzer semanticAnalyzer = new SemanticAnalyzer(dictionarySnapshot); semanticAnalyzer.setLimit(Integer.MAX_VALUE); semanticAnalyzer.setMetadata(Metadata.HEADER_NAME, extractColumnNames(columns)); analyzers.add(semanticAnalyzer); break; case HISTOGRAM: analyzers.add(new StreamDateHistogramAnalyzer(columns, types, dateParser)); analyzers.add(new StreamNumberHistogramAnalyzer(types)); break; case QUALITY: final DataTypeQualityAnalyzer dataTypeQualityAnalyzer = new DataTypeQualityAnalyzer(types); columns.forEach(c -> dataTypeQualityAnalyzer .addCustomDateTimePattern(RowMetadataUtils.getMostUsedDatePattern(c))); analyzers.add(new ValueQualityAnalyzer(dataTypeQualityAnalyzer, new SemanticQualityAnalyzer(dictionarySnapshot, domains, false), true)); // NOSONAR break; case CARDINALITY: analyzers.add(new CardinalityAnalyzer()); break; case PATTERNS: analyzers.add(buildPatternAnalyzer(columns)); break; case LENGTH: analyzers.add(new TextLengthAnalyzer()); break; case QUANTILES: boolean acceptQuantiles = false; for (DataTypeEnum type : types) { if (type == DataTypeEnum.INTEGER || type == DataTypeEnum.DOUBLE) { acceptQuantiles = true; break; } } if (acceptQuantiles) { analyzers.add(new QuantileAnalyzer(types)); } break; case SUMMARY: analyzers.add(new SummaryAnalyzer(types)); break; case TYPE: boolean shouldUseTypeAnalysis = true; for (Analysis analysis : settings) { if (analysis == Analysis.QUALITY) { shouldUseTypeAnalysis = false; break; } } if (shouldUseTypeAnalysis) { final List<String> mostUsedDatePatterns = getMostUsedDatePatterns(columns); analyzers.add(new DataTypeAnalyzer(mostUsedDatePatterns)); } else { LOGGER.warn("Disabled {} analysis (conflicts with {}).", setting, Analysis.QUALITY); } break; case FREQUENCY: analyzers.add(new DataTypeFrequencyAnalyzer()); break; default: throw new IllegalArgumentException("Missing support for '" + setting + "'."); } } // Merge all analyzers into one final Analyzer<Analyzers.Result> analyzer = Analyzers.with(analyzers.toArray(new Analyzer[analyzers.size()])); analyzer.init(); if (LOGGER.isDebugEnabled()) { // Wrap analyzer for usage monitoring (to diagnose non-closed analyzer issues). return new ResourceMonitoredAnalyzer(analyzer); } else { return analyzer; } } public Analyzer<Analyzers.Result> full(final List<ColumnMetadata> columns) { // Configure quality & semantic analysis (if column metadata information is present in stream). 
return build(columns, Analysis.QUALITY, Analysis.CARDINALITY, Analysis.FREQUENCY, Analysis.PATTERNS, Analysis.LENGTH, Analysis.SEMANTIC, Analysis.QUANTILES, Analysis.SUMMARY, Analysis.HISTOGRAM); } public Analyzer<Analyzers.Result> qualityAnalysis(List<ColumnMetadata> columns) { return build(columns, Analysis.QUALITY, Analysis.SUMMARY, Analysis.SEMANTIC); } /** * <p> * Analyse the... Schema ! * </p> * <ul> * <li>Semantic</li> * <li>DataType</li> * </ul> * * @param columns the columns to analyze. * @return the analyzers to perform for the schema. */ public Analyzer<Analyzers.Result> schemaAnalysis(List<ColumnMetadata> columns) { return build(columns, Analysis.SEMANTIC, Analysis.TYPE); } public void analyzeFull(final Stream<DataSetRow> records, List<ColumnMetadata> columns) { final Analyzer<Analyzers.Result> analyzer = full(columns); analyzer.init(); records.map(r -> r.toArray()).forEach(analyzer::analyze); analyzer.end(); final List<Analyzers.Result> analyzerResult = analyzer.getResult(); final StatisticsAdapter statisticsAdapter = new StatisticsAdapter(40); statisticsAdapter.adapt(columns, analyzerResult); } public enum Analysis { /** * Basic type discovery (integer, string...). */ TYPE(DataTypeOccurences.class), /** * Semantic type discovery (us_code, fr_phone...) */ SEMANTIC(SemanticType.class), /** * Histogram computation. */ HISTOGRAM(StreamDateHistogramStatistics.class), /** * Data quality (empty, invalid, valid...) */ QUALITY(ValueQualityStatistics.class), /** * Cardinality (distinct, duplicates) */ CARDINALITY(CardinalityStatistics.class), /** * String patterns */ PATTERNS(PatternFrequencyStatistics.class), /** * Text length (min / max length) */ LENGTH(TextLengthStatistics.class), /** * Quantiles */ QUANTILES(QuantileStatistics.class), /** * Min / Max / Variance for numeric values */ SUMMARY(SummaryStatistics.class), /** * Value to frequency map */ FREQUENCY(DataTypeFrequencyStatistics.class); private final Class resultClass; private final Analysis[] dependencies; Analysis(Class resultClass, Analysis... dependencies) { this.resultClass = resultClass; this.dependencies = dependencies; } public Class getResultClass() { return resultClass; } } private class ResourceMonitoredAnalyzer implements Analyzer<Analyzers.Result> { private final Analyzer<Analyzers.Result> analyzer; private final Exception caller; private long lastCall; private ResourceMonitoredAnalyzer(Analyzer<Analyzers.Result> analyzer) { caller = new RuntimeException(); // NOSONAR openedAnalyzers.add(this); this.analyzer = analyzer; } @Override public void init() { analyzer.init(); } @Override public boolean analyze(String... strings) { lastCall = System.currentTimeMillis(); return analyzer.analyze(strings); } @Override public void end() { analyzer.end(); } @Override public List<Analyzers.Result> getResult() { return analyzer.getResult(); } @Override public void close() throws Exception { analyzer.close(); openedAnalyzers.remove(this); } @Override public String toString() { StringBuilder toStringBuilder = new StringBuilder(); toStringBuilder // .append(analyzer.toString()) .append(' ') // .append(" last used (") .append(System.currentTimeMillis() - lastCall) // .append(" ms ago) "); final StringWriter toStringCaller = new StringWriter(); this.caller.printStackTrace(new PrintWriter(toStringCaller)); // NOSONAR (stacktrace printed in a String) toStringBuilder.append("caller: ").append(toStringCaller.toString()); return toStringBuilder.toString(); } } }
feat(TDQ-14160): add asian patterns (#1544)
dataprep-backend-common/src/main/java/org/talend/dataprep/quality/AnalyzerService.java
feat(TDQ-14160): add asian patterns (#1544)
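The change above swaps LatinExtendedCharPatternRecognizer for GenericCharPatternRecognizer inside buildPatternAnalyzer(), which is what lets non-Latin (CJK) values contribute to pattern statistics. As a minimal, hypothetical usage sketch that is not part of the commit, this is roughly how a caller could drive only the PATTERNS analysis exposed by AnalyzerService; the class and method names and the column/row parameters are illustrative, and the init/analyze/end/close cycle mirrors the analyzeFull() method shown above.

import java.util.List;

import org.talend.dataprep.api.dataset.ColumnMetadata;
import org.talend.dataprep.quality.AnalyzerService;
import org.talend.dataquality.common.inference.Analyzer;
import org.talend.dataquality.common.inference.Analyzers;

final class PatternAnalysisSketch {

    private PatternAnalysisSketch() {
    }

    // Runs only the PATTERNS analysis over the given rows and returns the raw results.
    // With GenericCharPatternRecognizer registered, CJK values now produce pattern
    // frequencies instead of being left unrecognized.
    static List<Analyzers.Result> patternStats(AnalyzerService service,
                                               List<ColumnMetadata> columns,
                                               List<String[]> rows) throws Exception {
        Analyzer<Analyzers.Result> analyzer = service.build(columns, AnalyzerService.Analysis.PATTERNS);
        try {
            analyzer.init();                 // same cycle analyzeFull() uses
            rows.forEach(analyzer::analyze); // analyze(String...) takes one row as a String[]
            analyzer.end();
            return analyzer.getResult();
        } finally {
            analyzer.close();                // debug builds track analyzers that are never closed
        }
    }
}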
Java
apache-2.0
1adbea464819b1b34f433f109e45544d68988d60
0
medic/javarosa,medic/javarosa,medic/javarosa
/* * Copyright (C) 2009 JavaRosa * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.javarosa.j2me.services; import java.util.Hashtable; import org.javarosa.core.services.UnavailableServiceException; /** * This is a registry of services to be passed to a state (such as form entry) that can perform data capture. * * A 'service' is an interface implementation that provides data from somewhere outside JavaRosa, typically * through a vendor-specific API not available on all devices. * * To prevent shared code from crashing/not compiling on a device that lacks the device specific API, the * shared code must instead communicate only with an abstract API defined in JavaRosa (i.e., * 'VideoCaptureService'). The deployment then provides an implementation of this service that acts as * an intermediary, and communicates directly with the device-specific API. * * @author Drew Roos * */ public class DataCaptureServiceRegistry { private Hashtable<String, DataCaptureService> services; public DataCaptureServiceRegistry () { services = new Hashtable<String, DataCaptureService>(); } public DataCaptureServiceRegistry (DataCaptureService[] services) { this(); for (int i = 0; i < services.length; i++) registerService(services[i]); } public void registerService (DataCaptureService service) { String type = service.getType(); validateServiceType(type, service); services.put(type, service); } public void unregisterService (String type) { if (services.get(type) == null) { System.err.println("No service registered for type [" + type + "]"); } else { services.remove(type); } } public DataCaptureService getService (String type) throws UnavailableServiceException { DataCaptureService service = services.get(type); if (service == null) { throw new UnavailableServiceException("No service registered for type [" + type + "]"); } else { return service; } } private static void validateServiceType (String type, DataCaptureService service) { if (/* (DataCaptureService.IMAGE.equals(type) && !(service instanceof ImageCaptureService)) || */ (DataCaptureService.AUDIO.equals(type) && !(service instanceof AudioCaptureService)) || /* (DataCaptureService.VIDEO.equals(type) && !(service instanceof VideoCaptureService)) || */ (DataCaptureService.BARCODE.equals(type) && !(service instanceof BarcodeCaptureService)) || (DataCaptureService.LOCATION.equals(type) && !(service instanceof LocationCaptureService))/*|| (DataCaptureService.RFID.equals(type) && !(service instanceof RFIDCaptureService))*/) { throw new RuntimeException("Service is not of the proper type!"); } } /* convenience functions */ /* public ImageCaptureService getImageCaptureService () throws UnavailableServiceException { return (ImageCaptureService)getService(DataCaptureService.IMAGE); } */ public AudioCaptureService getAudioCaptureService () throws UnavailableServiceException { return (AudioCaptureService)getService(DataCaptureService.AUDIO); } /* public VideoCaptureService getVideoCaptureService () throws UnavailableServiceException { return 
(VideoCaptureService)getService(DataCaptureService.VIDEO); } */ public BarcodeCaptureService getBarcodeCaptureService () throws UnavailableServiceException { return (BarcodeCaptureService)getService(DataCaptureService.BARCODE); } public LocationCaptureService getLocationCaptureService () throws UnavailableServiceException { return (LocationCaptureService)getService(DataCaptureService.LOCATION); } /* public RFIDCaptureService getRFIDCaptureService () throws UnavailableServiceException { return (RFIDCaptureService)getService(DataCaptureService.RFID); } */ }
j2me/core/src/org/javarosa/j2me/services/DataCaptureServiceRegistry.java
/* * Copyright (C) 2009 JavaRosa * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.javarosa.j2me.services; import java.util.Hashtable; import org.javarosa.core.services.UnavailableServiceException; import org.javarosa.j2me.services.DataCaptureService; /** * This is a registry of services to be passed to a state (such as form entry) that can perform data capture. * * A 'service' is an interface implementation that provides data from somewhere outside JavaRosa, typically * through a vendor-specific API not available on all devices. * * To prevent shared code from crashing/not compiling on a device that lacks the device specific API, the * shared code must instead communicate only with an abstract API defined in JavaRosa (i.e., * 'VideoCaptureService'). The deployment then provides an implementation of this service that acts as * an intermediary, and communicates directly with the device-specific API. * * @author Drew Roos * */ public class DataCaptureServiceRegistry { private Hashtable<String, DataCaptureService> services; public DataCaptureServiceRegistry () { services = new Hashtable<String, DataCaptureService>(); } public DataCaptureServiceRegistry (DataCaptureService[] services) { this(); for (int i = 0; i < services.length; i++) registerService(services[i]); } public void registerService (DataCaptureService service) { String type = service.getType(); validateServiceType(type, service); services.put(type, service); } public void unregisterService (String type) { if (services.get(type) == null) { System.err.println("No service registered for type [" + type + "]"); } else { services.remove(type); } } public DataCaptureService getService (String type) throws UnavailableServiceException { DataCaptureService service = services.get(type); if (service == null) { throw new UnavailableServiceException("No service registered for type [" + type + "]"); } else { return service; } } private static void validateServiceType (String type, DataCaptureService service) { if (/* (DataCaptureService.IMAGE.equals(type) && !(service instanceof ImageCaptureService)) || */ (DataCaptureService.AUDIO.equals(type) && !(service instanceof AudioCaptureService)) || /* (DataCaptureService.VIDEO.equals(type) && !(service instanceof VideoCaptureService)) || */ (DataCaptureService.BARCODE.equals(type) && !(service instanceof BarcodeCaptureService)) /*|| (DataCaptureService.LOCATION.equals(type) && !(service instanceof LocationCaptureService)) || (DataCaptureService.RFID.equals(type) && !(service instanceof RFIDCaptureService))*/) { throw new RuntimeException("Service is not of the proper type!"); } } /* convenience functions */ /* public ImageCaptureService getImageCaptureService () throws UnavailableServiceException { return (ImageCaptureService)getService(DataCaptureService.IMAGE); } */ public AudioCaptureService getAudioCaptureService () throws UnavailableServiceException { return (AudioCaptureService)getService(DataCaptureService.AUDIO); } /* public VideoCaptureService getVideoCaptureService () throws 
UnavailableServiceException { return (VideoCaptureService)getService(DataCaptureService.VIDEO); } */ public BarcodeCaptureService getBarcodeCaptureService () throws UnavailableServiceException { return (BarcodeCaptureService)getService(DataCaptureService.BARCODE); } /* public LocationCaptureService getLocationCaptureService () throws UnavailableServiceException { return (LocationCaptureService)getService(DataCaptureService.LOCATION); } public RFIDCaptureService getRFIDCaptureService () throws UnavailableServiceException { return (RFIDCaptureService)getService(DataCaptureService.RFID); } */ }
adding location service
j2me/core/src/org/javarosa/j2me/services/DataCaptureServiceRegistry.java
adding location service
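The commit above enables the LocationCaptureService arm of validateServiceType() and un-comments getLocationCaptureService(). A small, hypothetical wiring sketch follows; it assumes LocationCaptureService sits next to the registry in org.javarosa.j2me.services (the registry refers to it unqualified) and that the device-specific implementation reports DataCaptureService.LOCATION from getType(), since registerService() validates that pairing.

import org.javarosa.core.services.UnavailableServiceException;
import org.javarosa.j2me.services.DataCaptureService;
import org.javarosa.j2me.services.DataCaptureServiceRegistry;
import org.javarosa.j2me.services.LocationCaptureService;

public class LocationServiceWiring {

    // Registers one device-specific location implementation and fetches it back
    // through the convenience accessor enabled by this commit.
    public static LocationCaptureService wire(LocationCaptureService deviceImpl) {
        DataCaptureServiceRegistry registry =
                new DataCaptureServiceRegistry(new DataCaptureService[] { deviceImpl });
        try {
            return registry.getLocationCaptureService();
        } catch (UnavailableServiceException e) {
            // nothing was registered under DataCaptureService.LOCATION
            return null;
        }
    }
}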
Java
bsd-2-clause
1201cd441a026b5efafc087e5b6d5ec9a83e1986
0
f97one/ScreeKeeper_Android
package net.formula97.android.screenkeeper; import android.app.AlertDialog; import android.app.Fragment; import android.content.DialogInterface; import android.test.SingleLaunchActivityTestCase; import android.widget.Button; import android.widget.CheckBox; import android.widget.SeekBar; import android.widget.TextView; import com.robotium.solo.Solo; public class MainActivityTest extends SingleLaunchActivityTestCase<MainActivity> { public static final String APP_PACKAGE="net.formula97.android.screenkeeper"; private Solo solo; public MainActivityTest() { super(APP_PACKAGE, MainActivity.class); } @Override protected void setUp() throws Exception { super.setUp(); solo = new Solo(getInstrumentation(), getActivity()); } @Override protected void tearDown() throws Exception { super.tearDown(); solo.finishOpenedActivities(); } public void test001_OptionsMenu() throws Throwable { final int minPitch = 15; final int maxPitch = 25; final int timeout = 0; // UIใฎ่จญๅฎšใ‚’ๅค‰ใˆใ‚‹ getActivity().runOnUiThread(new Runnable() { @Override public void run() { // ใ€Œ่ตทๅ‹•ๆ™‚ใ‹ใ‚‰ๆœ‰ๅŠนใ€ใ‚ชใƒณ getCbStartup().setChecked(true); // ๆœ€ๅฐ่ง’ 15 getSbMin().setProgress(minPitch); getActivity().onProgressChanged(getSbMin(), minPitch, false); // ๆœ€ๅคง่ง’ 25 (+ 45 = 70) getSbMax().setProgress(maxPitch); getActivity().onProgressChanged(getSbMax(), maxPitch, false); // ใ‚ฟใ‚คใƒ ใ‚ขใ‚ฆใƒˆ0 getSbTimeout().setProgress(timeout); getActivity().onProgressChanged(getSbTimeout(), timeout, false); } }); getInstrumentation().waitForIdleSync(); // Robotium SoloใงOptionsMenuใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ solo.clickOnMenuItem(getActivity().getString(R.string.restore_default)); solo.sleep(1000); Fragment fragment = getActivity().getFragmentManager().findFragmentByTag(MessageDialogs.FRAGMENT_KEY); assertNotNull("MessageDialogใฎDialogFragmentใŒๅ–ๅพ—ใงใใฆใ„ใ‚‹", fragment); assertTrue(fragment instanceof MessageDialogs); assertTrue(((MessageDialogs) fragment).getShowsDialog()); final AlertDialog ad = (AlertDialog) ((MessageDialogs) fragment).getDialog(); Button posi = ad.getButton(DialogInterface.BUTTON_POSITIVE); Button nega = ad.getButton(DialogInterface.BUTTON_NEGATIVE); Button neu = ad.getButton(DialogInterface.BUTTON_NEUTRAL); assertNotNull("PositiveButtonใ‚’ๆŒใฃใฆใ„ใ‚‹", posi); assertNotNull("NegativeButtonใ‚’ๆŒใฃใฆใ„ใ‚‹", nega); // NeutralButtonใฏๅฎš็พฉใ—ใฆใ„ใชใ„ใฏใšใชใ‚“ใ ใŒใ€ใฉใ†ใ„ใ†ใ‚ใ‘ใ‹ๅ–ๅพ—ใงใใ‚‹ใฎใงใ€ใƒ†ใ‚นใƒˆใจใ—ใฆๆˆ็ซ‹ใ—ใชใ„ // assertNull("NeutralButtonใฏๆŒใŸใชใ„", neu); getActivity().runOnUiThread(new Runnable() { @Override public void run() { ad.getButton(DialogInterface.BUTTON_NEGATIVE).performClick(); } }); getInstrumentation().waitForIdleSync(); int currentMin = Integer.parseInt(getCurrentMinPitch().getText().toString()); int currentMax = Integer.parseInt(getCurrentMaxPitch().getText().toString()); String currentTimeout = getAquireTimeout().getText().toString(); assertEquals("ๆœ€ๅฐ่ง’ใฏ15ใฎใพใพ", minPitch, currentMin); assertEquals("ๆœ€ๅคง่ง’ใฏ70ใฎใพใพ", maxPitch + Consts.Prefs.MAX_PITCH_OFFSET, currentMax); assertEquals("ใ‚ฟใ‚คใƒ ใ‚ขใ‚ฆใƒˆใฏใ€Œno timeoutใ€", getActivity().getString(R.string.no_timeout), currentTimeout); fragment = getActivity().getFragmentManager().findFragmentByTag(MessageDialogs.FRAGMENT_KEY); assertNull("MessageDialogใฎDialogFragmentใŒๆถˆใˆใฆใ„ใ‚‹", fragment); // ๅ†ๅบฆ่กจ็คบใ—ใฆPositiveButtonใ‚’ๆŠผใ™ solo.clickOnMenuItem(getActivity().getString(R.string.restore_default)); solo.sleep(1000); fragment = 
getActivity().getFragmentManager().findFragmentByTag(MessageDialogs.FRAGMENT_KEY); final AlertDialog ad2 = (AlertDialog) ((MessageDialogs) fragment).getDialog(); getActivity().runOnUiThread(new Runnable() { @Override public void run() { ad2.getButton(DialogInterface.BUTTON_POSITIVE).performClick(); } }); getInstrumentation().waitForIdleSync(); currentMin = Integer.parseInt(getCurrentMinPitch().getText().toString()); currentMax = Integer.parseInt(getCurrentMaxPitch().getText().toString()); currentTimeout = getAquireTimeout().getText().toString(); assertEquals("ๆœ€ๅฐ่ง’ใฏ5ใซๆˆปใฃใฆใ„ใ‚‹", Consts.Prefs.DEFAULT_MIN_PITCH, currentMin); assertEquals("ๆœ€ๅคง่ง’ใฏ80ใซๆˆปใฃใฆใ„ใ‚‹", Consts.Prefs.DEFAULT_MAX_PITCH + Consts.Prefs.MAX_PITCH_OFFSET, currentMax); assertEquals("ใ‚ฟใ‚คใƒ ใ‚ขใ‚ฆใƒˆใฏ180็ง’ใซๆˆปใฃใฆใ„ใ‚‹", String.valueOf(Consts.Prefs.DEFAULT_ACQUIRE_TIMEOUT) + getActivity().getString(R.string.seconds), currentTimeout); assertFalse("ใ‚นใ‚ฟใƒผใƒˆใ‚ขใƒƒใƒ—่ตทๅ‹•ใฏ็„กๅŠนใซๆˆปใฃใฆใ„ใ‚‹", getCbStartup().isChecked()); fragment = getActivity().getFragmentManager().findFragmentByTag(MessageDialogs.FRAGMENT_KEY); assertNull("MessageDialogใฎDialogFragmentใŒๆถˆใˆใฆใ„ใ‚‹", fragment); } private CheckBox getCbStartup() { return (CheckBox) getActivity().findViewById(R.id.cb_startup); } private SeekBar getSbMin() { return (SeekBar)getActivity().findViewById(R.id.sb_minimumPitch); } private SeekBar getSbMax() { return (SeekBar)getActivity().findViewById(R.id.sb_maximumPitch); } private SeekBar getSbTimeout() { return (SeekBar)getActivity().findViewById(R.id.sb_acquireTimeout); } private TextView getCurrentMinPitch() { return (TextView) getActivity().findViewById(R.id.tv_currentMinPitch); } private TextView getCurrentMaxPitch() { return (TextView) getActivity().findViewById(R.id.tv_currentMaxPitch); } private TextView getAquireTimeout() { return (TextView) getActivity().findViewById(R.id.tv_acquire_timeout); } }
android/src/androidTest/java/net/formula97/android/screenkeeper/MainActivityTest.java
package net.formula97.android.screenkeeper; import android.app.AlertDialog; import android.app.Fragment; import android.content.DialogInterface; import android.test.SingleLaunchActivityTestCase; import android.widget.CheckBox; import android.widget.SeekBar; import android.widget.TextView; import com.robotium.solo.Solo; public class MainActivityTest extends SingleLaunchActivityTestCase<MainActivity> { public static final String APP_PACKAGE="net.formula97.android.screenkeeper"; private Solo solo; public MainActivityTest() { super(APP_PACKAGE, MainActivity.class); } @Override protected void setUp() throws Exception { super.setUp(); solo = new Solo(getInstrumentation(), getActivity()); } @Override protected void tearDown() throws Exception { super.tearDown(); solo.finishOpenedActivities(); } public void test001_OptionsMenu() throws Throwable { final int minPitch = 15; final int maxPitch = 25; final int timeout = 60; // UIใฎ่จญๅฎšใ‚’ๅค‰ใˆใ‚‹ getActivity().runOnUiThread(new Runnable() { @Override public void run() { // ใ€Œ่ตทๅ‹•ๆ™‚ใ‹ใ‚‰ๆœ‰ๅŠนใ€ใ‚ชใƒณ getCbStartup().setChecked(true); // ๆœ€ๅฐ่ง’ 15 getSbMin().setProgress(minPitch); getActivity().onProgressChanged(getSbMin(), minPitch, false); // ๆœ€ๅคง่ง’ 25 (+ 45 = 70) getSbMax().setProgress(maxPitch); getActivity().onProgressChanged(getSbMax(), maxPitch, false); // ใ‚ฟใ‚คใƒ ใ‚ขใ‚ฆใƒˆ60 getSbTimeout().setProgress(timeout); getActivity().onProgressChanged(getSbTimeout(), timeout, false); } }); getInstrumentation().waitForIdleSync(); // Robotium SoloใงOptionsMenuใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ solo.clickOnMenuItem(getActivity().getString(R.string.restore_default)); solo.sleep(1000); Fragment fragment = getActivity().getFragmentManager().findFragmentByTag(MessageDialogs.FRAGMENT_KEY); assertNotNull("MessageDialogใฎDialogFragmentใŒๅ–ๅพ—ใงใใฆใ„ใ‚‹", fragment); assertTrue(fragment instanceof MessageDialogs); assertTrue(((MessageDialogs) fragment).getShowsDialog()); final AlertDialog ad = (AlertDialog) ((MessageDialogs) fragment).getDialog(); assertNotNull("PositiveButtonใ‚’ๆŒใฃใฆใ„ใ‚‹", ad.getButton(DialogInterface.BUTTON_POSITIVE)); assertNotNull("NegativeButtonใ‚’ๆŒใฃใฆใ„ใ‚‹", ad.getButton(DialogInterface.BUTTON_NEGATIVE)); // assertNull("NeutralButtonใฏๆŒใŸใชใ„", ad.getButton(DialogInterface.BUTTON_NEUTRAL)); getActivity().runOnUiThread(new Runnable() { @Override public void run() { ad.getButton(DialogInterface.BUTTON_NEGATIVE).performClick(); } }); getInstrumentation().waitForIdleSync(); int currentMin = Integer.parseInt(getCurrentMinPitch().getText().toString()); int currentMax = Integer.parseInt(getCurrentMaxPitch().getText().toString()); String currentTimeout = getAquireTimeout().getText().toString(); assertEquals("ๆœ€ๅฐ่ง’ใฏ15ใฎใพใพ", minPitch, currentMin); assertEquals("ๆœ€ๅคง่ง’ใฏ70ใฎใพใพ", maxPitch + Consts.Prefs.MAX_PITCH_OFFSET, currentMax); assertEquals("ใ‚ฟใ‚คใƒ ใ‚ขใ‚ฆใƒˆใฏ60็ง’", String.valueOf(timeout) + getActivity().getString(R.string.seconds), currentTimeout); fragment = getActivity().getFragmentManager().findFragmentByTag(MessageDialogs.FRAGMENT_KEY); assertNull("MessageDialogใฎDialogFragmentใŒๆถˆใˆใฆใ„ใ‚‹", fragment); // ๅ†ๅบฆ่กจ็คบใ—ใฆPositiveButtonใ‚’ๆŠผใ™ solo.clickOnMenuItem(getActivity().getString(R.string.restore_default)); solo.sleep(1000); fragment = getActivity().getFragmentManager().findFragmentByTag(MessageDialogs.FRAGMENT_KEY); final AlertDialog ad2 = (AlertDialog) ((MessageDialogs) fragment).getDialog(); getActivity().runOnUiThread(new Runnable() { @Override public void run() { 
ad2.getButton(DialogInterface.BUTTON_POSITIVE).performClick(); } }); getInstrumentation().waitForIdleSync(); currentMin = Integer.parseInt(getCurrentMinPitch().getText().toString()); currentMax = Integer.parseInt(getCurrentMaxPitch().getText().toString()); currentTimeout = getAquireTimeout().getText().toString(); assertEquals("ๆœ€ๅฐ่ง’ใฏ5ใซๆˆปใฃใฆใ„ใ‚‹", Consts.Prefs.DEFAULT_MIN_PITCH, currentMin); assertEquals("ๆœ€ๅคง่ง’ใฏ80ใซๆˆปใฃใฆใ„ใ‚‹", Consts.Prefs.DEFAULT_MAX_PITCH + Consts.Prefs.MAX_PITCH_OFFSET, currentMax); assertEquals("ใ‚ฟใ‚คใƒ ใ‚ขใ‚ฆใƒˆใฏ180็ง’ใซๆˆปใฃใฆใ„ใ‚‹", String.valueOf(Consts.Prefs.DEFAULT_ACQUIRE_TIMEOUT) + getActivity().getString(R.string.seconds), currentTimeout); assertFalse("ใ‚นใ‚ฟใƒผใƒˆใ‚ขใƒƒใƒ—่ตทๅ‹•ใฏ็„กๅŠนใซๆˆปใฃใฆใ„ใ‚‹", getCbStartup().isChecked()); fragment = getActivity().getFragmentManager().findFragmentByTag(MessageDialogs.FRAGMENT_KEY); assertNull("MessageDialogใฎDialogFragmentใŒๆถˆใˆใฆใ„ใ‚‹", fragment); } private CheckBox getCbStartup() { return (CheckBox) getActivity().findViewById(R.id.cb_startup); } private SeekBar getSbMin() { return (SeekBar)getActivity().findViewById(R.id.sb_minimumPitch); } private SeekBar getSbMax() { return (SeekBar)getActivity().findViewById(R.id.sb_maximumPitch); } private SeekBar getSbTimeout() { return (SeekBar)getActivity().findViewById(R.id.sb_acquireTimeout); } private TextView getCurrentMinPitch() { return (TextView) getActivity().findViewById(R.id.tv_currentMinPitch); } private TextView getCurrentMaxPitch() { return (TextView) getActivity().findViewById(R.id.tv_currentMaxPitch); } private TextView getAquireTimeout() { return (TextView) getActivity().findViewById(R.id.tv_acquire_timeout); } }
DialogInterfaceใฎใƒœใ‚ฟใƒณๅ–ๅพ—ใซใคใ„ใฆใฎๅ‡ฆ็†ใ‚’ๆ˜Ž็คบๅŒ–ใ€‚
android/src/androidTest/java/net/formula97/android/screenkeeper/MainActivityTest.java
DialogInterfaceใฎใƒœใ‚ฟใƒณๅ–ๅพ—ใซใคใ„ใฆใฎๅ‡ฆ็†ใ‚’ๆ˜Ž็คบๅŒ–ใ€‚
Java
bsd-3-clause
6566341b571f3fcbf09d43ff23f888bc0394f394
0
phrase/Phrase-AndroidStudio
package com.phrase.intellij; import com.intellij.execution.ExecutionException; import com.intellij.execution.configurations.GeneralCommandLine; import com.intellij.execution.process.CapturingProcessHandler; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.process.ProcessListener; import com.intellij.notification.Notification; import com.intellij.notification.NotificationType; import com.intellij.notification.Notifications; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.phrase.intellij.ui.ColorTextPane; import org.jetbrains.annotations.NotNull; import java.awt.*; import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Map; /** * Created by kolja on 15.10.15. */ public class PushPullAdapter { private String clientPath; private String projectPath; private ToolWindowHelper outputWindowHelper; private SimpleDateFormat sdf; public PushPullAdapter(final String path, Project project) { clientPath = path; projectPath = project.getBasePath(); outputWindowHelper = new ToolWindowHelper(project); sdf = new SimpleDateFormat("HH:mm:ss"); } public void run(final String clientAction) { if (outputWindowHelper.getOutputWindow().isActive()) { final ColorTextPane finalArea = outputWindowHelper.getColorTextPane(); outputWindowHelper.getOutputWindow().show(new Runnable() { @Override public void run() { runCommand(clientAction, finalArea); } }); } else { outputWindowHelper.getOutputWindow().activate(new Runnable() { @Override public void run() { runCommand(clientAction, outputWindowHelper.getColorTextPane()); } }); } } private void runCommand(final String clientAction, final ColorTextPane finalArea) { try { GeneralCommandLine gcl = new GeneralCommandLine(clientPath, clientAction); gcl.withWorkDirectory(projectPath); Map env = new HashMap<String, String>(); env.put("PHRASE_USER_AGENT", "AndroidStudio"); gcl.withEnvironment(env); final CapturingProcessHandler processHandler = new CapturingProcessHandler(gcl.createProcess(), Charset.defaultCharset(), gcl.getCommandLineString()); processHandler.addProcessListener(new ProcessListener() { @Override public void startNotified(ProcessEvent event) { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { finalArea.setEditable(true); finalArea.appendANSI(getFormattedTime() + "phrase " + clientAction + "\n"); } }); } @Override public void processTerminated(ProcessEvent event) { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { finalArea.setEditable(false); } }); } @Override public void processWillTerminate(ProcessEvent event, boolean willBeDestroyed) { } @Override public void onTextAvailable(final ProcessEvent event, final Key outputType) { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { if (event.getText().length() < 5) { finalArea.appendANSI(event.getText()); return; } if (outputType.toString() == "stdout") { finalArea.appendANSI(getFormattedTime() + event.getText()); } else if (outputType.toString() == "system") { finalArea.append(Color.getHSBColor(0.000f, 0.000f, 0.000f), getFormattedTime() + event.getText()); } else { finalArea.append(Color.getHSBColor(0.000f, 1.000f, 0.502f), event.getText()); } } }); } }); Thread queryThread = new Thread() { public void run() { processHandler.runProcess(); } }; queryThread.start(); } catch 
(ExecutionException exception) { Notifications.Bus.notify(new Notification("Phrase", "Error", exception.getMessage(), NotificationType.ERROR)); } } @NotNull private String getFormattedTime() { return sdf.format(new Date()) + " "; } }
src/com/phrase/intellij/PushPullAdapter.java
package com.phrase.intellij; import com.intellij.execution.ExecutionException; import com.intellij.execution.configurations.GeneralCommandLine; import com.intellij.execution.process.CapturingProcessHandler; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.process.ProcessListener; import com.intellij.ide.plugins.IdeaPluginDescriptor; import com.intellij.notification.Notification; import com.intellij.notification.NotificationType; import com.intellij.notification.Notifications; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.extensions.PluginId; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.phrase.intellij.ui.ColorTextPane; import org.jetbrains.annotations.NotNull; import java.awt.*; import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Map; /** * Created by kolja on 15.10.15. */ public class PushPullAdapter { private String clientPath; private String projectPath; private ToolWindowHelper outputWindowHelper; private SimpleDateFormat sdf; public PushPullAdapter(final String path, Project project) { clientPath = path; projectPath = project.getBasePath(); outputWindowHelper = new ToolWindowHelper(project); sdf = new SimpleDateFormat("HH:mm:ss"); } public void run(final String clientAction) { if (outputWindowHelper.getOutputWindow().isActive()) { final ColorTextPane finalArea = outputWindowHelper.getColorTextPane(); outputWindowHelper.getOutputWindow().show(new Runnable() { @Override public void run() { runCommand(clientAction, finalArea); } }); } else { outputWindowHelper.getOutputWindow().activate(new Runnable() { @Override public void run() { runCommand(clientAction, outputWindowHelper.getColorTextPane()); } }); } } private void runCommand(final String clientAction, final ColorTextPane finalArea) { try { GeneralCommandLine gcl = new GeneralCommandLine(clientPath, clientAction); gcl.withWorkDirectory(projectPath); PluginId pluginId = com.intellij.ide.plugins.PluginManager.getPluginByClassName(getClass().getName()); final CapturingProcessHandler processHandler = new CapturingProcessHandler(gcl.createProcess(), Charset.defaultCharset(), gcl.getCommandLineString()); processHandler.addProcessListener(new ProcessListener() { @Override public void startNotified(ProcessEvent event) { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { finalArea.setEditable(true); finalArea.appendANSI(getFormattedTime() + "phrase " + clientAction + "\n"); } }); } @Override public void processTerminated(ProcessEvent event) { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { finalArea.setEditable(false); } }); } @Override public void processWillTerminate(ProcessEvent event, boolean willBeDestroyed) { } @Override public void onTextAvailable(final ProcessEvent event, final Key outputType) { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { if (event.getText().length() < 5) { finalArea.appendANSI(event.getText()); return; } if (outputType.toString() == "stdout") { finalArea.appendANSI(getFormattedTime() + event.getText()); } else if (outputType.toString() == "system") { finalArea.append(Color.getHSBColor(0.000f, 0.000f, 0.000f), getFormattedTime() + event.getText()); } else { finalArea.append(Color.getHSBColor(0.000f, 1.000f, 0.502f), event.getText()); } } }); } }); Thread queryThread = new 
Thread() { public void run() { processHandler.runProcess(); } }; queryThread.start(); } catch (ExecutionException exception) { Notifications.Bus.notify(new Notification("Phrase", "Error", exception.getMessage(), NotificationType.ERROR)); } } @NotNull private String getFormattedTime() { return sdf.format(new Date()) + " "; } }
Add Android Studio user agent
src/com/phrase/intellij/PushPullAdapter.java
Add Android Studio user agent
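The only functional change in the commit above is the PHRASE_USER_AGENT environment variable handed to the phrase CLI through GeneralCommandLine.withEnvironment(). Here is a stripped-down, hypothetical sketch of that technique, using only the calls already present in PushPullAdapter; the class and method names are placeholders.

import java.util.HashMap;
import java.util.Map;

import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.GeneralCommandLine;

final class PhraseCommand {

    private PhraseCommand() {
    }

    // Builds and starts the phrase CLI with a user-agent marker so the backend can
    // tell that the request came from the Android Studio plugin.
    static Process start(String clientPath, String projectPath, String action) throws ExecutionException {
        GeneralCommandLine gcl = new GeneralCommandLine(clientPath, action);
        gcl.withWorkDirectory(projectPath);

        Map<String, String> env = new HashMap<>();
        env.put("PHRASE_USER_AGENT", "AndroidStudio");
        gcl.withEnvironment(env);

        return gcl.createProcess();
    }
}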
Java
bsd-3-clause
905dfc99b783b3b2233ffee640b1651f3c2bd8a7
0
stoewer/nix-java
package org.gnode.nix; import net.jcip.annotations.NotThreadSafe; import org.gnode.nix.valid.Result; import org.gnode.nix.valid.Validator; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.*; import java.util.function.Predicate; import static org.junit.Assert.*; @NotThreadSafe public class TestSource { private File file; private Block block; private Section section; private Source source, source_other, source_null; private DataArray darray; private MultiTag mtag; private Date statup_time; @Before public void setUp() { // precision of time_t is in seconds hence (millis / 1000) * 1000 statup_time = new Date((System.currentTimeMillis() / 1000) * 1000); file = File.open("test_Source_" + UUID.randomUUID().toString() + ".h5", FileMode.Overwrite); block = file.createBlock("block", "dataset"); section = file.createSection("foo_section", "metadata"); source = block.createSource("source_one", "channel"); source_other = block.createSource("source_two", "channel"); source_null = null; // create a DataArray & a MultiTag darray = block.createDataArray("DataArray", "dataArray", DataType.Double, new NDSize(new int[]{0, 0})); double[] A = new double[5 * 5]; for (int i = 0; i < 5; i++) { A[i * 5 + i] = 100 * i; } darray.setDataExtent(new NDSize(new int[]{5, 5})); darray.setData(A, darray.getDataExtent(), new NDSize()); mtag = block.createMultiTag("tag_one", "test_tag", darray); } @After public void tearDown() { String location = file.getLocation(); file.close(); // delete file java.io.File f = new java.io.File(location); f.delete(); } @Test public void testValidate() { Result result = Validator.validate(source); assertTrue(result.getErrors().size() == 0); assertTrue(result.getWarnings().size() == 0); } @Test public void testId() { assertEquals(source.getId().length(), 36); } @Test public void testName() { assertEquals(source.getName(), "source_one"); } @Test public void testType() { assertEquals(source.getType(), "channel"); } @Test public void testDefinition() { String def = "def"; source.setDefinition(def); assertEquals(source.getDefinition(), def); source.setDefinition(null); assertNull(source.getDefinition()); } @Test public void testtestMetadataAccess() { assertNull(source.getMetadata()); source.setMetadata(section); assertNotNull(source.getMetadata()); source.removeMetadata(); assertNull(source.getMetadata()); // test deleter removing link too source.setMetadata(section); file.deleteSection(section.getId()); assertNull(source.getMetadata()); // re-create section section = file.createSection("foo_section", "metadata"); } @Test public void testSourceAccess() { List<String> names = Arrays.asList("source_a", "source_b", "source_c", "source_d", "source_e"); assertEquals(source.getSourceCount(), 0); assertEquals(source.getSources().size(), 0); assertFalse(source.hasSource("invalid_id")); Source s = null; try { source.hasSource(s); fail(); } catch (RuntimeException re) { } ArrayList<String> ids = new ArrayList<String>(); for (String name : names) { Source child_source = source.createSource(name, "channel"); assertEquals(child_source.getName(), name); assertTrue(source.hasSource(child_source)); assertTrue(source.hasSource(name)); ids.add(child_source.getId()); } try { source.createSource(names.get(0), "channel"); fail(); } catch (RuntimeException re) { } assertEquals(source.getSourceCount(), names.size()); assertEquals(source.getSources().size(), names.size()); for (String id : ids) { Source child_source = source.getSource(id); assertTrue(source.hasSource(id)); 
assertEquals(child_source.getId(), id); source.deleteSource(id); } Source s1, s2 = null; s1 = source.createSource("name", "type"); try { source.deleteSource(s2); fail(); } catch (RuntimeException re) { } try { source.deleteSource(s1); } catch (Exception e) { fail(); } assertEquals(source.getSourceCount(), 0); assertEquals(source.getSources().size(), 0); assertFalse(source.hasSource("invalid_id")); } @Test public void testFindSource() { /* We create the following tree: * * source---l1n1---l2n1---l3n1 * | | | * | ------l2n2 * | | * | |-----l2n3---l3n2 * | | * | ------l3n3 * ------l1n2 * | * ------l1n3---l2n4 * | * ------l2n5---l3n4 * | * ------l2n6---l3n5 * | | * mtag-------------| | * | * darray------------------- */ Source l1n1 = source.createSource("l1n1", "typ1"); Source l1n2 = source.createSource("l1n2", "typ2"); Source l1n3 = source.createSource("l1n3", "typ3"); Source l2n1 = l1n1.createSource("l2n1", "typ1"); Source l2n2 = l1n1.createSource("l2n2", "typ2"); Source l2n3 = l1n1.createSource("l2n3", "typ2"); Source l2n4 = l1n3.createSource("l2n4", "typ2"); Source l2n5 = l1n3.createSource("l2n5", "typ2"); Source l2n6 = l1n3.createSource("l2n6", "typ3"); Source l3n1 = l2n1.createSource("l3n1", "typ1"); Source l3n2 = l2n3.createSource("l3n2", "typ2"); Source l3n3 = l2n3.createSource("l3n3", "typ2"); Source l3n4 = l2n5.createSource("l3n4", "typ2"); Source l3n5 = l2n5.createSource("l3n5", "typ2"); mtag.addSource(l2n6.getId()); darray.addSource(l3n5.getId()); // test if sources are in place assertTrue(mtag.hasSource(l2n6)); assertTrue(darray.hasSource(l3n5)); assertTrue(mtag.getSources().size() == 1); assertTrue(darray.getSources().size() == 1); // test depth limit assertTrue(source.findSources().size() == 15); assertTrue(source.findSources((Source s) -> true, 2).size() == 10); assertTrue(source.findSources((Source s) -> true, 1).size() == 4); assertTrue(source.findSources((Source s) -> true, 0).size() == 1); // test filter Predicate<Source> filter_typ1 = (Source s) -> s.getType().equals("typ1"); Predicate<Source> filter_typ2 = (Source s) -> s.getType().equals("typ2"); assertTrue(source.findSources(filter_typ1).size() == 3); assertTrue(source.findSources(filter_typ2).size() == 9); // test deleter /* chop the tree down to: * * source---l1n2 * | * ------l1n3---l2n4 * | * ------l2n5---l3n4 * | * ------l2n6---l3n5 * | | * mtag-------------| | * | * darray------------------- */ source.deleteSource(l1n1.getId()); assertTrue(source.findSources().size() == 8); /* chop the tree down to: * * source---l1n3---l2n4 * | * ------l2n5---l3n4 * | * ------l2n6---l3n5 * | | * mtag-------------| | * | * darray------------------- */ source.deleteSource(l1n2.getId()); assertTrue(source.findSources().size() == 7); /* chop the tree down to: * * source * mtag * darray */ source.deleteSource(l1n3.getId()); assertTrue(source.findSources().size() == 1); assertFalse(mtag.hasSource(l2n6)); assertFalse(darray.hasSource(l3n5)); assertTrue(mtag.getSources().size() == 0); assertTrue(darray.getSources().size() == 0); } @Test public void testCreatedAt() { assertTrue(source.getCreatedAt().compareTo(statup_time) >= 0); long time = System.currentTimeMillis() - 10000000L * 1000; // precision of time_t is in seconds hence (millis / 1000) * 1000 time = time / 1000 * 1000; Date past_time = new Date(time); source.forceCreatedAt(past_time); assertTrue(source.getCreatedAt().equals(past_time)); } @Test public void testUpdatedAt() { assertTrue(source.getUpdatedAt().compareTo(statup_time) >= 0); } }
src/test/java/org/gnode/nix/TestSource.java
package org.gnode.nix; import net.jcip.annotations.NotThreadSafe; import org.gnode.nix.valid.Result; import org.gnode.nix.valid.Validator; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.*; import static org.junit.Assert.*; @NotThreadSafe public class TestSource { private File file; private Block block; private Section section; private Source source, source_other, source_null; private Date statup_time; @Before public void setUp() { // precision of time_t is in seconds hence (millis / 1000) * 1000 statup_time = new Date((System.currentTimeMillis() / 1000) * 1000); file = File.open("test_Source_" + UUID.randomUUID().toString() + ".h5", FileMode.Overwrite); block = file.createBlock("block", "dataset"); section = file.createSection("foo_section", "metadata"); source = block.createSource("source_one", "channel"); source_other = block.createSource("source_two", "channel"); source_null = null; } @After public void tearDown() { String location = file.getLocation(); file.close(); // delete file java.io.File f = new java.io.File(location); f.delete(); } @Test public void testValidate() { Result result = Validator.validate(source); assertTrue(result.getErrors().size() == 0); assertTrue(result.getWarnings().size() == 0); } @Test public void testId() { assertEquals(source.getId().length(), 36); } @Test public void testName() { assertEquals(source.getName(), "source_one"); } @Test public void testType() { assertEquals(source.getType(), "channel"); } @Test public void testDefinition() { String def = "def"; source.setDefinition(def); assertEquals(source.getDefinition(), def); source.setDefinition(null); assertNull(source.getDefinition()); } @Test public void testtestMetadataAccess() { assertNull(source.getMetadata()); source.setMetadata(section); assertNotNull(source.getMetadata()); source.removeMetadata(); assertNull(source.getMetadata()); // test deleter removing link too source.setMetadata(section); file.deleteSection(section.getId()); assertNull(source.getMetadata()); // re-create section section = file.createSection("foo_section", "metadata"); } @Test public void testSourceAccess() { List<String> names = Arrays.asList("source_a", "source_b", "source_c", "source_d", "source_e"); assertEquals(source.getSourceCount(), 0); assertEquals(source.getSources().size(), 0); assertFalse(source.hasSource("invalid_id")); Source s = null; try { source.hasSource(s); fail(); } catch (RuntimeException re) { } ArrayList<String> ids = new ArrayList<String>(); for (String name : names) { Source child_source = source.createSource(name, "channel"); assertEquals(child_source.getName(), name); assertTrue(source.hasSource(child_source)); assertTrue(source.hasSource(name)); ids.add(child_source.getId()); } try { source.createSource(names.get(0), "channel"); fail(); } catch (RuntimeException re) { } assertEquals(source.getSourceCount(), names.size()); assertEquals(source.getSources().size(), names.size()); for (String id : ids) { Source child_source = source.getSource(id); assertTrue(source.hasSource(id)); assertEquals(child_source.getId(), id); source.deleteSource(id); } Source s1, s2 = null; s1 = source.createSource("name", "type"); try { source.deleteSource(s2); fail(); } catch (RuntimeException re) { } try { source.deleteSource(s1); } catch (Exception e) { fail(); } assertEquals(source.getSourceCount(), 0); assertEquals(source.getSources().size(), 0); assertFalse(source.hasSource("invalid_id")); } @Test public void testCreatedAt() { 
assertTrue(source.getCreatedAt().compareTo(statup_time) >= 0); long time = System.currentTimeMillis() - 10000000L * 1000; // precision of time_t is in seconds hence (millis / 1000) * 1000 time = time / 1000 * 1000; Date past_time = new Date(time); source.forceCreatedAt(past_time); assertTrue(source.getCreatedAt().equals(past_time)); } @Test public void testUpdatedAt() { assertTrue(source.getUpdatedAt().compareTo(statup_time) >= 0); } }
[TestSource.java] update tests for findSources
src/test/java/org/gnode/nix/TestSource.java
[TestSource.java] update tests for findSources
Java
mit
78e2e03bbbb930a53e8a091fe9351491ca5ccb89
0
ase-sharif/op2,ase-sharif/FaultLocalizationTechniques
/** * Calculates suspiciousness and confidence values according to the Op2 fault localization * technique. * The usage mode is to create a coverage matrix that specifies which program * elements are executed by which test cases. In terms of this implementation, each program element called * a statement. coverage matrix is encoded as a two dimensional array where the first dimension is indexed by * the test case number and the second dimension is indexed by the statement number. */ public class Op2 { private boolean[][] coverageMatrix; // coverage matrix -- [test][statement] private boolean[] failTestCases; // failing test cases -- [test] private boolean[] liveTestCases; // live test cases -- [test] private boolean[] badCoverage; // bad coverage (no coverage information, usually due to a segmentation fault) -- [test] private boolean[] coverableStatements; // coverable statements -- [statement] private boolean isBadCoverageCalculated = false; private int numberOfTests; // number of test cases private int numberOfStatements; // number of statements private int totalLiveFail; private int totalLivePass; private int[] passOnStatement; // p(s), for every s and considering liveness of test cases -- [statement] private int[] failOnStatement; // f(s), for every s and considering liveness of test cases -- [statemen] /** * Constructor of Op2 class */ public Op2(boolean[][] coverageMatrix) { this.coverageMatrix = coverageMatrix; numberOfTests = coverageMatrix.length; numberOfStatements = coverageMatrix[0].length; // initialize so that all test cases are live liveTestCases = new boolean[numberOfTests]; for (int i = 0; i < liveTestCases.length; i++) { liveTestCases[i] = true; } } private void calculateBadTestCoverage() { if (isBadCoverageCalculated) return; badCoverage = new boolean[numberOfTests]; for (int i = 0; i < numberOfTests; i++) { badCoverage[i] = true; for (int j = 0; j < numberOfStatements; j++) { //if there is a statement covered for this test case this is not a bad test case if (coverageMatrix[i][j]) { badCoverage[i] = false; break; } } isBadCoverageCalculated = true; } } private void calculateTotalLiveFailAndPass() { totalLiveFail = 0; totalLivePass = 0; for (int i = 0; i < numberOfTests; i++) { if (liveTestCases[i]) { if (!badCoverage[i]) { if (failTestCases[i]) { totalLiveFail++; } else totalLivePass++; } } } } private void calculatePassOnStmtAndFailOnStmt() { passOnStatement = new int[numberOfStatements]; failOnStatement = new int[numberOfStatements]; // first only consider live test cases for (int i = 0; i < numberOfTests; i++) { // if this isn't a dead test case if (!badCoverage[i]) { // if this test case is live if (liveTestCases[i]) { for (int j = 0; j < numberOfStatements; j++) { if (coverableStatements[j]) { if (coverageMatrix[i][j]) { if (failTestCases[i]) failOnStatement[j]++; else passOnStatement[j]++; } } } } } } } }
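The new calculatePassOnStmtAndFailOnStmt method above supplies f(s), p(s) and the live pass/fail totals, which is all the Op2 ranking itself needs. The Op2 suspiciousness metric is commonly given as fail(s) - pass(s) / (totalLivePass + 1); the method below is a hypothetical next step sketched against the fields above, not code that exists in this commit, and it assumes the calculate* methods have already been invoked.

// Hypothetical continuation: rank each statement by Op2 suspiciousness.
// Op2(s) = failOnStatement[s] - passOnStatement[s] / (totalLivePass + 1)
private double[] calculateSuspiciousness() {
    double[] suspiciousness = new double[numberOfStatements];
    for (int s = 0; s < numberOfStatements; s++) {
        suspiciousness[s] =
                failOnStatement[s] - (double) passOnStatement[s] / (totalLivePass + 1);
    }
    return suspiciousness;
}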
src/main/java/Op2.java
/** * Calculates suspiciousness and confidence values according to the Op2 fault localization * technique. * The usage mode is to create a coverage matrix that specifies which program * elements are executed by which test cases. In terms of this implementation, each program element called * a statement. coverage matrix is encoded as a two dimensional array where the first dimension is indexed by * the test case number and the second dimension is indexed by the statement number. */ public class Op2 { private boolean[][] coverageMatrix; // coverage matrix -- [test][statement] private boolean[] failTestCases; // failing test cases -- [test] private boolean[] liveTestCases; // live test cases -- [test] private boolean[] badCoverage; // bad coverage (no coverage information, usually due to a segmentation fault) -- [test] private boolean isBadCoverageCalculated = false; private int numberOfTests; // number of test cases private int numberOfStatements; // number of statements private int totalLiveFail; private int totalLivePass; /** * Constructor of Op2 class */ public Op2(boolean[][] coverageMatrix) { this.coverageMatrix = coverageMatrix; numberOfTests = coverageMatrix.length; numberOfStatements = coverageMatrix[0].length; // initialize so that all test cases are live liveTestCases = new boolean[numberOfTests]; for (int i = 0; i < liveTestCases.length; i++) { liveTestCases[i] = true; } } private void calculateBadTestCoverage() { if (isBadCoverageCalculated) return; badCoverage = new boolean[numberOfTests]; for (int i = 0; i < numberOfTests; i++) { badCoverage[i] = true; for (int j = 0; j < numberOfStatements; j++) { //if there is a statement covered for this test case this is not a bad test case if (coverageMatrix[i][j]) { badCoverage[i] = false; break; } } isBadCoverageCalculated = true; } } private void calculateTotalLiveFailAndPass() { totalLiveFail = 0; totalLivePass = 0; for (int i = 0; i < numberOfTests; i++) { if (liveTestCases[i]) { if (!badCoverage[i]) { if (failTestCases[i]) { totalLiveFail++; } else totalLivePass++; } } } } }
calculate pass on statement and fail on statement
src/main/java/Op2.java
calculate pass on statement and fail on statement
Java
mit
7cb80f5070952839a8a4aecc59a33aa17f3d9541
0
CS2103JAN2017-W14-B3/main,CS2103JAN2017-W14-B3/main
package guitests; import org.junit.Test; import guitests.guihandles.TaskCardHandle; import seedu.doit.commons.core.Messages; import seedu.doit.commons.exceptions.IllegalValueException; import seedu.doit.logic.commands.AddCommand; import seedu.doit.testutil.TestTask; import seedu.doit.testutil.TestUtil; import seedu.doit.testutil.TypicalTestTasks; public class AddCommandTest extends TaskManagerGuiTest { public static final String MESSAGE_PRIORITY_CONSTRAINTS = "Task priority should only be low med high"; public static final String MESSAGE_STARTTIME_CONSTRAINTS = "Item Start Time should be " + "2 alphanumeric/period strings separated by '@'"; public static final String MESSAGE_ENDTIME_CONSTRAINTS = "Item End Time should be 2 alphanumeric" + "/period strings separated by '@'"; @Test public void add() throws IllegalValueException { //add one floating task TestTask[] currentList = this.td.getTypicalTasks(); TestTask taskToAdd = TypicalTestTasks.getFloatingTestTask(); assertAddSuccess(taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another task taskToAdd = TypicalTestTasks.getDeadlineTestTask(); assertAddSuccess(taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another event taskToAdd = TypicalTestTasks.getEventTestTask(); assertAddSuccess(taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add duplicate floating task this.commandBox.runCommand(TypicalTestTasks.getFloatingTestTask().getAddCommand()); assertResultMessage(AddCommand.MESSAGE_DUPLICATE_TASK); assertAllPanelsMatch(currentList); //add duplicate event this.commandBox.runCommand(TypicalTestTasks.getEventTestTask().getAddCommand()); assertResultMessage(AddCommand.MESSAGE_DUPLICATE_TASK); assertAllPanelsMatch(currentList); //add duplicate task this.commandBox.runCommand(TypicalTestTasks.getDeadlineTestTask().getAddCommand()); assertResultMessage(AddCommand.MESSAGE_DUPLICATE_TASK); assertAllPanelsMatch(currentList); //add to empty list this.commandBox.runCommand("clear"); assertAddSuccess(TypicalTestTasks.getFloatingTestTask()); //invalid command this.commandBox.runCommand("adds invalid1"); assertResultMessage(Messages.MESSAGE_UNKNOWN_COMMAND); //invalid start time this.commandBox.runCommand("add invalid2 s/kjsdf e/today p/high d/sss"); assertResultMessage(MESSAGE_STARTTIME_CONSTRAINTS); //invalid end time this.commandBox.runCommand("add invalid3 e/kjdgf p/high d/sss"); assertResultMessage(MESSAGE_ENDTIME_CONSTRAINTS); //invalid priority this.commandBox.runCommand("add invalid4 p/dfjkhd d/sss"); assertResultMessage(MESSAGE_PRIORITY_CONSTRAINTS); //missing description // this.commandBox.runCommand("add invalid5 e/today p/high"); //assertResultMessage(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE)); } private void assertAddSuccess(TestTask taskToAdd, TestTask... 
currentList) { this.commandBox.runCommand(taskToAdd.getAddCommand()); //confirm the new card contains the right data if (!taskToAdd.getIsDone() && taskToAdd.isFloatingTask()) { TaskCardHandle addedCard = this.floatingTaskListPanel.navigateToTask(taskToAdd.getName().fullName); assertMatching(taskToAdd, addedCard); } else if (!taskToAdd.getIsDone() && taskToAdd.isEvent()) { TaskCardHandle addedCard = this.eventListPanel.navigateToTask(taskToAdd.getName().fullName); assertMatching(taskToAdd, addedCard); } else if (!taskToAdd.getIsDone() && taskToAdd.isTask()) { TaskCardHandle addedCard = this.taskListPanel.navigateToTask(taskToAdd.getName().fullName); assertMatching(taskToAdd, addedCard); } //confirm the list now contains all previous tasks plus the new task TestTask[] expectedList = TestUtil.addTasksToList(currentList, taskToAdd); assertAllPanelsMatch(expectedList); } }
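The invalid-priority branch above ("add invalid4 p/dfjkhd d/sss") asserts on the fixed message in MESSAGE_PRIORITY_CONSTRAINTS. A rough sketch of the whitelist check that would produce such a message is shown below; isValidPriority is a hypothetical helper, not the project's actual priority validation:

// Hypothetical validation mirroring "Task priority should only be low med high".
public static boolean isValidPriority(String priority) {
    return "low".equals(priority) || "med".equals(priority) || "high".equals(priority);
}
// "dfjkhd" fails this check, so the add command surfaces the constraint message
// that the test asserts on.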
src/test/java/guitests/AddCommandTest.java
package guitests; import org.junit.Test; import guitests.guihandles.TaskCardHandle; import seedu.doit.commons.core.Messages; import seedu.doit.commons.exceptions.IllegalValueException; import seedu.doit.logic.commands.AddCommand; import seedu.doit.testutil.TestTask; import seedu.doit.testutil.TestUtil; import seedu.doit.testutil.TypicalTestTasks; public class AddCommandTest extends TaskManagerGuiTest { public static final String MESSAGE_PRIORITY_CONSTRAINTS = "Task priority should only be low med high"; @Test public void add() throws IllegalValueException { //add one floating task TestTask[] currentList = this.td.getTypicalTasks(); TestTask taskToAdd = TypicalTestTasks.getFloatingTestTask(); assertAddSuccess(taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another task taskToAdd = TypicalTestTasks.getDeadlineTestTask(); assertAddSuccess(taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add another event taskToAdd = TypicalTestTasks.getEventTestTask(); assertAddSuccess(taskToAdd, currentList); currentList = TestUtil.addTasksToList(currentList, taskToAdd); //add duplicate floating task this.commandBox.runCommand(TypicalTestTasks.getFloatingTestTask().getAddCommand()); assertResultMessage(AddCommand.MESSAGE_DUPLICATE_TASK); assertAllPanelsMatch(currentList); //add duplicate event this.commandBox.runCommand(TypicalTestTasks.getEventTestTask().getAddCommand()); assertResultMessage(AddCommand.MESSAGE_DUPLICATE_TASK); assertAllPanelsMatch(currentList); //add duplicate task this.commandBox.runCommand(TypicalTestTasks.getDeadlineTestTask().getAddCommand()); assertResultMessage(AddCommand.MESSAGE_DUPLICATE_TASK); assertAllPanelsMatch(currentList); //add to empty list this.commandBox.runCommand("clear"); assertAddSuccess(TypicalTestTasks.getFloatingTestTask()); //invalid command this.commandBox.runCommand("adds invalid1"); assertResultMessage(Messages.MESSAGE_UNKNOWN_COMMAND); //invalid start time //this.commandBox.runCommand("add invalid2 s/kjsdf e/today p/high d/sss"); //assertResultMessage(StartTimeMESSAGE_STARTTIME_CONSTRAINTS); //invalid end time //this.commandBox.runCommand("add invalid3 e/kjdgf p/high d/sss"); //assertResultMessage("Invalid Date Format: " + "kjdgf"); //invalid priority this.commandBox.runCommand("add invalid4 p/dfjkhd d/sss"); assertResultMessage(MESSAGE_PRIORITY_CONSTRAINTS); //missing description // this.commandBox.runCommand("add invalid5 e/today p/high"); //assertResultMessage(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE)); } private void assertAddSuccess(TestTask taskToAdd, TestTask... currentList) { this.commandBox.runCommand(taskToAdd.getAddCommand()); //confirm the new card contains the right data if (!taskToAdd.getIsDone() && taskToAdd.isFloatingTask()) { TaskCardHandle addedCard = this.floatingTaskListPanel.navigateToTask(taskToAdd.getName().fullName); assertMatching(taskToAdd, addedCard); } else if (!taskToAdd.getIsDone() && taskToAdd.isEvent()) { TaskCardHandle addedCard = this.eventListPanel.navigateToTask(taskToAdd.getName().fullName); assertMatching(taskToAdd, addedCard); } else if (!taskToAdd.getIsDone() && taskToAdd.isTask()) { TaskCardHandle addedCard = this.taskListPanel.navigateToTask(taskToAdd.getName().fullName); assertMatching(taskToAdd, addedCard); } //confirm the list now contains all previous tasks plus the new task TestTask[] expectedList = TestUtil.addTasksToList(currentList, taskToAdd); assertAllPanelsMatch(expectedList); } }
Correct commented test cases in addCommand
src/test/java/guitests/AddCommandTest.java
Correct commented test cases in addCommand
Java
mit
8ddd10743dbd6db570b7ca6442031fea5ccd9907
0
seanmonstar/ServiceDroid
package com.monstarlab.servicedroid.activity; import android.app.Activity; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.GestureDetector; import android.view.Menu; import android.view.MenuItem; import android.view.MotionEvent; import android.view.View; import android.view.GestureDetector.SimpleOnGestureListener; import android.view.View.OnClickListener; import android.view.View.OnTouchListener; import android.widget.TableLayout; import android.widget.TextView; import com.monstarlab.servicedroid.model.Models.Calls; import com.monstarlab.servicedroid.model.Models.Placements; import com.monstarlab.servicedroid.model.Models.ReturnVisits; import com.monstarlab.servicedroid.model.Models.TimeEntries; import com.monstarlab.servicedroid.util.TimeUtil; import com.monstarlab.servicedroid.R; public class StatisticsActivity extends Activity implements OnTouchListener { private static final String TAG = "StatisticsActivity"; private static final int MENU_MONTH = Menu.FIRST; private static final int MENU_YEAR = Menu.FIRST + 1; private static final int MENU_EMAIL = Menu.FIRST + 2; private static final int REPORT_TIME_NOTIFICATION = 1; //private TimeUtil mTimeHelper; private static String[] CallsProjection = new String[] { Calls._ID, Calls.BIBLE_STUDY }; private static String[] TimeProjection = new String[] { TimeEntries._ID, TimeEntries.DATE, TimeEntries.LENGTH }; private static String[] RVProjection = new String[] { ReturnVisits._ID, ReturnVisits.DATE, ReturnVisits.CALL_ID }; private static String[] PlacementsProjection = new String[] { Placements._ID, Placements.DATE }; private TextView mTimePeriodDisplay; private TextView mHoursDisplay; private TextView mRvsDisplay; private TextView mMagsDisplay; private TextView mBrochuresDisplay; private TextView mBooksDisplay; private TextView mBibleStudiesDisplay; private int mCurrentMonth = TimeUtil.getCurrentMonth(); private int mCurrentYear = TimeUtil.getCurrentYear(); private int mTimeSpan = MENU_MONTH; private static final int SWIPE_MIN_DISTANCE = 120; private static final int SWIPE_MAX_OFF_PATH = 250; private static final int SWIPE_THRESHOLD_VELOCITY = 200; private GestureDetector gestureDetector; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.stats); // mTimeHelper = new TimeUtil(this); mTimePeriodDisplay = (TextView)findViewById(R.id.stats_timeperiod); mHoursDisplay = (TextView)findViewById(R.id.hours); mRvsDisplay = (TextView)findViewById(R.id.rvs); mMagsDisplay = (TextView)findViewById(R.id.magazines); mBrochuresDisplay = (TextView)findViewById(R.id.brochures); mBooksDisplay = (TextView)findViewById(R.id.books); mBibleStudiesDisplay = (TextView)findViewById(R.id.bible_studies); // Gesture detection gestureDetector = new GestureDetector(new MyGestureDetector()); TableLayout table = (TableLayout) findViewById(R.id.statstable); table.setOnTouchListener(this); //setup reminder... 
//scheduleReminder(); } @Override protected void onResume() { super.onResume(); fillData(); } protected void fillData() { mTimePeriodDisplay.setText("" + mCurrentMonth + "/" + mCurrentYear); mHoursDisplay.setText(getHoursSum()); mRvsDisplay.setText(getRVs()); mMagsDisplay.setText(getMagazines()); mBrochuresDisplay.setText(getBrochures()); mBooksDisplay.setText(getBooks()); mBibleStudiesDisplay.setText(getBibleStudies()); } protected String getBibleStudies() { Cursor c = getContentResolver().query(Calls.CONTENT_URI, CallsProjection, Calls.BIBLE_STUDY + "=1", null, null); int sum = 0; if(c != null) { c.moveToFirst(); sum = c.getCount(); c.close(); c = null; } return "" + sum; } protected String getBooks() { Cursor c = getContentResolver().query(Placements.BOOKS_CONTENT_URI, PlacementsProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); int sum = 0; if(c != null) { sum = c.getCount(); c.close(); c = null; } return ""+sum; } protected String getBrochures() { Cursor c = getContentResolver().query(Placements.BROCHURES_CONTENT_URI, PlacementsProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); int sum = 0; if(c != null) { sum = c.getCount(); c.close(); c = null; } return ""+sum; } protected String getMagazines() { Cursor c = getContentResolver().query(Placements.MAGAZINES_CONTENT_URI, PlacementsProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); int sum = 0; if(c != null) { sum = c.getCount(); c.close(); c = null; } return ""+sum; } protected String getHoursSum() { Cursor c = getContentResolver().query(TimeEntries.CONTENT_URI, TimeProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); int sum = 0; if(c != null) { c.moveToFirst(); while(!c.isAfterLast()) { sum += c.getInt(2); c.moveToNext(); } c.close(); c = null; } return TimeUtil.toTimeString(sum); } protected String getRVs() { Cursor c = getContentResolver().query(ReturnVisits.CONTENT_URI, RVProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); String numOfRVs = "0"; if(c != null) { c.moveToFirst(); numOfRVs = "" + c.getCount(); c.close(); c = null; } return numOfRVs; } protected void setTimePeriod() { } protected void moveBackwardOneMonth() { mCurrentMonth--; if(mCurrentMonth <= 0) { mCurrentMonth = 12; mCurrentYear--; } } protected void moveForwardOneMonth() { mCurrentMonth++; if(mCurrentMonth >= 12) { mCurrentMonth = 1; mCurrentYear++; } } protected String getTimePeriodWhere(String dateField) { return dateField + " between ? and ?"; // "dateField between YYYY-MM-01 and date('YYYY-MM-01','+1 month','-1 day');" } protected String[] getTimePeriodArgs(int year, int month) { String[] args = new String[2]; //beginning of month args[0] = year + "-" + TimeUtil.pad(month) + "-01"; //end of month //TODO - possibly fix date? 
args[1] = year + "-" + TimeUtil.pad(month+1) + "-01"; return args; } protected void setTimeSpan(int span) { mTimeSpan = span; } @Override public boolean onCreateOptionsMenu(Menu menu) { boolean result = super.onCreateOptionsMenu(menu); //menu.add(0, MENU_MONTH, 1, R.string.monthly).setIcon(android.R.drawable.ic_menu_month); //menu.add(0, MENU_YEAR, 1, R.string.service_year).setIcon(android.R.drawable.ic_menu_my_calendar); menu.add(0, MENU_EMAIL, 1, R.string.send).setIcon(android.R.drawable.ic_menu_send); return result; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch(item.getItemId()) { case MENU_MONTH: setTimeSpan(MENU_MONTH); break; case MENU_YEAR: setTimeSpan(MENU_YEAR); break; case MENU_EMAIL: sendEmail(); } return super.onOptionsItemSelected(item); } protected void sendEmail() { Intent i = new Intent(Intent.ACTION_SEND, Uri.parse("content://com.android.email.provider")); //i.setType("text/plain"); //use this line for testing in the emulator i.setType("message/rfc822"); //for device i.putExtra(Intent.EXTRA_EMAIL, new String[] {}); i.putExtra(Intent.EXTRA_SUBJECT, "Service Time for " + mCurrentMonth + "/" + mCurrentYear); i.putExtra(Intent.EXTRA_TEXT, getStatsTextForTimePeriod()); startActivity(Intent.createChooser(i, "Send by...")); } protected String getStatsTextForTimePeriod() { StringBuilder sb = new StringBuilder(); //TODO - use strings.xml to allow for internationalization sb.append("Here is my Service Record for " + mCurrentMonth + "/" + mCurrentYear + "\n\n"); sb.append("Hours: " + getHoursSum() + "\n"); sb.append("Magazines: " + getMagazines() + "\n"); sb.append("Brochures: " + getBrochures() + "\n"); sb.append("Books: " + getBooks() + "\n"); sb.append("Return Visits: " + getRVs() + "\n"); sb.append("Bible Studies: " + getBibleStudies() + "\n"); return sb.toString(); } protected void scheduleReminder() { //((AlarmManager)getSystemService(Context.ALARM_SERVICE)). 
showReminder(); } protected void showReminder() { int icon = R.drawable.icon; // icon from resources CharSequence tickerText = "Send in Service Time"; // ticker-text long when = System.currentTimeMillis(); // notification time Context context = getApplicationContext(); // application Context CharSequence contentTitle = "ServiceDroid"; // expanded message title CharSequence contentText = "Your service time is due!"; // expanded message text Intent notificationIntent = new Intent(this, StatisticsActivity.class); PendingIntent contentIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0); // the next two lines initialize the Notification, using the configurations above Notification notification = new Notification(icon, tickerText, when); notification.defaults |= Notification.DEFAULT_SOUND; notification.setLatestEventInfo(context, contentTitle, contentText, contentIntent); ((NotificationManager)getSystemService(Context.NOTIFICATION_SERVICE)).notify(REPORT_TIME_NOTIFICATION, notification); } class MyGestureDetector extends SimpleOnGestureListener { @Override public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { try { if (Math.abs(e1.getY() - e2.getY()) > SWIPE_MAX_OFF_PATH) return false; // right to left swipe if(e1.getX() - e2.getX() > SWIPE_MIN_DISTANCE && Math.abs(velocityX) > SWIPE_THRESHOLD_VELOCITY) { //left moveForwardOneMonth(); fillData(); } else if (e2.getX() - e1.getX() > SWIPE_MIN_DISTANCE && Math.abs(velocityX) > SWIPE_THRESHOLD_VELOCITY) { //right moveBackwardOneMonth(); fillData(); } } catch (Exception e) { // nothing } return false; } @Override public boolean onDown(MotionEvent event) { return true; } } @Override public boolean onTouch(View v, MotionEvent event) { if (gestureDetector.onTouchEvent(event)) { return true; } return false; } }
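A note on getTimePeriodArgs above: the upper bound is built as year + "-" + TimeUtil.pad(month + 1) + "-01", so December produces a "13" month, which is what the inline TODO hints at. The method below is a rollover-safe sketch using java.util.Calendar; it is only an assumption about how that TODO could be resolved, not code from ServiceDroid.

// Hypothetical rollover-safe variant: December rolls over to January 1st of the
// following year instead of the non-date string "YYYY-13-01".
protected String[] getTimePeriodArgsSafe(int year, int month) {
    java.util.Calendar cal = java.util.Calendar.getInstance();
    cal.clear();
    cal.set(year, month - 1, 1);            // Calendar months are 0-based
    String start = year + "-" + TimeUtil.pad(month) + "-01";
    cal.add(java.util.Calendar.MONTH, 1);   // first day of the following month
    String end = cal.get(java.util.Calendar.YEAR) + "-"
            + TimeUtil.pad(cal.get(java.util.Calendar.MONTH) + 1) + "-01";
    return new String[] { start, end };
}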
src/com/monstarlab/servicedroid/activity/StatisticsActivity.java
package com.monstarlab.servicedroid.activity; import android.app.Activity; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.GestureDetector; import android.view.Menu; import android.view.MenuItem; import android.view.MotionEvent; import android.view.View; import android.view.GestureDetector.SimpleOnGestureListener; import android.view.View.OnClickListener; import android.view.View.OnTouchListener; import android.widget.TableLayout; import android.widget.TextView; import com.monstarlab.servicedroid.model.Models.Calls; import com.monstarlab.servicedroid.model.Models.Placements; import com.monstarlab.servicedroid.model.Models.ReturnVisits; import com.monstarlab.servicedroid.model.Models.TimeEntries; import com.monstarlab.servicedroid.util.TimeUtil; import com.monstarlab.servicedroid.R; public class StatisticsActivity extends Activity implements OnTouchListener { private static final String TAG = "StatisticsActivity"; private static final int MENU_MONTH = Menu.FIRST; private static final int MENU_YEAR = Menu.FIRST + 1; private static final int MENU_EMAIL = Menu.FIRST + 2; private static final int REPORT_TIME_NOTIFICATION = 1; //private TimeUtil mTimeHelper; private static String[] CallsProjection = new String[] { Calls._ID, Calls.BIBLE_STUDY }; private static String[] TimeProjection = new String[] { TimeEntries._ID, TimeEntries.DATE, TimeEntries.LENGTH }; private static String[] RVProjection = new String[] { ReturnVisits._ID, ReturnVisits.DATE, ReturnVisits.CALL_ID }; private static String[] PlacementsProjection = new String[] { Placements._ID, Placements.DATE }; private TextView mTimePeriodDisplay; private TextView mHoursDisplay; private TextView mRvsDisplay; private TextView mMagsDisplay; private TextView mBrochuresDisplay; private TextView mBooksDisplay; private TextView mBibleStudiesDisplay; private int mCurrentMonth = TimeUtil.getCurrentMonth(); private int mCurrentYear = TimeUtil.getCurrentYear(); private int mTimeSpan = MENU_MONTH; private static final int SWIPE_MIN_DISTANCE = 120; private static final int SWIPE_MAX_OFF_PATH = 250; private static final int SWIPE_THRESHOLD_VELOCITY = 200; private GestureDetector gestureDetector; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.stats); // mTimeHelper = new TimeUtil(this); mTimePeriodDisplay = (TextView)findViewById(R.id.stats_timeperiod); mHoursDisplay = (TextView)findViewById(R.id.hours); mRvsDisplay = (TextView)findViewById(R.id.rvs); mMagsDisplay = (TextView)findViewById(R.id.magazines); mBrochuresDisplay = (TextView)findViewById(R.id.brochures); mBooksDisplay = (TextView)findViewById(R.id.books); mBibleStudiesDisplay = (TextView)findViewById(R.id.bible_studies); // Gesture detection gestureDetector = new GestureDetector(new MyGestureDetector()); TableLayout table = (TableLayout) findViewById(R.id.statstable); table.setOnTouchListener(this); //setup reminder... 
//scheduleReminder(); } @Override protected void onResume() { super.onResume(); fillData(); } protected void fillData() { mTimePeriodDisplay.setText("" + mCurrentMonth + "/" + mCurrentYear); mHoursDisplay.setText(getHoursSum()); mRvsDisplay.setText(getRVs()); mMagsDisplay.setText(getMagazines()); mBrochuresDisplay.setText(getBrochures()); mBooksDisplay.setText(getBooks()); mBibleStudiesDisplay.setText(getBibleStudies()); } protected String getBibleStudies() { Cursor c = getContentResolver().query(Calls.CONTENT_URI, CallsProjection, Calls.BIBLE_STUDY + "=1", null, null); int sum = 0; if(c != null) { c.moveToFirst(); sum = c.getCount(); c.close(); c = null; } return "" + sum; } protected String getBooks() { Cursor c = getContentResolver().query(Placements.BOOKS_CONTENT_URI, PlacementsProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); int sum = 0; if(c != null) { sum = c.getCount(); c.close(); c = null; } return ""+sum; } protected String getBrochures() { Cursor c = getContentResolver().query(Placements.BROCHURES_CONTENT_URI, PlacementsProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); int sum = 0; if(c != null) { sum = c.getCount(); c.close(); c = null; } return ""+sum; } protected String getMagazines() { Cursor c = getContentResolver().query(Placements.MAGAZINES_CONTENT_URI, PlacementsProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); int sum = 0; if(c != null) { sum = c.getCount(); c.close(); c = null; } return ""+sum; } protected String getHoursSum() { Cursor c = getContentResolver().query(TimeEntries.CONTENT_URI, TimeProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); int sum = 0; if(c != null) { c.moveToFirst(); while(!c.isAfterLast()) { sum += c.getInt(2); c.moveToNext(); } c.close(); c = null; } return TimeUtil.toTimeString(sum); } protected String getRVs() { Cursor c = getContentResolver().query(ReturnVisits.CONTENT_URI, RVProjection, getTimePeriodWhere(ReturnVisits.DATE), getTimePeriodArgs(mCurrentYear, mCurrentMonth), null); String numOfRVs = "0"; if(c != null) { c.moveToFirst(); numOfRVs = "" + c.getCount(); c.close(); c = null; } return numOfRVs; } protected void setTimePeriod() { } protected void moveBackwardOneMonth() { mCurrentMonth--; if(mCurrentMonth <= 0) { mCurrentMonth = 12; mCurrentYear--; } } protected void moveForwardOneMonth() { mCurrentMonth++; if(mCurrentMonth >= 12) { mCurrentMonth = 1; mCurrentYear++; } } protected String getTimePeriodWhere(String dateField) { return dateField + " between ? and ?"; // "dateField between YYYY-MM-01 and date('YYYY-MM-01','+1 month','-1 day');" } protected String[] getTimePeriodArgs(int year, int month) { String[] args = new String[2]; //beginning of month args[0] = year + "-" + TimeUtil.pad(month) + "-01"; //end of month //TODO - possibly fix date? 
args[1] = year + "-" + TimeUtil.pad(month+1) + "-01"; return args; } protected void setTimeSpan(int span) { mTimeSpan = span; } @Override public boolean onCreateOptionsMenu(Menu menu) { boolean result = super.onCreateOptionsMenu(menu); //menu.add(0, MENU_MONTH, 1, R.string.monthly).setIcon(android.R.drawable.ic_menu_month); //menu.add(0, MENU_YEAR, 1, R.string.service_year).setIcon(android.R.drawable.ic_menu_my_calendar); menu.add(0, MENU_EMAIL, 1, R.string.send).setIcon(android.R.drawable.ic_menu_send); return result; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch(item.getItemId()) { case MENU_MONTH: setTimeSpan(MENU_MONTH); break; case MENU_YEAR: setTimeSpan(MENU_YEAR); break; case MENU_EMAIL: sendEmail(); } return super.onOptionsItemSelected(item); } protected void sendEmail() { Intent i = new Intent(Intent.ACTION_SEND, Uri.parse("content://com.android.email.provider")); //i.setType("text/plain"); //use this line for testing in the emulator i.setType("message/rfc822"); //for device i.putExtra(Intent.EXTRA_EMAIL, new String[] {}); i.putExtra(Intent.EXTRA_SUBJECT, "Service Time for " + mCurrentMonth + "/" + mCurrentYear); i.putExtra(Intent.EXTRA_TEXT, getStatsTextForTimePeriod()); startActivity(Intent.createChooser(i, "Send by...")); } protected String getStatsTextForTimePeriod() { StringBuilder sb = new StringBuilder(); //TODO - use strings.xml to allow for internationalization sb.append("Here is my Service Record for " + mCurrentMonth + "/" + mCurrentYear + "\n\n"); sb.append("Hours: " + getHoursSum() + "\n"); sb.append("Magazines: " + getMagazines() + "\n"); sb.append("Brochures: " + getBrochures() + "\n"); sb.append("Books: " + getBooks() + "\n"); sb.append("Return Vists: " + getRVs() + "\n"); sb.append("Bible Studies: " + getBibleStudies() + "\n"); return sb.toString(); } protected void scheduleReminder() { //((AlarmManager)getSystemService(Context.ALARM_SERVICE)). 
showReminder(); } protected void showReminder() { int icon = R.drawable.icon; // icon from resources CharSequence tickerText = "Send in Service Time"; // ticker-text long when = System.currentTimeMillis(); // notification time Context context = getApplicationContext(); // application Context CharSequence contentTitle = "ServiceDroid"; // expanded message title CharSequence contentText = "Your service time is due!"; // expanded message text Intent notificationIntent = new Intent(this, StatisticsActivity.class); PendingIntent contentIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0); // the next two lines initialize the Notification, using the configurations above Notification notification = new Notification(icon, tickerText, when); notification.defaults |= Notification.DEFAULT_SOUND; notification.setLatestEventInfo(context, contentTitle, contentText, contentIntent); ((NotificationManager)getSystemService(Context.NOTIFICATION_SERVICE)).notify(REPORT_TIME_NOTIFICATION, notification); } class MyGestureDetector extends SimpleOnGestureListener { @Override public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { try { if (Math.abs(e1.getY() - e2.getY()) > SWIPE_MAX_OFF_PATH) return false; // right to left swipe if(e1.getX() - e2.getX() > SWIPE_MIN_DISTANCE && Math.abs(velocityX) > SWIPE_THRESHOLD_VELOCITY) { //left moveForwardOneMonth(); fillData(); } else if (e2.getX() - e1.getX() > SWIPE_MIN_DISTANCE && Math.abs(velocityX) > SWIPE_THRESHOLD_VELOCITY) { //right moveBackwardOneMonth(); fillData(); } } catch (Exception e) { // nothing } return false; } @Override public boolean onDown(MotionEvent event) { return true; } } @Override public boolean onTouch(View v, MotionEvent event) { if (gestureDetector.onTouchEvent(event)) { return true; } return false; } }
misspelled word
src/com/monstarlab/servicedroid/activity/StatisticsActivity.java
misspelled word
Java
mit
cf8ce0ab8ac5c83bca33381f23ababa43e80ee38
0
vimeo/vimeo-networking-java,vimeo/vimeo-networking-java,vimeo/vimeo-networking-java
package com.vimeo.networking.model; import com.vimeo.stag.GsonAdapterKey; import org.jetbrains.annotations.NotNull; import java.io.Serializable; import java.util.HashMap; import java.util.Map; /** * A model that holds the type of push subscriptions a user has. This class * has both getters and setters since a user can update their subscriptions * using a Patch. * <p> * Created by zetterstromk on 12/15/16. */ public class Subscriptions implements Serializable { private static final long serialVersionUID = 3088065484753327987L; public static final String KEY_COMMENT = "comment"; public static final String KEY_CREDIT = "credit"; public static final String KEY_LIKE = "like"; public static final String KEY_REPLY = "reply"; public static final String KEY_FOLLOW = "follow"; public static final String KEY_VIDEO_AVAILABLE = "video_available"; @GsonAdapterKey(KEY_COMMENT) boolean mComment; @GsonAdapterKey(KEY_CREDIT) boolean mCredit; @GsonAdapterKey(KEY_LIKE) boolean mLike; @GsonAdapterKey(KEY_REPLY) boolean mReply; @GsonAdapterKey(KEY_FOLLOW) boolean mFollow; @GsonAdapterKey(KEY_VIDEO_AVAILABLE) boolean mVideoAvailable; public boolean isReceivingComment() { return mComment; } public void receiveComment(boolean receive) { mComment = receive; } public boolean isReceivingCredit() { return mCredit; } public void receiveCredit(boolean receive) { mCredit = receive; } public boolean isReceivingLike() { return mLike; } public void receiveLike(boolean receive) { mLike = receive; } public boolean isReceivingReply() { return mReply; } public void receiveReply(boolean receive) { mReply = receive; } public boolean isReceivingFollow() { return mFollow; } public void receiveFollow(boolean receive) { mFollow = receive; } public boolean isReceivingVideoAvailable() { return mVideoAvailable; } public void receiveVideoAvailable(boolean receive) { mVideoAvailable = receive; } @NotNull public Map<String, Boolean> getMapFromSubscriptions() { Map<String, Boolean> map = new HashMap<>(); map.put(KEY_COMMENT, mComment); map.put(KEY_CREDIT, mCredit); map.put(KEY_LIKE, mLike); map.put(KEY_REPLY, mReply); map.put(KEY_FOLLOW, mFollow); map.put(KEY_VIDEO_AVAILABLE, mVideoAvailable); return map; } }
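Since the class javadoc above says the map feeds a PATCH of the user's push subscriptions, here is a short usage sketch with the renamed receive*/isReceiving* accessors; the surrounding context and the way the map would be sent are placeholders, not part of vimeo-networking:

// Toggle a couple of flags and build the PATCH body from the current state.
Subscriptions subscriptions = new Subscriptions();
subscriptions.receiveLike(false);      // stop "like" push notifications
subscriptions.receiveComment(true);    // keep receiving "comment" notifications
java.util.Map<String, Boolean> patchBody = subscriptions.getMapFromSubscriptions();
// patchBody maps "comment", "credit", "like", "reply", "follow" and
// "video_available" to booleans and can be serialized as the PATCH payload.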
vimeo-networking/src/main/java/com/vimeo/networking/model/Subscriptions.java
package com.vimeo.networking.model; import com.vimeo.stag.GsonAdapterKey; import org.jetbrains.annotations.NotNull; import java.io.Serializable; import java.util.HashMap; import java.util.Map; /** * A model that holds the type of push subscriptions a user has. This class * has both getters and setters since a user can update their subscriptions * using a Patch. * <p> * Created by zetterstromk on 12/15/16. */ public class Subscriptions implements Serializable { private static final long serialVersionUID = 3088065484753327987L; public static final String KEY_COMMENT = "comment"; public static final String KEY_CREDIT = "credit"; public static final String KEY_LIKE = "like"; public static final String KEY_REPLY = "reply"; public static final String KEY_FOLLOW = "follow"; public static final String KEY_VIDEO_AVAILABLE = "video_available"; @GsonAdapterKey(KEY_COMMENT) boolean mComment; @GsonAdapterKey(KEY_CREDIT) boolean mCredit; @GsonAdapterKey(KEY_LIKE) boolean mLike; @GsonAdapterKey(KEY_REPLY) boolean mReply; @GsonAdapterKey(KEY_FOLLOW) boolean mFollow; @GsonAdapterKey(KEY_VIDEO_AVAILABLE) boolean mVideoAvailable; public boolean isComment() { return mComment; } public void setComment(boolean comment) { mComment = comment; } public boolean isCredit() { return mCredit; } public void setCredit(boolean credit) { mCredit = credit; } public boolean isLike() { return mLike; } public void setLike(boolean like) { mLike = like; } public boolean isReply() { return mReply; } public void setReply(boolean reply) { mReply = reply; } public boolean isFollow() { return mFollow; } public void setFollow(boolean follow) { mFollow = follow; } public boolean isVideoAvailable() { return mVideoAvailable; } public void setVideoAvailable(boolean videoAvailable) { mVideoAvailable = videoAvailable; } @NotNull public Map<String, Boolean> getMapFromSubscriptions() { Map<String, Boolean> map = new HashMap<>(); map.put(KEY_COMMENT, mComment); map.put(KEY_CREDIT, mCredit); map.put(KEY_LIKE, mLike); map.put(KEY_REPLY, mReply); map.put(KEY_FOLLOW, mFollow); map.put(KEY_VIDEO_AVAILABLE, mVideoAvailable); return map; } }
change method names
vimeo-networking/src/main/java/com/vimeo/networking/model/Subscriptions.java
change method names
Java
mit
3aa1fc99d27695c00aaacd64c8aac8487d964ea6
0
RovoMe/JDrum
package at.rovo.caching.drum; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import at.rovo.caching.drum.data.ByteSerializer; import at.rovo.caching.drum.event.DrumEventDispatcher; import at.rovo.caching.drum.internal.DiskBucketWriter; import at.rovo.caching.drum.internal.InMemoryData; import at.rovo.caching.drum.internal.InMemoryMessageBroker; import at.rovo.caching.drum.internal.backend.DrumStorageFactory; import at.rovo.caching.drum.util.DrumExceptionHandler; import at.rovo.caching.drum.util.DrumUtil; import at.rovo.caching.drum.util.NamedThreadFactory; /** * <p> * This implementation of the 'Disk Repository with Update Management' structure * utilizes a consumer/producer pattern to store and process input received by * its * </p> * <ul> * <li>{@link #check(Number)} or {@link #check(Number, ByteSerializable)}</li> * <li>{@link #update(Number, ByteSerializable)} or * {@link #update(Number, ByteSerializable, ByteSerializable)}</li> * <li>{@link #checkUpdate(Number, ByteSerializable) or * {@link #checkUpdate(Number, ByteSerializable, ByteSerializable)}</li> * </ul> * <p> * methods. * </p> * <p> * Internally <code>numBuckets</code> buffers and buckets will be created which * will hold the data sent to DRUM. Buffers are the in-memory storage while * buckets are the intermediary disk files. Buffers fill up to <code> * bufferSize</code> bytes before they get sent to a disk file. * </p> * * * @param <V> * The type of the value * @param <A> * The type of the auxiliary data attached to a key * * @author Roman Vottner */ public class Drum<V extends ByteSerializer<V>, A extends ByteSerializer<A>> implements IDrum<V, A> { /** The logger of this class **/ private final static Logger logger = LogManager.getLogger(Drum.class); /** The name of the DRUM instance **/ protected String drumName = null; /** The number of buffers and buckets used **/ protected int numBuckets = 0; /** The size of an in-memory buffer **/ protected int bufferSize = 0; /** * The broker list which holds elements in memory until they get written to * the disk file **/ protected List<IBroker<InMemoryData<V, A>, V, A>> inMemoryBuffer = null; /** * The set of writer objects that listens to notifications of a broker and * write content from the broker to a disk file **/ protected List<IDiskWriter<V, A>> diskWriters = null; /** * The object that compares keys of data-objects for their uniqueness and * merges them into the data store in case the need to be updated **/ protected IMerger<V, A> merger = null; /** The execution service which hosts our threads **/ protected ExecutorService executor = null; /** The merger thread **/ protected Thread mergerThread = null; /** * The event dispatcher used to inform listeners of internal state changes * and certain statistics **/ protected DrumEventDispatcher eventDispatcher = new DrumEventDispatcher(); /** The event dispatcher thread **/ protected Thread eventDispatcherThread = null; /** * <p> * Implementation of the build pattern presented by Joshua Bloch in his book * 'Effective Java - Second Edition' in 'Item 2: Consider a builder when * faced with many constructor parameters'. * </p> * <p> * On invoking {@link #build()} the builder will create a new instance of * DRUM with the provided parameters. 
* </p> * <p> * By default, the builder will create a DRUM instance for 512 buckets with * 64k buffer size and a {@link NullDispatcher}. * </p> * * @param <V> * The type of the value DRUM will manage * @param <A> * The type of the auxiliary data attached to a key * * @author Roman Vottner */ public static class Builder<V extends ByteSerializer<V>, A extends ByteSerializer<A>> implements IBuilder<Drum<V, A>> { // required parameters /** The name of the drum instance **/ private final String drumName; /** The type of the value managed by DRUM **/ private final Class<V> valueClass; /** The type of the auxiliary data managed by DRUM **/ private final Class<A> auxClass; /** The number of buckets managed by this DRUM instance **/ private int numBuckets = 512; /** The size of the buffer before a flush is forced **/ private int bufferSize = 64; /** The class responsible for dispatching the results **/ private IDispatcher<V,A> dispatcher = new NullDispatcher<V,A>(); /** A listener class which needs to be informed of state changes **/ private IDrumListener listener = null; /** The factory which creates the backing storage service **/ private DrumStorageFactory<V,A> factory = null; /** * <p> * Creates a new builder object with the minimum number of required data * to instantiate a new {@link Drum} instance on invoking * {@link #build()}. * </p> * * @param drumName The name of the DRUM instance * @param valueClass The type of the value this instance will manage * @param auxClass The type of the auxiliary data this instance will * manage */ public Builder(String drumName, Class<V> valueClass, Class<A> auxClass) { this.drumName = drumName; this.valueClass = valueClass; this.auxClass = auxClass; } /** * <p> * Assigns the builder to create a Drum instance which uses the provided * dispatcher instead of the default {@link NullDispatcher} to dispatch * results. * </p> * * @param dispatcher The dispatcher to use for instantiating DRUM * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> dispatcher(IDispatcher<V,A> dispatcher) { if (dispatcher == null) throw new IllegalArgumentException("Invalid dispatcher received"); this.dispatcher = dispatcher; return this; } /** * <p> * Assigns the builder to create a Drum instance which uses the provided * number of buckets instead of the default 512 buckets. * </p> * * @param numBuckets The number of buckets DRUM should manage * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> numBucket(int numBuckets) { if (numBuckets <= 0 || ((numBuckets & -numBuckets) != numBuckets)) throw new IllegalArgumentException( "The number of buckets must be greater than 0 and must " + "be a superset of 2"); this.numBuckets = numBuckets; return this; } /** * <p> * Assigns the builder to create a Drum instance which uses the provided * buffer size instead of 64kb. * </p> * * @param bufferSize The buffer size DRUM should use before flushing the * content * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> bufferSize(int bufferSize) { if (bufferSize <= 0 || ((bufferSize & -bufferSize) != bufferSize)) throw new IllegalArgumentException( "BufferSize must be greater than 0 and have a base of 2 " + "(ex: 2^1, 2^2, 2^3, ...)"); this.bufferSize = bufferSize; return this; } /** * <p> * Assigns the builder to create a Drum instance with the defined * listener in place. 
* </p> * * @param listener The listener to notify on state changes * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> listener(IDrumListener listener) { this.listener = listener; return this; } /** * <p> * Assigns the builder to create a Drum instance with the given factory * to create a backend storage instead of the backend storage created by * the default factory. * </p> * * @param factory The factory responsible for creating the backend * storage * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> factory(DrumStorageFactory<V,A> factory) { this.factory = factory; return this; } /** * <p> * Assigns the builder to create and initialize a new instance of a DRUM * object. * </p> * * @return A new initialized instance of DRUM * @throws DrumException If during the initialization of DRUM an error * occurred */ public Drum<V,A> build() throws Exception { return new Drum<V,A>(this); } } /** * <p> * Creates a new instance and assigns initial values contained within the * builder object to the corresponding attributes. * </p> * * @param builder * @throws DrumException */ private Drum(Builder<V,A> builder) throws DrumException { if (builder.listener != null) this.addDrumListener(builder.listener); DrumStorageFactory<V,A> factory; if (builder.factory != null) factory = builder.factory; else factory = DrumStorageFactory.getDefaultStorageFactory( builder.drumName, builder.numBuckets, builder.dispatcher, builder.valueClass, builder.auxClass, this.eventDispatcher); this.init(builder.drumName, builder.numBuckets, builder.bufferSize, builder.dispatcher, builder.valueClass, builder.auxClass, factory); } /** * <p> * Initializes the DRUM instance with required data and starts the worker * threads. * </p> * * @param drumName * The name of the DRUM instance * @param numBuckets * The number of buckets to be used * @param bufferSize * The size of a single buffer in bytes * @param dispatcher * The {@link IDispatcher} implementation which will receive * information on items added via <code>check</code>, * <code>update</code> or <code>checkUpdate</code>. * @param valueClass * The class-type of the value for a certain key * @param auxClass * The auxiliary data-type attached to a certain key * @param factory * The factory object which defines where data should be stored * in. 
Note that factory must return an implementation of IMerger * @param listener * The object which needs to be notified on certain internal * state or statistic changes * @throws DrumException */ private void init(String drumName, int numBuckets, int bufferSize, IDispatcher<V, A> dispatcher, Class<V> valueClass, Class<A> auxClass, DrumStorageFactory<V, A> factory) throws DrumException { this.eventDispatcherThread = new Thread(this.eventDispatcher); this.eventDispatcherThread.setName(drumName + "EventDispatcher"); this.eventDispatcherThread.start(); this.drumName = drumName; this.numBuckets = numBuckets; this.bufferSize = bufferSize; // create the broker and the consumer listening to the broker this.inMemoryBuffer = new ArrayList<IBroker<InMemoryData<V, A>, V, A>>( numBuckets); this.diskWriters = new ArrayList<IDiskWriter<V, A>>(numBuckets); this.merger = factory.getStorage(); DrumExceptionHandler exceptionHandler = new DrumExceptionHandler(); NamedThreadFactory writerFactory = new NamedThreadFactory(); writerFactory.setName(this.drumName + "-Writer"); writerFactory.setUncaughtExceptionHanlder(exceptionHandler); // writerFactory.increasePriority(true); // this.executor = Executors.newCachedThreadPool(writerFactory); this.executor = Executors.newFixedThreadPool(this.numBuckets, writerFactory); for (int i = 0; i < numBuckets; i++) { IBroker<InMemoryData<V, A>, V, A> broker = new InMemoryMessageBroker<InMemoryData<V, A>, V, A>( drumName, i, bufferSize, this.eventDispatcher); IDiskWriter<V, A> consumer = new DiskBucketWriter<V, A>(drumName, i, bufferSize, broker, this.merger, this.eventDispatcher); this.inMemoryBuffer.add(broker); this.diskWriters.add(consumer); this.executor.submit(consumer); // add a reference of the disk writer to the merger, so it can use // the semaphore to lock the file it is currently reading from to // merge the data into the backing data store. // While reading from a file, a further access to the file (which // should result in a write access) is therefore refused. 
this.merger.addDiskFileWriter(consumer); } this.mergerThread = new Thread(this.merger, this.drumName + "-Merger"); // this.mergerThread.setPriority(Math.min(10, // this.mergerThread.getPriority()+1)); this.mergerThread.setUncaughtExceptionHandler(exceptionHandler); this.mergerThread.start(); // Thread.currentThread().setPriority(Math.max(0, // Thread.currentThread().getPriority()-1)); } @Override public void check(Long key) { this.add(key, null, null, DrumOperation.CHECK); } @Override public void check(Long key, A aux) { this.add(key, null, aux, DrumOperation.CHECK); } @Override public void update(Long key, V value) { this.add(key, value, null, DrumOperation.UPDATE); } @Override public void update(Long key, V value, A aux) { this.add(key, value, aux, DrumOperation.UPDATE); } @Override public void appendUpdate(Long key, V value) { this.add(key, value, null, DrumOperation.APPEND_UPDATE); } @Override public void appendUpdate(Long key, V value, A aux) { this.add(key, value, aux, DrumOperation.APPEND_UPDATE); } @Override public void checkUpdate(Long key, V value) { this.add(key, value, null, DrumOperation.CHECK_UPDATE); } @Override public void checkUpdate(Long key, V value, A aux) { this.add(key, value, aux, DrumOperation.CHECK_UPDATE); } @Override public void dispose() throws DrumException { logger.debug("[{}] - Disposal initialted", this.drumName); // flip the buffers which sends the writers the latest data for (IBroker<?, ?, ?> broker : this.inMemoryBuffer) broker.stop(); // give the threads a chance to finish their work without being // interrupted for (IDiskWriter<V, A> writer : this.diskWriters) writer.stop(); this.executor.shutdown(); // wait for the threads to finish try { this.executor.awaitTermination(1, TimeUnit.MINUTES); } catch (InterruptedException e) { } this.merger.stop(); try { this.mergerThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } // close the open resources held by the writers for (IDiskWriter<V, A> writer : this.diskWriters) writer.close(); this.eventDispatcher.stop(); this.eventDispatcherThread.interrupt(); logger.trace("[{}] - disposed", this.drumName); } @Override public void addDrumListener(IDrumListener listener) { this.eventDispatcher.addDrumListener(listener); } @Override public void removeDrumListener(IDrumListener listener) { this.eventDispatcher.removeDrumListener(listener); } /** * <p> * Stores the key, the value and the auxiliary data as well as the operation * to be executed on these data in the according in-memory buffer. * </p> * * @param key * The hash value of the data * @param value * The value associated with the key * @param aux * The auxiliary data of the key * @param operation * The operation to be used on the data */ private void add(Long key, V value, A aux, DrumOperation operation) { // get the bucket index based on the first n bits of the key, according // to the number of defined buckets int bucketId = DrumUtil.getBucketForKey(key, this.numBuckets); // add a new InMemoryData object to the broker this.inMemoryBuffer.get(bucketId).put( new InMemoryData<V, A>(key, value, aux, operation)); } /** * <p> * Returns the name of the DRUM instance. * </p> * * @return The name of the DRUM instance */ public String getName() { return this.drumName; } /** * <p> * Returns the number of buckets used by this DRUM instance. * </p> * * @return The number of buckets used */ public int getNumberOfBuckets() { return this.numBuckets; } }
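The javadoc above describes builder-based construction (512 buckets, 64k buffer and a NullDispatcher by default). A minimal usage sketch follows; PageValue and PageAux are hypothetical ByteSerializer implementations, not types from this repository, and build() and dispose() declare checked exceptions that the caller has to handle:

// Hypothetical value/aux types implementing ByteSerializer are assumed here.
public static void runDrum(PageValue value) throws Exception {
    Drum<PageValue, PageAux> drum =
            new Drum.Builder<PageValue, PageAux>("urlSeenDrum", PageValue.class, PageAux.class)
                    .numBucket(256)     // must be a power of two, like the default 512
                    .bufferSize(128)    // must be a power of two, like the default 64
                    .build();           // default NullDispatcher and default storage factory
    drum.checkUpdate(42L, value);       // check-and-update a single key/value pair
    drum.dispose();                     // flush buffers, stop writers and the merger
}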
src/main/java/at/rovo/caching/drum/Drum.java
package at.rovo.caching.drum; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import at.rovo.caching.drum.data.ByteSerializer; import at.rovo.caching.drum.event.DrumEventDispatcher; import at.rovo.caching.drum.internal.DiskBucketWriter; import at.rovo.caching.drum.internal.InMemoryData; import at.rovo.caching.drum.internal.InMemoryMessageBroker; import at.rovo.caching.drum.internal.backend.DrumStorageFactory; import at.rovo.caching.drum.util.DrumExceptionHandler; import at.rovo.caching.drum.util.DrumUtil; import at.rovo.caching.drum.util.NamedThreadFactory; /** * <p> * This implementation of the 'Disk Repository with Update Management' structure * utilizes a consumer/producer pattern to store and process input received by * its * </p> * <ul> * <li>{@link #check(Number)} or {@link #check(Number, ByteSerializable)}</li> * <li>{@link #update(Number, ByteSerializable)} or * {@link #update(Number, ByteSerializable, ByteSerializable)}</li> * <li>{@link #checkUpdate(Number, ByteSerializable) or * {@link #checkUpdate(Number, ByteSerializable, ByteSerializable)}</li> * </ul> * <p> * methods. * </p> * <p> * Internally <code>numBuckets</code> buffers and buckets will be created which * will hold the data sent to DRUM. Buffers are the in-memory storage while * buckets are the intermediary disk files. Buffers fill up to <code> * bufferSize</code> bytes before they get sent to a disk file. * </p> * * * @param <V> * The type of the value * @param <A> * The type of the auxiliary data attached to a key * * @author Roman Vottner */ public class Drum<V extends ByteSerializer<V>, A extends ByteSerializer<A>> implements IDrum<V, A> { /** The logger of this class **/ private final static Logger logger = LogManager.getLogger(Drum.class); /** The name of the DRUM instance **/ protected String drumName = null; /** The number of buffers and buckets used **/ protected int numBuckets = 0; /** The size of an in-memory buffer **/ protected int bufferSize = 0; /** * The broker list which holds elements in memory until they get written to * the disk file **/ protected List<IBroker<InMemoryData<V, A>, V, A>> inMemoryBuffer = null; /** * The set of writer objects that listens to notifications of a broker and * write content from the broker to a disk file **/ protected List<IDiskWriter<V, A>> diskWriters = null; /** * The object that compares keys of data-objects for their uniqueness and * merges them into the data store in case the need to be updated **/ protected IMerger<V, A> merger = null; /** The execution service which hosts our threads **/ protected ExecutorService executor = null; /** The merger thread **/ protected Thread mergerThread = null; /** * The event dispatcher used to inform listeners of internal state changes * and certain statistics **/ protected DrumEventDispatcher eventDispatcher = new DrumEventDispatcher(); /** The event dispatcher thread **/ protected Thread eventDispatcherThread = null; /** * <p> * Implementation of the build pattern presented by Joshua Block in his book * 'Effective Java - Second Edition' in 'Item 2: Consider a builder when * faced with many constructor parameters'. * </p> * <p> * On invoking {@link #build()} the builder will create a new instance of * DRUM with the provided parameters. 
 * </p> * <p> * By default, the builder will create a DRUM instance for 512 buckets with * 64k buffer size and a {@link NullDispatcher}. * </p> * * @param <V> * The type of the value DRUM will manage * @param <A> * The type of the auxiliary data attached to a key * * @author Roman Vottner */ public static class Builder<V extends ByteSerializer<V>, A extends ByteSerializer<A>> implements IBuilder<Drum<V, A>> { // required parameters /** The name of the drum instance **/ private final String drumName; /** The type of the value managed by DRUM **/ private final Class<V> valueClass; /** The type of the auxiliary data managed by DRUM **/ private final Class<A> auxClass; /** The number of buckets managed by this DRUM instance **/ private int numBuckets = 512; /** The size of the buffer before a flush is forced **/ private int bufferSize = 64; /** The class responsible for dispatching the results **/ private IDispatcher<V,A> dispatcher = new NullDispatcher<V,A>(); /** A listener class which needs to be informed of state changes **/ private IDrumListener listener = null; /** The factory which creates the backing storage service **/ private DrumStorageFactory<V,A> factory = null; /** * <p> * Creates a new builder object with the minimum set of required data * to instantiate a new {@link Drum} instance on invoking * {@link #build()}. * </p> * * @param drumName The name of the DRUM instance * @param valueClass The type of the value this instance will manage * @param auxClass The type of the auxiliary data this instance will * manage */ public Builder(String drumName, Class<V> valueClass, Class<A> auxClass) { this.drumName = drumName; this.valueClass = valueClass; this.auxClass = auxClass; } /** * <p> * Assigns the builder to create a Drum instance which uses the provided * dispatcher instead of the default {@link NullDispatcher} to dispatch * results. * </p> * * @param dispatcher The dispatcher to use for instantiating DRUM * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> dispatcher(IDispatcher<V,A> dispatcher) { if (dispatcher == null) throw new IllegalArgumentException("Invalid dispatcher received"); this.dispatcher = dispatcher; return this; } /** * <p> * Assigns the builder to create a Drum instance which uses the provided * number of buckets instead of the default 512 buckets. * </p> * * @param numBuckets The number of buckets DRUM should manage * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> numBucket(int numBuckets) { if (numBuckets <= 0 || ((numBuckets & -numBuckets) != numBuckets)) throw new IllegalArgumentException( "The number of buckets must be greater than 0 and must " + "be a power of 2"); this.numBuckets = numBuckets; return this; } /** * <p> * Assigns the builder to create a Drum instance which uses the provided * buffer size instead of 64kb. * </p> * * @param bufferSize The buffer size DRUM should use before flushing the * content * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> bufferSize(int bufferSize) { if (bufferSize <= 0 || ((bufferSize & -bufferSize) != bufferSize)) throw new IllegalArgumentException( "BufferSize must be greater than 0 and be a power of 2 " + "(e.g. 2^1, 2^2, 2^3, ...)"); this.bufferSize = bufferSize; return this; } /** * <p> * Assigns the builder to create a Drum instance with the defined * listener in place.
* </p> * * @param listener The listener to notify on state changes * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> listener(IDrumListener listener) { this.listener = listener; return this; } /** * <p> * Assigns the builder to create a Drum instance with the given factory * to create a backend storage instead of the backend storage created by * the default factory. * </p> * * @param factory The factory responsible for creating the backend * storage * @return The builder responsible for creating a new instance of DRUM */ public Builder<V,A> factory(DrumStorageFactory<V,A> factory) { this.factory = factory; return this; } /** * <p> * Assigns the builder to create and initialize a new instance of a DRUM * object. * </p> * * @return A new initialized instance of DRUM * @throws DrumException If during the initialization of DRUM an error * occurred */ public Drum<V,A> build() throws Exception { return new Drum<V,A>(this); } } /** * <p> * Creates a new instance and assigns initial values contained within the * builder object to the corresponding attributes. * </p> * * @param builder * @throws DrumException */ private Drum(Builder<V,A> builder) throws DrumException { if (builder.listener != null) this.addDrumListener(builder.listener); DrumStorageFactory<V,A> factory; if (builder.factory != null) factory = builder.factory; else factory = DrumStorageFactory.getDefaultStorageFactory( builder.drumName, builder.numBuckets, builder.dispatcher, builder.valueClass, builder.auxClass, this.eventDispatcher); this.init(builder.drumName, builder.numBuckets, builder.bufferSize, builder.dispatcher, builder.valueClass, builder.auxClass, factory); } /** * <p> * Initializes the DRUM instance with required data and starts the worker * threads. * </p> * * @param drumName * The name of the DRUM instance * @param numBuckets * The number of buckets to be used * @param bufferSize * The size of a single buffer in bytes * @param dispatcher * The {@link IDispatcher} implementation which will receive * information on items added via <code>check</code>, * <code>update</code> or <code>checkUpdate</code>. * @param valueClass * The class-type of the value for a certain key * @param auxClass * The auxiliary data-type attached to a certain key * @param factory * The factory object which defines where data should be stored * in. 
Note that factory must return an implementation of IMerger * @param listener * The object which needs to be notified on certain internal * state or statistic changes * @throws DrumException */ private void init(String drumName, int numBuckets, int bufferSize, IDispatcher<V, A> dispatcher, Class<V> valueClass, Class<A> auxClass, DrumStorageFactory<V, A> factory) throws DrumException { this.eventDispatcherThread = new Thread(this.eventDispatcher); this.eventDispatcherThread.setName(drumName + "EventDispatcher"); this.eventDispatcherThread.start(); this.drumName = drumName; this.numBuckets = numBuckets; this.bufferSize = bufferSize; // create the broker and the consumer listening to the broker this.inMemoryBuffer = new ArrayList<IBroker<InMemoryData<V, A>, V, A>>( numBuckets); this.diskWriters = new ArrayList<IDiskWriter<V, A>>(numBuckets); this.merger = factory.getStorage(); DrumExceptionHandler exceptionHandler = new DrumExceptionHandler(); NamedThreadFactory writerFactory = new NamedThreadFactory(); writerFactory.setName(this.drumName + "-Writer"); writerFactory.setUncaughtExceptionHanlder(exceptionHandler); // writerFactory.increasePriority(true); // this.executor = Executors.newCachedThreadPool(writerFactory); this.executor = Executors.newFixedThreadPool(this.numBuckets, writerFactory); for (int i = 0; i < numBuckets; i++) { IBroker<InMemoryData<V, A>, V, A> broker = new InMemoryMessageBroker<InMemoryData<V, A>, V, A>( drumName, i, bufferSize, this.eventDispatcher); IDiskWriter<V, A> consumer = new DiskBucketWriter<V, A>(drumName, i, bufferSize, broker, this.merger, this.eventDispatcher); this.inMemoryBuffer.add(broker); this.diskWriters.add(consumer); this.executor.submit(consumer); // add a reference of the disk writer to the merger, so it can use // the semaphore to lock the file it is currently reading from to // merge the data into the backing data store. // While reading from a file, a further access to the file (which // should result in a write access) is therefore refused. 
this.merger.addDiskFileWriter(consumer); } this.mergerThread = new Thread(this.merger, this.drumName + "-Merger"); // this.mergerThread.setPriority(Math.min(10, // this.mergerThread.getPriority()+1)); this.mergerThread.setUncaughtExceptionHandler(exceptionHandler); this.mergerThread.start(); // Thread.currentThread().setPriority(Math.max(0, // Thread.currentThread().getPriority()-1)); } @Override public void check(Long key) { this.add(key, null, null, DrumOperation.CHECK); } @Override public void check(Long key, A aux) { this.add(key, null, aux, DrumOperation.CHECK); } @Override public void update(Long key, V value) { this.add(key, value, null, DrumOperation.UPDATE); } @Override public void update(Long key, V value, A aux) { this.add(key, value, aux, DrumOperation.UPDATE); } @Override public void appendUpdate(Long key, V value) { this.add(key, value, null, DrumOperation.APPEND_UPDATE); } @Override public void appendUpdate(Long key, V value, A aux) { this.add(key, value, aux, DrumOperation.APPEND_UPDATE); } @Override public void checkUpdate(Long key, V value) { this.add(key, value, null, DrumOperation.CHECK_UPDATE); } @Override public void checkUpdate(Long key, V value, A aux) { this.add(key, value, aux, DrumOperation.CHECK_UPDATE); } @Override public void dispose() throws DrumException { logger.debug("[{}] - Disposal initiated", this.drumName); // flip the buffers which sends the writers the latest data for (IBroker<?, ?, ?> broker : this.inMemoryBuffer) broker.stop(); // give the threads a chance to finish their work without being // interrupted for (IDiskWriter<V, A> writer : this.diskWriters) writer.stop(); this.executor.shutdown(); // wait for the threads to finish try { this.executor.awaitTermination(1, TimeUnit.MINUTES); } catch (InterruptedException e) { } this.merger.stop(); try { this.mergerThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } // close the open resources held by the writers for (IDiskWriter<V, A> writer : this.diskWriters) writer.close(); this.eventDispatcher.stop(); this.eventDispatcherThread.interrupt(); logger.trace("[{}] - disposed", this.drumName); } @Override public void addDrumListener(IDrumListener listener) { this.eventDispatcher.addDrumListener(listener); } @Override public void removeDrumListener(IDrumListener listener) { this.eventDispatcher.removeDrumListener(listener); } /** * <p> * Stores the key, the value and the auxiliary data as well as the operation * to be executed on these data in the corresponding in-memory buffer. * </p> * * @param key * The hash value of the data * @param value * The value associated with the key * @param aux * The auxiliary data of the key * @param operation * The operation to be used on the data */ private void add(Long key, V value, A aux, DrumOperation operation) { // get the bucket index based on the first n bits of the key, according // to the number of defined buckets int bucketId = DrumUtil.getBucketForKey(key, this.numBuckets); // add a new InMemoryData object to the broker this.inMemoryBuffer.get(bucketId).put( new InMemoryData<V, A>(key, value, aux, operation)); } /** * <p> * Returns the name of the DRUM instance. * </p> * * @return The name of the DRUM instance */ public String getName() { return this.drumName; } /** * <p> * Returns the number of buckets used by this DRUM instance. * </p> * * @return The number of buckets used */ public int getNumberOfBuckets() { return this.numBuckets; } }
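The Builder javadoc in the file above cites Bloch's builder pattern (Effective Java, Item 2). As a reference point, here is a small self-contained illustration of that pattern with the same required/optional parameter split the Drum.Builder uses; all names in this snippet are invented for the example and are not part of the DRUM API.

final class CacheConfig {
    private final String name;      // required parameter
    private final int numBuckets;   // optional, defaulted below
    private final int bufferSize;   // optional, defaulted below

    private CacheConfig(Builder b) {
        this.name = b.name;
        this.numBuckets = b.numBuckets;
        this.bufferSize = b.bufferSize;
    }

    static final class Builder {
        private final String name;
        private int numBuckets = 512;
        private int bufferSize = 64;

        Builder(String name) { this.name = name; }
        Builder numBuckets(int n) { this.numBuckets = n; return this; }
        Builder bufferSize(int s) { this.bufferSize = s; return this; }
        CacheConfig build() { return new CacheConfig(this); }
    }

    @Override
    public String toString() {
        return name + ": " + numBuckets + " buckets, " + bufferSize + "kb buffer";
    }

    public static void main(String[] args) {
        // required parameters go into the constructor, optional ones are chained
        System.out.println(new Builder("urlSeen").numBuckets(64).bufferSize(128).build());
    }
}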
MINOR typo fix
src/main/java/at/rovo/caching/drum/Drum.java
MINOR typo fix
Java
mit
d28de85ef7ad41e65ede5a81bc9c003554db016b
0
AlmasB/FXGL,AlmasB/FXGL,AlmasB/FXGL,AlmasB/FXGL
/* * FXGL - JavaFX Game Library. The MIT License (MIT). * Copyright (c) AlmasB ([email protected]). * See LICENSE for details. */ package com.almasb.fxgl.tools.dialogues; import com.almasb.fxgl.app.ApplicationMode; import com.almasb.fxgl.app.GameApplication; import com.almasb.fxgl.app.GameSettings; import javafx.scene.Cursor; import java.util.Map; import static com.almasb.fxgl.dsl.FXGL.*; import static com.almasb.fxgl.tools.dialogues.DialogueEditorVars.*; /** * A dialogue editor for FXGL. * * @author Almas Baimagambetov (AlmasB) ([email protected]) */ public class DialogueEditorApp extends GameApplication { @Override protected void initSettings(GameSettings settings) { settings.setWidth(1600); settings.setHeight(900); settings.setTitle("FXGL Dialogue Editor - github.com/AlmasB/FXGL"); settings.setVersion("1.0-beta"); settings.getCSSList().add("dialogue_editor.css"); settings.setIntroEnabled(false); settings.setMainMenuEnabled(false); settings.setGameMenuEnabled(false); settings.setManualResizeEnabled(true); settings.setScaleAffectedOnResize(false); //settings.setProfilingEnabled(true); settings.setApplicationMode(ApplicationMode.DEVELOPER); settings.setCloseConfirmation(settings.getApplicationMode() == ApplicationMode.RELEASE); } @Override protected void initGameVars(Map<String, Object> vars) { vars.put(IS_SNAP_TO_GRID, true); vars.put(IS_COLOR_BLIND_MODE, true); } @Override protected void initGame() { getGameScene().setCursor(Cursor.DEFAULT); addUINode(new MainUI()); } public static void main(String[] args) { launch(args); } }
fxgl-tools/src/main/java/com/almasb/fxgl/tools/dialogues/DialogueEditorApp.java
/* * FXGL - JavaFX Game Library. The MIT License (MIT). * Copyright (c) AlmasB ([email protected]). * See LICENSE for details. */ package com.almasb.fxgl.tools.dialogues; import com.almasb.fxgl.app.ApplicationMode; import com.almasb.fxgl.app.GameApplication; import com.almasb.fxgl.app.GameSettings; import javafx.scene.Cursor; import java.util.Map; import static com.almasb.fxgl.dsl.FXGL.*; import static com.almasb.fxgl.tools.dialogues.DialogueEditorVars.*; /** * A dialogue editor for FXGL. * * @author Almas Baimagambetov (AlmasB) ([email protected]) */ public class DialogueEditorApp extends GameApplication { @Override protected void initSettings(GameSettings settings) { settings.setWidth(1600); settings.setHeight(900); settings.setTitle("FXGL Dialogue Editor - github.com/AlmasB/FXGL"); settings.setVersion("1.0-beta"); settings.getCSSList().add("dialogue_editor.css"); settings.setIntroEnabled(false); settings.setManualResizeEnabled(true); settings.setScaleAffectedOnResize(false); //settings.setProfilingEnabled(true); settings.setApplicationMode(ApplicationMode.DEVELOPER); settings.setCloseConfirmation(settings.getApplicationMode() == ApplicationMode.RELEASE); } @Override protected void initGameVars(Map<String, Object> vars) { vars.put(IS_SNAP_TO_GRID, true); vars.put(IS_COLOR_BLIND_MODE, true); } @Override protected void initGame() { getGameScene().setCursor(Cursor.DEFAULT); addUINode(new MainUI()); } public static void main(String[] args) { launch(args); } }
disable menus
fxgl-tools/src/main/java/com/almasb/fxgl/tools/dialogues/DialogueEditorApp.java
disable menus
Java
mit
314d063c2065ffc446cdb4d930e13f15cc441d18
0
Aquerr/EagleFactions,Aquerr/EagleFactions
package io.github.aquerr.eaglefactions.logic; import io.github.aquerr.eaglefactions.config.ConfigAccess; import io.github.aquerr.eaglefactions.config.IConfig; import io.github.aquerr.eaglefactions.config.MainConfig; import ninja.leaping.configurate.ConfigurationNode; import java.math.BigDecimal; import java.util.HashMap; import java.util.List; import java.util.function.Function; public class MainLogic { private static IConfig mainConfig = MainConfig.getConfig(); public static boolean getAllianceFriendlyFire() { ConfigurationNode friendlyFireNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "friendlyFire", "alliance"); Boolean friendlyFire = friendlyFireNode.getBoolean(); return friendlyFire; } public static BigDecimal getGlobalMaxPower() { ConfigurationNode maxPowerNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "maxpower"); BigDecimal maxPower = new BigDecimal(maxPowerNode.getString()); return maxPower; } public static BigDecimal getStartingPower() { ConfigurationNode startingPowerNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "startpower"); BigDecimal startPower = new BigDecimal(startingPowerNode.getString()); return startPower; } public static BigDecimal getPowerIncrement() { ConfigurationNode powerIncrementNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "increment"); BigDecimal incrementPower = new BigDecimal(powerIncrementNode.getString()); return incrementPower; } public static BigDecimal getPowerDecrement() { ConfigurationNode powerDecrementNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "decrement"); BigDecimal decrementPower = new BigDecimal(powerDecrementNode.getString()); return decrementPower; } public static BigDecimal getKillAward() { ConfigurationNode killAwardNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "killaward"); BigDecimal killAward = new BigDecimal(killAwardNode.getString()); return killAward; } public static BigDecimal getPunishment() { ConfigurationNode punishmentNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "punishment"); BigDecimal punishment = new BigDecimal(punishmentNode.getString()); return punishment; } public static int getMaxNameLength() { ConfigurationNode maxLengthNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "name", "maxlength"); int maxLength = maxLengthNode.getInt(); return maxLength; } public static int getMinNameLength() { ConfigurationNode minLengthNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "name", "minlength"); int minLength = minLengthNode.getInt(); return minLength; } public static int getMaxTagLength() { ConfigurationNode maxLengthNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "tag", "maxlength"); int maxLength = maxLengthNode.getInt(); return maxLength; } public static int getMinTagLength() { ConfigurationNode minLengthNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "tag", "minlength"); int minLength = minLengthNode.getInt(); return minLength; } public static boolean getMobSpawning() { ConfigurationNode mobSpawningNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "spawn", "mobs"); boolean mobSpawning = mobSpawningNode.getBoolean(); return mobSpawning; } public static boolean getBlockEnteringFactions() { ConfigurationNode
mobSpawningNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "blockEnteringFactions"); boolean mobSpawning = mobSpawningNode.getBoolean(); return mobSpawning; } public static boolean requireConnectedClaims() { ConfigurationNode requireConnectedClaimsNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "connectedClaims"); boolean requireConnectedClaims = requireConnectedClaimsNode.getBoolean(); return requireConnectedClaims; } public static boolean shouldBlockSafeZoneFromWarZone() { ConfigurationNode blockSafeZoneFromWarZoneNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "blockSafeZoneWhileInWarZone"); boolean blockSafeZoneFromWarZone = blockSafeZoneFromWarZoneNode.getBoolean(); return blockSafeZoneFromWarZone; } public static boolean isPlayerLimit() { ConfigurationNode isPlayerLimitNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "playerlimit", "playerlimit"); boolean playerLimit = isPlayerLimitNode.getBoolean(); return playerLimit; } public static int getPlayerLimit() { ConfigurationNode limitNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "playerlimit", "limit"); int limit = limitNode.getInt(); return limit; } public static int getAttackTime() { ConfigurationNode attackTimeNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "attacktime"); int attackTime = attackTimeNode.getInt(); return attackTime; } public static String getPrefixOption() { ConfigurationNode prefixNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "chat", "prefix"); String prefix = prefixNode.getString(); return prefix; } public static boolean getCreateByItems() { ConfigurationNode createByItemsNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "factioncreation", "createbyitems"); boolean createByItems = createByItemsNode.getBoolean(); return createByItems; } public static HashMap<String, Integer> getNeededItems() { ConfigurationNode itemsNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "factioncreation", "items"); List<String> itemsList = itemsNode.getList(objectToStringTransformer); HashMap<String, Integer> items = new HashMap<>(); for (String itemWithAmount: itemsList) { String strings[] = itemWithAmount.split("\\|"); String item = strings[0]; int amount = Integer.valueOf(strings[1]); items.put(item, amount); } return items; } private static Function<Object,String> objectToStringTransformer = input -> { if (input instanceof String) { return (String) input; } else { return null; } }; }
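The getNeededItems method above turns configuration entries of the form "<item id>|<amount>" into a HashMap, which is what this commit ("Changed needed items to HashMap with key value pair") introduces. The standalone sketch below mirrors that parsing step so the expected entry format is easy to see; the item ids and amounts are made-up sample values, not real EagleFactions configuration.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class NeededItemsParseSketch {

    /** Splits each "<item id>|<amount>" entry and collects it into a map. */
    static Map<String, Integer> parse(List<String> entries) {
        Map<String, Integer> items = new HashMap<>();
        for (String entry : entries) {
            String[] parts = entry.split("\\|");
            items.put(parts[0], Integer.valueOf(parts[1]));
        }
        return items;
    }

    public static void main(String[] args) {
        // hypothetical sample entries
        List<String> entries = Arrays.asList("minecraft:wool|35", "minecraft:iron_ingot|4");
        // prints both mappings, e.g. {minecraft:iron_ingot=4, minecraft:wool=35}
        System.out.println(parse(entries));
    }
}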
src/main/java/io/github/aquerr/eaglefactions/logic/MainLogic.java
package io.github.aquerr.eaglefactions.logic; import io.github.aquerr.eaglefactions.config.ConfigAccess; import io.github.aquerr.eaglefactions.config.IConfig; import io.github.aquerr.eaglefactions.config.MainConfig; import ninja.leaping.configurate.ConfigurationNode; import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; import java.util.function.Function; public class MainLogic { private static IConfig mainConfig = MainConfig.getConfig(); public static boolean getAllianceFriendlyFire() { ConfigurationNode friendlyFireNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "friendlyFire", "alliance"); Boolean friendlyFire = friendlyFireNode.getBoolean(); return friendlyFire; } public static BigDecimal getGlobalMaxPower() { ConfigurationNode maxPowerNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "maxpower"); BigDecimal maxPower = new BigDecimal(maxPowerNode.getString()); return maxPower; } public static BigDecimal getStartingPower() { ConfigurationNode startingPowerNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "startpower"); BigDecimal startPower = new BigDecimal(startingPowerNode.getString()); return startPower; } public static BigDecimal getPowerIncrement() { ConfigurationNode powerIncrementNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "increment"); BigDecimal incrementPower = new BigDecimal(powerIncrementNode.getString()); return incrementPower; } public static BigDecimal getPowerDecrement() { ConfigurationNode powerDecrementNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "decrement"); BigDecimal decrementPower = new BigDecimal(powerDecrementNode.getString()); return decrementPower; } public static BigDecimal getKillAward() { ConfigurationNode killAwardNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "killaward"); BigDecimal killAward = new BigDecimal(killAwardNode.getString()); return killAward; } public static BigDecimal getPunishment() { ConfigurationNode punishmentNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "power", "punishment"); BigDecimal punishment = new BigDecimal(punishmentNode.getString()); return punishment; } public static int getMaxNameLength() { ConfigurationNode maxLengthNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "name", "maxlength"); int maxLength = maxLengthNode.getInt(); return maxLength; } public static int getMinNameLength() { ConfigurationNode minLengthNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "name", "minlength"); int minLength = minLengthNode.getInt(); return minLength; } public static int getMaxTagLength() { ConfigurationNode maxLengthNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "tag", "maxlength"); int maxLength = maxLengthNode.getInt(); return maxLength; } public static int getMinTagLength() { ConfigurationNode minLengthNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "tag", "minlength"); int minLength = minLengthNode.getInt(); return minLength; } public static boolean getMobSpawning() { ConfigurationNode mobSpawningNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "spawn", "mobs"); boolean mobSpawning = mobSpawningNode.getBoolean(); return mobSpawning; } public static boolean getBlockEnteringFactions() { ConfigurationNode mobSpawningNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "blockEnteringFactions"); boolean mobSpawning = 
mobSpawningNode.getBoolean(); return mobSpawning; } public static boolean requireConnectedClaims() { ConfigurationNode requireConnectedClaimsNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "connectedClaims"); boolean requireConnectedClaims = requireConnectedClaimsNode.getBoolean(); return requireConnectedClaims; } public static boolean shouldBlockSafeZoneFromWarZone() { ConfigurationNode blockSafeZoneFromWarZoneNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "blockSafeZoneWhileInWarZone"); boolean blockSafeZoneFromWarZone = blockSafeZoneFromWarZoneNode.getBoolean(); return blockSafeZoneFromWarZone; } public static boolean isPlayerLimit() { ConfigurationNode isPlayerLimitNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "playerlimit", "playerlimit"); boolean playerLimit = isPlayerLimitNode.getBoolean(); return playerLimit; } public static int getPlayerLimit() { ConfigurationNode limitNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "playerlimit", "limit"); int limit = limitNode.getInt(); return limit; } public static int getAttackTime() { ConfigurationNode attackTimeNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "attacktime"); int attackTime = attackTimeNode.getInt(); return attackTime; } public static String getPrefixOption() { ConfigurationNode prefixNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "chat", "prefix"); String prefix = prefixNode.getString(); return prefix; } public static boolean getCreateByItems() { ConfigurationNode createByItemsNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "factioncreation", "createbyitems"); boolean createByItems = createByItemsNode.getBoolean(); return createByItems; } public static List<String> getNeededItems() { ConfigurationNode itemsNode = ConfigAccess.getConfig(mainConfig).getNode("eaglefactions", "gameplay", "factioncreation", "items"); List<String> items = itemsNode.getList(objectToStringTransformer); return items; } private static Function<Object,String> objectToStringTransformer = input -> { if (input instanceof String) { return (String) input; } else { return null; } }; }
Changed needed items to HashMap with key value pair
src/main/java/io/github/aquerr/eaglefactions/logic/MainLogic.java
Changed needed items to HashMap with key value pair
Java
cc0-1.0
8c17bfb8cf9d2df6d14a3c650a614b5f41795981
0
PedaB/mapsplit,PedaB/mapsplit
package dev.osm.mapsplit; /* * Mapsplit - A simple but fast tile splitter for large OSM data * * Written in 2011 by Peda ([email protected]) * * To the extent possible under law, the author(s) have dedicated all copyright and related and neighboring rights to * this software to the public domain worldwide. This software is distributed without any warranty. * * You should have received a copy of the CC0 Public Domain Dedication along with this software. If not, see * <http://creativecommons.org/publicdomain/zero/1.0/>. */ import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.sql.SQLException; import java.text.DateFormat; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.openstreetmap.osmosis.core.container.v0_6.BoundContainer; import org.openstreetmap.osmosis.core.container.v0_6.EntityContainer; import org.openstreetmap.osmosis.core.container.v0_6.NodeContainer; import org.openstreetmap.osmosis.core.container.v0_6.RelationContainer; import org.openstreetmap.osmosis.core.container.v0_6.WayContainer; import org.openstreetmap.osmosis.core.domain.v0_6.Bound; import org.openstreetmap.osmosis.core.domain.v0_6.Entity; import org.openstreetmap.osmosis.core.domain.v0_6.Node; import org.openstreetmap.osmosis.core.domain.v0_6.Relation; import org.openstreetmap.osmosis.core.domain.v0_6.RelationMember; import org.openstreetmap.osmosis.core.domain.v0_6.Way; import org.openstreetmap.osmosis.core.domain.v0_6.WayNode; import org.openstreetmap.osmosis.core.task.v0_6.RunnableSource; import org.openstreetmap.osmosis.core.task.v0_6.Sink; import org.openstreetmap.osmosis.osmbinary.file.BlockOutputStream; import ch.poole.geo.mbtiles4j.MBTilesWriteException; import ch.poole.geo.mbtiles4j.MBTilesWriter; import ch.poole.geo.mbtiles4j.model.MetadataEntry; import crosby.binary.osmosis.OsmosisReader; import crosby.binary.osmosis.OsmosisSerializer; public class MapSplit { private static final String MAPSPLIT_TAG = "mapsplit"; private static final String PBF_EXT = ".osm.pbf"; private static final List<String> KNOWN_PBF_EXTS = List.of(".pbf", PBF_EXT); private static final Logger LOGGER = Logger.getLogger(MapSplit.class.getName()); private static final int MAX_ZOOM_OUT_DIFF = 5; private final CommandLineParams params; // all data after this appointment date is considered new or modified private Date appointmentDate; private Date latestDate = new Date(0); // internal store to check if reading the file worked private boolean complete = false; // the hashmap for all nodes in the osm map private final OsmMap nmap; // the hashmap for all ways in the osm map private final OsmMap wmap; // the hashmap for all relations in the osm map private final OsmMap rmap; /** * ways which are members in 
a relation, and whose nodes might therefore need to be added to extra tiles in a second * run. * * If we do not want complete relations, this field is null. But we may want some or all relations to be complete โ€“ * that is, we want their way members, and all nodes of these way members(!), to be part of all tiles the relation * itself is in. Because we do not store a way's nodes, this requires a second read through the input file. */ private Set<Long> relationMemberWayIds = null; // a bitset telling the algorithm which tiles need to be re-renderd private final UnsignedSparseBitSet modifiedTiles = new UnsignedSparseBitSet(); private final Map<Integer, UnsignedSparseBitSet> optimizedModifiedTiles = new HashMap<>(); // the serializer (OSM writers) for any modified tile private Map<Integer, OsmosisSerializer> outFiles; // output for mbtiles private Map<Integer, ByteArrayOutputStream> outBlobs; // new zoom levels for tiles during optimization private final Map<Integer, Byte> zoomMap = new HashMap<>(); // relations with potential forward references private final Set<Relation> postProcessRelations = new HashSet<>(); class DataFormatException extends RuntimeException { private static final long serialVersionUID = 1L; /** * Construct a new exception indicating data format errors * * @param message the message */ public DataFormatException(@NotNull String message) { super(message); } } /** * Construct a new MapSplit instance * * @param params parameters from the command line * @param appointmentDate only add changes from after this date (doesn't really work) */ public MapSplit(CommandLineParams params, Date appointmentDate) { this.params = params; this.appointmentDate = appointmentDate; if (params.mapSizes != null) { nmap = new HeapMap(params.mapSizes[0]); wmap = new HeapMap(params.mapSizes[1]); rmap = new HeapMap(params.mapSizes[2]); } else { nmap = new ArrayMap(params.maxIds[0]); wmap = new ArrayMap(params.maxIds[1]); rmap = new ArrayMap(params.maxIds[2]); } if (params.completeRelations || params.completeAreas) { relationMemberWayIds = new HashSet<>(); } optimizedModifiedTiles.put(params.zoom, modifiedTiles); } /** * Calculate the longitude for a tile * * @param x the x number for the tile * @return the longitude */ private double tile2lon(int x) { return (x / Math.pow(2.0, params.zoom)) * 360.0 - 180.0; } /** * Calculate the latitude for a tile * * @param y the y number for the tile * @return the latitude */ private double tile2lat(int y) { double n = Math.PI - 2.0 * Math.PI * y / Math.pow(2, params.zoom); return (180.0 / Math.PI * Math.atan(0.5 * (Math.pow(Math.E, n) - Math.pow(Math.E, -n)))); } /** * Calculate tile X number for a given longitude * * @param lon the longitude * @return the tile X number */ private int lon2tileX(double lon) { int xtile = (int) Math.floor((lon + 180) / 360 * (1 << params.zoom)); if (xtile < 0) { return 0; } else if (xtile >= (1 << params.zoom)) { return ((1 << params.zoom) - 1); } else { return xtile; } } /** * Calculate tile Y number for a given latitude * * @param lat the latitude * @return the tile y number */ private int lat2tileY(double lat) { int ytile = (int) Math.floor((1 - Math.log(Math.tan(Math.toRadians(lat)) + 1 / Math.cos(Math.toRadians(lat))) / Math.PI) / 2 * (1 << params.zoom)); if (ytile < 0) { return 0; } else if (ytile >= (1 << params.zoom)) { return ((1 << params.zoom) - 1); } else { return ytile; } } /** * Calculate the Bound for the given tile * * @param tileX tile X number * @param tileY tile Y number * @return a Bound object (a bound box for 
the tile) */ public Bound getBound(int tileX, int tileY) { double l = tile2lon(tileX); double r = tile2lon(tileX + 1); double t = tile2lat(tileY); double b = tile2lat(tileY + 1); double dx = r - l; double dy = b - t; l = Math.max(l - params.border * dx, Const.MIN_LON); r = Math.min(r + params.border * dx, Const.MAX_LON); t = Math.min(t - params.border * dy, Const.MAX_LAT); b = Math.max(b + params.border * dy, Const.MIN_LAT); return new Bound(r, l, t, b, MAPSPLIT_TAG); } /** * Fill out holes * * @param tiles the current tiles */ private void checkAndFill(@NotNull Collection<Long> tiles) { int minX = Integer.MAX_VALUE; int minY = Integer.MAX_VALUE; int maxX = Integer.MIN_VALUE; int maxY = Integer.MIN_VALUE; // determine the min/max tile nrs for (long tile : tiles) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); minX = Math.min(minX, tx); minY = Math.min(minY, ty); maxX = Math.max(maxX, tx); maxY = Math.max(maxY, ty); } // enlarge min/max to have a border and to cope with possible neighbour tiles minX -= 2; minY -= 2; maxX += 2; maxY += 2; int sizeX = maxX - minX + 1; int sizeY = maxY - minY + 1; // fill the helperSet which marks any set tile BitSet helperSet = new BitSet(); for (long tile : tiles) { int tx = nmap.tileX(tile) - minX; int ty = nmap.tileY(tile) - minY; int neighbour = nmap.neighbour(tile); helperSet.set(tx + ty * sizeX); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { helperSet.set(tx + 1 + ty * sizeX); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { helperSet.set(tx + (ty + 1) * sizeX); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { helperSet.set(tx + 1 + (ty + 1) * sizeX); } } // start with tile 1,1 and fill region... Deque<Integer> stack = new ArrayDeque<>(); stack.push(1 + 1 * sizeX); // fill all tiles that are reachable by a 4-neighbourhood while (!stack.isEmpty()) { int val = stack.pop(); boolean isSet = helperSet.get(val); helperSet.set(val); if (val >= sizeX * sizeY) { continue; } int ty = val / sizeX; int tx = val % sizeX; if ((tx == 0) || (ty == 0) || (ty >= sizeY)) { continue; } if (!isSet) { stack.push(tx + 1 + ty * sizeX); stack.push(tx - 1 + ty * sizeX); stack.push(tx + (ty + 1) * sizeX); stack.push(tx + (ty - 1) * sizeX); } } // now check if there are not-set bits left (i.e. 
holes in tiles) int idx = -1; while (true) { idx = helperSet.nextClearBit(idx + 1); if (idx >= sizeX * sizeY) { break; } int tx = idx % sizeX; int ty = idx / sizeX; if ((tx == 0) || (ty == 0)) { continue; } tx += minX; ty += minY; // TODO: make this a bit nicer by delegating the id-generation to the map code int c = tx << Const.MAX_ZOOM | ty; tiles.add(((long) c) << AbstractOsmMap.TILE_Y_SHIFT); modifiedTiles.set(c); } } /** * calculate the lon-offset for the given border size * * @param lon the longitude * @return the offset */ private double deltaX(double lon) { int tx = lon2tileX(lon); double x1 = tile2lon(tx); double x2 = tile2lon(tx + 1); return params.border * (x2 - x1); } /** * calculate the lat-offset for the given border size * * @param lat the latitude * @return the offset */ private double deltaY(double lat) { int ty = lat2tileY(lat); double y1 = tile2lat(ty); double y2 = tile2lat(ty + 1); return params.border * (y2 - y1); } /** * Add tile and neighbours to modifiedTiles * * @param tx tile x number * @param ty tile y number * @param neighbour bit map for neighbour tiles */ private void setModifiedTiles(int tx, int ty, int neighbour) { modifiedTiles.set(tx << Const.MAX_ZOOM | ty); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { modifiedTiles.set((tx + 1) << Const.MAX_ZOOM | ty); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { modifiedTiles.set(tx << Const.MAX_ZOOM | (ty + 1)); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { modifiedTiles.set((tx + 1) << Const.MAX_ZOOM | (ty + 1)); } } /** * Add a Node * * @param n the Node * @param lat latitude in WGS84 coords * @param lon longitude in WGS84 coords */ private void addNodeToMap(Node n, double lat, double lon) { int tileX = lon2tileX(lon); int tileY = lat2tileY(lat); int neighbour = OsmMap.NEIGHBOURS_NONE; // check and add border if needed double dx = deltaX(lon); if (lon2tileX(lon + dx) > tileX) { neighbour = OsmMap.NEIGHBOURS_EAST; } else if (lon2tileX(lon - dx) < tileX) { tileX--; neighbour = OsmMap.NEIGHBOURS_EAST; } double dy = deltaY(lat); if (lat2tileY(lat + dy) > tileY) { neighbour += OsmMap.NEIGHBOURS_SOUTH; } else if (lat2tileY(lat - dy) < tileY) { tileY--; neighbour += OsmMap.NEIGHBOURS_SOUTH; } // mark current tile (and neighbours) to be re-rendered if (n.getTimestamp().after(appointmentDate)) { setModifiedTiles(tileX, tileY, neighbour); } // mark the latest changes made to this map if (n.getTimestamp().after(latestDate)) { latestDate = n.getTimestamp(); } nmap.put(n.getId(), tileX, tileY, neighbour); } /** * Add a Way * * @param way the Way */ private void addWayToMap(@NotNull Way way) { boolean modified = way.getTimestamp().after(appointmentDate); Set<Long> tileList = new TreeSet<>(); // mark the latest changes made to this map if (way.getTimestamp().after(latestDate)) { latestDate = way.getTimestamp(); } List<Long> tiles = new ArrayList<>(); for (WayNode wayNode : way.getWayNodes()) { // get tileNrs for given node long tile = nmap.get(wayNode.getNodeId()); // don't ignore missing nodes if (tile == 0) { if (params.verbose) { LOGGER.log(Level.INFO, "way {0} missing node {1}", new Object[] { way.getId(), wayNode.getNodeId() }); } return; } tiles.add(tile); } for (long tile : tiles) { // mark tiles (and possible neighbours) as modified if (modified) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); int neighbour = nmap.neighbour(tile); setModifiedTiles(tx, ty, neighbour); } tileList.add(tile); } // TODO check/verify if 8 tiles is ok or if there might be corner-cases with only 4 tiles // with more than 8 (or 
4?!) tiles in the list we might have a "hole" if (tileList.size() >= 8) { checkAndFill(tileList); } // bootstrap a tilepos for the way long id = way.getWayNodes().get(0).getNodeId(); long val = nmap.get(id); int tx = nmap.tileX(val); int ty = nmap.tileY(val); // put way into map with a "random" base tile wmap.put(way.getId(), tx, ty, OsmMap.NEIGHBOURS_NONE); // update map so that the way knows which tiles it belongs to wmap.update(way.getId(), tileList); for (WayNode wayNode : way.getWayNodes()) { // update map so that the node knows about any additional // tile it has to be stored in nmap.update(wayNode.getNodeId(), tileList); } } /** * Iterate over the way nodes and add tileList to the list of tiles they are supposed to be in * * @param way the Way we are processing * @param tileList the List of tiles, encoded with {@link TileCoord} */ private void addExtraWayToMap(@NotNull Way way, @NotNull Collection<Integer> tileList) { for (WayNode wayNode : way.getWayNodes()) { // update map so that the node knows about any additional // tile it has to be stored in nmap.updateInt(wayNode.getNodeId(), tileList); } } /** * Add a Relation * * @param r the Relation */ private void addRelationToMap(@NotNull Relation r) { boolean modified = r.getTimestamp().after(appointmentDate); Collection<Long> tileList = new TreeSet<>(); boolean nodeWarned = false; // suppress multiple warnings about missing Nodes boolean wayWarned = false; // suppress multiple warnings about missing Ways boolean relationWarned = false; // suppress multiple warnings about missing Relations if (r.getTimestamp().after(latestDate)) { latestDate = r.getTimestamp(); } for (RelationMember m : r.getMembers()) { switch (m.getMemberType()) { case Node: long tile = nmap.get(m.getMemberId()); // The referenced node is not in our data set if (tile == 0) { if (params.verbose && !nodeWarned) { LOGGER.log(Level.INFO, "Non-complete Relation {0} (missing a node)", r.getId()); nodeWarned = true; } continue; } // mark tiles as modified if (modified) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); int neighbour = nmap.neighbour(tile); setModifiedTiles(tx, ty, neighbour); } tileList.add(tile); break; case Way: List<Integer> list = wmap.getAllTiles(m.getMemberId()); // The referenced way is not in our data set if (list == null) { if (params.verbose && !wayWarned) { LOGGER.log(Level.INFO, "Non-complete Relation {0} (missing a way)", r.getId()); wayWarned = true; } continue; } if (modified) { for (Integer i : list) { modifiedTiles.set(i); } } // TODO: make this a bit more generic / nicer code :/ for (Integer i : list) { tileList.add(((long) i) << AbstractOsmMap.TILE_Y_SHIFT); } break; case Relation: list = rmap.getAllTiles(m.getMemberId()); // The referenced relation is not in our data set if (list == null) { if (params.verbose && !relationWarned) { LOGGER.log(Level.INFO, "Non-complete Relation {0} (missing a relation)", r.getId()); relationWarned = true; } postProcessRelations.add(r); continue; } if (modified) { for (Integer i : list) { modifiedTiles.set(i); } } for (Integer i : list) { tileList.add(((long) i) << HeapMap.TILE_Y_SHIFT); } break; default: LOGGER.log(Level.WARNING, "Unknown member type {0}", m.getMemberType()); } } // Just in case, this can happen due to silly input data :'( if (tileList.isEmpty()) { LOGGER.log(Level.WARNING, "Ignoring relation with no elements in tiles"); return; } // no need to fill tile list here as that will have already happened for any element with geometry long val = tileList.iterator().next(); int tx = 
rmap.tileX(val); int ty = rmap.tileY(val); // put relation into map with a "random" base tile rmap.put(r.getId(), tx, ty, OsmMap.NEIGHBOURS_NONE); // update map so that the relation knows in which tiles it is needed rmap.update(r.getId(), tileList); if (params.completeRelations || (params.completeAreas && hasTag(r, "type", "multipolygon"))) { // only add members to all the tiles if the appropriate option is enabled for (RelationMember m : r.getMembers()) { switch (m.getMemberType()) { case Node: nmap.update(m.getMemberId(), tileList); break; case Way: wmap.update(m.getMemberId(), tileList); relationMemberWayIds.add(m.getMemberId()); break; case Relation: rmap.update(m.getMemberId(), tileList); break; case Bound: break; default: LOGGER.log(Level.WARNING, "Unknown member type {0}", m.getMemberType()); } } } } /** * Check if an Entity has a tag * * @param e the Entity to inpsect * @param key tag key * @param value tag value * @return true if e is tagged with the tag */ private static boolean hasTag(@NotNull Entity e, @Nullable String key, @Nullable String value) { return e.getTags().stream().anyMatch(tag -> tag.getKey().equals(key) && tag.getValue().equals(value)); } long nCount = 0; long wCount = 0; long rCount = 0; /** * Setup the OSM object to tiles mappings * * @throws IOException if reading the input caused an issue * @throws InterruptedException if a Thread was interrupted */ public void setup() throws IOException, InterruptedException { RunnableSource reader = new OsmosisReader(new FileInputStream(params.inputFile)); reader.setSink(new Sink() { @Override public void complete() { complete = true; } /** * Throw an exception if the metadata flag is set but we are reading data without any * * @param e the OSM object to check */ void checkMetadata(@NotNull Entity e) { if (params.metadata && (e.getVersion() == -1)) { // this doesn't seem to be really documented throw new DataFormatException(String.format("%s %d is missing a valid version and metadata flag is set", e.getType(), e.getId())); } } @Override public void process(EntityContainer ec) { if (ec instanceof NodeContainer) { Node n = ((NodeContainer) ec).getEntity(); checkMetadata(n); addNodeToMap(n, n.getLatitude(), n.getLongitude()); if (params.verbose) { nCount++; if ((nCount % (nmap.getCapacity() / 20)) == 0) { LOGGER.log(Level.INFO, "{0} nodes processed", nCount); } } } else if (ec instanceof WayContainer) { Way w = ((WayContainer) ec).getEntity(); checkMetadata(w); addWayToMap(w); if (params.verbose) { wCount++; if ((wCount % (wmap.getCapacity() / 20)) == 0) { LOGGER.log(Level.INFO, "{0} ways processed", wCount); } } } else if (ec instanceof RelationContainer) { Relation r = ((RelationContainer) ec).getEntity(); checkMetadata(r); addRelationToMap(r); if (params.verbose) { rCount++; if ((rCount % (rmap.getCapacity() / 20)) == 0) { LOGGER.log(Level.INFO, "{0} relations processed", rCount); } } } else if (ec instanceof BoundContainer) { // nothing todo, we ignore bound tags } else { LOGGER.log(Level.WARNING, "Unknown Element while reading"); LOGGER.log(Level.WARNING, ec.toString()); LOGGER.log(Level.WARNING, ec.getEntity().toString()); } } @Override public void initialize(Map<String, Object> metaData) { // not used } @Override public void close() { // not used } }); if (params.verbose) { LOGGER.log(Level.INFO, "Initial pass started"); } runThread(new Thread(reader)); if (!complete) { throw new IOException("Could not read file fully"); } if (params.verbose) { LOGGER.log(Level.INFO, "We have read:\n{0} nodes\n{1} ways\n{2} relations", 
new Object[] { nCount, wCount, rCount }); } if (!postProcessRelations.isEmpty()) { int preSize = postProcessRelations.size(); int postSize = preSize; if (params.verbose) { LOGGER.log(Level.INFO, "Post processing {0} relations with forward references", new Object[] { preSize }); } do { preSize = postSize; List<Relation> temp = new ArrayList<>(postProcessRelations); postProcessRelations.clear(); for (Relation r : temp) { addRelationToMap(r); } postSize = postProcessRelations.size(); if (params.verbose) { LOGGER.log(Level.INFO, "{0} incomplete relations left", new Object[] { postSize }); } } while (postSize < preSize); } // Second run if we are in complete-relation-mode if (relationMemberWayIds != null) { complete = false; reader = new OsmosisReader(new FileInputStream(params.inputFile)); reader.setSink(new Sink() { @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { if (ec instanceof WayContainer) { Way w = ((WayContainer) ec).getEntity(); if (relationMemberWayIds.contains(w.getId())) { List<Integer> tileList = wmap.getAllTiles(w.getId()); addExtraWayToMap(w, tileList); } } } @Override public void initialize(Map<String, Object> metaData) { // not used } @Override public void close() { // not used } }); runThread(new Thread(reader)); if (!complete) { // NOSONAR throw new IOException("Could not read file fully in second run"); } } } /** * Start a thread and wait for it to complete * * @param thread the Thread * @throws InterruptedException if thread is interrupted */ private void runThread(@NotNull Thread thread) throws InterruptedException { thread.start(); while (thread.isAlive()) { try { thread.join(); } catch (InterruptedException e) { // NOSONAR LOGGER.log(Level.WARNING, "readerThread interupted {0}", e.getMessage()); throw e; } } } /** * Optimize the tile stack * * @param nodeLimit the minimum number of Nodes a tile should contain * */ private void optimize(final int nodeLimit) { if (params.verbose) { LOGGER.log(Level.INFO, "Optimizing ..."); } long statsStart = System.currentTimeMillis(); // count Node tile use // at high zoom levels this will contains // lots of Nodes that are in more than // one tile Map<Integer, Integer> stats = new HashMap<>(); nmap.keys().forEach((long k) -> { List<Integer> tiles = nmap.getAllTiles(k); if (tiles != null) { for (Integer t : tiles) { Integer count = stats.get(t); if (count != null) { count++; stats.put(t, count); } else { stats.put(t, 1); } } } else { LOGGER.log(Level.INFO, "tiles null for {0}", k); } }); long nodeCount = 0; List<Integer> keys = new ArrayList<>(stats.keySet()); Collections.sort(keys); for (Integer key : keys) { int value = stats.get(key); nodeCount += value; if (!zoomMap.containsKey(key)) { // not mapped if (value < nodeLimit) { CountResult prevResult = null; for (int z = 1; z < MAX_ZOOM_OUT_DIFF; z++) { int newZoom = params.zoom - z; CountResult result = getCounts(key, z, stats); if (result.total < 4 * nodeLimit) { if (result.total > nodeLimit || z == (MAX_ZOOM_OUT_DIFF - 1)) { for (int i = 0; i < result.keys.length; i++) { if (result.counts[i] != null) { zoomMap.put(result.keys[i], (byte) newZoom); } } break; // found optimal size } prevResult = result; // store this and try next zoom } else { if (prevResult != null) { for (int i = 0; i < prevResult.keys.length; i++) { if (prevResult.counts[i] != null) { zoomMap.put(prevResult.keys[i], (byte) (newZoom + 1)); } } } break; // last iteration was better } } } } } for (Entry<Integer, Byte> optimzedTile : zoomMap.entrySet()) { int idx 
= optimzedTile.getKey(); int newTileZoom = optimzedTile.getValue(); modifiedTiles.clear(idx); idx = mapToNewTile(idx, newTileZoom); UnsignedSparseBitSet tileSet = optimizedModifiedTiles.get(newTileZoom); if (tileSet == null) { tileSet = new UnsignedSparseBitSet(); optimizedModifiedTiles.put(newTileZoom, tileSet); } tileSet.set(idx); } if (params.verbose) { LOGGER.log(Level.INFO, "Tiles {0} avg node count {1} merged tiles {2}", new Object[] { stats.size(), nodeCount / stats.size(), zoomMap.size() }); LOGGER.log(Level.INFO, "Stats took {0} s", (System.currentTimeMillis() - statsStart) / 1000); } } class CountResult { int total; int[] keys; Integer[] counts; } /** * Get usage stats for zoomed out tiles * * @param idx the original tile index * @param zoomDiff how many levels to zoom out * @param stats a map containing the per tile stats * @return a CountResult object */ CountResult getCounts(int idx, int zoomDiff, @NotNull Map<Integer, Integer> stats) { // determine the counts for the other tiles in the zoomed out tile int x0 = ((idx >>> Const.MAX_ZOOM) >> zoomDiff) << zoomDiff; int y0 = ((idx & (int) Const.MAX_TILE_NUMBER) >> zoomDiff) << zoomDiff; int side = 2 << (zoomDiff - 1); int[] keys = new int[side * side]; for (int i = 0; i < side; i++) { for (int j = 0; j < side; j++) { keys[i * side + j] = ((x0 + i) << Const.MAX_ZOOM) | (y0 + j); } } Integer[] counts = new Integer[keys.length]; int total = 0; for (int i = 0; i < keys.length; i++) { counts[i] = stats.get(keys[i]); if (counts[i] != null) { total += counts[i]; } } CountResult result = new CountResult(); result.total = total; result.keys = keys; result.counts = counts; return result; } /** * Taking a packed tile id, return the tile it is in on a lower zoom level * * @param idx the packed tile id * @param newTileZoom the new zoom level (less than the base zoom) * @return packed tile id at newTileZoom */ private int mapToNewTile(int idx, int newTileZoom) { int xNew = (idx >>> Const.MAX_ZOOM) >> (params.zoom - newTileZoom); int yNew = (idx & (int) Const.MAX_TILE_NUMBER) >> (params.zoom - newTileZoom); return xNew << Const.MAX_ZOOM | yNew; } /** * Check if the coordinates are inside a polygon * * @param x longitude * @param y latitude * @param polygon the polygon * @return true is inside */ private boolean isInside(double x, double y, @NotNull double[] polygon) { boolean in = false; int lines = polygon.length / 2; for (int i = 0, j = lines - 1; i < lines; j = i++) { if (((polygon[2 * i + 1] > y) != (polygon[2 * j + 1] > y)) && (x < (polygon[2 * j] - polygon[2 * i]) * (y - polygon[2 * i + 1]) / (polygon[2 * j + 1] - polygon[2 * i + 1]) + polygon[2 * i])) { in = !in; } } return in; } /** * Check if the corners of a tile are inside a polygon * * @param tx tile x number * @param ty tile y number * @param polygon the polygon * @return true if a corner is inside */ private boolean isInside(int tx, int ty, @NotNull double[] polygon) { for (int u = 0; u < 2; u++) { for (int v = 0; v < 2; v++) { double x = tile2lon(tx + u); double y = tile2lat(ty + v); if (isInside(x, y, polygon)) { return true; } } } return false; } /** * Check if a tile intersects with / is covered by a polygon Note this only checks the corners of the tile so isn't * really correct and should be replaced by a suitable correct algorithm * * @param tx tile x number * @param ty tile y number * @param inside outer rings (the tile should be "inside") * @param outside inner rings (the tile should be "outside") * @return true if the tile intersects / is covered by the polygon */ private 
boolean isInside(int tx, int ty, @NotNull List<double[]> inside, @NotNull List<double[]> outside) { boolean in = false; for (double[] polygon : inside) { in = isInside(tx, ty, polygon); if (in) { break; } } if (!in) { return false; } for (double[] polygon : outside) { if (isInside(tx, ty, polygon)) { return false; } } return true; } /** * Remove all tiles that are not in the provided polygon * * @param polygonFile the path for a file containing the polygon * @throws IOException if reading fails */ public void clipPoly(@NotNull File polygonFile) throws IOException { List<double[]> inside = new ArrayList<>(); List<double[]> outside = new ArrayList<>(); try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(polygonFile)));) { /* String name = */ br.readLine(); // unused.. NOSONAR String poly = br.readLine(); while (!"END".equals(poly)) { int pos = 0; int size = 128; double[] data = new double[2 * size]; String coords = br.readLine(); while (!"END".equals(coords)) { coords = coords.trim(); int idx = coords.indexOf(' '); double lon = Double.parseDouble(coords.substring(0, idx)); double lat = Double.parseDouble(coords.substring(idx + 1)); // check if there's enough space to store if (pos >= size) { double[] tmp = new double[4 * size]; System.arraycopy(data, 0, tmp, 0, 2 * size); size *= 2; data = tmp; } // store data data[2 * pos] = lon; data[2 * pos + 1] = lat; pos++; coords = br.readLine(); } if (pos != size) { double[] tmp = new double[2 * pos]; System.arraycopy(data, 0, tmp, 0, 2 * pos); data = tmp; } if (poly.startsWith("!")) { outside.add(data); } else { inside.add(data); } // read next polygon, if there's any poly = br.readLine(); } } // now walk modifiedTiles and clear bits that are not inside polygon int idx = 0; while (true) { idx = modifiedTiles.nextSetBit(UnsignedSparseBitSet.inc(idx)); if (idx == -1) { break; } int tx = idx >>> Const.MAX_ZOOM; int ty = (int) (idx & Const.MAX_TILE_NUMBER); boolean in = isInside(tx, ty, inside, outside); if (!in) { modifiedTiles.clear(idx); } } } /** * Read the input file, process the OSM elements and write them out * * @param basename the basename for individual tile files or the name of a MBTiles format sqlite database * @param metadata write metadata (version, timestamp, etc) * @param mbTiles write to a MBTiles format sqlite database instead of writing individual tiles * @throws IOException if reading or creating the files has an issue * @throws InterruptedException if one of the Threads was interrupted */ public void store(@NotNull String basename, boolean metadata, boolean mbTiles) throws IOException, InterruptedException { MBTilesWriter w = null; if (mbTiles) { try { w = new MBTilesWriter(new File(basename)); w.getConnection().setAutoCommit(false); } catch (MBTilesWriteException | SQLException e1) { throw new IOException(e1); } } Bound bounds = null; int minZoom = params.zoom; for (Entry<Integer, UnsignedSparseBitSet> omt : optimizedModifiedTiles.entrySet()) { final UnsignedSparseBitSet tileSet = omt.getValue(); final int currentZoom = omt.getKey(); if (currentZoom < minZoom) { minZoom = currentZoom; } int ymax = 1 << currentZoom; // for conversion to TMS schema if (params.verbose) { LOGGER.log(Level.INFO, "Processing {0} tiles for zoom {1}", new Object[] { tileSet.cardinality(), currentZoom }); } int idx = -1; // start at -1 because this will be incremented before the first use // We might call this code several times if we have more tiles // to store than open files allowed while (true) { complete = false; 
outFiles = new HashMap<>(); if (mbTiles) { outBlobs = new HashMap<>(); } // Setup out-files... int count = 0; while (true) { idx = tileSet.nextSetBit(UnsignedSparseBitSet.inc(idx)); if (idx == -1) { // created all tiles for this zoom level break; } if (outFiles.get(idx) == null) { int tileX = idx >>> Const.MAX_ZOOM; int tileY = (int) (idx & Const.MAX_TILE_NUMBER); OutputStream target = null; if (mbTiles) { target = new ByteArrayOutputStream(); } else { String file; if (basename.contains("%x") && basename.contains("%y")) { file = basename.replace("%x", Integer.toString(tileX)).replace("%y", Integer.toString(tileY)).replace("%z", Integer.toString(currentZoom)); if (KNOWN_PBF_EXTS.stream().noneMatch(file::endsWith)) { file = file + PBF_EXT; } } else { file = basename + currentZoom + "/" + tileX + "_" + tileY + PBF_EXT; } File outputFile = new File(file); File parent = outputFile.getParentFile(); parent.mkdirs(); target = new FileOutputStream(file); } OsmosisSerializer serializer = new OsmosisSerializer(new BlockOutputStream(target)); serializer.setUseDense(true); serializer.configOmit(!metadata); // write out the bound for that tile Bound bound = getBound(tileX, tileY); BoundContainer bc = new BoundContainer(bound); serializer.process(bc); outFiles.put(idx, serializer); if (mbTiles) { outBlobs.put(idx, (ByteArrayOutputStream) target); } } if ((params.maxFiles != -1) && (++count >= params.maxFiles)) { break; } } // Now start writing output... RunnableSource reader = new OsmosisReader(new FileInputStream(params.inputFile)); class BoundSink implements Sink { Bound overallBounds = null; Set<Integer> mappedTiles = new HashSet<>(); /** * Get the overall bounds of the data * * @return a Bound object or null */ Bound getBounds() { return overallBounds; } @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { long id = ec.getEntity().getId(); Iterable<Integer> tiles; if (ec instanceof NodeContainer) { tiles = nmap.getAllTiles(id); } else if (ec instanceof WayContainer) { tiles = wmap.getAllTiles(id); } else if (ec instanceof RelationContainer) { tiles = rmap.getAllTiles(id); } else if (ec instanceof BoundContainer) { Bound bounds = ((BoundContainer) ec).getEntity(); if (overallBounds == null) { overallBounds = bounds; } else { overallBounds.union(bounds); } return; } else { LOGGER.log(Level.WARNING, "Unknown Element while reading"); LOGGER.log(Level.WARNING, "{0}", ec); LOGGER.log(Level.WARNING, "{0}", ec.getEntity()); return; } if (tiles == null) { // No tile where we could store the given entity into // This probably is a degenerated relation ;) return; } if (params.nodeLimit > 0) { // quite costly, and only relevant if tile optimization is on mappedTiles.clear(); for (int i : tiles) { // map original zoom tiles to optimized ones // and remove duplicates Byte newZoom = zoomMap.get(i); if (newZoom != null) { i = mapToNewTile(i, newZoom); } else { newZoom = (byte) params.zoom; } if (currentZoom == newZoom) { mappedTiles.add(i); } } tiles = mappedTiles; } for (int i : tiles) { if (tileSet.get(i)) { OsmosisSerializer ser = outFiles.get(i); if (ser != null) { ser.process(ec); } } } } @Override public void initialize(Map<String, Object> metaData) { // do nothing } @Override public void close() { // do nothing } } BoundSink sink = new BoundSink(); reader.setSink(sink); runThread(new Thread(reader)); if (!complete) { throw new IOException("Could not fully read file in storing run"); } // Finish and close files... 
for (Entry<Integer, OsmosisSerializer> entry : outFiles.entrySet()) { OsmosisSerializer ser = entry.getValue(); ser.complete(); ser.flush(); ser.close(); if (mbTiles) { int tileX = entry.getKey() >>> Const.MAX_ZOOM; int tileY = (int) (entry.getKey() & Const.MAX_TILE_NUMBER); int y = ymax - tileY - 1; // TMS scheme ByteArrayOutputStream blob = outBlobs.get(entry.getKey()); try { w.addTile(blob.toByteArray(), currentZoom, tileX, y); } catch (MBTilesWriteException e) { // NOSONAR LOGGER.log(Level.WARNING, "{0} z:{1} x:{2} y:{3}", new Object[] { e.getMessage(), currentZoom, tileX, tileY }); throw new IOException(e); } } } if (params.verbose) { LOGGER.log(Level.INFO, "Wrote {0} tiles, continuing with next block of tiles", outFiles.size()); } // remove mappings form this pass outFiles.clear(); if (mbTiles) { outBlobs.clear(); } if (idx == -1) { // written all tiles for this zoom level bounds = sink.getBounds(); break; } } } // Add MBTiles metadata parts if (mbTiles) { MetadataEntry ent = new MetadataEntry(); File file = new File(basename); ent.setTilesetName(file.getName()).setTilesetType(MetadataEntry.TileSetType.BASE_LAYER).setTilesetVersion(Const.MBT_VERSION) .setAttribution(Const.OSM_ATTRIBUTION).addCustomKeyValue("format", Const.MSF_MIME_TYPE) .addCustomKeyValue("minzoom", Integer.toString(minZoom)).addCustomKeyValue("maxzoom", Integer.toString(params.zoom)) .addCustomKeyValue("latest_date", Long.toString(latestDate.getTime())); if (bounds != null) { ent.setTilesetBounds(bounds.getLeft(), bounds.getBottom(), bounds.getRight(), bounds.getTop()); } else { ent.setTilesetBounds(Const.MIN_LON, -85, Const.MAX_LON, 85); } try { w.addMetadataEntry(ent); w.getConnection().commit(); } catch (MBTilesWriteException | SQLException e) { // NOSONAR throw new IOException(e); } w.close(); } } /** * Set up options from the command line and run the tiler * * @param params parameters from the command line * @param appointmentDate only add changes from after this date (doesn't really work) * * @return the "last changed" date * @throws InterruptedException if one of the Threads was interrupted * @throws IOException if IO went wrong */ private static Date run(CommandLineParams params, Date appointmentDate) throws IOException, InterruptedException { long startup = System.currentTimeMillis(); MapSplit split = new MapSplit(params, appointmentDate); long time = System.currentTimeMillis(); split.setup(); time = System.currentTimeMillis() - time; double nratio = split.nmap.getMissHitRatio(); double wratio = split.wmap.getMissHitRatio(); double rratio = split.rmap.getMissHitRatio(); if (params.polygonFile != null) { if (params.verbose) { LOGGER.log(Level.INFO, "Clip tiles with polygon given by \"{0}\"", params.polygonFile); } split.clipPoly(params.polygonFile); } long modified = split.modifiedTiles.cardinality(); if (params.timing) { LOGGER.log(Level.INFO, "Initial reading and datastructure setup took {0} ms", time); } if (params.verbose) { LOGGER.log(Level.INFO, "We have {0} modified tiles to store.", modified); } if (params.nodeLimit > 0) { split.optimize(params.nodeLimit); } time = System.currentTimeMillis(); split.store(params.outputBase, params.metadata, params.mbTiles); time = System.currentTimeMillis() - time; if (params.timing) { LOGGER.log(Level.INFO, "Saving tiles took {0} ms", time); long overall = System.currentTimeMillis() - startup; LOGGER.log(Level.INFO, "Overall runtime: {0} ms", overall); LOGGER.log(Level.INFO, " == {0} min", (overall / 1000 / 60)); } if (params.verbose) { LOGGER.log(Level.INFO, 
"Load:"); LOGGER.log(Level.INFO, "Nodes : {0}", split.nmap.getLoad()); LOGGER.log(Level.INFO, "Ways : {0}", split.wmap.getLoad()); LOGGER.log(Level.INFO, "Relations: {0}", split.rmap.getLoad()); LOGGER.log(Level.INFO, "MissHitRatio:"); LOGGER.log(Level.INFO, "Nodes : {0}", nratio); LOGGER.log(Level.INFO, "Ways : {0}", wratio); LOGGER.log(Level.INFO, "Relations: {0}", rratio); } return split.latestDate; } /** * Main class (what else?) * * @param args command line arguments * @throws IOException if IO failed * @throws InterruptedException if a Thread was interrupted */ public static void main(String[] args) throws IOException, InterruptedException { // set up logging LogManager.getLogManager().reset(); SimpleFormatter fmt = new SimpleFormatter(); Handler stdoutHandler = new FlushStreamHandler(System.out, fmt); // NOSONAR stdoutHandler.setLevel(Level.INFO); LOGGER.addHandler(stdoutHandler); Handler stderrHandler = new FlushStreamHandler(System.err, fmt); // NOSONAR stderrHandler.setLevel(Level.WARNING); LOGGER.addHandler(stderrHandler); // parse command line parameters CommandLineParams params; try { params = new CommandLineParams(args, LOGGER); } catch (IllegalArgumentException e) { return; } // Date-setup as fall-back option DateFormat df = DateFormat.getDateTimeInstance(); Date appointmentDate = new Date(-1); if (params.dateFile == null && params.verbose) { LOGGER.log(Level.INFO, "No datefile given. Writing all available tiles."); } else if (params.dateFile != null) { if (params.dateFile.exists()) { try (DataInputStream dis = new DataInputStream(new FileInputStream(params.dateFile))) { String line = dis.readUTF(); if (line != null) { try { appointmentDate = df.parse(line); } catch (java.text.ParseException pe) { if (params.verbose) { LOGGER.log(Level.INFO, "Could not parse datefile."); } } } } } else if (params.verbose) { LOGGER.log(Level.INFO, "Datefile does not exist, writing all tiles"); } } if (params.verbose) { LOGGER.log(Level.INFO, "Reading: {0}", params.inputFile); LOGGER.log(Level.INFO, "Writing: {0}", params.outputBase); } // Actually run the splitter... Date latest = run(params, appointmentDate); // NOSONAR if (params.verbose) { LOGGER.log(Level.INFO, "Last changes to the map had been done on {0}", df.format(latest)); } if (params.dateFile != null) { try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(params.dateFile));) { dos.writeUTF(df.format(latest)); } } } }
src/main/java/dev/osm/mapsplit/MapSplit.java
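The MapSplit code above addresses tiles through a single packed int: the tile's x number is shifted left by Const.MAX_ZOOM and OR-ed with the y number, and the two components are recovered with an unsigned right shift and a mask against Const.MAX_TILE_NUMBER. The standalone sketch below mirrors that arithmetic plus the mapToNewTile zoom-out step. The actual values of Const.MAX_ZOOM and Const.MAX_TILE_NUMBER are not part of this excerpt, so the 16-bit shift, the 0xFFFF mask, the class name and the sample numbers are illustrative assumptions only.

// Minimal sketch of the packed tile index used by MapSplit above.
// ASSUMPTION: Const.MAX_ZOOM and Const.MAX_TILE_NUMBER are not visible in this
// excerpt; 16 and 0xFFFF are stand-ins with the same structure (mask = (1 << shift) - 1).
public class PackedTileSketch {

    static final int MAX_ZOOM = 16;            // assumed shift width
    static final int MAX_TILE_NUMBER = 0xFFFF; // assumed mask

    /** Pack tile x/y numbers into one int, as tx << MAX_ZOOM | ty in MapSplit. */
    static int pack(int tx, int ty) {
        return tx << MAX_ZOOM | ty;
    }

    /** Recover x, mirroring idx >>> Const.MAX_ZOOM. */
    static int tileX(int idx) {
        return idx >>> MAX_ZOOM;
    }

    /** Recover y, mirroring idx & Const.MAX_TILE_NUMBER. */
    static int tileY(int idx) {
        return idx & MAX_TILE_NUMBER;
    }

    /** Same arithmetic as MapSplit.mapToNewTile: drop (baseZoom - newTileZoom) low bits of x and y. */
    static int mapToNewTile(int idx, int baseZoom, int newTileZoom) {
        int xNew = tileX(idx) >> (baseZoom - newTileZoom);
        int yNew = tileY(idx) >> (baseZoom - newTileZoom);
        return pack(xNew, yNew);
    }

    public static void main(String[] args) {
        int idx = pack(34567, 22891);
        System.out.println(tileX(idx) + "/" + tileY(idx));    // prints 34567/22891
        System.out.println(tileX(mapToNewTile(idx, 16, 13))); // prints 4320 (34567 >> 3)
    }
}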
package dev.osm.mapsplit; /* * Mapsplit - A simple but fast tile splitter for large OSM data * * Written in 2011 by Peda ([email protected]) * * To the extent possible under law, the author(s) have dedicated all copyright and related and neighboring rights to * this software to the public domain worldwide. This software is distributed without any warranty. * * You should have received a copy of the CC0 Public Domain Dedication along with this software. If not, see * <http://creativecommons.org/publicdomain/zero/1.0/>. */ import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.sql.SQLException; import java.text.DateFormat; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.openstreetmap.osmosis.core.container.v0_6.BoundContainer; import org.openstreetmap.osmosis.core.container.v0_6.EntityContainer; import org.openstreetmap.osmosis.core.container.v0_6.NodeContainer; import org.openstreetmap.osmosis.core.container.v0_6.RelationContainer; import org.openstreetmap.osmosis.core.container.v0_6.WayContainer; import org.openstreetmap.osmosis.core.domain.v0_6.Bound; import org.openstreetmap.osmosis.core.domain.v0_6.Entity; import org.openstreetmap.osmosis.core.domain.v0_6.Node; import org.openstreetmap.osmosis.core.domain.v0_6.Relation; import org.openstreetmap.osmosis.core.domain.v0_6.RelationMember; import org.openstreetmap.osmosis.core.domain.v0_6.Way; import org.openstreetmap.osmosis.core.domain.v0_6.WayNode; import org.openstreetmap.osmosis.core.task.v0_6.RunnableSource; import org.openstreetmap.osmosis.core.task.v0_6.Sink; import org.openstreetmap.osmosis.osmbinary.file.BlockOutputStream; import ch.poole.geo.mbtiles4j.MBTilesWriteException; import ch.poole.geo.mbtiles4j.MBTilesWriter; import ch.poole.geo.mbtiles4j.model.MetadataEntry; import crosby.binary.osmosis.OsmosisReader; import crosby.binary.osmosis.OsmosisSerializer; public class MapSplit { private static final String MAPSPLIT_TAG = "mapsplit"; private static final String PBF_EXT = ".osm.pbf"; private static final List<String> KNOWN_PBF_EXTS = List.of(".pbf", PBF_EXT); private static final Logger LOGGER = Logger.getLogger(MapSplit.class.getName()); private static final int MAX_ZOOM_OUT_DIFF = 5; private final CommandLineParams params; // all data after this appointment date is considered new or modified private Date appointmentDate; private Date latestDate = new Date(0); // internal store to check if reading the file worked private boolean complete = false; // the hashmap for all nodes in the osm map private final OsmMap nmap; // the hashmap for all ways in the osm map private final OsmMap wmap; // the hashmap for all relations in the osm map private final OsmMap rmap; /** * ways which are members in 
a relation, and whose nodes might therefore need to be added to extra tiles in a second * run. * * If we do not want complete relations, this field is null. But we may want some or all relations to be complete โ€“ * that is, we want their way members, and all nodes of these way members(!), to be part of all tiles the relation * itself is in. Because we do not store a way's nodes, this requires a second read through the input file. */ private Set<Long> relationMemberWayIds = null; // a bitset telling the algorithm which tiles need to be re-renderd private final UnsignedSparseBitSet modifiedTiles = new UnsignedSparseBitSet(); private final Map<Integer, UnsignedSparseBitSet> optimizedModifiedTiles = new HashMap<>(); // the serializer (OSM writers) for any modified tile private Map<Integer, OsmosisSerializer> outFiles; // output for mbtiles private Map<Integer, ByteArrayOutputStream> outBlobs; // new zoom levels for tiles during optimization private final Map<Integer, Byte> zoomMap = new HashMap<>(); // relations with potential forward references private final Set<Relation> postProcessRelations = new HashSet<>(); class DataFormatException extends RuntimeException { private static final long serialVersionUID = 1L; /** * Construct a new exception indicating data format errors * * @param message the message */ public DataFormatException(@NotNull String message) { super(message); } } /** * Construct a new MapSplit instance * * @param params parameters from the command line * @param appointmentDate only add changes from after this date (doesn't really work) */ public MapSplit(CommandLineParams params, Date appointmentDate) { this.params = params; this.appointmentDate = appointmentDate; if (params.mapSizes != null) { nmap = new HeapMap(params.mapSizes[0]); wmap = new HeapMap(params.mapSizes[1]); rmap = new HeapMap(params.mapSizes[2]); } else { nmap = new ArrayMap(params.maxIds[0]); wmap = new ArrayMap(params.maxIds[1]); rmap = new ArrayMap(params.maxIds[2]); } if (params.completeRelations || params.completeAreas) { relationMemberWayIds = new HashSet<>(); } optimizedModifiedTiles.put(params.zoom, modifiedTiles); } /** * Calculate the longitude for a tile * * @param x the x number for the tile * @return the longitude */ private double tile2lon(int x) { return (x / Math.pow(2.0, params.zoom)) * 360.0 - 180.0; } /** * Calculate the latitude for a tile * * @param y the y number for the tile * @return the latitude */ private double tile2lat(int y) { double n = Math.PI - 2.0 * Math.PI * y / Math.pow(2, params.zoom); return (180.0 / Math.PI * Math.atan(0.5 * (Math.pow(Math.E, n) - Math.pow(Math.E, -n)))); } /** * Calculate tile X number for a given longitude * * @param lon the longitude * @return the tile X number */ private int lon2tileX(double lon) { int xtile = (int) Math.floor((lon + 180) / 360 * (1 << params.zoom)); if (xtile < 0) { return 0; } else if (xtile >= (1 << params.zoom)) { return ((1 << params.zoom) - 1); } else { return xtile; } } /** * Calculate tile Y number for a given latitude * * @param lat the latitude * @return the tile y number */ private int lat2tileY(double lat) { int ytile = (int) Math.floor((1 - Math.log(Math.tan(Math.toRadians(lat)) + 1 / Math.cos(Math.toRadians(lat))) / Math.PI) / 2 * (1 << params.zoom)); if (ytile < 0) { return 0; } else if (ytile >= (1 << params.zoom)) { return ((1 << params.zoom) - 1); } else { return ytile; } } /** * Calculate the Bound for the given tile * * @param tileX tile X number * @param tileY tile Y number * @return a Bound object (a bound box for 
the tile) */ public Bound getBound(int tileX, int tileY) { double l = tile2lon(tileX); double r = tile2lon(tileX + 1); double t = tile2lat(tileY); double b = tile2lat(tileY + 1); double dx = r - l; double dy = b - t; l = Math.max(l - params.border * dx, Const.MIN_LON); r = Math.min(r + params.border * dx, Const.MAX_LON); t = Math.min(t - params.border * dy, Const.MAX_LAT); b = Math.max(b + params.border * dy, Const.MIN_LAT); return new Bound(r, l, t, b, MAPSPLIT_TAG); } /** * Fill out holes * * @param tiles the current tiles */ private void checkAndFill(@NotNull Collection<Long> tiles) { int minX = Integer.MAX_VALUE; int minY = Integer.MAX_VALUE; int maxX = Integer.MIN_VALUE; int maxY = Integer.MIN_VALUE; // determine the min/max tile nrs for (long tile : tiles) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); minX = Math.min(minX, tx); minY = Math.min(minY, ty); maxX = Math.max(maxX, tx); maxY = Math.max(maxY, ty); } // enlarge min/max to have a border and to cope with possible neighbour tiles minX -= 2; minY -= 2; maxX += 2; maxY += 2; int sizeX = maxX - minX + 1; int sizeY = maxY - minY + 1; // fill the helperSet which marks any set tile BitSet helperSet = new BitSet(); for (long tile : tiles) { int tx = nmap.tileX(tile) - minX; int ty = nmap.tileY(tile) - minY; int neighbour = nmap.neighbour(tile); helperSet.set(tx + ty * sizeX); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { helperSet.set(tx + 1 + ty * sizeX); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { helperSet.set(tx + (ty + 1) * sizeX); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { helperSet.set(tx + 1 + (ty + 1) * sizeX); } } // start with tile 1,1 and fill region... Deque<Integer> stack = new ArrayDeque<>(); stack.push(1 + 1 * sizeX); // fill all tiles that are reachable by a 4-neighbourhood while (!stack.isEmpty()) { int val = stack.pop(); boolean isSet = helperSet.get(val); helperSet.set(val); if (val >= sizeX * sizeY) { continue; } int ty = val / sizeX; int tx = val % sizeX; if ((tx == 0) || (ty == 0) || (ty >= sizeY)) { continue; } if (!isSet) { stack.push(tx + 1 + ty * sizeX); stack.push(tx - 1 + ty * sizeX); stack.push(tx + (ty + 1) * sizeX); stack.push(tx + (ty - 1) * sizeX); } } // now check if there are not-set bits left (i.e. 
holes in tiles) int idx = -1; while (true) { idx = helperSet.nextClearBit(idx + 1); if (idx >= sizeX * sizeY) { break; } int tx = idx % sizeX; int ty = idx / sizeX; if ((tx == 0) || (ty == 0)) { continue; } tx += minX; ty += minY; // TODO: make this a bit nicer by delegating the id-generation to the map code int c = tx << Const.MAX_ZOOM | ty; tiles.add(((long) c) << AbstractOsmMap.TILE_Y_SHIFT); modifiedTiles.set(c); } } /** * calculate the lon-offset for the given border size * * @param lon the longitude * @return the offset */ private double deltaX(double lon) { int tx = lon2tileX(lon); double x1 = tile2lon(tx); double x2 = tile2lon(tx + 1); return params.border * (x2 - x1); } /** * calculate the lat-offset for the given border size * * @param lat the latitude * @return the offset */ private double deltaY(double lat) { int ty = lat2tileY(lat); double y1 = tile2lat(ty); double y2 = tile2lat(ty + 1); return params.border * (y2 - y1); } /** * Add tile and neighbours to modifiedTiles * * @param tx tile x number * @param ty tile y number * @param neighbour bit map for neighbour tiles */ private void setModifiedTiles(int tx, int ty, int neighbour) { modifiedTiles.set(tx << Const.MAX_ZOOM | ty); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { modifiedTiles.set((tx + 1) << Const.MAX_ZOOM | ty); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { modifiedTiles.set(tx << Const.MAX_ZOOM | (ty + 1)); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { modifiedTiles.set((tx + 1) << Const.MAX_ZOOM | (ty + 1)); } } /** * Add a Node * * @param n the Node * @param lat latitude in WGS84 coords * @param lon longitude in WGS84 coords */ private void addNodeToMap(Node n, double lat, double lon) { int tileX = lon2tileX(lon); int tileY = lat2tileY(lat); int neighbour = OsmMap.NEIGHBOURS_NONE; // check and add border if needed double dx = deltaX(lon); if (lon2tileX(lon + dx) > tileX) { neighbour = OsmMap.NEIGHBOURS_EAST; } else if (lon2tileX(lon - dx) < tileX) { tileX--; neighbour = OsmMap.NEIGHBOURS_EAST; } double dy = deltaY(lat); if (lat2tileY(lat + dy) > tileY) { neighbour += OsmMap.NEIGHBOURS_SOUTH; } else if (lat2tileY(lat - dy) < tileY) { tileY--; neighbour += OsmMap.NEIGHBOURS_SOUTH; } // mark current tile (and neighbours) to be re-rendered if (n.getTimestamp().after(appointmentDate)) { setModifiedTiles(tileX, tileY, neighbour); } // mark the latest changes made to this map if (n.getTimestamp().after(latestDate)) { latestDate = n.getTimestamp(); } nmap.put(n.getId(), tileX, tileY, neighbour); } /** * Add a Way * * @param way the Way */ private void addWayToMap(@NotNull Way way) { boolean modified = way.getTimestamp().after(appointmentDate); Set<Long> tileList = new TreeSet<>(); // mark the latest changes made to this map if (way.getTimestamp().after(latestDate)) { latestDate = way.getTimestamp(); } List<Long> tiles = new ArrayList<>(); for (WayNode wayNode : way.getWayNodes()) { // get tileNrs for given node long tile = nmap.get(wayNode.getNodeId()); // don't ignore missing nodes if (tile == 0) { if (params.verbose) { LOGGER.log(Level.INFO, "way {0} missing node {1}", new Object[] { way.getId(), wayNode.getNodeId() }); } return; } tiles.add(tile); } for (long tile : tiles) { // mark tiles (and possible neighbours) as modified if (modified) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); int neighbour = nmap.neighbour(tile); setModifiedTiles(tx, ty, neighbour); } tileList.add(tile); } // TODO check/verify if 8 tiles is ok or if there might be corner-cases with only 4 tiles // with more than 8 (or 
4?!) tiles in the list we might have a "hole" if (tileList.size() >= 8) { checkAndFill(tileList); } // bootstrap a tilepos for the way long id = way.getWayNodes().get(0).getNodeId(); long val = nmap.get(id); int tx = nmap.tileX(val); int ty = nmap.tileY(val); // put way into map with a "random" base tile wmap.put(way.getId(), tx, ty, OsmMap.NEIGHBOURS_NONE); // update map so that the way knows which tiles it belongs to wmap.update(way.getId(), tileList); for (WayNode wayNode : way.getWayNodes()) { // update map so that the node knows about any additional // tile it has to be stored in nmap.update(wayNode.getNodeId(), tileList); } } /** * Iterate over the way nodes and add tileList to the list of tiles they are supposed to be in * * @param way the Way we are processing * @param tileList the List of tiles, encoded with {@link TileCoord} */ private void addExtraWayToMap(@NotNull Way way, @NotNull Collection<Integer> tileList) { for (WayNode wayNode : way.getWayNodes()) { // update map so that the node knows about any additional // tile it has to be stored in nmap.updateInt(wayNode.getNodeId(), tileList); } } /** * Add a Relation * * @param r the Relation */ private void addRelationToMap(@NotNull Relation r) { boolean modified = r.getTimestamp().after(appointmentDate); Collection<Long> tileList = new TreeSet<>(); boolean nodeWarned = false; // suppress multiple warnings about missing Nodes boolean wayWarned = false; // suppress multiple warnings about missing Ways boolean relationWarned = false; // suppress multiple warnings about missing Relations if (r.getTimestamp().after(latestDate)) { latestDate = r.getTimestamp(); } for (RelationMember m : r.getMembers()) { switch (m.getMemberType()) { case Node: long tile = nmap.get(m.getMemberId()); // The referenced node is not in our data set if (tile == 0) { if (params.verbose && !nodeWarned) { LOGGER.log(Level.INFO, "Non-complete Relation {0} (missing a node)", r.getId()); nodeWarned = true; } continue; } // mark tiles as modified if (modified) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); int neighbour = nmap.neighbour(tile); setModifiedTiles(tx, ty, neighbour); } tileList.add(tile); break; case Way: List<Integer> list = wmap.getAllTiles(m.getMemberId()); // The referenced way is not in our data set if (list == null) { if (params.verbose && !wayWarned) { LOGGER.log(Level.INFO, "Non-complete Relation {0} (missing a way)", r.getId()); wayWarned = true; } continue; } if (modified) { for (Integer i : list) { modifiedTiles.set(i); } } // TODO: make this a bit more generic / nicer code :/ for (Integer i : list) { tileList.add(((long) i) << AbstractOsmMap.TILE_Y_SHIFT); } break; case Relation: list = rmap.getAllTiles(m.getMemberId()); // The referenced relation is not in our data set if (list == null) { if (params.verbose && !relationWarned) { LOGGER.log(Level.INFO, "Non-complete Relation {0} (missing a relation)", r.getId()); relationWarned = true; } postProcessRelations.add(r); continue; } if (modified) { for (Integer i : list) { modifiedTiles.set(i); } } for (Integer i : list) { tileList.add(((long) i) << HeapMap.TILE_Y_SHIFT); } break; default: LOGGER.log(Level.WARNING, "Unknown member type {0}", m.getMemberType()); } } // Just in case, this can happen due to silly input data :'( if (tileList.isEmpty()) { LOGGER.log(Level.WARNING, "Ignoring relation with no elements in tiles"); return; } // no need to fill tile list here as that will have already happened for any element with geometry long val = tileList.iterator().next(); int tx = 
rmap.tileX(val); int ty = rmap.tileY(val); // put relation into map with a "random" base tile rmap.put(r.getId(), tx, ty, OsmMap.NEIGHBOURS_NONE); // update map so that the relation knows in which tiles it is needed rmap.update(r.getId(), tileList); if (params.completeRelations || (params.completeAreas && hasTag(r, "type", "multipolygon"))) { // only add members to all the tiles if the appropriate option is enabled for (RelationMember m : r.getMembers()) { switch (m.getMemberType()) { case Node: nmap.update(m.getMemberId(), tileList); break; case Way: wmap.update(m.getMemberId(), tileList); relationMemberWayIds.add(m.getMemberId()); break; case Relation: rmap.update(m.getMemberId(), tileList); break; case Bound: break; default: LOGGER.log(Level.WARNING, "Unknown member type {0}", m.getMemberType()); } } } } /** * Check if an Entity has a tag * * @param e the Entity to inpsect * @param key tag key * @param value tag value * @return true if e is tagged with the tag */ private static boolean hasTag(@NotNull Entity e, @Nullable String key, @Nullable String value) { return e.getTags().stream().anyMatch(tag -> tag.getKey().equals(key) && tag.getValue().equals(value)); } long nCount = 0; long wCount = 0; long rCount = 0; /** * Setup the OSM object to tiles mappings * * @throws IOException if reading the input caused an issue * @throws InterruptedException if a Thread was interrupted */ public void setup() throws IOException, InterruptedException { RunnableSource reader = new OsmosisReader(new FileInputStream(params.inputFile)); reader.setSink(new Sink() { @Override public void complete() { complete = true; } /** * Throw an exception if the metadata flag is set but we are reading data without any * * @param e the OSM object to check */ void checkMetadata(@NotNull Entity e) { if (params.metadata && (e.getVersion() == -1)) { // this doesn't seem to be really documented throw new DataFormatException(String.format("%s %d is missing a valid version and metadata flag is set", e.getType(), e.getId())); } } @Override public void process(EntityContainer ec) { if (ec instanceof NodeContainer) { Node n = ((NodeContainer) ec).getEntity(); checkMetadata(n); addNodeToMap(n, n.getLatitude(), n.getLongitude()); if (params.verbose) { nCount++; if ((nCount % (nmap.getCapacity() / 20)) == 0) { LOGGER.log(Level.INFO, "{0} nodes processed", nCount); } } } else if (ec instanceof WayContainer) { Way w = ((WayContainer) ec).getEntity(); checkMetadata(w); addWayToMap(w); if (params.verbose) { wCount++; if ((wCount % (wmap.getCapacity() / 20)) == 0) { LOGGER.log(Level.INFO, "{0} ways processed", wCount); } } } else if (ec instanceof RelationContainer) { Relation r = ((RelationContainer) ec).getEntity(); checkMetadata(r); addRelationToMap(r); if (params.verbose) { rCount++; if ((rCount % (rmap.getCapacity() / 20)) == 0) { LOGGER.log(Level.INFO, "{0} relations processed", rCount); } } } else if (ec instanceof BoundContainer) { // nothing todo, we ignore bound tags } else { LOGGER.log(Level.WARNING, "Unknown Element while reading"); LOGGER.log(Level.WARNING, ec.toString()); LOGGER.log(Level.WARNING, ec.getEntity().toString()); } } @Override public void initialize(Map<String, Object> metaData) { // not used } @Override public void close() { // not used } }); if (params.verbose) { LOGGER.log(Level.INFO, "Initial pass started"); } Thread readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch (InterruptedException e) { // NOSONAR LOGGER.log(Level.WARNING, 
"readerThread interupted {0}", e.getMessage()); throw e; } } if (!complete) { throw new IOException("Could not read file fully"); } if (params.verbose) { LOGGER.log(Level.INFO, "We have read:\n{0} nodes\n{1} ways\n{2} relations", new Object[] { nCount, wCount, rCount }); } if (!postProcessRelations.isEmpty()) { int preSize = postProcessRelations.size(); int postSize = preSize; if (params.verbose) { LOGGER.log(Level.INFO, "Post processing {0} relations with forward references", new Object[] { preSize }); } do { preSize = postSize; List<Relation> temp = new ArrayList<>(postProcessRelations); postProcessRelations.clear(); for (Relation r : temp) { addRelationToMap(r); } postSize = postProcessRelations.size(); if (params.verbose) { LOGGER.log(Level.INFO, "{0} incomplete relations left", new Object[] { postSize }); } } while (postSize < preSize); } // Second run if we are in complete-relation-mode if (relationMemberWayIds != null) { complete = false; reader = new OsmosisReader(new FileInputStream(params.inputFile)); reader.setSink(new Sink() { @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { if (ec instanceof WayContainer) { Way w = ((WayContainer) ec).getEntity(); if (relationMemberWayIds.contains(w.getId())) { List<Integer> tileList = wmap.getAllTiles(w.getId()); addExtraWayToMap(w, tileList); } } } @Override public void initialize(Map<String, Object> metaData) { // not used } @Override public void close() { // not used } }); readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch (InterruptedException e) { // NOSONAR LOGGER.log(Level.WARNING, "readerThread interupted {0}", e.getMessage()); throw e; } } if (!complete) { // NOSONAR throw new IOException("Could not read file fully in second run"); } } } /** * Optimize the tile stack * * @param nodeLimit the minimum number of Nodes a tile should contain * */ private void optimize(final int nodeLimit) { if (params.verbose) { LOGGER.log(Level.INFO, "Optimizing ..."); } long statsStart = System.currentTimeMillis(); // count Node tile use // at high zoom levels this will contains // lots of Nodes that are in more than // one tile Map<Integer, Integer> stats = new HashMap<>(); nmap.keys().forEach((long k) -> { List<Integer> tiles = nmap.getAllTiles(k); if (tiles != null) { for (Integer t : tiles) { Integer count = stats.get(t); if (count != null) { count++; stats.put(t, count); } else { stats.put(t, 1); } } } else { LOGGER.log(Level.INFO, "tiles null for {0}", k); } }); long nodeCount = 0; List<Integer> keys = new ArrayList<>(stats.keySet()); Collections.sort(keys); for (Integer key : keys) { int value = stats.get(key); nodeCount += value; if (!zoomMap.containsKey(key)) { // not mapped if (value < nodeLimit) { CountResult prevResult = null; for (int z = 1; z < MAX_ZOOM_OUT_DIFF; z++) { int newZoom = params.zoom - z; CountResult result = getCounts(key, z, stats); if (result.total < 4 * nodeLimit) { if (result.total > nodeLimit || z == (MAX_ZOOM_OUT_DIFF - 1)) { for (int i = 0; i < result.keys.length; i++) { if (result.counts[i] != null) { zoomMap.put(result.keys[i], (byte) newZoom); } } break; // found optimal size } prevResult = result; // store this and try next zoom } else { if (prevResult != null) { for (int i = 0; i < prevResult.keys.length; i++) { if (prevResult.counts[i] != null) { zoomMap.put(prevResult.keys[i], (byte) (newZoom + 1)); } } } break; // last iteration was better } } } } } for (Entry<Integer, Byte> 
optimzedTile : zoomMap.entrySet()) { int idx = optimzedTile.getKey(); int newTileZoom = optimzedTile.getValue(); modifiedTiles.clear(idx); idx = mapToNewTile(idx, newTileZoom); UnsignedSparseBitSet tileSet = optimizedModifiedTiles.get(newTileZoom); if (tileSet == null) { tileSet = new UnsignedSparseBitSet(); optimizedModifiedTiles.put(newTileZoom, tileSet); } tileSet.set(idx); } if (params.verbose) { LOGGER.log(Level.INFO, "Tiles {0} avg node count {1} merged tiles {2}", new Object[] { stats.size(), nodeCount / stats.size(), zoomMap.size() }); LOGGER.log(Level.INFO, "Stats took {0} s", (System.currentTimeMillis() - statsStart) / 1000); } } class CountResult { int total; int[] keys; Integer[] counts; } /** * Get usage stats for zoomed out tiles * * @param idx the original tile index * @param zoomDiff how many levels to zoom out * @param stats a map containing the per tile stats * @return a CountResult object */ CountResult getCounts(int idx, int zoomDiff, @NotNull Map<Integer, Integer> stats) { // determine the counts for the other tiles in the zoomed out tile int x0 = ((idx >>> Const.MAX_ZOOM) >> zoomDiff) << zoomDiff; int y0 = ((idx & (int) Const.MAX_TILE_NUMBER) >> zoomDiff) << zoomDiff; int side = 2 << (zoomDiff - 1); int[] keys = new int[side * side]; for (int i = 0; i < side; i++) { for (int j = 0; j < side; j++) { keys[i * side + j] = ((x0 + i) << Const.MAX_ZOOM) | (y0 + j); } } Integer[] counts = new Integer[keys.length]; int total = 0; for (int i = 0; i < keys.length; i++) { counts[i] = stats.get(keys[i]); if (counts[i] != null) { total += counts[i]; } } CountResult result = new CountResult(); result.total = total; result.keys = keys; result.counts = counts; return result; } /** * Taking a packed tile id, return the tile it is in on a lower zoom level * * @param idx the packed tile id * @param newTileZoom the new zoom level (less than the base zoom) * @return packed tile id at newTileZoom */ private int mapToNewTile(int idx, int newTileZoom) { int xNew = (idx >>> Const.MAX_ZOOM) >> (params.zoom - newTileZoom); int yNew = (idx & (int) Const.MAX_TILE_NUMBER) >> (params.zoom - newTileZoom); return xNew << Const.MAX_ZOOM | yNew; } /** * Check if the coordinates are inside a polygon * * @param x longitude * @param y latitude * @param polygon the polygon * @return true is inside */ private boolean isInside(double x, double y, @NotNull double[] polygon) { boolean in = false; int lines = polygon.length / 2; for (int i = 0, j = lines - 1; i < lines; j = i++) { if (((polygon[2 * i + 1] > y) != (polygon[2 * j + 1] > y)) && (x < (polygon[2 * j] - polygon[2 * i]) * (y - polygon[2 * i + 1]) / (polygon[2 * j + 1] - polygon[2 * i + 1]) + polygon[2 * i])) { in = !in; } } return in; } /** * Check if the corners of a tile are inside a polygon * * @param tx tile x number * @param ty tile y number * @param polygon the polygon * @return true if a corner is inside */ private boolean isInside(int tx, int ty, @NotNull double[] polygon) { for (int u = 0; u < 2; u++) { for (int v = 0; v < 2; v++) { double x = tile2lon(tx + u); double y = tile2lat(ty + v); if (isInside(x, y, polygon)) { return true; } } } return false; } /** * Check if a tile intersects with / is covered by a polygon Note this only checks the corners of the tile so isn't * really correct and should be replaced by a suitable correct algorithm * * @param tx tile x number * @param ty tile y number * @param inside outer rings (the tile should be "inside") * @param outside inner rings (the tile should be "outside") * @return true if the tile 
intersects / is covered by the polygon */ private boolean isInside(int tx, int ty, @NotNull List<double[]> inside, @NotNull List<double[]> outside) { boolean in = false; for (double[] polygon : inside) { in = isInside(tx, ty, polygon); if (in) { break; } } if (!in) { return false; } for (double[] polygon : outside) { if (isInside(tx, ty, polygon)) { return false; } } return true; } /** * Remove all tiles that are not in the provided polygon * * @param polygonFile the path for a file containing the polygon * @throws IOException if reading fails */ public void clipPoly(@NotNull File polygonFile) throws IOException { List<double[]> inside = new ArrayList<>(); List<double[]> outside = new ArrayList<>(); try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(polygonFile)));) { /* String name = */ br.readLine(); // unused.. NOSONAR String poly = br.readLine(); while (!"END".equals(poly)) { int pos = 0; int size = 128; double[] data = new double[2 * size]; String coords = br.readLine(); while (!"END".equals(coords)) { coords = coords.trim(); int idx = coords.indexOf(' '); double lon = Double.parseDouble(coords.substring(0, idx)); double lat = Double.parseDouble(coords.substring(idx + 1)); // check if there's enough space to store if (pos >= size) { double[] tmp = new double[4 * size]; System.arraycopy(data, 0, tmp, 0, 2 * size); size *= 2; data = tmp; } // store data data[2 * pos] = lon; data[2 * pos + 1] = lat; pos++; coords = br.readLine(); } if (pos != size) { double[] tmp = new double[2 * pos]; System.arraycopy(data, 0, tmp, 0, 2 * pos); data = tmp; } if (poly.startsWith("!")) { outside.add(data); } else { inside.add(data); } // read next polygon, if there's any poly = br.readLine(); } } // now walk modifiedTiles and clear bits that are not inside polygon int idx = 0; while (true) { idx = modifiedTiles.nextSetBit(UnsignedSparseBitSet.inc(idx)); if (idx == -1) { break; } int tx = idx >>> Const.MAX_ZOOM; int ty = (int) (idx & Const.MAX_TILE_NUMBER); boolean in = isInside(tx, ty, inside, outside); if (!in) { modifiedTiles.clear(idx); } } } /** * Read the input file, process the OSM elements and write them out * * @param basename the basename for individual tile files or the name of a MBTiles format sqlite database * @param metadata write metadata (version, timestamp, etc) * @param mbTiles write to a MBTiles format sqlite database instead of writing individual tiles * @throws IOException if reading or creating the files has an issue * @throws InterruptedException if one of the Threads was interrupted */ public void store(@NotNull String basename, boolean metadata, boolean mbTiles) throws IOException, InterruptedException { MBTilesWriter w = null; if (mbTiles) { try { w = new MBTilesWriter(new File(basename)); w.getConnection().setAutoCommit(false); } catch (MBTilesWriteException | SQLException e1) { throw new IOException(e1); } } Bound bounds = null; int minZoom = params.zoom; for (Entry<Integer, UnsignedSparseBitSet> omt : optimizedModifiedTiles.entrySet()) { final UnsignedSparseBitSet tileSet = omt.getValue(); final int currentZoom = omt.getKey(); if (currentZoom < minZoom) { minZoom = currentZoom; } int ymax = 1 << currentZoom; // for conversion to TMS schema if (params.verbose) { LOGGER.log(Level.INFO, "Processing {0} tiles for zoom {1}", new Object[] { tileSet.cardinality(), currentZoom }); } int idx = -1; // start at -1 because this will be incremented before the first use // We might call this code several times if we have more tiles // to store than open 
files allowed while (true) { complete = false; outFiles = new HashMap<>(); if (mbTiles) { outBlobs = new HashMap<>(); } // Setup out-files... int count = 0; while (true) { idx = tileSet.nextSetBit(UnsignedSparseBitSet.inc(idx)); if (idx == -1) { // created all tiles for this zoom level break; } if (outFiles.get(idx) == null) { int tileX = idx >>> Const.MAX_ZOOM; int tileY = (int) (idx & Const.MAX_TILE_NUMBER); OutputStream target = null; if (mbTiles) { target = new ByteArrayOutputStream(); } else { String file; if (basename.contains("%x") && basename.contains("%y")) { file = basename.replace("%x", Integer.toString(tileX)).replace("%y", Integer.toString(tileY)).replace("%z", Integer.toString(currentZoom)); if (KNOWN_PBF_EXTS.stream().noneMatch(file::endsWith)) { file = file + PBF_EXT; } } else { file = basename + currentZoom + "/" + tileX + "_" + tileY + PBF_EXT; } File outputFile = new File(file); File parent = outputFile.getParentFile(); parent.mkdirs(); target = new FileOutputStream(file); } OsmosisSerializer serializer = new OsmosisSerializer(new BlockOutputStream(target)); serializer.setUseDense(true); serializer.configOmit(!metadata); // write out the bound for that tile Bound bound = getBound(tileX, tileY); BoundContainer bc = new BoundContainer(bound); serializer.process(bc); outFiles.put(idx, serializer); if (mbTiles) { outBlobs.put(idx, (ByteArrayOutputStream) target); } } if ((params.maxFiles != -1) && (++count >= params.maxFiles)) { break; } } // Now start writing output... RunnableSource reader = new OsmosisReader(new FileInputStream(params.inputFile)); class BoundSink implements Sink { Bound overallBounds = null; Set<Integer> mappedTiles = new HashSet<>(); /** * Get the overall bounds of the data * * @return a Bound object or null */ Bound getBounds() { return overallBounds; } @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { long id = ec.getEntity().getId(); Iterable<Integer> tiles; if (ec instanceof NodeContainer) { tiles = nmap.getAllTiles(id); } else if (ec instanceof WayContainer) { tiles = wmap.getAllTiles(id); } else if (ec instanceof RelationContainer) { tiles = rmap.getAllTiles(id); } else if (ec instanceof BoundContainer) { Bound bounds = ((BoundContainer) ec).getEntity(); if (overallBounds == null) { overallBounds = bounds; } else { overallBounds.union(bounds); } return; } else { LOGGER.log(Level.WARNING, "Unknown Element while reading"); LOGGER.log(Level.WARNING, "{0}", ec); LOGGER.log(Level.WARNING, "{0}", ec.getEntity()); return; } if (tiles == null) { // No tile where we could store the given entity into // This probably is a degenerated relation ;) return; } if (params.nodeLimit > 0) { // quite costly, and only relevant if tile optimization is on mappedTiles.clear(); for (int i : tiles) { // map original zoom tiles to optimized ones // and remove duplicates Byte newZoom = zoomMap.get(i); if (newZoom != null) { i = mapToNewTile(i, newZoom); } else { newZoom = (byte) params.zoom; } if (currentZoom == newZoom) { mappedTiles.add(i); } } tiles = mappedTiles; } for (int i : tiles) { if (tileSet.get(i)) { OsmosisSerializer ser = outFiles.get(i); if (ser != null) { ser.process(ec); } } } } @Override public void initialize(Map<String, Object> metaData) { // do nothing } @Override public void close() { // do nothing } } BoundSink sink = new BoundSink(); reader.setSink(sink); Thread readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch 
(InterruptedException e) { // NOSONAR LOGGER.log(Level.WARNING, "readerThread interupted {0}", e.getMessage()); throw e; } } if (!complete) { throw new IOException("Could not fully read file in storing run"); } // Finish and close files... for (Entry<Integer, OsmosisSerializer> entry : outFiles.entrySet()) { OsmosisSerializer ser = entry.getValue(); ser.complete(); ser.flush(); ser.close(); if (mbTiles) { int tileX = entry.getKey() >>> Const.MAX_ZOOM; int tileY = (int) (entry.getKey() & Const.MAX_TILE_NUMBER); int y = ymax - tileY - 1; // TMS scheme ByteArrayOutputStream blob = outBlobs.get(entry.getKey()); try { w.addTile(blob.toByteArray(), currentZoom, tileX, y); } catch (MBTilesWriteException e) { // NOSONAR LOGGER.log(Level.WARNING, "{0} z:{1} x:{2} y:{3}", new Object[] { e.getMessage(), currentZoom, tileX, tileY }); throw new IOException(e); } } } if (params.verbose) { LOGGER.log(Level.INFO, "Wrote {0} tiles, continuing with next block of tiles", outFiles.size()); } // remove mappings form this pass outFiles.clear(); if (mbTiles) { outBlobs.clear(); } if (idx == -1) { // written all tiles for this zoom level bounds = sink.getBounds(); break; } } } // Add MBTiles metadata parts if (mbTiles) { MetadataEntry ent = new MetadataEntry(); File file = new File(basename); ent.setTilesetName(file.getName()).setTilesetType(MetadataEntry.TileSetType.BASE_LAYER).setTilesetVersion(Const.MBT_VERSION) .setAttribution(Const.OSM_ATTRIBUTION).addCustomKeyValue("format", Const.MSF_MIME_TYPE) .addCustomKeyValue("minzoom", Integer.toString(minZoom)).addCustomKeyValue("maxzoom", Integer.toString(params.zoom)) .addCustomKeyValue("latest_date", Long.toString(latestDate.getTime())); if (bounds != null) { ent.setTilesetBounds(bounds.getLeft(), bounds.getBottom(), bounds.getRight(), bounds.getTop()); } else { ent.setTilesetBounds(Const.MIN_LON, -85, Const.MAX_LON, 85); } try { w.addMetadataEntry(ent); w.getConnection().commit(); } catch (MBTilesWriteException | SQLException e) { // NOSONAR throw new IOException(e); } w.close(); } } /** * Set up options from the command line and run the tiler * * @param params parameters from the command line * @param appointmentDate only add changes from after this date (doesn't really work) * * @return the "last changed" date * @throws InterruptedException if one of the Threads was interrupted * @throws IOException if IO went wrong */ private static Date run(CommandLineParams params, Date appointmentDate) throws IOException, InterruptedException { long startup = System.currentTimeMillis(); MapSplit split = new MapSplit(params, appointmentDate); long time = System.currentTimeMillis(); split.setup(); time = System.currentTimeMillis() - time; double nratio = split.nmap.getMissHitRatio(); double wratio = split.wmap.getMissHitRatio(); double rratio = split.rmap.getMissHitRatio(); if (params.polygonFile != null) { if (params.verbose) { LOGGER.log(Level.INFO, "Clip tiles with polygon given by \"{0}\"", params.polygonFile); } split.clipPoly(params.polygonFile); } long modified = split.modifiedTiles.cardinality(); if (params.timing) { LOGGER.log(Level.INFO, "Initial reading and datastructure setup took {0} ms", time); } if (params.verbose) { LOGGER.log(Level.INFO, "We have {0} modified tiles to store.", modified); } if (params.nodeLimit > 0) { split.optimize(params.nodeLimit); } time = System.currentTimeMillis(); split.store(params.outputBase, params.metadata, params.mbTiles); time = System.currentTimeMillis() - time; if (params.timing) { LOGGER.log(Level.INFO, "Saving tiles took {0} 
ms", time); long overall = System.currentTimeMillis() - startup; LOGGER.log(Level.INFO, "Overall runtime: {0} ms", overall); LOGGER.log(Level.INFO, " == {0} min", (overall / 1000 / 60)); } if (params.verbose) { LOGGER.log(Level.INFO, "Load:"); LOGGER.log(Level.INFO, "Nodes : {0}", split.nmap.getLoad()); LOGGER.log(Level.INFO, "Ways : {0}", split.wmap.getLoad()); LOGGER.log(Level.INFO, "Relations: {0}", split.rmap.getLoad()); LOGGER.log(Level.INFO, "MissHitRatio:"); LOGGER.log(Level.INFO, "Nodes : {0}", nratio); LOGGER.log(Level.INFO, "Ways : {0}", wratio); LOGGER.log(Level.INFO, "Relations: {0}", rratio); } return split.latestDate; } /** * Main class (what else?) * * @param args command line arguments * @throws IOException if IO failed * @throws InterruptedException if a Thread was interrupted */ public static void main(String[] args) throws IOException, InterruptedException { // set up logging LogManager.getLogManager().reset(); SimpleFormatter fmt = new SimpleFormatter(); Handler stdoutHandler = new FlushStreamHandler(System.out, fmt); // NOSONAR stdoutHandler.setLevel(Level.INFO); LOGGER.addHandler(stdoutHandler); Handler stderrHandler = new FlushStreamHandler(System.err, fmt); // NOSONAR stderrHandler.setLevel(Level.WARNING); LOGGER.addHandler(stderrHandler); // parse command line parameters CommandLineParams params; try { params = new CommandLineParams(args, LOGGER); } catch (IllegalArgumentException e) { return; } // Date-setup as fall-back option DateFormat df = DateFormat.getDateTimeInstance(); Date appointmentDate = new Date(-1); if (params.dateFile == null && params.verbose) { LOGGER.log(Level.INFO, "No datefile given. Writing all available tiles."); } else if (params.dateFile != null) { if (params.dateFile.exists()) { try (DataInputStream dis = new DataInputStream(new FileInputStream(params.dateFile))) { String line = dis.readUTF(); if (line != null) { try { appointmentDate = df.parse(line); } catch (java.text.ParseException pe) { if (params.verbose) { LOGGER.log(Level.INFO, "Could not parse datefile."); } } } } } else if (params.verbose) { LOGGER.log(Level.INFO, "Datefile does not exist, writing all tiles"); } } if (params.verbose) { LOGGER.log(Level.INFO, "Reading: {0}", params.inputFile); LOGGER.log(Level.INFO, "Writing: {0}", params.outputBase); } // Actually run the splitter... Date latest = run(params, appointmentDate); // NOSONAR if (params.verbose) { LOGGER.log(Level.INFO, "Last changes to the map had been done on {0}", df.format(latest)); } if (params.dateFile != null) { try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(params.dateFile));) { dos.writeUTF(df.format(latest)); } } } }
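Both file versions above use the same even-odd ray casting formula in isInside(double, double, double[]), with the polygon stored as a flat array of lon/lat pairs. The standalone sketch below repeats that formula against a toy 10x10 square so the convention is easy to check; the square and the sample points are made up for illustration.

// Even-odd ray casting test as used by MapSplit.isInside above; the polygon is a
// flat [lon0, lat0, lon1, lat1, ...] array. Toy data only.
public class PointInPolygonSketch {

    static boolean isInside(double x, double y, double[] polygon) {
        boolean in = false;
        int lines = polygon.length / 2;
        for (int i = 0, j = lines - 1; i < lines; j = i++) {
            if (((polygon[2 * i + 1] > y) != (polygon[2 * j + 1] > y))
                    && (x < (polygon[2 * j] - polygon[2 * i]) * (y - polygon[2 * i + 1])
                            / (polygon[2 * j + 1] - polygon[2 * i + 1]) + polygon[2 * i])) {
                in = !in;
            }
        }
        return in;
    }

    public static void main(String[] args) {
        double[] square = { 0, 0, 10, 0, 10, 10, 0, 10 }; // lon/lat pairs of a closed ring
        System.out.println(isInside(5, 5, square));  // true
        System.out.println(isInside(15, 5, square)); // false
    }
}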
Small code simplification
src/main/java/dev/osm/mapsplit/MapSplit.java
Small code simplification
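The change behind this "Small code simplification" commit is the extraction of the repeated reader-thread start/join/interrupt loop (still visible twice in the old contents above) into a helper that the new file calls as runThread(new Thread(reader)). The helper's actual definition sits outside this excerpt, so the sketch below is only a reconstruction of the duplicated logic it appears to replace, wrapped in a throwaway class so it compiles on its own.

import java.util.logging.Level;
import java.util.logging.Logger;

// Hedged reconstruction of the runThread(...) helper implied by the commit above;
// the real method body is not shown in this excerpt.
public class RunThreadSketch {

    private static final Logger LOGGER = Logger.getLogger(RunThreadSketch.class.getName());

    static void runThread(Thread readerThread) throws InterruptedException {
        readerThread.start();
        while (readerThread.isAlive()) {
            try {
                readerThread.join();
            } catch (InterruptedException e) {
                LOGGER.log(Level.WARNING, "reader thread interrupted {0}", e.getMessage());
                throw e;
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        runThread(new Thread(() -> System.out.println("reader running")));
    }
}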
Java
epl-1.0
73fb29bc7f308c675a3da29d6521faddfc08540f
0
qgears/opensource-utils,qgears/opensource-utils,qgears/opensource-utils,qgears/opensource-utils,qgears/opensource-utils
package hu.qgears.opengl.libinput;

import java.util.HashMap;

public enum ELibinputEventType {
    key(1),
    pointerMotion(400),
    pointerAbsolute(401),
    pointerButton(402);

    private int typeOrdinal;

    private static HashMap<Integer, ELibinputEventType> typeByOrdinal = new HashMap<Integer, ELibinputEventType>();

    private ELibinputEventType(int typeOrdinal) {
        this.typeOrdinal = typeOrdinal;
    }

    static {
        for (ELibinputEventType t : ELibinputEventType.values()) {
            typeByOrdinal.put(t.typeOrdinal, t);
        }
    }

    public static ELibinputEventType byOrdinal(int typeOrdinal) {
        return typeByOrdinal.get(typeOrdinal);
    }
}
commons/hu.qgears.opengl.commons/src/hu/qgears/opengl/libinput/ELibinputEventType.java
package hu.qgears.opengl.libinput;

import java.util.HashMap;

public enum ELibinputEventType {
    key(0),
    pointerMotion(400),
    pointerAbsolute(401),
    pointerButton(402);

    private int typeOrdinal;

    private static HashMap<Integer, ELibinputEventType> typeByOrdinal = new HashMap<Integer, ELibinputEventType>();

    private ELibinputEventType(int typeOrdinal) {
        this.typeOrdinal = typeOrdinal;
    }

    static {
        for (ELibinputEventType t : ELibinputEventType.values()) {
            typeByOrdinal.put(t.typeOrdinal, t);
        }
    }

    public static ELibinputEventType byOrdinal(int typeOrdinal) {
        return typeByOrdinal.get(typeOrdinal);
    }
}
enum matching with values in libinput_jni.cpp
commons/hu.qgears.opengl.commons/src/hu/qgears/opengl/libinput/ELibinputEventType.java
enum matching with values in libinput_jni.cpp
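The commit above changes the native code for key from 0 to 1 so the Java enum matches the event type values emitted by the project's libinput_jni.cpp, which is not included here. The sketch below is a hypothetical consumer of byOrdinal; it relies only on the constants visible in the enum and on the fact that byOrdinal returns null for an unknown ordinal.

import hu.qgears.opengl.libinput.ELibinputEventType;

// Hypothetical dispatch over ELibinputEventType.byOrdinal; the numeric inputs in
// main are examples, not values taken from libinput_jni.cpp.
public class LibinputEventDispatchSketch {

    static String describe(int nativeType) {
        ELibinputEventType type = ELibinputEventType.byOrdinal(nativeType);
        if (type == null) {
            return "unhandled native event type " + nativeType; // byOrdinal returns null for unknown ordinals
        }
        switch (type) {
            case key:
                return "keyboard key event";
            case pointerMotion:
                return "relative pointer motion";
            case pointerAbsolute:
                return "absolute pointer motion";
            case pointerButton:
                return "pointer button event";
            default:
                return type.name();
        }
    }

    public static void main(String[] args) {
        System.out.println(describe(1));   // keyboard key event (ordinal of key after this commit)
        System.out.println(describe(402)); // pointer button event
        System.out.println(describe(999)); // unhandled native event type 999
    }
}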
Java
mpl-2.0
d1a9a8ea8bf563abb068fe26b6af13276abcddee
0
foolchan2556/openmrs-core,WANeves/openmrs-core,MuhammadSafwan/Stop-Button-Ability,geoff-wasilwa/openmrs-core,vinayvenu/openmrs-core,donaldgavis/openmrs-core,preethi29/openmrs-core,rbtracker/openmrs-core,maekstr/openmrs-core,iLoop2/openmrs-core,sadhanvejella/openmrs,ldf92/openmrs-core,dlahn/openmrs-core,hoquangtruong/TestMylyn,spereverziev/openmrs-core,chethandeshpande/openmrs-core,lilo2k/openmrs-core,prisamuel/openmrs-core,aboutdata/openmrs-core,asifur77/openmrs,andyvand/OpenMRS,michaelhofer/openmrs-core,jvena1/openmrs-core,jvena1/openmrs-core,siddharthkhabia/openmrs-core,jembi/openmrs-core,joansmith/openmrs-core,kigsmtua/openmrs-core,michaelhofer/openmrs-core,maekstr/openmrs-core,milankarunarathne/openmrs-core,andyvand/OpenMRS,AbhijitParate/openmrs-core,maany/openmrs-core,trsorsimoII/openmrs-core,iLoop2/openmrs-core,lilo2k/openmrs-core,andyvand/OpenMRS,spereverziev/openmrs-core,alexei-grigoriev/openmrs-core,Openmrs-joel/openmrs-core,pselle/openmrs-core,pselle/openmrs-core,ldf92/openmrs-core,iLoop2/openmrs-core,aj-jaswanth/openmrs-core,asifur77/openmrs,kigsmtua/openmrs-core,sadhanvejella/openmrs,koskedk/openmrs-core,sravanthi17/openmrs-core,jembi/openmrs-core,alexwind26/openmrs-core,iLoop2/openmrs-core,Negatu/openmrs-core,AbhijitParate/openmrs-core,siddharthkhabia/openmrs-core,rbtracker/openmrs-core,siddharthkhabia/openmrs-core,Winbobob/openmrs-core,shiangree/openmrs-core,Ch3ck/openmrs-core,macorrales/openmrs-core,Negatu/openmrs-core,sadhanvejella/openmrs,Negatu/openmrs-core,prisamuel/openmrs-core,shiangree/openmrs-core,MitchellBot/openmrs-core,sadhanvejella/openmrs,maekstr/openmrs-core,aboutdata/openmrs-core,hoquangtruong/TestMylyn,koskedk/openmrs-core,WANeves/openmrs-core,AbhijitParate/openmrs-core,MitchellBot/openmrs-core,alexwind26/openmrs-core,Negatu/openmrs-core,lilo2k/openmrs-core,geoff-wasilwa/openmrs-core,dcmul/openmrs-core,michaelhofer/openmrs-core,pselle/openmrs-core,aj-jaswanth/openmrs-core,aboutdata/openmrs-core,aj-jaswanth/openmrs-core,naraink/openmrs-core,Winbobob/openmrs-core,joansmith/openmrs-core,aboutdata/openmrs-core,sadhanvejella/openmrs,ldf92/openmrs-core,jembi/openmrs-core,nilusi/Legacy-UI,donaldgavis/openmrs-core,naraink/openmrs-core,kabariyamilind/openMRSDEV,kabariyamilind/openMRSDEV,jcantu1988/openmrs-core,Ch3ck/openmrs-core,aj-jaswanth/openmrs-core,donaldgavis/openmrs-core,vinayvenu/openmrs-core,dlahn/openmrs-core,Winbobob/openmrs-core,kabariyamilind/openMRSDEV,ssmusoke/openmrs-core,jembi/openmrs-core,dcmul/openmrs-core,chethandeshpande/openmrs-core,ern2/openmrs-core,hoquangtruong/TestMylyn,dlahn/openmrs-core,WANeves/openmrs-core,pselle/openmrs-core,koskedk/openmrs-core,chethandeshpande/openmrs-core,rbtracker/openmrs-core,kigsmtua/openmrs-core,shiangree/openmrs-core,macorrales/openmrs-core,kckc/openmrs-core,MitchellBot/openmrs-core,nilusi/Legacy-UI,milankarunarathne/openmrs-core,jamesfeshner/openmrs-module,kristopherschmidt/openmrs-core,jcantu1988/openmrs-core,kristopherschmidt/openmrs-core,AbhijitParate/openmrs-core,jamesfeshner/openmrs-module,nilusi/Legacy-UI,alexei-grigoriev/openmrs-core,donaldgavis/openmrs-core,nilusi/Legacy-UI,ldf92/openmrs-core,foolchan2556/openmrs-core,naraink/openmrs-core,kristopherschmidt/openmrs-core,asifur77/openmrs,sintjuri/openmrs-core,sravanthi17/openmrs-core,naraink/openmrs-core,macorrales/openmrs-core,dcmul/openmrs-core,alexei-grigoriev/openmrs-core,lbl52001/openmrs-core,Ch3ck/openmrs-core,geoff-wasilwa/openmrs-core,maany/openmrs-core,michaelhofer/openmrs-core,preethi29/openmrs-core,milankarunarathne/openmrs-core,jamesfeshner/openm
rs-module,ssmusoke/openmrs-core,lilo2k/openmrs-core,prisamuel/openmrs-core,dcmul/openmrs-core,rbtracker/openmrs-core,sravanthi17/openmrs-core,alexwind26/openmrs-core,kristopherschmidt/openmrs-core,maany/openmrs-core,maany/openmrs-core,naraink/openmrs-core,vinayvenu/openmrs-core,lbl52001/openmrs-core,Winbobob/openmrs-core,AbhijitParate/openmrs-core,prisamuel/openmrs-core,dlahn/openmrs-core,dcmul/openmrs-core,aboutdata/openmrs-core,alexei-grigoriev/openmrs-core,andyvand/OpenMRS,Winbobob/openmrs-core,koskedk/openmrs-core,ern2/openmrs-core,andyvand/OpenMRS,rbtracker/openmrs-core,kckc/openmrs-core,maekstr/openmrs-core,MuhammadSafwan/Stop-Button-Ability,milankarunarathne/openmrs-core,kristopherschmidt/openmrs-core,koskedk/openmrs-core,asifur77/openmrs,aboutdata/openmrs-core,jamesfeshner/openmrs-module,ssmusoke/openmrs-core,hoquangtruong/TestMylyn,pselle/openmrs-core,shiangree/openmrs-core,lbl52001/openmrs-core,sintjuri/openmrs-core,spereverziev/openmrs-core,lilo2k/openmrs-core,MuhammadSafwan/Stop-Button-Ability,sravanthi17/openmrs-core,milankarunarathne/openmrs-core,nilusi/Legacy-UI,ern2/openmrs-core,macorrales/openmrs-core,lbl52001/openmrs-core,ssmusoke/openmrs-core,sintjuri/openmrs-core,MuhammadSafwan/Stop-Button-Ability,Negatu/openmrs-core,maekstr/openmrs-core,joansmith/openmrs-core,sadhanvejella/openmrs,michaelhofer/openmrs-core,foolchan2556/openmrs-core,pselle/openmrs-core,kckc/openmrs-core,ern2/openmrs-core,jcantu1988/openmrs-core,WANeves/openmrs-core,jamesfeshner/openmrs-module,asifur77/openmrs,jvena1/openmrs-core,shiangree/openmrs-core,alexwind26/openmrs-core,kckc/openmrs-core,kigsmtua/openmrs-core,foolchan2556/openmrs-core,iLoop2/openmrs-core,Ch3ck/openmrs-core,ern2/openmrs-core,WANeves/openmrs-core,AbhijitParate/openmrs-core,spereverziev/openmrs-core,MitchellBot/openmrs-core,kabariyamilind/openMRSDEV,alexwind26/openmrs-core,trsorsimoII/openmrs-core,prisamuel/openmrs-core,alexei-grigoriev/openmrs-core,jcantu1988/openmrs-core,alexei-grigoriev/openmrs-core,Openmrs-joel/openmrs-core,kckc/openmrs-core,preethi29/openmrs-core,dcmul/openmrs-core,geoff-wasilwa/openmrs-core,iLoop2/openmrs-core,Negatu/openmrs-core,koskedk/openmrs-core,Openmrs-joel/openmrs-core,milankarunarathne/openmrs-core,maekstr/openmrs-core,Openmrs-joel/openmrs-core,chethandeshpande/openmrs-core,joansmith/openmrs-core,preethi29/openmrs-core,donaldgavis/openmrs-core,kigsmtua/openmrs-core,ssmusoke/openmrs-core,foolchan2556/openmrs-core,jvena1/openmrs-core,trsorsimoII/openmrs-core,MuhammadSafwan/Stop-Button-Ability,lbl52001/openmrs-core,MitchellBot/openmrs-core,siddharthkhabia/openmrs-core,macorrales/openmrs-core,jembi/openmrs-core,andyvand/OpenMRS,vinayvenu/openmrs-core,vinayvenu/openmrs-core,hoquangtruong/TestMylyn,sintjuri/openmrs-core,ldf92/openmrs-core,kabariyamilind/openMRSDEV,lbl52001/openmrs-core,preethi29/openmrs-core,trsorsimoII/openmrs-core,siddharthkhabia/openmrs-core,nilusi/Legacy-UI,foolchan2556/openmrs-core,Openmrs-joel/openmrs-core,Ch3ck/openmrs-core,hoquangtruong/TestMylyn,kigsmtua/openmrs-core,WANeves/openmrs-core,sintjuri/openmrs-core,jembi/openmrs-core,MuhammadSafwan/Stop-Button-Ability,trsorsimoII/openmrs-core,kckc/openmrs-core,geoff-wasilwa/openmrs-core,spereverziev/openmrs-core,joansmith/openmrs-core,sravanthi17/openmrs-core,naraink/openmrs-core,chethandeshpande/openmrs-core,jcantu1988/openmrs-core,jvena1/openmrs-core,aj-jaswanth/openmrs-core,siddharthkhabia/openmrs-core,dlahn/openmrs-core,maany/openmrs-core,spereverziev/openmrs-core,sintjuri/openmrs-core,Winbobob/openmrs-core,shiangree/openmrs-core,prisamu
el/openmrs-core,lilo2k/openmrs-core
/** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.module; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.GlobalProperty; import org.openmrs.Privilege; import org.w3c.dom.Document; /** * Generic module class that openmrs manipulates * * @version 1.0 */ public final class Module { private Log log = LogFactory.getLog(this.getClass()); private String name; private String moduleId; private String packageName; private String description; private String author; private String version; private String updateURL; // should be a URL to an update.rdf file private String updateVersion = null; // version obtained from the remote update.rdf file private String downloadURL = null; // will only be populated when the remote file is newer than the current module private Activator activator; private ModuleActivator moduleActivator; private String activatorName; private String requireOpenmrsVersion; private String requireDatabaseVersion; private Map<String, String> requiredModulesMap; private Map<String, String> awareOfModulesMap; private List<AdvicePoint> advicePoints = new Vector<AdvicePoint>(); private IdentityHashMap<String, String> extensionNames = new IdentityHashMap<String, String>(); private List<Extension> extensions = new Vector<Extension>(); private Map<String, Properties> messages = new HashMap<String, Properties>(); private List<Privilege> privileges = new Vector<Privilege>(); private List<GlobalProperty> globalProperties = new Vector<GlobalProperty>(); private List<String> mappingFiles = new Vector<String>(); private Set<String> packagesWithMappedClasses = new HashSet<String>(); private Document config = null; private Document sqldiff = null; private Document log4j = null; private boolean mandatory = Boolean.FALSE; // keep a reference to the file that we got this module from so we can delete // it if necessary private File file = null; private String startupErrorMessage = null; /** * Simple constructor * * @param name */ public Module(String name) { this.name = name; } /** * Main constructor * * @param name * @param moduleId * @param packageName * @param author * @param description * @param version */ public Module(String name, String moduleId, String packageName, String author, String description, String version) { this.name = name; this.moduleId = moduleId; this.packageName = packageName; this.author = author; this.description = description; this.version = version; log.debug("Creating module " + name); } public boolean equals(Object obj) { if (obj != null && obj instanceof Module) { Module mod = (Module) obj; return getModuleId().equals(mod.getModuleId()); } return false; } /** * @return the activator * @deprecated replaced by {@link Module#getModuleActivator()} */ @Deprecated public Activator 
getActivator() { try { if (activator == null) { ModuleClassLoader classLoader = ModuleFactory.getModuleClassLoader(this); if (classLoader == null) throw new ModuleException("The classloader is null", getModuleId()); Class<?> c = classLoader.loadClass(getActivatorName()); setActivator((Activator) c.newInstance()); } } catch (ClassNotFoundException e) { throw new ModuleException("Unable to load/find activator: '" + getActivatorName() + "'", name, e); } catch (IllegalAccessException e) { throw new ModuleException("Unable to load/access activator: '" + getActivatorName() + "'", name, e); } catch (InstantiationException e) { throw new ModuleException("Unable to load/instantiate activator: '" + getActivatorName() + "'", name, e); } return activator; } /** * @param activator the activator to set */ public void setActivator(Activator activator) { this.activator = activator; } /** * @return the moduleActivator */ public ModuleActivator getModuleActivator() { try { if (moduleActivator == null) { ModuleClassLoader classLoader = ModuleFactory.getModuleClassLoader(this); if (classLoader == null) throw new ModuleException("The classloader is null", getModuleId()); Class<?> c = classLoader.loadClass(getActivatorName()); Object o = c.newInstance(); if (ModuleActivator.class.isAssignableFrom(o.getClass())) setModuleActivator((ModuleActivator) o); } } catch (ClassNotFoundException e) { throw new ModuleException("Unable to load/find moduleActivator: '" + getActivatorName() + "'", name, e); } catch (IllegalAccessException e) { throw new ModuleException("Unable to load/access moduleActivator: '" + getActivatorName() + "'", name, e); } catch (InstantiationException e) { throw new ModuleException("Unable to load/instantiate moduleActivator: '" + getActivatorName() + "'", name, e); } return moduleActivator; } /** * @param moduleActivator the moduleActivator to set */ public void setModuleActivator(ModuleActivator moduleActivator) { this.moduleActivator = moduleActivator; } /** * @return the activatorName */ public String getActivatorName() { return activatorName; } /** * @param activatorName the activatorName to set */ public void setActivatorName(String activatorName) { this.activatorName = activatorName; } /** * @return the author */ public String getAuthor() { return author; } /** * @param author the author to set */ public void setAuthor(String author) { this.author = author; } /** * @return the description */ public String getDescription() { return description; } /** * @param description the description to set */ public void setDescription(String description) { this.description = description; } /** * @return the name */ public String getName() { return name; } /** * @param name the name to set */ public void setName(String name) { this.name = name; } /** * @return the requireDatabaseVersion */ public String getRequireDatabaseVersion() { return requireDatabaseVersion; } /** * @param requireDatabaseVersion the requireDatabaseVersion to set */ public void setRequireDatabaseVersion(String requireDatabaseVersion) { this.requireDatabaseVersion = requireDatabaseVersion; } /** * This list of strings is just what is included in the config.xml file, the full package names: * e.g. org.openmrs.module.formentry * * @return the list of requiredModules */ public List<String> getRequiredModules() { return requiredModulesMap == null ? 
null : new ArrayList<String>(requiredModulesMap.keySet()); } /** * Convenience method to get the version of this given module that is required * * @return the version of the given required module, or null if there are no version constraints * @since 1.5 * @should return null if no required modules exist * @should return null if no required module by given name exists */ public String getRequiredModuleVersion(String moduleName) { return requiredModulesMap == null ? null : requiredModulesMap.get(moduleName); } /** * This is a convenience method to set all the required modules without any version requirements * * @param requiredModules the requiredModules to set for this module * @should set modules when there is a null required modules map */ public void setRequiredModules(List<String> requiredModules) { if (requiredModulesMap == null) requiredModulesMap = new HashMap<String, String>(); for (String module : requiredModules) { requiredModulesMap.put(module, null); } } /** * @param requiredModulesMap <code>Map<String,String></code> of the <code>requiredModule</code>s * to set * @since 1.5 */ public void setRequiredModulesMap(Map<String, String> requiredModulesMap) { this.requiredModulesMap = requiredModulesMap; } /** * Get the modules that are required for this module. The keys in this map are the module * package names. The values in the map are the required version. If no specific version is * required, it will be null. * * @return a map from required module to the version that is required */ public Map<String, String> setRequiredModulesMap() { return requiredModulesMap; } /** * Sets the modules that this module is aware of. * * @param awareOfModulesMap <code>Map<String,String></code> of the * <code>awareOfModulesMap</code>s to set * @since 1.9 */ public void setAwareOfModulesMap(Map<String, String> awareOfModulesMap) { this.awareOfModulesMap = awareOfModulesMap; } /** * This list of strings is just what is included in the config.xml file, the full package names: * e.g. org.openmrs.module.formentry, for the modules that this module is aware of. * * @since 1.9 * @return the list of awareOfModules */ public List<String> getAwareOfModules() { return awareOfModulesMap == null ? null : new ArrayList<String>(awareOfModulesMap.keySet()); } /** * @return the requireOpenmrsVersion */ public String getRequireOpenmrsVersion() { return requireOpenmrsVersion; } /** * @param requireOpenmrsVersion the requireOpenmrsVersion to set */ public void setRequireOpenmrsVersion(String requireOpenmrsVersion) { this.requireOpenmrsVersion = requireOpenmrsVersion; } /** * @return the module id */ public String getModuleId() { return moduleId; } /** * @return the module id, with all . replaced with / */ public String getModuleIdAsPath() { return moduleId == null ? 
null : moduleId.replace('.', '/'); } /** * @param moduleId the module id to set */ public void setModuleId(String moduleId) { this.moduleId = moduleId; } /** * @return the packageName */ public String getPackageName() { return packageName; } /** * @param packageName the packageName to set */ public void setPackageName(String packageName) { this.packageName = packageName; } /** * @return the version */ public String getVersion() { return version; } /** * @param version the version to set */ public void setVersion(String version) { this.version = version; } /** * @return the updateURL */ public String getUpdateURL() { return updateURL; } /** * @param updateURL the updateURL to set */ public void setUpdateURL(String updateURL) { this.updateURL = updateURL; } /** * @return the downloadURL */ public String getDownloadURL() { return downloadURL; } /** * @param downloadURL the downloadURL to set */ public void setDownloadURL(String downloadURL) { this.downloadURL = downloadURL; } /** * @return the updateVersion */ public String getUpdateVersion() { return updateVersion; } /** * @param updateVersion the updateVersion to set */ public void setUpdateVersion(String updateVersion) { this.updateVersion = updateVersion; } /** * @return the extensions */ public List<Extension> getExtensions() { if (extensions.size() == extensionNames.size()) return extensions; return expandExtensionNames(); } /** * @param extensions the extensions to set */ public void setExtensions(List<Extension> extensions) { this.extensions = extensions; } /** * A map of pointid to classname. The classname is expected to be a class that extends the * {@link Extension} object. <br/> * <br/> * This map will be expanded into full Extension objects the first time {@link #getExtensions()} * is called * * @param map from pointid to classname * @see ModuleFileParser */ public void setExtensionNames(IdentityHashMap<String, String> map) { if (log.isDebugEnabled()) for (Map.Entry<String, String> entry : extensionNames.entrySet()) { log.debug("Setting extension names: " + entry.getKey() + " : " + entry.getValue()); } this.extensionNames = map; } /** * Expand the temporary extensionNames map of pointid-classname to full pointid-classobject. <br> * This has to be done after the fact because when the pointid-classnames are parsed, the * module's objects aren't fully realized yet and so not all classes can be loaded. 
<br/> * <br/> * * @return a list of full Extension objects */ private List<Extension> expandExtensionNames() { ModuleClassLoader moduleClsLoader = ModuleFactory.getModuleClassLoader(this); if (moduleClsLoader == null) { log.debug(String.format("Module class loader is not available, maybe the module %s is stopped/stopping", getName())); } else if (extensions.size() != extensionNames.size()) { for (Map.Entry<String, String> entry : extensionNames.entrySet()) { String point = entry.getKey(); String className = entry.getValue(); log.debug("expanding extension names: " + point + " : " + className); try { Class<?> cls = moduleClsLoader.loadClass(className); Extension ext = (Extension) cls.newInstance(); ext.setPointId(point); ext.setModuleId(this.getModuleId()); extensions.add(ext); log.debug("Added extension: " + ext.getExtensionId() + " : " + ext.getClass()); } catch (NoClassDefFoundError e) { log.warn("Unable to find class definition for extension: " + point, e); } catch (ClassNotFoundException e) { log.warn("Unable to load class for extension: " + point, e); } catch (IllegalAccessException e) { log.warn("Unable to load class for extension: " + point, e); } catch (InstantiationException e) { log.warn("Unable to load class for extension: " + point, e); } } } return extensions; } /** * @return the advicePoints */ public List<AdvicePoint> getAdvicePoints() { return advicePoints; } /** * @param advicePoints the advicePoints to set */ public void setAdvicePoints(List<AdvicePoint> advicePoints) { this.advicePoints = advicePoints; } public File getFile() { return file; } public void setFile(File file) { this.file = file; } /** * Gets a mapping from locale to properties used by this module. The locales are represented as * a string containing language and country codes. * * @return mapping from locales to properties */ public Map<String, Properties> getMessages() { return messages; } /** * Sets the map from locale to properties used by this module. * * @param messages map of locale to properties for that locale */ public void setMessages(Map<String, Properties> messages) { this.messages = messages; } public List<GlobalProperty> getGlobalProperties() { return globalProperties; } public void setGlobalProperties(List<GlobalProperty> globalProperties) { this.globalProperties = globalProperties; } public List<Privilege> getPrivileges() { return privileges; } public void setPrivileges(List<Privilege> privileges) { this.privileges = privileges; } public Document getConfig() { return config; } public void setConfig(Document config) { this.config = config; } public Document getLog4j() { return log4j; } public void setLog4j(Document log4j) { this.log4j = log4j; } public Document getSqldiff() { return sqldiff; } public void setSqldiff(Document sqldiff) { this.sqldiff = sqldiff; } public List<String> getMappingFiles() { return mappingFiles; } public void setMappingFiles(List<String> mappingFiles) { this.mappingFiles = mappingFiles; } /** * Packages to scan for classes with JPA annotated classes. * * @return the set of packages to scan * @since 1.9.2, 1.10 */ public Set<String> getPackagesWithMappedClasses() { return packagesWithMappedClasses; } /** * @param packagesToScan * @see #getPackagesWithMappedClasses() * @since 1.9.2, 1.10 */ public void setPackagesWithMappedClasses(Set<String> packagesToScan) { this.packagesWithMappedClasses = packagesToScan; } /** * This property is set by the module owner to tell OpenMRS that once it is installed, it must * always startup. 
This is intended for modules with system-critical monitoring or security * checks that should always be in place. * * @return true if this module has said that it should always start up */ public boolean isMandatory() { return mandatory; } public void setMandatory(boolean mandatory) { this.mandatory = mandatory; } /** * This is a convenience method to know whether this module is core to OpenMRS. A module is * 'core' when this module is essentially part of the core code and must exist at all times * * @return true if this is an OpenMRS core module * @see {@link ModuleConstants#CORE_MODULES} */ public boolean isCoreModule() { return !ModuleUtil.ignoreCoreModules() && ModuleConstants.CORE_MODULES.containsKey(moduleId); } public boolean isStarted() { return ModuleFactory.isModuleStarted(this); } public void setStartupErrorMessage(String e) { if (e == null) throw new ModuleException("Startup error message cannot be null", this.getModuleId()); this.startupErrorMessage = e; } /** * Add the given exceptionMessage and throwable as the startup error for this module. This * method loops over the stacktrace and adds the detailed message * * @param exceptionMessage optional. the default message to show on the first line of the error * message * @param t throwable stacktrace to include in the error message */ public void setStartupErrorMessage(String exceptionMessage, Throwable t) { if (t == null) throw new ModuleException("Startup error value cannot be null", this.getModuleId()); StringBuffer sb = new StringBuffer(); // if exceptionMessage is not null, append it if (exceptionMessage != null) { sb.append(exceptionMessage); sb.append("\n"); } sb.append(t.getMessage()); sb.append("\n"); // loop over and append all stacktrace elements marking the "openmrs" ones for (StackTraceElement traceElement : t.getStackTrace()) { if (traceElement.getClassName().contains("openmrs")) sb.append(" ** "); sb.append(traceElement); sb.append("\n"); } this.startupErrorMessage = sb.toString(); } public String getStartupErrorMessage() { return startupErrorMessage; } public Boolean hasStartupError() { return (this.startupErrorMessage != null); } public void clearStartupError() { this.startupErrorMessage = null; } public String toString() { if (moduleId == null) return super.toString(); return moduleId; } public void disposeAdvicePointsClassInstance() { if (advicePoints == null) return; for (AdvicePoint advicePoint : advicePoints) { advicePoint.disposeClassInstance(); } } }
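The javadocs added by this revision (TRUNK-3778) document the required-module accessors and the getPackagesWithMappedClasses()/setPackagesWithMappedClasses() pair. The following is a minimal usage sketch of those accessors, using only methods visible in the class above; the module name, module identifiers, and package names are invented for illustration and do not come from the commit.

import java.util.Arrays;
import java.util.HashSet;

import org.openmrs.module.Module;

public class ModuleMetadataSketch {

    public static void main(String[] args) {
        // "Form Entry" and the identifiers below are illustrative values only.
        Module module = new Module("Form Entry");

        // Required modules declared without version constraints: per the javadoc,
        // getRequiredModuleVersion() returns null when no constraint (or no such
        // required module) exists.
        module.setRequiredModules(Arrays.asList("org.openmrs.module.htmlwidgets"));
        System.out.println(module.getRequiredModules());
        System.out.println(module.getRequiredModuleVersion("org.openmrs.module.htmlwidgets")); // null
        System.out.println(module.getRequiredModuleVersion("org.openmrs.module.unknown"));     // null

        // Packages to scan for JPA-annotated classes, the accessors documented by this commit.
        module.setPackagesWithMappedClasses(
                new HashSet<String>(Arrays.asList("org.openmrs.module.formentry.model")));
        System.out.println(module.getPackagesWithMappedClasses());
    }
}

Note that the map-returning accessor is still named setRequiredModulesMap() in both revisions even though it takes no argument and behaves as a getter.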
api/src/main/java/org/openmrs/module/Module.java
/** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.module; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.GlobalProperty; import org.openmrs.Privilege; import org.w3c.dom.Document; /** * Generic module class that openmrs manipulates * * @version 1.0 */ public final class Module { private Log log = LogFactory.getLog(this.getClass()); private String name; private String moduleId; private String packageName; private String description; private String author; private String version; private String updateURL; // should be a URL to an update.rdf file private String updateVersion = null; // version obtained from the remote update.rdf file private String downloadURL = null; // will only be populated when the remote file is newer than the current module private Activator activator; private ModuleActivator moduleActivator; private String activatorName; private String requireOpenmrsVersion; private String requireDatabaseVersion; private Map<String, String> requiredModulesMap; private Map<String, String> awareOfModulesMap; private List<AdvicePoint> advicePoints = new Vector<AdvicePoint>(); private IdentityHashMap<String, String> extensionNames = new IdentityHashMap<String, String>(); private List<Extension> extensions = new Vector<Extension>(); private Map<String, Properties> messages = new HashMap<String, Properties>(); private List<Privilege> privileges = new Vector<Privilege>(); private List<GlobalProperty> globalProperties = new Vector<GlobalProperty>(); private List<String> mappingFiles = new Vector<String>(); private Set<String> packagesWithMappedClasses = new HashSet<String>(); private Document config = null; private Document sqldiff = null; private Document log4j = null; private boolean mandatory = Boolean.FALSE; // keep a reference to the file that we got this module from so we can delete // it if necessary private File file = null; private String startupErrorMessage = null; /** * Simple constructor * * @param name */ public Module(String name) { this.name = name; } /** * Main constructor * * @param name * @param moduleId * @param packageName * @param author * @param description * @param version */ public Module(String name, String moduleId, String packageName, String author, String description, String version) { this.name = name; this.moduleId = moduleId; this.packageName = packageName; this.author = author; this.description = description; this.version = version; log.debug("Creating module " + name); } public boolean equals(Object obj) { if (obj != null && obj instanceof Module) { Module mod = (Module) obj; return getModuleId().equals(mod.getModuleId()); } return false; } /** * @return the activator * @deprecated replaced by {@link Module#getModuleActivator()} */ @Deprecated public Activator 
getActivator() { try { if (activator == null) { ModuleClassLoader classLoader = ModuleFactory.getModuleClassLoader(this); if (classLoader == null) throw new ModuleException("The classloader is null", getModuleId()); Class<?> c = classLoader.loadClass(getActivatorName()); setActivator((Activator) c.newInstance()); } } catch (ClassNotFoundException e) { throw new ModuleException("Unable to load/find activator: '" + getActivatorName() + "'", name, e); } catch (IllegalAccessException e) { throw new ModuleException("Unable to load/access activator: '" + getActivatorName() + "'", name, e); } catch (InstantiationException e) { throw new ModuleException("Unable to load/instantiate activator: '" + getActivatorName() + "'", name, e); } return activator; } /** * @param activator the activator to set */ public void setActivator(Activator activator) { this.activator = activator; } /** * @return the moduleActivator */ public ModuleActivator getModuleActivator() { try { if (moduleActivator == null) { ModuleClassLoader classLoader = ModuleFactory.getModuleClassLoader(this); if (classLoader == null) throw new ModuleException("The classloader is null", getModuleId()); Class<?> c = classLoader.loadClass(getActivatorName()); Object o = c.newInstance(); if (ModuleActivator.class.isAssignableFrom(o.getClass())) setModuleActivator((ModuleActivator) o); } } catch (ClassNotFoundException e) { throw new ModuleException("Unable to load/find moduleActivator: '" + getActivatorName() + "'", name, e); } catch (IllegalAccessException e) { throw new ModuleException("Unable to load/access moduleActivator: '" + getActivatorName() + "'", name, e); } catch (InstantiationException e) { throw new ModuleException("Unable to load/instantiate moduleActivator: '" + getActivatorName() + "'", name, e); } return moduleActivator; } /** * @param moduleActivator the moduleActivator to set */ public void setModuleActivator(ModuleActivator moduleActivator) { this.moduleActivator = moduleActivator; } /** * @return the activatorName */ public String getActivatorName() { return activatorName; } /** * @param activatorName the activatorName to set */ public void setActivatorName(String activatorName) { this.activatorName = activatorName; } /** * @return the author */ public String getAuthor() { return author; } /** * @param author the author to set */ public void setAuthor(String author) { this.author = author; } /** * @return the description */ public String getDescription() { return description; } /** * @param description the description to set */ public void setDescription(String description) { this.description = description; } /** * @return the name */ public String getName() { return name; } /** * @param name the name to set */ public void setName(String name) { this.name = name; } /** * @return the requireDatabaseVersion */ public String getRequireDatabaseVersion() { return requireDatabaseVersion; } /** * @param requireDatabaseVersion the requireDatabaseVersion to set */ public void setRequireDatabaseVersion(String requireDatabaseVersion) { this.requireDatabaseVersion = requireDatabaseVersion; } /** * This list of strings is just what is included in the config.xml file, the full package names: * e.g. org.openmrs.module.formentry * * @return the list of requiredModules */ public List<String> getRequiredModules() { return requiredModulesMap == null ? 
null : new ArrayList<String>(requiredModulesMap.keySet()); } /** * Convenience method to get the version of this given module that is required * * @return the version of the given required module, or null if there are no version constraints * @since 1.5 * @should return null if no required modules exist * @should return null if no required module by given name exists */ public String getRequiredModuleVersion(String moduleName) { return requiredModulesMap == null ? null : requiredModulesMap.get(moduleName); } /** * This is a convenience method to set all the required modules without any version requirements * * @param requiredModules the requiredModules to set for this module * @should set modules when there is a null required modules map */ public void setRequiredModules(List<String> requiredModules) { if (requiredModulesMap == null) requiredModulesMap = new HashMap<String, String>(); for (String module : requiredModules) { requiredModulesMap.put(module, null); } } /** * @param requiredModulesMap <code>Map<String,String></code> of the <code>requiredModule</code>s * to set * @since 1.5 */ public void setRequiredModulesMap(Map<String, String> requiredModulesMap) { this.requiredModulesMap = requiredModulesMap; } /** * Get the modules that are required for this module. The keys in this map are the module * package names. The values in the map are the required version. If no specific version is * required, it will be null. * * @return a map from required module to the version that is required */ public Map<String, String> setRequiredModulesMap() { return requiredModulesMap; } /** * Sets the modules that this module is aware of. * * @param awareOfModulesMap <code>Map<String,String></code> of the * <code>awareOfModulesMap</code>s to set * @since 1.9 */ public void setAwareOfModulesMap(Map<String, String> awareOfModulesMap) { this.awareOfModulesMap = awareOfModulesMap; } /** * This list of strings is just what is included in the config.xml file, the full package names: * e.g. org.openmrs.module.formentry, for the modules that this module is aware of. * * @since 1.9 * @return the list of awareOfModules */ public List<String> getAwareOfModules() { return awareOfModulesMap == null ? null : new ArrayList<String>(awareOfModulesMap.keySet()); } /** * @return the requireOpenmrsVersion */ public String getRequireOpenmrsVersion() { return requireOpenmrsVersion; } /** * @param requireOpenmrsVersion the requireOpenmrsVersion to set */ public void setRequireOpenmrsVersion(String requireOpenmrsVersion) { this.requireOpenmrsVersion = requireOpenmrsVersion; } /** * @return the module id */ public String getModuleId() { return moduleId; } /** * @return the module id, with all . replaced with / */ public String getModuleIdAsPath() { return moduleId == null ? 
null : moduleId.replace('.', '/'); } /** * @param moduleId the module id to set */ public void setModuleId(String moduleId) { this.moduleId = moduleId; } /** * @return the packageName */ public String getPackageName() { return packageName; } /** * @param packageName the packageName to set */ public void setPackageName(String packageName) { this.packageName = packageName; } /** * @return the version */ public String getVersion() { return version; } /** * @param version the version to set */ public void setVersion(String version) { this.version = version; } /** * @return the updateURL */ public String getUpdateURL() { return updateURL; } /** * @param updateURL the updateURL to set */ public void setUpdateURL(String updateURL) { this.updateURL = updateURL; } /** * @return the downloadURL */ public String getDownloadURL() { return downloadURL; } /** * @param downloadURL the downloadURL to set */ public void setDownloadURL(String downloadURL) { this.downloadURL = downloadURL; } /** * @return the updateVersion */ public String getUpdateVersion() { return updateVersion; } /** * @param updateVersion the updateVersion to set */ public void setUpdateVersion(String updateVersion) { this.updateVersion = updateVersion; } /** * @return the extensions */ public List<Extension> getExtensions() { if (extensions.size() == extensionNames.size()) return extensions; return expandExtensionNames(); } /** * @param extensions the extensions to set */ public void setExtensions(List<Extension> extensions) { this.extensions = extensions; } /** * A map of pointid to classname. The classname is expected to be a class that extends the * {@link Extension} object. <br/> * <br/> * This map will be expanded into full Extension objects the first time {@link #getExtensions()} * is called * * @param map from pointid to classname * @see ModuleFileParser */ public void setExtensionNames(IdentityHashMap<String, String> map) { if (log.isDebugEnabled()) for (Map.Entry<String, String> entry : extensionNames.entrySet()) { log.debug("Setting extension names: " + entry.getKey() + " : " + entry.getValue()); } this.extensionNames = map; } /** * Expand the temporary extensionNames map of pointid-classname to full pointid-classobject. <br> * This has to be done after the fact because when the pointid-classnames are parsed, the * module's objects aren't fully realized yet and so not all classes can be loaded. 
<br/> * <br/> * * @return a list of full Extension objects */ private List<Extension> expandExtensionNames() { ModuleClassLoader moduleClsLoader = ModuleFactory.getModuleClassLoader(this); if (moduleClsLoader == null) { log.debug(String.format("Module class loader is not available, maybe the module %s is stopped/stopping", getName())); } else if (extensions.size() != extensionNames.size()) { for (Map.Entry<String, String> entry : extensionNames.entrySet()) { String point = entry.getKey(); String className = entry.getValue(); log.debug("expanding extension names: " + point + " : " + className); try { Class<?> cls = moduleClsLoader.loadClass(className); Extension ext = (Extension) cls.newInstance(); ext.setPointId(point); ext.setModuleId(this.getModuleId()); extensions.add(ext); log.debug("Added extension: " + ext.getExtensionId() + " : " + ext.getClass()); } catch (NoClassDefFoundError e) { log.warn("Unable to find class definition for extension: " + point, e); } catch (ClassNotFoundException e) { log.warn("Unable to load class for extension: " + point, e); } catch (IllegalAccessException e) { log.warn("Unable to load class for extension: " + point, e); } catch (InstantiationException e) { log.warn("Unable to load class for extension: " + point, e); } } } return extensions; } /** * @return the advicePoints */ public List<AdvicePoint> getAdvicePoints() { return advicePoints; } /** * @param advicePoints the advicePoints to set */ public void setAdvicePoints(List<AdvicePoint> advicePoints) { this.advicePoints = advicePoints; } public File getFile() { return file; } public void setFile(File file) { this.file = file; } /** * Gets a mapping from locale to properties used by this module. The locales are represented as * a string containing language and country codes. * * @return mapping from locales to properties */ public Map<String, Properties> getMessages() { return messages; } /** * Sets the map from locale to properties used by this module. * * @param messages map of locale to properties for that locale */ public void setMessages(Map<String, Properties> messages) { this.messages = messages; } public List<GlobalProperty> getGlobalProperties() { return globalProperties; } public void setGlobalProperties(List<GlobalProperty> globalProperties) { this.globalProperties = globalProperties; } public List<Privilege> getPrivileges() { return privileges; } public void setPrivileges(List<Privilege> privileges) { this.privileges = privileges; } public Document getConfig() { return config; } public void setConfig(Document config) { this.config = config; } public Document getLog4j() { return log4j; } public void setLog4j(Document log4j) { this.log4j = log4j; } public Document getSqldiff() { return sqldiff; } public void setSqldiff(Document sqldiff) { this.sqldiff = sqldiff; } public List<String> getMappingFiles() { return mappingFiles; } public void setMappingFiles(List<String> mappingFiles) { this.mappingFiles = mappingFiles; } public Set<String> getPackagesWithMappedClasses() { return packagesWithMappedClasses; } public void setPackagesWithMappedClasses(Set<String> packagesToScan) { this.packagesWithMappedClasses = packagesToScan; } /** * This property is set by the module owner to tell OpenMRS that once it is installed, it must * always startup. This is intended for modules with system-critical monitoring or security * checks that should always be in place. 
* * @return true if this module has said that it should always start up */ public boolean isMandatory() { return mandatory; } public void setMandatory(boolean mandatory) { this.mandatory = mandatory; } /** * This is a convenience method to know whether this module is core to OpenMRS. A module is * 'core' when this module is essentially part of the core code and must exist at all times * * @return true if this is an OpenMRS core module * @see {@link ModuleConstants#CORE_MODULES} */ public boolean isCoreModule() { return !ModuleUtil.ignoreCoreModules() && ModuleConstants.CORE_MODULES.containsKey(moduleId); } public boolean isStarted() { return ModuleFactory.isModuleStarted(this); } public void setStartupErrorMessage(String e) { if (e == null) throw new ModuleException("Startup error message cannot be null", this.getModuleId()); this.startupErrorMessage = e; } /** * Add the given exceptionMessage and throwable as the startup error for this module. This * method loops over the stacktrace and adds the detailed message * * @param exceptionMessage optional. the default message to show on the first line of the error * message * @param t throwable stacktrace to include in the error message */ public void setStartupErrorMessage(String exceptionMessage, Throwable t) { if (t == null) throw new ModuleException("Startup error value cannot be null", this.getModuleId()); StringBuffer sb = new StringBuffer(); // if exceptionMessage is not null, append it if (exceptionMessage != null) { sb.append(exceptionMessage); sb.append("\n"); } sb.append(t.getMessage()); sb.append("\n"); // loop over and append all stacktrace elements marking the "openmrs" ones for (StackTraceElement traceElement : t.getStackTrace()) { if (traceElement.getClassName().contains("openmrs")) sb.append(" ** "); sb.append(traceElement); sb.append("\n"); } this.startupErrorMessage = sb.toString(); } public String getStartupErrorMessage() { return startupErrorMessage; } public Boolean hasStartupError() { return (this.startupErrorMessage != null); } public void clearStartupError() { this.startupErrorMessage = null; } public String toString() { if (moduleId == null) return super.toString(); return moduleId; } public void disposeAdvicePointsClassInstance() { if (advicePoints == null) return; for (AdvicePoint advicePoint : advicePoints) { advicePoint.disposeClassInstance(); } } }
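Both revisions share the startup-error helpers (setStartupErrorMessage, hasStartupError, getStartupErrorMessage, clearStartupError). Below is a hedged sketch of how a caller might record and inspect a startup failure; the module name and the exception text are invented for illustration.

import org.openmrs.module.Module;

public class StartupErrorSketch {

    public static void main(String[] args) {
        // "Reporting" is an illustrative module name only.
        Module module = new Module("Reporting");
        try {
            throw new IllegalStateException("could not connect to the database");
        } catch (IllegalStateException e) {
            // The two-argument overload prepends the optional message, then appends the
            // throwable's message and stack trace, marking frames whose class name
            // contains "openmrs" with " ** ".
            module.setStartupErrorMessage("Error while starting module", e);
        }

        if (module.hasStartupError()) {
            System.out.println(module.getStartupErrorMessage());
        }

        // Reset once the module starts cleanly.
        module.clearStartupError();
        System.out.println(module.hasStartupError()); // false
    }
}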
TRUNK-3778: Added javadocs
api/src/main/java/org/openmrs/module/Module.java
TRUNK-3778: Added javadocs
Java
lgpl-2.1
8835f3910fdb3d018c2052341c40231bca9e961c
0
windauer/exist,adamretter/exist,ambs/exist,eXist-db/exist,jessealama/exist,MjAbuz/exist,zwobit/exist,RemiKoutcherawy/exist,joewiz/exist,hungerburg/exist,adamretter/exist,adamretter/exist,patczar/exist,shabanovd/exist,joewiz/exist,RemiKoutcherawy/exist,olvidalo/exist,hungerburg/exist,MjAbuz/exist,ambs/exist,lcahlander/exist,olvidalo/exist,dizzzz/exist,lcahlander/exist,zwobit/exist,wshager/exist,jensopetersen/exist,dizzzz/exist,lcahlander/exist,RemiKoutcherawy/exist,zwobit/exist,patczar/exist,wshager/exist,MjAbuz/exist,windauer/exist,RemiKoutcherawy/exist,eXist-db/exist,jensopetersen/exist,patczar/exist,kohsah/exist,kohsah/exist,wolfgangmm/exist,dizzzz/exist,lcahlander/exist,wolfgangmm/exist,eXist-db/exist,zwobit/exist,opax/exist,zwobit/exist,joewiz/exist,jensopetersen/exist,joewiz/exist,wshager/exist,wshager/exist,adamretter/exist,RemiKoutcherawy/exist,ljo/exist,jensopetersen/exist,opax/exist,shabanovd/exist,dizzzz/exist,eXist-db/exist,RemiKoutcherawy/exist,MjAbuz/exist,jessealama/exist,ambs/exist,patczar/exist,adamretter/exist,dizzzz/exist,hungerburg/exist,wshager/exist,kohsah/exist,patczar/exist,wolfgangmm/exist,olvidalo/exist,jensopetersen/exist,dizzzz/exist,shabanovd/exist,wolfgangmm/exist,ljo/exist,jessealama/exist,MjAbuz/exist,shabanovd/exist,lcahlander/exist,shabanovd/exist,shabanovd/exist,kohsah/exist,hungerburg/exist,ambs/exist,eXist-db/exist,eXist-db/exist,ljo/exist,lcahlander/exist,wshager/exist,kohsah/exist,joewiz/exist,opax/exist,windauer/exist,jessealama/exist,olvidalo/exist,zwobit/exist,ambs/exist,wolfgangmm/exist,patczar/exist,hungerburg/exist,joewiz/exist,ljo/exist,olvidalo/exist,wolfgangmm/exist,windauer/exist,ljo/exist,jessealama/exist,windauer/exist,MjAbuz/exist,adamretter/exist,jessealama/exist,opax/exist,ambs/exist,kohsah/exist,ljo/exist,opax/exist,jensopetersen/exist,windauer/exist
/* * eXist Open Source Native XML Database * Copyright (C) 2001-03 Wolfgang M. Meier * [email protected] * http://exist.sourceforge.net * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. * * $Id$ */ package org.exist.cocoon; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.apache.avalon.framework.configuration.Configurable; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.ConfigurationException; import org.apache.avalon.framework.parameters.ParameterException; import org.apache.avalon.framework.parameters.Parameterizable; import org.apache.avalon.framework.parameters.Parameters; import org.apache.cocoon.ProcessingException; import org.apache.cocoon.caching.CacheableProcessingComponent; import org.apache.cocoon.environment.Context; import org.apache.cocoon.environment.ObjectModelHelper; import org.apache.cocoon.environment.Request; import org.apache.cocoon.environment.Response; import org.apache.cocoon.environment.Session; import org.apache.cocoon.environment.SourceResolver; import org.apache.cocoon.environment.http.HttpEnvironment; import org.apache.cocoon.generation.ServiceableGenerator; import org.apache.cocoon.xml.IncludeXMLConsumer; import org.apache.excalibur.source.Source; import org.apache.excalibur.source.SourceValidity; import org.apache.excalibur.source.impl.validity.AggregatedValidity; import org.apache.excalibur.source.impl.validity.ExpiresValidity; import org.exist.source.CocoonSource; import org.exist.storage.serializers.EXistOutputKeys; import org.exist.storage.serializers.Serializer; import org.exist.xmldb.CollectionImpl; import org.exist.xmldb.XQueryService; import org.exist.xquery.XPathException; import org.exist.xquery.functions.request.RequestModule; import org.exist.xquery.value.Item; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; import org.xmldb.api.DatabaseManager; import org.xmldb.api.base.Collection; import org.xmldb.api.base.Database; import org.xmldb.api.base.ResourceSet; import org.xmldb.api.base.XMLDBException; import org.xmldb.api.modules.XMLResource; /** * A generator for Cocoon which reads an XQuery script, executes it and passes * the results into the Cocoon pipeline. 
* * The following optional attributes are accepted on the component declaration as default eXist settings: * <li><tt>collection</tt>: identifies the XML:DB root collection used to process * the request</li> * <li><tt>user</tt></li> * <li><tt>password</tt></li> * <li><tt>create-session</tt>: if set to "true", indicates that an * HTTP session should be created upon the first invocation.</li> * <li><tt>expand-xincludes</tt></li> * <li><tt>cache-validity</tt>: if specified, the XQuery content is * cached until the specified delay expressed in milliseconds is elapsed * or until the XQuery file is modified. The identity of the cached content is * computed using the XQuery file URI and the list of all parameters passed to * the XQuery.</li> * * The component also accept default parameters that will be declared as implicit variables in the XQuery. * See below an example declaration of the XQueryGenerator component with default eXist settings, and an extra user-defined parameter: * * <map:generator logger="xmldb" name="xquery" * collection="xmldb:exist:///db/" * user="guest" * password="guest" * create-session="false" * expand-xincludes="false" * cache-validity="-1" * src="org.exist.cocoon.XQueryGenerator"> * <parameter name="myProjectURI" value="/db/myproject"/> * </map:generator> * * These settings and parameters can be overriden on a per-pipeline basis with sitemap parameters, see below with default values and the extra user-defined parameter: * * <pre> * &lt;map:parameter name=&quot;collection&quot; value=&quot;xmldb:exist:///db&quot;/&gt; * &lt;map:parameter name=&quot;user&quot; value=&quot;guest&quot;/&gt; * &lt;map:parameter name=&quot;password&quot; value=&quot;guest&quot;/&gt; * &lt;map:parameter name=&quot;create-session&quot; value=&quot;false&quot;/&gt; * &lt;map:parameter name=&quot;expand-xincludes&quot; value=&quot;false&quot;/&gt; * &lt;map:parameter name=&quot;cache-validity&quot; value=&quot;-1quot;/&gt; * &lt;map:parameter name=&quot;myProjectURI&quot; value=&quot;/db/myproject&quot;/&gt; * </pre> * * The last sitemap parameter overrides the value of the XQuery variable defined in the component parameters, * whereas others override the default eXist settings defined on the component attributes. 
* * @author wolf */ public class XQueryGenerator extends ServiceableGenerator implements Configurable, Parameterizable, CacheableProcessingComponent { public final static String DRIVER = "org.exist.xmldb.DatabaseImpl"; private Source inputSource = null; private Map objectModel = null; private boolean createSession; private boolean defaultCreateSession = false; private final static String CREATE_SESSION = "create-session"; private boolean expandXIncludes; private boolean defaultExpandXIncludes = false; private final static String EXPAND_XINCLUDES = "expand-xincludes"; private String collectionURI; private String defaultCollectionURI = "xmldb:exist:///db"; private final static String COLLECTION_URI = "collection"; private long cacheValidity; private long defaultCacheValidity = -1; private final static String CACHE_VALIDITY = "cache-validity"; private String user; private String defaultUser = "guest"; private final static String USER = "user"; private String password; private String defaultPassword = "guest"; private final static String PASSWORD = "password"; private Map optionalParameters; private Parameters componentParams; /* * (non-Javadoc) * * @see org.apache.cocoon.generation.AbstractGenerator#setup(org.apache.cocoon.environment.SourceResolver, * java.util.Map, java.lang.String, * org.apache.avalon.framework.parameters.Parameters) */ public void setup(SourceResolver resolver, Map objectModel, String source, Parameters parameters) throws ProcessingException, SAXException, IOException { super.setup(resolver, objectModel, source, parameters); /* * We don't do this directly in parameterize() because setup() can be * called multiple times and optionalParameters needs resetting to forget * sitemap parameters that may have been removed inbetween */ this.optionalParameters = new HashMap(); String paramNames[] = componentParams.getNames(); for (int i = 0; i < paramNames.length; i++) { String param = paramNames[i]; try { optionalParameters.put(param, componentParams.getParameter(param)); } catch (ParameterException e1) { // Cannot happen as we iterate through existing parameters } } this.objectModel = objectModel; this.inputSource = resolver.resolveURI(source); this.collectionURI = parameters.getParameter(COLLECTION_URI, this.defaultCollectionURI); this.user = parameters.getParameter(USER, this.defaultUser); this.password = parameters.getParameter(PASSWORD, this.defaultPassword); this.createSession = parameters.getParameterAsBoolean(CREATE_SESSION, this.defaultCreateSession); this.expandXIncludes = parameters.getParameterAsBoolean( EXPAND_XINCLUDES, this.defaultExpandXIncludes); this.cacheValidity = parameters.getParameterAsLong(CACHE_VALIDITY, defaultCacheValidity); paramNames = parameters.getNames(); for (int i = 0; i < paramNames.length; i++) { String param = paramNames[i]; if (!(param.equals(COLLECTION_URI) || param.equals(USER) || param.equals(PASSWORD) || param.equals(CREATE_SESSION) || param .equals(EXPAND_XINCLUDES) || param.equals(CACHE_VALIDITY))) { this.optionalParameters.put(param, parameters .getParameter(param, "")); } } Context context = ObjectModelHelper.getContext(objectModel); String dbHome = context.getRealPath("WEB-INF"); try { Class driver = Class.forName(DRIVER); Database database = (Database)driver.newInstance(); database.setProperty("create-database", "true"); database.setProperty("configuration", dbHome + File.separatorChar + "conf.xml"); DatabaseManager.registerDatabase(database); } catch(Exception e) { throw new ProcessingException("Failed to initialize database driver: 
" + e.getMessage(), e); } } /* * (non-Javadoc) * * @see org.apache.cocoon.generation.AbstractGenerator#recycle() */ public void recycle() { if (resolver != null) resolver.release(inputSource); inputSource = null; super.recycle(); } /* * (non-Javadoc) * * @see org.apache.cocoon.generation.Generator#generate() */ public void generate() throws IOException, SAXException, ProcessingException { ContentHandler includeContentHandler; if (inputSource == null) throw new ProcessingException("No input source"); Request request = ObjectModelHelper.getRequest(objectModel); Response response = ObjectModelHelper.getResponse(objectModel); Context context = ObjectModelHelper.getContext(objectModel); Session session = request.getSession(createSession); final String servletPath = request.getServletPath(); final String pathInfo = request.getPathInfo(); StringBuffer baseURIBuffer = new StringBuffer(servletPath); if (pathInfo != null) baseURIBuffer.append(pathInfo); int p = baseURIBuffer.lastIndexOf("/"); if (p > -1) baseURIBuffer.delete(p,baseURIBuffer.length()); final String baseURI = context.getRealPath(baseURIBuffer.toString()); // check if user and password can be read from the session if (session != null && request.isRequestedSessionIdValid()) { String actualUser = getSessionAttribute(session, "user"); String actualPass = getSessionAttribute(session, "password"); user = actualUser == null ? null : String.valueOf(actualUser); password = actualPass == null ? null : String.valueOf(actualPass); } if (user == null) user = defaultUser; if (password == null) password = defaultPassword; try { Collection collection = DatabaseManager.getCollection( collectionURI, user, password); if (collection == null) { if (getLogger().isErrorEnabled()) getLogger().error( "Collection " + collectionURI + " not found"); throw new ProcessingException("Collection " + collectionURI + " not found"); } XQueryService service = (XQueryService) collection.getService( "XQueryService", "1.0"); service.setProperty(Serializer.GENERATE_DOC_EVENTS, "false"); service.setProperty(EXistOutputKeys.EXPAND_XINCLUDES, expandXIncludes ? 
"yes" : "no"); service.setProperty("base-uri", baseURI); service.setNamespace(RequestModule.PREFIX, RequestModule.NAMESPACE_URI); service.setModuleLoadPath(baseURI); if(!((CollectionImpl)collection).isRemoteCollection()) { HttpServletRequest httpRequest = (HttpServletRequest) objectModel .get(HttpEnvironment.HTTP_REQUEST_OBJECT); service.declareVariable(RequestModule.PREFIX + ":request", new CocoonRequestWrapper(request, httpRequest)); service.declareVariable(RequestModule.PREFIX + ":response", new CocoonResponseWrapper(response)); if(session != null) service.declareVariable(RequestModule.PREFIX + ":session", new CocoonSessionWrapper(session)); includeContentHandler = this.contentHandler; } else { includeContentHandler = new IncludeXMLConsumer(this.contentHandler); } declareParameters(service); String uri = inputSource.getURI(); ResourceSet result = service.execute(new CocoonSource(inputSource, true)); XMLResource resource; this.contentHandler.startDocument(); for (long i = 0; i < result.getSize(); i++) { resource = (XMLResource) result.getResource(i); resource.getContentAsSAX(includeContentHandler); } this.contentHandler.endDocument(); } catch (XMLDBException e) { throw new ProcessingException("XMLDBException occurred: " + e.getMessage(), e); } } private void declareParameters(XQueryService service) throws XMLDBException { for(Iterator i = optionalParameters.entrySet().iterator(); i.hasNext(); ) { Map.Entry entry = (Map.Entry)i.next(); service.declareVariable((String)entry.getKey(), entry.getValue()); } } private String getSessionAttribute(Session session, String attribute) { Object obj = session.getAttribute(attribute); if(obj == null) return null; if(obj instanceof Item) try { return ((Item)obj).getStringValue(); } catch (XPathException e) { return null; } return obj.toString(); } /** * @see org.apache.avalon.framework.configuration.Configurable#configure(org.apache.avalon.framework.configuration.Configuration) */ public void configure(Configuration config) throws ConfigurationException { this.defaultCollectionURI = config.getAttribute(COLLECTION_URI, this.defaultCollectionURI); this.defaultCreateSession = config.getAttributeAsBoolean(CREATE_SESSION, this.defaultCreateSession); this.defaultExpandXIncludes = config.getAttributeAsBoolean(EXPAND_XINCLUDES, this.defaultExpandXIncludes); this.defaultPassword = config.getAttribute(PASSWORD, this.defaultPassword); this.defaultUser = config.getAttribute(USER, this.defaultUser); this.defaultCacheValidity = config.getAttributeAsLong(CACHE_VALIDITY, this.defaultCacheValidity); } /** * @see org.apache.avalon.framework.parameters.Parameterizable#parameterize(org.apache.avalon.framework.parameters.Parameters) */ public void parameterize(Parameters params) throws ParameterException { this.componentParams = params; } public Serializable getKey() { StringBuffer key = new StringBuffer(); key.append(optionalParameters.toString()); key.append(inputSource.getURI()); return key.toString(); } public SourceValidity getValidity() { if (cacheValidity != -1) { AggregatedValidity v = new AggregatedValidity(); v.add(inputSource.getValidity()); v.add(new ExpiresValidity(cacheValidity)); return v; } return null; } }
src/org/exist/cocoon/XQueryGenerator.java
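The change in this second record makes XQueryGenerator a CacheableProcessingComponent: the cache key concatenates the optional-parameter map with the XQuery source URI, and getValidity() aggregates the source's own validity with an ExpiresValidity built from the cache-validity delay in milliseconds, so the cached output is discarded when either the delay elapses or the XQuery file changes. The helper below is a minimal sketch of that validity construction, using only the Excalibur classes the generator already imports; the method name is invented, and -1 follows the generator's "caching disabled" convention.

import org.apache.excalibur.source.SourceValidity;
import org.apache.excalibur.source.impl.validity.AggregatedValidity;
import org.apache.excalibur.source.impl.validity.ExpiresValidity;

public class CacheValiditySketch {

    // Mirrors XQueryGenerator.getValidity(): null disables caching entirely; otherwise
    // the aggregate stays valid only while the XQuery source is unchanged and the
    // configured delay has not yet elapsed.
    static SourceValidity buildValidity(SourceValidity xquerySourceValidity, long cacheValidityMillis) {
        if (cacheValidityMillis == -1) {
            return null;
        }
        AggregatedValidity validity = new AggregatedValidity();
        validity.add(xquerySourceValidity);
        validity.add(new ExpiresValidity(cacheValidityMillis));
        return validity;
    }
}

Separately, both revisions expose every optional component or sitemap parameter as an implicit XQuery variable before the script runs. The stand-alone sketch below follows the driver registration shown in setup(); the collection URI and guest credentials are the generator's defaults, "myProjectURI" is the illustrative parameter from the class javadoc, and execution itself is elided because the generator goes through the Cocoon-specific CocoonSource.

import org.exist.xmldb.XQueryService;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.Database;

public class ImplicitVariableSketch {

    public static void main(String[] args) throws Exception {
        // Driver registration as in XQueryGenerator.setup(); the servlet also points the
        // "configuration" property at WEB-INF/conf.xml, omitted here because that path
        // is deployment specific.
        Class<?> driver = Class.forName("org.exist.xmldb.DatabaseImpl");
        Database database = (Database) driver.newInstance();
        database.setProperty("create-database", "true");
        DatabaseManager.registerDatabase(database);

        Collection collection = DatabaseManager.getCollection("xmldb:exist:///db", "guest", "guest");
        XQueryService service = (XQueryService) collection.getService("XQueryService", "1.0");

        // Each optional parameter becomes an implicit variable that the executed query
        // can reference as $myProjectURI.
        service.declareVariable("myProjectURI", "/db/myproject");

        // The generator itself then runs the pipeline's script via
        // service.execute(new CocoonSource(inputSource, true)).
    }
}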
/* * eXist Open Source Native XML Database * Copyright (C) 2001-03 Wolfgang M. Meier * [email protected] * http://exist.sourceforge.net * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. * * $Id$ */ package org.exist.cocoon; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.apache.avalon.framework.configuration.Configurable; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.ConfigurationException; import org.apache.avalon.framework.parameters.ParameterException; import org.apache.avalon.framework.parameters.Parameterizable; import org.apache.avalon.framework.parameters.Parameters; import org.apache.cocoon.ProcessingException; import org.apache.cocoon.environment.Context; import org.apache.cocoon.environment.ObjectModelHelper; import org.apache.cocoon.environment.Request; import org.apache.cocoon.environment.Response; import org.apache.cocoon.environment.Session; import org.apache.cocoon.environment.SourceResolver; import org.apache.cocoon.environment.http.HttpEnvironment; import org.apache.cocoon.generation.ServiceableGenerator; import org.apache.cocoon.xml.IncludeXMLConsumer; import org.apache.excalibur.source.Source; import org.exist.source.CocoonSource; import org.exist.storage.serializers.EXistOutputKeys; import org.exist.storage.serializers.Serializer; import org.exist.xmldb.CollectionImpl; import org.exist.xmldb.XQueryService; import org.exist.xquery.XPathException; import org.exist.xquery.functions.request.RequestModule; import org.exist.xquery.value.Item; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; import org.xmldb.api.DatabaseManager; import org.xmldb.api.base.Collection; import org.xmldb.api.base.Database; import org.xmldb.api.base.ResourceSet; import org.xmldb.api.base.XMLDBException; import org.xmldb.api.modules.XMLResource; /** * A generator for Cocoon which reads an XQuery script, executes it and passes * the results into the Cocoon pipeline. * * The following optional attributes are accepted on the component declaration as default eXist settings: * <li><tt>collection</tt>: identifies the XML:DB root collection used to process * the request</li> * <li><tt>user</tt></li> * <li><tt>password</tt></li> * <li><tt>create-session</tt>: if set to "true", indicates that an * HTTP session should be created upon the first invocation.</li> * <li><tt>expand-xincludes</tt></li> * * The component also accept default parameters that will be declared as implicit variables in the XQuery. 
* See below an example declaration of the XQueryGenerator component with default eXist settings, and an extra user-defined parameter: * * <map:generator logger="xmldb" name="xquery" * collection="xmldb:exist:///db/" * user="guest" * password="guest" * create-session="false" * expand-xincludes="false" * src="org.exist.cocoon.XQueryGenerator"> * <parameter name="myProjectURI" value="/db/myproject"/> * </map:generator> * * These settings and parameters can be overriden on a per-pipeline basis with sitemap parameters, see below with default values and the extra user-defined parameter: * * <pre> * &lt;map:parameter name=&quot;collection&quot; value=&quot;xmldb:exist:///db&quot;/&gt; * &lt;map:parameter name=&quot;user&quot; value=&quot;guest&quot;/&gt; * &lt;map:parameter name=&quot;password&quot; value=&quot;guest&quot;/&gt; * &lt;map:parameter name=&quot;create-session&quot; value=&quot;false&quot;/&gt; * &lt;map:parameter name=&quot;expand-xincludes&quot; value=&quot;false&quot;/&gt; * &lt;map:parameter name=&quot;myProjectURI&quot; value=&quot;/db/myproject&quot;/&gt; * </pre> * * The last sitemap parameter overrides the value of the XQuery variable defined in the component parameters, * whereas others override the default eXist settings defined on the component attributes. * * @author wolf */ public class XQueryGenerator extends ServiceableGenerator implements Configurable, Parameterizable { public final static String DRIVER = "org.exist.xmldb.DatabaseImpl"; private Source inputSource = null; private Map objectModel = null; private boolean createSession; private boolean defaultCreateSession = false; private final static String CREATE_SESSION = "create-session"; private boolean expandXIncludes; private boolean defaultExpandXIncludes = false; private final static String EXPAND_XINCLUDES = "expand-xincludes"; private String collectionURI; private String defaultCollectionURI = "xmldb:exist:///db"; private final static String COLLECTION_URI = "collection"; private String user; private String defaultUser = "guest"; private final static String USER = "user"; private String password; private String defaultPassword = "guest"; private final static String PASSWORD = "password"; private Map optionalParameters; /* * (non-Javadoc) * * @see org.apache.cocoon.generation.AbstractGenerator#setup(org.apache.cocoon.environment.SourceResolver, * java.util.Map, java.lang.String, * org.apache.avalon.framework.parameters.Parameters) */ public void setup(SourceResolver resolver, Map objectModel, String source, Parameters parameters) throws ProcessingException, SAXException, IOException { super.setup(resolver, objectModel, source, parameters); this.objectModel = objectModel; this.inputSource = resolver.resolveURI(source); this.collectionURI = parameters.getParameter(COLLECTION_URI, this.defaultCollectionURI); this.user = parameters.getParameter(USER, this.defaultUser); this.password = parameters.getParameter(PASSWORD, this.defaultPassword); this.createSession = parameters.getParameterAsBoolean(CREATE_SESSION, this.defaultCreateSession); this.expandXIncludes = parameters.getParameterAsBoolean( EXPAND_XINCLUDES, this.defaultExpandXIncludes); String paramNames[] = parameters.getNames(); for (int i = 0; i < paramNames.length; i++) { String param = paramNames[i]; if (!(param.equals(COLLECTION_URI) || param.equals(USER) || param.equals(PASSWORD) || param.equals(CREATE_SESSION) || param .equals(EXPAND_XINCLUDES))) { this.optionalParameters.put(param, parameters .getParameter(param, "")); } } Context context = 
ObjectModelHelper.getContext(objectModel); String dbHome = context.getRealPath("WEB-INF"); try { Class driver = Class.forName(DRIVER); Database database = (Database)driver.newInstance(); database.setProperty("create-database", "true"); database.setProperty("configuration", dbHome + File.separatorChar + "conf.xml"); DatabaseManager.registerDatabase(database); } catch(Exception e) { throw new ProcessingException("Failed to initialize database driver: " + e.getMessage(), e); } } /* * (non-Javadoc) * * @see org.apache.cocoon.generation.AbstractGenerator#recycle() */ public void recycle() { if (resolver != null) resolver.release(inputSource); inputSource = null; super.recycle(); } /* * (non-Javadoc) * * @see org.apache.cocoon.generation.Generator#generate() */ public void generate() throws IOException, SAXException, ProcessingException { ContentHandler includeContentHandler; if (inputSource == null) throw new ProcessingException("No input source"); Request request = ObjectModelHelper.getRequest(objectModel); Response response = ObjectModelHelper.getResponse(objectModel); Context context = ObjectModelHelper.getContext(objectModel); Session session = request.getSession(createSession); final String servletPath = request.getServletPath(); final String pathInfo = request.getPathInfo(); StringBuffer baseURIBuffer = new StringBuffer(servletPath); if (pathInfo != null) baseURIBuffer.append(pathInfo); int p = baseURIBuffer.lastIndexOf("/"); if (p > -1) baseURIBuffer.delete(p,baseURIBuffer.length()); final String baseURI = context.getRealPath(baseURIBuffer.toString()); // check if user and password can be read from the session if (session != null && request.isRequestedSessionIdValid()) { String actualUser = getSessionAttribute(session, "user"); String actualPass = getSessionAttribute(session, "password"); user = actualUser == null ? null : String.valueOf(actualUser); password = actualPass == null ? null : String.valueOf(actualPass); } if (user == null) user = defaultUser; if (password == null) password = defaultPassword; try { Collection collection = DatabaseManager.getCollection( collectionURI, user, password); if (collection == null) { if (getLogger().isErrorEnabled()) getLogger().error( "Collection " + collectionURI + " not found"); throw new ProcessingException("Collection " + collectionURI + " not found"); } XQueryService service = (XQueryService) collection.getService( "XQueryService", "1.0"); service.setProperty(Serializer.GENERATE_DOC_EVENTS, "false"); service.setProperty(EXistOutputKeys.EXPAND_XINCLUDES, expandXIncludes ? 
"yes" : "no"); service.setProperty("base-uri", baseURI); service.setNamespace(RequestModule.PREFIX, RequestModule.NAMESPACE_URI); service.setModuleLoadPath(baseURI); if(!((CollectionImpl)collection).isRemoteCollection()) { HttpServletRequest httpRequest = (HttpServletRequest) objectModel .get(HttpEnvironment.HTTP_REQUEST_OBJECT); service.declareVariable(RequestModule.PREFIX + ":request", new CocoonRequestWrapper(request, httpRequest)); service.declareVariable(RequestModule.PREFIX + ":response", new CocoonResponseWrapper(response)); if(session != null) service.declareVariable(RequestModule.PREFIX + ":session", new CocoonSessionWrapper(session)); includeContentHandler = this.contentHandler; } else { includeContentHandler = new IncludeXMLConsumer(this.contentHandler); } declareParameters(service); String uri = inputSource.getURI(); ResourceSet result = service.execute(new CocoonSource(inputSource, true)); XMLResource resource; this.contentHandler.startDocument(); for (long i = 0; i < result.getSize(); i++) { resource = (XMLResource) result.getResource(i); resource.getContentAsSAX(includeContentHandler); } this.contentHandler.endDocument(); } catch (XMLDBException e) { throw new ProcessingException("XMLDBException occurred: " + e.getMessage(), e); } } private void declareParameters(XQueryService service) throws XMLDBException { for(Iterator i = optionalParameters.entrySet().iterator(); i.hasNext(); ) { Map.Entry entry = (Map.Entry)i.next(); service.declareVariable((String)entry.getKey(), entry.getValue()); } } private String getSessionAttribute(Session session, String attribute) { Object obj = session.getAttribute(attribute); if(obj == null) return null; if(obj instanceof Item) try { return ((Item)obj).getStringValue(); } catch (XPathException e) { return null; } return obj.toString(); } /** * @see org.apache.avalon.framework.configuration.Configurable#configure(org.apache.avalon.framework.configuration.Configuration) */ public void configure(Configuration config) throws ConfigurationException { this.defaultCollectionURI = config.getAttribute(COLLECTION_URI, this.defaultCollectionURI); this.defaultCreateSession = config.getAttributeAsBoolean(CREATE_SESSION, this.defaultCreateSession); this.defaultExpandXIncludes = config.getAttributeAsBoolean(EXPAND_XINCLUDES, this.defaultExpandXIncludes); this.defaultPassword = config.getAttribute(PASSWORD, this.defaultPassword); this.defaultUser = config.getAttribute(USER, this.defaultUser); } /** * @see org.apache.avalon.framework.parameters.Parameterizable#parameterize(org.apache.avalon.framework.parameters.Parameters) */ public void parameterize(Parameters params) throws ParameterException { this.optionalParameters = new HashMap(); String paramNames[] = params.getNames(); for (int i = 0; i < paramNames.length; i++) { String param = paramNames[i]; optionalParameters.put(param, params.getParameter(param)); } } }
Applied patch submitted by Jean-Baptiste Quenot: * make XQueryGenerator cacheable by setting an optional expiration delay (cache-validity). * when sitemap parameters are removed, setup() is called again but optionalParameters is not reset, so outdated sitemap parameters may persist. svn path=/trunk/eXist-1.0/; revision=1653
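The second point above describes a pitfall rather than a fix: because optionalParameters is only created once in parameterize(), values picked up from an earlier pipeline match can survive into the next setup() call. A minimal sketch of one way around this is shown below; it is not the applied patch, it reuses only the fields and Avalon calls already visible in XQueryGenerator, and the defaultParameters field name is invented for illustration.

// Hypothetical sketch, not the applied patch: rebuild the per-request parameter
// map on every setup() so sitemap parameters removed from a pipeline do not
// leak into later invocations. "defaultParameters" is an invented field name.
private Map defaultParameters = new HashMap();

public void parameterize(Parameters params) throws ParameterException {
    this.defaultParameters = new HashMap();
    String[] paramNames = params.getNames();
    for (int i = 0; i < paramNames.length; i++) {
        this.defaultParameters.put(paramNames[i], params.getParameter(paramNames[i]));
    }
}

public void setup(SourceResolver resolver, Map objectModel, String source, Parameters parameters)
        throws ProcessingException, SAXException, IOException {
    super.setup(resolver, objectModel, source, parameters);
    // ... existing handling of collection, user, password, create-session, expand-xincludes ...
    // Start from the component defaults on every request instead of reusing the old map.
    this.optionalParameters = new HashMap(this.defaultParameters);
    String[] paramNames = parameters.getNames();
    for (int i = 0; i < paramNames.length; i++) {
        String param = paramNames[i];
        if (!(param.equals(COLLECTION_URI) || param.equals(USER) || param.equals(PASSWORD)
                || param.equals(CREATE_SESSION) || param.equals(EXPAND_XINCLUDES))) {
            this.optionalParameters.put(param, parameters.getParameter(param, ""));
        }
    }
}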
src/org/exist/cocoon/XQueryGenerator.java
Applied patch submitted by Jean-Baptiste Quenot:
Java
apache-2.0
1174084d4c10a8cb621050db9a4954e2d3077b0c
0
MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab
package org.myrobotlab.framework; import java.io.File; import java.io.Serializable; import java.util.ArrayList; import org.myrobotlab.framework.interfaces.Invoker; import org.myrobotlab.logging.LoggerFactory; import org.slf4j.Logger; /** * Simple class representing an operating system mrl process * * @author GroG * */ public class ProcessData implements Serializable { public final static Logger log = LoggerFactory.getLogger(ProcessData.class); private static final long serialVersionUID = 1L; public static final String STATE_RUNNING = "running"; public static final String STATE_STOPPED = "stopped"; public static final String STATE_RESTARTING = "restarting"; public static final String STATE_UNKNOWN = "unknown"; // TODO - need to start using id public Integer id; public String branch; public String name; public String version; public Long startTs = null; public Long stopTs = null; public String jarPath = null; public String javaExe = null; public String jniLibraryPath = null; public String jnaLibraryPath = null; public String Xmx = null; boolean userDefinedServices = false; public String state = STATE_STOPPED; transient public Process process; transient public Monitor monitor; static transient public Invoker service; ArrayList<String> in = null; public static class Monitor extends Thread { ProcessData data; public Monitor(ProcessData pd) { super(String.format("%s.monitor", pd.name)); this.data = pd; } @Override public void run() { try { if (data.process != null) { // data.isRunning = true; data.state = STATE_RUNNING; data.state = "running"; // don't wait if there is no agent if (service != null) { data.process.waitFor(); } } } catch (Exception e) { } data.state = STATE_STOPPED; data.state = "stopped"; if (ProcessData.service != null) { ProcessData.service.invoke("publishTerminated", data.id); } } } public ProcessData(Invoker service, Integer id, String branch, String version, String name, Process process) { this.id = id; ProcessData.service = service; this.name = name; this.branch = branch; this.version = version; this.process = process; } /** * copy of a ProcessData - threaded data will not be copied * @param pd the process data */ public ProcessData(ProcessData pd) { this.id = pd.id; this.name = pd.name; this.branch = pd.branch; this.version = pd.version; this.javaExe = pd.javaExe; this.jniLibraryPath = pd.jniLibraryPath; this.version = pd.version; this.jnaLibraryPath = pd.jnaLibraryPath; this.jarPath = pd.jarPath; this.Xmx = pd.Xmx; this.userDefinedServices = pd.userDefinedServices; if (pd.in != null) { this.in = new ArrayList<String>(); for (int i = 0; i < pd.in.size(); ++i) { this.in.add(pd.in.get(i)); } } // this.process = pd.process; // this.startTs = System.currentTimeMillis(); // monitor = new Monitor(this); // monitor.start(); } /* * FIXME - is too much catering to mrl execution ... 
* * convert an String[] into a valid ProcessData * * @param inCmdLine * @param defaultBranch * @param defaultVersion */ public ProcessData(Invoker service, String jarPath, String[] inCmdLine, String defaultBranch, String defaultVersion) { ProcessData.service = service; this.jarPath = jarPath; // String protectedDomain = // URLDecoder.decode(Agent.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(), // "UTF-8"); // log.info("protected domain {}", protectedDomain); // convert to ArrayList to process in = new ArrayList<String>(); for (int i = 0; i < inCmdLine.length; ++i) { String cmd = inCmdLine[i]; if (cmd.equals("-runtimeName")) { name = inCmdLine[i + 1]; continue; } if (cmd.equals("-branch")) { branch = inCmdLine[i + 1]; continue; } if (cmd.equals("-service")) { userDefinedServices = true; } // additional parameters in.add(inCmdLine[i]); } name = (name == null) ? "runtime" : name; branch = (branch == null) ? defaultBranch : branch; version = (version == null) ? defaultVersion : version; // step 1 - get current env data // String ps = File.pathSeparator; String fs = File.separator; Platform platform = Platform.getLocalInstance(); String exeName = platform.isWindows() ? "javaw" : "java"; javaExe = String.format("%s%sbin%s%s", System.getProperty("java.home"), fs, fs, exeName); jniLibraryPath = "-Djava.library.path=libraries/native"; jnaLibraryPath = "-Djna.library.path=libraries/native"; } public boolean isRunning() { return STATE_RUNNING.equals(state); } public String[] buildCmdLine() { ArrayList<String> cmd = new ArrayList<String>(); cmd.add(javaExe); cmd.add(jniLibraryPath); cmd.add(jnaLibraryPath); cmd.add("-cp"); // step 1 - get current env data String ps = File.pathSeparator; // bogus jython.jar added as a hack to support - jython's 'more' fragile // 2.7.0 interface :( // http://www.jython.org/archive/21/docs/registry.html // http://bugs.jython.org/issue2355 String classpath = String.format("%s%s./libraries/jar/jython.jar%s./libraries/jar/*%s./bin%s./build/classes", jarPath, ps, ps, ps, ps); cmd.add(classpath); cmd.add("org.myrobotlab.service.Runtime"); if (!userDefinedServices) { cmd.add("-service"); // cmd.add("webgui"); // cmd.add("WebGui"); cmd.add("log"); cmd.add("Log"); cmd.add("cli"); cmd.add("Cli"); cmd.add("gui"); cmd.add("SwingGui"); cmd.add("python"); cmd.add("Python"); } cmd.add("-fromAgent"); if (in != null) { for (int i = 0; i < in.size(); ++i) { cmd.add(in.get(i)); } } return cmd.toArray(new String[cmd.size()]); } public void setRestarting(){ state = STATE_RESTARTING; } public boolean isRestarting() { return state.equals(STATE_RESTARTING); } }
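To make the command-line round trip concrete, here is a small illustrative sketch; it is not part of the commit, the runtime name, branch, jar path and version strings are invented values, and the null Invoker simply means no agent callback is registered.

package org.myrobotlab.framework;

// Illustrative sketch only; argument values are invented for the example.
public class ProcessDataExample {
    public static void main(String[] args) {
        String[] cmdLine = { "-runtimeName", "worker", "-branch", "develop" };
        // null Invoker: no agent callback is needed just to assemble a command line
        ProcessData pd = new ProcessData(null, "./myrobotlab.jar", cmdLine, "develop", "unknown");
        // buildCmdLine() assembles the resolved java executable, the JNI/JNA library-path
        // flags, "-cp" plus the classpath, the org.myrobotlab.service.Runtime main class,
        // the default services (since no -service flag was passed) and "-fromAgent",
        // followed by the pass-through arguments collected from cmdLine.
        String[] cmd = pd.buildCmdLine();
        // Print the assembled command line instead of actually launching the process.
        System.out.println(String.join(" ", cmd));
    }
}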
src/org/myrobotlab/framework/ProcessData.java
package org.myrobotlab.framework; import java.io.File; import java.io.Serializable; import java.util.ArrayList; import org.myrobotlab.framework.interfaces.Invoker; import org.myrobotlab.logging.LoggerFactory; import org.slf4j.Logger; /** * Simple class representing an operating system mrl process * * @author GroG * */ public class ProcessData implements Serializable { public final static Logger log = LoggerFactory.getLogger(ProcessData.class); private static final long serialVersionUID = 1L; public static final String STATE_RUNNING = "running"; public static final String STATE_STOPPED = "stopped"; public static final String STATE_RESTARTING = "restarting"; public static final String STATE_UNKNOWN = "unknown"; // TODO - need to start using id public Integer id; public String branch; public String name; public String version; public Long startTs = null; public Long stopTs = null; public String jarPath = null; public String javaExe = null; public String jniLibraryPath = null; public String jnaLibraryPath = null; public String Xmx = null; boolean userDefinedServices = false; public String state = STATE_STOPPED; transient public Process process; transient public Monitor monitor; static transient public Invoker service; ArrayList<String> in = null; public static class Monitor extends Thread { ProcessData data; public Monitor(ProcessData pd) { super(String.format("%s.monitor", pd.name)); this.data = pd; } @Override public void run() { try { if (data.process != null) { // data.isRunning = true; data.state = STATE_RUNNING; data.state = "running"; // don't wait if there is no agent if (service != null) { data.process.waitFor(); } } } catch (Exception e) { } data.state = STATE_STOPPED; data.state = "stopped"; if (ProcessData.service != null) { ProcessData.service.invoke("publishTerminated", data.id); } } } public ProcessData(Invoker service, Integer id, String branch, String version, String name, Process process) { this.id = id; ProcessData.service = service; this.name = name; this.branch = branch; this.version = version; this.process = process; } /** * copy of a ProcessData - threaded data will not be copied * @param pd the process data */ public ProcessData(ProcessData pd) { this.id = pd.id; this.name = pd.name; this.branch = pd.branch; this.version = pd.version; this.javaExe = pd.javaExe; this.jniLibraryPath = pd.jniLibraryPath; this.version = pd.version; this.jnaLibraryPath = pd.jnaLibraryPath; this.userDefinedServices = pd.userDefinedServices; if (pd.in != null) { this.in = new ArrayList<String>(); for (int i = 0; i < pd.in.size(); ++i) { this.in.add(pd.in.get(i)); } } // this.process = pd.process; // this.startTs = System.currentTimeMillis(); // monitor = new Monitor(this); // monitor.start(); } /* * FIXME - is too much catering to mrl execution ... 
* * convert an String[] into a valid ProcessData * * @param inCmdLine * @param defaultBranch * @param defaultVersion */ public ProcessData(Invoker service, String jarPath, String[] inCmdLine, String defaultBranch, String defaultVersion) { ProcessData.service = service; this.jarPath = jarPath; // String protectedDomain = // URLDecoder.decode(Agent.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath(), // "UTF-8"); // log.info("protected domain {}", protectedDomain); // convert to ArrayList to process in = new ArrayList<String>(); for (int i = 0; i < inCmdLine.length; ++i) { String cmd = inCmdLine[i]; if (cmd.equals("-runtimeName")) { name = inCmdLine[i + 1]; continue; } if (cmd.equals("-branch")) { branch = inCmdLine[i + 1]; continue; } if (cmd.equals("-service")) { userDefinedServices = true; } // additional parameters in.add(inCmdLine[i]); } name = (name == null) ? "runtime" : name; branch = (branch == null) ? defaultBranch : branch; version = (version == null) ? defaultVersion : version; // step 1 - get current env data // String ps = File.pathSeparator; String fs = File.separator; Platform platform = Platform.getLocalInstance(); String exeName = platform.isWindows() ? "javaw" : "java"; javaExe = String.format("%s%sbin%s%s", System.getProperty("java.home"), fs, fs, exeName); jniLibraryPath = "-Djava.library.path=libraries/native"; jnaLibraryPath = "-Djna.library.path=libraries/native"; } public boolean isRunning() { return STATE_RUNNING.equals(state); } public String[] buildCmdLine() { ArrayList<String> cmd = new ArrayList<String>(); cmd.add(javaExe); cmd.add(jniLibraryPath); cmd.add(jnaLibraryPath); cmd.add("-cp"); // step 1 - get current env data String ps = File.pathSeparator; // bogus jython.jar added as a hack to support - jython's 'more' fragile // 2.7.0 interface :( // http://www.jython.org/archive/21/docs/registry.html // http://bugs.jython.org/issue2355 String classpath = String.format("%s%s./libraries/jar/jython.jar%s./libraries/jar/*%s./bin%s./build/classes", jarPath, ps, ps, ps, ps); cmd.add(classpath); cmd.add("org.myrobotlab.service.Runtime"); if (!userDefinedServices) { cmd.add("-service"); // cmd.add("webgui"); // cmd.add("WebGui"); cmd.add("log"); cmd.add("Log"); cmd.add("cli"); cmd.add("Cli"); cmd.add("gui"); cmd.add("SwingGui"); cmd.add("python"); cmd.add("Python"); } cmd.add("-fromAgent"); if (in != null) { for (int i = 0; i < in.size(); ++i) { cmd.add(in.get(i)); } } return cmd.toArray(new String[cmd.size()]); } public void setRestarting(){ state = STATE_RESTARTING; } public boolean isRestarting() { return state.equals(STATE_RESTARTING); } }
more fixes
src/org/myrobotlab/framework/ProcessData.java
more fixes
Java
apache-2.0
ba99b8e6af32fee60aba35719fedfef01e1950ca
0
hgschmie/presto,ebyhr/presto,jiangyifangh/presto,EvilMcJerkface/presto,mvp/presto,prestodb/presto,Teradata/presto,losipiuk/presto,smartnews/presto,svstanev/presto,aleph-zero/presto,sopel39/presto,miniway/presto,zzhao0/presto,sumitkgec/presto,erichwang/presto,electrum/presto,yuananf/presto,martint/presto,troels/nz-presto,yuananf/presto,nezihyigitbasi/presto,ebyhr/presto,nezihyigitbasi/presto,prateek1306/presto,Yaliang/presto,ebd2/presto,troels/nz-presto,ocono-tech/presto,geraint0923/presto,wyukawa/presto,ebd2/presto,chrisunder/presto,zzhao0/presto,electrum/presto,youngwookim/presto,jxiang/presto,treasure-data/presto,arhimondr/presto,bloomberg/presto,wagnermarkd/presto,EvilMcJerkface/presto,Jimexist/presto,hgschmie/presto,Praveen2112/presto,11xor6/presto,martint/presto,martint/presto,ptkool/presto,mvp/presto,prestodb/presto,ocono-tech/presto,dain/presto,aleph-zero/presto,shixuan-fan/presto,chrisunder/presto,wagnermarkd/presto,sopel39/presto,Yaliang/presto,prateek1306/presto,arhimondr/presto,sumitkgec/presto,electrum/presto,arhimondr/presto,aramesh117/presto,haozhun/presto,erichwang/presto,shixuan-fan/presto,11xor6/presto,raghavsethi/presto,raghavsethi/presto,facebook/presto,prateek1306/presto,miniway/presto,ArturGajowy/presto,jxiang/presto,RobinUS2/presto,wagnermarkd/presto,treasure-data/presto,erichwang/presto,bloomberg/presto,wyukawa/presto,aramesh117/presto,takari/presto,Yaliang/presto,haozhun/presto,cberner/presto,ebyhr/presto,gh351135612/presto,chrisunder/presto,damiencarol/presto,smartnews/presto,Yaliang/presto,svstanev/presto,aleph-zero/presto,losipiuk/presto,stewartpark/presto,mbeitchman/presto,martint/presto,troels/nz-presto,takari/presto,twitter-forks/presto,shixuan-fan/presto,prestodb/presto,TeradataCenterForHadoop/bootcamp,chrisunder/presto,stewartpark/presto,wyukawa/presto,miniway/presto,mvp/presto,Praveen2112/presto,prestodb/presto,ptkool/presto,dain/presto,jiangyifangh/presto,raghavsethi/presto,ptkool/presto,prateek1306/presto,aglne/presto,youngwookim/presto,cberner/presto,aleph-zero/presto,RobinUS2/presto,mbeitchman/presto,gh351135612/presto,facebook/presto,mvp/presto,aleph-zero/presto,jxiang/presto,Teradata/presto,sopel39/presto,miniway/presto,twitter-forks/presto,wyukawa/presto,elonazoulay/presto,bloomberg/presto,dain/presto,arhimondr/presto,treasure-data/presto,ArturGajowy/presto,Yaliang/presto,ArturGajowy/presto,11xor6/presto,damiencarol/presto,Teradata/presto,aramesh117/presto,youngwookim/presto,nezihyigitbasi/presto,Praveen2112/presto,mbeitchman/presto,facebook/presto,svstanev/presto,mandusm/presto,losipiuk/presto,gh351135612/presto,mbeitchman/presto,11xor6/presto,EvilMcJerkface/presto,mandusm/presto,sumitkgec/presto,jiangyifangh/presto,aramesh117/presto,cberner/presto,Jimexist/presto,EvilMcJerkface/presto,nezihyigitbasi/presto,twitter-forks/presto,mvp/presto,zzhao0/presto,yuananf/presto,stewartpark/presto,stewartpark/presto,elonazoulay/presto,smartnews/presto,gh351135612/presto,ocono-tech/presto,dain/presto,elonazoulay/presto,chrisunder/presto,hgschmie/presto,geraint0923/presto,aglne/presto,nezihyigitbasi/presto,takari/presto,mandusm/presto,Jimexist/presto,smartnews/presto,aramesh117/presto,mandusm/presto,troels/nz-presto,hgschmie/presto,Praveen2112/presto,erichwang/presto,haozhun/presto,treasure-data/presto,11xor6/presto,haozhun/presto,damiencarol/presto,ocono-tech/presto,jiangyifangh/presto,dain/presto,prestodb/presto,EvilMcJerkface/presto,losipiuk/presto,damiencarol/presto,wagnermarkd/presto,jiangyifangh/presto,troels/nz-presto,youngwookim/presto,ebd2/presto,ebyhr/pr
esto,jxiang/presto,twitter-forks/presto,bloomberg/presto,aglne/presto,sopel39/presto,smartnews/presto,cberner/presto,TeradataCenterForHadoop/bootcamp,geraint0923/presto,shixuan-fan/presto,damiencarol/presto,Jimexist/presto,wagnermarkd/presto,TeradataCenterForHadoop/bootcamp,Jimexist/presto,ptkool/presto,miniway/presto,geraint0923/presto,twitter-forks/presto,treasure-data/presto,RobinUS2/presto,stewartpark/presto,elonazoulay/presto,Teradata/presto,takari/presto,sopel39/presto,treasure-data/presto,ptkool/presto,electrum/presto,ebd2/presto,mbeitchman/presto,geraint0923/presto,aglne/presto,facebook/presto,Teradata/presto,martint/presto,ArturGajowy/presto,hgschmie/presto,sumitkgec/presto,TeradataCenterForHadoop/bootcamp,losipiuk/presto,erichwang/presto,shixuan-fan/presto,jxiang/presto,yuananf/presto,raghavsethi/presto,youngwookim/presto,arhimondr/presto,RobinUS2/presto,svstanev/presto,wyukawa/presto,RobinUS2/presto,prateek1306/presto,bloomberg/presto,ocono-tech/presto,svstanev/presto,TeradataCenterForHadoop/bootcamp,ebyhr/presto,takari/presto,aglne/presto,haozhun/presto,prestodb/presto,yuananf/presto,cberner/presto,electrum/presto,mandusm/presto,ArturGajowy/presto,zzhao0/presto,sumitkgec/presto,Praveen2112/presto,gh351135612/presto,ebd2/presto,raghavsethi/presto,facebook/presto,zzhao0/presto,elonazoulay/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner; import com.facebook.presto.Session; import com.facebook.presto.execution.StageInfo; import com.facebook.presto.execution.StageStats; import com.facebook.presto.execution.TaskInfo; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.OperatorNotFoundException; import com.facebook.presto.metadata.Signature; import com.facebook.presto.metadata.TableHandle; import com.facebook.presto.metadata.TableLayout; import com.facebook.presto.operator.OperatorStats; import com.facebook.presto.operator.PipelineStats; import com.facebook.presto.operator.TaskStats; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ConnectorTableLayoutHandle; import com.facebook.presto.spi.predicate.Domain; import com.facebook.presto.spi.predicate.Marker; import com.facebook.presto.spi.predicate.NullableValue; import com.facebook.presto.spi.predicate.Range; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.Type; import com.facebook.presto.sql.FunctionInvoker; import com.facebook.presto.sql.planner.plan.AggregationNode; import com.facebook.presto.sql.planner.plan.ApplyNode; import com.facebook.presto.sql.planner.plan.AssignUniqueId; import com.facebook.presto.sql.planner.plan.Assignments; import com.facebook.presto.sql.planner.plan.DeleteNode; import com.facebook.presto.sql.planner.plan.DistinctLimitNode; import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode; import com.facebook.presto.sql.planner.plan.ExceptNode; import com.facebook.presto.sql.planner.plan.ExchangeNode; import com.facebook.presto.sql.planner.plan.ExchangeNode.Scope; import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode; import com.facebook.presto.sql.planner.plan.FilterNode; import com.facebook.presto.sql.planner.plan.GroupIdNode; import com.facebook.presto.sql.planner.plan.IndexJoinNode; import com.facebook.presto.sql.planner.plan.IndexSourceNode; import com.facebook.presto.sql.planner.plan.IntersectNode; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.LimitNode; import com.facebook.presto.sql.planner.plan.MarkDistinctNode; import com.facebook.presto.sql.planner.plan.MetadataDeleteNode; import com.facebook.presto.sql.planner.plan.OutputNode; import com.facebook.presto.sql.planner.plan.PlanFragmentId; import com.facebook.presto.sql.planner.plan.PlanNode; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.facebook.presto.sql.planner.plan.PlanVisitor; import com.facebook.presto.sql.planner.plan.ProjectNode; import com.facebook.presto.sql.planner.plan.RemoteSourceNode; import com.facebook.presto.sql.planner.plan.RowNumberNode; import com.facebook.presto.sql.planner.plan.SampleNode; import com.facebook.presto.sql.planner.plan.SemiJoinNode; import com.facebook.presto.sql.planner.plan.SortNode; import com.facebook.presto.sql.planner.plan.TableFinishNode; import 
com.facebook.presto.sql.planner.plan.TableScanNode; import com.facebook.presto.sql.planner.plan.TableWriterNode; import com.facebook.presto.sql.planner.plan.TopNNode; import com.facebook.presto.sql.planner.plan.TopNRowNumberNode; import com.facebook.presto.sql.planner.plan.UnionNode; import com.facebook.presto.sql.planner.plan.UnnestNode; import com.facebook.presto.sql.planner.plan.ValuesNode; import com.facebook.presto.sql.planner.plan.WindowNode; import com.facebook.presto.sql.tree.ComparisonExpression; import com.facebook.presto.sql.tree.ComparisonExpressionType; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FrameBound; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.sql.tree.SymbolReference; import com.facebook.presto.sql.tree.Window; import com.facebook.presto.sql.tree.WindowFrame; import com.facebook.presto.util.GraphvizPrinter; import com.google.common.base.CaseFormat; import com.google.common.base.Functions; import com.google.common.base.Joiner; import com.google.common.base.Strings; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.airlift.slice.Slice; import io.airlift.units.DataSize; import io.airlift.units.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.facebook.presto.execution.StageInfo.getAllStages; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.sql.planner.DomainUtils.simplifyDomain; import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Iterables.getLast; import static com.google.common.collect.Lists.reverse; import static io.airlift.units.DataSize.Unit.BYTE; import static io.airlift.units.DataSize.succinctBytes; import static io.airlift.units.DataSize.succinctDataSize; import static java.lang.Double.isFinite; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.stream.Collectors.toList; public class PlanPrinter { private final StringBuilder output = new StringBuilder(); private final Metadata metadata; private final Optional<Map<PlanNodeId, PlanNodeStats>> stats; private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session sesion) { this(plan, types, metadata, sesion, 0); } private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, int indent) { requireNonNull(plan, "plan is null"); requireNonNull(types, "types is null"); requireNonNull(metadata, "metadata is null"); this.metadata = metadata; this.stats = Optional.empty(); Visitor visitor = new Visitor(types, session); plan.accept(visitor, indent); } private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, 
Metadata metadata, Session session, Map<PlanNodeId, PlanNodeStats> stats, int indent) { requireNonNull(plan, "plan is null"); requireNonNull(types, "types is null"); requireNonNull(metadata, "metadata is null"); this.metadata = metadata; this.stats = Optional.of(stats); Visitor visitor = new Visitor(types, session); plan.accept(visitor, indent); } @Override public String toString() { return output.toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session) { return new PlanPrinter(plan, types, metadata, session).toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, int indent) { return new PlanPrinter(plan, types, metadata, session, indent).toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, Map<PlanNodeId, PlanNodeStats> stats, int indent) { return new PlanPrinter(plan, types, metadata, session, stats, indent).toString(); } public static String textDistributedPlan(List<StageInfo> stages, Metadata metadata, Session session) { StringBuilder builder = new StringBuilder(); List<StageInfo> allStages = stages.stream() .flatMap(stage -> getAllStages(Optional.of(stage)).stream()) .collect(toImmutableList()); for (StageInfo stageInfo : allStages) { Map<PlanNodeId, PlanNodeStats> aggregatedStats = new HashMap<>(); List<PlanNodeStats> planNodeStats = stageInfo.getTasks().stream() .map(TaskInfo::getStats) .flatMap(taskStats -> getPlanNodeStats(taskStats).stream()) .collect(toList()); for (PlanNodeStats stats : planNodeStats) { aggregatedStats.merge(stats.getPlanNodeId(), stats, PlanNodeStats::merge); } builder.append(formatFragment(metadata, session, stageInfo.getPlan(), Optional.of(stageInfo.getStageStats()), Optional.of(aggregatedStats))); } return builder.toString(); } private static List<PlanNodeStats> getPlanNodeStats(TaskStats taskStats) { // Best effort to reconstruct the plan nodes from operators. // Because stats are collected separately from query execution, // it's possible that some or all of them are missing or out of date. // For example, a LIMIT clause can cause a query to finish before stats // are collected from the leaf stages. 
Map<PlanNodeId, Long> inputPositions = new HashMap<>(); Map<PlanNodeId, Long> inputBytes = new HashMap<>(); Map<PlanNodeId, Long> outputPositions = new HashMap<>(); Map<PlanNodeId, Long> outputBytes = new HashMap<>(); Map<PlanNodeId, Long> wallMillis = new HashMap<>(); for (PipelineStats pipelineStats : taskStats.getPipelines()) { // Due to eventual consistently collected stats, these could be empty if (pipelineStats.getOperatorSummaries().isEmpty()) { continue; } Set<PlanNodeId> processedNodes = new HashSet<>(); PlanNodeId inputPlanNode = pipelineStats.getOperatorSummaries().iterator().next().getPlanNodeId(); PlanNodeId outputPlanNode = getLast(pipelineStats.getOperatorSummaries()).getPlanNodeId(); // Gather input statistics for (OperatorStats operatorStats : pipelineStats.getOperatorSummaries()) { PlanNodeId planNodeId = operatorStats.getPlanNodeId(); long wall = operatorStats.getAddInputWall().toMillis() + operatorStats.getGetOutputWall().toMillis() + operatorStats.getFinishWall().toMillis(); wallMillis.merge(planNodeId, wall, Long::sum); // A pipeline like hash build before join might link to another "internal" pipelines which provide actual input for this plan node if (operatorStats.getPlanNodeId().equals(inputPlanNode) && !pipelineStats.isInputPipeline()) { continue; } if (processedNodes.contains(planNodeId)) { continue; } inputPositions.merge(planNodeId, operatorStats.getInputPositions(), Long::sum); inputBytes.merge(planNodeId, operatorStats.getInputDataSize().toBytes(), Long::sum); processedNodes.add(planNodeId); } // Gather output statistics processedNodes.clear(); for (OperatorStats operatorStats : reverse(pipelineStats.getOperatorSummaries())) { PlanNodeId planNodeId = operatorStats.getPlanNodeId(); // An "internal" pipeline like a hash build, links to another pipeline which is the actual output for this plan node if (operatorStats.getPlanNodeId().equals(outputPlanNode) && !pipelineStats.isOutputPipeline()) { continue; } if (processedNodes.contains(planNodeId)) { continue; } outputPositions.merge(planNodeId, operatorStats.getOutputPositions(), Long::sum); outputBytes.merge(planNodeId, operatorStats.getOutputDataSize().toBytes(), Long::sum); processedNodes.add(planNodeId); } } List<PlanNodeStats> stats = new ArrayList<>(); for (Map.Entry<PlanNodeId, Long> entry : wallMillis.entrySet()) { PlanNodeId planNodeId = entry.getKey(); stats.add(new PlanNodeStats( entry.getKey(), new Duration(entry.getValue(), MILLISECONDS), inputPositions.get(planNodeId), succinctDataSize(inputBytes.get(planNodeId), BYTE), // It's possible there will be no output stats because all the pipelines that we observed were non-output. 
// For example in a query like SELECT * FROM a JOIN b ON c = d LIMIT 1 // It's possible to observe stats after the build starts, but before the probe does // and therefore only have wall time, but no output stats outputPositions.getOrDefault(planNodeId, 0L), succinctDataSize(outputBytes.getOrDefault(planNodeId, 0L), BYTE))); } return stats; } public static String textDistributedPlan(SubPlan plan, Metadata metadata, Session session) { StringBuilder builder = new StringBuilder(); for (PlanFragment fragment : plan.getAllFragments()) { builder.append(formatFragment(metadata, session, fragment, Optional.empty(), Optional.empty())); } return builder.toString(); } private static String formatFragment(Metadata metadata, Session session, PlanFragment fragment, Optional<StageStats> stageStats, Optional<Map<PlanNodeId, PlanNodeStats>> planNodeStats) { StringBuilder builder = new StringBuilder(); builder.append(format("Fragment %s [%s]\n", fragment.getId(), fragment.getPartitioning())); if (stageStats.isPresent()) { builder.append(indentString(1)) .append(format("Cost: CPU %s, Input: %s (%s), Output: %s (%s)\n", stageStats.get().getTotalCpuTime(), formatPositions(stageStats.get().getProcessedInputPositions()), stageStats.get().getProcessedInputDataSize(), formatPositions(stageStats.get().getOutputPositions()), stageStats.get().getOutputDataSize())); } PartitioningScheme partitioningScheme = fragment.getPartitioningScheme(); builder.append(indentString(1)) .append(format("Output layout: [%s]\n", Joiner.on(", ").join(partitioningScheme.getOutputLayout()))); boolean replicateNulls = partitioningScheme.isReplicateNulls(); List<String> arguments = partitioningScheme.getPartitioning().getArguments().stream() .map(argument -> { if (argument.isConstant()) { NullableValue constant = argument.getConstant(); String printableValue = castToVarchar(constant.getType(), constant.getValue(), metadata, session); return constant.getType().getDisplayName() + "(" + printableValue + ")"; } return argument.getColumn().toString(); }) .collect(toImmutableList()); builder.append(indentString(1)); if (replicateNulls) { builder.append(format("Output partitioning: %s (replicate nulls) [%s]%s\n", partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments), formatHash(partitioningScheme.getHashColumn()))); } else { builder.append(format("Output partitioning: %s [%s]%s\n", partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments), formatHash(partitioningScheme.getHashColumn()))); } if (stageStats.isPresent()) { builder.append(textLogicalPlan(fragment.getRoot(), fragment.getSymbols(), metadata, session, planNodeStats.get(), 1)) .append("\n"); } else { builder.append(textLogicalPlan(fragment.getRoot(), fragment.getSymbols(), metadata, session, 1)) .append("\n"); } return builder.toString(); } public static String graphvizLogicalPlan(PlanNode plan, Map<Symbol, Type> types) { PlanFragment fragment = new PlanFragment( new PlanFragmentId("graphviz_plan"), plan, types, SINGLE_DISTRIBUTION, ImmutableList.of(plan.getId()), new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), plan.getOutputSymbols())); return GraphvizPrinter.printLogical(ImmutableList.of(fragment)); } public static String graphvizDistributedPlan(SubPlan plan) { return GraphvizPrinter.printDistributed(plan); } private void print(int indent, String format, Object... 
args) { String value; if (args.length == 0) { value = format; } else { value = format(format, args); } output.append(indentString(indent)).append(value).append('\n'); } private void print(int indent, String format, List<Object> args) { print(indent, format, args.toArray(new Object[args.size()])); } private void printStats(int intent, PlanNodeId planNodeId) { printStats(intent, planNodeId, false, false); } private void printStats(int indent, PlanNodeId planNodeId, boolean printInput, boolean printFiltered) { if (!stats.isPresent()) { return; } long totalMillis = stats.get().values().stream() .mapToLong(node -> node.getWallTime().toMillis()) .sum(); PlanNodeStats nodeStats = stats.get().get(planNodeId); if (nodeStats == null) { output.append(indentString(indent)); output.append("Cost: unknown"); if (printInput) { output.append(", Input: unknown"); } output.append(", Output: unknown"); if (printFiltered) { output.append(", Filtered: unknown"); } output.append('\n'); return; } double fraction = (nodeStats.getWallTime().toMillis()) / (double) totalMillis; String fractionString; if (isFinite(fraction)) { fractionString = format(Locale.US, "%.2f%%", 100.0 * fraction); } else { fractionString = "unknown"; } output.append(indentString(indent)); output.append("Cost: " + fractionString); if (printInput) { output.append(format(", Input: %s (%s)", formatPositions(nodeStats.getInputPositions()), nodeStats.getInputDataSize().toString())); } output.append(format(", Output: %s (%s)", formatPositions(nodeStats.getOutputPositions()), nodeStats.getOutputDataSize().toString())); if (printFiltered) { double filtered = 100.0 * (nodeStats.getInputPositions() - nodeStats.getOutputPositions()) / nodeStats.getInputPositions(); String filteredString; if (isFinite(filtered)) { filteredString = format(Locale.US, "%.2f%%", filtered); } else { filteredString = "unknown"; } output.append(", Filtered: " + filteredString); } output.append('\n'); } private static String formatPositions(long positions) { if (positions == 1) { return "1 row"; } return positions + " rows"; } private static String indentString(int indent) { return Strings.repeat(" ", indent); } private class Visitor extends PlanVisitor<Integer, Void> { private final Map<Symbol, Type> types; private final Session session; @SuppressWarnings("AssignmentToCollectionOrArrayFieldFromParameter") public Visitor(Map<Symbol, Type> types, Session session) { this.types = types; this.session = session; } @Override public Void visitExplainAnalyze(ExplainAnalyzeNode node, Integer indent) { print(indent, "- ExplainAnalyze => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitJoin(JoinNode node, Integer indent) { List<Expression> joinExpressions = new ArrayList<>(); for (JoinNode.EquiJoinClause clause : node.getCriteria()) { joinExpressions.add(new ComparisonExpression(ComparisonExpressionType.EQUAL, clause.getLeft().toSymbolReference(), clause.getRight().toSymbolReference())); } node.getFilter().ifPresent(expression -> joinExpressions.add(expression)); // Check if the node is actually a cross join node if (node.getType() == JoinNode.Type.INNER && joinExpressions.isEmpty()) { print(indent, "- CrossJoin => [%s]", formatOutputs(node.getOutputSymbols())); } else { print(indent, "- %s[%s]%s => [%s]", node.getType().getJoinLabel(), Joiner.on(" AND ").join(joinExpressions), formatHash(node.getLeftHashSymbol(), node.getRightHashSymbol()), formatOutputs(node.getOutputSymbols())); } 
printStats(indent + 2, node.getId()); node.getLeft().accept(this, indent + 1); node.getRight().accept(this, indent + 1); return null; } @Override public Void visitSemiJoin(SemiJoinNode node, Integer indent) { print(indent, "- SemiJoin[%s = %s]%s => [%s]", node.getSourceJoinSymbol(), node.getFilteringSourceJoinSymbol(), formatHash(node.getSourceHashSymbol(), node.getFilteringSourceHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); node.getSource().accept(this, indent + 1); node.getFilteringSource().accept(this, indent + 1); return null; } @Override public Void visitIndexSource(IndexSourceNode node, Integer indent) { print(indent, "- IndexSource[%s, lookup = %s] => [%s]", node.getIndexHandle(), node.getLookupSymbols(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (Map.Entry<Symbol, ColumnHandle> entry : node.getAssignments().entrySet()) { if (node.getOutputSymbols().contains(entry.getKey())) { print(indent + 2, "%s := %s", entry.getKey(), entry.getValue()); } } return null; } @Override public Void visitIndexJoin(IndexJoinNode node, Integer indent) { List<Expression> joinExpressions = new ArrayList<>(); for (IndexJoinNode.EquiJoinClause clause : node.getCriteria()) { joinExpressions.add(new ComparisonExpression(ComparisonExpressionType.EQUAL, clause.getProbe().toSymbolReference(), clause.getIndex().toSymbolReference())); } print(indent, "- %sIndexJoin[%s]%s => [%s]", node.getType().getJoinLabel(), Joiner.on(" AND ").join(joinExpressions), formatHash(node.getProbeHashSymbol(), node.getIndexHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); node.getProbeSource().accept(this, indent + 1); node.getIndexSource().accept(this, indent + 1); return null; } @Override public Void visitLimit(LimitNode node, Integer indent) { print(indent, "- Limit%s[%s] => [%s]", node.isPartial() ? "Partial" : "", node.getCount(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitDistinctLimit(DistinctLimitNode node, Integer indent) { print(indent, "- DistinctLimit%s[%s]%s => [%s]", node.isPartial() ? 
"Partial" : "", node.getLimit(), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitAggregation(AggregationNode node, Integer indent) { String type = ""; if (node.getStep() != AggregationNode.Step.SINGLE) { type = format("(%s)", node.getStep().toString()); } String key = ""; if (!node.getGroupingKeys().isEmpty()) { key = node.getGroupingKeys().toString(); } print(indent, "- Aggregate%s%s%s => [%s]", type, key, formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (Map.Entry<Symbol, FunctionCall> entry : node.getAggregations().entrySet()) { if (node.getMasks().containsKey(entry.getKey())) { print(indent + 2, "%s := %s (mask = %s)", entry.getKey(), entry.getValue(), node.getMasks().get(entry.getKey())); } else { print(indent + 2, "%s := %s", entry.getKey(), entry.getValue()); } } return processChildren(node, indent + 1); } @Override public Void visitGroupId(GroupIdNode node, Integer indent) { // grouping sets are easier to understand in terms of inputs List<List<Symbol>> inputGroupingSetSymbols = node.getGroupingSets().stream() .map(set -> set.stream() .map(symbol -> node.getGroupingSetMappings().get(symbol)) .collect(Collectors.toList())) .collect(Collectors.toList()); print(indent, "- GroupId%s => [%s]", inputGroupingSetSymbols, formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (Map.Entry<Symbol, Symbol> mapping : node.getGroupingSetMappings().entrySet()) { print(indent + 2, "%s := %s", mapping.getKey(), mapping.getValue()); } for (Map.Entry<Symbol, Symbol> argument : node.getArgumentMappings().entrySet()) { print(indent + 2, "%s := %s", argument.getKey(), argument.getValue()); } return processChildren(node, indent + 1); } @Override public Void visitMarkDistinct(MarkDistinctNode node, Integer indent) { print(indent, "- MarkDistinct[distinct=%s marker=%s]%s => [%s]", formatOutputs(node.getDistinctSymbols()), node.getMarkerSymbol(), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitWindow(WindowNode node, Integer indent) { List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction()); List<String> orderBy = Lists.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input)); List<String> args = new ArrayList<>(); if (!partitionBy.isEmpty()) { List<Symbol> prePartitioned = node.getPartitionBy().stream() .filter(node.getPrePartitionedInputs()::contains) .collect(toImmutableList()); List<Symbol> notPrePartitioned = node.getPartitionBy().stream() .filter(column -> !node.getPrePartitionedInputs().contains(column)) .collect(toImmutableList()); StringBuilder builder = new StringBuilder(); if (!prePartitioned.isEmpty()) { builder.append("<") .append(Joiner.on(", ").join(prePartitioned)) .append(">"); if (!notPrePartitioned.isEmpty()) { builder.append(", "); } } if (!notPrePartitioned.isEmpty()) { builder.append(Joiner.on(", ").join(notPrePartitioned)); } args.add(format("partition by (%s)", builder)); } if (!orderBy.isEmpty()) { args.add(format("order by (%s)", Stream.concat( node.getOrderBy().stream() .limit(node.getPreSortedOrderPrefix()) .map(symbol -> "<" + symbol + " " + node.getOrderings().get(symbol) + ">"), node.getOrderBy().stream() .skip(node.getPreSortedOrderPrefix()) 
.map(symbol -> symbol + " " + node.getOrderings().get(symbol))) .collect(Collectors.joining(", ")))); } print(indent, "- Window[%s]%s => [%s]", Joiner.on(", ").join(args), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (Map.Entry<Symbol, WindowNode.Function> entry : node.getWindowFunctions().entrySet()) { FunctionCall call = entry.getValue().getFunctionCall(); String frameInfo = call.getWindow() .flatMap(Window::getFrame) .map(PlanPrinter::formatFrame) .orElse(""); print(indent + 2, "%s := %s(%s) %s", entry.getKey(), call.getName(), Joiner.on(", ").join(call.getArguments()), frameInfo); } return processChildren(node, indent + 1); } @Override public Void visitTopNRowNumber(TopNRowNumberNode node, Integer indent) { List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction()); List<String> orderBy = Lists.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input)); List<String> args = new ArrayList<>(); args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy))); args.add(format("order by (%s)", Joiner.on(", ").join(orderBy))); print(indent, "- TopNRowNumber[%s limit %s]%s => [%s]", Joiner.on(", ").join(args), node.getMaxRowCountPerPartition(), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); print(indent + 2, "%s := %s", node.getRowNumberSymbol(), "row_number()"); return processChildren(node, indent + 1); } @Override public Void visitRowNumber(RowNumberNode node, Integer indent) { List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction()); List<String> args = new ArrayList<>(); if (!partitionBy.isEmpty()) { args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy))); } if (node.getMaxRowCountPerPartition().isPresent()) { args.add(format("limit = %s", node.getMaxRowCountPerPartition().get())); } print(indent, "- RowNumber[%s]%s => [%s]", Joiner.on(", ").join(args), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); print(indent + 2, "%s := %s", node.getRowNumberSymbol(), "row_number()"); return processChildren(node, indent + 1); } @Override public Void visitTableScan(TableScanNode node, Integer indent) { TableHandle table = node.getTable(); print(indent, "- TableScan[%s, originalConstraint = %s] => [%s]", table, node.getOriginalConstraint(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); printTableScanInfo(node, indent); return null; } @Override public Void visitValues(ValuesNode node, Integer indent) { print(indent, "- Values => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (List<Expression> row : node.getRows()) { print(indent + 2, "(" + Joiner.on(", ").join(row) + ")"); } return null; } @Override public Void visitFilter(FilterNode node, Integer indent) { return visitScanFilterAndProjectInfo(node.getId(), Optional.of(node), Optional.empty(), indent); } @Override public Void visitProject(ProjectNode node, Integer indent) { if (node.getSource() instanceof FilterNode) { return visitScanFilterAndProjectInfo(node.getId(), Optional.of((FilterNode) node.getSource()), Optional.of(node), indent); } return visitScanFilterAndProjectInfo(node.getId(), Optional.empty(), Optional.of(node), indent); } private Void visitScanFilterAndProjectInfo( PlanNodeId planNodeId, Optional<FilterNode> filterNode, 
Optional<ProjectNode> projectNode, int indent) { checkState(projectNode.isPresent() || filterNode.isPresent()); PlanNode sourceNode; if (filterNode.isPresent()) { sourceNode = filterNode.get().getSource(); } else { sourceNode = projectNode.get().getSource(); } Optional<TableScanNode> scanNode; if (sourceNode instanceof TableScanNode) { scanNode = Optional.of((TableScanNode) sourceNode); } else { scanNode = Optional.empty(); } String format = "["; String operatorName = "- "; List<Object> arguments = new LinkedList<>(); if (scanNode.isPresent()) { operatorName += "Scan"; format += "table = %s, originalConstraint = %s"; if (filterNode.isPresent()) { format += ", "; } TableHandle table = scanNode.get().getTable(); arguments.add(table); arguments.add(scanNode.get().getOriginalConstraint()); } if (filterNode.isPresent()) { operatorName += "Filter"; format += "filterPredicate = %s"; arguments.add(filterNode.get().getPredicate()); } format += "] => [%s]"; if (projectNode.isPresent()) { operatorName += "Project"; arguments.add(formatOutputs(projectNode.get().getOutputSymbols())); } else { arguments.add(formatOutputs(filterNode.get().getOutputSymbols())); } format = operatorName + format; print(indent, format, arguments); printStats(indent + 2, planNodeId, true, true); if (projectNode.isPresent()) { printAssignments(projectNode.get().getAssignments(), indent + 2); } if (scanNode.isPresent()) { printTableScanInfo(scanNode.get(), indent); return null; } sourceNode.accept(this, indent + 1); return null; } private void printTableScanInfo(TableScanNode node, int indent) { TableHandle table = node.getTable(); TupleDomain<ColumnHandle> predicate = node.getLayout() .map(layoutHandle -> metadata.getLayout(session, layoutHandle)) .map(TableLayout::getPredicate) .orElse(TupleDomain.all()); if (node.getLayout().isPresent()) { // TODO: find a better way to do this ConnectorTableLayoutHandle layout = node.getLayout().get().getConnectorHandle(); if (!table.getConnectorHandle().toString().equals(layout.toString())) { print(indent + 2, "LAYOUT: %s", layout); } } if (predicate.isNone()) { print(indent + 2, ":: NONE"); } else { // first, print output columns and their constraints for (Map.Entry<Symbol, ColumnHandle> assignment : node.getAssignments().entrySet()) { ColumnHandle column = assignment.getValue(); print(indent + 2, "%s := %s", assignment.getKey(), column); printConstraint(indent + 3, column, predicate); } // then, print constraints for columns that are not in the output if (!predicate.isAll()) { Set<ColumnHandle> outputs = ImmutableSet.copyOf(node.getAssignments().values()); predicate.getDomains().get() .entrySet().stream() .filter(entry -> !outputs.contains(entry.getKey())) .forEach(entry -> { ColumnHandle column = entry.getKey(); print(indent + 2, "%s", column); printConstraint(indent + 3, column, predicate); }); } } } @Override public Void visitUnnest(UnnestNode node, Integer indent) { print(indent, "- Unnest [replicate=%s, unnest=%s] => [%s]", formatOutputs(node.getReplicateSymbols()), formatOutputs(node.getUnnestSymbols().keySet()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitOutput(OutputNode node, Integer indent) { print(indent, "- Output[%s] => [%s]", Joiner.on(", ").join(node.getColumnNames()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (int i = 0; i < node.getColumnNames().size(); i++) { String name = node.getColumnNames().get(i); Symbol symbol = 
node.getOutputSymbols().get(i); if (!name.equals(symbol.toString())) { print(indent + 2, "%s := %s", name, symbol); } } return processChildren(node, indent + 1); } @Override public Void visitTopN(TopNNode node, Integer indent) { Iterable<String> keys = Iterables.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input)); print(indent, "- TopN[%s by (%s)] => [%s]", node.getCount(), Joiner.on(", ").join(keys), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitSort(SortNode node, Integer indent) { Iterable<String> keys = Iterables.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input)); print(indent, "- Sort[%s] => [%s]", Joiner.on(", ").join(keys), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitRemoteSource(RemoteSourceNode node, Integer indent) { print(indent, "- RemoteSource[%s] => [%s]", Joiner.on(',').join(node.getSourceFragmentIds()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return null; } @Override public Void visitUnion(UnionNode node, Integer indent) { print(indent, "- Union => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitIntersect(IntersectNode node, Integer indent) { print(indent, "- Intersect => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitExcept(ExceptNode node, Integer indent) { print(indent, "- Except => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitTableWriter(TableWriterNode node, Integer indent) { print(indent, "- TableWriter => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (int i = 0; i < node.getColumnNames().size(); i++) { String name = node.getColumnNames().get(i); Symbol symbol = node.getColumns().get(i); print(indent + 2, "%s := %s", name, symbol); } return processChildren(node, indent + 1); } @Override public Void visitTableFinish(TableFinishNode node, Integer indent) { print(indent, "- TableCommit[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitSample(SampleNode node, Integer indent) { print(indent, "- Sample[%s: %s] => [%s]", node.getSampleType(), node.getSampleRatio(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitExchange(ExchangeNode node, Integer indent) { if (node.getScope() == Scope.LOCAL) { print(indent, "- LocalExchange[%s%s]%s (%s) => %s", node.getPartitioningScheme().getPartitioning().getHandle(), node.getPartitioningScheme().isReplicateNulls() ? " - REPLICATE NULLS" : "", formatHash(node.getPartitioningScheme().getHashColumn()), Joiner.on(", ").join(node.getPartitioningScheme().getPartitioning().getArguments()), formatOutputs(node.getOutputSymbols())); } else { print(indent, "- %sExchange[%s%s]%s => %s", UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, node.getScope().toString()), node.getType(), node.getPartitioningScheme().isReplicateNulls() ? 
" - REPLICATE NULLS" : "", formatHash(node.getPartitioningScheme().getHashColumn()), formatOutputs(node.getOutputSymbols())); } printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitDelete(DeleteNode node, Integer indent) { print(indent, "- Delete[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitMetadataDelete(MetadataDeleteNode node, Integer indent) { print(indent, "- MetadataDelete[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitEnforceSingleRow(EnforceSingleRowNode node, Integer indent) { print(indent, "- Scalar => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitAssignUniqueId(AssignUniqueId node, Integer indent) { print(indent, "- AssignUniqueId => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitApply(ApplyNode node, Integer indent) { print(indent, "- Apply[%s] => [%s]", node.getCorrelation(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); printAssignments(node.getSubqueryAssignments(), indent + 4); return processChildren(node, indent + 1); } @Override protected Void visitPlan(PlanNode node, Integer indent) { throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName()); } private Void processChildren(PlanNode node, int indent) { for (PlanNode child : node.getSources()) { child.accept(this, indent); } return null; } private void printAssignments(Assignments assignments, int indent) { for (Map.Entry<Symbol, Expression> entry : assignments.getMap().entrySet()) { if (entry.getValue() instanceof SymbolReference && ((SymbolReference) entry.getValue()).getName().equals(entry.getKey().getName())) { // skip identity assignments continue; } print(indent, "%s := %s", entry.getKey(), entry.getValue()); } } private String formatOutputs(Iterable<Symbol> symbols) { return Joiner.on(", ").join(Iterables.transform(symbols, input -> input + ":" + types.get(input).getDisplayName())); } private void printConstraint(int indent, ColumnHandle column, TupleDomain<ColumnHandle> constraint) { checkArgument(!constraint.isNone()); Map<ColumnHandle, Domain> domains = constraint.getDomains().get(); if (!constraint.isAll() && domains.containsKey(column)) { print(indent, ":: %s", formatDomain(simplifyDomain(domains.get(column)))); } } private String formatDomain(Domain domain) { ImmutableList.Builder<String> parts = ImmutableList.builder(); if (domain.isNullAllowed()) { parts.add("NULL"); } Type type = domain.getType(); domain.getValues().getValuesProcessor().consume( ranges -> { for (Range range : ranges.getOrderedRanges()) { StringBuilder builder = new StringBuilder(); if (range.isSingleValue()) { String value = castToVarchar(type, range.getSingleValue(), PlanPrinter.this.metadata, session); builder.append('[').append(value).append(']'); } else { builder.append((range.getLow().getBound() == Marker.Bound.EXACTLY) ? 
'[' : '('); if (range.getLow().isLowerUnbounded()) { builder.append("<min>"); } else { builder.append(castToVarchar(type, range.getLow().getValue(), PlanPrinter.this.metadata, session)); } builder.append(", "); if (range.getHigh().isUpperUnbounded()) { builder.append("<max>"); } else { builder.append(castToVarchar(type, range.getHigh().getValue(), PlanPrinter.this.metadata, session)); } builder.append((range.getHigh().getBound() == Marker.Bound.EXACTLY) ? ']' : ')'); } parts.add(builder.toString()); } }, discreteValues -> discreteValues.getValues().stream() .map(value -> castToVarchar(type, value, PlanPrinter.this.metadata, session)) .sorted() // Sort so the values will be printed in predictable order .forEach(parts::add), allOrNone -> { if (allOrNone.isAll()) { parts.add("ALL VALUES"); } }); return "[" + Joiner.on(", ").join(parts.build()) + "]"; } } private static String formatHash(Optional<Symbol>... hashes) { List<Symbol> symbols = Arrays.stream(hashes) .filter(Optional::isPresent) .map(Optional::get) .collect(toList()); if (symbols.isEmpty()) { return ""; } return "[" + Joiner.on(", ").join(symbols) + "]"; } private static String formatFrame(WindowFrame frame) { StringBuilder builder = new StringBuilder(frame.getType().toString()); FrameBound start = frame.getStart(); if (start.getValue().isPresent()) { builder.append(" ").append(start.getOriginalValue().get()); } builder.append(" ").append(start.getType()); Optional<FrameBound> end = frame.getEnd(); if (end.isPresent()) { if (end.get().getOriginalValue().isPresent()) { builder.append(" ").append(end.get().getOriginalValue().get()); } builder.append(" ").append(end.get().getType()); } return builder.toString(); } private static String castToVarchar(Type type, Object value, Metadata metadata, Session session) { if (value == null) { return "NULL"; } Signature coercion = metadata.getFunctionRegistry().getCoercion(type, VARCHAR); try { Slice coerced = (Slice) new FunctionInvoker(metadata.getFunctionRegistry()).invoke(coercion, session.toConnectorSession(), value); return coerced.toStringUtf8(); } catch (OperatorNotFoundException e) { return "<UNREPRESENTABLE VALUE>"; } catch (Throwable throwable) { throw Throwables.propagate(throwable); } } private static class PlanNodeStats { private final PlanNodeId planNodeId; private final Duration wallTime; private final long inputPositions; private final DataSize inputDataSize; private final long outputPositions; private final DataSize outputDataSize; private PlanNodeStats(PlanNodeId planNodeId, Duration wallTime, long inputPositions, DataSize inputDataSize, long outputPositions, DataSize outputDataSize) { this.planNodeId = requireNonNull(planNodeId, "planNodeId is null"); this.wallTime = requireNonNull(wallTime, "wallTime is null"); this.inputPositions = inputPositions; this.inputDataSize = requireNonNull(inputDataSize, "inputDataSize is null"); this.outputPositions = outputPositions; this.outputDataSize = requireNonNull(outputDataSize, "outputDataSize is null"); } public PlanNodeId getPlanNodeId() { return planNodeId; } public Duration getWallTime() { return wallTime; } public long getInputPositions() { return inputPositions; } public DataSize getInputDataSize() { return inputDataSize; } public long getOutputPositions() { return outputPositions; } public DataSize getOutputDataSize() { return outputDataSize; } public static PlanNodeStats merge(PlanNodeStats planNodeStats1, PlanNodeStats planNodeStats2) { checkArgument(planNodeStats1.getPlanNodeId().equals(planNodeStats2.getPlanNodeId()), 
"planNodeIds do not match. %s != %s", planNodeStats1.getPlanNodeId(), planNodeStats2.getPlanNodeId()); long inputPositions = planNodeStats1.inputPositions + planNodeStats2.inputPositions; DataSize inputDataSize = succinctBytes(planNodeStats1.inputDataSize.toBytes() + planNodeStats2.inputDataSize.toBytes()); long outputPositions = planNodeStats1.outputPositions + planNodeStats2.outputPositions; DataSize outputDataSize = succinctBytes(planNodeStats1.outputDataSize.toBytes() + planNodeStats2.outputDataSize.toBytes()); return new PlanNodeStats( planNodeStats1.getPlanNodeId(), new Duration(planNodeStats1.getWallTime().toMillis() + planNodeStats2.getWallTime().toMillis(), MILLISECONDS), inputPositions, inputDataSize, outputPositions, outputDataSize); } } }
presto-main/src/main/java/com/facebook/presto/sql/planner/PlanPrinter.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner; import com.facebook.presto.Session; import com.facebook.presto.execution.StageInfo; import com.facebook.presto.execution.StageStats; import com.facebook.presto.execution.TaskInfo; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.OperatorNotFoundException; import com.facebook.presto.metadata.Signature; import com.facebook.presto.metadata.TableHandle; import com.facebook.presto.metadata.TableLayout; import com.facebook.presto.operator.OperatorStats; import com.facebook.presto.operator.PipelineStats; import com.facebook.presto.operator.TaskStats; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ConnectorTableLayoutHandle; import com.facebook.presto.spi.predicate.Domain; import com.facebook.presto.spi.predicate.Marker; import com.facebook.presto.spi.predicate.NullableValue; import com.facebook.presto.spi.predicate.Range; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.Type; import com.facebook.presto.sql.FunctionInvoker; import com.facebook.presto.sql.planner.plan.AggregationNode; import com.facebook.presto.sql.planner.plan.ApplyNode; import com.facebook.presto.sql.planner.plan.AssignUniqueId; import com.facebook.presto.sql.planner.plan.Assignments; import com.facebook.presto.sql.planner.plan.DeleteNode; import com.facebook.presto.sql.planner.plan.DistinctLimitNode; import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode; import com.facebook.presto.sql.planner.plan.ExceptNode; import com.facebook.presto.sql.planner.plan.ExchangeNode; import com.facebook.presto.sql.planner.plan.ExchangeNode.Scope; import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode; import com.facebook.presto.sql.planner.plan.FilterNode; import com.facebook.presto.sql.planner.plan.GroupIdNode; import com.facebook.presto.sql.planner.plan.IndexJoinNode; import com.facebook.presto.sql.planner.plan.IndexSourceNode; import com.facebook.presto.sql.planner.plan.IntersectNode; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.LimitNode; import com.facebook.presto.sql.planner.plan.MarkDistinctNode; import com.facebook.presto.sql.planner.plan.MetadataDeleteNode; import com.facebook.presto.sql.planner.plan.OutputNode; import com.facebook.presto.sql.planner.plan.PlanFragmentId; import com.facebook.presto.sql.planner.plan.PlanNode; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.facebook.presto.sql.planner.plan.PlanVisitor; import com.facebook.presto.sql.planner.plan.ProjectNode; import com.facebook.presto.sql.planner.plan.RemoteSourceNode; import com.facebook.presto.sql.planner.plan.RowNumberNode; import com.facebook.presto.sql.planner.plan.SampleNode; import com.facebook.presto.sql.planner.plan.SemiJoinNode; import com.facebook.presto.sql.planner.plan.SortNode; import com.facebook.presto.sql.planner.plan.TableFinishNode; import 
com.facebook.presto.sql.planner.plan.TableScanNode; import com.facebook.presto.sql.planner.plan.TableWriterNode; import com.facebook.presto.sql.planner.plan.TopNNode; import com.facebook.presto.sql.planner.plan.TopNRowNumberNode; import com.facebook.presto.sql.planner.plan.UnionNode; import com.facebook.presto.sql.planner.plan.UnnestNode; import com.facebook.presto.sql.planner.plan.ValuesNode; import com.facebook.presto.sql.planner.plan.WindowNode; import com.facebook.presto.sql.tree.ComparisonExpression; import com.facebook.presto.sql.tree.ComparisonExpressionType; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FrameBound; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.sql.tree.SymbolReference; import com.facebook.presto.sql.tree.Window; import com.facebook.presto.sql.tree.WindowFrame; import com.facebook.presto.util.GraphvizPrinter; import com.google.common.base.CaseFormat; import com.google.common.base.Functions; import com.google.common.base.Joiner; import com.google.common.base.Strings; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.airlift.slice.Slice; import io.airlift.units.DataSize; import io.airlift.units.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.facebook.presto.execution.StageInfo.getAllStages; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.sql.planner.DomainUtils.simplifyDomain; import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE; import static com.google.common.base.Preconditions.checkArgument; import static io.airlift.units.DataSize.Unit.BYTE; import static io.airlift.units.DataSize.succinctBytes; import static io.airlift.units.DataSize.succinctDataSize; import static java.lang.Double.isFinite; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.stream.Collectors.toList; public class PlanPrinter { private final StringBuilder output = new StringBuilder(); private final Metadata metadata; private final Optional<Map<PlanNodeId, PlanNodeStats>> stats; private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session sesion) { this(plan, types, metadata, sesion, 0); } private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, int indent) { requireNonNull(plan, "plan is null"); requireNonNull(types, "types is null"); requireNonNull(metadata, "metadata is null"); this.metadata = metadata; this.stats = Optional.empty(); Visitor visitor = new Visitor(types, session); plan.accept(visitor, indent); } private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, Map<PlanNodeId, PlanNodeStats> stats, int indent) { requireNonNull(plan, "plan is null"); requireNonNull(types, "types is null"); requireNonNull(metadata, "metadata is null"); this.metadata = metadata; this.stats = 
Optional.of(stats); Visitor visitor = new Visitor(types, session); plan.accept(visitor, indent); } @Override public String toString() { return output.toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session) { return new PlanPrinter(plan, types, metadata, session).toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, int indent) { return new PlanPrinter(plan, types, metadata, session, indent).toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, Map<PlanNodeId, PlanNodeStats> stats, int indent) { return new PlanPrinter(plan, types, metadata, session, stats, indent).toString(); } public static String textDistributedPlan(List<StageInfo> stages, Metadata metadata, Session session) { StringBuilder builder = new StringBuilder(); List<StageInfo> allStages = stages.stream() .flatMap(stage -> getAllStages(Optional.of(stage)).stream()) .collect(toImmutableList()); for (StageInfo stageInfo : allStages) { Map<PlanNodeId, PlanNodeStats> aggregatedStats = new HashMap<>(); List<PlanNodeStats> planNodeStats = stageInfo.getTasks().stream() .map(TaskInfo::getStats) .flatMap(taskStats -> getPlanNodeStats(taskStats).stream()) .collect(toList()); for (PlanNodeStats stats : planNodeStats) { aggregatedStats.merge(stats.getPlanNodeId(), stats, PlanNodeStats::merge); } builder.append(formatFragment(metadata, session, stageInfo.getPlan(), Optional.of(stageInfo.getStageStats()), Optional.of(aggregatedStats))); } return builder.toString(); } private static List<PlanNodeStats> getPlanNodeStats(TaskStats taskStats) { // Best effort to reconstruct the plan nodes from operators. // Because stats are collected separately from query execution, // it's possible that some or all of them are missing or out of date. // For example, a LIMIT clause can cause a query to finish before stats // are collected from the leaf stages. 
Map<PlanNodeId, Long> outputPositions = new HashMap<>(); Map<PlanNodeId, Long> outputBytes = new HashMap<>(); Map<PlanNodeId, Long> wallMillis = new HashMap<>(); for (PipelineStats pipelineStats : taskStats.getPipelines()) { Map<PlanNodeId, Long> pipelineOutputPositions = new HashMap<>(); Map<PlanNodeId, Long> pipelineOutputBytes = new HashMap<>(); List<OperatorStats> operatorSummaries = pipelineStats.getOperatorSummaries(); for (int i = 0; i < operatorSummaries.size(); i++) { OperatorStats operatorStats = operatorSummaries.get(i); PlanNodeId planNodeId = operatorStats.getPlanNodeId(); long wall = operatorStats.getAddInputWall().toMillis() + operatorStats.getGetOutputWall().toMillis() + operatorStats.getFinishWall().toMillis(); wallMillis.merge(planNodeId, wall, Long::sum); // An "internal" pipeline like a hash build, links to another pipeline which is the actual output for this plan node if (i == operatorSummaries.size() - 1 && !pipelineStats.isOutputPipeline()) { pipelineOutputBytes.remove(planNodeId); pipelineOutputPositions.remove(planNodeId); } else { // Overwrite whatever we currently have, to get the last operator's stats for this plan node in this pipeline pipelineOutputPositions.put(planNodeId, operatorStats.getOutputPositions()); pipelineOutputBytes.put(planNodeId, operatorStats.getOutputDataSize().toBytes()); } } for (Map.Entry<PlanNodeId, Long> entry : pipelineOutputPositions.entrySet()) { outputBytes.merge(entry.getKey(), pipelineOutputBytes.get(entry.getKey()), Long::sum); outputPositions.merge(entry.getKey(), entry.getValue(), Long::sum); } } List<PlanNodeStats> stats = new ArrayList<>(); for (Map.Entry<PlanNodeId, Long> entry : wallMillis.entrySet()) { if (outputPositions.containsKey(entry.getKey())) { stats.add(new PlanNodeStats(entry.getKey(), new Duration(entry.getValue(), MILLISECONDS), outputPositions.get(entry.getKey()), succinctDataSize(outputBytes.get(entry.getKey()), BYTE))); } else { // It's possible there will be no output stats because all the pipelines that we observed were non-output. 
// For example in a query like SELECT * FROM a JOIN b ON c = d LIMIT 1 // It's possible to observe stats after the build starts, but before the probe does // and therefore only have wall time, but no output stats stats.add(new PlanNodeStats(entry.getKey(), new Duration(entry.getValue(), MILLISECONDS))); } } return stats; } public static String textDistributedPlan(SubPlan plan, Metadata metadata, Session session) { StringBuilder builder = new StringBuilder(); for (PlanFragment fragment : plan.getAllFragments()) { builder.append(formatFragment(metadata, session, fragment, Optional.empty(), Optional.empty())); } return builder.toString(); } private static String formatFragment(Metadata metadata, Session session, PlanFragment fragment, Optional<StageStats> stageStats, Optional<Map<PlanNodeId, PlanNodeStats>> planNodeStats) { StringBuilder builder = new StringBuilder(); builder.append(format("Fragment %s [%s]\n", fragment.getId(), fragment.getPartitioning())); if (stageStats.isPresent()) { builder.append(indentString(1)) .append(format("Cost: CPU %s, Input %d (%s), Output %d (%s)\n", stageStats.get().getTotalCpuTime(), stageStats.get().getProcessedInputPositions(), stageStats.get().getProcessedInputDataSize(), stageStats.get().getOutputPositions(), stageStats.get().getOutputDataSize())); } PartitioningScheme partitioningScheme = fragment.getPartitioningScheme(); builder.append(indentString(1)) .append(format("Output layout: [%s]\n", Joiner.on(", ").join(partitioningScheme.getOutputLayout()))); boolean replicateNulls = partitioningScheme.isReplicateNulls(); List<String> arguments = partitioningScheme.getPartitioning().getArguments().stream() .map(argument -> { if (argument.isConstant()) { NullableValue constant = argument.getConstant(); String printableValue = castToVarchar(constant.getType(), constant.getValue(), metadata, session); return constant.getType().getDisplayName() + "(" + printableValue + ")"; } return argument.getColumn().toString(); }) .collect(toImmutableList()); builder.append(indentString(1)); if (replicateNulls) { builder.append(format("Output partitioning: %s (replicate nulls) [%s]%s\n", partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments), formatHash(partitioningScheme.getHashColumn()))); } else { builder.append(format("Output partitioning: %s [%s]%s\n", partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments), formatHash(partitioningScheme.getHashColumn()))); } if (stageStats.isPresent()) { builder.append(textLogicalPlan(fragment.getRoot(), fragment.getSymbols(), metadata, session, planNodeStats.get(), 1)) .append("\n"); } else { builder.append(textLogicalPlan(fragment.getRoot(), fragment.getSymbols(), metadata, session, 1)) .append("\n"); } return builder.toString(); } public static String graphvizLogicalPlan(PlanNode plan, Map<Symbol, Type> types) { PlanFragment fragment = new PlanFragment( new PlanFragmentId("graphviz_plan"), plan, types, SINGLE_DISTRIBUTION, ImmutableList.of(plan.getId()), new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), plan.getOutputSymbols())); return GraphvizPrinter.printLogical(ImmutableList.of(fragment)); } public static String graphvizDistributedPlan(SubPlan plan) { return GraphvizPrinter.printDistributed(plan); } private void print(int indent, String format, Object... 
args) { String value; if (args.length == 0) { value = format; } else { value = format(format, args); } output.append(indentString(indent)).append(value).append('\n'); } private void printStats(int indent, PlanNodeId planNodeId) { if (!stats.isPresent()) { return; } long totalMillis = stats.get().values().stream() .mapToLong(node -> node.getWallTime().toMillis()) .sum(); PlanNodeStats stats = this.stats.get().get(planNodeId); if (stats == null) { output.append(indentString(indent)) .append("Cost: unknown, Output: unknown \n"); return; } double fraction = (stats.getWallTime().toMillis()) / (double) totalMillis; String fractionString; if (isFinite(fraction)) { fractionString = format("%.2f%%", 100.0 * fraction); } else { fractionString = "unknown"; } String outputString; if (stats.getOutputPositions().isPresent() && stats.getOutputDataSize().isPresent()) { outputString = format("%s rows (%s)", stats.getOutputPositions().get(), stats.getOutputDataSize().get()); } else { outputString = "unknown"; } output.append(indentString(indent)) .append(format("Cost: %s, Output: %s\n", fractionString, outputString)); } private static String indentString(int indent) { return Strings.repeat(" ", indent); } private class Visitor extends PlanVisitor<Integer, Void> { private final Map<Symbol, Type> types; private final Session session; @SuppressWarnings("AssignmentToCollectionOrArrayFieldFromParameter") public Visitor(Map<Symbol, Type> types, Session session) { this.types = types; this.session = session; } @Override public Void visitExplainAnalyze(ExplainAnalyzeNode node, Integer indent) { print(indent, "- ExplainAnalyze => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitJoin(JoinNode node, Integer indent) { List<Expression> joinExpressions = new ArrayList<>(); for (JoinNode.EquiJoinClause clause : node.getCriteria()) { joinExpressions.add(new ComparisonExpression(ComparisonExpressionType.EQUAL, clause.getLeft().toSymbolReference(), clause.getRight().toSymbolReference())); } node.getFilter().ifPresent(expression -> joinExpressions.add(expression)); // Check if the node is actually a cross join node if (node.getType() == JoinNode.Type.INNER && joinExpressions.isEmpty()) { print(indent, "- CrossJoin => [%s]", formatOutputs(node.getOutputSymbols())); } else { print(indent, "- %s[%s]%s => [%s]", node.getType().getJoinLabel(), Joiner.on(" AND ").join(joinExpressions), formatHash(node.getLeftHashSymbol(), node.getRightHashSymbol()), formatOutputs(node.getOutputSymbols())); } printStats(indent + 2, node.getId()); node.getLeft().accept(this, indent + 1); node.getRight().accept(this, indent + 1); return null; } @Override public Void visitSemiJoin(SemiJoinNode node, Integer indent) { print(indent, "- SemiJoin[%s = %s]%s => [%s]", node.getSourceJoinSymbol(), node.getFilteringSourceJoinSymbol(), formatHash(node.getSourceHashSymbol(), node.getFilteringSourceHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); node.getSource().accept(this, indent + 1); node.getFilteringSource().accept(this, indent + 1); return null; } @Override public Void visitIndexSource(IndexSourceNode node, Integer indent) { print(indent, "- IndexSource[%s, lookup = %s] => [%s]", node.getIndexHandle(), node.getLookupSymbols(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (Map.Entry<Symbol, ColumnHandle> entry : node.getAssignments().entrySet()) { if 
(node.getOutputSymbols().contains(entry.getKey())) { print(indent + 2, "%s := %s", entry.getKey(), entry.getValue()); } } return null; } @Override public Void visitIndexJoin(IndexJoinNode node, Integer indent) { List<Expression> joinExpressions = new ArrayList<>(); for (IndexJoinNode.EquiJoinClause clause : node.getCriteria()) { joinExpressions.add(new ComparisonExpression(ComparisonExpressionType.EQUAL, clause.getProbe().toSymbolReference(), clause.getIndex().toSymbolReference())); } print(indent, "- %sIndexJoin[%s]%s => [%s]", node.getType().getJoinLabel(), Joiner.on(" AND ").join(joinExpressions), formatHash(node.getProbeHashSymbol(), node.getIndexHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); node.getProbeSource().accept(this, indent + 1); node.getIndexSource().accept(this, indent + 1); return null; } @Override public Void visitLimit(LimitNode node, Integer indent) { print(indent, "- Limit%s[%s] => [%s]", node.isPartial() ? "Partial" : "", node.getCount(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitDistinctLimit(DistinctLimitNode node, Integer indent) { print(indent, "- DistinctLimit%s[%s]%s => [%s]", node.isPartial() ? "Partial" : "", node.getLimit(), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitAggregation(AggregationNode node, Integer indent) { String type = ""; if (node.getStep() != AggregationNode.Step.SINGLE) { type = format("(%s)", node.getStep().toString()); } String key = ""; if (!node.getGroupingKeys().isEmpty()) { key = node.getGroupingKeys().toString(); } print(indent, "- Aggregate%s%s%s => [%s]", type, key, formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (Map.Entry<Symbol, FunctionCall> entry : node.getAggregations().entrySet()) { if (node.getMasks().containsKey(entry.getKey())) { print(indent + 2, "%s := %s (mask = %s)", entry.getKey(), entry.getValue(), node.getMasks().get(entry.getKey())); } else { print(indent + 2, "%s := %s", entry.getKey(), entry.getValue()); } } return processChildren(node, indent + 1); } @Override public Void visitGroupId(GroupIdNode node, Integer indent) { // grouping sets are easier to understand in terms of inputs List<List<Symbol>> inputGroupingSetSymbols = node.getGroupingSets().stream() .map(set -> set.stream() .map(symbol -> node.getGroupingSetMappings().get(symbol)) .collect(Collectors.toList())) .collect(Collectors.toList()); print(indent, "- GroupId%s => [%s]", inputGroupingSetSymbols, formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (Map.Entry<Symbol, Symbol> mapping : node.getGroupingSetMappings().entrySet()) { print(indent + 2, "%s := %s", mapping.getKey(), mapping.getValue()); } for (Map.Entry<Symbol, Symbol> argument : node.getArgumentMappings().entrySet()) { print(indent + 2, "%s := %s", argument.getKey(), argument.getValue()); } return processChildren(node, indent + 1); } @Override public Void visitMarkDistinct(MarkDistinctNode node, Integer indent) { print(indent, "- MarkDistinct[distinct=%s marker=%s]%s => [%s]", formatOutputs(node.getDistinctSymbols()), node.getMarkerSymbol(), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override 
public Void visitWindow(WindowNode node, Integer indent) { List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction()); List<String> orderBy = Lists.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input)); List<String> args = new ArrayList<>(); if (!partitionBy.isEmpty()) { List<Symbol> prePartitioned = node.getPartitionBy().stream() .filter(node.getPrePartitionedInputs()::contains) .collect(toImmutableList()); List<Symbol> notPrePartitioned = node.getPartitionBy().stream() .filter(column -> !node.getPrePartitionedInputs().contains(column)) .collect(toImmutableList()); StringBuilder builder = new StringBuilder(); if (!prePartitioned.isEmpty()) { builder.append("<") .append(Joiner.on(", ").join(prePartitioned)) .append(">"); if (!notPrePartitioned.isEmpty()) { builder.append(", "); } } if (!notPrePartitioned.isEmpty()) { builder.append(Joiner.on(", ").join(notPrePartitioned)); } args.add(format("partition by (%s)", builder)); } if (!orderBy.isEmpty()) { args.add(format("order by (%s)", Stream.concat( node.getOrderBy().stream() .limit(node.getPreSortedOrderPrefix()) .map(symbol -> "<" + symbol + " " + node.getOrderings().get(symbol) + ">"), node.getOrderBy().stream() .skip(node.getPreSortedOrderPrefix()) .map(symbol -> symbol + " " + node.getOrderings().get(symbol))) .collect(Collectors.joining(", ")))); } print(indent, "- Window[%s]%s => [%s]", Joiner.on(", ").join(args), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (Map.Entry<Symbol, WindowNode.Function> entry : node.getWindowFunctions().entrySet()) { FunctionCall call = entry.getValue().getFunctionCall(); String frameInfo = call.getWindow() .flatMap(Window::getFrame) .map(PlanPrinter::formatFrame) .orElse(""); print(indent + 2, "%s := %s(%s) %s", entry.getKey(), call.getName(), Joiner.on(", ").join(call.getArguments()), frameInfo); } return processChildren(node, indent + 1); } @Override public Void visitTopNRowNumber(TopNRowNumberNode node, Integer indent) { List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction()); List<String> orderBy = Lists.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input)); List<String> args = new ArrayList<>(); args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy))); args.add(format("order by (%s)", Joiner.on(", ").join(orderBy))); print(indent, "- TopNRowNumber[%s limit %s]%s => [%s]", Joiner.on(", ").join(args), node.getMaxRowCountPerPartition(), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); print(indent + 2, "%s := %s", node.getRowNumberSymbol(), "row_number()"); return processChildren(node, indent + 1); } @Override public Void visitRowNumber(RowNumberNode node, Integer indent) { List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction()); List<String> args = new ArrayList<>(); if (!partitionBy.isEmpty()) { args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy))); } if (node.getMaxRowCountPerPartition().isPresent()) { args.add(format("limit = %s", node.getMaxRowCountPerPartition().get())); } print(indent, "- RowNumber[%s]%s => [%s]", Joiner.on(", ").join(args), formatHash(node.getHashSymbol()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); print(indent + 2, "%s := %s", node.getRowNumberSymbol(), "row_number()"); return processChildren(node, 
indent + 1); } @Override public Void visitTableScan(TableScanNode node, Integer indent) { TableHandle table = node.getTable(); print(indent, "- TableScan[%s, originalConstraint = %s] => [%s]", table, node.getOriginalConstraint(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); TupleDomain<ColumnHandle> predicate = node.getLayout() .map(layoutHandle -> metadata.getLayout(session, layoutHandle)) .map(TableLayout::getPredicate) .orElse(TupleDomain.all()); if (node.getLayout().isPresent()) { // TODO: find a better way to do this ConnectorTableLayoutHandle layout = node.getLayout().get().getConnectorHandle(); if (!table.getConnectorHandle().toString().equals(layout.toString())) { print(indent + 2, "LAYOUT: %s", layout); } } if (predicate.isNone()) { print(indent + 2, ":: NONE"); } else { // first, print output columns and their constraints for (Map.Entry<Symbol, ColumnHandle> assignment : node.getAssignments().entrySet()) { ColumnHandle column = assignment.getValue(); print(indent + 2, "%s := %s", assignment.getKey(), column); printConstraint(indent + 3, column, predicate); } // then, print constraints for columns that are not in the output if (!predicate.isAll()) { Set<ColumnHandle> outputs = ImmutableSet.copyOf(node.getAssignments().values()); predicate.getDomains().get() .entrySet().stream() .filter(entry -> !outputs.contains(entry.getKey())) .forEach(entry -> { ColumnHandle column = entry.getKey(); print(indent + 2, "%s", column); printConstraint(indent + 3, column, predicate); }); } } return null; } @Override public Void visitValues(ValuesNode node, Integer indent) { print(indent, "- Values => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (List<Expression> row : node.getRows()) { print(indent + 2, "(" + Joiner.on(", ").join(row) + ")"); } return null; } @Override public Void visitFilter(FilterNode node, Integer indent) { print(indent, "- Filter[%s] => [%s]", node.getPredicate(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitProject(ProjectNode node, Integer indent) { print(indent, "- Project => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); printAssignments(node.getAssignments(), indent + 2); return processChildren(node, indent + 1); } @Override public Void visitUnnest(UnnestNode node, Integer indent) { print(indent, "- Unnest [replicate=%s, unnest=%s] => [%s]", formatOutputs(node.getReplicateSymbols()), formatOutputs(node.getUnnestSymbols().keySet()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitOutput(OutputNode node, Integer indent) { print(indent, "- Output[%s] => [%s]", Joiner.on(", ").join(node.getColumnNames()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (int i = 0; i < node.getColumnNames().size(); i++) { String name = node.getColumnNames().get(i); Symbol symbol = node.getOutputSymbols().get(i); if (!name.equals(symbol.toString())) { print(indent + 2, "%s := %s", name, symbol); } } return processChildren(node, indent + 1); } @Override public Void visitTopN(TopNNode node, Integer indent) { Iterable<String> keys = Iterables.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input)); print(indent, "- TopN[%s by (%s)] => [%s]", node.getCount(), Joiner.on(", ").join(keys), 
formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitSort(SortNode node, Integer indent) { Iterable<String> keys = Iterables.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input)); print(indent, "- Sort[%s] => [%s]", Joiner.on(", ").join(keys), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitRemoteSource(RemoteSourceNode node, Integer indent) { print(indent, "- RemoteSource[%s] => [%s]", Joiner.on(',').join(node.getSourceFragmentIds()), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return null; } @Override public Void visitUnion(UnionNode node, Integer indent) { print(indent, "- Union => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitIntersect(IntersectNode node, Integer indent) { print(indent, "- Intersect => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitExcept(ExceptNode node, Integer indent) { print(indent, "- Except => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitTableWriter(TableWriterNode node, Integer indent) { print(indent, "- TableWriter => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); for (int i = 0; i < node.getColumnNames().size(); i++) { String name = node.getColumnNames().get(i); Symbol symbol = node.getColumns().get(i); print(indent + 2, "%s := %s", name, symbol); } return processChildren(node, indent + 1); } @Override public Void visitTableFinish(TableFinishNode node, Integer indent) { print(indent, "- TableCommit[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitSample(SampleNode node, Integer indent) { print(indent, "- Sample[%s: %s] => [%s]", node.getSampleType(), node.getSampleRatio(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitExchange(ExchangeNode node, Integer indent) { if (node.getScope() == Scope.LOCAL) { print(indent, "- LocalExchange[%s%s]%s (%s) => %s", node.getPartitioningScheme().getPartitioning().getHandle(), node.getPartitioningScheme().isReplicateNulls() ? " - REPLICATE NULLS" : "", formatHash(node.getPartitioningScheme().getHashColumn()), Joiner.on(", ").join(node.getPartitioningScheme().getPartitioning().getArguments()), formatOutputs(node.getOutputSymbols())); } else { print(indent, "- %sExchange[%s%s]%s => %s", UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, node.getScope().toString()), node.getType(), node.getPartitioningScheme().isReplicateNulls() ? 
" - REPLICATE NULLS" : "", formatHash(node.getPartitioningScheme().getHashColumn()), formatOutputs(node.getOutputSymbols())); } printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitDelete(DeleteNode node, Integer indent) { print(indent, "- Delete[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitMetadataDelete(MetadataDeleteNode node, Integer indent) { print(indent, "- MetadataDelete[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitEnforceSingleRow(EnforceSingleRowNode node, Integer indent) { print(indent, "- Scalar => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitAssignUniqueId(AssignUniqueId node, Integer indent) { print(indent, "- AssignUniqueId => [%s]", formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); return processChildren(node, indent + 1); } @Override public Void visitApply(ApplyNode node, Integer indent) { print(indent, "- Apply[%s] => [%s]", node.getCorrelation(), formatOutputs(node.getOutputSymbols())); printStats(indent + 2, node.getId()); printAssignments(node.getSubqueryAssignments(), indent + 4); return processChildren(node, indent + 1); } @Override protected Void visitPlan(PlanNode node, Integer indent) { throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName()); } private Void processChildren(PlanNode node, int indent) { for (PlanNode child : node.getSources()) { child.accept(this, indent); } return null; } private void printAssignments(Assignments assignments, int indent) { for (Map.Entry<Symbol, Expression> entry : assignments.getMap().entrySet()) { if (entry.getValue() instanceof SymbolReference && ((SymbolReference) entry.getValue()).getName().equals(entry.getKey().getName())) { // skip identity assignments continue; } print(indent, "%s := %s", entry.getKey(), entry.getValue()); } } private String formatOutputs(Iterable<Symbol> symbols) { return Joiner.on(", ").join(Iterables.transform(symbols, input -> input + ":" + types.get(input).getDisplayName())); } private void printConstraint(int indent, ColumnHandle column, TupleDomain<ColumnHandle> constraint) { checkArgument(!constraint.isNone()); Map<ColumnHandle, Domain> domains = constraint.getDomains().get(); if (!constraint.isAll() && domains.containsKey(column)) { print(indent, ":: %s", formatDomain(simplifyDomain(domains.get(column)))); } } private String formatDomain(Domain domain) { ImmutableList.Builder<String> parts = ImmutableList.builder(); if (domain.isNullAllowed()) { parts.add("NULL"); } Type type = domain.getType(); domain.getValues().getValuesProcessor().consume( ranges -> { for (Range range : ranges.getOrderedRanges()) { StringBuilder builder = new StringBuilder(); if (range.isSingleValue()) { String value = castToVarchar(type, range.getSingleValue(), PlanPrinter.this.metadata, session); builder.append('[').append(value).append(']'); } else { builder.append((range.getLow().getBound() == Marker.Bound.EXACTLY) ? 
'[' : '('); if (range.getLow().isLowerUnbounded()) { builder.append("<min>"); } else { builder.append(castToVarchar(type, range.getLow().getValue(), PlanPrinter.this.metadata, session)); } builder.append(", "); if (range.getHigh().isUpperUnbounded()) { builder.append("<max>"); } else { builder.append(castToVarchar(type, range.getHigh().getValue(), PlanPrinter.this.metadata, session)); } builder.append((range.getHigh().getBound() == Marker.Bound.EXACTLY) ? ']' : ')'); } parts.add(builder.toString()); } }, discreteValues -> discreteValues.getValues().stream() .map(value -> castToVarchar(type, value, PlanPrinter.this.metadata, session)) .sorted() // Sort so the values will be printed in predictable order .forEach(parts::add), allOrNone -> { if (allOrNone.isAll()) { parts.add("ALL VALUES"); } }); return "[" + Joiner.on(", ").join(parts.build()) + "]"; } } private static String formatHash(Optional<Symbol>... hashes) { List<Symbol> symbols = Arrays.stream(hashes) .filter(Optional::isPresent) .map(Optional::get) .collect(toList()); if (symbols.isEmpty()) { return ""; } return "[" + Joiner.on(", ").join(symbols) + "]"; } private static String formatFrame(WindowFrame frame) { StringBuilder builder = new StringBuilder(frame.getType().toString()); FrameBound start = frame.getStart(); if (start.getValue().isPresent()) { builder.append(" ").append(start.getOriginalValue().get()); } builder.append(" ").append(start.getType()); Optional<FrameBound> end = frame.getEnd(); if (end.isPresent()) { if (end.get().getOriginalValue().isPresent()) { builder.append(" ").append(end.get().getOriginalValue().get()); } builder.append(" ").append(end.get().getType()); } return builder.toString(); } private static String castToVarchar(Type type, Object value, Metadata metadata, Session session) { if (value == null) { return "NULL"; } Signature coercion = metadata.getFunctionRegistry().getCoercion(type, VARCHAR); try { Slice coerced = (Slice) new FunctionInvoker(metadata.getFunctionRegistry()).invoke(coercion, session.toConnectorSession(), value); return coerced.toStringUtf8(); } catch (OperatorNotFoundException e) { return "<UNREPRESENTABLE VALUE>"; } catch (Throwable throwable) { throw Throwables.propagate(throwable); } } private static class PlanNodeStats { private final PlanNodeId planNodeId; private final Duration wallTime; private final Optional<Long> outputPositions; private final Optional<DataSize> outputDataSize; public PlanNodeStats(PlanNodeId planNodeId, Duration wallTime) { this(planNodeId, wallTime, Optional.empty(), Optional.empty()); } public PlanNodeStats(PlanNodeId planNodeId, Duration wallTime, long outputPositions, DataSize outputDataSize) { this(planNodeId, wallTime, Optional.of(outputPositions), Optional.of(outputDataSize)); } private PlanNodeStats(PlanNodeId planNodeId, Duration wallTime, Optional<Long> outputPositions, Optional<DataSize> outputDataSize) { this.planNodeId = requireNonNull(planNodeId, "planNodeId is null"); this.wallTime = requireNonNull(wallTime, "wallTime is null"); this.outputPositions = outputPositions; this.outputDataSize = outputDataSize; } public PlanNodeId getPlanNodeId() { return planNodeId; } public Duration getWallTime() { return wallTime; } public Optional<Long> getOutputPositions() { return outputPositions; } public Optional<DataSize> getOutputDataSize() { return outputDataSize; } public static PlanNodeStats merge(PlanNodeStats planNodeStats1, PlanNodeStats planNodeStats2) { checkArgument(planNodeStats1.getPlanNodeId().equals(planNodeStats2.getPlanNodeId()), "planNodeIds 
do not match. %s != %s", planNodeStats1.getPlanNodeId(), planNodeStats2.getPlanNodeId()); Optional<Long> outputPositions; if (planNodeStats1.getOutputPositions().isPresent() && planNodeStats2.getOutputPositions().isPresent()) { outputPositions = Optional.of(planNodeStats1.getOutputPositions().get() + planNodeStats2.getOutputPositions().get()); } else if (planNodeStats1.getOutputPositions().isPresent()) { outputPositions = planNodeStats1.getOutputPositions(); } else { outputPositions = planNodeStats2.getOutputPositions(); } Optional<DataSize> outputDataSize; if (planNodeStats1.getOutputDataSize().isPresent() && planNodeStats2.getOutputDataSize().isPresent()) { outputDataSize = Optional.of(succinctBytes(planNodeStats1.getOutputDataSize().get().toBytes() + planNodeStats2.getOutputDataSize().get().toBytes())); } else if (planNodeStats1.getOutputDataSize().isPresent()) { outputDataSize = planNodeStats1.getOutputDataSize(); } else { outputDataSize = planNodeStats2.getOutputDataSize(); } return new PlanNodeStats( planNodeStats1.getPlanNodeId(), new Duration(planNodeStats1.getWallTime().toMillis() + planNodeStats2.getWallTime().toMillis(), MILLISECONDS), outputPositions, outputDataSize); } } }
Improvements to explain analyze

- Display Scan, Filter and Project operators as a single node in explain (since it is the same physical operator)
- Display input rows and filtered rows statistics
presto-main/src/main/java/com/facebook/presto/sql/planner/PlanPrinter.java
Improvements to explain analyze
Java
apache-2.0
f2dad045ddc6d368488424d240e6fb9071bcd92e
0
AOSP-JF-MM/platform_external_guava,AOSP-JF/platform_external_guava,DARKPOP/external_guava,bhargavkumar040/android-source-browsing.platform--external--guava,yinquan529/platform-external-guava,UBERMALLOW/external_guava,TurboROM/external_guava,PurityROM/platform_external_guava,xin3liang/platform_external_guava,SaleJumper/android-source-browsing.platform--external--guava,TurboROM/external_guava,thiz11/platform_external_guava,xhteam/external-guava,UBERMALLOW/external_guava,TeamExodus/external_guava,geekboxzone/mmallow_external_guava,Evervolv/android_external_guava,Omegaphora/external_guava,omapzoom/platform-external-guava,VanirAOSP/external_guava,xin3liang/platform_external_guava,VRToxin-AOSP/android_external_guava,geekboxzone/lollipop_external_guava,SaleJumper/android-source-browsing.platform--external--guava,yinquan529/platform-external-guava,thiz11/platform_external_guava,xhteam/external-guava,CyanogenMod/android_external_guava,Omegaphora/external_guava,bhargavkumar040/android-source-browsing.platform--external--guava,TeamExodus/external_guava,PurityROM/platform_external_guava,android-ia/platform_external_guava,geekboxzone/lollipop_external_guava,geekboxzone/mmallow_external_guava,Pankaj-Sakariya/android-source-browsing.platform--external--guava,ThangBK2009/android-source-browsing.platform--external--guava,AOSP-JF/platform_external_guava,VanirAOSP/external_guava,Evervolv/android_external_guava,VRToxin-AOSP/android_external_guava,Pankaj-Sakariya/android-source-browsing.platform--external--guava,DARKPOP/external_guava,AOSP-JF-MM/platform_external_guava,CyanogenMod/android_external_guava,omapzoom/platform-external-guava,ThangBK2009/android-source-browsing.platform--external--guava,android-ia/platform_external_guava
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Equivalence; import com.google.common.base.Equivalences; import com.google.common.base.Function; import com.google.common.base.Joiner.MapJoiner; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.MapDifference.ValueDifference; import com.google.common.primitives.Ints; import java.io.Serializable; import java.util.AbstractCollection; import java.util.AbstractMap; import java.util.AbstractSet; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumMap; import java.util.Enumeration; import java.util.HashMap; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ConcurrentMap; import javax.annotation.Nullable; /** * Static utility methods pertaining to {@link Map} instances. Also see this * class's counterparts {@link Lists} and {@link Sets}. * * @author Kevin Bourrillion * @author Mike Bostock * @author Isaac Shum * @author Louis Wasserman * @since 2.0 (imported from Google Collections Library) */ @GwtCompatible(emulated = true) public final class Maps { private Maps() {} /** * Creates a <i>mutable</i>, empty {@code HashMap} instance. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableMap#of()} instead. * * <p><b>Note:</b> if {@code K} is an {@code enum} type, use {@link * #newEnumMap} instead. * * @return a new, empty {@code HashMap} */ public static <K, V> HashMap<K, V> newHashMap() { return new HashMap<K, V>(); } /** * Creates a {@code HashMap} instance, with a high enough "initial capacity" * that it <i>should</i> hold {@code expectedSize} elements without growth. * This behavior cannot be broadly guaranteed, but it is observed to be true * for OpenJDK 1.6. It also can't be guaranteed that the method isn't * inadvertently <i>oversizing</i> the returned map. 
* * @param expectedSize the number of elements you expect to add to the * returned map * @return a new, empty {@code HashMap} with enough capacity to hold {@code * expectedSize} elements without resizing * @throws IllegalArgumentException if {@code expectedSize} is negative */ public static <K, V> HashMap<K, V> newHashMapWithExpectedSize( int expectedSize) { return new HashMap<K, V>(capacity(expectedSize)); } /** * Returns a capacity that is sufficient to keep the map from being resized as * long as it grows no larger than expectedSize and the load factor is >= its * default (0.75). */ static int capacity(int expectedSize) { if (expectedSize < 3) { checkArgument(expectedSize >= 0); return expectedSize + 1; } if (expectedSize < Ints.MAX_POWER_OF_TWO) { return expectedSize + expectedSize / 3; } return Integer.MAX_VALUE; // any large value } /** * Creates a <i>mutable</i> {@code HashMap} instance with the same mappings as * the specified map. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableMap#copyOf(Map)} instead. * * <p><b>Note:</b> if {@code K} is an {@link Enum} type, use {@link * #newEnumMap} instead. * * @param map the mappings to be placed in the new map * @return a new {@code HashMap} initialized with the mappings from {@code * map} */ public static <K, V> HashMap<K, V> newHashMap( Map<? extends K, ? extends V> map) { return new HashMap<K, V>(map); } /** * Creates a <i>mutable</i>, empty, insertion-ordered {@code LinkedHashMap} * instance. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableMap#of()} instead. * * @return a new, empty {@code LinkedHashMap} */ public static <K, V> LinkedHashMap<K, V> newLinkedHashMap() { return new LinkedHashMap<K, V>(); } /** * Creates a <i>mutable</i>, insertion-ordered {@code LinkedHashMap} instance * with the same mappings as the specified map. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableMap#copyOf(Map)} instead. * * @param map the mappings to be placed in the new map * @return a new, {@code LinkedHashMap} initialized with the mappings from * {@code map} */ public static <K, V> LinkedHashMap<K, V> newLinkedHashMap( Map<? extends K, ? extends V> map) { return new LinkedHashMap<K, V>(map); } /** * Returns a general-purpose instance of {@code ConcurrentMap}, which supports * all optional operations of the ConcurrentMap interface. It does not permit * null keys or values. It is serializable. * * <p>This is currently accomplished by calling {@link MapMaker#makeMap()}. * * <p>It is preferable to use {@code MapMaker} directly (rather than through * this method), as it presents numerous useful configuration options, * such as the concurrency level, load factor, key/value reference types, * and value computation. * * @return a new, empty {@code ConcurrentMap} * @since 3.0 */ public static <K, V> ConcurrentMap<K, V> newConcurrentMap() { return new MapMaker().<K, V>makeMap(); } /** * Creates a <i>mutable</i>, empty {@code TreeMap} instance using the natural * ordering of its elements. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSortedMap#of()} instead. * * @return a new, empty {@code TreeMap} */ public static <K extends Comparable, V> TreeMap<K, V> newTreeMap() { return new TreeMap<K, V>(); } /** * Creates a <i>mutable</i> {@code TreeMap} instance with the same mappings as * the specified map and using the same ordering as the specified map. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSortedMap#copyOfSorted(SortedMap)} instead. 
* * @param map the sorted map whose mappings are to be placed in the new map * and whose comparator is to be used to sort the new map * @return a new {@code TreeMap} initialized with the mappings from {@code * map} and using the comparator of {@code map} */ public static <K, V> TreeMap<K, V> newTreeMap(SortedMap<K, ? extends V> map) { return new TreeMap<K, V>(map); } /** * Creates a <i>mutable</i>, empty {@code TreeMap} instance using the given * comparator. * * <p><b>Note:</b> if mutability is not required, use {@code * ImmutableSortedMap.orderedBy(comparator).build()} instead. * * @param comparator the comparator to sort the keys with * @return a new, empty {@code TreeMap} */ public static <K, V> TreeMap<K, V> newTreeMap( @Nullable Comparator<? super K> comparator) { // Ideally, the extra type parameter "C" shouldn't be necessary. It is a // work-around of a compiler type inference quirk that prevents the // following code from being compiled: // Comparator<Class<?>> comparator = null; // Map<Class<? extends Throwable>, String> map = newTreeMap(comparator); return new TreeMap<K, V>(comparator); } /** * Creates an {@code EnumMap} instance. * * @param type the key type for this map * @return a new, empty {@code EnumMap} */ public static <K extends Enum<K>, V> EnumMap<K, V> newEnumMap(Class<K> type) { return new EnumMap<K, V>(checkNotNull(type)); } /** * Creates an {@code EnumMap} with the same mappings as the specified map. * * @param map the map from which to initialize this {@code EnumMap} * @return a new {@code EnumMap} initialized with the mappings from {@code * map} * @throws IllegalArgumentException if {@code m} is not an {@code EnumMap} * instance and contains no mappings */ public static <K extends Enum<K>, V> EnumMap<K, V> newEnumMap( Map<K, ? extends V> map) { return new EnumMap<K, V>(map); } /** * Creates an {@code IdentityHashMap} instance. * * @return a new, empty {@code IdentityHashMap} */ public static <K, V> IdentityHashMap<K, V> newIdentityHashMap() { return new IdentityHashMap<K, V>(); } /** * Returns a synchronized (thread-safe) bimap backed by the specified bimap. * In order to guarantee serial access, it is critical that <b>all</b> access * to the backing bimap is accomplished through the returned bimap. * * <p>It is imperative that the user manually synchronize on the returned map * when accessing any of its collection views: <pre> {@code * * BiMap<Long, String> map = Maps.synchronizedBiMap( * HashBiMap.<Long, String>create()); * ... * Set<Long> set = map.keySet(); // Needn't be in synchronized block * ... * synchronized (map) { // Synchronizing on map, not set! * Iterator<Long> it = set.iterator(); // Must be in synchronized block * while (it.hasNext()) { * foo(it.next()); * } * }}</pre> * * Failure to follow this advice may result in non-deterministic behavior. * * <p>The returned bimap will be serializable if the specified bimap is * serializable. * * @param bimap the bimap to be wrapped in a synchronized view * @return a sychronized view of the specified bimap */ public static <K, V> BiMap<K, V> synchronizedBiMap(BiMap<K, V> bimap) { return Synchronized.biMap(bimap, null); } /** * Computes the difference between two maps. This difference is an immutable * snapshot of the state of the maps at the time this method is called. It * will never change, even if the maps change at a later time. 
* * <p>Since this method uses {@code HashMap} instances internally, the keys of * the supplied maps must be well-behaved with respect to * {@link Object#equals} and {@link Object#hashCode}. * * <p><b>Note:</b>If you only need to know whether two maps have the same * mappings, call {@code left.equals(right)} instead of this method. * * @param left the map to treat as the "left" map for purposes of comparison * @param right the map to treat as the "right" map for purposes of comparison * @return the difference between the two maps */ @SuppressWarnings("unchecked") public static <K, V> MapDifference<K, V> difference( Map<? extends K, ? extends V> left, Map<? extends K, ? extends V> right) { if (left instanceof SortedMap) { SortedMap<K, ? extends V> sortedLeft = (SortedMap<K, ? extends V>) left; SortedMapDifference<K, V> result = difference(sortedLeft, right); return result; } return difference(left, right, Equivalences.equals()); } /** * Computes the difference between two maps. This difference is an immutable * snapshot of the state of the maps at the time this method is called. It * will never change, even if the maps change at a later time. * * <p>Values are compared using a provided equivalence, in the case of * equality, the value on the 'left' is returned in the difference. * * <p>Since this method uses {@code HashMap} instances internally, the keys of * the supplied maps must be well-behaved with respect to * {@link Object#equals} and {@link Object#hashCode}. * * @param left the map to treat as the "left" map for purposes of comparison * @param right the map to treat as the "right" map for purposes of comparison * @param valueEquivalence the equivalence relationship to use to compare * values * @return the difference between the two maps * @since 10.0 */ @Beta public static <K, V> MapDifference<K, V> difference( Map<? extends K, ? extends V> left, Map<? extends K, ? extends V> right, Equivalence<? super V> valueEquivalence) { Preconditions.checkNotNull(valueEquivalence); Map<K, V> onlyOnLeft = newHashMap(); Map<K, V> onlyOnRight = new HashMap<K, V>(right); // will whittle it down Map<K, V> onBoth = newHashMap(); Map<K, MapDifference.ValueDifference<V>> differences = newHashMap(); boolean eq = true; for (Entry<? extends K, ? 
extends V> entry : left.entrySet()) { K leftKey = entry.getKey(); V leftValue = entry.getValue(); if (right.containsKey(leftKey)) { V rightValue = onlyOnRight.remove(leftKey); if (valueEquivalence.equivalent(leftValue, rightValue)) { onBoth.put(leftKey, leftValue); } else { eq = false; differences.put( leftKey, ValueDifferenceImpl.create(leftValue, rightValue)); } } else { eq = false; onlyOnLeft.put(leftKey, leftValue); } } boolean areEqual = eq && onlyOnRight.isEmpty(); return mapDifference( areEqual, onlyOnLeft, onlyOnRight, onBoth, differences); } private static <K, V> MapDifference<K, V> mapDifference(boolean areEqual, Map<K, V> onlyOnLeft, Map<K, V> onlyOnRight, Map<K, V> onBoth, Map<K, ValueDifference<V>> differences) { return new MapDifferenceImpl<K, V>(areEqual, Collections.unmodifiableMap(onlyOnLeft), Collections.unmodifiableMap(onlyOnRight), Collections.unmodifiableMap(onBoth), Collections.unmodifiableMap(differences)); } static class MapDifferenceImpl<K, V> implements MapDifference<K, V> { final boolean areEqual; final Map<K, V> onlyOnLeft; final Map<K, V> onlyOnRight; final Map<K, V> onBoth; final Map<K, ValueDifference<V>> differences; MapDifferenceImpl(boolean areEqual, Map<K, V> onlyOnLeft, Map<K, V> onlyOnRight, Map<K, V> onBoth, Map<K, ValueDifference<V>> differences) { this.areEqual = areEqual; this.onlyOnLeft = onlyOnLeft; this.onlyOnRight = onlyOnRight; this.onBoth = onBoth; this.differences = differences; } @Override public boolean areEqual() { return areEqual; } @Override public Map<K, V> entriesOnlyOnLeft() { return onlyOnLeft; } @Override public Map<K, V> entriesOnlyOnRight() { return onlyOnRight; } @Override public Map<K, V> entriesInCommon() { return onBoth; } @Override public Map<K, ValueDifference<V>> entriesDiffering() { return differences; } @Override public boolean equals(Object object) { if (object == this) { return true; } if (object instanceof MapDifference) { MapDifference<?, ?> other = (MapDifference<?, ?>) object; return entriesOnlyOnLeft().equals(other.entriesOnlyOnLeft()) && entriesOnlyOnRight().equals(other.entriesOnlyOnRight()) && entriesInCommon().equals(other.entriesInCommon()) && entriesDiffering().equals(other.entriesDiffering()); } return false; } @Override public int hashCode() { return Objects.hashCode(entriesOnlyOnLeft(), entriesOnlyOnRight(), entriesInCommon(), entriesDiffering()); } @Override public String toString() { if (areEqual) { return "equal"; } StringBuilder result = new StringBuilder("not equal"); if (!onlyOnLeft.isEmpty()) { result.append(": only on left=").append(onlyOnLeft); } if (!onlyOnRight.isEmpty()) { result.append(": only on right=").append(onlyOnRight); } if (!differences.isEmpty()) { result.append(": value differences=").append(differences); } return result.toString(); } } static class ValueDifferenceImpl<V> implements MapDifference.ValueDifference<V> { private final V left; private final V right; static <V> ValueDifference<V> create(@Nullable V left, @Nullable V right) { return new ValueDifferenceImpl<V>(left, right); } private ValueDifferenceImpl(@Nullable V left, @Nullable V right) { this.left = left; this.right = right; } @Override public V leftValue() { return left; } @Override public V rightValue() { return right; } @Override public boolean equals(@Nullable Object object) { if (object instanceof MapDifference.ValueDifference<?>) { MapDifference.ValueDifference<?> that = (MapDifference.ValueDifference<?>) object; return Objects.equal(this.left, that.leftValue()) && Objects.equal(this.right, that.rightValue()); } 
return false; } @Override public int hashCode() { return Objects.hashCode(left, right); } @Override public String toString() { return "(" + left + ", " + right + ")"; } } /** * Computes the difference between two sorted maps, using the comparator of * the left map, or {@code Ordering.natural()} if the left map uses the * natural ordering of its elements. This difference is an immutable snapshot * of the state of the maps at the time this method is called. It will never * change, even if the maps change at a later time. * * <p>Since this method uses {@code TreeMap} instances internally, the keys of * the right map must all compare as distinct according to the comparator * of the left map. * * <p><b>Note:</b>If you only need to know whether two sorted maps have the * same mappings, call {@code left.equals(right)} instead of this method. * * @param left the map to treat as the "left" map for purposes of comparison * @param right the map to treat as the "right" map for purposes of comparison * @return the difference between the two maps * @since 11.0 */ @Beta public static <K, V> SortedMapDifference<K, V> difference( SortedMap<K, ? extends V> left, Map<? extends K, ? extends V> right) { checkNotNull(left); checkNotNull(right); Comparator<? super K> comparator = orNaturalOrder(left.comparator()); SortedMap<K, V> onlyOnLeft = Maps.newTreeMap(comparator); SortedMap<K, V> onlyOnRight = Maps.newTreeMap(comparator); onlyOnRight.putAll(right); // will whittle it down SortedMap<K, V> onBoth = Maps.newTreeMap(comparator); SortedMap<K, MapDifference.ValueDifference<V>> differences = Maps.newTreeMap(comparator); boolean eq = true; for (Entry<? extends K, ? extends V> entry : left.entrySet()) { K leftKey = entry.getKey(); V leftValue = entry.getValue(); if (right.containsKey(leftKey)) { V rightValue = onlyOnRight.remove(leftKey); if (Objects.equal(leftValue, rightValue)) { onBoth.put(leftKey, leftValue); } else { eq = false; differences.put( leftKey, ValueDifferenceImpl.create(leftValue, rightValue)); } } else { eq = false; onlyOnLeft.put(leftKey, leftValue); } } boolean areEqual = eq && onlyOnRight.isEmpty(); return sortedMapDifference( areEqual, onlyOnLeft, onlyOnRight, onBoth, differences); } private static <K, V> SortedMapDifference<K, V> sortedMapDifference( boolean areEqual, SortedMap<K, V> onlyOnLeft, SortedMap<K, V> onlyOnRight, SortedMap<K, V> onBoth, SortedMap<K, ValueDifference<V>> differences) { return new SortedMapDifferenceImpl<K, V>(areEqual, Collections.unmodifiableSortedMap(onlyOnLeft), Collections.unmodifiableSortedMap(onlyOnRight), Collections.unmodifiableSortedMap(onBoth), Collections.unmodifiableSortedMap(differences)); } static class SortedMapDifferenceImpl<K, V> extends MapDifferenceImpl<K, V> implements SortedMapDifference<K, V> { SortedMapDifferenceImpl(boolean areEqual, SortedMap<K, V> onlyOnLeft, SortedMap<K, V> onlyOnRight, SortedMap<K, V> onBoth, SortedMap<K, ValueDifference<V>> differences) { super(areEqual, onlyOnLeft, onlyOnRight, onBoth, differences); } @Override public SortedMap<K, ValueDifference<V>> entriesDiffering() { return (SortedMap<K, ValueDifference<V>>) super.entriesDiffering(); } @Override public SortedMap<K, V> entriesInCommon() { return (SortedMap<K, V>) super.entriesInCommon(); } @Override public SortedMap<K, V> entriesOnlyOnLeft() { return (SortedMap<K, V>) super.entriesOnlyOnLeft(); } @Override public SortedMap<K, V> entriesOnlyOnRight() { return (SortedMap<K, V>) super.entriesOnlyOnRight(); } } /** * Returns the specified comparator if not null; otherwise 
returns {@code * Ordering.natural()}. This method is an abomination of generics; the only * purpose of this method is to contain the ugly type-casting in one place. */ @SuppressWarnings("unchecked") static <E> Comparator<? super E> orNaturalOrder( @Nullable Comparator<? super E> comparator) { if (comparator != null) { // can't use ? : because of javac bug 5080917 return comparator; } return (Comparator<E>) Ordering.natural(); } /** * Returns an immutable map for which the {@link Map#values} are the given * elements in the given order, and each key is the product of invoking a * supplied function on its corresponding value. * * @param values the values to use when constructing the {@code Map} * @param keyFunction the function used to produce the key for each value * @return a map mapping the result of evaluating the function {@code * keyFunction} on each value in the input collection to that value * @throws IllegalArgumentException if {@code keyFunction} produces the same * key for more than one value in the input collection * @throws NullPointerException if any elements of {@code values} is null, or * if {@code keyFunction} produces {@code null} for any value */ public static <K, V> ImmutableMap<K, V> uniqueIndex( Iterable<V> values, Function<? super V, K> keyFunction) { return uniqueIndex(values.iterator(), keyFunction); } /** * <b>Deprecated.</b> * * @since 10.0 * @deprecated use {@link #uniqueIndex(Iterator, Function)} by casting {@code * values} to {@code Iterator<V>}, or better yet, by implementing only * {@code Iterator} and not {@code Iterable}. <b>This method is scheduled * for deletion in March 2012.</b> */ @Beta @Deprecated public static <K, V, I extends Object & Iterable<V> & Iterator<V>> ImmutableMap<K, V> uniqueIndex( I values, Function<? super V, K> keyFunction) { Iterable<V> valuesIterable = checkNotNull(values); return uniqueIndex(valuesIterable, keyFunction); } /** * Returns an immutable map for which the {@link Map#values} are the given * elements in the given order, and each key is the product of invoking a * supplied function on its corresponding value. * * @param values the values to use when constructing the {@code Map} * @param keyFunction the function used to produce the key for each value * @return a map mapping the result of evaluating the function {@code * keyFunction} on each value in the input collection to that value * @throws IllegalArgumentException if {@code keyFunction} produces the same * key for more than one value in the input collection * @throws NullPointerException if any elements of {@code values} is null, or * if {@code keyFunction} produces {@code null} for any value * @since 10.0 */ public static <K, V> ImmutableMap<K, V> uniqueIndex( Iterator<V> values, Function<? super V, K> keyFunction) { checkNotNull(keyFunction); ImmutableMap.Builder<K, V> builder = ImmutableMap.builder(); while (values.hasNext()) { V value = values.next(); builder.put(keyFunction.apply(value), value); } return builder.build(); } /** * Creates an {@code ImmutableMap<String, String>} from a {@code Properties} * instance. Properties normally derive from {@code Map<Object, Object>}, but * they typically contain strings, which is awkward. This method lets you get * a plain-old-{@code Map} out of a {@code Properties}. 
* * @param properties a {@code Properties} object to be converted * @return an immutable map containing all the entries in {@code properties} * @throws ClassCastException if any key in {@code Properties} is not a {@code * String} * @throws NullPointerException if any key or value in {@code Properties} is * null */ @GwtIncompatible("java.util.Properties") public static ImmutableMap<String, String> fromProperties( Properties properties) { ImmutableMap.Builder<String, String> builder = ImmutableMap.builder(); for (Enumeration<?> e = properties.propertyNames(); e.hasMoreElements();) { String key = (String) e.nextElement(); builder.put(key, properties.getProperty(key)); } return builder.build(); } /** * Returns an immutable map entry with the specified key and value. The {@link * Entry#setValue} operation throws an {@link UnsupportedOperationException}. * * <p>The returned entry is serializable. * * @param key the key to be associated with the returned entry * @param value the value to be associated with the returned entry */ @GwtCompatible(serializable = true) public static <K, V> Entry<K, V> immutableEntry( @Nullable K key, @Nullable V value) { return new ImmutableEntry<K, V>(key, value); } /** * Returns an unmodifiable view of the specified set of entries. The {@link * Entry#setValue} operation throws an {@link UnsupportedOperationException}, * as do any operations that would modify the returned set. * * @param entrySet the entries for which to return an unmodifiable view * @return an unmodifiable view of the entries */ static <K, V> Set<Entry<K, V>> unmodifiableEntrySet( Set<Entry<K, V>> entrySet) { return new UnmodifiableEntrySet<K, V>( Collections.unmodifiableSet(entrySet)); } /** * Returns an unmodifiable view of the specified map entry. The {@link * Entry#setValue} operation throws an {@link UnsupportedOperationException}. * This also has the side-effect of redefining {@code equals} to comply with * the Entry contract, to avoid a possible nefarious implementation of equals. * * @param entry the entry for which to return an unmodifiable view * @return an unmodifiable view of the entry */ static <K, V> Entry<K, V> unmodifiableEntry(final Entry<K, V> entry) { checkNotNull(entry); return new AbstractMapEntry<K, V>() { @Override public K getKey() { return entry.getKey(); } @Override public V getValue() { return entry.getValue(); } }; } /** @see Multimaps#unmodifiableEntries */ static class UnmodifiableEntries<K, V> extends ForwardingCollection<Entry<K, V>> { private final Collection<Entry<K, V>> entries; UnmodifiableEntries(Collection<Entry<K, V>> entries) { this.entries = entries; } @Override protected Collection<Entry<K, V>> delegate() { return entries; } @Override public Iterator<Entry<K, V>> iterator() { final Iterator<Entry<K, V>> delegate = super.iterator(); return new ForwardingIterator<Entry<K, V>>() { @Override public Entry<K, V> next() { return unmodifiableEntry(super.next()); } @Override public void remove() { throw new UnsupportedOperationException(); } @Override protected Iterator<Entry<K, V>> delegate() { return delegate; } }; } // See java.util.Collections.UnmodifiableEntrySet for details on attacks. @Override public boolean add(Entry<K, V> element) { throw new UnsupportedOperationException(); } @Override public boolean addAll( Collection<? 
extends Entry<K, V>> collection) { throw new UnsupportedOperationException(); } @Override public void clear() { throw new UnsupportedOperationException(); } @Override public boolean remove(Object object) { throw new UnsupportedOperationException(); } @Override public boolean removeAll(Collection<?> collection) { throw new UnsupportedOperationException(); } @Override public boolean retainAll(Collection<?> collection) { throw new UnsupportedOperationException(); } @Override public Object[] toArray() { return standardToArray(); } @Override public <T> T[] toArray(T[] array) { return standardToArray(array); } } /** @see Maps#unmodifiableEntrySet(Set) */ static class UnmodifiableEntrySet<K, V> extends UnmodifiableEntries<K, V> implements Set<Entry<K, V>> { UnmodifiableEntrySet(Set<Entry<K, V>> entries) { super(entries); } // See java.util.Collections.UnmodifiableEntrySet for details on attacks. @Override public boolean equals(@Nullable Object object) { return Sets.equalsImpl(this, object); } @Override public int hashCode() { return Sets.hashCodeImpl(this); } } /** * Returns an unmodifiable view of the specified bimap. This method allows * modules to provide users with "read-only" access to internal bimaps. Query * operations on the returned bimap "read through" to the specified bimap, and * attempts to modify the returned map, whether direct or via its collection * views, result in an {@code UnsupportedOperationException}. * * <p>The returned bimap will be serializable if the specified bimap is * serializable. * * @param bimap the bimap for which an unmodifiable view is to be returned * @return an unmodifiable view of the specified bimap */ public static <K, V> BiMap<K, V> unmodifiableBiMap( BiMap<? extends K, ? extends V> bimap) { return new UnmodifiableBiMap<K, V>(bimap, null); } /** @see Maps#unmodifiableBiMap(BiMap) */ private static class UnmodifiableBiMap<K, V> extends ForwardingMap<K, V> implements BiMap<K, V>, Serializable { final Map<K, V> unmodifiableMap; final BiMap<? extends K, ? extends V> delegate; transient BiMap<V, K> inverse; transient Set<V> values; UnmodifiableBiMap(BiMap<? extends K, ? extends V> delegate, @Nullable BiMap<V, K> inverse) { unmodifiableMap = Collections.unmodifiableMap(delegate); this.delegate = delegate; this.inverse = inverse; } @Override protected Map<K, V> delegate() { return unmodifiableMap; } @Override public V forcePut(K key, V value) { throw new UnsupportedOperationException(); } @Override public BiMap<V, K> inverse() { BiMap<V, K> result = inverse; return (result == null) ? inverse = new UnmodifiableBiMap<V, K>(delegate.inverse(), this) : result; } @Override public Set<V> values() { Set<V> result = values; return (result == null) ? values = Collections.unmodifiableSet(delegate.values()) : result; } private static final long serialVersionUID = 0; } /** * Returns a view of a map where each value is transformed by a function. All * other properties of the map, such as iteration order, are left intact. For * example, the code: <pre> {@code * * Map<String, Integer> map = ImmutableMap.of("a", 4, "b", 9); * Function<Integer, Double> sqrt = * new Function<Integer, Double>() { * public Double apply(Integer in) { * return Math.sqrt((int) in); * } * }; * Map<String, Double> transformed = Maps.transformValues(map, sqrt); * System.out.println(transformed);}</pre> * * ... prints {@code {a=2.0, b=3.0}}. * * <p>Changes in the underlying map are reflected in this view. Conversely, * this view supports removal operations, and these are reflected in the * underlying map. 
* * <p>It's acceptable for the underlying map to contain null keys, and even * null values provided that the function is capable of accepting null input. * The transformed map might contain null values, if the function sometimes * gives a null result. * * <p>The returned map is not thread-safe or serializable, even if the * underlying map is. * * <p>The function is applied lazily, invoked when needed. This is necessary * for the returned map to be a view, but it means that the function will be * applied many times for bulk operations like {@link Map#containsValue} and * {@code Map.toString()}. For this to perform well, {@code function} should * be fast. To avoid lazy evaluation when the returned map doesn't need to be * a view, copy the returned map into a new map of your choosing. */ public static <K, V1, V2> Map<K, V2> transformValues( Map<K, V1> fromMap, final Function<? super V1, V2> function) { checkNotNull(function); EntryTransformer<K, V1, V2> transformer = new EntryTransformer<K, V1, V2>() { @Override public V2 transformEntry(K key, V1 value) { return function.apply(value); } }; return transformEntries(fromMap, transformer); } /** * Returns a view of a sorted map where each value is transformed by a * function. All other properties of the map, such as iteration order, are * left intact. For example, the code: <pre> {@code * * SortedMap<String, Integer> map = ImmutableSortedMap.of("a", 4, "b", 9); * Function<Integer, Double> sqrt = * new Function<Integer, Double>() { * public Double apply(Integer in) { * return Math.sqrt((int) in); * } * }; * SortedMap<String, Double> transformed = * Maps.transformSortedValues(map, sqrt); * System.out.println(transformed);}</pre> * * ... prints {@code {a=2.0, b=3.0}}. * * <p>Changes in the underlying map are reflected in this view. Conversely, * this view supports removal operations, and these are reflected in the * underlying map. * * <p>It's acceptable for the underlying map to contain null keys, and even * null values provided that the function is capable of accepting null input. * The transformed map might contain null values, if the function sometimes * gives a null result. * * <p>The returned map is not thread-safe or serializable, even if the * underlying map is. * * <p>The function is applied lazily, invoked when needed. This is necessary * for the returned map to be a view, but it means that the function will be * applied many times for bulk operations like {@link Map#containsValue} and * {@code Map.toString()}. For this to perform well, {@code function} should * be fast. To avoid lazy evaluation when the returned map doesn't need to be * a view, copy the returned map into a new map of your choosing. * * @since 11.0 */ @Beta public static <K, V1, V2> SortedMap<K, V2> transformValues( SortedMap<K, V1> fromMap, final Function<? super V1, V2> function) { checkNotNull(function); EntryTransformer<K, V1, V2> transformer = new EntryTransformer<K, V1, V2>() { @Override public V2 transformEntry(K key, V1 value) { return function.apply(value); } }; return transformEntries(fromMap, transformer); } /** * Returns a view of a map whose values are derived from the original map's * entries. In contrast to {@link #transformValues}, this method's * entry-transformation logic may depend on the key as well as the value. * * <p>All other properties of the transformed map, such as iteration order, * are left intact. 
For example, the code: <pre> {@code * * Map<String, Boolean> options = * ImmutableMap.of("verbose", true, "sort", false); * EntryTransformer<String, Boolean, String> flagPrefixer = * new EntryTransformer<String, Boolean, String>() { * public String transformEntry(String key, Boolean value) { * return value ? key : "no" + key; * } * }; * Map<String, String> transformed = * Maps.transformEntries(options, flagPrefixer); * System.out.println(transformed);}</pre> * * ... prints {@code {verbose=verbose, sort=nosort}}. * * <p>Changes in the underlying map are reflected in this view. Conversely, * this view supports removal operations, and these are reflected in the * underlying map. * * <p>It's acceptable for the underlying map to contain null keys and null * values provided that the transformer is capable of accepting null inputs. * The transformed map might contain null values if the transformer sometimes * gives a null result. * * <p>The returned map is not thread-safe or serializable, even if the * underlying map is. * * <p>The transformer is applied lazily, invoked when needed. This is * necessary for the returned map to be a view, but it means that the * transformer will be applied many times for bulk operations like {@link * Map#containsValue} and {@link Object#toString}. For this to perform well, * {@code transformer} should be fast. To avoid lazy evaluation when the * returned map doesn't need to be a view, copy the returned map into a new * map of your choosing. * * <p><b>Warning:</b> This method assumes that for any instance {@code k} of * {@code EntryTransformer} key type {@code K}, {@code k.equals(k2)} implies * that {@code k2} is also of type {@code K}. Using an {@code * EntryTransformer} key type for which this may not hold, such as {@code * ArrayList}, may risk a {@code ClassCastException} when calling methods on * the transformed map. * * @since 7.0 */ public static <K, V1, V2> Map<K, V2> transformEntries( Map<K, V1> fromMap, EntryTransformer<? super K, ? super V1, V2> transformer) { if (fromMap instanceof SortedMap) { return transformEntries((SortedMap<K, V1>) fromMap, transformer); } return new TransformedEntriesMap<K, V1, V2>(fromMap, transformer); } /** * Returns a view of a sorted map whose values are derived from the original * sorted map's entries. In contrast to {@link #transformValues}, this * method's entry-transformation logic may depend on the key as well as the * value. * * <p>All other properties of the transformed map, such as iteration order, * are left intact. For example, the code: <pre> {@code * * SortedMap<String, Boolean> options = * ImmutableSortedMap.of("verbose", true, "sort", false); * EntryTransformer<String, Boolean, String> flagPrefixer = * new EntryTransformer<String, Boolean, String>() { * public String transformEntry(String key, Boolean value) { * return value ? key : "no" + key; * } * }; * SortedMap<String, String> transformed = * Maps.transformEntries(options, flagPrefixer); * System.out.println(transformed);}</pre> * * ... prints {@code {sort=nosort, verbose=verbose}}. * * <p>Changes in the underlying map are reflected in this view. Conversely, * this view supports removal operations, and these are reflected in the * underlying map. * * <p>It's acceptable for the underlying map to contain null keys and null * values provided that the transformer is capable of accepting null inputs. * The transformed map might contain null values if the transformer sometimes * gives a null result.
* * <p>The returned map is not thread-safe or serializable, even if the * underlying map is. * * <p>The transformer is applied lazily, invoked when needed. This is * necessary for the returned map to be a view, but it means that the * transformer will be applied many times for bulk operations like {@link * Map#containsValue} and {@link Object#toString}. For this to perform well, * {@code transformer} should be fast. To avoid lazy evaluation when the * returned map doesn't need to be a view, copy the returned map into a new * map of your choosing. * * <p><b>Warning:</b> This method assumes that for any instance {@code k} of * {@code EntryTransformer} key type {@code K}, {@code k.equals(k2)} implies * that {@code k2} is also of type {@code K}. Using an {@code * EntryTransformer} key type for which this may not hold, such as {@code * ArrayList}, may risk a {@code ClassCastException} when calling methods on * the transformed map. * * @since 11.0 */ @Beta public static <K, V1, V2> SortedMap<K, V2> transformEntries( final SortedMap<K, V1> fromMap, EntryTransformer<? super K, ? super V1, V2> transformer) { return new TransformedEntriesSortedMap<K, V1, V2>(fromMap, transformer); } /** * A transformation of the value of a key-value pair, using both key and value * as inputs. To apply the transformation to a map, use * {@link Maps#transformEntries(Map, EntryTransformer)}. * * @param <K> the key type of the input and output entries * @param <V1> the value type of the input entry * @param <V2> the value type of the output entry * @since 7.0 */ public interface EntryTransformer<K, V1, V2> { /** * Determines an output value based on a key-value pair. This method is * <i>generally expected</i>, but not absolutely required, to have the * following properties: * * <ul> * <li>Its execution does not cause any observable side effects. * <li>The computation is <i>consistent with equals</i>; that is, * {@link Objects#equal Objects.equal}{@code (k1, k2) &&} * {@link Objects#equal}{@code (v1, v2)} implies that {@code * Objects.equal(transformer.transform(k1, v1), * transformer.transform(k2, v2))}. * </ul> * * @throws NullPointerException if the key or value is null and this * transformer does not accept null arguments */ V2 transformEntry(@Nullable K key, @Nullable V1 value); } static class TransformedEntriesMap<K, V1, V2> extends AbstractMap<K, V2> { final Map<K, V1> fromMap; final EntryTransformer<? super K, ? super V1, V2> transformer; TransformedEntriesMap( Map<K, V1> fromMap, EntryTransformer<? super K, ? super V1, V2> transformer) { this.fromMap = checkNotNull(fromMap); this.transformer = checkNotNull(transformer); } @Override public int size() { return fromMap.size(); } @Override public boolean containsKey(Object key) { return fromMap.containsKey(key); } // safe as long as the user followed the <b>Warning</b> in the javadoc @SuppressWarnings("unchecked") @Override public V2 get(Object key) { V1 value = fromMap.get(key); return (value != null || fromMap.containsKey(key)) ? transformer.transformEntry((K) key, value) : null; } // safe as long as the user followed the <b>Warning</b> in the javadoc @SuppressWarnings("unchecked") @Override public V2 remove(Object key) { return fromMap.containsKey(key) ? 
transformer.transformEntry((K) key, fromMap.remove(key)) : null; } @Override public void clear() { fromMap.clear(); } @Override public Set<K> keySet() { return fromMap.keySet(); } Set<Entry<K, V2>> entrySet; @Override public Set<Entry<K, V2>> entrySet() { Set<Entry<K, V2>> result = entrySet; if (result == null) { entrySet = result = new EntrySet<K, V2>() { @Override Map<K, V2> map() { return TransformedEntriesMap.this; } @Override public Iterator<Entry<K, V2>> iterator() { final Iterator<Entry<K, V1>> backingIterator = fromMap.entrySet().iterator(); return Iterators.transform(backingIterator, new Function<Entry<K, V1>, Entry<K, V2>>() { @Override public Entry<K, V2> apply(Entry<K, V1> entry) { return immutableEntry( entry.getKey(), transformer.transformEntry(entry.getKey(), entry.getValue())); } }); } }; } return result; } Collection<V2> values; @Override public Collection<V2> values() { Collection<V2> result = values; if (result == null) { return values = new Values<K, V2>() { @Override Map<K, V2> map() { return TransformedEntriesMap.this; } }; } return result; } } static class TransformedEntriesSortedMap<K, V1, V2> extends TransformedEntriesMap<K, V1, V2> implements SortedMap<K, V2> { protected SortedMap<K, V1> fromMap() { return (SortedMap<K, V1>) fromMap; } TransformedEntriesSortedMap(SortedMap<K, V1> fromMap, EntryTransformer<? super K, ? super V1, V2> transformer) { super(fromMap, transformer); } @Override public Comparator<? super K> comparator() { return fromMap().comparator(); } @Override public K firstKey() { return fromMap().firstKey(); } @Override public SortedMap<K, V2> headMap(K toKey) { return transformEntries(fromMap().headMap(toKey), transformer); } @Override public K lastKey() { return fromMap().lastKey(); } @Override public SortedMap<K, V2> subMap(K fromKey, K toKey) { return transformEntries( fromMap().subMap(fromKey, toKey), transformer); } @Override public SortedMap<K, V2> tailMap(K fromKey) { return transformEntries(fromMap().tailMap(fromKey), transformer); } } /** * Returns a map containing the mappings in {@code unfiltered} whose keys * satisfy a predicate. The returned map is a live view of {@code unfiltered}; * changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a key that * doesn't satisfy the predicate, the map's {@code put()} and {@code putAll()} * methods throw an {@link IllegalArgumentException}. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings whose keys satisfy the * filter will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code keyPredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. Do not provide a * predicate such as {@code Predicates.instanceOf(ArrayList.class)}, which is * inconsistent with equals. */ public static <K, V> Map<K, V> filterKeys( Map<K, V> unfiltered, final Predicate<? 
super K> keyPredicate) { if (unfiltered instanceof SortedMap) { return filterKeys((SortedMap<K, V>) unfiltered, keyPredicate); } checkNotNull(keyPredicate); Predicate<Entry<K, V>> entryPredicate = new Predicate<Entry<K, V>>() { @Override public boolean apply(Entry<K, V> input) { return keyPredicate.apply(input.getKey()); } }; return (unfiltered instanceof AbstractFilteredMap) ? filterFiltered((AbstractFilteredMap<K, V>) unfiltered, entryPredicate) : new FilteredKeyMap<K, V>( checkNotNull(unfiltered), keyPredicate, entryPredicate); } /** * Returns a sorted map containing the mappings in {@code unfiltered} whose * keys satisfy a predicate. The returned map is a live view of {@code * unfiltered}; changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a key that * doesn't satisfy the predicate, the map's {@code put()} and {@code putAll()} * methods throw an {@link IllegalArgumentException}. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings whose keys satisfy the * filter will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code keyPredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. Do not provide a * predicate such as {@code Predicates.instanceOf(ArrayList.class)}, which is * inconsistent with equals. * * @since 11.0 */ @Beta public static <K, V> SortedMap<K, V> filterKeys( SortedMap<K, V> unfiltered, final Predicate<? super K> keyPredicate) { // TODO: Return a subclass of Maps.FilteredKeyMap for slightly better // performance. checkNotNull(keyPredicate); Predicate<Entry<K, V>> entryPredicate = new Predicate<Entry<K, V>>() { @Override public boolean apply(Entry<K, V> input) { return keyPredicate.apply(input.getKey()); } }; return filterEntries(unfiltered, entryPredicate); } /** * Returns a map containing the mappings in {@code unfiltered} whose values * satisfy a predicate. The returned map is a live view of {@code unfiltered}; * changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a value * that doesn't satisfy the predicate, the map's {@code put()}, {@code * putAll()}, and {@link Entry#setValue} methods throw an {@link * IllegalArgumentException}. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings whose values satisfy the * filter will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. 
* * <p><b>Warning:</b> {@code valuePredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. Do not provide a * predicate such as {@code Predicates.instanceOf(ArrayList.class)}, which is * inconsistent with equals. */ public static <K, V> Map<K, V> filterValues( Map<K, V> unfiltered, final Predicate<? super V> valuePredicate) { if (unfiltered instanceof SortedMap) { return filterValues((SortedMap<K, V>) unfiltered, valuePredicate); } checkNotNull(valuePredicate); Predicate<Entry<K, V>> entryPredicate = new Predicate<Entry<K, V>>() { @Override public boolean apply(Entry<K, V> input) { return valuePredicate.apply(input.getValue()); } }; return filterEntries(unfiltered, entryPredicate); } /** * Returns a sorted map containing the mappings in {@code unfiltered} whose * values satisfy a predicate. The returned map is a live view of {@code * unfiltered}; changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a value * that doesn't satisfy the predicate, the map's {@code put()}, {@code * putAll()}, and {@link Entry#setValue} methods throw an {@link * IllegalArgumentException}. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings whose values satisfy the * filter will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code valuePredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. Do not provide a * predicate such as {@code Predicates.instanceOf(ArrayList.class)}, which is * inconsistent with equals. * * @since 11.0 */ @Beta public static <K, V> SortedMap<K, V> filterValues( SortedMap<K, V> unfiltered, final Predicate<? super V> valuePredicate) { checkNotNull(valuePredicate); Predicate<Entry<K, V>> entryPredicate = new Predicate<Entry<K, V>>() { @Override public boolean apply(Entry<K, V> input) { return valuePredicate.apply(input.getValue()); } }; return filterEntries(unfiltered, entryPredicate); } /** * Returns a map containing the mappings in {@code unfiltered} that satisfy a * predicate. The returned map is a live view of {@code unfiltered}; changes * to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a * key/value pair that doesn't satisfy the predicate, the map's {@code put()} * and {@code putAll()} methods throw an {@link IllegalArgumentException}. * Similarly, the map's entries have a {@link Entry#setValue} method that * throws an {@link IllegalArgumentException} when the existing key and the * provided value don't satisfy the predicate. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings that satisfy the filter * will be removed from the underlying map. 
* * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code entryPredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. */ public static <K, V> Map<K, V> filterEntries( Map<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) { if (unfiltered instanceof SortedMap) { return filterEntries((SortedMap<K, V>) unfiltered, entryPredicate); } checkNotNull(entryPredicate); return (unfiltered instanceof AbstractFilteredMap) ? filterFiltered((AbstractFilteredMap<K, V>) unfiltered, entryPredicate) : new FilteredEntryMap<K, V>(checkNotNull(unfiltered), entryPredicate); } /** * Returns a sorted map containing the mappings in {@code unfiltered} that * satisfy a predicate. The returned map is a live view of {@code unfiltered}; * changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a * key/value pair that doesn't satisfy the predicate, the map's {@code put()} * and {@code putAll()} methods throw an {@link IllegalArgumentException}. * Similarly, the map's entries have a {@link Entry#setValue} method that * throws an {@link IllegalArgumentException} when the existing key and the * provided value don't satisfy the predicate. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings that satisfy the filter * will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code entryPredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. * * @since 11.0 */ @Beta public static <K, V> SortedMap<K, V> filterEntries( SortedMap<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) { checkNotNull(entryPredicate); return (unfiltered instanceof FilteredEntrySortedMap) ? filterFiltered((FilteredEntrySortedMap<K, V>) unfiltered, entryPredicate) : new FilteredEntrySortedMap<K, V>(checkNotNull(unfiltered), entryPredicate); } /** * Support {@code clear()}, {@code removeAll()}, and {@code retainAll()} when * filtering a filtered map. */ private static <K, V> Map<K, V> filterFiltered(AbstractFilteredMap<K, V> map, Predicate<? super Entry<K, V>> entryPredicate) { Predicate<Entry<K, V>> predicate = Predicates.and(map.predicate, entryPredicate); return new FilteredEntryMap<K, V>(map.unfiltered, predicate); } private abstract static class AbstractFilteredMap<K, V> extends AbstractMap<K, V> { final Map<K, V> unfiltered; final Predicate<? super Entry<K, V>> predicate; AbstractFilteredMap( Map<K, V> unfiltered, Predicate<? 
super Entry<K, V>> predicate) { this.unfiltered = unfiltered; this.predicate = predicate; } boolean apply(Object key, V value) { // This method is called only when the key is in the map, implying that // key is a K. @SuppressWarnings("unchecked") K k = (K) key; return predicate.apply(Maps.immutableEntry(k, value)); } @Override public V put(K key, V value) { checkArgument(apply(key, value)); return unfiltered.put(key, value); } @Override public void putAll(Map<? extends K, ? extends V> map) { for (Entry<? extends K, ? extends V> entry : map.entrySet()) { checkArgument(apply(entry.getKey(), entry.getValue())); } unfiltered.putAll(map); } @Override public boolean containsKey(Object key) { return unfiltered.containsKey(key) && apply(key, unfiltered.get(key)); } @Override public V get(Object key) { V value = unfiltered.get(key); return ((value != null) && apply(key, value)) ? value : null; } @Override public boolean isEmpty() { return entrySet().isEmpty(); } @Override public V remove(Object key) { return containsKey(key) ? unfiltered.remove(key) : null; } Collection<V> values; @Override public Collection<V> values() { Collection<V> result = values; return (result == null) ? values = new Values() : result; } class Values extends AbstractCollection<V> { @Override public Iterator<V> iterator() { final Iterator<Entry<K, V>> entryIterator = entrySet().iterator(); return new UnmodifiableIterator<V>() { @Override public boolean hasNext() { return entryIterator.hasNext(); } @Override public V next() { return entryIterator.next().getValue(); } }; } @Override public int size() { return entrySet().size(); } @Override public void clear() { entrySet().clear(); } @Override public boolean isEmpty() { return entrySet().isEmpty(); } @Override public boolean remove(Object o) { Iterator<Entry<K, V>> iterator = unfiltered.entrySet().iterator(); while (iterator.hasNext()) { Entry<K, V> entry = iterator.next(); if (Objects.equal(o, entry.getValue()) && predicate.apply(entry)) { iterator.remove(); return true; } } return false; } @Override public boolean removeAll(Collection<?> collection) { checkNotNull(collection); boolean changed = false; Iterator<Entry<K, V>> iterator = unfiltered.entrySet().iterator(); while (iterator.hasNext()) { Entry<K, V> entry = iterator.next(); if (collection.contains(entry.getValue()) && predicate.apply(entry)) { iterator.remove(); changed = true; } } return changed; } @Override public boolean retainAll(Collection<?> collection) { checkNotNull(collection); boolean changed = false; Iterator<Entry<K, V>> iterator = unfiltered.entrySet().iterator(); while (iterator.hasNext()) { Entry<K, V> entry = iterator.next(); if (!collection.contains(entry.getValue()) && predicate.apply(entry)) { iterator.remove(); changed = true; } } return changed; } @Override public Object[] toArray() { // creating an ArrayList so filtering happens once return Lists.newArrayList(iterator()).toArray(); } @Override public <T> T[] toArray(T[] array) { return Lists.newArrayList(iterator()).toArray(array); } } } /** * Support {@code clear()}, {@code removeAll()}, and {@code retainAll()} when * filtering a filtered sorted map. */ private static <K, V> SortedMap<K, V> filterFiltered( FilteredEntrySortedMap<K, V> map, Predicate<? 
super Entry<K, V>> entryPredicate) { Predicate<Entry<K, V>> predicate = Predicates.and(map.predicate, entryPredicate); return new FilteredEntrySortedMap<K, V>(map.sortedMap(), predicate); } private static class FilteredEntrySortedMap<K, V> extends FilteredEntryMap<K, V> implements SortedMap<K, V> { FilteredEntrySortedMap(SortedMap<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) { super(unfiltered, entryPredicate); } SortedMap<K, V> sortedMap() { return (SortedMap<K, V>) unfiltered; } @Override public Comparator<? super K> comparator() { return sortedMap().comparator(); } @Override public K firstKey() { // correctly throws NoSuchElementException when filtered map is empty. return keySet().iterator().next(); } @Override public K lastKey() { SortedMap<K, V> headMap = sortedMap(); while (true) { // correctly throws NoSuchElementException when filtered map is empty. K key = headMap.lastKey(); if (apply(key, unfiltered.get(key))) { return key; } headMap = sortedMap().headMap(key); } } @Override public SortedMap<K, V> headMap(K toKey) { return new FilteredEntrySortedMap<K, V>(sortedMap().headMap(toKey), predicate); } @Override public SortedMap<K, V> subMap(K fromKey, K toKey) { return new FilteredEntrySortedMap<K, V>( sortedMap().subMap(fromKey, toKey), predicate); } @Override public SortedMap<K, V> tailMap(K fromKey) { return new FilteredEntrySortedMap<K, V>( sortedMap().tailMap(fromKey), predicate); } } private static class FilteredKeyMap<K, V> extends AbstractFilteredMap<K, V> { Predicate<? super K> keyPredicate; FilteredKeyMap(Map<K, V> unfiltered, Predicate<? super K> keyPredicate, Predicate<Entry<K, V>> entryPredicate) { super(unfiltered, entryPredicate); this.keyPredicate = keyPredicate; } Set<Entry<K, V>> entrySet; @Override public Set<Entry<K, V>> entrySet() { Set<Entry<K, V>> result = entrySet; return (result == null) ? entrySet = Sets.filter(unfiltered.entrySet(), predicate) : result; } Set<K> keySet; @Override public Set<K> keySet() { Set<K> result = keySet; return (result == null) ? keySet = Sets.filter(unfiltered.keySet(), keyPredicate) : result; } // The cast is called only when the key is in the unfiltered map, implying // that key is a K. @Override @SuppressWarnings("unchecked") public boolean containsKey(Object key) { return unfiltered.containsKey(key) && keyPredicate.apply((K) key); } } static class FilteredEntryMap<K, V> extends AbstractFilteredMap<K, V> { /** * Entries in this set satisfy the predicate, but they don't validate the * input to {@code Entry.setValue()}. */ final Set<Entry<K, V>> filteredEntrySet; FilteredEntryMap( Map<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) { super(unfiltered, entryPredicate); filteredEntrySet = Sets.filter(unfiltered.entrySet(), predicate); } Set<Entry<K, V>> entrySet; @Override public Set<Entry<K, V>> entrySet() { Set<Entry<K, V>> result = entrySet; return (result == null) ? 
entrySet = new EntrySet() : result; } private class EntrySet extends ForwardingSet<Entry<K, V>> { @Override protected Set<Entry<K, V>> delegate() { return filteredEntrySet; } @Override public Iterator<Entry<K, V>> iterator() { final Iterator<Entry<K, V>> iterator = filteredEntrySet.iterator(); return new UnmodifiableIterator<Entry<K, V>>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public Entry<K, V> next() { final Entry<K, V> entry = iterator.next(); return new ForwardingMapEntry<K, V>() { @Override protected Entry<K, V> delegate() { return entry; } @Override public V setValue(V value) { checkArgument(apply(entry.getKey(), value)); return super.setValue(value); } }; } }; } } Set<K> keySet; @Override public Set<K> keySet() { Set<K> result = keySet; return (result == null) ? keySet = new KeySet() : result; } private class KeySet extends AbstractSet<K> { @Override public Iterator<K> iterator() { final Iterator<Entry<K, V>> iterator = filteredEntrySet.iterator(); return new UnmodifiableIterator<K>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public K next() { return iterator.next().getKey(); } }; } @Override public int size() { return filteredEntrySet.size(); } @Override public void clear() { filteredEntrySet.clear(); } @Override public boolean contains(Object o) { return containsKey(o); } @Override public boolean remove(Object o) { if (containsKey(o)) { unfiltered.remove(o); return true; } return false; } @Override public boolean removeAll(Collection<?> collection) { checkNotNull(collection); // for GWT boolean changed = false; for (Object obj : collection) { changed |= remove(obj); } return changed; } @Override public boolean retainAll(Collection<?> collection) { checkNotNull(collection); // for GWT boolean changed = false; Iterator<Entry<K, V>> iterator = unfiltered.entrySet().iterator(); while (iterator.hasNext()) { Entry<K, V> entry = iterator.next(); if (!collection.contains(entry.getKey()) && predicate.apply(entry)) { iterator.remove(); changed = true; } } return changed; } @Override public Object[] toArray() { // creating an ArrayList so filtering happens once return Lists.newArrayList(iterator()).toArray(); } @Override public <T> T[] toArray(T[] array) { return Lists.newArrayList(iterator()).toArray(array); } } } /** * {@code AbstractMap} extension that implements {@link #isEmpty()} as {@code * entrySet().isEmpty()} instead of {@code size() == 0} to speed up * implementations where {@code size()} is O(n), and it delegates the {@code * isEmpty()} methods of its key set and value collection to this * implementation. */ @GwtCompatible static abstract class ImprovedAbstractMap<K, V> extends AbstractMap<K, V> { /** * Creates the entry set to be returned by {@link #entrySet()}. This method * is invoked at most once on a given map, at the time when {@code entrySet} * is first called. 
*/ protected abstract Set<Entry<K, V>> createEntrySet(); private Set<Entry<K, V>> entrySet; @Override public Set<Entry<K, V>> entrySet() { Set<Entry<K, V>> result = entrySet; if (result == null) { entrySet = result = createEntrySet(); } return result; } private Set<K> keySet; @Override public Set<K> keySet() { Set<K> result = keySet; if (result == null) { return keySet = new KeySet<K, V>() { @Override Map<K, V> map() { return ImprovedAbstractMap.this; } }; } return result; } private Collection<V> values; @Override public Collection<V> values() { Collection<V> result = values; if (result == null) { return values = new Values<K, V>(){ @Override Map<K, V> map() { return ImprovedAbstractMap.this; } }; } return result; } /** * Returns {@code true} if this map contains no key-value mappings. * * <p>The implementation returns {@code entrySet().isEmpty()}. * * @return {@code true} if this map contains no key-value mappings */ @Override public boolean isEmpty() { return entrySet().isEmpty(); } } static final MapJoiner STANDARD_JOINER = Collections2.STANDARD_JOINER.withKeyValueSeparator("="); /** * Delegates to {@link Map#get}. Returns {@code null} on {@code * ClassCastException}. */ static <V> V safeGet(Map<?, V> map, Object key) { try { return map.get(key); } catch (ClassCastException e) { return null; } } /** * Delegates to {@link Map#containsKey}. Returns {@code false} on {@code * ClassCastException} */ static boolean safeContainsKey(Map<?, ?> map, Object key) { try { return map.containsKey(key); } catch (ClassCastException e) { return false; } } /** * Implements {@code Collection.contains} safely for forwarding collections of * map entries. If {@code o} is an instance of {@code Map.Entry}, it is * wrapped using {@link #unmodifiableEntry} to protect against a possible * nefarious equals method. * * <p>Note that {@code c} is the backing (delegate) collection, rather than * the forwarding collection. * * @param c the delegate (unwrapped) collection of map entries * @param o the object that might be contained in {@code c} * @return {@code true} if {@code c} contains {@code o} */ static <K, V> boolean containsEntryImpl(Collection<Entry<K, V>> c, Object o) { if (!(o instanceof Entry)) { return false; } return c.contains(unmodifiableEntry((Entry<?, ?>) o)); } /** * Implements {@code Collection.remove} safely for forwarding collections of * map entries. If {@code o} is an instance of {@code Map.Entry}, it is * wrapped using {@link #unmodifiableEntry} to protect against a possible * nefarious equals method. * * <p>Note that {@code c} is backing (delegate) collection, rather than the * forwarding collection. * * @param c the delegate (unwrapped) collection of map entries * @param o the object to remove from {@code c} * @return {@code true} if {@code c} was changed */ static <K, V> boolean removeEntryImpl(Collection<Entry<K, V>> c, Object o) { if (!(o instanceof Entry)) { return false; } return c.remove(unmodifiableEntry((Entry<?, ?>) o)); } /** * An implementation of {@link Map#equals}. */ static boolean equalsImpl(Map<?, ?> map, Object object) { if (map == object) { return true; } if (object instanceof Map) { Map<?, ?> o = (Map<?, ?>) object; return map.entrySet().equals(o.entrySet()); } return false; } /** * An implementation of {@link Map#hashCode}. */ static int hashCodeImpl(Map<?, ?> map) { return Sets.hashCodeImpl(map.entrySet()); } /** * An implementation of {@link Map#toString}. 
*/ static String toStringImpl(Map<?, ?> map) { StringBuilder sb = Collections2.newStringBuilderForCollection(map.size()).append('{'); STANDARD_JOINER.appendTo(sb, map); return sb.append('}').toString(); } /** * An implementation of {@link Map#putAll}. */ static <K, V> void putAllImpl( Map<K, V> self, Map<? extends K, ? extends V> map) { for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) { self.put(entry.getKey(), entry.getValue()); } } /** * An admittedly inefficient implementation of {@link Map#containsKey}. */ static boolean containsKeyImpl(Map<?, ?> map, @Nullable Object key) { for (Entry<?, ?> entry : map.entrySet()) { if (Objects.equal(entry.getKey(), key)) { return true; } } return false; } /** * An implementation of {@link Map#containsValue}. */ static boolean containsValueImpl(Map<?, ?> map, @Nullable Object value) { for (Entry<?, ?> entry : map.entrySet()) { if (Objects.equal(entry.getValue(), value)) { return true; } } return false; } abstract static class KeySet<K, V> extends AbstractSet<K> { abstract Map<K, V> map(); @Override public Iterator<K> iterator() { return Iterators.transform(map().entrySet().iterator(), new Function<Map.Entry<K, V>, K>() { @Override public K apply(Entry<K, V> entry) { return entry.getKey(); } }); } @Override public int size() { return map().size(); } @Override public boolean isEmpty() { return map().isEmpty(); } @Override public boolean contains(Object o) { return map().containsKey(o); } @Override public boolean remove(Object o) { if (contains(o)) { map().remove(o); return true; } return false; } @Override public boolean removeAll(Collection<?> c) { // TODO(user): find out why this is necessary to make GWT tests pass. return super.removeAll(checkNotNull(c)); } @Override public void clear() { map().clear(); } } abstract static class Values<K, V> extends AbstractCollection<V> { abstract Map<K, V> map(); @Override public Iterator<V> iterator() { return Iterators.transform(map().entrySet().iterator(), new Function<Entry<K, V>, V>() { @Override public V apply(Entry<K, V> entry) { return entry.getValue(); } }); } @Override public boolean remove(Object o) { try { return super.remove(o); } catch (UnsupportedOperationException e) { for (Entry<K, V> entry : map().entrySet()) { if (Objects.equal(o, entry.getValue())) { map().remove(entry.getKey()); return true; } } return false; } } @Override public boolean removeAll(Collection<?> c) { try { return super.removeAll(checkNotNull(c)); } catch (UnsupportedOperationException e) { Set<K> toRemove = Sets.newHashSet(); for (Entry<K, V> entry : map().entrySet()) { if (c.contains(entry.getValue())) { toRemove.add(entry.getKey()); } } return map().keySet().removeAll(toRemove); } } @Override public boolean retainAll(Collection<?> c) { try { return super.retainAll(checkNotNull(c)); } catch (UnsupportedOperationException e) { Set<K> toRetain = Sets.newHashSet(); for (Entry<K, V> entry : map().entrySet()) { if (c.contains(entry.getValue())) { toRetain.add(entry.getKey()); } } return map().keySet().retainAll(toRetain); } } @Override public int size() { return map().size(); } @Override public boolean isEmpty() { return map().isEmpty(); } @Override public boolean contains(@Nullable Object o) { return map().containsValue(o); } @Override public void clear() { map().clear(); } } abstract static class EntrySet<K, V> extends AbstractSet<Entry<K, V>> { abstract Map<K, V> map(); @Override public int size() { return map().size(); } @Override public void clear() { map().clear(); } @Override public boolean contains(Object o) 
{ if (o instanceof Entry) { Entry<?, ?> entry = (Entry<?, ?>) o; Object key = entry.getKey(); V value = map().get(key); return Objects.equal(value, entry.getValue()) && (value != null || map().containsKey(key)); } return false; } @Override public boolean isEmpty() { return map().isEmpty(); } @Override public boolean remove(Object o) { if (contains(o)) { Entry<?, ?> entry = (Entry<?, ?>) o; return map().keySet().remove(entry.getKey()); } return false; } @Override public boolean removeAll(Collection<?> c) { try { return super.removeAll(checkNotNull(c)); } catch (UnsupportedOperationException e) { // if the iterators don't support remove boolean changed = true; for (Object o : c) { changed |= remove(o); } return changed; } } @Override public boolean retainAll(Collection<?> c) { try { return super.retainAll(checkNotNull(c)); } catch (UnsupportedOperationException e) { // if the iterators don't support remove Set<Object> keys = Sets.newHashSetWithExpectedSize(c.size()); for (Object o : c) { if (contains(o)) { Entry<?, ?> entry = (Entry<?, ?>) o; keys.add(entry.getKey()); } } return map().keySet().retainAll(keys); } } } }
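// Editor's illustrative sketch (not part of the original Guava source): the same
// entry-iteration technique used by containsKeyImpl, containsValueImpl and
// Values.remove above, extracted into a standalone helper. The class and method
// names below are hypothetical.

import java.util.Iterator;
import java.util.Map;

import com.google.common.base.Objects;

class MapValueHelperSketch {
  /** Removes the first mapping whose value equals {@code value}; returns true if one was removed. */
  static <K, V> boolean removeFirstEntryWithValue(Map<K, V> map, Object value) {
    Iterator<Map.Entry<K, V>> iterator = map.entrySet().iterator();
    while (iterator.hasNext()) {
      if (Objects.equal(value, iterator.next().getValue())) {
        iterator.remove(); // writes through to the backing map
        return true;
      }
    }
    return false;
  }
}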
guava/src/com/google/common/collect/Maps.java
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Equivalence; import com.google.common.base.Equivalences; import com.google.common.base.Function; import com.google.common.base.Joiner.MapJoiner; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.MapDifference.ValueDifference; import com.google.common.primitives.Ints; import java.io.Serializable; import java.util.AbstractCollection; import java.util.AbstractMap; import java.util.AbstractSet; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumMap; import java.util.Enumeration; import java.util.HashMap; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ConcurrentMap; import javax.annotation.Nullable; /** * Static utility methods pertaining to {@link Map} instances. Also see this * class's counterparts {@link Lists} and {@link Sets}. * * @author Kevin Bourrillion * @author Mike Bostock * @author Isaac Shum * @author Louis Wasserman * @since 2.0 (imported from Google Collections Library) */ @GwtCompatible(emulated = true) public final class Maps { private Maps() {} /** * Creates a <i>mutable</i>, empty {@code HashMap} instance. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableMap#of()} instead. * * <p><b>Note:</b> if {@code K} is an {@code enum} type, use {@link * #newEnumMap} instead. * * @return a new, empty {@code HashMap} */ public static <K, V> HashMap<K, V> newHashMap() { return new HashMap<K, V>(); } /** * Creates a {@code HashMap} instance, with a high enough "initial capacity" * that it <i>should</i> hold {@code expectedSize} elements without growth. * This behavior cannot be broadly guaranteed, but it is observed to be true * for OpenJDK 1.6. It also can't be guaranteed that the method isn't * inadvertently <i>oversizing</i> the returned map. 
* * @param expectedSize the number of elements you expect to add to the * returned map * @return a new, empty {@code HashMap} with enough capacity to hold {@code * expectedSize} elements without resizing * @throws IllegalArgumentException if {@code expectedSize} is negative */ public static <K, V> HashMap<K, V> newHashMapWithExpectedSize( int expectedSize) { return new HashMap<K, V>(capacity(expectedSize)); } /** * Returns a capacity that is sufficient to keep the map from being resized as * long as it grows no larger than expectedSize and the load factor is >= its * default (0.75). */ static int capacity(int expectedSize) { if (expectedSize < 3) { checkArgument(expectedSize >= 0); return expectedSize + 1; } if (expectedSize < Ints.MAX_POWER_OF_TWO) { return expectedSize + expectedSize / 3; } return Integer.MAX_VALUE; // any large value } /** * Creates a <i>mutable</i> {@code HashMap} instance with the same mappings as * the specified map. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableMap#copyOf(Map)} instead. * * <p><b>Note:</b> if {@code K} is an {@link Enum} type, use {@link * #newEnumMap} instead. * * @param map the mappings to be placed in the new map * @return a new {@code HashMap} initialized with the mappings from {@code * map} */ public static <K, V> HashMap<K, V> newHashMap( Map<? extends K, ? extends V> map) { return new HashMap<K, V>(map); } /** * Creates a <i>mutable</i>, empty, insertion-ordered {@code LinkedHashMap} * instance. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableMap#of()} instead. * * @return a new, empty {@code LinkedHashMap} */ public static <K, V> LinkedHashMap<K, V> newLinkedHashMap() { return new LinkedHashMap<K, V>(); } /** * Creates a <i>mutable</i>, insertion-ordered {@code LinkedHashMap} instance * with the same mappings as the specified map. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableMap#copyOf(Map)} instead. * * @param map the mappings to be placed in the new map * @return a new, {@code LinkedHashMap} initialized with the mappings from * {@code map} */ public static <K, V> LinkedHashMap<K, V> newLinkedHashMap( Map<? extends K, ? extends V> map) { return new LinkedHashMap<K, V>(map); } /** * Returns a general-purpose instance of {@code ConcurrentMap}, which supports * all optional operations of the ConcurrentMap interface. It does not permit * null keys or values. It is serializable. * * <p>This is currently accomplished by calling {@link MapMaker#makeMap()}. * * <p>It is preferable to use {@code MapMaker} directly (rather than through * this method), as it presents numerous useful configuration options, * such as the concurrency level, load factor, key/value reference types, * and value computation. * * @return a new, empty {@code ConcurrentMap} * @since 3.0 */ public static <K, V> ConcurrentMap<K, V> newConcurrentMap() { return new MapMaker().<K, V>makeMap(); } /** * Creates a <i>mutable</i>, empty {@code TreeMap} instance using the natural * ordering of its elements. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSortedMap#of()} instead. * * @return a new, empty {@code TreeMap} */ public static <K extends Comparable, V> TreeMap<K, V> newTreeMap() { return new TreeMap<K, V>(); } /** * Creates a <i>mutable</i> {@code TreeMap} instance with the same mappings as * the specified map and using the same ordering as the specified map. * * <p><b>Note:</b> if mutability is not required, use {@link * ImmutableSortedMap#copyOfSorted(SortedMap)} instead. 
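  // Editor's illustrative sketch (not part of the original source), written as if
  // it were a private member of this class and relying on its existing imports:
  // how the capacity() heuristic above plays out for a few expected sizes, assuming
  // the default HashMap load factor of 0.75. The method name is hypothetical.
  private static void capacityExampleSketch() {
    int tiny = capacity(2);     // 2 < 3, so expectedSize + 1 = 3
    int medium = capacity(75);  // 75 + 75 / 3 = 100, so ~75 entries should fit under 0.75 load
    int large = capacity(1200); // 1200 + 1200 / 3 = 1600
    HashMap<String, Integer> map = newHashMapWithExpectedSize(75); // built from capacity(75)
    System.out.println(tiny + ", " + medium + ", " + large + ", " + map.isEmpty());
  }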
* * @param map the sorted map whose mappings are to be placed in the new map * and whose comparator is to be used to sort the new map * @return a new {@code TreeMap} initialized with the mappings from {@code * map} and using the comparator of {@code map} */ public static <K, V> TreeMap<K, V> newTreeMap(SortedMap<K, ? extends V> map) { return new TreeMap<K, V>(map); } /** * Creates a <i>mutable</i>, empty {@code TreeMap} instance using the given * comparator. * * <p><b>Note:</b> if mutability is not required, use {@code * ImmutableSortedMap.orderedBy(comparator).build()} instead. * * @param comparator the comparator to sort the keys with * @return a new, empty {@code TreeMap} */ public static <C, K extends C, V> TreeMap<K, V> newTreeMap( @Nullable Comparator<C> comparator) { // Ideally, the extra type parameter "C" shouldn't be necessary. It is a // work-around of a compiler type inference quirk that prevents the // following code from being compiled: // Comparator<Class<?>> comparator = null; // Map<Class<? extends Throwable>, String> map = newTreeMap(comparator); return new TreeMap<K, V>(comparator); } /** * Creates an {@code EnumMap} instance. * * @param type the key type for this map * @return a new, empty {@code EnumMap} */ public static <K extends Enum<K>, V> EnumMap<K, V> newEnumMap(Class<K> type) { return new EnumMap<K, V>(checkNotNull(type)); } /** * Creates an {@code EnumMap} with the same mappings as the specified map. * * @param map the map from which to initialize this {@code EnumMap} * @return a new {@code EnumMap} initialized with the mappings from {@code * map} * @throws IllegalArgumentException if {@code m} is not an {@code EnumMap} * instance and contains no mappings */ public static <K extends Enum<K>, V> EnumMap<K, V> newEnumMap( Map<K, ? extends V> map) { return new EnumMap<K, V>(map); } /** * Creates an {@code IdentityHashMap} instance. * * @return a new, empty {@code IdentityHashMap} */ public static <K, V> IdentityHashMap<K, V> newIdentityHashMap() { return new IdentityHashMap<K, V>(); } /** * Returns a synchronized (thread-safe) bimap backed by the specified bimap. * In order to guarantee serial access, it is critical that <b>all</b> access * to the backing bimap is accomplished through the returned bimap. * * <p>It is imperative that the user manually synchronize on the returned map * when accessing any of its collection views: <pre> {@code * * BiMap<Long, String> map = Maps.synchronizedBiMap( * HashBiMap.<Long, String>create()); * ... * Set<Long> set = map.keySet(); // Needn't be in synchronized block * ... * synchronized (map) { // Synchronizing on map, not set! * Iterator<Long> it = set.iterator(); // Must be in synchronized block * while (it.hasNext()) { * foo(it.next()); * } * }}</pre> * * Failure to follow this advice may result in non-deterministic behavior. * * <p>The returned bimap will be serializable if the specified bimap is * serializable. * * @param bimap the bimap to be wrapped in a synchronized view * @return a sychronized view of the specified bimap */ public static <K, V> BiMap<K, V> synchronizedBiMap(BiMap<K, V> bimap) { return Synchronized.biMap(bimap, null); } /** * Computes the difference between two maps. This difference is an immutable * snapshot of the state of the maps at the time this method is called. It * will never change, even if the maps change at a later time. 
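  // Editor's illustrative sketch (not part of the original source), written as if
  // it were a private member of this class and relying on its existing imports:
  // typical use of newTreeMap(Comparator), with keys ordered by the supplied
  // comparator rather than their natural ordering. Names are hypothetical.
  private static void newTreeMapComparatorSketch() {
    Comparator<String> ignoreCase = String.CASE_INSENSITIVE_ORDER;
    TreeMap<String, Integer> scores = newTreeMap(ignoreCase);
    scores.put("Alpha", 1);
    scores.put("beta", 2);
    System.out.println(scores.firstKey()); // "Alpha" -- case-insensitive ordering
  }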
* * <p>Since this method uses {@code HashMap} instances internally, the keys of * the supplied maps must be well-behaved with respect to * {@link Object#equals} and {@link Object#hashCode}. * * <p><b>Note:</b>If you only need to know whether two maps have the same * mappings, call {@code left.equals(right)} instead of this method. * * @param left the map to treat as the "left" map for purposes of comparison * @param right the map to treat as the "right" map for purposes of comparison * @return the difference between the two maps */ @SuppressWarnings("unchecked") public static <K, V> MapDifference<K, V> difference( Map<? extends K, ? extends V> left, Map<? extends K, ? extends V> right) { if (left instanceof SortedMap) { SortedMap<K, ? extends V> sortedLeft = (SortedMap<K, ? extends V>) left; SortedMapDifference<K, V> result = difference(sortedLeft, right); return result; } return difference(left, right, Equivalences.equals()); } /** * Computes the difference between two maps. This difference is an immutable * snapshot of the state of the maps at the time this method is called. It * will never change, even if the maps change at a later time. * * <p>Values are compared using a provided equivalence, in the case of * equality, the value on the 'left' is returned in the difference. * * <p>Since this method uses {@code HashMap} instances internally, the keys of * the supplied maps must be well-behaved with respect to * {@link Object#equals} and {@link Object#hashCode}. * * @param left the map to treat as the "left" map for purposes of comparison * @param right the map to treat as the "right" map for purposes of comparison * @param valueEquivalence the equivalence relationship to use to compare * values * @return the difference between the two maps * @since 10.0 */ @Beta public static <K, V> MapDifference<K, V> difference( Map<? extends K, ? extends V> left, Map<? extends K, ? extends V> right, Equivalence<? super V> valueEquivalence) { Preconditions.checkNotNull(valueEquivalence); Map<K, V> onlyOnLeft = newHashMap(); Map<K, V> onlyOnRight = new HashMap<K, V>(right); // will whittle it down Map<K, V> onBoth = newHashMap(); Map<K, MapDifference.ValueDifference<V>> differences = newHashMap(); boolean eq = true; for (Entry<? extends K, ? 
extends V> entry : left.entrySet()) { K leftKey = entry.getKey(); V leftValue = entry.getValue(); if (right.containsKey(leftKey)) { V rightValue = onlyOnRight.remove(leftKey); if (valueEquivalence.equivalent(leftValue, rightValue)) { onBoth.put(leftKey, leftValue); } else { eq = false; differences.put( leftKey, ValueDifferenceImpl.create(leftValue, rightValue)); } } else { eq = false; onlyOnLeft.put(leftKey, leftValue); } } boolean areEqual = eq && onlyOnRight.isEmpty(); return mapDifference( areEqual, onlyOnLeft, onlyOnRight, onBoth, differences); } private static <K, V> MapDifference<K, V> mapDifference(boolean areEqual, Map<K, V> onlyOnLeft, Map<K, V> onlyOnRight, Map<K, V> onBoth, Map<K, ValueDifference<V>> differences) { return new MapDifferenceImpl<K, V>(areEqual, Collections.unmodifiableMap(onlyOnLeft), Collections.unmodifiableMap(onlyOnRight), Collections.unmodifiableMap(onBoth), Collections.unmodifiableMap(differences)); } static class MapDifferenceImpl<K, V> implements MapDifference<K, V> { final boolean areEqual; final Map<K, V> onlyOnLeft; final Map<K, V> onlyOnRight; final Map<K, V> onBoth; final Map<K, ValueDifference<V>> differences; MapDifferenceImpl(boolean areEqual, Map<K, V> onlyOnLeft, Map<K, V> onlyOnRight, Map<K, V> onBoth, Map<K, ValueDifference<V>> differences) { this.areEqual = areEqual; this.onlyOnLeft = onlyOnLeft; this.onlyOnRight = onlyOnRight; this.onBoth = onBoth; this.differences = differences; } @Override public boolean areEqual() { return areEqual; } @Override public Map<K, V> entriesOnlyOnLeft() { return onlyOnLeft; } @Override public Map<K, V> entriesOnlyOnRight() { return onlyOnRight; } @Override public Map<K, V> entriesInCommon() { return onBoth; } @Override public Map<K, ValueDifference<V>> entriesDiffering() { return differences; } @Override public boolean equals(Object object) { if (object == this) { return true; } if (object instanceof MapDifference) { MapDifference<?, ?> other = (MapDifference<?, ?>) object; return entriesOnlyOnLeft().equals(other.entriesOnlyOnLeft()) && entriesOnlyOnRight().equals(other.entriesOnlyOnRight()) && entriesInCommon().equals(other.entriesInCommon()) && entriesDiffering().equals(other.entriesDiffering()); } return false; } @Override public int hashCode() { return Objects.hashCode(entriesOnlyOnLeft(), entriesOnlyOnRight(), entriesInCommon(), entriesDiffering()); } @Override public String toString() { if (areEqual) { return "equal"; } StringBuilder result = new StringBuilder("not equal"); if (!onlyOnLeft.isEmpty()) { result.append(": only on left=").append(onlyOnLeft); } if (!onlyOnRight.isEmpty()) { result.append(": only on right=").append(onlyOnRight); } if (!differences.isEmpty()) { result.append(": value differences=").append(differences); } return result.toString(); } } static class ValueDifferenceImpl<V> implements MapDifference.ValueDifference<V> { private final V left; private final V right; static <V> ValueDifference<V> create(@Nullable V left, @Nullable V right) { return new ValueDifferenceImpl<V>(left, right); } private ValueDifferenceImpl(@Nullable V left, @Nullable V right) { this.left = left; this.right = right; } @Override public V leftValue() { return left; } @Override public V rightValue() { return right; } @Override public boolean equals(@Nullable Object object) { if (object instanceof MapDifference.ValueDifference<?>) { MapDifference.ValueDifference<?> that = (MapDifference.ValueDifference<?>) object; return Objects.equal(this.left, that.leftValue()) && Objects.equal(this.right, that.rightValue()); } 
return false; } @Override public int hashCode() { return Objects.hashCode(left, right); } @Override public String toString() { return "(" + left + ", " + right + ")"; } } /** * Computes the difference between two sorted maps, using the comparator of * the left map, or {@code Ordering.natural()} if the left map uses the * natural ordering of its elements. This difference is an immutable snapshot * of the state of the maps at the time this method is called. It will never * change, even if the maps change at a later time. * * <p>Since this method uses {@code TreeMap} instances internally, the keys of * the right map must all compare as distinct according to the comparator * of the left map. * * <p><b>Note:</b>If you only need to know whether two sorted maps have the * same mappings, call {@code left.equals(right)} instead of this method. * * @param left the map to treat as the "left" map for purposes of comparison * @param right the map to treat as the "right" map for purposes of comparison * @return the difference between the two maps * @since 11.0 */ @Beta public static <K, V> SortedMapDifference<K, V> difference( SortedMap<K, ? extends V> left, Map<? extends K, ? extends V> right) { checkNotNull(left); checkNotNull(right); Comparator<? super K> comparator = orNaturalOrder(left.comparator()); SortedMap<K, V> onlyOnLeft = Maps.newTreeMap(comparator); SortedMap<K, V> onlyOnRight = Maps.newTreeMap(comparator); onlyOnRight.putAll(right); // will whittle it down SortedMap<K, V> onBoth = Maps.newTreeMap(comparator); SortedMap<K, MapDifference.ValueDifference<V>> differences = Maps.newTreeMap(comparator); boolean eq = true; for (Entry<? extends K, ? extends V> entry : left.entrySet()) { K leftKey = entry.getKey(); V leftValue = entry.getValue(); if (right.containsKey(leftKey)) { V rightValue = onlyOnRight.remove(leftKey); if (Objects.equal(leftValue, rightValue)) { onBoth.put(leftKey, leftValue); } else { eq = false; differences.put( leftKey, ValueDifferenceImpl.create(leftValue, rightValue)); } } else { eq = false; onlyOnLeft.put(leftKey, leftValue); } } boolean areEqual = eq && onlyOnRight.isEmpty(); return sortedMapDifference( areEqual, onlyOnLeft, onlyOnRight, onBoth, differences); } private static <K, V> SortedMapDifference<K, V> sortedMapDifference( boolean areEqual, SortedMap<K, V> onlyOnLeft, SortedMap<K, V> onlyOnRight, SortedMap<K, V> onBoth, SortedMap<K, ValueDifference<V>> differences) { return new SortedMapDifferenceImpl<K, V>(areEqual, Collections.unmodifiableSortedMap(onlyOnLeft), Collections.unmodifiableSortedMap(onlyOnRight), Collections.unmodifiableSortedMap(onBoth), Collections.unmodifiableSortedMap(differences)); } static class SortedMapDifferenceImpl<K, V> extends MapDifferenceImpl<K, V> implements SortedMapDifference<K, V> { SortedMapDifferenceImpl(boolean areEqual, SortedMap<K, V> onlyOnLeft, SortedMap<K, V> onlyOnRight, SortedMap<K, V> onBoth, SortedMap<K, ValueDifference<V>> differences) { super(areEqual, onlyOnLeft, onlyOnRight, onBoth, differences); } @Override public SortedMap<K, ValueDifference<V>> entriesDiffering() { return (SortedMap<K, ValueDifference<V>>) super.entriesDiffering(); } @Override public SortedMap<K, V> entriesInCommon() { return (SortedMap<K, V>) super.entriesInCommon(); } @Override public SortedMap<K, V> entriesOnlyOnLeft() { return (SortedMap<K, V>) super.entriesOnlyOnLeft(); } @Override public SortedMap<K, V> entriesOnlyOnRight() { return (SortedMap<K, V>) super.entriesOnlyOnRight(); } } /** * Returns the specified comparator if not null; otherwise 
returns {@code * Ordering.natural()}. This method is an abomination of generics; the only * purpose of this method is to contain the ugly type-casting in one place. */ @SuppressWarnings("unchecked") static <E> Comparator<? super E> orNaturalOrder( @Nullable Comparator<? super E> comparator) { if (comparator != null) { // can't use ? : because of javac bug 5080917 return comparator; } return (Comparator<E>) Ordering.natural(); } /** * Returns an immutable map for which the {@link Map#values} are the given * elements in the given order, and each key is the product of invoking a * supplied function on its corresponding value. * * @param values the values to use when constructing the {@code Map} * @param keyFunction the function used to produce the key for each value * @return a map mapping the result of evaluating the function {@code * keyFunction} on each value in the input collection to that value * @throws IllegalArgumentException if {@code keyFunction} produces the same * key for more than one value in the input collection * @throws NullPointerException if any elements of {@code values} is null, or * if {@code keyFunction} produces {@code null} for any value */ public static <K, V> ImmutableMap<K, V> uniqueIndex( Iterable<V> values, Function<? super V, K> keyFunction) { return uniqueIndex(values.iterator(), keyFunction); } /** * <b>Deprecated.</b> * * @since 10.0 * @deprecated use {@link #uniqueIndex(Iterator, Function)} by casting {@code * values} to {@code Iterator<V>}, or better yet, by implementing only * {@code Iterator} and not {@code Iterable}. <b>This method is scheduled * for deletion in March 2012.</b> */ @Beta @Deprecated public static <K, V, I extends Object & Iterable<V> & Iterator<V>> ImmutableMap<K, V> uniqueIndex( I values, Function<? super V, K> keyFunction) { Iterable<V> valuesIterable = checkNotNull(values); return uniqueIndex(valuesIterable, keyFunction); } /** * Returns an immutable map for which the {@link Map#values} are the given * elements in the given order, and each key is the product of invoking a * supplied function on its corresponding value. * * @param values the values to use when constructing the {@code Map} * @param keyFunction the function used to produce the key for each value * @return a map mapping the result of evaluating the function {@code * keyFunction} on each value in the input collection to that value * @throws IllegalArgumentException if {@code keyFunction} produces the same * key for more than one value in the input collection * @throws NullPointerException if any elements of {@code values} is null, or * if {@code keyFunction} produces {@code null} for any value * @since 10.0 */ public static <K, V> ImmutableMap<K, V> uniqueIndex( Iterator<V> values, Function<? super V, K> keyFunction) { checkNotNull(keyFunction); ImmutableMap.Builder<K, V> builder = ImmutableMap.builder(); while (values.hasNext()) { V value = values.next(); builder.put(keyFunction.apply(value), value); } return builder.build(); } /** * Creates an {@code ImmutableMap<String, String>} from a {@code Properties} * instance. Properties normally derive from {@code Map<Object, Object>}, but * they typically contain strings, which is awkward. This method lets you get * a plain-old-{@code Map} out of a {@code Properties}. 
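  // Editor's illustrative sketch (not part of the original source), written as if
  // it were a private member of this class and relying on its existing imports:
  // indexing values by a derived key with uniqueIndex. Values and names are
  // hypothetical; duplicate derived keys would trigger an IllegalArgumentException.
  private static void uniqueIndexSketch() {
    ImmutableList<String> words = ImmutableList.of("foo", "bazaar", "quux");
    ImmutableMap<Integer, String> byLength = uniqueIndex(words,
        new Function<String, Integer>() {
          @Override public Integer apply(String word) { return word.length(); }
        });
    System.out.println(byLength); // {3=foo, 6=bazaar, 4=quux}
  }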
* * @param properties a {@code Properties} object to be converted * @return an immutable map containing all the entries in {@code properties} * @throws ClassCastException if any key in {@code Properties} is not a {@code * String} * @throws NullPointerException if any key or value in {@code Properties} is * null */ @GwtIncompatible("java.util.Properties") public static ImmutableMap<String, String> fromProperties( Properties properties) { ImmutableMap.Builder<String, String> builder = ImmutableMap.builder(); for (Enumeration<?> e = properties.propertyNames(); e.hasMoreElements();) { String key = (String) e.nextElement(); builder.put(key, properties.getProperty(key)); } return builder.build(); } /** * Returns an immutable map entry with the specified key and value. The {@link * Entry#setValue} operation throws an {@link UnsupportedOperationException}. * * <p>The returned entry is serializable. * * @param key the key to be associated with the returned entry * @param value the value to be associated with the returned entry */ @GwtCompatible(serializable = true) public static <K, V> Entry<K, V> immutableEntry( @Nullable K key, @Nullable V value) { return new ImmutableEntry<K, V>(key, value); } /** * Returns an unmodifiable view of the specified set of entries. The {@link * Entry#setValue} operation throws an {@link UnsupportedOperationException}, * as do any operations that would modify the returned set. * * @param entrySet the entries for which to return an unmodifiable view * @return an unmodifiable view of the entries */ static <K, V> Set<Entry<K, V>> unmodifiableEntrySet( Set<Entry<K, V>> entrySet) { return new UnmodifiableEntrySet<K, V>( Collections.unmodifiableSet(entrySet)); } /** * Returns an unmodifiable view of the specified map entry. The {@link * Entry#setValue} operation throws an {@link UnsupportedOperationException}. * This also has the side-effect of redefining {@code equals} to comply with * the Entry contract, to avoid a possible nefarious implementation of equals. * * @param entry the entry for which to return an unmodifiable view * @return an unmodifiable view of the entry */ static <K, V> Entry<K, V> unmodifiableEntry(final Entry<K, V> entry) { checkNotNull(entry); return new AbstractMapEntry<K, V>() { @Override public K getKey() { return entry.getKey(); } @Override public V getValue() { return entry.getValue(); } }; } /** @see Multimaps#unmodifiableEntries */ static class UnmodifiableEntries<K, V> extends ForwardingCollection<Entry<K, V>> { private final Collection<Entry<K, V>> entries; UnmodifiableEntries(Collection<Entry<K, V>> entries) { this.entries = entries; } @Override protected Collection<Entry<K, V>> delegate() { return entries; } @Override public Iterator<Entry<K, V>> iterator() { final Iterator<Entry<K, V>> delegate = super.iterator(); return new ForwardingIterator<Entry<K, V>>() { @Override public Entry<K, V> next() { return unmodifiableEntry(super.next()); } @Override public void remove() { throw new UnsupportedOperationException(); } @Override protected Iterator<Entry<K, V>> delegate() { return delegate; } }; } // See java.util.Collections.UnmodifiableEntrySet for details on attacks. @Override public boolean add(Entry<K, V> element) { throw new UnsupportedOperationException(); } @Override public boolean addAll( Collection<? 
extends Entry<K, V>> collection) { throw new UnsupportedOperationException(); } @Override public void clear() { throw new UnsupportedOperationException(); } @Override public boolean remove(Object object) { throw new UnsupportedOperationException(); } @Override public boolean removeAll(Collection<?> collection) { throw new UnsupportedOperationException(); } @Override public boolean retainAll(Collection<?> collection) { throw new UnsupportedOperationException(); } @Override public Object[] toArray() { return standardToArray(); } @Override public <T> T[] toArray(T[] array) { return standardToArray(array); } } /** @see Maps#unmodifiableEntrySet(Set) */ static class UnmodifiableEntrySet<K, V> extends UnmodifiableEntries<K, V> implements Set<Entry<K, V>> { UnmodifiableEntrySet(Set<Entry<K, V>> entries) { super(entries); } // See java.util.Collections.UnmodifiableEntrySet for details on attacks. @Override public boolean equals(@Nullable Object object) { return Sets.equalsImpl(this, object); } @Override public int hashCode() { return Sets.hashCodeImpl(this); } } /** * Returns an unmodifiable view of the specified bimap. This method allows * modules to provide users with "read-only" access to internal bimaps. Query * operations on the returned bimap "read through" to the specified bimap, and * attempts to modify the returned map, whether direct or via its collection * views, result in an {@code UnsupportedOperationException}. * * <p>The returned bimap will be serializable if the specified bimap is * serializable. * * @param bimap the bimap for which an unmodifiable view is to be returned * @return an unmodifiable view of the specified bimap */ public static <K, V> BiMap<K, V> unmodifiableBiMap( BiMap<? extends K, ? extends V> bimap) { return new UnmodifiableBiMap<K, V>(bimap, null); } /** @see Maps#unmodifiableBiMap(BiMap) */ private static class UnmodifiableBiMap<K, V> extends ForwardingMap<K, V> implements BiMap<K, V>, Serializable { final Map<K, V> unmodifiableMap; final BiMap<? extends K, ? extends V> delegate; transient BiMap<V, K> inverse; transient Set<V> values; UnmodifiableBiMap(BiMap<? extends K, ? extends V> delegate, @Nullable BiMap<V, K> inverse) { unmodifiableMap = Collections.unmodifiableMap(delegate); this.delegate = delegate; this.inverse = inverse; } @Override protected Map<K, V> delegate() { return unmodifiableMap; } @Override public V forcePut(K key, V value) { throw new UnsupportedOperationException(); } @Override public BiMap<V, K> inverse() { BiMap<V, K> result = inverse; return (result == null) ? inverse = new UnmodifiableBiMap<V, K>(delegate.inverse(), this) : result; } @Override public Set<V> values() { Set<V> result = values; return (result == null) ? values = Collections.unmodifiableSet(delegate.values()) : result; } private static final long serialVersionUID = 0; } /** * Returns a view of a map where each value is transformed by a function. All * other properties of the map, such as iteration order, are left intact. For * example, the code: <pre> {@code * * Map<String, Integer> map = ImmutableMap.of("a", 4, "b", 9); * Function<Integer, Double> sqrt = * new Function<Integer, Double>() { * public Double apply(Integer in) { * return Math.sqrt((int) in); * } * }; * Map<String, Double> transformed = Maps.transformValues(map, sqrt); * System.out.println(transformed);}</pre> * * ... prints {@code {a=2.0, b=3.0}}. * * <p>Changes in the underlying map are reflected in this view. Conversely, * this view supports removal operations, and these are reflected in the * underlying map. 
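  // Editor's illustrative sketch (not part of the original source), written as if
  // it were a private member of this class: read-only views from unmodifiableBiMap,
  // using HashBiMap from this package. Names are hypothetical.
  private static void unmodifiableBiMapSketch() {
    BiMap<String, Integer> codes = HashBiMap.create();
    codes.put("ok", 200);
    BiMap<String, Integer> readOnly = unmodifiableBiMap(codes);
    System.out.println(readOnly.inverse().get(200)); // "ok"
    try {
      readOnly.put("created", 201); // the view rejects modification
    } catch (UnsupportedOperationException expected) {
    }
  }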
* * <p>It's acceptable for the underlying map to contain null keys, and even * null values provided that the function is capable of accepting null input. * The transformed map might contain null values, if the function sometimes * gives a null result. * * <p>The returned map is not thread-safe or serializable, even if the * underlying map is. * * <p>The function is applied lazily, invoked when needed. This is necessary * for the returned map to be a view, but it means that the function will be * applied many times for bulk operations like {@link Map#containsValue} and * {@code Map.toString()}. For this to perform well, {@code function} should * be fast. To avoid lazy evaluation when the returned map doesn't need to be * a view, copy the returned map into a new map of your choosing. */ public static <K, V1, V2> Map<K, V2> transformValues( Map<K, V1> fromMap, final Function<? super V1, V2> function) { checkNotNull(function); EntryTransformer<K, V1, V2> transformer = new EntryTransformer<K, V1, V2>() { @Override public V2 transformEntry(K key, V1 value) { return function.apply(value); } }; return transformEntries(fromMap, transformer); } /** * Returns a view of a sorted map where each value is transformed by a * function. All other properties of the map, such as iteration order, are * left intact. For example, the code: <pre> {@code * * SortedMap<String, Integer> map = ImmutableSortedMap.of("a", 4, "b", 9); * Function<Integer, Double> sqrt = * new Function<Integer, Double>() { * public Double apply(Integer in) { * return Math.sqrt((int) in); * } * }; * SortedMap<String, Double> transformed = * Maps.transformSortedValues(map, sqrt); * System.out.println(transformed);}</pre> * * ... prints {@code {a=2.0, b=3.0}}. * * <p>Changes in the underlying map are reflected in this view. Conversely, * this view supports removal operations, and these are reflected in the * underlying map. * * <p>It's acceptable for the underlying map to contain null keys, and even * null values provided that the function is capable of accepting null input. * The transformed map might contain null values, if the function sometimes * gives a null result. * * <p>The returned map is not thread-safe or serializable, even if the * underlying map is. * * <p>The function is applied lazily, invoked when needed. This is necessary * for the returned map to be a view, but it means that the function will be * applied many times for bulk operations like {@link Map#containsValue} and * {@code Map.toString()}. For this to perform well, {@code function} should * be fast. To avoid lazy evaluation when the returned map doesn't need to be * a view, copy the returned map into a new map of your choosing. * * @since 11.0 */ @Beta public static <K, V1, V2> SortedMap<K, V2> transformValues( SortedMap<K, V1> fromMap, final Function<? super V1, V2> function) { checkNotNull(function); EntryTransformer<K, V1, V2> transformer = new EntryTransformer<K, V1, V2>() { @Override public V2 transformEntry(K key, V1 value) { return function.apply(value); } }; return transformEntries(fromMap, transformer); } /** * Returns a view of a map whose values are derived from the original map's * entries. In contrast to {@link #transformValues}, this method's * entry-transformation logic may depend on the key as well as the value. * * <p>All other properties of the transformed map, such as iteration order, * are left intact. 
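  // Editor's illustrative sketch (not part of the original source), written as if
  // it were a private member of this class and relying on its existing imports:
  // the transformed map is a lazy view, so later changes to the backing map show
  // through, and removals write back. Names are hypothetical.
  private static void transformValuesViewSketch() {
    Map<String, Integer> backing = newHashMap();
    backing.put("a", 1);
    Map<String, String> view = transformValues(backing,
        new Function<Integer, String>() {
          @Override public String apply(Integer value) { return "#" + value; }
        });
    backing.put("b", 2);                           // mutate the backing map afterwards
    System.out.println(view.get("b"));             // "#2" -- the view reflects the change
    view.remove("a");                              // removal writes through
    System.out.println(backing.containsKey("a"));  // false
  }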
For example, the code: <pre> {@code * * Map<String, Boolean> options = * ImmutableMap.of("verbose", true, "sort", false); * EntryTransformer<String, Boolean, String> flagPrefixer = * new EntryTransformer<String, Boolean, String>() { * public String transformEntry(String key, Boolean value) { * return value ? key : "no" + key; * } * }; * Map<String, String> transformed = * Maps.transformEntries(options, flagPrefixer); * System.out.println(transformed);}</pre> * * ... prints {@code {verbose=verbose, sort=nosort}}. * * <p>Changes in the underlying map are reflected in this view. Conversely, * this view supports removal operations, and these are reflected in the * underlying map. * * <p>It's acceptable for the underlying map to contain null keys and null * values provided that the transformer is capable of accepting null inputs. * The transformed map might contain null values if the transformer sometimes * gives a null result. * * <p>The returned map is not thread-safe or serializable, even if the * underlying map is. * * <p>The transformer is applied lazily, invoked when needed. This is * necessary for the returned map to be a view, but it means that the * transformer will be applied many times for bulk operations like {@link * Map#containsValue} and {@link Object#toString}. For this to perform well, * {@code transformer} should be fast. To avoid lazy evaluation when the * returned map doesn't need to be a view, copy the returned map into a new * map of your choosing. * * <p><b>Warning:</b> This method assumes that for any instance {@code k} of * {@code EntryTransformer} key type {@code K}, {@code k.equals(k2)} implies * that {@code k2} is also of type {@code K}. Using an {@code * EntryTransformer} key type for which this may not hold, such as {@code * ArrayList}, may risk a {@code ClassCastException} when calling methods on * the transformed map. * * @since 7.0 */ public static <K, V1, V2> Map<K, V2> transformEntries( Map<K, V1> fromMap, EntryTransformer<? super K, ? super V1, V2> transformer) { if (fromMap instanceof SortedMap) { return transformEntries((SortedMap<K, V1>) fromMap, transformer); } return new TransformedEntriesMap<K, V1, V2>(fromMap, transformer); } /** * Returns a view of a sorted map whose values are derived from the original * sorted map's entries. In contrast to {@link #transformValues}, this * method's entry-transformation logic may depend on the key as well as the * value. * * <p>All other properties of the transformed map, such as iteration order, * are left intact. For example, the code: <pre> {@code * * Map<String, Boolean> options = * ImmutableSortedMap.of("verbose", true, "sort", false); * EntryTransformer<String, Boolean, String> flagPrefixer = * new EntryTransformer<String, Boolean, String>() { * public String transformEntry(String key, Boolean value) { * return value ? key : "yes" + key; * } * }; * SortedMap<String, String> transformed = * LabsMaps.transformSortedEntries(options, flagPrefixer); * System.out.println(transformed);}</pre> * * ... prints {@code {sort=yessort, verbose=verbose}}. * * <p>Changes in the underlying map are reflected in this view. Conversely, * this view supports removal operations, and these are reflected in the * underlying map. * * <p>It's acceptable for the underlying map to contain null keys and null * values provided that the transformer is capable of accepting null inputs. * The transformed map might contain null values if the transformer sometimes * gives a null result. 
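  // Editor's illustrative sketch (not part of the original source), written as if
  // it were a private member of this class: an EntryTransformer whose result
  // depends on both key and value, unlike transformValues. Names are hypothetical.
  private static void transformEntriesSketch() {
    Map<String, Integer> prices = ImmutableMap.of("apple", 3, "pear", 5);
    Map<String, String> labelled = transformEntries(prices,
        new EntryTransformer<String, Integer, String>() {
          @Override public String transformEntry(String key, Integer value) {
            return key + " costs " + value;
          }
        });
    System.out.println(labelled); // {apple=apple costs 3, pear=pear costs 5}
  }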
* * <p>The returned map is not thread-safe or serializable, even if the * underlying map is. * * <p>The transformer is applied lazily, invoked when needed. This is * necessary for the returned map to be a view, but it means that the * transformer will be applied many times for bulk operations like {@link * Map#containsValue} and {@link Object#toString}. For this to perform well, * {@code transformer} should be fast. To avoid lazy evaluation when the * returned map doesn't need to be a view, copy the returned map into a new * map of your choosing. * * <p><b>Warning:</b> This method assumes that for any instance {@code k} of * {@code EntryTransformer} key type {@code K}, {@code k.equals(k2)} implies * that {@code k2} is also of type {@code K}. Using an {@code * EntryTransformer} key type for which this may not hold, such as {@code * ArrayList}, may risk a {@code ClassCastException} when calling methods on * the transformed map. * * @since 11.0 */ @Beta public static <K, V1, V2> SortedMap<K, V2> transformEntries( final SortedMap<K, V1> fromMap, EntryTransformer<? super K, ? super V1, V2> transformer) { return new TransformedEntriesSortedMap<K, V1, V2>(fromMap, transformer); } /** * A transformation of the value of a key-value pair, using both key and value * as inputs. To apply the transformation to a map, use * {@link Maps#transformEntries(Map, EntryTransformer)}. * * @param <K> the key type of the input and output entries * @param <V1> the value type of the input entry * @param <V2> the value type of the output entry * @since 7.0 */ public interface EntryTransformer<K, V1, V2> { /** * Determines an output value based on a key-value pair. This method is * <i>generally expected</i>, but not absolutely required, to have the * following properties: * * <ul> * <li>Its execution does not cause any observable side effects. * <li>The computation is <i>consistent with equals</i>; that is, * {@link Objects#equal Objects.equal}{@code (k1, k2) &&} * {@link Objects#equal}{@code (v1, v2)} implies that {@code * Objects.equal(transformer.transform(k1, v1), * transformer.transform(k2, v2))}. * </ul> * * @throws NullPointerException if the key or value is null and this * transformer does not accept null arguments */ V2 transformEntry(@Nullable K key, @Nullable V1 value); } static class TransformedEntriesMap<K, V1, V2> extends AbstractMap<K, V2> { final Map<K, V1> fromMap; final EntryTransformer<? super K, ? super V1, V2> transformer; TransformedEntriesMap( Map<K, V1> fromMap, EntryTransformer<? super K, ? super V1, V2> transformer) { this.fromMap = checkNotNull(fromMap); this.transformer = checkNotNull(transformer); } @Override public int size() { return fromMap.size(); } @Override public boolean containsKey(Object key) { return fromMap.containsKey(key); } // safe as long as the user followed the <b>Warning</b> in the javadoc @SuppressWarnings("unchecked") @Override public V2 get(Object key) { V1 value = fromMap.get(key); return (value != null || fromMap.containsKey(key)) ? transformer.transformEntry((K) key, value) : null; } // safe as long as the user followed the <b>Warning</b> in the javadoc @SuppressWarnings("unchecked") @Override public V2 remove(Object key) { return fromMap.containsKey(key) ? 
transformer.transformEntry((K) key, fromMap.remove(key)) : null; } @Override public void clear() { fromMap.clear(); } @Override public Set<K> keySet() { return fromMap.keySet(); } Set<Entry<K, V2>> entrySet; @Override public Set<Entry<K, V2>> entrySet() { Set<Entry<K, V2>> result = entrySet; if (result == null) { entrySet = result = new EntrySet<K, V2>() { @Override Map<K, V2> map() { return TransformedEntriesMap.this; } @Override public Iterator<Entry<K, V2>> iterator() { final Iterator<Entry<K, V1>> backingIterator = fromMap.entrySet().iterator(); return Iterators.transform(backingIterator, new Function<Entry<K, V1>, Entry<K, V2>>() { @Override public Entry<K, V2> apply(Entry<K, V1> entry) { return immutableEntry( entry.getKey(), transformer.transformEntry(entry.getKey(), entry.getValue())); } }); } }; } return result; } Collection<V2> values; @Override public Collection<V2> values() { Collection<V2> result = values; if (result == null) { return values = new Values<K, V2>() { @Override Map<K, V2> map() { return TransformedEntriesMap.this; } }; } return result; } } static class TransformedEntriesSortedMap<K, V1, V2> extends TransformedEntriesMap<K, V1, V2> implements SortedMap<K, V2> { protected SortedMap<K, V1> fromMap() { return (SortedMap<K, V1>) fromMap; } TransformedEntriesSortedMap(SortedMap<K, V1> fromMap, EntryTransformer<? super K, ? super V1, V2> transformer) { super(fromMap, transformer); } @Override public Comparator<? super K> comparator() { return fromMap().comparator(); } @Override public K firstKey() { return fromMap().firstKey(); } @Override public SortedMap<K, V2> headMap(K toKey) { return transformEntries(fromMap().headMap(toKey), transformer); } @Override public K lastKey() { return fromMap().lastKey(); } @Override public SortedMap<K, V2> subMap(K fromKey, K toKey) { return transformEntries( fromMap().subMap(fromKey, toKey), transformer); } @Override public SortedMap<K, V2> tailMap(K fromKey) { return transformEntries(fromMap().tailMap(fromKey), transformer); } } /** * Returns a map containing the mappings in {@code unfiltered} whose keys * satisfy a predicate. The returned map is a live view of {@code unfiltered}; * changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a key that * doesn't satisfy the predicate, the map's {@code put()} and {@code putAll()} * methods throw an {@link IllegalArgumentException}. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings whose keys satisfy the * filter will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code keyPredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. Do not provide a * predicate such as {@code Predicates.instanceOf(ArrayList.class)}, which is * inconsistent with equals. */ public static <K, V> Map<K, V> filterKeys( Map<K, V> unfiltered, final Predicate<? 
super K> keyPredicate) { if (unfiltered instanceof SortedMap) { return filterKeys((SortedMap<K, V>) unfiltered, keyPredicate); } checkNotNull(keyPredicate); Predicate<Entry<K, V>> entryPredicate = new Predicate<Entry<K, V>>() { @Override public boolean apply(Entry<K, V> input) { return keyPredicate.apply(input.getKey()); } }; return (unfiltered instanceof AbstractFilteredMap) ? filterFiltered((AbstractFilteredMap<K, V>) unfiltered, entryPredicate) : new FilteredKeyMap<K, V>( checkNotNull(unfiltered), keyPredicate, entryPredicate); } /** * Returns a sorted map containing the mappings in {@code unfiltered} whose * keys satisfy a predicate. The returned map is a live view of {@code * unfiltered}; changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a key that * doesn't satisfy the predicate, the map's {@code put()} and {@code putAll()} * methods throw an {@link IllegalArgumentException}. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings whose keys satisfy the * filter will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code keyPredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. Do not provide a * predicate such as {@code Predicates.instanceOf(ArrayList.class)}, which is * inconsistent with equals. * * @since 11.0 */ @Beta public static <K, V> SortedMap<K, V> filterKeys( SortedMap<K, V> unfiltered, final Predicate<? super K> keyPredicate) { // TODO: Return a subclass of Maps.FilteredKeyMap for slightly better // performance. checkNotNull(keyPredicate); Predicate<Entry<K, V>> entryPredicate = new Predicate<Entry<K, V>>() { @Override public boolean apply(Entry<K, V> input) { return keyPredicate.apply(input.getKey()); } }; return filterEntries(unfiltered, entryPredicate); } /** * Returns a map containing the mappings in {@code unfiltered} whose values * satisfy a predicate. The returned map is a live view of {@code unfiltered}; * changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a value * that doesn't satisfy the predicate, the map's {@code put()}, {@code * putAll()}, and {@link Entry#setValue} methods throw an {@link * IllegalArgumentException}. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings whose values satisfy the * filter will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. 
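  // Editor's illustrative sketch (not part of the original source), written as if
  // it were a private member of this class and relying on its existing imports:
  // a live filterKeys view, including the IllegalArgumentException thrown when a
  // key that fails the predicate is put through the view. Names are hypothetical.
  private static void filterKeysSketch() {
    Map<String, Integer> backing = newHashMap();
    backing.put("visible", 1);
    backing.put("_hidden", 2);
    Map<String, Integer> visibleOnly = filterKeys(backing,
        new Predicate<String>() {
          @Override public boolean apply(String key) { return !key.startsWith("_"); }
        });
    System.out.println(visibleOnly.containsKey("_hidden")); // false
    try {
      visibleOnly.put("_other", 3); // rejected: the key fails the predicate
    } catch (IllegalArgumentException expected) {
    }
  }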
* * <p><b>Warning:</b> {@code valuePredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. Do not provide a * predicate such as {@code Predicates.instanceOf(ArrayList.class)}, which is * inconsistent with equals. */ public static <K, V> Map<K, V> filterValues( Map<K, V> unfiltered, final Predicate<? super V> valuePredicate) { if (unfiltered instanceof SortedMap) { return filterValues((SortedMap<K, V>) unfiltered, valuePredicate); } checkNotNull(valuePredicate); Predicate<Entry<K, V>> entryPredicate = new Predicate<Entry<K, V>>() { @Override public boolean apply(Entry<K, V> input) { return valuePredicate.apply(input.getValue()); } }; return filterEntries(unfiltered, entryPredicate); } /** * Returns a sorted map containing the mappings in {@code unfiltered} whose * values satisfy a predicate. The returned map is a live view of {@code * unfiltered}; changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a value * that doesn't satisfy the predicate, the map's {@code put()}, {@code * putAll()}, and {@link Entry#setValue} methods throw an {@link * IllegalArgumentException}. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings whose values satisfy the * filter will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code valuePredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. Do not provide a * predicate such as {@code Predicates.instanceOf(ArrayList.class)}, which is * inconsistent with equals. * * @since 11.0 */ @Beta public static <K, V> SortedMap<K, V> filterValues( SortedMap<K, V> unfiltered, final Predicate<? super V> valuePredicate) { checkNotNull(valuePredicate); Predicate<Entry<K, V>> entryPredicate = new Predicate<Entry<K, V>>() { @Override public boolean apply(Entry<K, V> input) { return valuePredicate.apply(input.getValue()); } }; return filterEntries(unfiltered, entryPredicate); } /** * Returns a map containing the mappings in {@code unfiltered} that satisfy a * predicate. The returned map is a live view of {@code unfiltered}; changes * to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a * key/value pair that doesn't satisfy the predicate, the map's {@code put()} * and {@code putAll()} methods throw an {@link IllegalArgumentException}. * Similarly, the map's entries have a {@link Entry#setValue} method that * throws an {@link IllegalArgumentException} when the existing key and the * provided value don't satisfy the predicate. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings that satisfy the filter * will be removed from the underlying map. 
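  // Editor's illustrative sketch (not part of the original source), written as if
  // it were a private member of this class: clear() on a filterValues view removes
  // only the mappings whose values satisfy the predicate. Names are hypothetical.
  private static void filterValuesSketch() {
    Map<String, Integer> backing = newHashMap();
    backing.put("a", 1);
    backing.put("b", -1);
    Map<String, Integer> positives = filterValues(backing,
        new Predicate<Integer>() {
          @Override public boolean apply(Integer value) { return value > 0; }
        });
    positives.clear();           // removes only the mapping "a" -> 1
    System.out.println(backing); // {b=-1}
  }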
* * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code entryPredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. */ public static <K, V> Map<K, V> filterEntries( Map<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) { if (unfiltered instanceof SortedMap) { return filterEntries((SortedMap<K, V>) unfiltered, entryPredicate); } checkNotNull(entryPredicate); return (unfiltered instanceof AbstractFilteredMap) ? filterFiltered((AbstractFilteredMap<K, V>) unfiltered, entryPredicate) : new FilteredEntryMap<K, V>(checkNotNull(unfiltered), entryPredicate); } /** * Returns a sorted map containing the mappings in {@code unfiltered} that * satisfy a predicate. The returned map is a live view of {@code unfiltered}; * changes to one affect the other. * * <p>The resulting map's {@code keySet()}, {@code entrySet()}, and {@code * values()} views have iterators that don't support {@code remove()}, but all * other methods are supported by the map and its views. When given a * key/value pair that doesn't satisfy the predicate, the map's {@code put()} * and {@code putAll()} methods throw an {@link IllegalArgumentException}. * Similarly, the map's entries have a {@link Entry#setValue} method that * throws an {@link IllegalArgumentException} when the existing key and the * provided value don't satisfy the predicate. * * <p>When methods such as {@code removeAll()} and {@code clear()} are called * on the filtered map or its views, only mappings that satisfy the filter * will be removed from the underlying map. * * <p>The returned map isn't threadsafe or serializable, even if {@code * unfiltered} is. * * <p>Many of the filtered map's methods, such as {@code size()}, * iterate across every key/value mapping in the underlying map and determine * which satisfy the filter. When a live view is <i>not</i> needed, it may be * faster to copy the filtered map and use the copy. * * <p><b>Warning:</b> {@code entryPredicate} must be <i>consistent with * equals</i>, as documented at {@link Predicate#apply}. * * @since 11.0 */ @Beta public static <K, V> SortedMap<K, V> filterEntries( SortedMap<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) { checkNotNull(entryPredicate); return (unfiltered instanceof FilteredEntrySortedMap) ? filterFiltered((FilteredEntrySortedMap<K, V>) unfiltered, entryPredicate) : new FilteredEntrySortedMap<K, V>(checkNotNull(unfiltered), entryPredicate); } /** * Support {@code clear()}, {@code removeAll()}, and {@code retainAll()} when * filtering a filtered map. */ private static <K, V> Map<K, V> filterFiltered(AbstractFilteredMap<K, V> map, Predicate<? super Entry<K, V>> entryPredicate) { Predicate<Entry<K, V>> predicate = Predicates.and(map.predicate, entryPredicate); return new FilteredEntryMap<K, V>(map.unfiltered, predicate); } private abstract static class AbstractFilteredMap<K, V> extends AbstractMap<K, V> { final Map<K, V> unfiltered; final Predicate<? super Entry<K, V>> predicate; AbstractFilteredMap( Map<K, V> unfiltered, Predicate<? 
super Entry<K, V>> predicate) { this.unfiltered = unfiltered; this.predicate = predicate; } boolean apply(Object key, V value) { // This method is called only when the key is in the map, implying that // key is a K. @SuppressWarnings("unchecked") K k = (K) key; return predicate.apply(Maps.immutableEntry(k, value)); } @Override public V put(K key, V value) { checkArgument(apply(key, value)); return unfiltered.put(key, value); } @Override public void putAll(Map<? extends K, ? extends V> map) { for (Entry<? extends K, ? extends V> entry : map.entrySet()) { checkArgument(apply(entry.getKey(), entry.getValue())); } unfiltered.putAll(map); } @Override public boolean containsKey(Object key) { return unfiltered.containsKey(key) && apply(key, unfiltered.get(key)); } @Override public V get(Object key) { V value = unfiltered.get(key); return ((value != null) && apply(key, value)) ? value : null; } @Override public boolean isEmpty() { return entrySet().isEmpty(); } @Override public V remove(Object key) { return containsKey(key) ? unfiltered.remove(key) : null; } Collection<V> values; @Override public Collection<V> values() { Collection<V> result = values; return (result == null) ? values = new Values() : result; } class Values extends AbstractCollection<V> { @Override public Iterator<V> iterator() { final Iterator<Entry<K, V>> entryIterator = entrySet().iterator(); return new UnmodifiableIterator<V>() { @Override public boolean hasNext() { return entryIterator.hasNext(); } @Override public V next() { return entryIterator.next().getValue(); } }; } @Override public int size() { return entrySet().size(); } @Override public void clear() { entrySet().clear(); } @Override public boolean isEmpty() { return entrySet().isEmpty(); } @Override public boolean remove(Object o) { Iterator<Entry<K, V>> iterator = unfiltered.entrySet().iterator(); while (iterator.hasNext()) { Entry<K, V> entry = iterator.next(); if (Objects.equal(o, entry.getValue()) && predicate.apply(entry)) { iterator.remove(); return true; } } return false; } @Override public boolean removeAll(Collection<?> collection) { checkNotNull(collection); boolean changed = false; Iterator<Entry<K, V>> iterator = unfiltered.entrySet().iterator(); while (iterator.hasNext()) { Entry<K, V> entry = iterator.next(); if (collection.contains(entry.getValue()) && predicate.apply(entry)) { iterator.remove(); changed = true; } } return changed; } @Override public boolean retainAll(Collection<?> collection) { checkNotNull(collection); boolean changed = false; Iterator<Entry<K, V>> iterator = unfiltered.entrySet().iterator(); while (iterator.hasNext()) { Entry<K, V> entry = iterator.next(); if (!collection.contains(entry.getValue()) && predicate.apply(entry)) { iterator.remove(); changed = true; } } return changed; } @Override public Object[] toArray() { // creating an ArrayList so filtering happens once return Lists.newArrayList(iterator()).toArray(); } @Override public <T> T[] toArray(T[] array) { return Lists.newArrayList(iterator()).toArray(array); } } } /** * Support {@code clear()}, {@code removeAll()}, and {@code retainAll()} when * filtering a filtered sorted map. */ private static <K, V> SortedMap<K, V> filterFiltered( FilteredEntrySortedMap<K, V> map, Predicate<? 
super Entry<K, V>> entryPredicate) { Predicate<Entry<K, V>> predicate = Predicates.and(map.predicate, entryPredicate); return new FilteredEntrySortedMap<K, V>(map.sortedMap(), predicate); } private static class FilteredEntrySortedMap<K, V> extends FilteredEntryMap<K, V> implements SortedMap<K, V> { FilteredEntrySortedMap(SortedMap<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) { super(unfiltered, entryPredicate); } SortedMap<K, V> sortedMap() { return (SortedMap<K, V>) unfiltered; } @Override public Comparator<? super K> comparator() { return sortedMap().comparator(); } @Override public K firstKey() { // correctly throws NoSuchElementException when filtered map is empty. return keySet().iterator().next(); } @Override public K lastKey() { SortedMap<K, V> headMap = sortedMap(); while (true) { // correctly throws NoSuchElementException when filtered map is empty. K key = headMap.lastKey(); if (apply(key, unfiltered.get(key))) { return key; } headMap = sortedMap().headMap(key); } } @Override public SortedMap<K, V> headMap(K toKey) { return new FilteredEntrySortedMap<K, V>(sortedMap().headMap(toKey), predicate); } @Override public SortedMap<K, V> subMap(K fromKey, K toKey) { return new FilteredEntrySortedMap<K, V>( sortedMap().subMap(fromKey, toKey), predicate); } @Override public SortedMap<K, V> tailMap(K fromKey) { return new FilteredEntrySortedMap<K, V>( sortedMap().tailMap(fromKey), predicate); } } private static class FilteredKeyMap<K, V> extends AbstractFilteredMap<K, V> { Predicate<? super K> keyPredicate; FilteredKeyMap(Map<K, V> unfiltered, Predicate<? super K> keyPredicate, Predicate<Entry<K, V>> entryPredicate) { super(unfiltered, entryPredicate); this.keyPredicate = keyPredicate; } Set<Entry<K, V>> entrySet; @Override public Set<Entry<K, V>> entrySet() { Set<Entry<K, V>> result = entrySet; return (result == null) ? entrySet = Sets.filter(unfiltered.entrySet(), predicate) : result; } Set<K> keySet; @Override public Set<K> keySet() { Set<K> result = keySet; return (result == null) ? keySet = Sets.filter(unfiltered.keySet(), keyPredicate) : result; } // The cast is called only when the key is in the unfiltered map, implying // that key is a K. @Override @SuppressWarnings("unchecked") public boolean containsKey(Object key) { return unfiltered.containsKey(key) && keyPredicate.apply((K) key); } } static class FilteredEntryMap<K, V> extends AbstractFilteredMap<K, V> { /** * Entries in this set satisfy the predicate, but they don't validate the * input to {@code Entry.setValue()}. */ final Set<Entry<K, V>> filteredEntrySet; FilteredEntryMap( Map<K, V> unfiltered, Predicate<? super Entry<K, V>> entryPredicate) { super(unfiltered, entryPredicate); filteredEntrySet = Sets.filter(unfiltered.entrySet(), predicate); } Set<Entry<K, V>> entrySet; @Override public Set<Entry<K, V>> entrySet() { Set<Entry<K, V>> result = entrySet; return (result == null) ? 
entrySet = new EntrySet() : result; } private class EntrySet extends ForwardingSet<Entry<K, V>> { @Override protected Set<Entry<K, V>> delegate() { return filteredEntrySet; } @Override public Iterator<Entry<K, V>> iterator() { final Iterator<Entry<K, V>> iterator = filteredEntrySet.iterator(); return new UnmodifiableIterator<Entry<K, V>>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public Entry<K, V> next() { final Entry<K, V> entry = iterator.next(); return new ForwardingMapEntry<K, V>() { @Override protected Entry<K, V> delegate() { return entry; } @Override public V setValue(V value) { checkArgument(apply(entry.getKey(), value)); return super.setValue(value); } }; } }; } } Set<K> keySet; @Override public Set<K> keySet() { Set<K> result = keySet; return (result == null) ? keySet = new KeySet() : result; } private class KeySet extends AbstractSet<K> { @Override public Iterator<K> iterator() { final Iterator<Entry<K, V>> iterator = filteredEntrySet.iterator(); return new UnmodifiableIterator<K>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public K next() { return iterator.next().getKey(); } }; } @Override public int size() { return filteredEntrySet.size(); } @Override public void clear() { filteredEntrySet.clear(); } @Override public boolean contains(Object o) { return containsKey(o); } @Override public boolean remove(Object o) { if (containsKey(o)) { unfiltered.remove(o); return true; } return false; } @Override public boolean removeAll(Collection<?> collection) { checkNotNull(collection); // for GWT boolean changed = false; for (Object obj : collection) { changed |= remove(obj); } return changed; } @Override public boolean retainAll(Collection<?> collection) { checkNotNull(collection); // for GWT boolean changed = false; Iterator<Entry<K, V>> iterator = unfiltered.entrySet().iterator(); while (iterator.hasNext()) { Entry<K, V> entry = iterator.next(); if (!collection.contains(entry.getKey()) && predicate.apply(entry)) { iterator.remove(); changed = true; } } return changed; } @Override public Object[] toArray() { // creating an ArrayList so filtering happens once return Lists.newArrayList(iterator()).toArray(); } @Override public <T> T[] toArray(T[] array) { return Lists.newArrayList(iterator()).toArray(array); } } } /** * {@code AbstractMap} extension that implements {@link #isEmpty()} as {@code * entrySet().isEmpty()} instead of {@code size() == 0} to speed up * implementations where {@code size()} is O(n), and it delegates the {@code * isEmpty()} methods of its key set and value collection to this * implementation. */ @GwtCompatible static abstract class ImprovedAbstractMap<K, V> extends AbstractMap<K, V> { /** * Creates the entry set to be returned by {@link #entrySet()}. This method * is invoked at most once on a given map, at the time when {@code entrySet} * is first called. 
*/ protected abstract Set<Entry<K, V>> createEntrySet(); private Set<Entry<K, V>> entrySet; @Override public Set<Entry<K, V>> entrySet() { Set<Entry<K, V>> result = entrySet; if (result == null) { entrySet = result = createEntrySet(); } return result; } private Set<K> keySet; @Override public Set<K> keySet() { Set<K> result = keySet; if (result == null) { return keySet = new KeySet<K, V>() { @Override Map<K, V> map() { return ImprovedAbstractMap.this; } }; } return result; } private Collection<V> values; @Override public Collection<V> values() { Collection<V> result = values; if (result == null) { return values = new Values<K, V>(){ @Override Map<K, V> map() { return ImprovedAbstractMap.this; } }; } return result; } /** * Returns {@code true} if this map contains no key-value mappings. * * <p>The implementation returns {@code entrySet().isEmpty()}. * * @return {@code true} if this map contains no key-value mappings */ @Override public boolean isEmpty() { return entrySet().isEmpty(); } } static final MapJoiner STANDARD_JOINER = Collections2.STANDARD_JOINER.withKeyValueSeparator("="); /** * Delegates to {@link Map#get}. Returns {@code null} on {@code * ClassCastException}. */ static <V> V safeGet(Map<?, V> map, Object key) { try { return map.get(key); } catch (ClassCastException e) { return null; } } /** * Delegates to {@link Map#containsKey}. Returns {@code false} on {@code * ClassCastException} */ static boolean safeContainsKey(Map<?, ?> map, Object key) { try { return map.containsKey(key); } catch (ClassCastException e) { return false; } } /** * Implements {@code Collection.contains} safely for forwarding collections of * map entries. If {@code o} is an instance of {@code Map.Entry}, it is * wrapped using {@link #unmodifiableEntry} to protect against a possible * nefarious equals method. * * <p>Note that {@code c} is the backing (delegate) collection, rather than * the forwarding collection. * * @param c the delegate (unwrapped) collection of map entries * @param o the object that might be contained in {@code c} * @return {@code true} if {@code c} contains {@code o} */ static <K, V> boolean containsEntryImpl(Collection<Entry<K, V>> c, Object o) { if (!(o instanceof Entry)) { return false; } return c.contains(unmodifiableEntry((Entry<?, ?>) o)); } /** * Implements {@code Collection.remove} safely for forwarding collections of * map entries. If {@code o} is an instance of {@code Map.Entry}, it is * wrapped using {@link #unmodifiableEntry} to protect against a possible * nefarious equals method. * * <p>Note that {@code c} is backing (delegate) collection, rather than the * forwarding collection. * * @param c the delegate (unwrapped) collection of map entries * @param o the object to remove from {@code c} * @return {@code true} if {@code c} was changed */ static <K, V> boolean removeEntryImpl(Collection<Entry<K, V>> c, Object o) { if (!(o instanceof Entry)) { return false; } return c.remove(unmodifiableEntry((Entry<?, ?>) o)); } /** * An implementation of {@link Map#equals}. */ static boolean equalsImpl(Map<?, ?> map, Object object) { if (map == object) { return true; } if (object instanceof Map) { Map<?, ?> o = (Map<?, ?>) object; return map.entrySet().equals(o.entrySet()); } return false; } /** * An implementation of {@link Map#hashCode}. */ static int hashCodeImpl(Map<?, ?> map) { return Sets.hashCodeImpl(map.entrySet()); } /** * An implementation of {@link Map#toString}. 
*/ static String toStringImpl(Map<?, ?> map) { StringBuilder sb = Collections2.newStringBuilderForCollection(map.size()).append('{'); STANDARD_JOINER.appendTo(sb, map); return sb.append('}').toString(); } /** * An implementation of {@link Map#putAll}. */ static <K, V> void putAllImpl( Map<K, V> self, Map<? extends K, ? extends V> map) { for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) { self.put(entry.getKey(), entry.getValue()); } } /** * An admittedly inefficient implementation of {@link Map#containsKey}. */ static boolean containsKeyImpl(Map<?, ?> map, @Nullable Object key) { for (Entry<?, ?> entry : map.entrySet()) { if (Objects.equal(entry.getKey(), key)) { return true; } } return false; } /** * An implementation of {@link Map#containsValue}. */ static boolean containsValueImpl(Map<?, ?> map, @Nullable Object value) { for (Entry<?, ?> entry : map.entrySet()) { if (Objects.equal(entry.getValue(), value)) { return true; } } return false; } abstract static class KeySet<K, V> extends AbstractSet<K> { abstract Map<K, V> map(); @Override public Iterator<K> iterator() { return Iterators.transform(map().entrySet().iterator(), new Function<Map.Entry<K, V>, K>() { @Override public K apply(Entry<K, V> entry) { return entry.getKey(); } }); } @Override public int size() { return map().size(); } @Override public boolean isEmpty() { return map().isEmpty(); } @Override public boolean contains(Object o) { return map().containsKey(o); } @Override public boolean remove(Object o) { if (contains(o)) { map().remove(o); return true; } return false; } @Override public boolean removeAll(Collection<?> c) { // TODO(user): find out why this is necessary to make GWT tests pass. return super.removeAll(checkNotNull(c)); } @Override public void clear() { map().clear(); } } abstract static class Values<K, V> extends AbstractCollection<V> { abstract Map<K, V> map(); @Override public Iterator<V> iterator() { return Iterators.transform(map().entrySet().iterator(), new Function<Entry<K, V>, V>() { @Override public V apply(Entry<K, V> entry) { return entry.getValue(); } }); } @Override public boolean remove(Object o) { try { return super.remove(o); } catch (UnsupportedOperationException e) { for (Entry<K, V> entry : map().entrySet()) { if (Objects.equal(o, entry.getValue())) { map().remove(entry.getKey()); return true; } } return false; } } @Override public boolean removeAll(Collection<?> c) { try { return super.removeAll(checkNotNull(c)); } catch (UnsupportedOperationException e) { Set<K> toRemove = Sets.newHashSet(); for (Entry<K, V> entry : map().entrySet()) { if (c.contains(entry.getValue())) { toRemove.add(entry.getKey()); } } return map().keySet().removeAll(toRemove); } } @Override public boolean retainAll(Collection<?> c) { try { return super.retainAll(checkNotNull(c)); } catch (UnsupportedOperationException e) { Set<K> toRetain = Sets.newHashSet(); for (Entry<K, V> entry : map().entrySet()) { if (c.contains(entry.getValue())) { toRetain.add(entry.getKey()); } } return map().keySet().retainAll(toRetain); } } @Override public int size() { return map().size(); } @Override public boolean isEmpty() { return map().isEmpty(); } @Override public boolean contains(@Nullable Object o) { return map().containsValue(o); } @Override public void clear() { map().clear(); } } abstract static class EntrySet<K, V> extends AbstractSet<Entry<K, V>> { abstract Map<K, V> map(); @Override public int size() { return map().size(); } @Override public void clear() { map().clear(); } @Override public boolean contains(Object o) 
{ if (o instanceof Entry) { Entry<?, ?> entry = (Entry<?, ?>) o; Object key = entry.getKey(); V value = map().get(key); return Objects.equal(value, entry.getValue()) && (value != null || map().containsKey(key)); } return false; } @Override public boolean isEmpty() { return map().isEmpty(); } @Override public boolean remove(Object o) { if (contains(o)) { Entry<?, ?> entry = (Entry<?, ?>) o; return map().keySet().remove(entry.getKey()); } return false; } @Override public boolean removeAll(Collection<?> c) { try { return super.removeAll(checkNotNull(c)); } catch (UnsupportedOperationException e) { // if the iterators don't support remove boolean changed = false; for (Object o : c) { changed |= remove(o); } return changed; } } @Override public boolean retainAll(Collection<?> c) { try { return super.retainAll(checkNotNull(c)); } catch (UnsupportedOperationException e) { // if the iterators don't support remove Set<Object> keys = Sets.newHashSetWithExpectedSize(c.size()); for (Object o : c) { if (contains(o)) { Entry<?, ?> entry = (Entry<?, ?>) o; keys.add(entry.getKey()); } } return map().keySet().retainAll(keys); } } } }
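/*
 * Usage sketch (not part of Maps.java above) illustrating the filterEntries() contract
 * described in its Javadoc: the result is a live view, writes that satisfy the predicate
 * pass through to the backing map, and writes that do not satisfy it throw
 * IllegalArgumentException. The class name FilterEntriesSketch and the sample data are
 * illustrative assumptions, not code from this commit.
 */
import java.util.Map;
import java.util.TreeMap;

import com.google.common.base.Predicate;
import com.google.common.collect.Maps;

public class FilterEntriesSketch {
  public static void main(String[] args) {
    Map<String, Integer> scores = new TreeMap<String, Integer>();
    scores.put("alice", 90);
    scores.put("bob", 40);

    Predicate<Map.Entry<String, Integer>> passing =
        new Predicate<Map.Entry<String, Integer>>() {
          @Override public boolean apply(Map.Entry<String, Integer> entry) {
            return entry.getValue() >= 50;
          }
        };

    // Live view over the backing map: only entries satisfying the predicate are visible.
    Map<String, Integer> passingScores = Maps.filterEntries(scores, passing);
    System.out.println(passingScores); // prints {alice=90}

    // put() writes through when the new entry satisfies the predicate...
    passingScores.put("carol", 75);
    System.out.println(scores.containsKey("carol")); // prints true

    // ...and rejects entries that do not, as documented above.
    try {
      passingScores.put("dave", 10);
    } catch (IllegalArgumentException expected) {
      System.out.println("rejected by predicate");
    }
  }
}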
Add Ubuntu patch to work around OpenJDK bug Change-Id: Ide9497aeba2570c5151cd7f6e6e4cea6480d3885
guava/src/com/google/common/collect/Maps.java
Add Ubuntu patch to work around OpenJDK bug
Java
apache-2.0
c3d8f9dc36075cca60bc7fdc0931f2f048dce78e
0
OrienteerBAP/Orienteer,OrienteerBAP/Orienteer,OrienteerBAP/Orienteer,OrienteerBAP/Orienteer
package org.orienteer.metrics; import org.apache.wicket.MetaDataKey; import org.apache.wicket.core.request.handler.IPageClassRequestHandler; import org.apache.wicket.request.IRequestHandler; import org.apache.wicket.request.cycle.IRequestCycleListener; import org.apache.wicket.request.cycle.RequestCycle; import org.apache.wicket.request.http.WebRequest; import org.apache.wicket.request.http.WebResponse; import org.orienteer.core.OrienteerWebApplication; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.prometheus.client.CollectorRegistry; import io.prometheus.client.Counter; import io.prometheus.client.Histogram; /** * {@link IRequestCycleListener} to monitor requests */ public class OMetricsRequestCycleListener implements IRequestCycleListener { private static final Logger LOG = LoggerFactory.getLogger(OMetricsRequestCycleListener.class); private static OMetricsRequestCycleListener listener; private static final MetaDataKey<Histogram.Timer> REQUESTS_HISTOGRAM_KEY = new MetaDataKey<Histogram.Timer>() {}; private static final Counter COUNTER_EXCEPTIONS = Counter.build() .namespace("wicket") .name("exceptions_count") .help("Total number of exceptions") .labelNames("ajax") .create(); private static final Counter COUNTER_EXECUTIONS = Counter.build() .namespace("wicket") .name("executions") .help("Total number request handlers executions") .labelNames("ajax", "handler") .create(); private static final Counter COUNTER_PAGES = Counter.build() .namespace("wicket") .name("pages") .help("Total number of requests per page") .labelNames("ajax", "handler") .create(); private static final Histogram HISTOGRAM_REQUESTS = Histogram.build() .namespace("wicket") .name("requests") .help("Request times and counts histogram") .labelNames("ajax") .create(); private OMetricsRequestCycleListener() { CollectorRegistry.defaultRegistry.register(COUNTER_EXCEPTIONS); //Just to init by 0 sub counters COUNTER_EXCEPTIONS.labels(Boolean.TRUE.toString()).inc(0); COUNTER_EXCEPTIONS.labels(Boolean.FALSE.toString()).inc(0); CollectorRegistry.defaultRegistry.register(COUNTER_EXECUTIONS); CollectorRegistry.defaultRegistry.register(COUNTER_PAGES); CollectorRegistry.defaultRegistry.register(HISTOGRAM_REQUESTS); } @Override public void onBeginRequest(RequestCycle cycle) { Histogram.Timer requestTimer = HISTOGRAM_REQUESTS .labels(Boolean.toString(((WebRequest)cycle.getRequest()).isAjax())) .startTimer(); cycle.setMetaData(REQUESTS_HISTOGRAM_KEY, requestTimer); } @Override public void onEndRequest(RequestCycle cycle) { Histogram.Timer requestTimer = cycle.getMetaData(REQUESTS_HISTOGRAM_KEY); requestTimer.observeDuration(); } @Override public IRequestHandler onException(RequestCycle cycle, Exception ex) { COUNTER_EXCEPTIONS.labels(Boolean.toString(((WebRequest)cycle.getRequest()).isAjax())).inc(); return null; } @Override public void onRequestHandlerExecuted(RequestCycle cycle, IRequestHandler handler) { String ajaxLabel = Boolean.toString(((WebRequest)cycle.getRequest()).isAjax()); COUNTER_EXECUTIONS.labels(ajaxLabel, handler.getClass().getSimpleName()).inc(); if(handler instanceof IPageClassRequestHandler) { COUNTER_PAGES.labels(ajaxLabel, ((IPageClassRequestHandler)handler).getPageClass().getSimpleName()).inc(); } } protected void onDestroy() { CollectorRegistry.defaultRegistry.unregister(COUNTER_EXECUTIONS); CollectorRegistry.defaultRegistry.unregister(COUNTER_PAGES); CollectorRegistry.defaultRegistry.unregister(COUNTER_EXCEPTIONS); CollectorRegistry.defaultRegistry.unregister(HISTOGRAM_REQUESTS); } public static 
synchronized void install(OrienteerWebApplication app) { if(listener!=null) deinstall(app); listener = new OMetricsRequestCycleListener(); app.getRequestCycleListeners().add(listener); } public static synchronized void deinstall(OrienteerWebApplication app) { if(listener!=null) { listener.onDestroy(); app.getRequestCycleListeners().remove(listener); listener = null; } } }
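/*
 * Minimal wiring sketch (not part of this commit) showing how the listener above is meant
 * to be used: install() registers COUNTER_EXCEPTIONS, COUNTER_EXECUTIONS, COUNTER_PAGES and
 * HISTOGRAM_REQUESTS with the default Prometheus CollectorRegistry and attaches the listener
 * to the application's request cycle; deinstall() reverses both. The MetricsBootstrap class
 * and the idea of calling it from application start-up/shutdown code are assumptions made
 * for illustration only.
 */
import org.apache.wicket.Application;
import org.orienteer.core.OrienteerWebApplication;
import org.orienteer.metrics.OMetricsRequestCycleListener;

public class MetricsBootstrap {

    /** Call once during start-up, on a thread that has the Wicket application attached. */
    public static void enableMetrics() {
        OrienteerWebApplication app = (OrienteerWebApplication) Application.get();
        OMetricsRequestCycleListener.install(app);
    }

    /** Call during shutdown so redeployments don't fail on duplicate collector registration. */
    public static void disableMetrics() {
        OrienteerWebApplication app = (OrienteerWebApplication) Application.get();
        OMetricsRequestCycleListener.deinstall(app);
    }
}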
orienteer-metrics/src/main/java/org/orienteer/metrics/OMetricsRequestCycleListener.java
package org.orienteer.metrics; import org.apache.wicket.MetaDataKey; import org.apache.wicket.request.IRequestHandler; import org.apache.wicket.request.cycle.IRequestCycleListener; import org.apache.wicket.request.cycle.RequestCycle; import org.apache.wicket.request.http.WebRequest; import org.apache.wicket.request.http.WebResponse; import org.orienteer.core.OrienteerWebApplication; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.prometheus.client.CollectorRegistry; import io.prometheus.client.Counter; import io.prometheus.client.Histogram; /** * {@link IRequestCycleListener} to monitor requests */ public class OMetricsRequestCycleListener implements IRequestCycleListener { private static final Logger LOG = LoggerFactory.getLogger(OMetricsRequestCycleListener.class); private static OMetricsRequestCycleListener listener; private static final MetaDataKey<Histogram.Timer> REQUESTS_HISTOGRAM_KEY = new MetaDataKey<Histogram.Timer>() {}; private static final Counter COUNTER_EXCEPTIONS = Counter.build() .namespace("wicket") .name("exceptions_count") .help("Total number of exceptions") .labelNames("ajax") .create(); private static final Counter COUNTER_EXECUTIONS = Counter.build() .namespace("wicket") .name("executions") .help("Total number request handlers executions") .labelNames("ajax", "handler") .create(); private static final Histogram HISTOGRAM_REQUESTS = Histogram.build() .namespace("wicket") .name("requests") .help("Request times and counts histogram") .labelNames("ajax") .create(); private OMetricsRequestCycleListener() { CollectorRegistry.defaultRegistry.register(COUNTER_EXCEPTIONS); //Just to init by 0 sub counters COUNTER_EXCEPTIONS.labels(Boolean.TRUE.toString()).inc(0); COUNTER_EXCEPTIONS.labels(Boolean.FALSE.toString()).inc(0); CollectorRegistry.defaultRegistry.register(COUNTER_EXECUTIONS); CollectorRegistry.defaultRegistry.register(HISTOGRAM_REQUESTS); } @Override public void onBeginRequest(RequestCycle cycle) { Histogram.Timer requestTimer = HISTOGRAM_REQUESTS .labels(Boolean.toString(((WebRequest)cycle.getRequest()).isAjax())) .startTimer(); cycle.setMetaData(REQUESTS_HISTOGRAM_KEY, requestTimer); } @Override public void onEndRequest(RequestCycle cycle) { Histogram.Timer requestTimer = cycle.getMetaData(REQUESTS_HISTOGRAM_KEY); requestTimer.observeDuration(); } @Override public IRequestHandler onException(RequestCycle cycle, Exception ex) { COUNTER_EXCEPTIONS.labels(Boolean.toString(((WebRequest)cycle.getRequest()).isAjax())).inc(); return null; } @Override public void onRequestHandlerExecuted(RequestCycle cycle, IRequestHandler handler) { COUNTER_EXECUTIONS.labels(Boolean.toString(((WebRequest)cycle.getRequest()).isAjax()), handler.getClass().getSimpleName()).inc(); } protected void onDestroy() { CollectorRegistry.defaultRegistry.unregister(COUNTER_EXECUTIONS); CollectorRegistry.defaultRegistry.unregister(COUNTER_EXCEPTIONS); CollectorRegistry.defaultRegistry.unregister(HISTOGRAM_REQUESTS); } public static synchronized void install(OrienteerWebApplication app) { if(listener!=null) deinstall(app); listener = new OMetricsRequestCycleListener(); app.getRequestCycleListeners().add(listener); } public static synchronized void deinstall(OrienteerWebApplication app) { if(listener!=null) { listener.onDestroy(); app.getRequestCycleListeners().remove(listener); listener = null; } } }
Add counter of rendered pages for issue #395
orienteer-metrics/src/main/java/org/orienteer/metrics/OMetricsRequestCycleListener.java
Add counter of rendered pages for issue #395
Java
apache-2.0
04f817dc10f53e7baf337d71f6766ca907dd4bd7
0
reportportal/commons-dao
/* * Copyright (C) 2018 EPAM Systems * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.epam.ta.reportportal.dao; import com.epam.ta.reportportal.commons.querygen.CriteriaHolder; import com.epam.ta.reportportal.commons.querygen.Filter; import com.epam.ta.reportportal.commons.querygen.QueryBuilder; import com.epam.ta.reportportal.commons.validation.Suppliers; import com.epam.ta.reportportal.dao.util.JooqFieldNameTransformer; import com.epam.ta.reportportal.entity.widget.content.*; import com.epam.ta.reportportal.exception.ReportPortalException; import com.epam.ta.reportportal.jooq.enums.JTestItemTypeEnum; import com.epam.ta.reportportal.ws.model.ErrorType; import com.google.common.collect.Lists; import org.jooq.*; import org.jooq.impl.DSL; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Sort; import org.springframework.stereotype.Repository; import javax.annotation.Nullable; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.*; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import static com.epam.ta.reportportal.commons.querygen.QueryBuilder.STATISTICS_KEY; import static com.epam.ta.reportportal.dao.constant.WidgetContentRepositoryConstants.*; import static com.epam.ta.reportportal.dao.util.JooqFieldNameTransformer.fieldName; import static com.epam.ta.reportportal.dao.util.WidgetContentUtil.*; import static com.epam.ta.reportportal.jooq.Tables.*; import static com.epam.ta.reportportal.jooq.tables.JActivity.ACTIVITY; import static com.epam.ta.reportportal.jooq.tables.JIssue.ISSUE; import static com.epam.ta.reportportal.jooq.tables.JIssueTicket.ISSUE_TICKET; import static com.epam.ta.reportportal.jooq.tables.JLaunch.LAUNCH; import static com.epam.ta.reportportal.jooq.tables.JProject.PROJECT; import static com.epam.ta.reportportal.jooq.tables.JTestItem.TEST_ITEM; import static com.epam.ta.reportportal.jooq.tables.JTestItemResults.TEST_ITEM_RESULTS; import static com.epam.ta.reportportal.jooq.tables.JTicket.TICKET; import static com.epam.ta.reportportal.jooq.tables.JUsers.USERS; import static java.util.Optional.ofNullable; import static java.util.stream.Collectors.*; import static org.jooq.impl.DSL.*; /** * Repository that contains queries of content loading for widgets. 
* * @author Pavel Bortnik */ @Repository public class WidgetContentRepositoryImpl implements WidgetContentRepository { @Autowired private DSLContext dsl; private static final List<JTestItemTypeEnum> HAS_METHOD_OR_CLASS = Arrays.stream(JTestItemTypeEnum.values()).filter(it -> { String name = it.name(); return name.contains("METHOD") || name.contains("CLASS"); }).collect(Collectors.toList()); @Override public OverallStatisticsContent overallStatisticsContent(Filter filter, Sort sort, List<String> contentFields, boolean latest, int limit) { return OVERALL_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).with(latest).build()) .select(STATISTICS_FIELD.NAME, sum(STATISTICS.S_COUNTER).as(SUM)) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(contentFields)) .groupBy(STATISTICS_FIELD.NAME) .fetch()); } @Override public List<CriteriaHistoryItem> topItemsByCriteria(Filter filter, String criteria, int limit, boolean includeMethods) { return dsl.with(HISTORY) .as(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).build()) .select(TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME, DSL.arrayAgg(DSL.when(STATISTICS_FIELD.NAME.eq(criteria), "true").otherwise("false")) .orderBy(LAUNCH.NUMBER.asc()) .as(STATUS_HISTORY), DSL.arrayAgg(TEST_ITEM.START_TIME).orderBy(LAUNCH.NUMBER.asc()).as(START_TIME_HISTORY), DSL.sum(DSL.when(STATISTICS_FIELD.NAME.eq(criteria), 1).otherwise(ZERO_QUERY_VALUE)).as(CRITERIA), DSL.count(TEST_ITEM_RESULTS.STATUS).as(TOTAL) ) .from(LAUNCH) .join(TEST_ITEM) .on(LAUNCH.ID.eq(TEST_ITEM.LAUNCH_ID)) .join(TEST_ITEM_RESULTS) .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID)) .join(STATISTICS) .on(TEST_ITEM.ITEM_ID.eq(STATISTICS.ITEM_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(TEST_ITEM.TYPE.in(includeMethods ? 
Lists.newArrayList(HAS_METHOD_OR_CLASS, JTestItemTypeEnum.STEP) : Collections.singletonList(JTestItemTypeEnum.STEP))) .and(STATISTICS_FIELD.NAME.eq(criteria)) .and(TEST_ITEM.LAUNCH_ID.in(dsl.select(field(name(LAUNCHES, ID)).cast(Long.class)).from(name(LAUNCHES)))) .groupBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME)) .select() .from(DSL.table(DSL.name(HISTORY))) .where(DSL.field(DSL.name(CRITERIA)).greaterThan(ZERO_QUERY_VALUE)) .orderBy(DSL.field(DSL.name(CRITERIA)).desc(), DSL.field(DSL.name(TOTAL)).asc()) .limit(limit) .fetchInto(CriteriaHistoryItem.class); } @Override public List<FlakyCasesTableContent> flakyCasesStatistics(Filter filter, int limit) { Select commonSelect = dsl.select(field(name(LAUNCHES, ID)).cast(Long.class)) .from(name(LAUNCHES)) .orderBy(field(name(LAUNCHES, NUMBER)).desc()) .limit(limit); return dsl.select(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.UNIQUE_ID.getName())).as(UNIQUE_ID), field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.NAME.getName())).as(ITEM_NAME), DSL.arrayAgg(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM_RESULTS.STATUS.getName()))).as(STATUSES), sum(field(name(FLAKY_TABLE_RESULTS, SWITCH_FLAG)).cast(Long.class)).as(FLAKY_COUNT), sum(field(name(FLAKY_TABLE_RESULTS, TOTAL)).cast(Long.class)).as(TOTAL) ) .from(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(LAUNCH.NUMBER.desc()).build()) .select(TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME, TEST_ITEM_RESULTS.STATUS, when(TEST_ITEM_RESULTS.STATUS.notEqual(lag(TEST_ITEM_RESULTS.STATUS).over(orderBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.ITEM_ID ))) .and(TEST_ITEM.UNIQUE_ID.equal(lag(TEST_ITEM.UNIQUE_ID).over(orderBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.ITEM_ID )))), 1 ).otherwise(ZERO_QUERY_VALUE).as(SWITCH_FLAG), count(TEST_ITEM_RESULTS.STATUS).as(TOTAL) ) .from(LAUNCH) .join(TEST_ITEM) .on(LAUNCH.ID.eq(TEST_ITEM.LAUNCH_ID)) .join(TEST_ITEM_RESULTS) .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID)) .where(LAUNCH.ID.in(commonSelect)) .and(TEST_ITEM.TYPE.eq(JTestItemTypeEnum.STEP)) .groupBy(TEST_ITEM.ITEM_ID, TEST_ITEM_RESULTS.STATUS, TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME) .asTable(FLAKY_TABLE_RESULTS)) .groupBy(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.UNIQUE_ID.getName())), field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.NAME.getName())) ) .orderBy(fieldName(FLAKY_COUNT).desc(), fieldName(TOTAL).asc(), fieldName(UNIQUE_ID)) .limit(20) .fetchInto(FlakyCasesTableContent.class); } @Override public List<LaunchesStatisticsContent> launchStatistics(Filter filter, List<String> contentFields, @Nullable Sort sort, int limit) { return LAUNCHES_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.NAME, STATISTICS_FIELD.NAME, STATISTICS.S_COUNTER) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(contentFields)) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? 
SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .fetch()); } @Override public List<InvestigatedStatisticsResult> investigatedStatistics(Filter filter, Sort sort, int limit) { List<Field<?>> groupingFields = StreamSupport.stream(sort.spliterator(), false).map(s -> field(s.getProperty())).collect(toList()); Collections.addAll(groupingFields, LAUNCH.ID, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.NAME); return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.NAME, round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(DEFECTS_TO_INVESTIGATE_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField() .cast(Double.class)) .div(nullif(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.in(DEFECTS_AUTOMATION_BUG_TOTAL, DEFECTS_NO_DEFECT_TOTAL, DEFECTS_TO_INVESTIGATE_TOTAL, DEFECTS_PRODUCT_BUG_TOTAL, DEFECTS_SYSTEM_ISSUE_TOTAL ).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField(), 0)), 2).as(TO_INVESTIGATE) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .groupBy(groupingFields) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .fetch(INVESTIGATED_STATISTICS_CONTENT_RECORD_MAPPER); } @Override public PassingRateStatisticsResult passingRatePerLaunchStatistics(Filter filter, Sort sort, int limit) { List<Field<?>> groupingFields = StreamSupport.stream(sort.spliterator(), false).map(s -> field(s.getProperty())).collect(toList()); return buildPassingRateSelect(filter, sort, limit).groupBy(groupingFields) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .fetchInto(PassingRateStatisticsResult.class) .stream() .findFirst() .orElseThrow(() -> new ReportPortalException("No results for filter were found")); } @Override public PassingRateStatisticsResult summaryPassingRateStatistics(Filter filter, Sort sort, int limit) { return buildPassingRateSelect(filter, sort, limit).fetchInto(PassingRateStatisticsResult.class) .stream() .findFirst() .orElseThrow(() -> new ReportPortalException("No results for filter were found")); } @Override public List<CasesTrendContent> casesTrendStatistics(Filter filter, String contentField, Sort sort, int limit) { List<? extends SortField<?>> deltaCounterSort = ofNullable(sort).map(s -> StreamSupport.stream(s.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? 
SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())).orElseGet(Collections::emptyList); return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.NAME, STATISTICS.S_COUNTER.as(contentField), STATISTICS.S_COUNTER.sub(lag(STATISTICS.S_COUNTER).over().orderBy(deltaCounterSort)).as(DELTA) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.eq(contentField)) .orderBy(deltaCounterSort) .fetchInto(CasesTrendContent.class); } @Override public List<LaunchesStatisticsContent> bugTrendStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) { return BUG_TREND_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, STATISTICS_FIELD.NAME, STATISTICS.S_COUNTER) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(contentFields)) .fetch()); } @Override public List<LaunchesStatisticsContent> launchesComparisonStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) { List<String> executionStatisticsFields = contentFields.stream().filter(cf -> cf.contains(EXECUTIONS_KEY)).collect(toList()); List<String> defectStatisticsFields = contentFields.stream().filter(cf -> cf.contains(DEFECTS_KEY)).collect(toList()); return LAUNCHES_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, STATISTICS_FIELD.NAME, round(val(PERCENTAGE_MULTIPLIER).mul(STATISTICS.S_COUNTER) .div(nullif(DSL.select(DSL.sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)) .and(STATISTICS_FIELD.NAME.in(executionStatisticsFields)), 0).cast(Double.class)), 2 ).as("s_counter") ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(executionStatisticsFields)) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? 
SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .unionAll(DSL.select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, STATISTICS_FIELD.NAME, round(val(PERCENTAGE_MULTIPLIER).mul(STATISTICS.S_COUNTER) .div(nullif(DSL.select(DSL.sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)) .and(STATISTICS_FIELD.NAME.in(defectStatisticsFields)), 0).cast(Double.class)), 2) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(defectStatisticsFields)) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList()))) .fetch()); } @Override public List<LaunchesDurationContent> launchesDurationStatistics(Filter filter, Sort sort, boolean isLatest, int limit) { return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(isLatest).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.STATUS, LAUNCH.START_TIME, LAUNCH.END_TIME, timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .fetchInto(LaunchesDurationContent.class); } @Override public List<NotPassedCasesContent> notPassedCasesStatistics(Filter filter, Sort sort, int limit) { return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, STATISTICS.S_COUNTER, coalesce(round(val(PERCENTAGE_MULTIPLIER).mul(DSL.select(DSL.sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(EXECUTIONS_SKIPPED, EXECUTIONS_FAILED)) .and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)) .asField() .cast(Double.class)).div(nullif(STATISTICS.S_COUNTER, 0).cast(Double.class)), 2), 0).as(PERCENTAGE) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(EXECUTIONS_TOTAL)) .fetch(NOT_PASSED_CASES_CONTENT_RECORD_MAPPER); } @Override public List<LaunchesTableContent> launchesTableStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) { Map<String, String> criteria = filter.getTarget() .getCriteriaHolders() .stream() .collect(Collectors.toMap(CriteriaHolder::getFilterCriteria, CriteriaHolder::getQueryCriteria)); boolean remove = contentFields.remove("tags"); List<Field<?>> selectFields = contentFields.stream() .filter(cf -> !cf.startsWith(STATISTICS_KEY)) .map(cf -> field(ofNullable(criteria.get(cf)).orElseThrow(() -> new ReportPortalException(Suppliers.formattedSupplier( "Unknown table field - '{}'", cf ).get())))) .collect(Collectors.toList()); Collections.addAll(selectFields, LAUNCH.ID, fieldName(STATISTICS_TABLE, 
STATISTICS_COUNTER), fieldName(STATISTICS_TABLE, SF_NAME)); List<SortField<?>> orderFields = StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList()); List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList()); return LAUNCHES_TABLE_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(selectFields) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(statisticsFields)) .asTable(STATISTICS_TABLE)) .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class))) .join(USERS) .on(LAUNCH.USER_ID.eq(USERS.ID)) .orderBy(orderFields) .fetch(), contentFields); } @Override public List<ActivityContent> activityStatistics(Filter filter, Sort sort, int limit) { return dsl.with(ACTIVITIES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(ACTIVITY.ID.as(ID), ACTIVITY.ACTION.as(ACTION_TYPE), ACTIVITY.ENTITY.as(ENTITY), ACTIVITY.CREATION_DATE.as(LAST_MODIFIED), USERS.LOGIN.as(USER_LOGIN), PROJECT.NAME.as(PROJECT_NAME) ) .from(ACTIVITY) .join(ACTIVITIES) .on(fieldName(ACTIVITIES, ID).cast(Long.class).eq(ACTIVITY.ID)) .join(USERS) .on(ACTIVITY.USER_ID.eq(USERS.ID)) .join(PROJECT) .on(ACTIVITY.PROJECT_ID.eq(PROJECT.ID)) .fetchInto(ActivityContent.class); } @Override public Map<String, List<UniqueBugContent>> uniqueBugStatistics(Filter filter, Sort sort, boolean isLatest, int limit) { List<UniqueBugContent> uniqueBugContents = dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(limit).with(sort).with(isLatest).build()) .select(TICKET.TICKET_ID, TICKET.SUBMIT_DATE, TICKET.URL, TEST_ITEM.ITEM_ID, TEST_ITEM.NAME, TEST_ITEM.DESCRIPTION, TEST_ITEM.LAUNCH_ID, USERS.LOGIN ) .from(TEST_ITEM) .join(LAUNCHES) .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(TEST_ITEM.LAUNCH_ID)) .join(TEST_ITEM_RESULTS) .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID)) .leftJoin(ISSUE) .on(TEST_ITEM.ITEM_ID.eq(ISSUE.ISSUE_ID)) .leftJoin(ISSUE_TICKET) .on(ISSUE.ISSUE_ID.eq(ISSUE_TICKET.ISSUE_ID)) .join(TICKET) .on(ISSUE_TICKET.TICKET_ID.eq(TICKET.ID)) .join(USERS) .on(TICKET.SUBMITTER_ID.eq(USERS.ID)) .fetchInto(UniqueBugContent.class); return uniqueBugContents.stream().collect(groupingBy(UniqueBugContent::getTicketId, LinkedHashMap::new, toList())); } @Override public Map<String, List<CumulativeTrendChartContent>> cumulativeTrendStatistics(Filter filter, List<String> contentFields, Sort sort, String tagPrefix, int limit) { List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList()); return CUMULATIVE_TREND_CHART_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(LAUNCHES_COUNT).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, ITEM_ATTRIBUTE.ID, ITEM_ATTRIBUTE.VALUE, fieldName(STATISTICS_TABLE, STATISTICS_COUNTER), fieldName(STATISTICS_TABLE, SF_NAME) ) .from(LAUNCH) .join(LAUNCHES) .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(LAUNCH.ID)) .join(ITEM_ATTRIBUTE) .on(ITEM_ATTRIBUTE.LAUNCH_ID.eq(LAUNCH.ID)) 
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(statisticsFields)) .asTable(STATISTICS_TABLE)) .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class))) .orderBy(ofNullable(sort).map(s -> StreamSupport.stream(s.spliterator(), false) .map(order -> field(name(order.getProperty())).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())).orElseGet(Collections::emptyList)) .fetch()); } @Override public Map<String, List<ProductStatusStatisticsContent>> productStatusGroupedByFilterStatistics(Map<Filter, Sort> filterSortMapping, List<String> contentFields, List<String> tags, boolean isLatest, int limit) { Select<? extends Record> select = filterSortMapping.entrySet() .stream() .map(f -> (Select<? extends Record>) buildFilterGroupedQuery(f.getKey(), isLatest, f.getValue(), limit, contentFields, tags )) .collect(Collectors.toList()) .stream() .reduce((prev, curr) -> curr = prev.unionAll(curr)) .orElseThrow(() -> new ReportPortalException(ErrorType.BAD_REQUEST_ERROR, "Query build for Product Status Widget failed")); Map<String, List<ProductStatusStatisticsContent>> productStatusContent = PRODUCT_STATUS_FILTER_GROUPED_FETCHER.apply(select.fetch()); productStatusContent.put(TOTAL, countFilterTotalStatistics(productStatusContent)); return productStatusContent; } @Override public List<ProductStatusStatisticsContent> productStatusGroupedByLaunchesStatistics(Filter filter, List<String> contentFields, List<String> tags, Sort sort, boolean isLatest, int limit) { List<ProductStatusStatisticsContent> productStatusStatisticsResult = PRODUCT_STATUS_LAUNCH_GROUPED_FETCHER.apply( buildLaunchGroupedQuery(filter, isLatest, sort, limit, contentFields, tags).fetch()); productStatusStatisticsResult.add(countLaunchTotalStatistics(productStatusStatisticsResult)); return productStatusStatisticsResult; } @Override public List<MostTimeConsumingTestCasesContent> mostTimeConsumingTestCasesStatistics(Filter filter) { return dsl.with(ITEMS) .as(QueryBuilder.newBuilder(filter).build()) .select(TEST_ITEM.ITEM_ID.as(ID), TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME, TEST_ITEM.TYPE, TEST_ITEM.START_TIME, TEST_ITEM_RESULTS.END_TIME, TEST_ITEM_RESULTS.DURATION, TEST_ITEM_RESULTS.STATUS ) .from(TEST_ITEM) .join(ITEMS) .on(fieldName(ITEMS, ID).cast(Long.class).eq(TEST_ITEM.ITEM_ID)) .join(TEST_ITEM_RESULTS) .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID)) .orderBy(fieldName(TEST_ITEM_RESULTS.DURATION).desc()) .limit(20) .fetchInto(MostTimeConsumingTestCasesContent.class); } private List<SortField<Object>> buildSortFields(Sort sort) { return ofNullable(sort).map(s -> StreamSupport.stream(s.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())).orElseGet(Collections::emptyList); } private SelectOnConditionStep<? 
extends Record> buildPassingRateSelect(Filter filter, Sort sort, int limit) { return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(sum(when(STATISTICS_FIELD.NAME.eq(EXECUTIONS_PASSED), STATISTICS.S_COUNTER).otherwise(0)).as(PASSED), sum(when(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL), STATISTICS.S_COUNTER).otherwise(0)).as(TOTAL) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)); } private SelectSeekStepN<? extends Record> buildFilterGroupedQuery(Filter filter, boolean isLatest, Sort sort, int limit, Collection<String> contentFields, Collection<String> tags) { List<Field<?>> fields = Lists.newArrayList(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.STATUS, fieldName(STATISTICS_TABLE, SF_NAME), fieldName(STATISTICS_TABLE, STATISTICS_COUNTER), round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_PASSED).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField() .cast(Double.class)) .div(nullif(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField(), 0)), 2).as(PASSING_RATE), timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION), DSL.selectDistinct(FILTER.NAME).from(FILTER).where(FILTER.ID.eq(filter.getId())).asField(FILTER_NAME) ); return buildProductStatusQuery(filter, isLatest, sort, limit, fields, contentFields, tags).orderBy(buildSortFields(sort)); } private SelectSeekStepN<? extends Record> buildLaunchGroupedQuery(Filter filter, boolean isLatest, Sort sort, int limit, Collection<String> contentFields, Collection<String> tags) { List<Field<?>> fields = Lists.newArrayList(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.STATUS, fieldName(STATISTICS_TABLE, SF_NAME), fieldName(STATISTICS_TABLE, STATISTICS_COUNTER), round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_PASSED).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField() .cast(Double.class)) .div(nullif(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField(), 0)), 2).as(PASSING_RATE), timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION) ); return buildProductStatusQuery(filter, isLatest, sort, limit, fields, contentFields, tags).orderBy(buildSortFields(sort)); } private SelectOnConditionStep<? 
extends Record> buildProductStatusQuery(Filter filter, boolean isLatest, Sort sort, int limit, Collection<Field<?>> fields, Collection<String> contentFields, Collection<String> tags) { List<Condition> conditions = tags.stream() .map(cf -> ITEM_ATTRIBUTE.KEY.like(cf + LIKE_CONDITION_SYMBOL)) .collect(Collectors.toList()); Optional<Condition> combinedTagCondition = conditions.stream().reduce((prev, curr) -> curr = prev.or(curr)); List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList()); if (combinedTagCondition.isPresent()) { Collections.addAll(fields, fieldName(ATTRIBUTE_TABLE, ATTRIBUTE_ID), fieldName(ATTRIBUTE_TABLE, ATTRIBUTE_VALUE)); return getProductStatusSelect(filter, isLatest, sort, limit, fields, statisticsFields).leftJoin(DSL.select(ITEM_ATTRIBUTE.ID.as( ATTRIBUTE_ID), ITEM_ATTRIBUTE.VALUE.as(ATTRIBUTE_VALUE)) .from(ITEM_ATTRIBUTE) .where(combinedTagCondition.get()) .asTable(ATTRIBUTE_TABLE)).on(LAUNCH.ID.eq(fieldName(ATTRIBUTE_TABLE, ATTRIBUTE_ID).cast(Long.class))); } else { return getProductStatusSelect(filter, isLatest, sort, limit, fields, statisticsFields); } } private SelectOnConditionStep<? extends Record> getProductStatusSelect(Filter filter, boolean isLatest, Sort sort, int limit, Collection<Field<?>> fields, Collection<String> contentFields) { return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(isLatest).with(sort).with(limit).build()) .select(fields) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(contentFields)) .asTable(STATISTICS_TABLE)) .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class))); } private ProductStatusStatisticsContent countLaunchTotalStatistics(List<ProductStatusStatisticsContent> launchesStatisticsResult) { Map<String, Integer> total = launchesStatisticsResult.stream() .flatMap(lsc -> lsc.getValues().entrySet().stream()) .collect(Collectors.groupingBy(entry -> (entry.getKey()), summingInt(entry -> Integer.parseInt(entry.getValue())))); Double averagePassingRate = launchesStatisticsResult.stream() .collect(averagingDouble(lsc -> ofNullable(lsc.getPassingRate()).orElse(0D))); ProductStatusStatisticsContent launchesStatisticsContent = new ProductStatusStatisticsContent(); launchesStatisticsContent.setTotalStatistics(total); Double roundedAveragePassingRate = BigDecimal.valueOf(averagePassingRate).setScale(2, RoundingMode.HALF_UP).doubleValue(); launchesStatisticsContent.setAveragePassingRate(roundedAveragePassingRate); return launchesStatisticsContent; } private List<ProductStatusStatisticsContent> countFilterTotalStatistics( Map<String, List<ProductStatusStatisticsContent>> launchesStatisticsResult) { Map<String, Integer> total = launchesStatisticsResult.values() .stream() .flatMap(Collection::stream) .flatMap(lsc -> lsc.getValues().entrySet().stream()) .collect(Collectors.groupingBy(entry -> (entry.getKey()), summingInt(entry -> Integer.parseInt(entry.getValue())))); Double averagePassingRate = launchesStatisticsResult.values() .stream() .flatMap(Collection::stream) .collect(averagingDouble(lsc -> ofNullable(lsc.getPassingRate()).orElse(0D))); ProductStatusStatisticsContent launchesStatisticsContent = new ProductStatusStatisticsContent(); 
launchesStatisticsContent.setTotalStatistics(total); Double roundedAveragePassingRate = BigDecimal.valueOf(averagePassingRate).setScale(2, RoundingMode.HALF_UP).doubleValue(); launchesStatisticsContent.setAveragePassingRate(roundedAveragePassingRate); return Lists.newArrayList(launchesStatisticsContent); } private List<Field<?>> buildFieldsFromContentFields(List<String> contentFields) { return contentFields.stream().map(JooqFieldNameTransformer::fieldName).collect(Collectors.toList()); } }
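/*
 * Connection-less sketch (not part of this commit) of the lag()-based "switch flag" used in
 * flakyCasesStatistics() above: a row counts toward flakiness when its status differs from
 * the previous status of the same test item. Table and column names ("results", "unique_id",
 * "item_id", "status") and the class name are placeholders chosen for illustration; only
 * jOOQ constructs already used in the repository code (when/otherwise, lag, over, orderBy)
 * are relied on.
 */
import static org.jooq.impl.DSL.*;

import org.jooq.DSLContext;
import org.jooq.Field;
import org.jooq.SQLDialect;

public class SwitchFlagSketch {
    public static void main(String[] args) {
        // No database connection is needed just to build and render the query.
        DSLContext dsl = using(SQLDialect.POSTGRES);

        Field<String> uniqueId = field(name("results", "unique_id"), String.class);
        Field<Long> itemId = field(name("results", "item_id"), Long.class);
        Field<String> status = field(name("results", "status"), String.class);

        // 1 when the status changed relative to the previous row of the same unique_id, else 0.
        Field<Integer> switchFlag =
                when(status.notEqual(lag(status).over(orderBy(uniqueId, itemId)))
                        .and(uniqueId.equal(lag(uniqueId).over(orderBy(uniqueId, itemId)))), 1)
                .otherwise(0)
                .as("switch_flag");

        System.out.println(dsl.select(uniqueId, status, switchFlag)
                .from(table(name("results")))
                .getSQL());
    }
}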
src/main/java/com/epam/ta/reportportal/dao/WidgetContentRepositoryImpl.java
/* * Copyright (C) 2018 EPAM Systems * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.epam.ta.reportportal.dao; import com.epam.ta.reportportal.commons.querygen.CriteriaHolder; import com.epam.ta.reportportal.commons.querygen.Filter; import com.epam.ta.reportportal.commons.querygen.QueryBuilder; import com.epam.ta.reportportal.commons.validation.Suppliers; import com.epam.ta.reportportal.dao.util.JooqFieldNameTransformer; import com.epam.ta.reportportal.entity.widget.content.*; import com.epam.ta.reportportal.exception.ReportPortalException; import com.epam.ta.reportportal.jooq.enums.JTestItemTypeEnum; import com.epam.ta.reportportal.ws.model.ErrorType; import com.google.common.collect.Lists; import org.jooq.*; import org.jooq.impl.DSL; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Sort; import org.springframework.stereotype.Repository; import javax.annotation.Nullable; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.*; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import static com.epam.ta.reportportal.commons.querygen.QueryBuilder.STATISTICS_KEY; import static com.epam.ta.reportportal.dao.constant.WidgetContentRepositoryConstants.*; import static com.epam.ta.reportportal.dao.util.JooqFieldNameTransformer.fieldName; import static com.epam.ta.reportportal.dao.util.WidgetContentUtil.*; import static com.epam.ta.reportportal.jooq.Tables.*; import static com.epam.ta.reportportal.jooq.tables.JActivity.ACTIVITY; import static com.epam.ta.reportportal.jooq.tables.JIssue.ISSUE; import static com.epam.ta.reportportal.jooq.tables.JIssueTicket.ISSUE_TICKET; import static com.epam.ta.reportportal.jooq.tables.JLaunch.LAUNCH; import static com.epam.ta.reportportal.jooq.tables.JProject.PROJECT; import static com.epam.ta.reportportal.jooq.tables.JTestItem.TEST_ITEM; import static com.epam.ta.reportportal.jooq.tables.JTestItemResults.TEST_ITEM_RESULTS; import static com.epam.ta.reportportal.jooq.tables.JTicket.TICKET; import static com.epam.ta.reportportal.jooq.tables.JUsers.USERS; import static java.util.Optional.ofNullable; import static java.util.stream.Collectors.*; import static org.jooq.impl.DSL.*; /** * Repository that contains queries of content loading for widgets. 
* * @author Pavel Bortnik */ @Repository public class WidgetContentRepositoryImpl implements WidgetContentRepository { @Autowired private DSLContext dsl; private static final List<JTestItemTypeEnum> HAS_METHOD_OR_CLASS = Arrays.stream(JTestItemTypeEnum.values()).filter(it -> { String name = it.name(); return name.contains("METHOD") || name.contains("CLASS"); }).collect(Collectors.toList()); @Override public OverallStatisticsContent overallStatisticsContent(Filter filter, Sort sort, List<String> contentFields, boolean latest, int limit) { return OVERALL_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).with(latest).build()) .select(STATISTICS_FIELD.NAME, sum(STATISTICS.S_COUNTER).as(SUM)) .from(STATISTICS) .join(LAUNCHES) .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .groupBy(STATISTICS_FIELD.NAME) .fetch()); } @Override public List<CriteriaHistoryItem> topItemsByCriteria(Filter filter, String criteria, int limit, boolean includeMethods) { return dsl.with(HISTORY) .as(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).build()) .select(TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME, DSL.arrayAgg(DSL.when(STATISTICS_FIELD.NAME.eq(criteria), "true").otherwise("false")) .orderBy(LAUNCH.NUMBER.asc()) .as(STATUS_HISTORY), DSL.arrayAgg(TEST_ITEM.START_TIME).orderBy(LAUNCH.NUMBER.asc()).as(START_TIME_HISTORY), DSL.sum(DSL.when(STATISTICS_FIELD.NAME.eq(criteria), 1).otherwise(ZERO_QUERY_VALUE)).as(CRITERIA), DSL.count(TEST_ITEM_RESULTS.STATUS).as(TOTAL) ) .from(LAUNCH) .join(TEST_ITEM) .on(LAUNCH.ID.eq(TEST_ITEM.LAUNCH_ID)) .join(TEST_ITEM_RESULTS) .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID)) .join(STATISTICS) .on(TEST_ITEM.ITEM_ID.eq(STATISTICS.ITEM_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(TEST_ITEM.TYPE.in(includeMethods ? 
Lists.newArrayList(HAS_METHOD_OR_CLASS, JTestItemTypeEnum.STEP) : Collections.singletonList(JTestItemTypeEnum.STEP))) .and(STATISTICS_FIELD.NAME.eq(criteria)) .and(TEST_ITEM.LAUNCH_ID.in(dsl.select(field(name(LAUNCHES, ID)).cast(Long.class)).from(name(LAUNCHES)))) .groupBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME)) .select() .from(DSL.table(DSL.name(HISTORY))) .where(DSL.field(DSL.name(CRITERIA)).greaterThan(ZERO_QUERY_VALUE)) .orderBy(DSL.field(DSL.name(CRITERIA)).desc(), DSL.field(DSL.name(TOTAL)).asc()) .limit(limit) .fetchInto(CriteriaHistoryItem.class); } @Override public List<FlakyCasesTableContent> flakyCasesStatistics(Filter filter, int limit) { Select commonSelect = dsl.select(field(name(LAUNCHES, ID)).cast(Long.class)) .from(name(LAUNCHES)) .orderBy(field(name(LAUNCHES, NUMBER)).desc()) .limit(limit); return dsl.select(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.UNIQUE_ID.getName())).as(UNIQUE_ID), field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.NAME.getName())).as(ITEM_NAME), DSL.arrayAgg(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM_RESULTS.STATUS.getName()))).as(STATUSES), sum(field(name(FLAKY_TABLE_RESULTS, SWITCH_FLAG)).cast(Long.class)).as(FLAKY_COUNT), sum(field(name(FLAKY_TABLE_RESULTS, TOTAL)).cast(Long.class)).as(TOTAL) ) .from(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(LAUNCH.NUMBER.desc()).build()) .select(TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME, TEST_ITEM_RESULTS.STATUS, when(TEST_ITEM_RESULTS.STATUS.notEqual(lag(TEST_ITEM_RESULTS.STATUS).over(orderBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.ITEM_ID ))) .and(TEST_ITEM.UNIQUE_ID.equal(lag(TEST_ITEM.UNIQUE_ID).over(orderBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.ITEM_ID )))), 1 ).otherwise(ZERO_QUERY_VALUE).as(SWITCH_FLAG), count(TEST_ITEM_RESULTS.STATUS).as(TOTAL) ) .from(LAUNCH) .join(TEST_ITEM) .on(LAUNCH.ID.eq(TEST_ITEM.LAUNCH_ID)) .join(TEST_ITEM_RESULTS) .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID)) .where(LAUNCH.ID.in(commonSelect)) .and(TEST_ITEM.TYPE.eq(JTestItemTypeEnum.STEP)) .groupBy(TEST_ITEM.ITEM_ID, TEST_ITEM_RESULTS.STATUS, TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME) .asTable(FLAKY_TABLE_RESULTS)) .groupBy(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.UNIQUE_ID.getName())), field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.NAME.getName())) ) .orderBy(fieldName(FLAKY_COUNT).desc(), fieldName(TOTAL).asc(), fieldName(UNIQUE_ID)) .limit(20) .fetchInto(FlakyCasesTableContent.class); } @Override public List<LaunchesStatisticsContent> launchStatistics(Filter filter, List<String> contentFields, @Nullable Sort sort, int limit) { return LAUNCHES_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.NAME, STATISTICS_FIELD.NAME, STATISTICS.S_COUNTER) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(contentFields)) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? 
SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .fetch()); } @Override public List<InvestigatedStatisticsResult> investigatedStatistics(Filter filter, Sort sort, int limit) { List<Field<?>> groupingFields = StreamSupport.stream(sort.spliterator(), false).map(s -> field(s.getProperty())).collect(toList()); Collections.addAll(groupingFields, LAUNCH.ID, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.NAME); return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.NAME, round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(DEFECTS_TO_INVESTIGATE_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField() .cast(Double.class)) .div(nullif(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.in(DEFECTS_AUTOMATION_BUG_TOTAL, DEFECTS_NO_DEFECT_TOTAL, DEFECTS_TO_INVESTIGATE_TOTAL, DEFECTS_PRODUCT_BUG_TOTAL, DEFECTS_SYSTEM_ISSUE_TOTAL ).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField(), 0)), 2).as(TO_INVESTIGATE) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .groupBy(groupingFields) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .fetch(INVESTIGATED_STATISTICS_CONTENT_RECORD_MAPPER); } @Override public PassingRateStatisticsResult passingRatePerLaunchStatistics(Filter filter, Sort sort, int limit) { List<Field<?>> groupingFields = StreamSupport.stream(sort.spliterator(), false).map(s -> field(s.getProperty())).collect(toList()); return buildPassingRateSelect(filter, sort, limit).groupBy(groupingFields) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .fetchInto(PassingRateStatisticsResult.class) .stream() .findFirst() .orElseThrow(() -> new ReportPortalException("No results for filter were found")); } @Override public PassingRateStatisticsResult summaryPassingRateStatistics(Filter filter, Sort sort, int limit) { return buildPassingRateSelect(filter, sort, limit).fetchInto(PassingRateStatisticsResult.class) .stream() .findFirst() .orElseThrow(() -> new ReportPortalException("No results for filter were found")); } @Override public List<CasesTrendContent> casesTrendStatistics(Filter filter, String contentField, Sort sort, int limit) { List<? extends SortField<?>> deltaCounterSort = ofNullable(sort).map(s -> StreamSupport.stream(s.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? 
SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())).orElseGet(Collections::emptyList); return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.NAME, STATISTICS.S_COUNTER.as(contentField), STATISTICS.S_COUNTER.sub(lag(STATISTICS.S_COUNTER).over().orderBy(deltaCounterSort)).as(DELTA) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.eq(contentField)) .orderBy(deltaCounterSort) .fetchInto(CasesTrendContent.class); } @Override public List<LaunchesStatisticsContent> bugTrendStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) { return BUG_TREND_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, STATISTICS_FIELD.NAME, STATISTICS.S_COUNTER) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(contentFields)) .fetch()); } @Override public List<LaunchesStatisticsContent> launchesComparisonStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) { List<String> executionStatisticsFields = contentFields.stream().filter(cf -> cf.contains(EXECUTIONS_KEY)).collect(toList()); List<String> defectStatisticsFields = contentFields.stream().filter(cf -> cf.contains(DEFECTS_KEY)).collect(toList()); return LAUNCHES_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, STATISTICS_FIELD.NAME, round(val(PERCENTAGE_MULTIPLIER).mul(STATISTICS.S_COUNTER) .div(nullif(DSL.select(DSL.sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)) .and(STATISTICS_FIELD.NAME.in(executionStatisticsFields)), 0).cast(Double.class)), 2 ).as("s_counter") ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(executionStatisticsFields)) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? 
SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .unionAll(DSL.select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, STATISTICS_FIELD.NAME, round(val(PERCENTAGE_MULTIPLIER).mul(STATISTICS.S_COUNTER) .div(nullif(DSL.select(DSL.sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)) .and(STATISTICS_FIELD.NAME.in(defectStatisticsFields)), 0).cast(Double.class)), 2) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(defectStatisticsFields)) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList()))) .fetch()); } @Override public List<LaunchesDurationContent> launchesDurationStatistics(Filter filter, Sort sort, boolean isLatest, int limit) { return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(isLatest).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.STATUS, LAUNCH.START_TIME, LAUNCH.END_TIME, timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .orderBy(StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())) .fetchInto(LaunchesDurationContent.class); } @Override public List<NotPassedCasesContent> notPassedCasesStatistics(Filter filter, Sort sort, int limit) { return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, STATISTICS.S_COUNTER, coalesce(round(val(PERCENTAGE_MULTIPLIER).mul(DSL.select(DSL.sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(EXECUTIONS_SKIPPED, EXECUTIONS_FAILED)) .and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)) .asField() .cast(Double.class)).div(nullif(STATISTICS.S_COUNTER, 0).cast(Double.class)), 2), 0).as(PERCENTAGE) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(EXECUTIONS_TOTAL)) .fetch(NOT_PASSED_CASES_CONTENT_RECORD_MAPPER); } @Override public List<LaunchesTableContent> launchesTableStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) { Map<String, String> criteria = filter.getTarget() .getCriteriaHolders() .stream() .collect(Collectors.toMap(CriteriaHolder::getFilterCriteria, CriteriaHolder::getQueryCriteria)); boolean remove = contentFields.remove("tags"); List<Field<?>> selectFields = contentFields.stream() .filter(cf -> !cf.startsWith(STATISTICS_KEY)) .map(cf -> field(ofNullable(criteria.get(cf)).orElseThrow(() -> new ReportPortalException(Suppliers.formattedSupplier( "Unknown table field - '{}'", cf ).get())))) .collect(Collectors.toList()); Collections.addAll(selectFields, LAUNCH.ID, fieldName(STATISTICS_TABLE, 
STATISTICS_COUNTER), fieldName(STATISTICS_TABLE, SF_NAME)); List<SortField<?>> orderFields = StreamSupport.stream(sort.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList()); List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList()); return LAUNCHES_TABLE_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(selectFields) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(statisticsFields)) .asTable(STATISTICS_TABLE)) .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class))) .join(USERS) .on(LAUNCH.USER_ID.eq(USERS.ID)) .orderBy(orderFields) .fetch(), contentFields); } @Override public List<ActivityContent> activityStatistics(Filter filter, Sort sort, int limit) { return dsl.with(ACTIVITIES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(ACTIVITY.ID.as(ID), ACTIVITY.ACTION.as(ACTION_TYPE), ACTIVITY.ENTITY.as(ENTITY), ACTIVITY.CREATION_DATE.as(LAST_MODIFIED), USERS.LOGIN.as(USER_LOGIN), PROJECT.NAME.as(PROJECT_NAME) ) .from(ACTIVITY) .join(ACTIVITIES) .on(fieldName(ACTIVITIES, ID).cast(Long.class).eq(ACTIVITY.ID)) .join(USERS) .on(ACTIVITY.USER_ID.eq(USERS.ID)) .join(PROJECT) .on(ACTIVITY.PROJECT_ID.eq(PROJECT.ID)) .fetchInto(ActivityContent.class); } @Override public Map<String, List<UniqueBugContent>> uniqueBugStatistics(Filter filter, Sort sort, boolean isLatest, int limit) { List<UniqueBugContent> uniqueBugContents = dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(limit).with(sort).with(isLatest).build()) .select(TICKET.TICKET_ID, TICKET.SUBMIT_DATE, TICKET.URL, TEST_ITEM.ITEM_ID, TEST_ITEM.NAME, TEST_ITEM.DESCRIPTION, TEST_ITEM.LAUNCH_ID, USERS.LOGIN ) .from(TEST_ITEM) .join(LAUNCHES) .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(TEST_ITEM.LAUNCH_ID)) .join(TEST_ITEM_RESULTS) .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID)) .leftJoin(ISSUE) .on(TEST_ITEM.ITEM_ID.eq(ISSUE.ISSUE_ID)) .leftJoin(ISSUE_TICKET) .on(ISSUE.ISSUE_ID.eq(ISSUE_TICKET.ISSUE_ID)) .join(TICKET) .on(ISSUE_TICKET.TICKET_ID.eq(TICKET.ID)) .join(USERS) .on(TICKET.SUBMITTER_ID.eq(USERS.ID)) .fetchInto(UniqueBugContent.class); return uniqueBugContents.stream().collect(groupingBy(UniqueBugContent::getTicketId, LinkedHashMap::new, toList())); } @Override public Map<String, List<CumulativeTrendChartContent>> cumulativeTrendStatistics(Filter filter, List<String> contentFields, Sort sort, String tagPrefix, int limit) { List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList()); return CUMULATIVE_TREND_CHART_FETCHER.apply(dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(LAUNCHES_COUNT).build()) .select(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, ITEM_ATTRIBUTE.ID, ITEM_ATTRIBUTE.VALUE, fieldName(STATISTICS_TABLE, STATISTICS_COUNTER), fieldName(STATISTICS_TABLE, SF_NAME) ) .from(LAUNCH) .join(LAUNCHES) .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(LAUNCH.ID)) .join(ITEM_ATTRIBUTE) .on(ITEM_ATTRIBUTE.LAUNCH_ID.eq(LAUNCH.ID)) 
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(statisticsFields)) .asTable(STATISTICS_TABLE)) .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class))) .orderBy(ofNullable(sort).map(s -> StreamSupport.stream(s.spliterator(), false) .map(order -> field(name(order.getProperty())).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())).orElseGet(Collections::emptyList)) .fetch()); } @Override public Map<String, List<ProductStatusStatisticsContent>> productStatusGroupedByFilterStatistics(Map<Filter, Sort> filterSortMapping, List<String> contentFields, List<String> tags, boolean isLatest, int limit) { Select<? extends Record> select = filterSortMapping.entrySet() .stream() .map(f -> (Select<? extends Record>) buildFilterGroupedQuery(f.getKey(), isLatest, f.getValue(), limit, contentFields, tags )) .collect(Collectors.toList()) .stream() .reduce((prev, curr) -> curr = prev.unionAll(curr)) .orElseThrow(() -> new ReportPortalException(ErrorType.BAD_REQUEST_ERROR, "Query build for Product Status Widget failed")); Map<String, List<ProductStatusStatisticsContent>> productStatusContent = PRODUCT_STATUS_FILTER_GROUPED_FETCHER.apply(select.fetch()); productStatusContent.put(TOTAL, countFilterTotalStatistics(productStatusContent)); return productStatusContent; } @Override public List<ProductStatusStatisticsContent> productStatusGroupedByLaunchesStatistics(Filter filter, List<String> contentFields, List<String> tags, Sort sort, boolean isLatest, int limit) { List<ProductStatusStatisticsContent> productStatusStatisticsResult = PRODUCT_STATUS_LAUNCH_GROUPED_FETCHER.apply( buildLaunchGroupedQuery(filter, isLatest, sort, limit, contentFields, tags).fetch()); productStatusStatisticsResult.add(countLaunchTotalStatistics(productStatusStatisticsResult)); return productStatusStatisticsResult; } @Override public List<MostTimeConsumingTestCasesContent> mostTimeConsumingTestCasesStatistics(Filter filter) { return dsl.with(ITEMS) .as(QueryBuilder.newBuilder(filter).build()) .select(TEST_ITEM.ITEM_ID.as(ID), TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME, TEST_ITEM.TYPE, TEST_ITEM.START_TIME, TEST_ITEM_RESULTS.END_TIME, TEST_ITEM_RESULTS.DURATION, TEST_ITEM_RESULTS.STATUS ) .from(TEST_ITEM) .join(ITEMS) .on(fieldName(ITEMS, ID).cast(Long.class).eq(TEST_ITEM.ITEM_ID)) .join(TEST_ITEM_RESULTS) .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID)) .orderBy(fieldName(TEST_ITEM_RESULTS.DURATION).desc()) .limit(20) .fetchInto(MostTimeConsumingTestCasesContent.class); } private List<SortField<Object>> buildSortFields(Sort sort) { return ofNullable(sort).map(s -> StreamSupport.stream(s.spliterator(), false) .map(order -> field(order.getProperty()).sort(order.getDirection().isDescending() ? SortOrder.DESC : SortOrder.ASC)) .collect(Collectors.toList())).orElseGet(Collections::emptyList); } private SelectOnConditionStep<? 
extends Record> buildPassingRateSelect(Filter filter, Sort sort, int limit) { return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(sort).with(limit).build()) .select(sum(when(STATISTICS_FIELD.NAME.eq(EXECUTIONS_PASSED), STATISTICS.S_COUNTER).otherwise(0)).as(PASSED), sum(when(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL), STATISTICS.S_COUNTER).otherwise(0)).as(TOTAL) ) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(STATISTICS) .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID)) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)); } private SelectSeekStepN<? extends Record> buildFilterGroupedQuery(Filter filter, boolean isLatest, Sort sort, int limit, Collection<String> contentFields, Collection<String> tags) { List<Field<?>> fields = Lists.newArrayList(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.STATUS, fieldName(STATISTICS_TABLE, SF_NAME), fieldName(STATISTICS_TABLE, STATISTICS_COUNTER), round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_PASSED).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField() .cast(Double.class)) .div(nullif(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField(), 0)), 2).as(PASSING_RATE), timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION), DSL.selectDistinct(FILTER.NAME).from(FILTER).where(FILTER.ID.eq(filter.getId())).asField(FILTER_NAME) ); return buildProductStatusQuery(filter, isLatest, sort, limit, fields, contentFields, tags).orderBy(buildSortFields(sort)); } private SelectSeekStepN<? extends Record> buildLaunchGroupedQuery(Filter filter, boolean isLatest, Sort sort, int limit, Collection<String> contentFields, Collection<String> tags) { List<Field<?>> fields = Lists.newArrayList(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, LAUNCH.START_TIME, LAUNCH.STATUS, fieldName(STATISTICS_TABLE, SF_NAME), fieldName(STATISTICS_TABLE, STATISTICS_COUNTER), round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_PASSED).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField() .cast(Double.class)) .div(nullif(dsl.select(sum(STATISTICS.S_COUNTER)) .from(STATISTICS) .join(STATISTICS_FIELD) .onKey() .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))) .asField(), 0)), 2).as(PASSING_RATE), timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION) ); return buildProductStatusQuery(filter, isLatest, sort, limit, fields, contentFields, tags).orderBy(buildSortFields(sort)); } private SelectOnConditionStep<? 
extends Record> buildProductStatusQuery(Filter filter, boolean isLatest, Sort sort, int limit, Collection<Field<?>> fields, Collection<String> contentFields, Collection<String> tags) { List<Condition> conditions = tags.stream() .map(cf -> ITEM_ATTRIBUTE.KEY.like(cf + LIKE_CONDITION_SYMBOL)) .collect(Collectors.toList()); Optional<Condition> combinedTagCondition = conditions.stream().reduce((prev, curr) -> curr = prev.or(curr)); List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList()); if (combinedTagCondition.isPresent()) { Collections.addAll(fields, fieldName(ATTRIBUTE_TABLE, ATTRIBUTE_ID), fieldName(ATTRIBUTE_TABLE, ATTRIBUTE_VALUE)); return getProductStatusSelect(filter, isLatest, sort, limit, fields, statisticsFields).leftJoin(DSL.select(ITEM_ATTRIBUTE.ID.as( ATTRIBUTE_ID), ITEM_ATTRIBUTE.VALUE.as(ATTRIBUTE_VALUE)) .from(ITEM_ATTRIBUTE) .where(combinedTagCondition.get()) .asTable(ATTRIBUTE_TABLE)).on(LAUNCH.ID.eq(fieldName(ATTRIBUTE_TABLE, ATTRIBUTE_ID).cast(Long.class))); } else { return getProductStatusSelect(filter, isLatest, sort, limit, fields, statisticsFields); } } private SelectOnConditionStep<? extends Record> getProductStatusSelect(Filter filter, boolean isLatest, Sort sort, int limit, Collection<Field<?>> fields, Collection<String> contentFields) { return dsl.with(LAUNCHES) .as(QueryBuilder.newBuilder(filter).with(isLatest).with(sort).with(limit).build()) .select(fields) .from(LAUNCH) .join(LAUNCHES) .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class))) .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME)) .from(STATISTICS) .join(STATISTICS_FIELD) .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID)) .where(STATISTICS_FIELD.NAME.in(contentFields)) .asTable(STATISTICS_TABLE)) .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class))); } private ProductStatusStatisticsContent countLaunchTotalStatistics(List<ProductStatusStatisticsContent> launchesStatisticsResult) { Map<String, Integer> total = launchesStatisticsResult.stream() .flatMap(lsc -> lsc.getValues().entrySet().stream()) .collect(Collectors.groupingBy(entry -> (entry.getKey()), summingInt(entry -> Integer.parseInt(entry.getValue())))); Double averagePassingRate = launchesStatisticsResult.stream() .collect(averagingDouble(lsc -> ofNullable(lsc.getPassingRate()).orElse(0D))); ProductStatusStatisticsContent launchesStatisticsContent = new ProductStatusStatisticsContent(); launchesStatisticsContent.setTotalStatistics(total); Double roundedAveragePassingRate = BigDecimal.valueOf(averagePassingRate).setScale(2, RoundingMode.HALF_UP).doubleValue(); launchesStatisticsContent.setAveragePassingRate(roundedAveragePassingRate); return launchesStatisticsContent; } private List<ProductStatusStatisticsContent> countFilterTotalStatistics( Map<String, List<ProductStatusStatisticsContent>> launchesStatisticsResult) { Map<String, Integer> total = launchesStatisticsResult.values() .stream() .flatMap(Collection::stream) .flatMap(lsc -> lsc.getValues().entrySet().stream()) .collect(Collectors.groupingBy(entry -> (entry.getKey()), summingInt(entry -> Integer.parseInt(entry.getValue())))); Double averagePassingRate = launchesStatisticsResult.values() .stream() .flatMap(Collection::stream) .collect(averagingDouble(lsc -> ofNullable(lsc.getPassingRate()).orElse(0D))); ProductStatusStatisticsContent launchesStatisticsContent = new ProductStatusStatisticsContent(); 
launchesStatisticsContent.setTotalStatistics(total); Double roundedAveragePassingRate = BigDecimal.valueOf(averagePassingRate).setScale(2, RoundingMode.HALF_UP).doubleValue(); launchesStatisticsContent.setAveragePassingRate(roundedAveragePassingRate); return Lists.newArrayList(launchesStatisticsContent); } private List<Field<?>> buildFieldsFromContentFields(List<String> contentFields) { return contentFields.stream().map(JooqFieldNameTransformer::fieldName).collect(Collectors.toList()); } }
overall statistics fix
src/main/java/com/epam/ta/reportportal/dao/WidgetContentRepositoryImpl.java
overall statistics fix
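The flaky-cases query in the repository record above relies on a lag() window comparison: a row contributes a SWITCH_FLAG of 1 when its status differs from the previous execution of the same unique test case, and the per-case sum of those flags becomes FLAKY_COUNT. A minimal plain-Java sketch of just that counting step, assuming the statuses are already grouped per unique_id and ordered by launch number as the SQL does (class and method names here are illustrative, not ReportPortal code):

import java.util.List;
import java.util.Map;

// Minimal sketch: counts how often a test case changes status between consecutive
// executions, mirroring the lag()-based SWITCH_FLAG in the flaky-cases query above.
public class FlakySwitchCounter {

    // statuses must already be ordered oldest-to-newest, as the SQL orders by launch number
    static long countStatusSwitches(List<String> statuses) {
        long switches = 0;
        for (int i = 1; i < statuses.size(); i++) {
            if (!statuses.get(i).equals(statuses.get(i - 1))) {
                switches++; // corresponds to SWITCH_FLAG = 1 for this row
            }
        }
        return switches;
    }

    public static void main(String[] args) {
        // PASSED -> FAILED -> PASSED -> PASSED yields two switches over four runs
        Map<String, List<String>> historyByUniqueId = Map.of(
                "auto:case-1", List.of("PASSED", "FAILED", "PASSED", "PASSED"),
                "auto:case-2", List.of("PASSED", "PASSED", "PASSED"));
        historyByUniqueId.forEach((uniqueId, statuses) ->
                System.out.println(uniqueId + ": " + countStatusSwitches(statuses)
                        + " switches in " + statuses.size() + " runs"));
    }
}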
Java
apache-2.0
155c4b14c846141f71dd617ff8d82796da014c0a
0
rouazana/james,rouazana/james,rouazana/james,aduprat/james,aduprat/james,aduprat/james,chibenwa/james,chibenwa/james,chibenwa/james,aduprat/james,rouazana/james,chibenwa/james
/* ==================================================================== * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2003 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Apache", "Jakarta", "JAMES" and "Apache Software Foundation" * must not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * * Portions of this software are based upon public domain software * originally written at the National Center for Supercomputing Applications, * University of Illinois, Urbana-Champaign. */ package org.apache.james.transport.mailets; import org.apache.james.util.RFC2822Headers; import org.apache.mailet.GenericMailet; import org.apache.mailet.Mail; import org.apache.mailet.MailAddress; import org.apache.mailet.MailetException; import javax.mail.MessagingException; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; import java.io.IOException; import java.util.Collection; import java.util.Vector; /** * An abstract implementation of a listserv. The underlying implementation must define * various settings, and can vary in their individual configuration. Supports restricting * to members only, allowing attachments or not, sending replies back to the list, and an * optional subject prefix. 
*/ public abstract class GenericListserv extends GenericMailet { /** * Returns a Collection of MailAddress objects of members to receive this email */ public abstract Collection getMembers() throws MessagingException; /** * Returns whether this list should restrict to senders only */ public abstract boolean isMembersOnly() throws MessagingException; /** * Returns whether this listserv allow attachments */ public abstract boolean isAttachmentsAllowed() throws MessagingException; /** * Returns whether listserv should add reply-to header */ public abstract boolean isReplyToList() throws MessagingException; /** * The email address that this listserv processes on. If returns null, will use the * recipient of the message, which hopefully will be the correct email address assuming * the matcher was properly specified. */ public MailAddress getListservAddress() throws MessagingException { return null; } /** * An optional subject prefix. */ public abstract String getSubjectPrefix() throws MessagingException; /** * Should the subject prefix be automatically surrounded by []. * * @return whether the subject prefix will be surrounded by [] * * @throws MessagingException never, for this implementation */ public boolean isPrefixAutoBracketed() throws MessagingException { return true; // preserve old behavior unless subclass overrides. } /** * <p>This takes the subject string and reduces (normailzes) it. * Multiple "Re:" entries are reduced to one, and capitalized. The * prefix is always moved/placed at the beginning of the line, and * extra blanks are reduced, so that the output is always of the * form:</p> * <code> * &lt;prefix&gt; + &lt;one-optional-"Re:"*gt; + &lt;remaining subject&gt; * </code> * <p>I have done extensive testing of this routine with a standalone * driver, and am leaving the commented out debug messages so that * when someone decides to enhance this method, it can be yanked it * from this file, embedded it with a test driver, and the comments * enabled.</p> */ static private String normalizeSubject(final String subj, final String prefix) { // JDK IMPLEMENTATION NOTE! When we require JDK 1.4+, all // occurrences of subject.toString.().indexOf(...) can be // replaced by subject.indexOf(...). 
StringBuffer subject = new StringBuffer(subj); int prefixLength = prefix.length(); // System.err.println("In: " + subject); // If the "prefix" is not at the beginning the subject line, remove it int index = subject.toString().indexOf(prefix); if (index != 0) { // System.err.println("(p) index: " + index + ", subject: " + subject); if (index > 0) { subject.delete(index, index + prefixLength); } subject.insert(0, prefix); // insert prefix at the front } // Replace Re: with RE: String match = "Re:"; index = subject.toString().indexOf(match, prefixLength); while(index > -1) { // System.err.println("(a) index: " + index + ", subject: " + subject); subject.replace(index, index + match.length(), "RE:"); index = subject.toString().indexOf(match, prefixLength); // System.err.println("(b) index: " + index + ", subject: " + subject); } // Reduce them to one at the beginning match ="RE:"; int indexRE = subject.toString().indexOf(match, prefixLength) + match.length(); index = subject.toString().indexOf(match, indexRE); while(index > 0) { // System.err.println("(c) index: " + index + ", subject: " + subject); subject.delete(index, index + match.length()); index = subject.toString().indexOf(match, indexRE); // System.err.println("(d) index: " + index + ", subject: " + subject); } // Reduce blanks match = " "; index = subject.toString().indexOf(match, prefixLength); while(index > -1) { // System.err.println("(e) index: " + index + ", subject: " + subject); subject.replace(index, index + match.length(), " "); index = subject.toString().indexOf(match, prefixLength); // System.err.println("(f) index: " + index + ", subject: " + subject); } // System.err.println("Out: " + subject); return subject.toString(); } /** * It attempts to determine the charset used to encode an "unstructured" * RFC 822 header (like Subject). The encoding is specified in RFC 2047. * If it cannot determine or the the text is not encoded then it returns null. * * Under Java 1.4 it further checks if the encoding is supported under the * current runtime environment. * * In some cases it returns UTF-8 as a fallback charset. This is not * an official MIME standard yet, and most importantly not all email client * support it, but it is likely better then the server default. * * Here is an example raw text: * Subject: =?iso-8859-2?Q?leg=FAjabb_pr=F3ba_l=F5elemmel?= * * Possible enhancement: under java 1.4 java.nio the system can determine if the * suggested charset fits or not (if there is untranslatable * characters). If the charset doesn't fit the new value, it * can fall back to UTF-8. * * @param rawText the raw (not decoded) value of the header * @return the java charset name or null if no encoding applied */ static private String determineMailHeaderEncodingCharset(String rawText) { int iEncodingPrefix = rawText.indexOf("=?"); if (iEncodingPrefix == -1) return null; int iCharsetBegin = iEncodingPrefix + 2; int iSecondQuestionMark = rawText.indexOf('?', iCharsetBegin); if (iSecondQuestionMark == -1) return null; // safety checks if (iSecondQuestionMark == iCharsetBegin) return null; // empty charset? 
impossible int iThirdQuestionMark = rawText.indexOf('?', iSecondQuestionMark + 1); if (iThirdQuestionMark == -1) return null; // there must be one after encoding if (-1 == rawText.indexOf("?=", iThirdQuestionMark + 1)) return null; // closing tag String mimeCharset = rawText.substring(iCharsetBegin, iSecondQuestionMark); String javaCharset = javax.mail.internet.MimeUtility.javaCharset(mimeCharset); // using reflection for a JRE 1.4 function if (charsetIsSupportedMethod == null) return javaCharset; // pre 1.4 runtime try { String[] arguments = { javaCharset }; Boolean isSupported = (Boolean)charsetIsSupportedMethod.invoke(null, arguments); if (isSupported.booleanValue()) return javaCharset; else // UTF-8 must be supported by every JRE, and it is better then server default, // even if a few clients don't support it yet. // I use UTF-8 instead of UTF8 because there is no java-MIME mapping, // and official MIME code yet, so this will be directly used as a MIME // code, and it is the quasi-standard MIME code (OE uses this). return "UTF-8"; } catch (java.lang.reflect.InvocationTargetException e) { // it was thrown by Charset.isSupported, illegal charset name return "UTF-8"; } catch (Exception e) { // impossible return javaCharset; } } /** * JRE 1.4 specific method, java.nio.charset.Charset.isSupported(String). * This field is initialized by the static initialization block and * is used by the determineMailHeaderEncodingCharset method. * James doesn't require JRE 1.4 so we must use reflection. */ static private java.lang.reflect.Method charsetIsSupportedMethod; /** * class initialization, it initializes the charsetIsSupportedMethod member */ static { try { Class charsetClass = Class.forName("java.nio.charset.Charset"); Class[] parameterTypes = { String.class }; charsetIsSupportedMethod = charsetClass.getMethod("isSupported", parameterTypes); } catch (Exception e) { charsetIsSupportedMethod = null; // pre 1.4 runtime } } /** * Processes the message. Assumes it is the only recipient of this forked message. */ public final void service(Mail mail) throws MessagingException { try { Collection members = getMembers(); //Check for members only flag.... if (isMembersOnly() && !members.contains(mail.getSender())) { //Need to bounce the message to say they can't send to this list getMailetContext().bounce(mail, "Only members of this listserv are allowed to send a message to this address."); mail.setState(Mail.GHOST); return; } //Check for no attachments if (!isAttachmentsAllowed() && mail.getMessage().getContent() instanceof MimeMultipart) { getMailetContext().bounce(mail, "You cannot send attachments to this listserv."); mail.setState(Mail.GHOST); return; } //Create a copy of this message to send out MimeMessage message = new MimeMessage(mail.getMessage()); //We need to remove this header from the copy we're sending around message.removeHeader(RFC2822Headers.RETURN_PATH); //Figure out the listserv address. MailAddress listservAddr = getListservAddress(); if (listservAddr == null) { //Use the recipient listservAddr = (MailAddress)mail.getRecipients().iterator().next(); } //Check if the X-been-there header is set to the listserv's name // (the address). 
If it has, this means it's a message from this // listserv that's getting bounced back, so we need to swallow it if (listservAddr.equals(message.getHeader("X-been-there"))) { mail.setState(Mail.GHOST); return; } //Set the subject if set String prefix = getSubjectPrefix(); if (prefix != null) { if (isPrefixAutoBracketed()) { StringBuffer prefixBuffer = new StringBuffer(64) .append("[") .append(prefix) .append("] "); prefix = prefixBuffer.toString(); } String rawSubject = message.getHeader(RFC2822Headers.SUBJECT, null); String charset = determineMailHeaderEncodingCharset(rawSubject); String subj = message.getSubject(); if (subj == null) { subj = ""; } subj = normalizeSubject(subj, prefix); try { message.setSubject(subj, charset); } catch (MessagingException e) { // known, but unsupported encoding if (charset != null) log(charset + " charset unsupported by the JRE, email subject may be damaged"); message.setSubject(subj); // recover } } //If replies should go to this list, we need to set the header if (isReplyToList()) { message.setHeader(RFC2822Headers.REPLY_TO, listservAddr.toString()); } //We're going to set this special header to avoid bounces // getting sent back out to the list message.setHeader("X-been-there", listservAddr.toString()); //Send the message to the list members //We set the postmaster as the sender for now so bounces go to him/her getMailetContext().sendMail(getMailetContext().getPostmaster(), members, message); //Kill the old message mail.setState(Mail.GHOST); } catch (IOException ioe) { throw new MailetException("Error creating listserv message", ioe); } } }
branches/branch_2_1_fcs/src/java/org/apache/james/transport/mailets/GenericListserv.java
/* ==================================================================== * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2003 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Apache", "Jakarta", "JAMES" and "Apache Software Foundation" * must not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * * Portions of this software are based upon public domain software * originally written at the National Center for Supercomputing Applications, * University of Illinois, Urbana-Champaign. */ package org.apache.james.transport.mailets; import org.apache.james.util.RFC2822Headers; import org.apache.mailet.GenericMailet; import org.apache.mailet.Mail; import org.apache.mailet.MailAddress; import org.apache.mailet.MailetException; import javax.mail.MessagingException; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; import java.io.IOException; import java.util.Collection; import java.util.Vector; /** * An abstract implementation of a listserv. The underlying implementation must define * various settings, and can vary in their individual configuration. Supports restricting * to members only, allowing attachments or not, sending replies back to the list, and an * optional subject prefix. 
*/ public abstract class GenericListserv extends GenericMailet { /** * Returns a Collection of MailAddress objects of members to receive this email */ public abstract Collection getMembers() throws MessagingException; /** * Returns whether this list should restrict to senders only */ public abstract boolean isMembersOnly() throws MessagingException; /** * Returns whether this listserv allow attachments */ public abstract boolean isAttachmentsAllowed() throws MessagingException; /** * Returns whether listserv should add reply-to header */ public abstract boolean isReplyToList() throws MessagingException; /** * The email address that this listserv processes on. If returns null, will use the * recipient of the message, which hopefully will be the correct email address assuming * the matcher was properly specified. */ public MailAddress getListservAddress() throws MessagingException { return null; } /** * An optional subject prefix. */ public abstract String getSubjectPrefix() throws MessagingException; /** * Should the subject prefix be automatically surrounded by []. * * @return whether the subject prefix will be surrounded by [] * * @throws MessagingException never, for this implementation */ public boolean isPrefixAutoBracketed() throws MessagingException { return true; // preserve old behavior unless subclass overrides. } /** * <p>This takes the subject string and reduces (normailzes) it. * Multiple "Re:" entries are reduced to one, and capitalized. The * prefix is always moved/placed at the beginning of the line, and * extra blanks are reduced, so that the output is always of the * form:</p> * <code> * &lt;prefix&gt; + &lt;one-optional-"Re:"*gt; + &lt;remaining subject&gt; * </code> * <p>I have done extensive testing of this routine with a standalone * driver, and am leaving the commented out debug messages so that * when someone decides to enhance this method, it can be yanked it * from this file, embedded it with a test driver, and the comments * enabled.</p> */ static private String normalizeSubject(final String subj, final String prefix) { // JDK IMPLEMENTATION NOTE! When we require JDK 1.4+, all // occurrences of subject.toString.().indexOf(...) can be // replaced by subject.indexOf(...). 
StringBuffer subject = new StringBuffer(subj); int prefixLength = prefix.length(); // System.err.println("In: " + subject); // If the "prefix" is not at the beginning the subject line, remove it int index = subject.toString().indexOf(prefix); if (index != 0) { // System.err.println("(p) index: " + index + ", subject: " + subject); if (index > 0) { subject.delete(index, index + prefixLength); } subject.insert(0, prefix); // insert prefix at the front } // Replace Re: with RE: String match = "Re:"; index = subject.toString().indexOf(match, prefixLength); while(index > -1) { // System.err.println("(a) index: " + index + ", subject: " + subject); subject.replace(index, index + match.length(), "RE:"); index = subject.toString().indexOf(match, prefixLength); // System.err.println("(b) index: " + index + ", subject: " + subject); } // Reduce them to one at the beginning match ="RE:"; int indexRE = subject.toString().indexOf(match, prefixLength) + match.length(); index = subject.toString().indexOf(match, indexRE); while(index > 0) { // System.err.println("(c) index: " + index + ", subject: " + subject); subject.delete(index, index + match.length()); index = subject.toString().indexOf(match, indexRE); // System.err.println("(d) index: " + index + ", subject: " + subject); } // Reduce blanks match = " "; index = subject.toString().indexOf(match, prefixLength); while(index > -1) { // System.err.println("(e) index: " + index + ", subject: " + subject); subject.replace(index, index + match.length(), " "); index = subject.toString().indexOf(match, prefixLength); // System.err.println("(f) index: " + index + ", subject: " + subject); } // System.err.println("Out: " + subject); return subject.toString(); } /** * It attempts to determine the charset used to encode an "unstructured" * RFC 822 header (like Subject). The encoding is specified in RFC 2047. * If it cannot determine or the the text is not encoded then it returns null. * * Under Java 1.4 it further checks if the encoding is supported under the * current runtime environment. * * In some cases it returns UTF-8 as a fallback charset. This is not * an official MIME standard yet, and most importantly not all email client * support it, but it is likely better then the server default. * * Here is an example raw text: * Subject: =?iso-8859-2?Q?leg=FAjabb_pr=F3ba_l=F5elemmel?= * * Possible enhancement: under java 1.4 java.nio the system can determine if the * suggested charset fits or not (if there is untranslatable * characters). If the charset doesn't fit the new value, it * can fall back to UTF-8. * * @param rawText the raw (not decoded) value of the header * @return the java charset name or null if no encoding applied */ static private String determineMailHeaderEncodingCharset(String rawText) { int iEncodingPrefix = rawText.indexOf("=?"); if (iEncodingPrefix == -1) return null; int iCharsetBegin = iEncodingPrefix + 2; int iSecondQuestionMark = rawText.indexOf('?', iCharsetBegin); if (iSecondQuestionMark == -1) return null; // safety checks if (iSecondQuestionMark == iCharsetBegin) return null; // empty charset? 
impossible int iThirdQuestionMark = rawText.indexOf('?', iSecondQuestionMark + 1); if (iThirdQuestionMark == -1) return null; // there must be one after encoding if (-1 == rawText.indexOf("?=", iThirdQuestionMark + 1)) return null; // closing tag String mimeCharset = rawText.substring(iCharsetBegin, iSecondQuestionMark); String javaCharset = javax.mail.internet.MimeUtility.javaCharset(mimeCharset); // using reflection for a JRE 1.4 function if (charsetIsSupportedMethod == null) return javaCharset; // pre 1.4 runtime try { String[] arguments = { javaCharset }; Boolean isSupported = (Boolean)charsetIsSupportedMethod.invoke(null, arguments); if (isSupported.booleanValue()) return javaCharset; else // UTF-8 must be supported by every JRE, and it is better then server default, // even if a few clients don't support it yet. // I use UTF-8 instead of UTF8 because there is no java-MIME mapping, // and official MIME code yet, so this will be directly used as a MIME // code, and it is the quasi-standard MIME code (OE uses this). return "UTF-8"; } catch (java.lang.reflect.InvocationTargetException e) { // it was thrown by Charset.isSupported, illegal charset name return "UTF-8"; } catch (Exception e) { // impossible return javaCharset; } } /** * JRE 1.4 specific method, java.nio.charset.Charset.isSupported(String). * This field is initialized by the static initialization block and * is used by the determineMailHeaderEncodingCharset method. * James doesn't require JRE 1.4 so we must use reflection. */ static private java.lang.reflect.Method charsetIsSupportedMethod; /** * class initialization, it initializes the charsetIsSupportedMethod member */ static { try { Class charsetClass = Class.forName("java.nio.charset.Charset"); Class[] parameterTypes = { String.class }; charsetIsSupportedMethod = charsetClass.getMethod("isSupported", parameterTypes); } catch (Exception e) { charsetIsSupportedMethod = null; // pre 1.4 runtime } } /** * Processes the message. Assumes it is the only recipient of this forked message. */ public final void service(Mail mail) throws MessagingException { try { Collection members = new Vector(); members.addAll(getMembers()); //Check for members only flag.... if (isMembersOnly() && !members.contains(mail.getSender())) { //Need to bounce the message to say they can't send to this list getMailetContext().bounce(mail, "Only members of this listserv are allowed to send a message to this address."); mail.setState(Mail.GHOST); return; } //Check for no attachments if (!isAttachmentsAllowed() && mail.getMessage().getContent() instanceof MimeMultipart) { getMailetContext().bounce(mail, "You cannot send attachments to this listserv."); mail.setState(Mail.GHOST); return; } //Create a copy of this message to send out MimeMessage message = new MimeMessage(mail.getMessage()); //We need to remove this header from the copy we're sending around message.removeHeader(RFC2822Headers.RETURN_PATH); //Figure out the listserv address. MailAddress listservAddr = getListservAddress(); if (listservAddr == null) { //Use the recipient listservAddr = (MailAddress)mail.getRecipients().iterator().next(); } //Check if the X-been-there header is set to the listserv's name // (the address). 
If it has, this means it's a message from this // listserv that's getting bounced back, so we need to swallow it if (listservAddr.equals(message.getHeader("X-been-there"))) { mail.setState(Mail.GHOST); return; } //Set the subject if set String prefix = getSubjectPrefix(); if (prefix != null) { if (isPrefixAutoBracketed()) { StringBuffer prefixBuffer = new StringBuffer(64) .append("[") .append(prefix) .append("] "); prefix = prefixBuffer.toString(); } String rawSubject = message.getHeader(RFC2822Headers.SUBJECT, null); String charset = determineMailHeaderEncodingCharset(rawSubject); String subj = message.getSubject(); if (subj == null) { subj = ""; } subj = normalizeSubject(subj, prefix); try { message.setSubject(subj, charset); } catch (MessagingException e) { // known, but unsupported encoding if (charset != null) log(charset + " charset unsupported by the JRE, email subject may be damaged"); message.setSubject(subj); // recover } } //If replies should go to this list, we need to set the header if (isReplyToList()) { message.setHeader(RFC2822Headers.REPLY_TO, listservAddr.toString()); } //We're going to set this special header to avoid bounces // getting sent back out to the list message.setHeader("X-been-there", listservAddr.toString()); //Send the message to the list members //We set the postmaster as the sender for now so bounces go to him/her getMailetContext().sendMail(getMailetContext().getPostmaster(), members, message); //Kill the old message mail.setState(Mail.GHOST); } catch (IOException ioe) { throw new MailetException("Error creating listserv message", ioe); } } }
Remove redundant Collection copy git-svn-id: 88158f914d5603334254b4adf21dfd50ec107162@108723 13f79535-47bb-0310-9956-ffa450edef68
branches/branch_2_1_fcs/src/java/org/apache/james/transport/mailets/GenericListserv.java
Remove redundant Collection copy
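The GenericListserv record above documents how the mailet guesses the charset of an RFC 2047 encoded Subject header before re-setting the subject. A minimal sketch of the same idea on a modern JRE, assuming only java.nio; the real determineMailHeaderEncodingCharset additionally validates the second and third '?' and the closing '?=', maps MIME charset names to Java names via MimeUtility, and reaches Charset.isSupported through reflection to stay compatible with pre-1.4 runtimes. Class and method names here are illustrative:

import java.nio.charset.Charset;

// Minimal sketch (not the James implementation): pulls the charset token out of an
// RFC 2047 encoded-word such as "=?iso-8859-2?Q?leg=FAjabb_pr=F3ba_l=F5elemmel?="
// and falls back to UTF-8 when the JRE does not support the named charset.
public class EncodedWordCharset {

    static String charsetOf(String rawHeader) {
        int start = rawHeader.indexOf("=?");
        if (start == -1) {
            return null;                       // header is not RFC 2047 encoded
        }
        int charsetBegin = start + 2;
        int charsetEnd = rawHeader.indexOf('?', charsetBegin);
        if (charsetEnd <= charsetBegin) {
            return null;                       // malformed or empty charset token
        }
        String charset = rawHeader.substring(charsetBegin, charsetEnd);
        try {
            return Charset.isSupported(charset) ? charset : "UTF-8";
        } catch (IllegalArgumentException illegalName) {
            return "UTF-8";                    // illegal charset name, recover with UTF-8
        }
    }

    public static void main(String[] args) {
        System.out.println(charsetOf("=?iso-8859-2?Q?leg=FAjabb_pr=F3ba_l=F5elemmel?="));
        System.out.println(charsetOf("plain, unencoded subject"));
    }
}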
Java
apache-2.0
a9b4b72a0b249edb269507f079701263f5ce55a4
0
micrometer-metrics/micrometer,micrometer-metrics/micrometer,micrometer-metrics/micrometer
/* * Copyright 2022 VMware, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.micrometer.observation.transport; import io.micrometer.common.lang.Nullable; /** * Inspired by OpenZipkin Brave and OpenTelemetry. Most of the documentation is taken * directly from OpenTelemetry. * * Injects and extracts a value as text into carriers that travel in-band across process * boundaries. Encoding is expected to conform to the HTTP Header Field semantics. Values * are often encoded as RPC/HTTP request headers. * * @author OpenZipkin Brave Authors * @author OpenTelemetry Authors * @author Marcin Grzejszczak * @since 1.10.0 */ public interface Propagator { /** * Class that allows to set propagated fields into a carrier. * * <p> * {@code Setter} is stateless and allows to be saved as a constant to avoid runtime * allocations. * * @param <C> carrier of propagation fields, such as an http request * @since 1.10.0 */ interface Setter<C> { /** * Replaces a propagated field with the given value. * * <p> * For example, a setter for an {@link java.net.HttpURLConnection} would be the * method reference * {@link java.net.HttpURLConnection#addRequestProperty(String, String)} * @param carrier holds propagation fields. For example, an outgoing message or * http request. To facilitate implementations as java lambdas, this parameter may * be null. * @param key the key of the field. * @param value the value of the field. */ void set(@Nullable C carrier, String key, String value); } /** * Interface that allows to read propagated fields from a carrier. * * <p> * {@code Getter} is stateless and allows to be saved as a constant to avoid runtime * allocations. * * @param <C> carrier of propagation fields, such as an http request. * @since 1.10.0 */ interface Getter<C> { /** * Returns the first value of the given propagation {@code key} or returns * {@code null}. * @param carrier carrier of propagation fields, such as an http request. * @param key the key of the field. * @return the first value of the given propagation {@code key} or returns * {@code null}. */ @Nullable String get(C carrier, String key); } }
micrometer-observation/src/main/java/io/micrometer/observation/transport/Propagator.java
/* * Copyright 2022 VMware, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.micrometer.observation.transport; import io.micrometer.common.lang.Nullable; /** * Inspired by OpenZipkin Brave and OpenTelemetry. Most of the documentation is taken * directly from OpenTelemetry. * * Injects and extracts a value as text into carriers that travel in-band across process * boundaries. Encoding is expected to conform to the HTTP Header Field semantics. Values * are often encoded as RPC/HTTP request headers. * * @author OpenZipkin Brave Authors * @author OpenTelemetry Authors * @author Marcin Grzejszczak * @since 1.0.0 */ public interface Propagator { /** * Class that allows to set propagated fields into a carrier. * * <p> * {@code Setter} is stateless and allows to be saved as a constant to avoid runtime * allocations. * * @param <C> carrier of propagation fields, such as an http request * @since 1.0.0 */ interface Setter<C> { /** * Replaces a propagated field with the given value. * * <p> * For example, a setter for an {@link java.net.HttpURLConnection} would be the * method reference * {@link java.net.HttpURLConnection#addRequestProperty(String, String)} * @param carrier holds propagation fields. For example, an outgoing message or * http request. To facilitate implementations as java lambdas, this parameter may * be null. * @param key the key of the field. * @param value the value of the field. */ void set(@Nullable C carrier, String key, String value); } /** * Interface that allows to read propagated fields from a carrier. * * <p> * {@code Getter} is stateless and allows to be saved as a constant to avoid runtime * allocations. * * @param <C> carrier of propagation fields, such as an http request. * @since 1.0.0 */ interface Getter<C> { /** * Returns the first value of the given propagation {@code key} or returns * {@code null}. * @param carrier carrier of propagation fields, such as an http request. * @param key the key of the field. * @return the first value of the given propagation {@code key} or returns * {@code null}. */ @Nullable String get(C carrier, String key); } }
Polish
micrometer-observation/src/main/java/io/micrometer/observation/transport/Propagator.java
Polish
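The only difference between the new and old contents of Propagator.java in this record is documentation: the Javadoc @since tags on the Propagator interface and on its nested Setter and Getter types move from 1.0.0 to 1.10.0. A minimal compilable sketch of the corrected tags follows; the type is a stand-in named PropagatorSketch, since the real interface lives in io.micrometer.observation.transport and also carries the @Nullable annotations omitted here for brevity.

    /**
     * Sketch of this record's diff: only the "@since" tags change, from 1.0.0 to 1.10.0;
     * no method signatures are touched.
     *
     * @since 1.10.0
     */
    interface PropagatorSketch {

        /** @since 1.10.0 (was 1.0.0 in the old contents) */
        interface Setter<C> {
            void set(C carrier, String key, String value);
        }

        /** @since 1.10.0 (was 1.0.0 in the old contents) */
        interface Getter<C> {
            String get(C carrier, String key);
        }
    }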
Java
apache-2.0
508c6640cfc664297d50d971259f70bdf32573cf
0
salguarnieri/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,amith01994/intellij-community,asedunov/intellij-community,izonder/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,dslomov/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,dslomov/intellij-community,slisson/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,retomerz/intellij-community,caot/intellij-community,gnuhub/intellij-community,caot/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,vladmm/intellij-community,da1z/intellij-community,diorcety/intellij-community,da1z/intellij-community,orekyuu/intellij-community,supersven/intellij-community,fitermay/intellij-community,samthor/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,consulo/consulo,tmpgit/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,jagguli/intellij-community,kool79/intellij-community,xfournet/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,apixandru/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,caot/intellij-community,apixandru/intellij-community,xfournet/intellij-community,clumsy/intellij-community,supersven/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,vladmm/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,da1z/intellij-community,signed/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,holmes/intellij-community,signed/intellij-community,dslomov/intellij-community,kool79/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,ibin
ti/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,diorcety/intellij-community,apixandru/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,vladmm/intellij-community,caot/intellij-community,clumsy/intellij-community,semonte/intellij-community,adedayo/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,kdwink/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,amith01994/intellij-community,FHannes/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,clumsy/intellij-community,asedunov/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,fnouama/intellij-community,semonte/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,ftomassetti/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,caot/intellij-community,amith01994/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,fitermay/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,retomerz/intellij-community,ibinti/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,xfournet/intellij-community,signed/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,asedunov/intellij-community,FHannes/intellij-community,ernestp/consulo,ivan-fedorov/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,consulo/consulo,slisson/intellij-community,akosyakov/intellij-community,consulo/consulo,suncycheng/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,consulo/consulo,jagguli/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,retomerz/intellij-community,ernestp/consulo,tmpgit/intellij-community,ryano144/intellij-community,diorcety/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-commu
nity,semonte/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,retomerz/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,da1z/intellij-community,izonder/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,caot/intellij-community,dslomov/intellij-community,petteyg/intellij-community,semonte/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,signed/intellij-community,petteyg/intellij-community,signed/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,xfournet/intellij-community,kool79/intellij-community,Lekanich/intellij-community,allotria/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,blademainer/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,ahb0327/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,ernestp/consulo,muntasirsyed/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,caot/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,xfournet/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,amith01994/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,ahb0327/intellij-community,caot/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,ernestp/consulo,semonte/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,kdwink/intellij-community,da1z/intellij-co
mmunity,orekyuu/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,semonte/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,dslomov/intellij-community,slisson/intellij-community,fnouama/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,robovm/robovm-studio,fnouama/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,holmes/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,dslomov/intellij-community,fitermay/intellij-community,samthor/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,consulo/consulo,petteyg/intellij-community,supersven/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,izonder/intellij-community,vladmm/intellij-community,ryano144/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,semonte/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,jagguli/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,izonder/intellij-community,consulo/consulo,muntasirsyed/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,semonte/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,kdwink/intellij-community,diorcety/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,kool79/intellij-community,vladmm/intellij-community,holmes/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,slisson/intell
ij-community,mglukhikh/intellij-community,kdwink/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,allotria/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,da1z/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,amith01994/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,slisson/intellij-community,ibinti/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,kool79/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,ernestp/consulo,idea4bsd/idea4bsd,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,amith01994/intellij-community,holmes/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,izonder/intellij-community,caot/intellij-community,akosyakov/intellij-community,samthor/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,robovm/robovm-studio,robovm/robovm-studio,vvv1559/intellij-community,gnuhub/intellij-community,semonte/intellij-community,ernestp/consulo,slisson/intellij-community,ahb0327/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,signed/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,holmes/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,apixandru/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,Lekanich/i
ntellij-community,slisson/intellij-community,semonte/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,kool79/intellij-community,supersven/intellij-community,gnuhub/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,vladmm/intellij-community,holmes/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,caot/intellij-community,kool79/intellij-community,vladmm/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,retomerz/intellij-community,asedunov/intellij-community,signed/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,hurricup/intellij-community,signed/intellij-community,da1z/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,allotria/intellij-community,vladmm/intellij-community,supersven/intellij-community,asedunov/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,FHannes/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,dslomov/intellij-community,allotria/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,apixandru/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,kdwink/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,signed/intellij-community,kool79/intellij-community,Distrotech/intellij-community,samthor/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,robovm/robovm-studio,Distrotech/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,caot/intellij-community,kdwink/intellij-community,petteyg/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,adedayo/intellij-community,samthor/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,supersven/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,caot/intellij-community,FHannes/intellij-community,adedayo/intellij-community,tmpgit/intel
lij-community,FHannes/intellij-community,signed/intellij-community
/* * Copyright 2000-2011 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.highlighter; import com.intellij.diagnostic.LogMessageEx; import com.intellij.lexer.LexerBase; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.StringEscapesTokenTypes; import com.intellij.psi.tree.IElementType; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes; /** * @author Max Medvedev */ public class GroovySlashyStringLexer extends LexerBase { private static final Logger LOG = Logger.getInstance(GroovySlashyStringLexer.class); private CharSequence myBuffer; private int myStart; private int myBufferEnd; private IElementType myTokenType; private int myEnd; public GroovySlashyStringLexer() { } @Override public void start(CharSequence buffer, int startOffset, int endOffset, int initialState) { if (buffer.length()<endOffset) { LogMessageEx.error(LOG, "buffer Length: " + buffer.length() + ", endOffset: " + endOffset, buffer.toString()); } myBuffer = buffer; myEnd = startOffset; myBufferEnd = endOffset; myTokenType = locateToken(); } @Nullable private IElementType locateToken() { if (myEnd >= myBufferEnd) return null; myStart = myEnd; if (checkForSlashEscape(myStart)) { myEnd = myStart + 2; return StringEscapesTokenTypes.VALID_STRING_ESCAPE_TOKEN; } else if (checkForHexCodeStart(myStart)) { for (myEnd = myStart + 2; myEnd < myStart + 6; myEnd++) { if (myEnd >= myBufferEnd || !StringUtil.isHexDigit(myBuffer.charAt(myEnd))) { return StringEscapesTokenTypes.INVALID_UNICODE_ESCAPE_TOKEN; } } return StringEscapesTokenTypes.VALID_STRING_ESCAPE_TOKEN; } while (myEnd < myBufferEnd && !checkForSlashEscape(myEnd) && !checkForHexCodeStart(myEnd)) myEnd++; return GroovyTokenTypes.mREGEX_CONTENT; } private boolean checkForSlashEscape(int start) { return myBuffer.charAt(start) == '\\' && start + 1 < myBufferEnd && myBuffer.charAt(start + 1) == '/'; } private boolean checkForHexCodeStart(int start) { return myBuffer.charAt(start) == '\\' && start + 1 < myBufferEnd && myBuffer.charAt(start + 1) == 'u'; } @Override public int getState() { return 0; } @Override public IElementType getTokenType() { return myTokenType; } @Override public int getTokenStart() { return myStart; } @Override public int getTokenEnd() { return myEnd; } @Override public void advance() { myTokenType = locateToken(); } @Override public CharSequence getBufferSequence() { return myBuffer; } @Override public int getBufferEnd() { return myBufferEnd; } }
plugins/groovy/src/org/jetbrains/plugins/groovy/highlighter/GroovySlashyStringLexer.java
/* * Copyright 2000-2011 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.highlighter; import com.intellij.lexer.LexerBase; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.StringEscapesTokenTypes; import com.intellij.psi.tree.IElementType; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes; /** * @author Max Medvedev */ public class GroovySlashyStringLexer extends LexerBase { private CharSequence myBuffer; private int myStart; private int myBufferEnd; private IElementType myTokenType; private int myEnd; public GroovySlashyStringLexer() { } @Override public void start(CharSequence buffer, int startOffset, int endOffset, int initialState) { assert buffer.length() >= endOffset : "buffer Length: " + buffer.length() + ", endOffset: " + endOffset + "buffer: " + buffer; myBuffer = buffer; myEnd = startOffset; myBufferEnd = endOffset; myTokenType = locateToken(); } @Nullable private IElementType locateToken() { if (myEnd >= myBufferEnd) return null; myStart = myEnd; if (checkForSlashEscape(myStart)) { myEnd = myStart + 2; return StringEscapesTokenTypes.VALID_STRING_ESCAPE_TOKEN; } else if (checkForHexCodeStart(myStart)) { for (myEnd = myStart + 2; myEnd < myStart + 6; myEnd++) { if (myEnd >= myBufferEnd || !StringUtil.isHexDigit(myBuffer.charAt(myEnd))) { return StringEscapesTokenTypes.INVALID_UNICODE_ESCAPE_TOKEN; } } return StringEscapesTokenTypes.VALID_STRING_ESCAPE_TOKEN; } while (myEnd < myBufferEnd && !checkForSlashEscape(myEnd) && !checkForHexCodeStart(myEnd)) myEnd++; return GroovyTokenTypes.mREGEX_CONTENT; } private boolean checkForSlashEscape(int start) { return myBuffer.charAt(start) == '\\' && start + 1 < myBufferEnd && myBuffer.charAt(start + 1) == '/'; } private boolean checkForHexCodeStart(int start) { return myBuffer.charAt(start) == '\\' && start + 1 < myBufferEnd && myBuffer.charAt(start + 1) == 'u'; } @Override public int getState() { return 0; } @Override public IElementType getTokenType() { return myTokenType; } @Override public int getTokenStart() { return myStart; } @Override public int getTokenEnd() { return myEnd; } @Override public void advance() { myTokenType = locateToken(); } @Override public CharSequence getBufferSequence() { return myBuffer; } @Override public int getBufferEnd() { return myBufferEnd; } }
EA-34656 more correct diagnostics
plugins/groovy/src/org/jetbrains/plugins/groovy/highlighter/GroovySlashyStringLexer.java
EA-34656 more correct diagnostics
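The diff between the old and new GroovySlashyStringLexer above is in start(): the old contents relied on a bare assert (skipped entirely when the JVM runs without -ea), while the new contents check the precondition explicitly and report it through the IntelliJ logger together with the offending buffer text, which matches the "more correct diagnostics" subject. The sketch below shows the same idea in plain Java; java.util.logging stands in for the IntelliJ-specific LogMessageEx/Logger pair used in the record.

    import java.util.logging.Logger;

    // Always-on precondition check instead of an assert, mirroring the change in
    // this record (assert -> explicit if plus an error report carrying the buffer).
    class BufferCheckExample {
        private static final Logger LOG = Logger.getLogger(BufferCheckExample.class.getName());

        static void start(CharSequence buffer, int startOffset, int endOffset) {
            // Old style: assert buffer.length() >= endOffset : "...";
            if (buffer.length() < endOffset) {
                LOG.severe("buffer Length: " + buffer.length()
                        + ", endOffset: " + endOffset + ", buffer: " + buffer);
            }
            // ... tokenizing would continue here as in the real lexer ...
        }

        public static void main(String[] args) {
            start("abc", 0, 5); // logs the diagnostic even with assertions disabled
        }
    }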
Java
apache-2.0
9a833facbdd01479e1ccbd555d18117d245763e7
0
DesignAndDeploy/dnd
package edu.teco.dnd.eclipse; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.UUID; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.TableItem; import org.eclipse.ui.IMemento; import org.eclipse.ui.IViewSite; import org.eclipse.ui.PartInitException; import org.eclipse.ui.part.ViewPart; import edu.teco.dnd.module.Module; import edu.teco.dnd.server.ModuleManager; import edu.teco.dnd.server.ModuleManagerListener; import edu.teco.dnd.server.ServerManager; /** * ModuleView: Shows available modules, Start / Stop Server. * * @author jung * */ public class ModuleView extends ViewPart implements ModuleManagerListener { /** * The logger for this class. */ private static final Logger LOGGER = LogManager.getLogger(ModuleView.class); private Composite parent; private Button button; private Label serverStatus; private Table moduleTable; private ServerManager serverManager; private Activator activator; private ModuleManager manager; private Map<UUID, TableItem> map = new HashMap<UUID, TableItem>(); private Display display; public ModuleView() { super(); } @Override public void setFocus() { } @Override public void init(IViewSite site, IMemento memento) throws PartInitException { LOGGER.entry(site, memento); super.init(site, memento); activator = Activator.getDefault(); serverManager = ServerManager.getDefault(); display = Display.getCurrent(); manager = serverManager.getModuleManager(); if (display == null) { display = Display.getDefault(); LOGGER.trace("Display.getCurrent() returned null, using Display.getDefault(): {}", display); } manager.addModuleManagerListener(this); LOGGER.exit(); } @Override public void dispose() { manager.removeModuleManagerListener(this); } @Override public void createPartControl(Composite parent) { this.parent = parent; GridLayout layout = new GridLayout(); layout.numColumns = 2; this.parent.setLayout(layout); createStartButton(); createServerInfo(); createModuleTable(); } /** * Creates a Button that starts the Server when pressed * * @param parent * Composite containing the button */ private void createStartButton() { button = new Button(parent, SWT.NONE); if (serverManager.isRunning()) { button.setText("Stop Server"); } else { button.setText("Start Server"); } button.setToolTipText("Start / Stop the server. 
duh."); button.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { if (ServerManager.getDefault().isRunning()) { ModuleView.this.serverStatus.setText("Stopping serverโ€ฆ"); ModuleView.this.activator.shutdownServer(); } else { ModuleView.this.serverStatus.setText("Starting serverโ€ฆ"); ModuleView.this.activator.startServer(); } } }); } private void createServerInfo() { GridData gridData = new GridData(); gridData.verticalAlignment = GridData.BEGINNING; gridData.horizontalAlignment = GridData.FILL; serverStatus = new Label(parent, 0); if (serverManager.isRunning()) { serverStatus.setText("Server running"); } else { serverStatus.setText("Server down"); } serverStatus.setLayoutData(gridData); } /** * Creates a Table containing currently available modules. * * @param parent * Composite containing the table */ private void createModuleTable() { GridData grid = new GridData(); grid.horizontalSpan = 2; grid.verticalAlignment = GridData.FILL; grid.horizontalAlignment = GridData.FILL; grid.grabExcessHorizontalSpace = true; grid.grabExcessVerticalSpace = true; moduleTable = new Table(parent, 0); moduleTable.setLinesVisible(true); moduleTable.setHeaderVisible(true); moduleTable.setLayoutData(grid); TableColumn column1 = new TableColumn(moduleTable, SWT.None); column1.setText("Module ID"); TableColumn column2 = new TableColumn(moduleTable, SWT.None); column2.setText("Name"); TableColumn column3 = new TableColumn(moduleTable, SWT.None); column3.setText("Location"); moduleTable.setToolTipText("Currently available modules"); /** * Collection<UUID> modules = getModules(); * * for (UUID moduleID : modules) { addID(moduleID); } **/ moduleTable.getColumn(0).pack(); moduleTable.getColumn(1).pack(); moduleTable.getColumn(2).pack(); } /** * Adds a Module ID to the table. * * @param id * the ID to add */ private synchronized void addID(final UUID id) { LOGGER.entry(id); if (!map.containsKey(id)) { LOGGER.trace("id {} is new, adding", id); TableItem item = new TableItem(moduleTable, SWT.NONE); item.setText(0, id.toString()); map.put(id, item); } else { LOGGER.debug("trying to add existing id {}", id); } LOGGER.exit(); } /** * Removes a Module ID from the table. 
* * @param id * the ID to remove */ private synchronized void removeID(final UUID id) { LOGGER.entry(id); TableItem item = map.get(id); if (item != null) { LOGGER.trace("found item {} for id {}", item, id); moduleTable.remove(moduleTable.indexOf(item)); map.remove(id); } else { LOGGER.debug("trying to remove nonexistant id {}", id); } LOGGER.exit(); } @Override public void moduleOnline(final UUID id) { LOGGER.entry(id); display.asyncExec(new Runnable() { @Override public void run() { addID(id); } }); LOGGER.exit(); } @Override public void moduleOffline(final UUID id, Module module) { LOGGER.entry(id); display.asyncExec(new Runnable() { @Override public void run() { removeID(id); } }); LOGGER.exit(); } @Override public void moduleResolved(final UUID id, final Module module) { display.asyncExec(new Runnable() { @Override public void run() { if (!map.containsKey(id)) { addID(id); } TableItem item = map.get(id); if (module.getName() != null) { item.setText(1, module.getName()); } if (module.getLocation() != null) { item.setText(2, module.getLocation()); } } }); } @Override public void serverOnline(final Map<UUID, Module> modules) { display.asyncExec(new Runnable() { @Override public void run() { if (serverStatus != null && button != null) { serverStatus.setText("Server running"); button.setText("Stop Server"); } synchronized (ModuleView.this) { for (UUID id : new ArrayList<UUID>(map.keySet())) { removeID(id); } for (UUID moduleID : modules.keySet()) { addID(moduleID); } } } }); } @Override public void serverOffline() { display.asyncExec(new Runnable() { @Override public void run() { synchronized (ModuleView.this) { for (UUID id : new ArrayList<UUID>(map.keySet())) { removeID(id); } } if (serverStatus != null && button != null) { serverStatus.setText("Server down"); button.setText("Start Server"); } } }); } }
DND/src/edu/teco/dnd/eclipse/ModuleView.java
package edu.teco.dnd.eclipse; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.UUID; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.TableItem; import org.eclipse.ui.IMemento; import org.eclipse.ui.IViewSite; import org.eclipse.ui.PartInitException; import org.eclipse.ui.part.ViewPart; import edu.teco.dnd.eclipse.prefs.PreferencesNetwork; import edu.teco.dnd.module.Module; import edu.teco.dnd.server.ModuleManager; import edu.teco.dnd.server.ModuleManagerListener; import edu.teco.dnd.server.ServerManager; /** * ModuleView: Shows available modules, Start / Stop Server. * * @author jung * */ public class ModuleView extends ViewPart implements ModuleManagerListener { /** * The logger for this class. */ private static final Logger LOGGER = LogManager.getLogger(ModuleView.class); private Composite parent; private Button button; private Label serverStatus; private Table moduleTable; private ServerManager serverManager; private Activator activator; private ModuleManager manager; private Map<UUID, TableItem> map = new HashMap<UUID, TableItem>(); private Display display; public ModuleView() { super(); } @Override public void setFocus() { } @Override public void init(IViewSite site, IMemento memento) throws PartInitException { LOGGER.entry(site, memento); super.init(site, memento); activator = Activator.getDefault(); serverManager = ServerManager.getDefault(); display = Display.getCurrent(); manager = serverManager.getModuleManager(); if (display == null) { display = Display.getDefault(); LOGGER.trace("Display.getCurrent() returned null, using Display.getDefault(): {}", display); } manager.addModuleManagerListener(this); LOGGER.exit(); } @Override public void dispose() { manager.removeModuleManagerListener(this); } @Override public void createPartControl(Composite parent) { this.parent = parent; GridLayout layout = new GridLayout(); layout.numColumns = 2; this.parent.setLayout(layout); createStartButton(); createServerInfo(); createModuleTable(); } /** * Creates a Button that starts the Server when pressed * * @param parent * Composite containing the button */ private void createStartButton() { button = new Button(parent, SWT.NONE); if (serverManager.isRunning()) { button.setText("Stop Server"); } else { button.setText("Start Server"); } button.setToolTipText("Start / Stop the server. 
duh."); button.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { if (ServerManager.getDefault().isRunning()) { ModuleView.this.serverStatus.setText("Stopping serverโ€ฆ"); ModuleView.this.activator.shutdownServer(); } else { ModuleView.this.serverStatus.setText("Starting serverโ€ฆ"); ModuleView.this.activator.startServer(); } } }); } private void createServerInfo() { GridData gridData = new GridData(); gridData.verticalAlignment = GridData.BEGINNING; gridData.horizontalAlignment = GridData.FILL; serverStatus = new Label(parent, 0); if (serverManager.isRunning()) { serverStatus.setText("Server running"); } else { serverStatus.setText("Server down"); } serverStatus.setLayoutData(gridData); } /** * Creates a Table containing currently available modules. * * @param parent * Composite containing the table */ private void createModuleTable() { GridData grid = new GridData(); grid.horizontalSpan = 2; grid.verticalAlignment = GridData.FILL; grid.horizontalAlignment = GridData.FILL; grid.grabExcessHorizontalSpace = true; grid.grabExcessVerticalSpace = true; moduleTable = new Table(parent, 0); moduleTable.setLinesVisible(true); moduleTable.setHeaderVisible(true); moduleTable.setLayoutData(grid); TableColumn column1 = new TableColumn(moduleTable, SWT.None); column1.setText("Module ID"); TableColumn column2 = new TableColumn(moduleTable, SWT.None); column2.setText("Name"); TableColumn column3 = new TableColumn(moduleTable, SWT.None); column3.setText("Location"); moduleTable.setToolTipText("Currently available modules"); /** * Collection<UUID> modules = getModules(); * * for (UUID moduleID : modules) { addID(moduleID); } **/ moduleTable.getColumn(0).pack(); moduleTable.getColumn(1).pack(); moduleTable.getColumn(2).pack(); } /** * Adds a Module ID to the table. * * @param id * the ID to add */ private synchronized void addID(final UUID id) { LOGGER.entry(id); if (!map.containsKey(id)) { LOGGER.trace("id {} is new, adding", id); TableItem item = new TableItem(moduleTable, SWT.NONE); item.setText(0, id.toString()); map.put(id, item); } else { LOGGER.debug("trying to add existing id {}", id); } LOGGER.exit(); } /** * Removes a Module ID from the table. 
* * @param id * the ID to remove */ private synchronized void removeID(final UUID id) { LOGGER.entry(id); TableItem item = map.get(id); if (item != null) { LOGGER.trace("found item {} for id {}", item, id); moduleTable.remove(moduleTable.indexOf(item)); map.remove(id); } else { LOGGER.debug("trying to remove nonexistant id {}", id); } LOGGER.exit(); } @Override public void moduleOnline(final UUID id) { LOGGER.entry(id); display.asyncExec(new Runnable() { @Override public void run() { addID(id); } }); LOGGER.exit(); } @Override public void moduleOffline(final UUID id, Module module) { LOGGER.entry(id); display.asyncExec(new Runnable() { @Override public void run() { removeID(id); } }); LOGGER.exit(); } @Override public void moduleResolved(final UUID id, final Module module) { display.asyncExec(new Runnable() { @Override public void run() { if (!map.containsKey(id)) { addID(id); } TableItem item = map.get(id); if (module.getName() != null) { item.setText(1, module.getName()); } if (module.getLocation() != null) { item.setText(2, module.getLocation()); } } }); } @Override public void serverOnline(final Map<UUID, Module> modules) { display.asyncExec(new Runnable() { @Override public void run() { if (serverStatus != null && button != null) { serverStatus.setText("Server running"); button.setText("Stop Server"); } synchronized (ModuleView.this) { for (UUID id : new ArrayList<UUID>(map.keySet())) { removeID(id); } for (UUID moduleID : modules.keySet()) { addID(moduleID); } } } }); } @Override public void serverOffline() { display.asyncExec(new Runnable() { @Override public void run() { synchronized (ModuleView.this) { for (UUID id : new ArrayList<UUID>(map.keySet())) { removeID(id); } } if (serverStatus != null && button != null) { serverStatus.setText("Server down"); button.setText("Start Server"); } } }); } }
removed unused import.
DND/src/edu/teco/dnd/eclipse/ModuleView.java
removed unused import.
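This record's entire diff is one line: the old ModuleView.java imported edu.teco.dnd.eclipse.prefs.PreferencesNetwork without referencing it, and the new contents drop that import; no behaviour changes. The short sketch below only illustrates the principle with stand-in JDK types, since the DND project classes are not available outside that repository.

    import java.util.HashMap;    // referenced below, so it stays
    // import java.util.TreeMap; // analogous to PreferencesNetwork: never used, safe to delete

    class UnusedImportExample {
        public static void main(String[] args) {
            // Removing an import that nothing references cannot change what the class does.
            System.out.println(new HashMap<String, String>().isEmpty()); // prints true
        }
    }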
Java
apache-2.0
24de2197ed8632b14f237bb0c52f41901c8660ed
0
googleinterns/step176-2020
package com.google.sps.servlets; import org.apache.commons.io.FileUtils; import com.google.api.client.auth.oauth2.BearerToken; import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.auth.oauth2.TokenResponseException; import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow; import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeTokenRequest; import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets; import com.google.api.client.googleapis.auth.oauth2.GoogleRefreshTokenRequest; import com.google.api.client.googleapis.auth.oauth2.GoogleTokenResponse; import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; import com.google.api.client.http.HttpTransport; import com.google.api.client.http.javanet.NetHttpTransport; import com.google.api.client.json.JsonFactory; import com.google.api.client.json.jackson2.JacksonFactory; import com.google.api.client.util.store.FileDataStoreFactory; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.Key; import com.google.appengine.api.datastore.KeyFactory; import com.google.appengine.api.datastore.PreparedQuery.TooManyResultsException; import com.google.appengine.api.datastore.PreparedQuery; import com.google.appengine.api.datastore.Query.FilterOperator; import com.google.appengine.api.datastore.Query; import com.google.appengine.api.users.User; import com.google.appengine.api.users.UserService; import com.google.appengine.api.users.UserServiceFactory; import com.google.sps.data.ChromeOSDevice; import com.google.sps.data.ListDeviceResponse; import com.google.sps.gson.Json; import java.io.File; import java.io.FileReader; import java.io.IOException; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import java.security.GeneralSecurityException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.squareup.okhttp.HttpUrl; import com.squareup.okhttp.MediaType; import com.squareup.okhttp.OkHttpClient; import com.squareup.okhttp.Request; import com.squareup.okhttp.RequestBody; import com.squareup.okhttp.Response; import java.util.ArrayList; import java.util.List; class Util { //This class only works locally [endpoints will not function if deployed] private final String TOKEN_END_POINT = "https://oauth2.googleapis.com/token"; private final String REROUTE_LINK = "http://localhost:8080"; private static final String CLIENT_SECRET_FILE = "/client_info.json"; private static final String API_KEY_FILE = "/api_key.txt"; private static final OkHttpClient client = new OkHttpClient(); private static final String INVALID_ACCESS_TOKEN = "INVALID"; private static final String EMPTY_REFRESH_TOKEN = ""; private static final String EMPTY_API_KEY = ""; private static final String EMPTY_PAGE_TOKEN = ""; private static final String ALL_DEVICES_ENDPOINT = "https://www.googleapis.com/admin/directory/v1/customer/my_customer/devices/chromeos"; private static final int DEFAULT_MAX_DEVICES = 200; //is limited to effectively 200 private static final String DEFAULT_SORT_ORDER = "ASCENDING"; private static final String DEFAULT_PROJECTION = "FULL"; private static final DatastoreService datastore = 
DatastoreServiceFactory.getDatastoreService(); public static final MediaType JSON = MediaType.parse("application/json; charset=utf-8"); public String getNextResponse(String userId, int maxDeviceCount, String pageToken) throws IOException, TokenResponseException, TooManyResultsException { final String apiKey = getAPIKey(); final String accessToken = getAccessToken(userId); final ListDeviceResponse resp = getDevicesResponse(pageToken, accessToken, apiKey, maxDeviceCount); final String responseJson = Json.toJson(resp);//THIS MIGHT NOT ACTUALLY WORK return responseJson; } public List<ChromeOSDevice> getAllDevices(String userId) throws IOException, TokenResponseException, TooManyResultsException { final String apiKey = getAPIKey(); final String accessToken = getAccessToken(userId); ListDeviceResponse resp = getDevicesResponse(EMPTY_PAGE_TOKEN, accessToken, apiKey, DEFAULT_MAX_DEVICES); final List<ChromeOSDevice> allDevices = new ArrayList<>(resp.getDevices()); while (resp.hasNextPageToken()) { final String pageToken = (String) resp.getNextPageToken(); resp = getDevicesResponse(pageToken, accessToken, apiKey, DEFAULT_MAX_DEVICES); allDevices.addAll(resp.getDevices()); } return allDevices; } public static String getAPIKey() throws IOException { File file = new File(Util.class.getResource(API_KEY_FILE).getFile()); String str = FileUtils.readFileToString(file); return str; } private static String getRefreshToken(String userId) throws IOException, TooManyResultsException { Query query = new Query("RefreshToken").setFilter(FilterOperator.EQUAL.of("userId", userId)); PreparedQuery results = datastore.prepare(query); System.out.println(results.countEntities()); Entity entity = results.asSingleEntity(); String refreshToken = (String) entity.getProperty("refreshToken"); return refreshToken; } public static String getAccessToken(String userId) throws IOException, TokenResponseException, TooManyResultsException { final String refreshToken = getRefreshToken(userId); File file = new File(Util.class.getResource(CLIENT_SECRET_FILE).getFile()); final GoogleClientSecrets clientSecrets = GoogleClientSecrets.load( JacksonFactory.getDefaultInstance(), new FileReader(file)); final String clientId = clientSecrets.getDetails().getClientId(); final String clientSecret = clientSecrets.getDetails().getClientSecret(); GoogleTokenResponse response = new GoogleRefreshTokenRequest( new NetHttpTransport(), new JacksonFactory(), refreshToken, clientId, clientSecret) .execute(); return response.getAccessToken(); } private static ListDeviceResponse getDevicesResponse(String pageToken, String accessToken, String apiKey, int maxDeviceCount) throws IOException { HttpUrl.Builder urlBuilder = HttpUrl.parse(ALL_DEVICES_ENDPOINT).newBuilder(); urlBuilder.addQueryParameter("maxResults", String.valueOf(maxDeviceCount)); urlBuilder.addQueryParameter("projection", DEFAULT_PROJECTION); urlBuilder.addQueryParameter("sortOrder", DEFAULT_SORT_ORDER); urlBuilder.addQueryParameter("key", apiKey); if (!pageToken.equals(EMPTY_PAGE_TOKEN)) { urlBuilder.addQueryParameter("pageToken", pageToken); } final String myUrl = urlBuilder.build().toString(); Request req = new Request.Builder().url(myUrl).addHeader("Authorization", "Bearer " + accessToken).build(); Response myResponse = client.newCall(req).execute(); final String content = myResponse.body().string(); ListDeviceResponse resp = (ListDeviceResponse) Json.fromJson(content, ListDeviceResponse.class); return resp; } public void deleteStaleTokens(String userId) { Query query = new 
Query("RefreshToken").setFilter(FilterOperator.EQUAL.of("userId", userId)); PreparedQuery results = datastore.prepare(query); List<Key> keysToDelete = new ArrayList<>(); for (final Entity entity : results.asIterable()) { final long id = entity.getKey().getId(); final Key key = KeyFactory.createKey("RefreshToken", id); keysToDelete.add(key); } datastore.delete(keysToDelete); } public String getNewRefreshToken(String authCode) throws IOException { File file = new File(this.getClass().getResource(CLIENT_SECRET_FILE).getFile()); final GoogleClientSecrets clientSecrets = GoogleClientSecrets.load( JacksonFactory.getDefaultInstance(), new FileReader(file)); final String clientId = clientSecrets.getDetails().getClientId(); final String clientSecret = clientSecrets.getDetails().getClientSecret(); final GoogleTokenResponse tokenResponse = new GoogleAuthorizationCodeTokenRequest( new NetHttpTransport(), JacksonFactory.getDefaultInstance(), TOKEN_END_POINT, clientSecrets.getDetails().getClientId(), clientSecrets.getDetails().getClientSecret(), authCode, REROUTE_LINK) .execute(); final String refreshToken = tokenResponse.getRefreshToken(); return refreshToken; } public void associateRefreshToken(String userId, String refreshToken) { Entity tokenEntity = new Entity("RefreshToken"); tokenEntity.setProperty("userId", userId); tokenEntity.setProperty("refreshToken", refreshToken); deleteStaleTokens(userId); datastore.put(tokenEntity); } public void updateDevices(String userId, List<String> deviceIds, String updatesInJson) throws IOException { final String accessToken = getAccessToken(userId); deviceIds .parallelStream() .forEach( deviceId -> { try { updateSingleDevice(accessToken, deviceId, updatesInJson); } catch (IOException e) { System.out.println(e); } } ); // for (final String deviceId : deviceIds) { // updateSingleDevice(accessToken, deviceId, updatesInJson); // } } private void updateSingleDevice(String accessToken, String deviceId, String updatesInJson) throws IOException { final String myUrl = getUpdateUrl(deviceId); RequestBody body = RequestBody.create(JSON, updatesInJson); Request req = new Request.Builder().url(myUrl).put(body).addHeader("Authorization", "Bearer " + accessToken).build(); Response myResponse = client.newCall(req).execute(); myResponse.body().close(); } private String getUpdateUrl(String deviceId) { return "https://www.googleapis.com/admin/directory/v1/customer/my_customer/devices/chromeos/" + deviceId + "?projection=BASIC"; } }
src/main/java/com/google/sps/servlets/Util.java
package com.google.sps.servlets;

import org.apache.commons.io.FileUtils;
import com.google.api.client.auth.oauth2.BearerToken;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.auth.oauth2.TokenResponseException;
import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow;
import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeTokenRequest;
import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets;
import com.google.api.client.googleapis.auth.oauth2.GoogleRefreshTokenRequest;
import com.google.api.client.googleapis.auth.oauth2.GoogleTokenResponse;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.store.FileDataStoreFactory;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.PreparedQuery.TooManyResultsException;
import com.google.appengine.api.datastore.PreparedQuery;
import com.google.appengine.api.datastore.Query.FilterOperator;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.users.User;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.sps.data.ChromeOSDevice;
import com.google.sps.data.ListDeviceResponse;
import com.google.sps.gson.Json;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import java.security.GeneralSecurityException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.squareup.okhttp.HttpUrl;
import com.squareup.okhttp.MediaType;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.RequestBody;
import com.squareup.okhttp.Response;
import java.util.ArrayList;
import java.util.List;

class Util {

    //This class only works locally [endpoints will not function if deployed]
    private final String TOKEN_END_POINT = "https://oauth2.googleapis.com/token";
    private final String REROUTE_LINK = "http://localhost:8080";
    private static final String CLIENT_SECRET_FILE = "/client_info.json";
    private static final String API_KEY_FILE = "/api_key.txt";
    private static final OkHttpClient client = new OkHttpClient();
    private static final String INVALID_ACCESS_TOKEN = "INVALID";
    private static final String EMPTY_REFRESH_TOKEN = "";
    private static final String EMPTY_API_KEY = "";
    private static final String EMPTY_PAGE_TOKEN = "";
    private static final String ALL_DEVICES_ENDPOINT = "https://www.googleapis.com/admin/directory/v1/customer/my_customer/devices/chromeos";
    private static final int DEFAULT_MAX_DEVICES = 200; //is limited to effectively 200
    private static final String DEFAULT_SORT_ORDER = "ASCENDING";
    private static final String DEFAULT_PROJECTION = "FULL";
    private static final DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
    public static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");

    public String getNextResponse(String userId, int maxDeviceCount, String pageToken)
            throws IOException, TokenResponseException, TooManyResultsException {
        final String apiKey = getAPIKey();
        final String accessToken = getAccessToken(userId);
        final ListDeviceResponse resp = getDevicesResponse(pageToken, accessToken, apiKey, maxDeviceCount);
        final String responseJson = Json.toJson(resp);//THIS MIGHT NOT ACTUALLY WORK
        return responseJson;
    }

    public List<ChromeOSDevice> getAllDevices(String userId)
            throws IOException, TokenResponseException, TooManyResultsException {
        final String apiKey = getAPIKey();
        final String accessToken = getAccessToken(userId);
        ListDeviceResponse resp = getDevicesResponse(EMPTY_PAGE_TOKEN, accessToken, apiKey, DEFAULT_MAX_DEVICES);
        final List<ChromeOSDevice> allDevices = new ArrayList<>(resp.getDevices());
        while (resp.hasNextPageToken()) {
            final String pageToken = (String) resp.getNextPageToken();
            resp = getDevicesResponse(pageToken, accessToken, apiKey, DEFAULT_MAX_DEVICES);
            allDevices.addAll(resp.getDevices());
        }
        return allDevices;
    }

    public static String getAPIKey() throws IOException {
        File file = new File(Util.class.getResource(API_KEY_FILE).getFile());
        String str = FileUtils.readFileToString(file);
        return str;
    }

    private static String getRefreshToken(String userId) throws IOException, TooManyResultsException {
        Query query = new Query("RefreshToken").setFilter(FilterOperator.EQUAL.of("userId", userId));
        PreparedQuery results = datastore.prepare(query);
        System.out.println(results.countEntities());
        Entity entity = results.asSingleEntity();
        String refreshToken = (String) entity.getProperty("refreshToken");
        return refreshToken;
    }

    public static String getAccessToken(String userId)
            throws IOException, TokenResponseException, TooManyResultsException {
        final String refreshToken = getRefreshToken(userId);
        File file = new File(Util.class.getResource(CLIENT_SECRET_FILE).getFile());
        final GoogleClientSecrets clientSecrets = GoogleClientSecrets.load(
            JacksonFactory.getDefaultInstance(), new FileReader(file));
        final String clientId = clientSecrets.getDetails().getClientId();
        final String clientSecret = clientSecrets.getDetails().getClientSecret();
        GoogleTokenResponse response = new GoogleRefreshTokenRequest(
            new NetHttpTransport(), new JacksonFactory(), refreshToken, clientId, clientSecret)
            .execute();
        return response.getAccessToken();
    }

    private static ListDeviceResponse getDevicesResponse(String pageToken, String accessToken, String apiKey, int maxDeviceCount) throws IOException {
        HttpUrl.Builder urlBuilder = HttpUrl.parse(ALL_DEVICES_ENDPOINT).newBuilder();
        urlBuilder.addQueryParameter("maxResults", String.valueOf(maxDeviceCount));
        urlBuilder.addQueryParameter("projection", DEFAULT_PROJECTION);
        urlBuilder.addQueryParameter("sortOrder", DEFAULT_SORT_ORDER);
        urlBuilder.addQueryParameter("key", apiKey);
        if (!pageToken.equals(EMPTY_PAGE_TOKEN)) {
            urlBuilder.addQueryParameter("pageToken", pageToken);
        }
        final String myUrl = urlBuilder.build().toString();
        Request req = new Request.Builder().url(myUrl).addHeader("Authorization", "Bearer " + accessToken).build();
        Response myResponse = client.newCall(req).execute();
        final String content = myResponse.body().string();
        ListDeviceResponse resp = (ListDeviceResponse) Json.fromJson(content, ListDeviceResponse.class);
        return resp;
    }

    public void deleteStaleTokens(String userId) {
        Query query = new Query("RefreshToken").setFilter(FilterOperator.EQUAL.of("userId", userId));
        PreparedQuery results = datastore.prepare(query);
        List<Key> keysToDelete = new ArrayList<>();
        for (final Entity entity : results.asIterable()) {
            final long id = entity.getKey().getId();
            final Key key = KeyFactory.createKey("RefreshToken", id);
            keysToDelete.add(key);
        }
        datastore.delete(keysToDelete);
    }

    public String getNewRefreshToken(String authCode) throws IOException {
        File file = new File(this.getClass().getResource(CLIENT_SECRET_FILE).getFile());
        final GoogleClientSecrets clientSecrets = GoogleClientSecrets.load(
            JacksonFactory.getDefaultInstance(), new FileReader(file));
        final String clientId = clientSecrets.getDetails().getClientId();
        final String clientSecret = clientSecrets.getDetails().getClientSecret();
        final GoogleTokenResponse tokenResponse = new GoogleAuthorizationCodeTokenRequest(
            new NetHttpTransport(), JacksonFactory.getDefaultInstance(), TOKEN_END_POINT,
            clientSecrets.getDetails().getClientId(), clientSecrets.getDetails().getClientSecret(),
            authCode, REROUTE_LINK)
            .execute();
        final String refreshToken = tokenResponse.getRefreshToken();
        return refreshToken;
    }

    public void associateRefreshToken(String userId, String refreshToken) {
        Entity tokenEntity = new Entity("RefreshToken");
        tokenEntity.setProperty("userId", userId);
        tokenEntity.setProperty("refreshToken", refreshToken);
        deleteStaleTokens(userId);
        datastore.put(tokenEntity);
    }

    public void updateDevices(String userId, List<String> deviceIds, String updatesInJson) throws IOException {
        final String accessToken = getAccessToken(userId);
        for (final String deviceId : deviceIds) {
            updateSingleDevice(accessToken, deviceId, updatesInJson);
        }
    }

    private void updateSingleDevice(String accessToken, String deviceId, String updatesInJson) throws IOException {
        final String myUrl = getUpdateUrl(deviceId);
        RequestBody body = RequestBody.create(JSON, updatesInJson);
        Request req = new Request.Builder().url(myUrl).put(body).addHeader("Authorization", "Bearer " + accessToken).build();
        Response myResponse = client.newCall(req).execute();
        myResponse.body().close();
    }

    private String getUpdateUrl(String deviceId) {
        return "https://www.googleapis.com/admin/directory/v1/customer/my_customer/devices/chromeos/" + deviceId + "?projection=BASIC";
    }
}
parallelize attempt
src/main/java/com/google/sps/servlets/Util.java
parallelize attempt
Java
apache-2.0
c4244eb027f16b888ac970178377e1f0788902d1
0
apache/solr,apache/solr,apache/solr,apache/solr,apache/solr
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.search.grouping; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource; import org.apache.lucene.search.CachingCollector; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.Weight; import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector; import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector; import org.apache.lucene.search.grouping.function.FunctionSecondPassGroupingCollector; import org.apache.lucene.search.grouping.term.TermAllGroupsCollector; import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector; import org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.mutable.MutableValue; import org.apache.lucene.util.mutable.MutableValueStr; // TODO // - should test relevance sort too // - test null // - test ties // - test compound sort public class TestGrouping extends LuceneTestCase { public void testBasic() throws Exception { String groupField = "author"; 
FieldType customType = new FieldType(); customType.setStored(true); Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); // 0 Document doc = new Document(); addGroupField(doc, groupField, "author1"); doc.add(new TextField("content", "random text", Field.Store.YES)); doc.add(new Field("id", "1", customType)); w.addDocument(doc); // 1 doc = new Document(); addGroupField(doc, groupField, "author1"); doc.add(new TextField("content", "some more random text", Field.Store.YES)); doc.add(new Field("id", "2", customType)); w.addDocument(doc); // 2 doc = new Document(); addGroupField(doc, groupField, "author1"); doc.add(new TextField("content", "some more random textual data", Field.Store.YES)); doc.add(new Field("id", "3", customType)); w.addDocument(doc); // 3 doc = new Document(); addGroupField(doc, groupField, "author2"); doc.add(new TextField("content", "some random text", Field.Store.YES)); doc.add(new Field("id", "4", customType)); w.addDocument(doc); // 4 doc = new Document(); addGroupField(doc, groupField, "author3"); doc.add(new TextField("content", "some more random text", Field.Store.YES)); doc.add(new Field("id", "5", customType)); w.addDocument(doc); // 5 doc = new Document(); addGroupField(doc, groupField, "author3"); doc.add(new TextField("content", "random", Field.Store.YES)); doc.add(new Field("id", "6", customType)); w.addDocument(doc); // 6 -- no author field doc = new Document(); doc.add(new TextField("content", "random word stuck in alot of other text", Field.Store.YES)); doc.add(new Field("id", "6", customType)); w.addDocument(doc); IndexSearcher indexSearcher = newSearcher(w.getReader()); w.close(); final Sort groupSort = Sort.RELEVANCE; final AbstractFirstPassGroupingCollector<?> c1 = createRandomFirstPassCollector(groupField, groupSort, 10); indexSearcher.search(new TermQuery(new Term("content", "random")), c1); final AbstractSecondPassGroupingCollector<?> c2 = createSecondPassCollector(c1, groupField, groupSort, Sort.RELEVANCE, 0, 5, true, true, true); indexSearcher.search(new TermQuery(new Term("content", "random")), c2); final TopGroups<?> groups = c2.getTopGroups(0); assertFalse(Float.isNaN(groups.maxScore)); assertEquals(7, groups.totalHitCount); assertEquals(7, groups.totalGroupedHitCount); assertEquals(4, groups.groups.length); // relevance order: 5, 0, 3, 4, 1, 2, 6 // the later a document is added the higher this docId // value GroupDocs<?> group = groups.groups[0]; compareGroupValue("author3", group); assertEquals(2, group.scoreDocs.length); assertEquals(5, group.scoreDocs[0].doc); assertEquals(4, group.scoreDocs[1].doc); assertTrue(group.scoreDocs[0].score > group.scoreDocs[1].score); group = groups.groups[1]; compareGroupValue("author1", group); assertEquals(3, group.scoreDocs.length); assertEquals(0, group.scoreDocs[0].doc); assertEquals(1, group.scoreDocs[1].doc); assertEquals(2, group.scoreDocs[2].doc); assertTrue(group.scoreDocs[0].score > group.scoreDocs[1].score); assertTrue(group.scoreDocs[1].score > group.scoreDocs[2].score); group = groups.groups[2]; compareGroupValue("author2", group); assertEquals(1, group.scoreDocs.length); assertEquals(3, group.scoreDocs[0].doc); group = groups.groups[3]; compareGroupValue(null, group); assertEquals(1, group.scoreDocs.length); assertEquals(6, group.scoreDocs[0].doc); indexSearcher.getIndexReader().close(); dir.close(); } private void addGroupField(Document doc, String groupField, String 
value) { doc.add(new SortedDocValuesField(groupField, new BytesRef(value))); } private AbstractFirstPassGroupingCollector<?> createRandomFirstPassCollector(String groupField, Sort groupSort, int topDocs) throws IOException { AbstractFirstPassGroupingCollector<?> selected; if (random().nextBoolean()) { ValueSource vs = new BytesRefFieldSource(groupField); selected = new FunctionFirstPassGroupingCollector(vs, new HashMap<>(), groupSort, topDocs); } else { selected = new TermFirstPassGroupingCollector(groupField, groupSort, topDocs); } if (VERBOSE) { System.out.println("Selected implementation: " + selected.getClass().getName()); } return selected; } private AbstractFirstPassGroupingCollector<?> createFirstPassCollector(String groupField, Sort groupSort, int topDocs, AbstractFirstPassGroupingCollector<?> firstPassGroupingCollector) throws IOException { if (TermFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) { ValueSource vs = new BytesRefFieldSource(groupField); return new FunctionFirstPassGroupingCollector(vs, new HashMap<>(), groupSort, topDocs); } else { return new TermFirstPassGroupingCollector(groupField, groupSort, topDocs); } } @SuppressWarnings({"unchecked","rawtypes"}) private <T> AbstractSecondPassGroupingCollector<T> createSecondPassCollector(AbstractFirstPassGroupingCollector firstPassGroupingCollector, String groupField, Sort groupSort, Sort sortWithinGroup, int groupOffset, int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) throws IOException { if (TermFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) { Collection<SearchGroup<BytesRef>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields); return (AbstractSecondPassGroupingCollector) new TermSecondPassGroupingCollector(groupField, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields); } else { ValueSource vs = new BytesRefFieldSource(groupField); Collection<SearchGroup<MutableValue>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields); return (AbstractSecondPassGroupingCollector) new FunctionSecondPassGroupingCollector(searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, vs, new HashMap()); } } // Basically converts searchGroups from MutableValue to BytesRef if grouping by ValueSource @SuppressWarnings("unchecked") private AbstractSecondPassGroupingCollector<?> createSecondPassCollector(AbstractFirstPassGroupingCollector<?> firstPassGroupingCollector, String groupField, Collection<SearchGroup<BytesRef>> searchGroups, Sort groupSort, Sort sortWithinGroup, int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) throws IOException { if (firstPassGroupingCollector.getClass().isAssignableFrom(TermFirstPassGroupingCollector.class)) { return new TermSecondPassGroupingCollector(groupField, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields); } else { ValueSource vs = new BytesRefFieldSource(groupField); List<SearchGroup<MutableValue>> mvalSearchGroups = new ArrayList<>(searchGroups.size()); for (SearchGroup<BytesRef> mergedTopGroup : searchGroups) { SearchGroup<MutableValue> sg = new SearchGroup<>(); MutableValueStr groupValue = new MutableValueStr(); if (mergedTopGroup.groupValue != null) { groupValue.value.copyBytes(mergedTopGroup.groupValue); } else { groupValue.exists = false; } 
sg.groupValue = groupValue; sg.sortValues = mergedTopGroup.sortValues; mvalSearchGroups.add(sg); } return new FunctionSecondPassGroupingCollector(mvalSearchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, vs, new HashMap<>()); } } private AbstractAllGroupsCollector<?> createAllGroupsCollector(AbstractFirstPassGroupingCollector<?> firstPassGroupingCollector, String groupField) { if (firstPassGroupingCollector.getClass().isAssignableFrom(TermFirstPassGroupingCollector.class)) { return new TermAllGroupsCollector(groupField); } else { ValueSource vs = new BytesRefFieldSource(groupField); return new FunctionAllGroupsCollector(vs, new HashMap<>()); } } private void compareGroupValue(String expected, GroupDocs<?> group) { if (expected == null) { if (group.groupValue == null) { return; } else if (group.groupValue.getClass().isAssignableFrom(MutableValueStr.class)) { return; } else if (((BytesRef) group.groupValue).length == 0) { return; } fail(); } if (group.groupValue.getClass().isAssignableFrom(BytesRef.class)) { assertEquals(new BytesRef(expected), group.groupValue); } else if (group.groupValue.getClass().isAssignableFrom(MutableValueStr.class)) { MutableValueStr v = new MutableValueStr(); v.value.copyChars(expected); assertEquals(v, group.groupValue); } else { fail(); } } private Collection<SearchGroup<BytesRef>> getSearchGroups(AbstractFirstPassGroupingCollector<?> c, int groupOffset, boolean fillFields) { if (TermFirstPassGroupingCollector.class.isAssignableFrom(c.getClass())) { return ((TermFirstPassGroupingCollector) c).getTopGroups(groupOffset, fillFields); } else if (FunctionFirstPassGroupingCollector.class.isAssignableFrom(c.getClass())) { Collection<SearchGroup<MutableValue>> mutableValueGroups = ((FunctionFirstPassGroupingCollector) c).getTopGroups(groupOffset, fillFields); if (mutableValueGroups == null) { return null; } List<SearchGroup<BytesRef>> groups = new ArrayList<>(mutableValueGroups.size()); for (SearchGroup<MutableValue> mutableValueGroup : mutableValueGroups) { SearchGroup<BytesRef> sg = new SearchGroup<>(); sg.groupValue = mutableValueGroup.groupValue.exists() ? ((MutableValueStr) mutableValueGroup.groupValue).value.get() : null; sg.sortValues = mutableValueGroup.sortValues; groups.add(sg); } return groups; } fail(); return null; } @SuppressWarnings({"unchecked", "rawtypes"}) private TopGroups<BytesRef> getTopGroups(AbstractSecondPassGroupingCollector c, int withinGroupOffset) { if (c.getClass().isAssignableFrom(TermSecondPassGroupingCollector.class)) { return ((TermSecondPassGroupingCollector) c).getTopGroups(withinGroupOffset); } else if (c.getClass().isAssignableFrom(FunctionSecondPassGroupingCollector.class)) { TopGroups<MutableValue> mvalTopGroups = ((FunctionSecondPassGroupingCollector) c).getTopGroups(withinGroupOffset); List<GroupDocs<BytesRef>> groups = new ArrayList<>(mvalTopGroups.groups.length); for (GroupDocs<MutableValue> mvalGd : mvalTopGroups.groups) { BytesRef groupValue = mvalGd.groupValue.exists() ? 
((MutableValueStr) mvalGd.groupValue).value.get() : null; groups.add(new GroupDocs<>(Float.NaN, mvalGd.maxScore, mvalGd.totalHits, mvalGd.scoreDocs, groupValue, mvalGd.groupSortValues)); } // NOTE: currenlty using diamond operator on MergedIterator (without explicit Term class) causes // errors on Eclipse Compiler (ecj) used for javadoc lint return new TopGroups<BytesRef>(mvalTopGroups.groupSort, mvalTopGroups.withinGroupSort, mvalTopGroups.totalHitCount, mvalTopGroups.totalGroupedHitCount, groups.toArray(new GroupDocs[groups.size()]), Float.NaN); } fail(); return null; } private static class GroupDoc { final int id; final BytesRef group; final BytesRef sort1; final BytesRef sort2; // content must be "realN ..." final String content; float score; float score2; public GroupDoc(int id, BytesRef group, BytesRef sort1, BytesRef sort2, String content) { this.id = id; this.group = group; this.sort1 = sort1; this.sort2 = sort2; this.content = content; } } private Sort getRandomSort() { final List<SortField> sortFields = new ArrayList<>(); if (random().nextInt(7) == 2) { sortFields.add(SortField.FIELD_SCORE); } else { if (random().nextBoolean()) { if (random().nextBoolean()) { sortFields.add(new SortField("sort1", SortField.Type.STRING, random().nextBoolean())); } else { sortFields.add(new SortField("sort2", SortField.Type.STRING, random().nextBoolean())); } } else if (random().nextBoolean()) { sortFields.add(new SortField("sort1", SortField.Type.STRING, random().nextBoolean())); sortFields.add(new SortField("sort2", SortField.Type.STRING, random().nextBoolean())); } } // Break ties: sortFields.add(new SortField("id", SortField.Type.INT)); return new Sort(sortFields.toArray(new SortField[sortFields.size()])); } private Comparator<GroupDoc> getComparator(Sort sort) { final SortField[] sortFields = sort.getSort(); return new Comparator<GroupDoc>() { @Override public int compare(GroupDoc d1, GroupDoc d2) { for(SortField sf : sortFields) { final int cmp; if (sf.getType() == SortField.Type.SCORE) { if (d1.score > d2.score) { cmp = -1; } else if (d1.score < d2.score) { cmp = 1; } else { cmp = 0; } } else if (sf.getField().equals("sort1")) { cmp = d1.sort1.compareTo(d2.sort1); } else if (sf.getField().equals("sort2")) { cmp = d1.sort2.compareTo(d2.sort2); } else { assertEquals(sf.getField(), "id"); cmp = d1.id - d2.id; } if (cmp != 0) { return sf.getReverse() ? 
-cmp : cmp; } } // Our sort always fully tie breaks: fail(); return 0; } }; } @SuppressWarnings({"unchecked","rawtypes"}) private Comparable<?>[] fillFields(GroupDoc d, Sort sort) { final SortField[] sortFields = sort.getSort(); final Comparable<?>[] fields = new Comparable[sortFields.length]; for(int fieldIDX=0;fieldIDX<sortFields.length;fieldIDX++) { final Comparable<?> c; final SortField sf = sortFields[fieldIDX]; if (sf.getType() == SortField.Type.SCORE) { c = d.score; } else if (sf.getField().equals("sort1")) { c = d.sort1; } else if (sf.getField().equals("sort2")) { c = d.sort2; } else { assertEquals("id", sf.getField()); c = d.id; } fields[fieldIDX] = c; } return fields; } private String groupToString(BytesRef b) { if (b == null) { return "null"; } else { return b.utf8ToString(); } } private TopGroups<BytesRef> slowGrouping(GroupDoc[] groupDocs, String searchTerm, boolean fillFields, boolean getScores, boolean getMaxScores, boolean doAllGroups, Sort groupSort, Sort docSort, int topNGroups, int docsPerGroup, int groupOffset, int docOffset) { final Comparator<GroupDoc> groupSortComp = getComparator(groupSort); Arrays.sort(groupDocs, groupSortComp); final HashMap<BytesRef,List<GroupDoc>> groups = new HashMap<>(); final List<BytesRef> sortedGroups = new ArrayList<>(); final List<Comparable<?>[]> sortedGroupFields = new ArrayList<>(); int totalHitCount = 0; Set<BytesRef> knownGroups = new HashSet<>(); //System.out.println("TEST: slowGrouping"); for(GroupDoc d : groupDocs) { // TODO: would be better to filter by searchTerm before sorting! if (!d.content.startsWith(searchTerm)) { continue; } totalHitCount++; //System.out.println(" match id=" + d.id + " score=" + d.score); if (doAllGroups) { if (!knownGroups.contains(d.group)) { knownGroups.add(d.group); //System.out.println(" add group=" + groupToString(d.group)); } } List<GroupDoc> l = groups.get(d.group); if (l == null) { //System.out.println(" add sortedGroup=" + groupToString(d.group)); sortedGroups.add(d.group); if (fillFields) { sortedGroupFields.add(fillFields(d, groupSort)); } l = new ArrayList<>(); groups.put(d.group, l); } l.add(d); } if (groupOffset >= sortedGroups.size()) { // slice is out of bounds return null; } final int limit = Math.min(groupOffset + topNGroups, groups.size()); final Comparator<GroupDoc> docSortComp = getComparator(docSort); @SuppressWarnings({"unchecked","rawtypes"}) final GroupDocs<BytesRef>[] result = new GroupDocs[limit-groupOffset]; int totalGroupedHitCount = 0; for(int idx=groupOffset;idx < limit;idx++) { final BytesRef group = sortedGroups.get(idx); final List<GroupDoc> docs = groups.get(group); totalGroupedHitCount += docs.size(); Collections.sort(docs, docSortComp); final ScoreDoc[] hits; if (docs.size() > docOffset) { final int docIDXLimit = Math.min(docOffset + docsPerGroup, docs.size()); hits = new ScoreDoc[docIDXLimit - docOffset]; for(int docIDX=docOffset; docIDX < docIDXLimit; docIDX++) { final GroupDoc d = docs.get(docIDX); final FieldDoc fd; if (fillFields) { fd = new FieldDoc(d.id, getScores ? d.score : Float.NaN, fillFields(d, docSort)); } else { fd = new FieldDoc(d.id, getScores ? d.score : Float.NaN); } hits[docIDX-docOffset] = fd; } } else { hits = new ScoreDoc[0]; } result[idx-groupOffset] = new GroupDocs<>(Float.NaN, 0.0f, docs.size(), hits, group, fillFields ? 
sortedGroupFields.get(idx) : null); } if (doAllGroups) { return new TopGroups<>( new TopGroups<>(groupSort.getSort(), docSort.getSort(), totalHitCount, totalGroupedHitCount, result, Float.NaN), knownGroups.size() ); } else { return new TopGroups<>(groupSort.getSort(), docSort.getSort(), totalHitCount, totalGroupedHitCount, result, Float.NaN); } } private DirectoryReader getDocBlockReader(Directory dir, GroupDoc[] groupDocs) throws IOException { // Coalesce by group, but in random order: Collections.shuffle(Arrays.asList(groupDocs), random()); final Map<BytesRef,List<GroupDoc>> groupMap = new HashMap<>(); final List<BytesRef> groupValues = new ArrayList<>(); for(GroupDoc groupDoc : groupDocs) { if (!groupMap.containsKey(groupDoc.group)) { groupValues.add(groupDoc.group); groupMap.put(groupDoc.group, new ArrayList<GroupDoc>()); } groupMap.get(groupDoc.group).add(groupDoc); } RandomIndexWriter w = new RandomIndexWriter( random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); final List<List<Document>> updateDocs = new ArrayList<>(); FieldType groupEndType = new FieldType(StringField.TYPE_NOT_STORED); groupEndType.setIndexOptions(IndexOptions.DOCS); groupEndType.setOmitNorms(true); //System.out.println("TEST: index groups"); for(BytesRef group : groupValues) { final List<Document> docs = new ArrayList<>(); //System.out.println("TEST: group=" + (group == null ? "null" : group.utf8ToString())); for(GroupDoc groupValue : groupMap.get(group)) { Document doc = new Document(); docs.add(doc); if (groupValue.group != null) { doc.add(newStringField("group", groupValue.group.utf8ToString(), Field.Store.YES)); doc.add(new SortedDocValuesField("group", BytesRef.deepCopyOf(groupValue.group))); } doc.add(newStringField("sort1", groupValue.sort1.utf8ToString(), Field.Store.NO)); doc.add(new SortedDocValuesField("sort1", BytesRef.deepCopyOf(groupValue.sort1))); doc.add(newStringField("sort2", groupValue.sort2.utf8ToString(), Field.Store.NO)); doc.add(new SortedDocValuesField("sort2", BytesRef.deepCopyOf(groupValue.sort2))); doc.add(new NumericDocValuesField("id", groupValue.id)); doc.add(newTextField("content", groupValue.content, Field.Store.NO)); //System.out.println("TEST: doc content=" + groupValue.content + " group=" + (groupValue.group == null ? 
"null" : groupValue.group.utf8ToString()) + " sort1=" + groupValue.sort1.utf8ToString() + " id=" + groupValue.id); } // So we can pull filter marking last doc in block: final Field groupEnd = newField("groupend", "x", groupEndType); docs.get(docs.size()-1).add(groupEnd); // Add as a doc block: w.addDocuments(docs); if (group != null && random().nextInt(7) == 4) { updateDocs.add(docs); } } for(List<Document> docs : updateDocs) { // Just replaces docs w/ same docs: w.updateDocuments(new Term("group", docs.get(0).get("group")), docs); } final DirectoryReader r = w.getReader(); w.close(); return r; } private static class ShardState { public final ShardSearcher[] subSearchers; public final int[] docStarts; public ShardState(IndexSearcher s) { final IndexReaderContext ctx = s.getTopReaderContext(); final List<LeafReaderContext> leaves = ctx.leaves(); subSearchers = new ShardSearcher[leaves.size()]; for(int searcherIDX=0;searcherIDX<subSearchers.length;searcherIDX++) { subSearchers[searcherIDX] = new ShardSearcher(leaves.get(searcherIDX), ctx); } docStarts = new int[subSearchers.length]; for(int subIDX=0;subIDX<docStarts.length;subIDX++) { docStarts[subIDX] = leaves.get(subIDX).docBase; //System.out.println("docStarts[" + subIDX + "]=" + docStarts[subIDX]); } } } public void testRandom() throws Exception { int numberOfRuns = TestUtil.nextInt(random(), 3, 6); for (int iter=0; iter<numberOfRuns; iter++) { if (VERBOSE) { System.out.println("TEST: iter=" + iter); } final int numDocs = TestUtil.nextInt(random(), 100, 1000) * RANDOM_MULTIPLIER; //final int numDocs = _TestUtil.nextInt(random, 5, 20); final int numGroups = TestUtil.nextInt(random(), 1, numDocs); if (VERBOSE) { System.out.println("TEST: numDocs=" + numDocs + " numGroups=" + numGroups); } final List<BytesRef> groups = new ArrayList<>(); for(int i=0;i<numGroups;i++) { String randomValue; do { // B/c of DV based impl we can't see the difference between an empty string and a null value. // For that reason we don't generate empty string // groups. 
randomValue = TestUtil.randomRealisticUnicodeString(random()); //randomValue = TestUtil.randomSimpleString(random()); } while ("".equals(randomValue)); groups.add(new BytesRef(randomValue)); } final String[] contentStrings = new String[TestUtil.nextInt(random(), 2, 20)]; if (VERBOSE) { System.out.println("TEST: create fake content"); } for(int contentIDX=0;contentIDX<contentStrings.length;contentIDX++) { final StringBuilder sb = new StringBuilder(); sb.append("real").append(random().nextInt(3)).append(' '); final int fakeCount = random().nextInt(10); for(int fakeIDX=0;fakeIDX<fakeCount;fakeIDX++) { sb.append("fake "); } contentStrings[contentIDX] = sb.toString(); if (VERBOSE) { System.out.println(" content=" + sb.toString()); } } Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); Document docNoGroup = new Document(); Field idvGroupField = new SortedDocValuesField("group", new BytesRef()); doc.add(idvGroupField); docNoGroup.add(idvGroupField); Field group = newStringField("group", "", Field.Store.NO); doc.add(group); Field sort1 = new SortedDocValuesField("sort1", new BytesRef()); doc.add(sort1); docNoGroup.add(sort1); Field sort2 = new SortedDocValuesField("sort2", new BytesRef()); doc.add(sort2); docNoGroup.add(sort2); Field content = newTextField("content", "", Field.Store.NO); doc.add(content); docNoGroup.add(content); NumericDocValuesField idDV = new NumericDocValuesField("id", 0); doc.add(idDV); docNoGroup.add(idDV); final GroupDoc[] groupDocs = new GroupDoc[numDocs]; for(int i=0;i<numDocs;i++) { final BytesRef groupValue; if (random().nextInt(24) == 17) { // So we test the "doc doesn't have the group'd // field" case: groupValue = null; } else { groupValue = groups.get(random().nextInt(groups.size())); } final GroupDoc groupDoc = new GroupDoc(i, groupValue, groups.get(random().nextInt(groups.size())), groups.get(random().nextInt(groups.size())), contentStrings[random().nextInt(contentStrings.length)]); if (VERBOSE) { System.out.println(" doc content=" + groupDoc.content + " id=" + i + " group=" + (groupDoc.group == null ? 
"null" : groupDoc.group.utf8ToString()) + " sort1=" + groupDoc.sort1.utf8ToString() + " sort2=" + groupDoc.sort2.utf8ToString()); } groupDocs[i] = groupDoc; if (groupDoc.group != null) { group.setStringValue(groupDoc.group.utf8ToString()); idvGroupField.setBytesValue(BytesRef.deepCopyOf(groupDoc.group)); } else { // TODO: not true // Must explicitly set empty string, else eg if // the segment has all docs missing the field then // we get null back instead of empty BytesRef: idvGroupField.setBytesValue(new BytesRef()); } sort1.setBytesValue(BytesRef.deepCopyOf(groupDoc.sort1)); sort2.setBytesValue(BytesRef.deepCopyOf(groupDoc.sort2)); content.setStringValue(groupDoc.content); idDV.setLongValue(groupDoc.id); if (groupDoc.group == null) { w.addDocument(docNoGroup); } else { w.addDocument(doc); } } final GroupDoc[] groupDocsByID = new GroupDoc[groupDocs.length]; System.arraycopy(groupDocs, 0, groupDocsByID, 0, groupDocs.length); final DirectoryReader r = w.getReader(); w.close(); final NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id"); DirectoryReader rBlocks = null; Directory dirBlocks = null; final IndexSearcher s = newSearcher(r); if (VERBOSE) { System.out.println("\nTEST: searcher=" + s); } final ShardState shards = new ShardState(s); Set<Integer> seenIDs = new HashSet<>(); for(int contentID=0;contentID<3;contentID++) { final ScoreDoc[] hits = s.search(new TermQuery(new Term("content", "real"+contentID)), numDocs).scoreDocs; for(ScoreDoc hit : hits) { int idValue = (int) docIDToID.get(hit.doc); final GroupDoc gd = groupDocs[idValue]; seenIDs.add(idValue); assertTrue(gd.score == 0.0); gd.score = hit.score; assertEquals(gd.id, idValue); } } // make sure all groups were seen across the hits assertEquals(groupDocs.length, seenIDs.size()); for(GroupDoc gd : groupDocs) { assertTrue(Float.isFinite(gd.score)); assertTrue(gd.score >= 0.0); } // Build 2nd index, where docs are added in blocks by // group, so we can use single pass collector dirBlocks = newDirectory(); rBlocks = getDocBlockReader(dirBlocks, groupDocs); final Query lastDocInBlock = new TermQuery(new Term("groupend", "x")); final NumericDocValues docIDToIDBlocks = MultiDocValues.getNumericValues(rBlocks, "id"); assertNotNull(docIDToIDBlocks); final IndexSearcher sBlocks = newSearcher(rBlocks); final ShardState shardsBlocks = new ShardState(sBlocks); // ReaderBlocks only increases maxDoc() vs reader, which // means a monotonic shift in scores, so we can // reliably remap them w/ Map: final Map<String,Map<Float,Float>> scoreMap = new HashMap<>(); // Tricky: must separately set .score2, because the doc // block index was created with possible deletions! 
//System.out.println("fixup score2"); for(int contentID=0;contentID<3;contentID++) { //System.out.println(" term=real" + contentID); final Map<Float,Float> termScoreMap = new HashMap<>(); scoreMap.put("real"+contentID, termScoreMap); //System.out.println("term=real" + contentID + " dfold=" + s.docFreq(new Term("content", "real"+contentID)) + //" dfnew=" + sBlocks.docFreq(new Term("content", "real"+contentID))); final ScoreDoc[] hits = sBlocks.search(new TermQuery(new Term("content", "real"+contentID)), numDocs).scoreDocs; for(ScoreDoc hit : hits) { final GroupDoc gd = groupDocsByID[(int) docIDToIDBlocks.get(hit.doc)]; assertTrue(gd.score2 == 0.0); gd.score2 = hit.score; assertEquals(gd.id, docIDToIDBlocks.get(hit.doc)); //System.out.println(" score=" + gd.score + " score2=" + hit.score + " id=" + docIDToIDBlocks.get(hit.doc)); termScoreMap.put(gd.score, gd.score2); } } for(int searchIter=0;searchIter<100;searchIter++) { if (VERBOSE) { System.out.println("\nTEST: searchIter=" + searchIter); } final String searchTerm = "real" + random().nextInt(3); final boolean fillFields = random().nextBoolean(); boolean getScores = random().nextBoolean(); final boolean getMaxScores = random().nextBoolean(); final Sort groupSort = getRandomSort(); //final Sort groupSort = new Sort(new SortField[] {new SortField("sort1", SortField.STRING), new SortField("id", SortField.INT)}); final Sort docSort = getRandomSort(); getScores |= (groupSort.needsScores() || docSort.needsScores()); final int topNGroups = TestUtil.nextInt(random(), 1, 30); //final int topNGroups = 10; final int docsPerGroup = TestUtil.nextInt(random(), 1, 50); final int groupOffset = TestUtil.nextInt(random(), 0, (topNGroups - 1) / 2); //final int groupOffset = 0; final int docOffset = TestUtil.nextInt(random(), 0, docsPerGroup - 1); //final int docOffset = 0; final boolean doCache = random().nextBoolean(); final boolean doAllGroups = random().nextBoolean(); if (VERBOSE) { System.out.println("TEST: groupSort=" + groupSort + " docSort=" + docSort + " searchTerm=" + searchTerm + " dF=" + r.docFreq(new Term("content", searchTerm)) +" dFBlock=" + rBlocks.docFreq(new Term("content", searchTerm)) + " topNGroups=" + topNGroups + " groupOffset=" + groupOffset + " docOffset=" + docOffset + " doCache=" + doCache + " docsPerGroup=" + docsPerGroup + " doAllGroups=" + doAllGroups + " getScores=" + getScores + " getMaxScores=" + getMaxScores); } String groupField = "group"; if (VERBOSE) { System.out.println(" groupField=" + groupField); } final AbstractFirstPassGroupingCollector<?> c1 = createRandomFirstPassCollector(groupField, groupSort, groupOffset+topNGroups); final CachingCollector cCache; final Collector c; final AbstractAllGroupsCollector<?> allGroupsCollector; if (doAllGroups) { allGroupsCollector = createAllGroupsCollector(c1, groupField); } else { allGroupsCollector = null; } final boolean useWrappingCollector = random().nextBoolean(); if (doCache) { final double maxCacheMB = random().nextDouble(); if (VERBOSE) { System.out.println("TEST: maxCacheMB=" + maxCacheMB); } if (useWrappingCollector) { if (doAllGroups) { cCache = CachingCollector.create(c1, true, maxCacheMB); c = MultiCollector.wrap(cCache, allGroupsCollector); } else { c = cCache = CachingCollector.create(c1, true, maxCacheMB); } } else { // Collect only into cache, then replay multiple times: c = cCache = CachingCollector.create(true, maxCacheMB); } } else { cCache = null; if (doAllGroups) { c = MultiCollector.wrap(c1, allGroupsCollector); } else { c = c1; } } // Search top reader: final 
Query query = new TermQuery(new Term("content", searchTerm)); s.search(query, c); if (doCache && !useWrappingCollector) { if (cCache.isCached()) { // Replay for first-pass grouping cCache.replay(c1); if (doAllGroups) { // Replay for all groups: cCache.replay(allGroupsCollector); } } else { // Replay by re-running search: s.search(query, c1); if (doAllGroups) { s.search(query, allGroupsCollector); } } } // Get 1st pass top groups final Collection<SearchGroup<BytesRef>> topGroups = getSearchGroups(c1, groupOffset, fillFields); final TopGroups<BytesRef> groupsResult; if (VERBOSE) { System.out.println("TEST: first pass topGroups"); if (topGroups == null) { System.out.println(" null"); } else { for (SearchGroup<BytesRef> searchGroup : topGroups) { System.out.println(" " + (searchGroup.groupValue == null ? "null" : searchGroup.groupValue) + ": " + Arrays.deepToString(searchGroup.sortValues)); } } } // Get 1st pass top groups using shards final TopGroups<BytesRef> topGroupsShards = searchShards(s, shards.subSearchers, query, groupSort, docSort, groupOffset, topNGroups, docOffset, docsPerGroup, getScores, getMaxScores, true, false); final AbstractSecondPassGroupingCollector<?> c2; if (topGroups != null) { if (VERBOSE) { System.out.println("TEST: topGroups"); for (SearchGroup<BytesRef> searchGroup : topGroups) { System.out.println(" " + (searchGroup.groupValue == null ? "null" : searchGroup.groupValue.utf8ToString()) + ": " + Arrays.deepToString(searchGroup.sortValues)); } } c2 = createSecondPassCollector(c1, groupField, groupSort, docSort, groupOffset, docOffset + docsPerGroup, getScores, getMaxScores, fillFields); if (doCache) { if (cCache.isCached()) { if (VERBOSE) { System.out.println("TEST: cache is intact"); } cCache.replay(c2); } else { if (VERBOSE) { System.out.println("TEST: cache was too large"); } s.search(query, c2); } } else { s.search(query, c2); } if (doAllGroups) { TopGroups<BytesRef> tempTopGroups = getTopGroups(c2, docOffset); groupsResult = new TopGroups<>(tempTopGroups, allGroupsCollector.getGroupCount()); } else { groupsResult = getTopGroups(c2, docOffset); } } else { c2 = null; groupsResult = null; if (VERBOSE) { System.out.println("TEST: no results"); } } final TopGroups<BytesRef> expectedGroups = slowGrouping(groupDocs, searchTerm, fillFields, getScores, getMaxScores, doAllGroups, groupSort, docSort, topNGroups, docsPerGroup, groupOffset, docOffset); if (VERBOSE) { if (expectedGroups == null) { System.out.println("TEST: no expected groups"); } else { System.out.println("TEST: expected groups totalGroupedHitCount=" + expectedGroups.totalGroupedHitCount); for(GroupDocs<BytesRef> gd : expectedGroups.groups) { System.out.println(" group=" + (gd.groupValue == null ? "null" : gd.groupValue) + " totalHits=" + gd.totalHits + " scoreDocs.len=" + gd.scoreDocs.length); for(ScoreDoc sd : gd.scoreDocs) { System.out.println(" id=" + sd.doc + " score=" + sd.score); } } } if (groupsResult == null) { System.out.println("TEST: no matched groups"); } else { System.out.println("TEST: matched groups totalGroupedHitCount=" + groupsResult.totalGroupedHitCount); for(GroupDocs<BytesRef> gd : groupsResult.groups) { System.out.println(" group=" + (gd.groupValue == null ? 
"null" : gd.groupValue) + " totalHits=" + gd.totalHits); for(ScoreDoc sd : gd.scoreDocs) { System.out.println(" id=" + docIDToID.get(sd.doc) + " score=" + sd.score); } } if (searchIter == 14) { for(int docIDX=0;docIDX<s.getIndexReader().maxDoc();docIDX++) { System.out.println("ID=" + docIDToID.get(docIDX) + " explain=" + s.explain(query, docIDX)); } } } if (topGroupsShards == null) { System.out.println("TEST: no matched-merged groups"); } else { System.out.println("TEST: matched-merged groups totalGroupedHitCount=" + topGroupsShards.totalGroupedHitCount); for(GroupDocs<BytesRef> gd : topGroupsShards.groups) { System.out.println(" group=" + (gd.groupValue == null ? "null" : gd.groupValue) + " totalHits=" + gd.totalHits); for(ScoreDoc sd : gd.scoreDocs) { System.out.println(" id=" + docIDToID.get(sd.doc) + " score=" + sd.score); } } } } assertEquals(docIDToID, expectedGroups, groupsResult, true, true, true, getScores, true); // Confirm merged shards match: assertEquals(docIDToID, expectedGroups, topGroupsShards, true, false, fillFields, getScores, true); if (topGroupsShards != null) { verifyShards(shards.docStarts, topGroupsShards); } final boolean needsScores = getScores || getMaxScores || docSort == null; final BlockGroupingCollector c3 = new BlockGroupingCollector(groupSort, groupOffset+topNGroups, needsScores, sBlocks.createNormalizedWeight(lastDocInBlock, false)); final TermAllGroupsCollector allGroupsCollector2; final Collector c4; if (doAllGroups) { // NOTE: must be "group" and not "group_dv" // (groupField) because we didn't index doc // values in the block index: allGroupsCollector2 = new TermAllGroupsCollector("group"); c4 = MultiCollector.wrap(c3, allGroupsCollector2); } else { allGroupsCollector2 = null; c4 = c3; } // Get block grouping result: sBlocks.search(query, c4); @SuppressWarnings({"unchecked","rawtypes"}) final TopGroups<BytesRef> tempTopGroupsBlocks = (TopGroups<BytesRef>) c3.getTopGroups(docSort, groupOffset, docOffset, docOffset+docsPerGroup, fillFields); final TopGroups<BytesRef> groupsResultBlocks; if (doAllGroups && tempTopGroupsBlocks != null) { assertEquals((int) tempTopGroupsBlocks.totalGroupCount, allGroupsCollector2.getGroupCount()); groupsResultBlocks = new TopGroups<>(tempTopGroupsBlocks, allGroupsCollector2.getGroupCount()); } else { groupsResultBlocks = tempTopGroupsBlocks; } if (VERBOSE) { if (groupsResultBlocks == null) { System.out.println("TEST: no block groups"); } else { System.out.println("TEST: block groups totalGroupedHitCount=" + groupsResultBlocks.totalGroupedHitCount); boolean first = true; for(GroupDocs<BytesRef> gd : groupsResultBlocks.groups) { System.out.println(" group=" + (gd.groupValue == null ? 
"null" : gd.groupValue.utf8ToString()) + " totalHits=" + gd.totalHits); for(ScoreDoc sd : gd.scoreDocs) { System.out.println(" id=" + docIDToIDBlocks.get(sd.doc) + " score=" + sd.score); if (first) { System.out.println("explain: " + sBlocks.explain(query, sd.doc)); first = false; } } } } } // Get shard'd block grouping result: final TopGroups<BytesRef> topGroupsBlockShards = searchShards(sBlocks, shardsBlocks.subSearchers, query, groupSort, docSort, groupOffset, topNGroups, docOffset, docsPerGroup, getScores, getMaxScores, false, false); if (expectedGroups != null) { // Fixup scores for reader2 for (GroupDocs<?> groupDocsHits : expectedGroups.groups) { for(ScoreDoc hit : groupDocsHits.scoreDocs) { final GroupDoc gd = groupDocsByID[hit.doc]; assertEquals(gd.id, hit.doc); //System.out.println("fixup score " + hit.score + " to " + gd.score2 + " vs " + gd.score); hit.score = gd.score2; } } final SortField[] sortFields = groupSort.getSort(); final Map<Float,Float> termScoreMap = scoreMap.get(searchTerm); for(int groupSortIDX=0;groupSortIDX<sortFields.length;groupSortIDX++) { if (sortFields[groupSortIDX].getType() == SortField.Type.SCORE) { for (GroupDocs<?> groupDocsHits : expectedGroups.groups) { if (groupDocsHits.groupSortValues != null) { //System.out.println("remap " + groupDocsHits.groupSortValues[groupSortIDX] + " to " + termScoreMap.get(groupDocsHits.groupSortValues[groupSortIDX])); groupDocsHits.groupSortValues[groupSortIDX] = termScoreMap.get(groupDocsHits.groupSortValues[groupSortIDX]); assertNotNull(groupDocsHits.groupSortValues[groupSortIDX]); } } } } final SortField[] docSortFields = docSort.getSort(); for(int docSortIDX=0;docSortIDX<docSortFields.length;docSortIDX++) { if (docSortFields[docSortIDX].getType() == SortField.Type.SCORE) { for (GroupDocs<?> groupDocsHits : expectedGroups.groups) { for(ScoreDoc _hit : groupDocsHits.scoreDocs) { FieldDoc hit = (FieldDoc) _hit; if (hit.fields != null) { hit.fields[docSortIDX] = termScoreMap.get(hit.fields[docSortIDX]); assertNotNull(hit.fields[docSortIDX]); } } } } } } assertEquals(docIDToIDBlocks, expectedGroups, groupsResultBlocks, false, true, true, getScores, false); assertEquals(docIDToIDBlocks, expectedGroups, topGroupsBlockShards, false, false, fillFields, getScores, false); } r.close(); dir.close(); rBlocks.close(); dirBlocks.close(); } } private void verifyShards(int[] docStarts, TopGroups<BytesRef> topGroups) { for(GroupDocs<?> group : topGroups.groups) { for(int hitIDX=0;hitIDX<group.scoreDocs.length;hitIDX++) { final ScoreDoc sd = group.scoreDocs[hitIDX]; assertEquals("doc=" + sd.doc + " wrong shard", ReaderUtil.subIndex(sd.doc, docStarts), sd.shardIndex); } } } private TopGroups<BytesRef> searchShards(IndexSearcher topSearcher, ShardSearcher[] subSearchers, Query query, Sort groupSort, Sort docSort, int groupOffset, int topNGroups, int docOffset, int topNDocs, boolean getScores, boolean getMaxScores, boolean canUseIDV, boolean preFlex) throws Exception { // TODO: swap in caching, all groups collector hereassertEquals(expected.totalHitCount, actual.totalHitCount); // too... 
if (VERBOSE) { System.out.println("TEST: " + subSearchers.length + " shards: " + Arrays.toString(subSearchers) + " canUseIDV=" + canUseIDV); } // Run 1st pass collector to get top groups per shard final Weight w = topSearcher.createNormalizedWeight(query, getScores); final List<Collection<SearchGroup<BytesRef>>> shardGroups = new ArrayList<>(); List<AbstractFirstPassGroupingCollector<?>> firstPassGroupingCollectors = new ArrayList<>(); AbstractFirstPassGroupingCollector<?> firstPassCollector = null; boolean shardsCanUseIDV; if (canUseIDV) { if (SlowCompositeReaderWrapper.class.isAssignableFrom(subSearchers[0].getIndexReader().getClass())) { shardsCanUseIDV = false; } else { shardsCanUseIDV = !preFlex; } } else { shardsCanUseIDV = false; } String groupField = "group"; for(int shardIDX=0;shardIDX<subSearchers.length;shardIDX++) { // First shard determines whether we use IDV or not; // all other shards match that: if (firstPassCollector == null) { firstPassCollector = createRandomFirstPassCollector(groupField, groupSort, groupOffset + topNGroups); } else { firstPassCollector = createFirstPassCollector(groupField, groupSort, groupOffset + topNGroups, firstPassCollector); } if (VERBOSE) { System.out.println(" shard=" + shardIDX + " groupField=" + groupField); System.out.println(" 1st pass collector=" + firstPassCollector); } firstPassGroupingCollectors.add(firstPassCollector); subSearchers[shardIDX].search(w, firstPassCollector); final Collection<SearchGroup<BytesRef>> topGroups = getSearchGroups(firstPassCollector, 0, true); if (topGroups != null) { if (VERBOSE) { System.out.println(" shard " + shardIDX + " s=" + subSearchers[shardIDX] + " totalGroupedHitCount=?" + " " + topGroups.size() + " groups:"); for(SearchGroup<BytesRef> group : topGroups) { System.out.println(" " + groupToString(group.groupValue) + " groupSort=" + Arrays.toString(group.sortValues)); } } shardGroups.add(topGroups); } } final Collection<SearchGroup<BytesRef>> mergedTopGroups = SearchGroup.merge(shardGroups, groupOffset, topNGroups, groupSort); if (VERBOSE) { System.out.println(" top groups merged:"); if (mergedTopGroups == null) { System.out.println(" null"); } else { System.out.println(" " + mergedTopGroups.size() + " top groups:"); for(SearchGroup<BytesRef> group : mergedTopGroups) { System.out.println(" [" + groupToString(group.groupValue) + "] groupSort=" + Arrays.toString(group.sortValues)); } } } if (mergedTopGroups != null) { // Now 2nd pass: @SuppressWarnings({"unchecked","rawtypes"}) final TopGroups<BytesRef>[] shardTopGroups = new TopGroups[subSearchers.length]; for(int shardIDX=0;shardIDX<subSearchers.length;shardIDX++) { final AbstractSecondPassGroupingCollector<?> secondPassCollector = createSecondPassCollector(firstPassGroupingCollectors.get(shardIDX), groupField, mergedTopGroups, groupSort, docSort, docOffset + topNDocs, getScores, getMaxScores, true); subSearchers[shardIDX].search(w, secondPassCollector); shardTopGroups[shardIDX] = getTopGroups(secondPassCollector, 0); if (VERBOSE) { System.out.println(" " + shardTopGroups[shardIDX].groups.length + " shard[" + shardIDX + "] groups:"); for(GroupDocs<BytesRef> group : shardTopGroups[shardIDX].groups) { System.out.println(" [" + groupToString(group.groupValue) + "] groupSort=" + Arrays.toString(group.groupSortValues) + " numDocs=" + group.scoreDocs.length); } } } TopGroups<BytesRef> mergedGroups = TopGroups.merge(shardTopGroups, groupSort, docSort, docOffset, topNDocs, TopGroups.ScoreMergeMode.None); if (VERBOSE) { System.out.println(" " + 
mergedGroups.groups.length + " merged groups:"); for(GroupDocs<BytesRef> group : mergedGroups.groups) { System.out.println(" [" + groupToString(group.groupValue) + "] groupSort=" + Arrays.toString(group.groupSortValues) + " numDocs=" + group.scoreDocs.length); } } return mergedGroups; } else { return null; } } private void assertEquals(NumericDocValues docIDtoID, TopGroups<BytesRef> expected, TopGroups<BytesRef> actual, boolean verifyGroupValues, boolean verifyTotalGroupCount, boolean verifySortValues, boolean testScores, boolean idvBasedImplsUsed) { if (expected == null) { assertNull(actual); return; } assertNotNull(actual); assertEquals("expected.groups.length != actual.groups.length", expected.groups.length, actual.groups.length); assertEquals("expected.totalHitCount != actual.totalHitCount", expected.totalHitCount, actual.totalHitCount); assertEquals("expected.totalGroupedHitCount != actual.totalGroupedHitCount", expected.totalGroupedHitCount, actual.totalGroupedHitCount); if (expected.totalGroupCount != null && verifyTotalGroupCount) { assertEquals("expected.totalGroupCount != actual.totalGroupCount", expected.totalGroupCount, actual.totalGroupCount); } for(int groupIDX=0;groupIDX<expected.groups.length;groupIDX++) { if (VERBOSE) { System.out.println(" check groupIDX=" + groupIDX); } final GroupDocs<BytesRef> expectedGroup = expected.groups[groupIDX]; final GroupDocs<BytesRef> actualGroup = actual.groups[groupIDX]; if (verifyGroupValues) { if (idvBasedImplsUsed) { if (actualGroup.groupValue.length == 0) { assertNull(expectedGroup.groupValue); } else { assertEquals(expectedGroup.groupValue, actualGroup.groupValue); } } else { assertEquals(expectedGroup.groupValue, actualGroup.groupValue); } } if (verifySortValues) { assertArrayEquals(expectedGroup.groupSortValues, actualGroup.groupSortValues); } // TODO // assertEquals(expectedGroup.maxScore, actualGroup.maxScore); assertEquals(expectedGroup.totalHits, actualGroup.totalHits); final ScoreDoc[] expectedFDs = expectedGroup.scoreDocs; final ScoreDoc[] actualFDs = actualGroup.scoreDocs; assertEquals(expectedFDs.length, actualFDs.length); for(int docIDX=0;docIDX<expectedFDs.length;docIDX++) { final FieldDoc expectedFD = (FieldDoc) expectedFDs[docIDX]; final FieldDoc actualFD = (FieldDoc) actualFDs[docIDX]; //System.out.println(" actual doc=" + docIDtoID.get(actualFD.doc) + " score=" + actualFD.score); assertEquals(expectedFD.doc, docIDtoID.get(actualFD.doc)); if (testScores) { assertEquals(expectedFD.score, actualFD.score, 0.1); } else { // TODO: too anal for now //assertEquals(Float.NaN, actualFD.score); } if (verifySortValues) { assertArrayEquals(expectedFD.fields, actualFD.fields); } } } } private static class ShardSearcher extends IndexSearcher { private final List<LeafReaderContext> ctx; public ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); this.ctx = Collections.singletonList(ctx); } public void search(Weight weight, Collector collector) throws IOException { search(ctx, weight, collector); } @Override public String toString() { return "ShardSearcher(" + ctx.get(0).reader() + ")"; } } private static class ValueHolder<V> { V value; private ValueHolder(V value) { this.value = value; } } }
lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.search.grouping; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.Term; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.BytesRefFieldSource; import org.apache.lucene.search.CachingCollector; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.Weight; import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector; import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector; import org.apache.lucene.search.grouping.function.FunctionSecondPassGroupingCollector; import org.apache.lucene.search.grouping.term.TermAllGroupsCollector; import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector; import org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.mutable.MutableValue; import org.apache.lucene.util.mutable.MutableValueStr; // TODO // - should test relevance sort too // - test null // - test ties // - test compound sort public class TestGrouping extends LuceneTestCase { public void testBasic() throws Exception { String groupField = "author"; 
FieldType customType = new FieldType(); customType.setStored(true); Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); // 0 Document doc = new Document(); addGroupField(doc, groupField, "author1"); doc.add(new TextField("content", "random text", Field.Store.YES)); doc.add(new Field("id", "1", customType)); w.addDocument(doc); // 1 doc = new Document(); addGroupField(doc, groupField, "author1"); doc.add(new TextField("content", "some more random text", Field.Store.YES)); doc.add(new Field("id", "2", customType)); w.addDocument(doc); // 2 doc = new Document(); addGroupField(doc, groupField, "author1"); doc.add(new TextField("content", "some more random textual data", Field.Store.YES)); doc.add(new Field("id", "3", customType)); w.addDocument(doc); // 3 doc = new Document(); addGroupField(doc, groupField, "author2"); doc.add(new TextField("content", "some random text", Field.Store.YES)); doc.add(new Field("id", "4", customType)); w.addDocument(doc); // 4 doc = new Document(); addGroupField(doc, groupField, "author3"); doc.add(new TextField("content", "some more random text", Field.Store.YES)); doc.add(new Field("id", "5", customType)); w.addDocument(doc); // 5 doc = new Document(); addGroupField(doc, groupField, "author3"); doc.add(new TextField("content", "random", Field.Store.YES)); doc.add(new Field("id", "6", customType)); w.addDocument(doc); // 6 -- no author field doc = new Document(); doc.add(new TextField("content", "random word stuck in alot of other text", Field.Store.YES)); doc.add(new Field("id", "6", customType)); w.addDocument(doc); IndexSearcher indexSearcher = newSearcher(w.getReader()); w.close(); final Sort groupSort = Sort.RELEVANCE; final AbstractFirstPassGroupingCollector<?> c1 = createRandomFirstPassCollector(groupField, groupSort, 10); indexSearcher.search(new TermQuery(new Term("content", "random")), c1); final AbstractSecondPassGroupingCollector<?> c2 = createSecondPassCollector(c1, groupField, groupSort, Sort.RELEVANCE, 0, 5, true, true, true); indexSearcher.search(new TermQuery(new Term("content", "random")), c2); final TopGroups<?> groups = c2.getTopGroups(0); assertFalse(Float.isNaN(groups.maxScore)); assertEquals(7, groups.totalHitCount); assertEquals(7, groups.totalGroupedHitCount); assertEquals(4, groups.groups.length); // relevance order: 5, 0, 3, 4, 1, 2, 6 // the later a document is added the higher this docId // value GroupDocs<?> group = groups.groups[0]; compareGroupValue("author3", group); assertEquals(2, group.scoreDocs.length); assertEquals(5, group.scoreDocs[0].doc); assertEquals(4, group.scoreDocs[1].doc); assertTrue(group.scoreDocs[0].score > group.scoreDocs[1].score); group = groups.groups[1]; compareGroupValue("author1", group); assertEquals(3, group.scoreDocs.length); assertEquals(0, group.scoreDocs[0].doc); assertEquals(1, group.scoreDocs[1].doc); assertEquals(2, group.scoreDocs[2].doc); assertTrue(group.scoreDocs[0].score > group.scoreDocs[1].score); assertTrue(group.scoreDocs[1].score > group.scoreDocs[2].score); group = groups.groups[2]; compareGroupValue("author2", group); assertEquals(1, group.scoreDocs.length); assertEquals(3, group.scoreDocs[0].doc); group = groups.groups[3]; compareGroupValue(null, group); assertEquals(1, group.scoreDocs.length); assertEquals(6, group.scoreDocs[0].doc); indexSearcher.getIndexReader().close(); dir.close(); } private void addGroupField(Document doc, String groupField, String 
value) { doc.add(new SortedDocValuesField(groupField, new BytesRef(value))); } private AbstractFirstPassGroupingCollector<?> createRandomFirstPassCollector(String groupField, Sort groupSort, int topDocs) throws IOException { AbstractFirstPassGroupingCollector<?> selected; if (random().nextBoolean()) { ValueSource vs = new BytesRefFieldSource(groupField); selected = new FunctionFirstPassGroupingCollector(vs, new HashMap<>(), groupSort, topDocs); } else { selected = new TermFirstPassGroupingCollector(groupField, groupSort, topDocs); } if (VERBOSE) { System.out.println("Selected implementation: " + selected.getClass().getName()); } return selected; } private AbstractFirstPassGroupingCollector<?> createFirstPassCollector(String groupField, Sort groupSort, int topDocs, AbstractFirstPassGroupingCollector<?> firstPassGroupingCollector) throws IOException { if (TermFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) { ValueSource vs = new BytesRefFieldSource(groupField); return new FunctionFirstPassGroupingCollector(vs, new HashMap<>(), groupSort, topDocs); } else { return new TermFirstPassGroupingCollector(groupField, groupSort, topDocs); } } @SuppressWarnings({"unchecked","rawtypes"}) private <T> AbstractSecondPassGroupingCollector<T> createSecondPassCollector(AbstractFirstPassGroupingCollector firstPassGroupingCollector, String groupField, Sort groupSort, Sort sortWithinGroup, int groupOffset, int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) throws IOException { if (TermFirstPassGroupingCollector.class.isAssignableFrom(firstPassGroupingCollector.getClass())) { Collection<SearchGroup<BytesRef>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields); return (AbstractSecondPassGroupingCollector) new TermSecondPassGroupingCollector(groupField, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields); } else { ValueSource vs = new BytesRefFieldSource(groupField); Collection<SearchGroup<MutableValue>> searchGroups = firstPassGroupingCollector.getTopGroups(groupOffset, fillSortFields); return (AbstractSecondPassGroupingCollector) new FunctionSecondPassGroupingCollector(searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, vs, new HashMap()); } } // Basically converts searchGroups from MutableValue to BytesRef if grouping by ValueSource @SuppressWarnings("unchecked") private AbstractSecondPassGroupingCollector<?> createSecondPassCollector(AbstractFirstPassGroupingCollector<?> firstPassGroupingCollector, String groupField, Collection<SearchGroup<BytesRef>> searchGroups, Sort groupSort, Sort sortWithinGroup, int maxDocsPerGroup, boolean getScores, boolean getMaxScores, boolean fillSortFields) throws IOException { if (firstPassGroupingCollector.getClass().isAssignableFrom(TermFirstPassGroupingCollector.class)) { return new TermSecondPassGroupingCollector(groupField, searchGroups, groupSort, sortWithinGroup, maxDocsPerGroup , getScores, getMaxScores, fillSortFields); } else { ValueSource vs = new BytesRefFieldSource(groupField); List<SearchGroup<MutableValue>> mvalSearchGroups = new ArrayList<>(searchGroups.size()); for (SearchGroup<BytesRef> mergedTopGroup : searchGroups) { SearchGroup<MutableValue> sg = new SearchGroup<>(); MutableValueStr groupValue = new MutableValueStr(); if (mergedTopGroup.groupValue != null) { groupValue.value.copyBytes(mergedTopGroup.groupValue); } else { groupValue.exists = false; } 
sg.groupValue = groupValue; sg.sortValues = mergedTopGroup.sortValues; mvalSearchGroups.add(sg); } return new FunctionSecondPassGroupingCollector(mvalSearchGroups, groupSort, sortWithinGroup, maxDocsPerGroup, getScores, getMaxScores, fillSortFields, vs, new HashMap<>()); } } private AbstractAllGroupsCollector<?> createAllGroupsCollector(AbstractFirstPassGroupingCollector<?> firstPassGroupingCollector, String groupField) { if (firstPassGroupingCollector.getClass().isAssignableFrom(TermFirstPassGroupingCollector.class)) { return new TermAllGroupsCollector(groupField); } else { ValueSource vs = new BytesRefFieldSource(groupField); return new FunctionAllGroupsCollector(vs, new HashMap<>()); } } private void compareGroupValue(String expected, GroupDocs<?> group) { if (expected == null) { if (group.groupValue == null) { return; } else if (group.groupValue.getClass().isAssignableFrom(MutableValueStr.class)) { return; } else if (((BytesRef) group.groupValue).length == 0) { return; } fail(); } if (group.groupValue.getClass().isAssignableFrom(BytesRef.class)) { assertEquals(new BytesRef(expected), group.groupValue); } else if (group.groupValue.getClass().isAssignableFrom(MutableValueStr.class)) { MutableValueStr v = new MutableValueStr(); v.value.copyChars(expected); assertEquals(v, group.groupValue); } else { fail(); } } private Collection<SearchGroup<BytesRef>> getSearchGroups(AbstractFirstPassGroupingCollector<?> c, int groupOffset, boolean fillFields) { if (TermFirstPassGroupingCollector.class.isAssignableFrom(c.getClass())) { return ((TermFirstPassGroupingCollector) c).getTopGroups(groupOffset, fillFields); } else if (FunctionFirstPassGroupingCollector.class.isAssignableFrom(c.getClass())) { Collection<SearchGroup<MutableValue>> mutableValueGroups = ((FunctionFirstPassGroupingCollector) c).getTopGroups(groupOffset, fillFields); if (mutableValueGroups == null) { return null; } List<SearchGroup<BytesRef>> groups = new ArrayList<>(mutableValueGroups.size()); for (SearchGroup<MutableValue> mutableValueGroup : mutableValueGroups) { SearchGroup<BytesRef> sg = new SearchGroup<>(); sg.groupValue = mutableValueGroup.groupValue.exists() ? ((MutableValueStr) mutableValueGroup.groupValue).value.get() : null; sg.sortValues = mutableValueGroup.sortValues; groups.add(sg); } return groups; } fail(); return null; } @SuppressWarnings({"unchecked", "rawtypes"}) private TopGroups<BytesRef> getTopGroups(AbstractSecondPassGroupingCollector c, int withinGroupOffset) { if (c.getClass().isAssignableFrom(TermSecondPassGroupingCollector.class)) { return ((TermSecondPassGroupingCollector) c).getTopGroups(withinGroupOffset); } else if (c.getClass().isAssignableFrom(FunctionSecondPassGroupingCollector.class)) { TopGroups<MutableValue> mvalTopGroups = ((FunctionSecondPassGroupingCollector) c).getTopGroups(withinGroupOffset); List<GroupDocs<BytesRef>> groups = new ArrayList<>(mvalTopGroups.groups.length); for (GroupDocs<MutableValue> mvalGd : mvalTopGroups.groups) { BytesRef groupValue = mvalGd.groupValue.exists() ? 
((MutableValueStr) mvalGd.groupValue).value.get() : null; groups.add(new GroupDocs<>(Float.NaN, mvalGd.maxScore, mvalGd.totalHits, mvalGd.scoreDocs, groupValue, mvalGd.groupSortValues)); } // NOTE: currenlty using diamond operator on MergedIterator (without explicit Term class) causes // errors on Eclipse Compiler (ecj) used for javadoc lint return new TopGroups<BytesRef>(mvalTopGroups.groupSort, mvalTopGroups.withinGroupSort, mvalTopGroups.totalHitCount, mvalTopGroups.totalGroupedHitCount, groups.toArray(new GroupDocs[groups.size()]), Float.NaN); } fail(); return null; } private static class GroupDoc { final int id; final BytesRef group; final BytesRef sort1; final BytesRef sort2; // content must be "realN ..." final String content; float score; float score2; public GroupDoc(int id, BytesRef group, BytesRef sort1, BytesRef sort2, String content) { this.id = id; this.group = group; this.sort1 = sort1; this.sort2 = sort2; this.content = content; } } private Sort getRandomSort() { final List<SortField> sortFields = new ArrayList<>(); if (random().nextInt(7) == 2) { sortFields.add(SortField.FIELD_SCORE); } else { if (random().nextBoolean()) { if (random().nextBoolean()) { sortFields.add(new SortField("sort1", SortField.Type.STRING, random().nextBoolean())); } else { sortFields.add(new SortField("sort2", SortField.Type.STRING, random().nextBoolean())); } } else if (random().nextBoolean()) { sortFields.add(new SortField("sort1", SortField.Type.STRING, random().nextBoolean())); sortFields.add(new SortField("sort2", SortField.Type.STRING, random().nextBoolean())); } } // Break ties: sortFields.add(new SortField("id", SortField.Type.INT)); return new Sort(sortFields.toArray(new SortField[sortFields.size()])); } private Comparator<GroupDoc> getComparator(Sort sort) { final SortField[] sortFields = sort.getSort(); return new Comparator<GroupDoc>() { @Override public int compare(GroupDoc d1, GroupDoc d2) { for(SortField sf : sortFields) { final int cmp; if (sf.getType() == SortField.Type.SCORE) { if (d1.score > d2.score) { cmp = -1; } else if (d1.score < d2.score) { cmp = 1; } else { cmp = 0; } } else if (sf.getField().equals("sort1")) { cmp = d1.sort1.compareTo(d2.sort1); } else if (sf.getField().equals("sort2")) { cmp = d1.sort2.compareTo(d2.sort2); } else { assertEquals(sf.getField(), "id"); cmp = d1.id - d2.id; } if (cmp != 0) { return sf.getReverse() ? 
-cmp : cmp; } } // Our sort always fully tie breaks: fail(); return 0; } }; } @SuppressWarnings({"unchecked","rawtypes"}) private Comparable<?>[] fillFields(GroupDoc d, Sort sort) { final SortField[] sortFields = sort.getSort(); final Comparable<?>[] fields = new Comparable[sortFields.length]; for(int fieldIDX=0;fieldIDX<sortFields.length;fieldIDX++) { final Comparable<?> c; final SortField sf = sortFields[fieldIDX]; if (sf.getType() == SortField.Type.SCORE) { c = d.score; } else if (sf.getField().equals("sort1")) { c = d.sort1; } else if (sf.getField().equals("sort2")) { c = d.sort2; } else { assertEquals("id", sf.getField()); c = d.id; } fields[fieldIDX] = c; } return fields; } private String groupToString(BytesRef b) { if (b == null) { return "null"; } else { return b.utf8ToString(); } } private TopGroups<BytesRef> slowGrouping(GroupDoc[] groupDocs, String searchTerm, boolean fillFields, boolean getScores, boolean getMaxScores, boolean doAllGroups, Sort groupSort, Sort docSort, int topNGroups, int docsPerGroup, int groupOffset, int docOffset) { final Comparator<GroupDoc> groupSortComp = getComparator(groupSort); Arrays.sort(groupDocs, groupSortComp); final HashMap<BytesRef,List<GroupDoc>> groups = new HashMap<>(); final List<BytesRef> sortedGroups = new ArrayList<>(); final List<Comparable<?>[]> sortedGroupFields = new ArrayList<>(); int totalHitCount = 0; Set<BytesRef> knownGroups = new HashSet<>(); //System.out.println("TEST: slowGrouping"); for(GroupDoc d : groupDocs) { // TODO: would be better to filter by searchTerm before sorting! if (!d.content.startsWith(searchTerm)) { continue; } totalHitCount++; //System.out.println(" match id=" + d.id + " score=" + d.score); if (doAllGroups) { if (!knownGroups.contains(d.group)) { knownGroups.add(d.group); //System.out.println(" add group=" + groupToString(d.group)); } } List<GroupDoc> l = groups.get(d.group); if (l == null) { //System.out.println(" add sortedGroup=" + groupToString(d.group)); sortedGroups.add(d.group); if (fillFields) { sortedGroupFields.add(fillFields(d, groupSort)); } l = new ArrayList<>(); groups.put(d.group, l); } l.add(d); } if (groupOffset >= sortedGroups.size()) { // slice is out of bounds return null; } final int limit = Math.min(groupOffset + topNGroups, groups.size()); final Comparator<GroupDoc> docSortComp = getComparator(docSort); @SuppressWarnings({"unchecked","rawtypes"}) final GroupDocs<BytesRef>[] result = new GroupDocs[limit-groupOffset]; int totalGroupedHitCount = 0; for(int idx=groupOffset;idx < limit;idx++) { final BytesRef group = sortedGroups.get(idx); final List<GroupDoc> docs = groups.get(group); totalGroupedHitCount += docs.size(); Collections.sort(docs, docSortComp); final ScoreDoc[] hits; if (docs.size() > docOffset) { final int docIDXLimit = Math.min(docOffset + docsPerGroup, docs.size()); hits = new ScoreDoc[docIDXLimit - docOffset]; for(int docIDX=docOffset; docIDX < docIDXLimit; docIDX++) { final GroupDoc d = docs.get(docIDX); final FieldDoc fd; if (fillFields) { fd = new FieldDoc(d.id, getScores ? d.score : Float.NaN, fillFields(d, docSort)); } else { fd = new FieldDoc(d.id, getScores ? d.score : Float.NaN); } hits[docIDX-docOffset] = fd; } } else { hits = new ScoreDoc[0]; } result[idx-groupOffset] = new GroupDocs<>(Float.NaN, 0.0f, docs.size(), hits, group, fillFields ? 
sortedGroupFields.get(idx) : null); } if (doAllGroups) { return new TopGroups<>( new TopGroups<>(groupSort.getSort(), docSort.getSort(), totalHitCount, totalGroupedHitCount, result, Float.NaN), knownGroups.size() ); } else { return new TopGroups<>(groupSort.getSort(), docSort.getSort(), totalHitCount, totalGroupedHitCount, result, Float.NaN); } } private DirectoryReader getDocBlockReader(Directory dir, GroupDoc[] groupDocs) throws IOException { // Coalesce by group, but in random order: Collections.shuffle(Arrays.asList(groupDocs), random()); final Map<BytesRef,List<GroupDoc>> groupMap = new HashMap<>(); final List<BytesRef> groupValues = new ArrayList<>(); for(GroupDoc groupDoc : groupDocs) { if (!groupMap.containsKey(groupDoc.group)) { groupValues.add(groupDoc.group); groupMap.put(groupDoc.group, new ArrayList<GroupDoc>()); } groupMap.get(groupDoc.group).add(groupDoc); } RandomIndexWriter w = new RandomIndexWriter( random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); final List<List<Document>> updateDocs = new ArrayList<>(); FieldType groupEndType = new FieldType(StringField.TYPE_NOT_STORED); groupEndType.setIndexOptions(IndexOptions.DOCS); groupEndType.setOmitNorms(true); //System.out.println("TEST: index groups"); for(BytesRef group : groupValues) { final List<Document> docs = new ArrayList<>(); //System.out.println("TEST: group=" + (group == null ? "null" : group.utf8ToString())); for(GroupDoc groupValue : groupMap.get(group)) { Document doc = new Document(); docs.add(doc); if (groupValue.group != null) { doc.add(newStringField("group", groupValue.group.utf8ToString(), Field.Store.YES)); doc.add(new SortedDocValuesField("group", BytesRef.deepCopyOf(groupValue.group))); } doc.add(newStringField("sort1", groupValue.sort1.utf8ToString(), Field.Store.NO)); doc.add(new SortedDocValuesField("sort1", BytesRef.deepCopyOf(groupValue.sort1))); doc.add(newStringField("sort2", groupValue.sort2.utf8ToString(), Field.Store.NO)); doc.add(new SortedDocValuesField("sort2", BytesRef.deepCopyOf(groupValue.sort2))); doc.add(new NumericDocValuesField("id", groupValue.id)); doc.add(newTextField("content", groupValue.content, Field.Store.NO)); //System.out.println("TEST: doc content=" + groupValue.content + " group=" + (groupValue.group == null ? 
"null" : groupValue.group.utf8ToString()) + " sort1=" + groupValue.sort1.utf8ToString() + " id=" + groupValue.id); } // So we can pull filter marking last doc in block: final Field groupEnd = newField("groupend", "x", groupEndType); docs.get(docs.size()-1).add(groupEnd); // Add as a doc block: w.addDocuments(docs); if (group != null && random().nextInt(7) == 4) { updateDocs.add(docs); } } for(List<Document> docs : updateDocs) { // Just replaces docs w/ same docs: w.updateDocuments(new Term("group", docs.get(0).get("group")), docs); } final DirectoryReader r = w.getReader(); w.close(); return r; } private static class ShardState { public final ShardSearcher[] subSearchers; public final int[] docStarts; public ShardState(IndexSearcher s) { final IndexReaderContext ctx = s.getTopReaderContext(); final List<LeafReaderContext> leaves = ctx.leaves(); subSearchers = new ShardSearcher[leaves.size()]; for(int searcherIDX=0;searcherIDX<subSearchers.length;searcherIDX++) { subSearchers[searcherIDX] = new ShardSearcher(leaves.get(searcherIDX), ctx); } docStarts = new int[subSearchers.length]; for(int subIDX=0;subIDX<docStarts.length;subIDX++) { docStarts[subIDX] = leaves.get(subIDX).docBase; //System.out.println("docStarts[" + subIDX + "]=" + docStarts[subIDX]); } } } public void testRandom() throws Exception { int numberOfRuns = TestUtil.nextInt(random(), 3, 6); for (int iter=0; iter<numberOfRuns; iter++) { if (VERBOSE) { System.out.println("TEST: iter=" + iter); } final int numDocs = TestUtil.nextInt(random(), 100, 1000) * RANDOM_MULTIPLIER; //final int numDocs = _TestUtil.nextInt(random, 5, 20); final int numGroups = TestUtil.nextInt(random(), 1, numDocs); if (VERBOSE) { System.out.println("TEST: numDocs=" + numDocs + " numGroups=" + numGroups); } final List<BytesRef> groups = new ArrayList<>(); for(int i=0;i<numGroups;i++) { String randomValue; do { // B/c of DV based impl we can't see the difference between an empty string and a null value. // For that reason we don't generate empty string // groups. 
randomValue = TestUtil.randomRealisticUnicodeString(random()); //randomValue = TestUtil.randomSimpleString(random()); } while ("".equals(randomValue)); groups.add(new BytesRef(randomValue)); } final String[] contentStrings = new String[TestUtil.nextInt(random(), 2, 20)]; if (VERBOSE) { System.out.println("TEST: create fake content"); } for(int contentIDX=0;contentIDX<contentStrings.length;contentIDX++) { final StringBuilder sb = new StringBuilder(); sb.append("real").append(random().nextInt(3)).append(' '); final int fakeCount = random().nextInt(10); for(int fakeIDX=0;fakeIDX<fakeCount;fakeIDX++) { sb.append("fake "); } contentStrings[contentIDX] = sb.toString(); if (VERBOSE) { System.out.println(" content=" + sb.toString()); } } Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); Document docNoGroup = new Document(); Field idvGroupField = new SortedDocValuesField("group", new BytesRef()); doc.add(idvGroupField); docNoGroup.add(idvGroupField); Field group = newStringField("group", "", Field.Store.NO); doc.add(group); Field sort1 = new SortedDocValuesField("sort1", new BytesRef()); doc.add(sort1); docNoGroup.add(sort1); Field sort2 = new SortedDocValuesField("sort2", new BytesRef()); doc.add(sort2); docNoGroup.add(sort2); Field content = newTextField("content", "", Field.Store.NO); doc.add(content); docNoGroup.add(content); NumericDocValuesField idDV = new NumericDocValuesField("id", 0); doc.add(idDV); docNoGroup.add(idDV); final GroupDoc[] groupDocs = new GroupDoc[numDocs]; for(int i=0;i<numDocs;i++) { final BytesRef groupValue; if (random().nextInt(24) == 17) { // So we test the "doc doesn't have the group'd // field" case: groupValue = null; } else { groupValue = groups.get(random().nextInt(groups.size())); } final GroupDoc groupDoc = new GroupDoc(i, groupValue, groups.get(random().nextInt(groups.size())), groups.get(random().nextInt(groups.size())), contentStrings[random().nextInt(contentStrings.length)]); if (VERBOSE) { System.out.println(" doc content=" + groupDoc.content + " id=" + i + " group=" + (groupDoc.group == null ? 
"null" : groupDoc.group.utf8ToString()) + " sort1=" + groupDoc.sort1.utf8ToString() + " sort2=" + groupDoc.sort2.utf8ToString()); } groupDocs[i] = groupDoc; if (groupDoc.group != null) { group.setStringValue(groupDoc.group.utf8ToString()); idvGroupField.setBytesValue(BytesRef.deepCopyOf(groupDoc.group)); } else { // TODO: not true // Must explicitly set empty string, else eg if // the segment has all docs missing the field then // we get null back instead of empty BytesRef: idvGroupField.setBytesValue(new BytesRef()); } sort1.setBytesValue(BytesRef.deepCopyOf(groupDoc.sort1)); sort2.setBytesValue(BytesRef.deepCopyOf(groupDoc.sort2)); content.setStringValue(groupDoc.content); idDV.setLongValue(groupDoc.id); if (groupDoc.group == null) { w.addDocument(docNoGroup); } else { w.addDocument(doc); } } final GroupDoc[] groupDocsByID = new GroupDoc[groupDocs.length]; System.arraycopy(groupDocs, 0, groupDocsByID, 0, groupDocs.length); final DirectoryReader r = w.getReader(); w.close(); final NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id"); DirectoryReader rBlocks = null; Directory dirBlocks = null; final IndexSearcher s = newSearcher(r); if (VERBOSE) { System.out.println("\nTEST: searcher=" + s); } final ShardState shards = new ShardState(s); for(int contentID=0;contentID<3;contentID++) { final ScoreDoc[] hits = s.search(new TermQuery(new Term("content", "real"+contentID)), numDocs).scoreDocs; for(ScoreDoc hit : hits) { final GroupDoc gd = groupDocs[(int) docIDToID.get(hit.doc)]; assertTrue(gd.score == 0.0); gd.score = hit.score; assertEquals(gd.id, docIDToID.get(hit.doc)); } } for(GroupDoc gd : groupDocs) { assertTrue(gd.score != 0.0); } // Build 2nd index, where docs are added in blocks by // group, so we can use single pass collector dirBlocks = newDirectory(); rBlocks = getDocBlockReader(dirBlocks, groupDocs); final Query lastDocInBlock = new TermQuery(new Term("groupend", "x")); final NumericDocValues docIDToIDBlocks = MultiDocValues.getNumericValues(rBlocks, "id"); assertNotNull(docIDToIDBlocks); final IndexSearcher sBlocks = newSearcher(rBlocks); final ShardState shardsBlocks = new ShardState(sBlocks); // ReaderBlocks only increases maxDoc() vs reader, which // means a monotonic shift in scores, so we can // reliably remap them w/ Map: final Map<String,Map<Float,Float>> scoreMap = new HashMap<>(); // Tricky: must separately set .score2, because the doc // block index was created with possible deletions! 
//System.out.println("fixup score2"); for(int contentID=0;contentID<3;contentID++) { //System.out.println(" term=real" + contentID); final Map<Float,Float> termScoreMap = new HashMap<>(); scoreMap.put("real"+contentID, termScoreMap); //System.out.println("term=real" + contentID + " dfold=" + s.docFreq(new Term("content", "real"+contentID)) + //" dfnew=" + sBlocks.docFreq(new Term("content", "real"+contentID))); final ScoreDoc[] hits = sBlocks.search(new TermQuery(new Term("content", "real"+contentID)), numDocs).scoreDocs; for(ScoreDoc hit : hits) { final GroupDoc gd = groupDocsByID[(int) docIDToIDBlocks.get(hit.doc)]; assertTrue(gd.score2 == 0.0); gd.score2 = hit.score; assertEquals(gd.id, docIDToIDBlocks.get(hit.doc)); //System.out.println(" score=" + gd.score + " score2=" + hit.score + " id=" + docIDToIDBlocks.get(hit.doc)); termScoreMap.put(gd.score, gd.score2); } } for(int searchIter=0;searchIter<100;searchIter++) { if (VERBOSE) { System.out.println("\nTEST: searchIter=" + searchIter); } final String searchTerm = "real" + random().nextInt(3); final boolean fillFields = random().nextBoolean(); boolean getScores = random().nextBoolean(); final boolean getMaxScores = random().nextBoolean(); final Sort groupSort = getRandomSort(); //final Sort groupSort = new Sort(new SortField[] {new SortField("sort1", SortField.STRING), new SortField("id", SortField.INT)}); final Sort docSort = getRandomSort(); getScores |= (groupSort.needsScores() || docSort.needsScores()); final int topNGroups = TestUtil.nextInt(random(), 1, 30); //final int topNGroups = 10; final int docsPerGroup = TestUtil.nextInt(random(), 1, 50); final int groupOffset = TestUtil.nextInt(random(), 0, (topNGroups - 1) / 2); //final int groupOffset = 0; final int docOffset = TestUtil.nextInt(random(), 0, docsPerGroup - 1); //final int docOffset = 0; final boolean doCache = random().nextBoolean(); final boolean doAllGroups = random().nextBoolean(); if (VERBOSE) { System.out.println("TEST: groupSort=" + groupSort + " docSort=" + docSort + " searchTerm=" + searchTerm + " dF=" + r.docFreq(new Term("content", searchTerm)) +" dFBlock=" + rBlocks.docFreq(new Term("content", searchTerm)) + " topNGroups=" + topNGroups + " groupOffset=" + groupOffset + " docOffset=" + docOffset + " doCache=" + doCache + " docsPerGroup=" + docsPerGroup + " doAllGroups=" + doAllGroups + " getScores=" + getScores + " getMaxScores=" + getMaxScores); } String groupField = "group"; if (VERBOSE) { System.out.println(" groupField=" + groupField); } final AbstractFirstPassGroupingCollector<?> c1 = createRandomFirstPassCollector(groupField, groupSort, groupOffset+topNGroups); final CachingCollector cCache; final Collector c; final AbstractAllGroupsCollector<?> allGroupsCollector; if (doAllGroups) { allGroupsCollector = createAllGroupsCollector(c1, groupField); } else { allGroupsCollector = null; } final boolean useWrappingCollector = random().nextBoolean(); if (doCache) { final double maxCacheMB = random().nextDouble(); if (VERBOSE) { System.out.println("TEST: maxCacheMB=" + maxCacheMB); } if (useWrappingCollector) { if (doAllGroups) { cCache = CachingCollector.create(c1, true, maxCacheMB); c = MultiCollector.wrap(cCache, allGroupsCollector); } else { c = cCache = CachingCollector.create(c1, true, maxCacheMB); } } else { // Collect only into cache, then replay multiple times: c = cCache = CachingCollector.create(true, maxCacheMB); } } else { cCache = null; if (doAllGroups) { c = MultiCollector.wrap(c1, allGroupsCollector); } else { c = c1; } } // Search top reader: final 
Query query = new TermQuery(new Term("content", searchTerm)); s.search(query, c); if (doCache && !useWrappingCollector) { if (cCache.isCached()) { // Replay for first-pass grouping cCache.replay(c1); if (doAllGroups) { // Replay for all groups: cCache.replay(allGroupsCollector); } } else { // Replay by re-running search: s.search(query, c1); if (doAllGroups) { s.search(query, allGroupsCollector); } } } // Get 1st pass top groups final Collection<SearchGroup<BytesRef>> topGroups = getSearchGroups(c1, groupOffset, fillFields); final TopGroups<BytesRef> groupsResult; if (VERBOSE) { System.out.println("TEST: first pass topGroups"); if (topGroups == null) { System.out.println(" null"); } else { for (SearchGroup<BytesRef> searchGroup : topGroups) { System.out.println(" " + (searchGroup.groupValue == null ? "null" : searchGroup.groupValue) + ": " + Arrays.deepToString(searchGroup.sortValues)); } } } // Get 1st pass top groups using shards final TopGroups<BytesRef> topGroupsShards = searchShards(s, shards.subSearchers, query, groupSort, docSort, groupOffset, topNGroups, docOffset, docsPerGroup, getScores, getMaxScores, true, false); final AbstractSecondPassGroupingCollector<?> c2; if (topGroups != null) { if (VERBOSE) { System.out.println("TEST: topGroups"); for (SearchGroup<BytesRef> searchGroup : topGroups) { System.out.println(" " + (searchGroup.groupValue == null ? "null" : searchGroup.groupValue.utf8ToString()) + ": " + Arrays.deepToString(searchGroup.sortValues)); } } c2 = createSecondPassCollector(c1, groupField, groupSort, docSort, groupOffset, docOffset + docsPerGroup, getScores, getMaxScores, fillFields); if (doCache) { if (cCache.isCached()) { if (VERBOSE) { System.out.println("TEST: cache is intact"); } cCache.replay(c2); } else { if (VERBOSE) { System.out.println("TEST: cache was too large"); } s.search(query, c2); } } else { s.search(query, c2); } if (doAllGroups) { TopGroups<BytesRef> tempTopGroups = getTopGroups(c2, docOffset); groupsResult = new TopGroups<>(tempTopGroups, allGroupsCollector.getGroupCount()); } else { groupsResult = getTopGroups(c2, docOffset); } } else { c2 = null; groupsResult = null; if (VERBOSE) { System.out.println("TEST: no results"); } } final TopGroups<BytesRef> expectedGroups = slowGrouping(groupDocs, searchTerm, fillFields, getScores, getMaxScores, doAllGroups, groupSort, docSort, topNGroups, docsPerGroup, groupOffset, docOffset); if (VERBOSE) { if (expectedGroups == null) { System.out.println("TEST: no expected groups"); } else { System.out.println("TEST: expected groups totalGroupedHitCount=" + expectedGroups.totalGroupedHitCount); for(GroupDocs<BytesRef> gd : expectedGroups.groups) { System.out.println(" group=" + (gd.groupValue == null ? "null" : gd.groupValue) + " totalHits=" + gd.totalHits + " scoreDocs.len=" + gd.scoreDocs.length); for(ScoreDoc sd : gd.scoreDocs) { System.out.println(" id=" + sd.doc + " score=" + sd.score); } } } if (groupsResult == null) { System.out.println("TEST: no matched groups"); } else { System.out.println("TEST: matched groups totalGroupedHitCount=" + groupsResult.totalGroupedHitCount); for(GroupDocs<BytesRef> gd : groupsResult.groups) { System.out.println(" group=" + (gd.groupValue == null ? 
"null" : gd.groupValue) + " totalHits=" + gd.totalHits); for(ScoreDoc sd : gd.scoreDocs) { System.out.println(" id=" + docIDToID.get(sd.doc) + " score=" + sd.score); } } if (searchIter == 14) { for(int docIDX=0;docIDX<s.getIndexReader().maxDoc();docIDX++) { System.out.println("ID=" + docIDToID.get(docIDX) + " explain=" + s.explain(query, docIDX)); } } } if (topGroupsShards == null) { System.out.println("TEST: no matched-merged groups"); } else { System.out.println("TEST: matched-merged groups totalGroupedHitCount=" + topGroupsShards.totalGroupedHitCount); for(GroupDocs<BytesRef> gd : topGroupsShards.groups) { System.out.println(" group=" + (gd.groupValue == null ? "null" : gd.groupValue) + " totalHits=" + gd.totalHits); for(ScoreDoc sd : gd.scoreDocs) { System.out.println(" id=" + docIDToID.get(sd.doc) + " score=" + sd.score); } } } } assertEquals(docIDToID, expectedGroups, groupsResult, true, true, true, getScores, true); // Confirm merged shards match: assertEquals(docIDToID, expectedGroups, topGroupsShards, true, false, fillFields, getScores, true); if (topGroupsShards != null) { verifyShards(shards.docStarts, topGroupsShards); } final boolean needsScores = getScores || getMaxScores || docSort == null; final BlockGroupingCollector c3 = new BlockGroupingCollector(groupSort, groupOffset+topNGroups, needsScores, sBlocks.createNormalizedWeight(lastDocInBlock, false)); final TermAllGroupsCollector allGroupsCollector2; final Collector c4; if (doAllGroups) { // NOTE: must be "group" and not "group_dv" // (groupField) because we didn't index doc // values in the block index: allGroupsCollector2 = new TermAllGroupsCollector("group"); c4 = MultiCollector.wrap(c3, allGroupsCollector2); } else { allGroupsCollector2 = null; c4 = c3; } // Get block grouping result: sBlocks.search(query, c4); @SuppressWarnings({"unchecked","rawtypes"}) final TopGroups<BytesRef> tempTopGroupsBlocks = (TopGroups<BytesRef>) c3.getTopGroups(docSort, groupOffset, docOffset, docOffset+docsPerGroup, fillFields); final TopGroups<BytesRef> groupsResultBlocks; if (doAllGroups && tempTopGroupsBlocks != null) { assertEquals((int) tempTopGroupsBlocks.totalGroupCount, allGroupsCollector2.getGroupCount()); groupsResultBlocks = new TopGroups<>(tempTopGroupsBlocks, allGroupsCollector2.getGroupCount()); } else { groupsResultBlocks = tempTopGroupsBlocks; } if (VERBOSE) { if (groupsResultBlocks == null) { System.out.println("TEST: no block groups"); } else { System.out.println("TEST: block groups totalGroupedHitCount=" + groupsResultBlocks.totalGroupedHitCount); boolean first = true; for(GroupDocs<BytesRef> gd : groupsResultBlocks.groups) { System.out.println(" group=" + (gd.groupValue == null ? 
"null" : gd.groupValue.utf8ToString()) + " totalHits=" + gd.totalHits); for(ScoreDoc sd : gd.scoreDocs) { System.out.println(" id=" + docIDToIDBlocks.get(sd.doc) + " score=" + sd.score); if (first) { System.out.println("explain: " + sBlocks.explain(query, sd.doc)); first = false; } } } } } // Get shard'd block grouping result: final TopGroups<BytesRef> topGroupsBlockShards = searchShards(sBlocks, shardsBlocks.subSearchers, query, groupSort, docSort, groupOffset, topNGroups, docOffset, docsPerGroup, getScores, getMaxScores, false, false); if (expectedGroups != null) { // Fixup scores for reader2 for (GroupDocs<?> groupDocsHits : expectedGroups.groups) { for(ScoreDoc hit : groupDocsHits.scoreDocs) { final GroupDoc gd = groupDocsByID[hit.doc]; assertEquals(gd.id, hit.doc); //System.out.println("fixup score " + hit.score + " to " + gd.score2 + " vs " + gd.score); hit.score = gd.score2; } } final SortField[] sortFields = groupSort.getSort(); final Map<Float,Float> termScoreMap = scoreMap.get(searchTerm); for(int groupSortIDX=0;groupSortIDX<sortFields.length;groupSortIDX++) { if (sortFields[groupSortIDX].getType() == SortField.Type.SCORE) { for (GroupDocs<?> groupDocsHits : expectedGroups.groups) { if (groupDocsHits.groupSortValues != null) { //System.out.println("remap " + groupDocsHits.groupSortValues[groupSortIDX] + " to " + termScoreMap.get(groupDocsHits.groupSortValues[groupSortIDX])); groupDocsHits.groupSortValues[groupSortIDX] = termScoreMap.get(groupDocsHits.groupSortValues[groupSortIDX]); assertNotNull(groupDocsHits.groupSortValues[groupSortIDX]); } } } } final SortField[] docSortFields = docSort.getSort(); for(int docSortIDX=0;docSortIDX<docSortFields.length;docSortIDX++) { if (docSortFields[docSortIDX].getType() == SortField.Type.SCORE) { for (GroupDocs<?> groupDocsHits : expectedGroups.groups) { for(ScoreDoc _hit : groupDocsHits.scoreDocs) { FieldDoc hit = (FieldDoc) _hit; if (hit.fields != null) { hit.fields[docSortIDX] = termScoreMap.get(hit.fields[docSortIDX]); assertNotNull(hit.fields[docSortIDX]); } } } } } } assertEquals(docIDToIDBlocks, expectedGroups, groupsResultBlocks, false, true, true, getScores, false); assertEquals(docIDToIDBlocks, expectedGroups, topGroupsBlockShards, false, false, fillFields, getScores, false); } r.close(); dir.close(); rBlocks.close(); dirBlocks.close(); } } private void verifyShards(int[] docStarts, TopGroups<BytesRef> topGroups) { for(GroupDocs<?> group : topGroups.groups) { for(int hitIDX=0;hitIDX<group.scoreDocs.length;hitIDX++) { final ScoreDoc sd = group.scoreDocs[hitIDX]; assertEquals("doc=" + sd.doc + " wrong shard", ReaderUtil.subIndex(sd.doc, docStarts), sd.shardIndex); } } } private TopGroups<BytesRef> searchShards(IndexSearcher topSearcher, ShardSearcher[] subSearchers, Query query, Sort groupSort, Sort docSort, int groupOffset, int topNGroups, int docOffset, int topNDocs, boolean getScores, boolean getMaxScores, boolean canUseIDV, boolean preFlex) throws Exception { // TODO: swap in caching, all groups collector hereassertEquals(expected.totalHitCount, actual.totalHitCount); // too... 
if (VERBOSE) { System.out.println("TEST: " + subSearchers.length + " shards: " + Arrays.toString(subSearchers) + " canUseIDV=" + canUseIDV); } // Run 1st pass collector to get top groups per shard final Weight w = topSearcher.createNormalizedWeight(query, getScores); final List<Collection<SearchGroup<BytesRef>>> shardGroups = new ArrayList<>(); List<AbstractFirstPassGroupingCollector<?>> firstPassGroupingCollectors = new ArrayList<>(); AbstractFirstPassGroupingCollector<?> firstPassCollector = null; boolean shardsCanUseIDV; if (canUseIDV) { if (SlowCompositeReaderWrapper.class.isAssignableFrom(subSearchers[0].getIndexReader().getClass())) { shardsCanUseIDV = false; } else { shardsCanUseIDV = !preFlex; } } else { shardsCanUseIDV = false; } String groupField = "group"; for(int shardIDX=0;shardIDX<subSearchers.length;shardIDX++) { // First shard determines whether we use IDV or not; // all other shards match that: if (firstPassCollector == null) { firstPassCollector = createRandomFirstPassCollector(groupField, groupSort, groupOffset + topNGroups); } else { firstPassCollector = createFirstPassCollector(groupField, groupSort, groupOffset + topNGroups, firstPassCollector); } if (VERBOSE) { System.out.println(" shard=" + shardIDX + " groupField=" + groupField); System.out.println(" 1st pass collector=" + firstPassCollector); } firstPassGroupingCollectors.add(firstPassCollector); subSearchers[shardIDX].search(w, firstPassCollector); final Collection<SearchGroup<BytesRef>> topGroups = getSearchGroups(firstPassCollector, 0, true); if (topGroups != null) { if (VERBOSE) { System.out.println(" shard " + shardIDX + " s=" + subSearchers[shardIDX] + " totalGroupedHitCount=?" + " " + topGroups.size() + " groups:"); for(SearchGroup<BytesRef> group : topGroups) { System.out.println(" " + groupToString(group.groupValue) + " groupSort=" + Arrays.toString(group.sortValues)); } } shardGroups.add(topGroups); } } final Collection<SearchGroup<BytesRef>> mergedTopGroups = SearchGroup.merge(shardGroups, groupOffset, topNGroups, groupSort); if (VERBOSE) { System.out.println(" top groups merged:"); if (mergedTopGroups == null) { System.out.println(" null"); } else { System.out.println(" " + mergedTopGroups.size() + " top groups:"); for(SearchGroup<BytesRef> group : mergedTopGroups) { System.out.println(" [" + groupToString(group.groupValue) + "] groupSort=" + Arrays.toString(group.sortValues)); } } } if (mergedTopGroups != null) { // Now 2nd pass: @SuppressWarnings({"unchecked","rawtypes"}) final TopGroups<BytesRef>[] shardTopGroups = new TopGroups[subSearchers.length]; for(int shardIDX=0;shardIDX<subSearchers.length;shardIDX++) { final AbstractSecondPassGroupingCollector<?> secondPassCollector = createSecondPassCollector(firstPassGroupingCollectors.get(shardIDX), groupField, mergedTopGroups, groupSort, docSort, docOffset + topNDocs, getScores, getMaxScores, true); subSearchers[shardIDX].search(w, secondPassCollector); shardTopGroups[shardIDX] = getTopGroups(secondPassCollector, 0); if (VERBOSE) { System.out.println(" " + shardTopGroups[shardIDX].groups.length + " shard[" + shardIDX + "] groups:"); for(GroupDocs<BytesRef> group : shardTopGroups[shardIDX].groups) { System.out.println(" [" + groupToString(group.groupValue) + "] groupSort=" + Arrays.toString(group.groupSortValues) + " numDocs=" + group.scoreDocs.length); } } } TopGroups<BytesRef> mergedGroups = TopGroups.merge(shardTopGroups, groupSort, docSort, docOffset, topNDocs, TopGroups.ScoreMergeMode.None); if (VERBOSE) { System.out.println(" " + 
mergedGroups.groups.length + " merged groups:"); for(GroupDocs<BytesRef> group : mergedGroups.groups) { System.out.println(" [" + groupToString(group.groupValue) + "] groupSort=" + Arrays.toString(group.groupSortValues) + " numDocs=" + group.scoreDocs.length); } } return mergedGroups; } else { return null; } } private void assertEquals(NumericDocValues docIDtoID, TopGroups<BytesRef> expected, TopGroups<BytesRef> actual, boolean verifyGroupValues, boolean verifyTotalGroupCount, boolean verifySortValues, boolean testScores, boolean idvBasedImplsUsed) { if (expected == null) { assertNull(actual); return; } assertNotNull(actual); assertEquals("expected.groups.length != actual.groups.length", expected.groups.length, actual.groups.length); assertEquals("expected.totalHitCount != actual.totalHitCount", expected.totalHitCount, actual.totalHitCount); assertEquals("expected.totalGroupedHitCount != actual.totalGroupedHitCount", expected.totalGroupedHitCount, actual.totalGroupedHitCount); if (expected.totalGroupCount != null && verifyTotalGroupCount) { assertEquals("expected.totalGroupCount != actual.totalGroupCount", expected.totalGroupCount, actual.totalGroupCount); } for(int groupIDX=0;groupIDX<expected.groups.length;groupIDX++) { if (VERBOSE) { System.out.println(" check groupIDX=" + groupIDX); } final GroupDocs<BytesRef> expectedGroup = expected.groups[groupIDX]; final GroupDocs<BytesRef> actualGroup = actual.groups[groupIDX]; if (verifyGroupValues) { if (idvBasedImplsUsed) { if (actualGroup.groupValue.length == 0) { assertNull(expectedGroup.groupValue); } else { assertEquals(expectedGroup.groupValue, actualGroup.groupValue); } } else { assertEquals(expectedGroup.groupValue, actualGroup.groupValue); } } if (verifySortValues) { assertArrayEquals(expectedGroup.groupSortValues, actualGroup.groupSortValues); } // TODO // assertEquals(expectedGroup.maxScore, actualGroup.maxScore); assertEquals(expectedGroup.totalHits, actualGroup.totalHits); final ScoreDoc[] expectedFDs = expectedGroup.scoreDocs; final ScoreDoc[] actualFDs = actualGroup.scoreDocs; assertEquals(expectedFDs.length, actualFDs.length); for(int docIDX=0;docIDX<expectedFDs.length;docIDX++) { final FieldDoc expectedFD = (FieldDoc) expectedFDs[docIDX]; final FieldDoc actualFD = (FieldDoc) actualFDs[docIDX]; //System.out.println(" actual doc=" + docIDtoID.get(actualFD.doc) + " score=" + actualFD.score); assertEquals(expectedFD.doc, docIDtoID.get(actualFD.doc)); if (testScores) { assertEquals(expectedFD.score, actualFD.score, 0.1); } else { // TODO: too anal for now //assertEquals(Float.NaN, actualFD.score); } if (verifySortValues) { assertArrayEquals(expectedFD.fields, actualFD.fields); } } } } private static class ShardSearcher extends IndexSearcher { private final List<LeafReaderContext> ctx; public ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); this.ctx = Collections.singletonList(ctx); } public void search(Weight weight, Collector collector) throws IOException { search(ctx, weight, collector); } @Override public String toString() { return "ShardSearcher(" + ctx.get(0).reader() + ")"; } } private static class ValueHolder<V> { V value; private ValueHolder(V value) { this.value = value; } } }
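The test file above drives Lucene's two-pass grouping collectors end to end. For readability, here is a minimal illustrative sketch of the same first-pass / second-pass flow, using only the constructors and methods that already appear in the test (TermFirstPassGroupingCollector, TermSecondPassGroupingCollector, SearchGroup, TopGroups). The field names "author" and "content" mirror testBasic(); the group field is assumed to be indexed as a SortedDocValuesField, as the test's addGroupField() does. This sketch is not part of the committed file.

import java.util.Collection;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.grouping.GroupDocs;
import org.apache.lucene.search.grouping.SearchGroup;
import org.apache.lucene.search.grouping.TopGroups;
import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
import org.apache.lucene.search.grouping.term.TermSecondPassGroupingCollector;
import org.apache.lucene.util.BytesRef;

class TwoPassGroupingSketch {
  static TopGroups<BytesRef> groupByAuthor(IndexSearcher searcher) throws Exception {
    Query query = new TermQuery(new Term("content", "random"));
    Sort groupSort = Sort.RELEVANCE;

    // First pass: find the top N group values for the query.
    TermFirstPassGroupingCollector c1 = new TermFirstPassGroupingCollector("author", groupSort, 10);
    searcher.search(query, c1);
    Collection<SearchGroup<BytesRef>> topGroups = c1.getTopGroups(0, true);
    if (topGroups == null) {
      return null; // no matching groups
    }

    // Second pass: collect the top documents within each of those groups.
    TermSecondPassGroupingCollector c2 = new TermSecondPassGroupingCollector(
        "author", topGroups, groupSort, Sort.RELEVANCE, 5, true, true, true);
    searcher.search(query, c2);
    TopGroups<BytesRef> result = c2.getTopGroups(0);

    // Walk the grouped hits, as the assertions in testBasic() do.
    for (GroupDocs<BytesRef> group : result.groups) {
      System.out.println("group=" + (group.groupValue == null ? "null" : group.groupValue.utf8ToString())
          + " hits=" + group.totalHits);
      for (ScoreDoc sd : group.scoreDocs) {
        System.out.println("  doc=" + sd.doc + " score=" + sd.score);
      }
    }
    return result;
  }
}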
fix test bug abusing hit score
lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
fix test bug abusing hit score
Java
apache-2.0
145205d52482a6ab84ecdb870033f2b372407b27
0
asonipsl/hive,wisgood/hive,WANdisco/hive,winningsix/hive,WANdisco/amplab-hive
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hive.spark.client; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Serializable; import java.io.Writer; import java.net.URL; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import akka.actor.ActorRef; import akka.actor.ActorSelection; import akka.actor.Props; import akka.actor.UntypedActor; import com.google.common.base.Charsets; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.spark.SparkContext; import org.apache.spark.SparkException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; class SparkClientImpl implements SparkClient { private static final long serialVersionUID = 1L; private static final Logger LOG = LoggerFactory.getLogger(SparkClientImpl.class); private static final String DEFAULT_CONNECTION_TIMEOUT = "60"; // In seconds private final Map<String, String> conf; private final AtomicInteger childIdGenerator; private final String name; private final ActorRef clientRef; private final Thread driverThread; private final Map<String, JobHandleImpl<?>> jobs; private volatile ActorSelection remoteRef; SparkClientImpl(Map<String, String> conf) throws IOException, SparkException { this.conf = conf; this.childIdGenerator = new AtomicInteger(); this.name = "SparkClient-" + ClientUtils.randomName(); this.clientRef = bind(Props.create(ClientActor.class, this), name); this.jobs = Maps.newConcurrentMap(); this.driverThread = startDriver(); long connectTimeout = 1000 * Integer.parseInt( Optional.fromNullable(conf.get("spark.client.connectTimeout")).or(DEFAULT_CONNECTION_TIMEOUT)); long endTime = System.currentTimeMillis() + connectTimeout; synchronized (this) { while (remoteRef == null) { try { wait(connectTimeout); } catch (InterruptedException ie) { throw new SparkException("Interrupted.", ie); } connectTimeout = endTime - System.currentTimeMillis(); if (remoteRef == null && connectTimeout <= 0) { throw new SparkException("Timed out waiting for remote driver to connect."); } } } } @Override public <T extends Serializable> JobHandle<T> submit(Job<T> job) { String jobId = ClientUtils.randomName(); remoteRef.tell(new Protocol.JobRequest(jobId, job), clientRef); JobHandleImpl<T> handle = new JobHandleImpl<T>(this, jobId); jobs.put(jobId, handle); return handle; } @Override public void stop() { if (remoteRef != null) { LOG.info("Sending EndSession to remote actor."); remoteRef.tell(new 
Protocol.EndSession(), clientRef); } unbind(clientRef); try { driverThread.join(); // TODO: timeout? } catch (InterruptedException ie) { LOG.debug("Interrupted before driver thread was finished."); } } @Override public Future<?> addJar(URL url) { return submit(new AddJarJob(url.toString())); } @Override public Future<?> addFile(URL url) { return submit(new AddFileJob(url.toString())); } void cancel(String jobId) { remoteRef.tell(new Protocol.CancelJob(jobId), clientRef); } private Thread startDriver() throws IOException { Runnable runnable; if (conf.containsKey(ClientUtils.CONF_KEY_IN_PROCESS)) { // Mostly for testing things quickly. Do not do this in production. LOG.warn("!!!! Running remote driver in-process. !!!!"); runnable = new Runnable() { @Override public void run() { List<String> args = Lists.newArrayList(); args.add("--remote"); args.add(String.format("%s/%s", SparkClientFactory.akkaUrl, name)); args.add("--secret"); args.add(SparkClientFactory.secret); for (Map.Entry<String, String> e : conf.entrySet()) { args.add("--conf"); args.add(String.format("%s=%s", e.getKey(), e.getValue())); } try { RemoteDriver.main(args.toArray(new String[args.size()])); } catch (Exception e) { LOG.error("Error running driver.", e); } } }; } else { // Create a file with all the job properties to be read by spark-submit. Change the // file's permissions so that only the owner can read it. This avoid having the // connection secret show up in the child process's command line. File properties = File.createTempFile("spark-submit.", ".properties"); if (!properties.setReadable(false) || !properties.setReadable(true, true)) { throw new IOException("Cannot change permissions of job properties file."); } Properties allProps = new Properties(); for (Map.Entry<String, String> e : conf.entrySet()) { allProps.put(e.getKey(), e.getValue()); } allProps.put(ClientUtils.CONF_KEY_SECRET, SparkClientFactory.secret); Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8); try { allProps.store(writer, "Spark Context configuration"); } finally { writer.close(); } // Define how to pass options to the child process. If launching in client (or local) // mode, the driver options need to be passed directly on the command line. Otherwise, // SparkSubmit will take care of that for us. String master = conf.get("spark.master"); Preconditions.checkArgument(master != null, "spark.master is not defined."); List<String> argv = Lists.newArrayList(); // If a Spark installation is provided, use the spark-submit script. Otherwise, call the // SparkSubmit class directly, which has some caveats (like having to provide a proper // version of Guava on the classpath depending on the deploy mode). 
if (conf.get("spark.home") != null) { argv.add(new File(conf.get("spark.home"), "bin/spark-submit").getAbsolutePath()); } else { LOG.info("No spark.home provided, calling SparkSubmit directly."); argv.add(new File(System.getProperty("java.home"), "bin/java").getAbsolutePath()); if (master.startsWith("local") || master.startsWith("mesos") || master.endsWith("-client") || master.startsWith("spark")) { String mem = conf.get("spark.driver.memory"); if (mem != null) { argv.add("-Xms" + mem); argv.add("-Xmx" + mem); } String cp = conf.get("spark.driver.extraClassPath"); if (cp != null) { argv.add("-classpath"); argv.add(cp); } String libPath = conf.get("spark.driver.extraLibPath"); if (libPath != null) { argv.add("-Djava.library.path=" + libPath); } String extra = conf.get("spark.driver.extraJavaOptions"); if (extra != null) { for (String opt : extra.split("[ ]")) { if (!opt.trim().isEmpty()) { argv.add(opt.trim()); } } } } argv.add("org.apache.spark.deploy.SparkSubmit"); } argv.add("--properties-file"); argv.add(properties.getAbsolutePath()); argv.add("--class"); argv.add(RemoteDriver.class.getName()); String jar = "spark-internal"; if (SparkContext.jarOfClass(this.getClass()).isDefined()) { jar = SparkContext.jarOfClass(this.getClass()).get(); } argv.add(jar); argv.add("--remote"); argv.add(String.format("%s/%s", SparkClientFactory.akkaUrl, name)); LOG.debug("Running client driver with argv: {}", Joiner.on(" ").join(argv)); ProcessBuilder pb = new ProcessBuilder(argv.toArray(new String[argv.size()])); pb.environment().clear(); final Process child = pb.start(); int childId = childIdGenerator.incrementAndGet(); redirect("stdout-redir-" + childId, child.getInputStream(), System.out); redirect("stderr-redir-" + childId, child.getErrorStream(), System.err); runnable = new Runnable() { @Override public void run() { try { int exitCode = child.waitFor(); if (exitCode != 0) { LOG.warn("Child process exited with code {}.", exitCode); } } catch (Exception e) { LOG.warn("Exception while waiting for child process.", e); } } }; } Thread thread = new Thread(runnable); thread.setDaemon(true); thread.setName("Driver"); thread.start(); return thread; } private void redirect(String name, InputStream in, OutputStream out) { Thread thread = new Thread(new Redirector(in, out)); thread.setName(name); thread.setDaemon(true); thread.start(); } private ActorRef bind(Props props, String name) { return SparkClientFactory.actorSystem.actorOf(props, name); } private void unbind(ActorRef actor) { SparkClientFactory.actorSystem.stop(actor); } private ActorSelection select(String url) { return SparkClientFactory.actorSystem.actorSelection(url); } private class ClientActor extends UntypedActor { @Override public void onReceive(Object message) throws Exception { if (message instanceof Protocol.Error) { Protocol.Error e = (Protocol.Error) message; LOG.error("Error report from remote driver.", e.cause); } else if (message instanceof Protocol.Hello) { Protocol.Hello hello = (Protocol.Hello) message; LOG.info("Received hello from {}", hello.remoteUrl); remoteRef = select(hello.remoteUrl); synchronized (SparkClientImpl.this) { SparkClientImpl.this.notifyAll(); } } else if (message instanceof Protocol.JobMetrics) { Protocol.JobMetrics jm = (Protocol.JobMetrics) message; JobHandleImpl<?> handle = jobs.get(jm.jobId); if (handle != null) { handle.getMetrics().addMetrics(jm.sparkJobId, jm.stageId, jm.taskId, jm.metrics); } else { LOG.warn("Received metrics for unknown job {}", jm.jobId); } } else if (message instanceof 
Protocol.JobResult) { Protocol.JobResult jr = (Protocol.JobResult) message; JobHandleImpl<?> handle = jobs.remove(jr.id); if (handle != null) { LOG.info("Received result for {}", jr.id); handle.complete(jr.result, jr.error); } else { LOG.warn("Received result for unknown job {}", jr.id); } } else if (message instanceof Protocol.JobSubmitted) { Protocol.JobSubmitted jobSubmitted = (Protocol.JobSubmitted) message; JobHandleImpl<?> handle = jobs.get(jobSubmitted.clientJobId); if (handle != null) { LOG.info("Received spark job ID: {} for {}", jobSubmitted.sparkJobId, jobSubmitted.clientJobId); handle.getSparkJobIds().add(jobSubmitted.sparkJobId); } else { LOG.warn("Received spark job ID: {} for unknown job {}", jobSubmitted.sparkJobId, jobSubmitted.clientJobId); } } } } private class Redirector implements Runnable { private final InputStream in; private final OutputStream out; Redirector(InputStream in, OutputStream out) { this.in = in; this.out = out; } @Override public void run() { try { byte[] buf = new byte[1024]; int len = in.read(buf); while (len != -1) { out.write(buf, 0, len); out.flush(); len = in.read(buf); } } catch (Exception e) { LOG.warn("Error in redirector thread.", e); } } } private static class AddJarJob implements Job<Serializable> { private final String path; AddJarJob(String path) { this.path = path; } @Override public Serializable call(JobContext jc) throws Exception { jc.sc().addJar(path); return null; } } private static class AddFileJob implements Job<Serializable> { private final String path; AddFileJob(String path) { this.path = path; } @Override public Serializable call(JobContext jc) throws Exception { jc.sc().addFile(path); return null; } } }
spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
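For orientation, a hedged usage sketch of the SparkClientImpl shown above. It relies only on members visible in the excerpt: the package-private SparkClientImpl(Map) constructor, submit() returning a JobHandle that is usable as a Future, and stop(). Everything else, notably that the sketch lives in the org.apache.hive.spark.client package and that SparkClientFactory's actor system has already been initialized, is an assumption and is flagged in the comments. This is not part of the committed file.

package org.apache.hive.spark.client; // assumption: same package, since SparkClientImpl's constructor is package-private

import java.util.HashMap;
import java.util.Map;

// Illustrative only; assumes SparkClientFactory has already been bootstrapped so that
// SparkClientFactory.actorSystem (used when SparkClientImpl binds its ClientActor) exists.
public class SparkClientUsageSketch {

  // A trivial job. Job implementations are shipped to the remote driver
  // (see Protocol.JobRequest above), so they should be serializable.
  static class AnswerJob implements Job<Integer> {
    private static final long serialVersionUID = 1L;
    @Override
    public Integer call(JobContext jc) throws Exception {
      // jc.sc() exposes the remote Spark context wrapper, as AddJarJob/AddFileJob use above.
      return 42;
    }
  }

  public static void main(String[] args) throws Exception {
    Map<String, String> conf = new HashMap<>();
    conf.put("spark.master", "local");              // required: startDriver() checks that spark.master is set
    conf.put("spark.client.connectTimeout", "60");  // seconds; the new code above defaults to 60

    SparkClient client = new SparkClientImpl(conf); // constructor shown in the excerpt
    try {
      JobHandle<Integer> handle = client.submit(new AnswerJob());
      // JobHandle is used as a Future elsewhere in the excerpt (addJar/addFile return it as Future<?>).
      System.out.println("result = " + handle.get());
    } finally {
      client.stop();
    }
  }
}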
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hive.spark.client; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Serializable; import java.io.Writer; import java.net.URL; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import akka.actor.ActorRef; import akka.actor.ActorSelection; import akka.actor.Props; import akka.actor.UntypedActor; import com.google.common.base.Charsets; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.spark.SparkContext; import org.apache.spark.SparkException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; class SparkClientImpl implements SparkClient { private final static Logger LOG = LoggerFactory.getLogger(SparkClientImpl.class); private final Map<String, String> conf; private final AtomicInteger childIdGenerator; private final String name; private final ActorRef clientRef; private final Thread driverThread; private final Map<String, JobHandleImpl<?>> jobs; private volatile ActorSelection remoteRef; SparkClientImpl(Map<String, String> conf) throws IOException, SparkException { this.conf = conf; this.childIdGenerator = new AtomicInteger(); this.name = "SparkClient-" + ClientUtils.randomName(); this.clientRef = bind(Props.create(ClientActor.class, this), name); this.jobs = Maps.newConcurrentMap(); this.driverThread = startDriver(); long connectTimeout = Integer.parseInt( Optional.fromNullable(conf.get("spark.client.connectTimeout")).or("10")) * 1000; long endTime = System.currentTimeMillis() + connectTimeout; synchronized (this) { while (remoteRef == null) { try { wait(connectTimeout); } catch (InterruptedException ie) { throw new SparkException("Interrupted.", ie); } connectTimeout = endTime - System.currentTimeMillis(); if (remoteRef == null && connectTimeout <= 0) { throw new SparkException("Timed out waiting for remote driver to connect."); } } } } @Override public <T extends Serializable> JobHandle<T> submit(Job<T> job) { String jobId = ClientUtils.randomName(); remoteRef.tell(new Protocol.JobRequest(jobId, job), clientRef); JobHandleImpl<T> handle = new JobHandleImpl<T>(this, jobId); jobs.put(jobId, handle); return handle; } @Override public void stop() { if (remoteRef != null) { LOG.info("Sending EndSession to remote actor."); remoteRef.tell(new Protocol.EndSession(), clientRef); } unbind(clientRef); try { driverThread.join(); // TODO: timeout? 
} catch (InterruptedException ie) { LOG.debug("Interrupted before driver thread was finished."); } } @Override public Future<?> addJar(URL url) { return submit(new AddJarJob(url.toString())); } @Override public Future<?> addFile(URL url) { return submit(new AddFileJob(url.toString())); } void cancel(String jobId) { remoteRef.tell(new Protocol.CancelJob(jobId), clientRef); } private Thread startDriver() throws IOException { Runnable runnable; if (conf.containsKey(ClientUtils.CONF_KEY_IN_PROCESS)) { // Mostly for testing things quickly. Do not do this in production. LOG.warn("!!!! Running remote driver in-process. !!!!"); runnable = new Runnable() { @Override public void run() { List<String> args = Lists.newArrayList(); args.add("--remote"); args.add(String.format("%s/%s", SparkClientFactory.akkaUrl, name)); args.add("--secret"); args.add(SparkClientFactory.secret); for (Map.Entry<String, String> e : conf.entrySet()) { args.add("--conf"); args.add(String.format("%s=%s", e.getKey(), e.getValue())); } try { RemoteDriver.main(args.toArray(new String[args.size()])); } catch (Exception e) { LOG.error("Error running driver.", e); } } }; } else { // Create a file with all the job properties to be read by spark-submit. Change the // file's permissions so that only the owner can read it. This avoid having the // connection secret show up in the child process's command line. File properties = File.createTempFile("spark-submit.", ".properties"); if (!properties.setReadable(false) || !properties.setReadable(true, true)) { throw new IOException("Cannot change permissions of job properties file."); } Properties allProps = new Properties(); for (Map.Entry<String, String> e : conf.entrySet()) { allProps.put(e.getKey(), e.getValue()); } allProps.put(ClientUtils.CONF_KEY_SECRET, SparkClientFactory.secret); Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8); try { allProps.store(writer, "Spark Context configuration"); } finally { writer.close(); } // Define how to pass options to the child process. If launching in client (or local) // mode, the driver options need to be passed directly on the command line. Otherwise, // SparkSubmit will take care of that for us. String master = conf.get("spark.master"); Preconditions.checkArgument(master != null, "spark.master is not defined."); List<String> argv = Lists.newArrayList(); // If a Spark installation is provided, use the spark-submit script. Otherwise, call the // SparkSubmit class directly, which has some caveats (like having to provide a proper // version of Guava on the classpath depending on the deploy mode). 
if (conf.get("spark.home") != null) { argv.add(new File(conf.get("spark.home"), "bin/spark-submit").getAbsolutePath()); } else { LOG.info("No spark.home provided, calling SparkSubmit directly."); argv.add(new File(System.getProperty("java.home"), "bin/java").getAbsolutePath()); if (master.startsWith("local") || master.startsWith("mesos") || master.endsWith("-client") || master.startsWith("spark")) { String mem = conf.get("spark.driver.memory"); if (mem != null) { argv.add("-Xms" + mem); argv.add("-Xmx" + mem); } String cp = conf.get("spark.driver.extraClassPath"); if (cp != null) { argv.add("-classpath"); argv.add(cp); } String libPath = conf.get("spark.driver.extraLibPath"); if (libPath != null) { argv.add("-Djava.library.path=" + libPath); } String extra = conf.get("spark.driver.extraJavaOptions"); if (extra != null) { for (String opt : extra.split("[ ]")) { if (!opt.trim().isEmpty()) { argv.add(opt.trim()); } } } } argv.add("org.apache.spark.deploy.SparkSubmit"); } argv.add("--properties-file"); argv.add(properties.getAbsolutePath()); argv.add("--class"); argv.add(RemoteDriver.class.getName()); String jar = "spark-internal"; if (SparkContext.jarOfClass(this.getClass()).isDefined()) { jar = SparkContext.jarOfClass(this.getClass()).get(); } argv.add(jar); argv.add("--remote"); argv.add(String.format("%s/%s", SparkClientFactory.akkaUrl, name)); LOG.debug("Running client driver with argv: {}", Joiner.on(" ").join(argv)); ProcessBuilder pb = new ProcessBuilder(argv.toArray(new String[argv.size()])); pb.environment().clear(); final Process child = pb.start(); int childId = childIdGenerator.incrementAndGet(); redirect("stdout-redir-" + childId, child.getInputStream(), System.out); redirect("stderr-redir-" + childId, child.getErrorStream(), System.err); runnable = new Runnable() { @Override public void run() { try { int exitCode = child.waitFor(); if (exitCode != 0) { LOG.warn("Child process exited with code {}.", exitCode); } } catch (Exception e) { LOG.warn("Exception while waiting for child process.", e); } } }; } Thread thread = new Thread(runnable); thread.setDaemon(true); thread.setName("Driver"); thread.start(); return thread; } private void redirect(String name, InputStream in, OutputStream out) { Thread thread = new Thread(new Redirector(in, out)); thread.setName(name); thread.setDaemon(true); thread.start(); } private ActorRef bind(Props props, String name) { return SparkClientFactory.actorSystem.actorOf(props, name); } private void unbind(ActorRef actor) { SparkClientFactory.actorSystem.stop(actor); } private ActorSelection select(String url) { return SparkClientFactory.actorSystem.actorSelection(url); } private class ClientActor extends UntypedActor { @Override public void onReceive(Object message) throws Exception { if (message instanceof Protocol.Error) { Protocol.Error e = (Protocol.Error) message; LOG.error("Error report from remote driver.", e.cause); } else if (message instanceof Protocol.Hello) { Protocol.Hello hello = (Protocol.Hello) message; LOG.info("Received hello from {}", hello.remoteUrl); remoteRef = select(hello.remoteUrl); synchronized (SparkClientImpl.this) { SparkClientImpl.this.notifyAll(); } } else if (message instanceof Protocol.JobMetrics) { Protocol.JobMetrics jm = (Protocol.JobMetrics) message; JobHandleImpl<?> handle = jobs.get(jm.jobId); if (handle != null) { handle.getMetrics().addMetrics(jm.sparkJobId, jm.stageId, jm.taskId, jm.metrics); } else { LOG.warn("Received metrics for unknown job {}", jm.jobId); } } else if (message instanceof 
Protocol.JobResult) { Protocol.JobResult jr = (Protocol.JobResult) message; JobHandleImpl<?> handle = jobs.remove(jr.id); if (handle != null) { LOG.info("Received result for {}", jr.id); handle.complete(jr.result, jr.error); } else { LOG.warn("Received result for unknown job {}", jr.id); } } else if (message instanceof Protocol.JobSubmitted) { Protocol.JobSubmitted jobSubmitted = (Protocol.JobSubmitted) message; JobHandleImpl<?> handle = jobs.get(jobSubmitted.clientJobId); if (handle != null) { LOG.info("Received spark job ID: {} for {}", jobSubmitted.sparkJobId, jobSubmitted.clientJobId); handle.getSparkJobIds().add(jobSubmitted.sparkJobId); } else { LOG.warn("Received spark job ID: {} for unknown job {}", jobSubmitted.sparkJobId, jobSubmitted.clientJobId); } } } } private class Redirector implements Runnable { private final InputStream in; private final OutputStream out; Redirector(InputStream in, OutputStream out) { this.in = in; this.out = out; } @Override public void run() { try { byte[] buf = new byte[1024]; int len = in.read(buf); while (len != -1) { out.write(buf, 0, len); out.flush(); len = in.read(buf); } } catch (Exception e) { LOG.warn("Error in redirector thread.", e); } } } private static class AddJarJob implements Job<Serializable> { private final String path; AddJarJob(String path) { this.path = path; } @Override public Serializable call(JobContext jc) throws Exception { jc.sc().addJar(path); return null; } } private static class AddFileJob implements Job<Serializable> { private final String path; AddFileJob(String path) { this.path = path; } @Override public Serializable call(JobContext jc) throws Exception { jc.sc().addFile(path); return null; } } }
HIVE-8951: Spark remote context doesn't work with local-cluster [Spark Branch] git-svn-id: b05e31eb9f122b66e3a4bd72ce86d18f60fb8741@1641612 13f79535-47bb-0310-9956-ffa450edef68
spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
HIVE-8951: Spark remote context doesn't work with local-cluster [Spark Branch]
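The notable pattern in this record is how SparkClientImpl keeps the connection secret off the spark-submit command line: the whole configuration is written to a temp properties file whose read permission is restricted to the owner. A minimal, self-contained sketch of that pattern (assuming Java 9+ for Map.of; the property key name here is made up, not the real CONF_KEY_SECRET):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Properties;

public class SecretPropertiesFileDemo {

    // Write configuration (including a secret) to a temp file readable only by its owner,
    // so the secret never has to appear on a child process's command line.
    static File writeOwnerOnlyProperties(Map<String, String> conf, String secret) throws IOException {
        File properties = File.createTempFile("spark-submit.", ".properties");
        if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
            throw new IOException("Cannot change permissions of job properties file.");
        }
        Properties allProps = new Properties();
        allProps.putAll(conf);
        allProps.put("spark.client.secret", secret); // illustrative key name, not the real CONF_KEY_SECRET
        try (Writer writer = new OutputStreamWriter(new FileOutputStream(properties), StandardCharsets.UTF_8)) {
            allProps.store(writer, "Spark Context configuration");
        }
        return properties;
    }

    public static void main(String[] args) throws IOException {
        File f = writeOwnerOnlyProperties(Map.of("spark.master", "local[*]"), "not-a-real-secret");
        System.out.println("Wrote " + f.getAbsolutePath());
    }
}

The child process then only needs the file path (via --properties-file), which is exactly what the driver launch code above passes to spark-submit.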
Java
apache-2.0
672c0fe34d36ca9a7bf0b71a41f942a9f98ed5d4
0
HanSolo/Medusa
/* * Copyright (c) 2016 by Gerrit Grunwald * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.hansolo.medusa; import eu.hansolo.medusa.Clock.ClockSkinType; import eu.hansolo.medusa.Gauge.SkinType; import eu.hansolo.medusa.Section.SectionEvent; import eu.hansolo.medusa.events.UpdateEvent; import eu.hansolo.medusa.events.UpdateEvent.EventType; import javafx.animation.AnimationTimer; import javafx.application.Application; import javafx.application.Platform; import javafx.beans.property.BooleanProperty; import javafx.beans.property.DoubleProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleDoubleProperty; import javafx.collections.ListChangeListener; import javafx.collections.ObservableList; import javafx.event.EventHandler; import javafx.geometry.Insets; import javafx.scene.Node; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Button; import javafx.scene.layout.StackPane; import javafx.scene.layout.VBox; import javafx.scene.paint.Color; import javafx.scene.paint.CycleMethod; import javafx.scene.paint.LinearGradient; import javafx.scene.paint.Stop; import javafx.stage.Stage; import java.math.RoundingMode; import java.text.NumberFormat; import java.time.Duration; import java.time.Instant; import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Locale; import java.util.Random; /** * User: hansolo * Date: 04.01.16 * Time: 06:31 */ public class Test extends Application { private static final Random RND = new Random(); private static int noOfNodes = 0; private FGauge fgauge; private Gauge gauge; private Clock clock; private long lastTimerCall; private AnimationTimer timer; private DoubleProperty value; private long epochSeconds; private BooleanProperty toggle; @Override public void init() { NumberFormat numberFormat = NumberFormat.getInstance(new Locale("da", "DK")); numberFormat.setRoundingMode(RoundingMode.HALF_DOWN); numberFormat.setMinimumIntegerDigits(3); numberFormat.setMaximumIntegerDigits(3); numberFormat.setMinimumFractionDigits(0); numberFormat.setMaximumFractionDigits(0); value = new SimpleDoubleProperty(0); toggle = new SimpleBooleanProperty(false); fgauge = FGaugeBuilder.create() .gaugeDesign(GaugeDesign.NONE) .build(); gauge = GaugeBuilder.create() .skinType(SkinType.LCD) //.prefSize(250, 250) //.minValue(0) //.maxValue(100) .animated(true) //.checkThreshold(true) //.onThresholdExceeded(e -> System.out.println("threshold exceeded")) //.lcdVisible(true) //.locale(Locale.GERMANY) //.numberFormat(numberFormat) .title("Title") .unit("ยฐC") .subTitle("SubTitle") //.interactive(true) //.onButtonPressed(o -> System.out.println("Button pressed")) //.title("Title") .sections(new Section(0, 33, Color.RED), new Section(33, 66, Color.YELLOW), new Section(66, 100, Color.LIME)) .sectionsVisible(true) //.autoScale(false) .averagingEnabled(true) .averageVisible(true) .build(); // Calling bind() directly sets a value to gauge gauge.valueProperty().bind(value); gauge.getSections().forEach(section -> 
section.setOnSectionUpdate(sectionEvent -> gauge.fireUpdateEvent(new UpdateEvent(Test.this, EventType.REDRAW)))); //gauge.valueVisibleProperty().bind(toggle); epochSeconds = Instant.now().getEpochSecond(); clock = ClockBuilder.create() .skinType(ClockSkinType.DESIGN) //.prefSize(400, 400) //.onTimeEvent(e -> System.out.println(e.TYPE)) //.discreteSeconds(false) .locale(Locale.GERMANY) //.secondsVisible(true) //.dateVisible(true) //.customFont(Fonts.latoLight(10)) .shadowsEnabled(true) //.running(true) .build(); lastTimerCall = System.nanoTime(); timer = new AnimationTimer() { @Override public void handle(long now) { if (now > lastTimerCall + 3_000_000_000l) { double v = RND.nextDouble() * gauge.getRange() + gauge.getMinValue(); value.set(v); //gauge.setValue(v); //System.out.println("MovingAverage over " + gauge.getAveragingWindow().size() + " values: " + gauge.getAverage() + " last value = " + v); //toggle.set(!toggle.get()); //System.out.println(gauge.isValueVisible()); //gauge.setValue(v); //epochSeconds+=20; //clock.setTime(epochSeconds); lastTimerCall = now; } } }; } @Override public void start(Stage stage) { StackPane pane = new StackPane(gauge); pane.setPadding(new Insets(20)); LinearGradient gradient = new LinearGradient(0, 0, 0, pane.getLayoutBounds().getHeight(), false, CycleMethod.NO_CYCLE, new Stop(0.0, Color.rgb(38, 38, 38)), new Stop(1.0, Color.rgb(15, 15, 15))); //pane.setBackground(new Background(new BackgroundFill(gradient, CornerRadii.EMPTY, Insets.EMPTY))); //pane.setBackground(new Background(new BackgroundFill(Color.rgb(39,44,50), CornerRadii.EMPTY, Insets.EMPTY))); //pane.setBackground(new Background(new BackgroundFill(Color.WHITE, CornerRadii.EMPTY, Insets.EMPTY))); //pane.setBackground(new Background(new BackgroundFill(Gauge.DARK_COLOR, CornerRadii.EMPTY, Insets.EMPTY))); Scene scene = new Scene(pane); stage.setTitle("Medusa"); stage.setScene(scene); stage.show(); //gauge.setValue(105); // Calculate number of nodes calcNoOfNodes(pane); System.out.println(noOfNodes + " Nodes in SceneGraph"); timer.start(); //gauge.getSections().get(0).setStart(10); //gauge.getSections().get(0).setStop(90); } @Override public void stop() { System.exit(0); } // ******************** Misc ********************************************** private static void calcNoOfNodes(Node node) { if (node instanceof Parent) { if (((Parent) node).getChildrenUnmodifiable().size() != 0) { ObservableList<Node> tempChildren = ((Parent) node).getChildrenUnmodifiable(); noOfNodes += tempChildren.size(); for (Node n : tempChildren) { calcNoOfNodes(n); } } } } public static void main(String[] args) { launch(args); } }
src/main/java/eu/hansolo/medusa/Test.java
/* * Copyright (c) 2016 by Gerrit Grunwald * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.hansolo.medusa; import eu.hansolo.medusa.Clock.ClockSkinType; import eu.hansolo.medusa.Gauge.SkinType; import eu.hansolo.medusa.Section.SectionEvent; import eu.hansolo.medusa.events.UpdateEvent; import eu.hansolo.medusa.events.UpdateEvent.EventType; import javafx.animation.AnimationTimer; import javafx.application.Application; import javafx.application.Platform; import javafx.beans.property.BooleanProperty; import javafx.beans.property.DoubleProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleDoubleProperty; import javafx.collections.ListChangeListener; import javafx.collections.ObservableList; import javafx.event.EventHandler; import javafx.geometry.Insets; import javafx.scene.Node; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Button; import javafx.scene.layout.StackPane; import javafx.scene.layout.VBox; import javafx.scene.paint.Color; import javafx.scene.paint.CycleMethod; import javafx.scene.paint.LinearGradient; import javafx.scene.paint.Stop; import javafx.stage.Stage; import java.math.RoundingMode; import java.text.NumberFormat; import java.time.Duration; import java.util.Locale; import java.util.Random; /** * User: hansolo * Date: 04.01.16 * Time: 06:31 */ public class Test extends Application { private static final Random RND = new Random(); private static int noOfNodes = 0; private FGauge fgauge; private Gauge gauge; private Clock clock; private long lastTimerCall; private AnimationTimer timer; private DoubleProperty value; private BooleanProperty toggle; @Override public void init() { NumberFormat numberFormat = NumberFormat.getInstance(new Locale("da", "DK")); numberFormat.setRoundingMode(RoundingMode.HALF_DOWN); numberFormat.setMinimumIntegerDigits(3); numberFormat.setMaximumIntegerDigits(3); numberFormat.setMinimumFractionDigits(0); numberFormat.setMaximumFractionDigits(0); value = new SimpleDoubleProperty(0); toggle = new SimpleBooleanProperty(false); fgauge = FGaugeBuilder.create() .gaugeDesign(GaugeDesign.NONE) .build(); gauge = GaugeBuilder.create() .skinType(SkinType.LCD) //.prefSize(250, 250) //.minValue(0) //.maxValue(100) .animated(true) //.checkThreshold(true) //.onThresholdExceeded(e -> System.out.println("threshold exceeded")) //.lcdVisible(true) //.locale(Locale.GERMANY) //.numberFormat(numberFormat) .title("Title") .unit("ยฐC") .subTitle("SubTitle") //.interactive(true) //.onButtonPressed(o -> System.out.println("Button pressed")) //.title("Title") .sections(new Section(0, 33, Color.RED), new Section(33, 66, Color.YELLOW), new Section(66, 100, Color.LIME)) .sectionsVisible(true) //.autoScale(false) .averagingEnabled(true) .averageVisible(true) .build(); // Calling bind() directly sets a value to gauge gauge.valueProperty().bind(value); gauge.getSections().forEach(section -> section.setOnSectionUpdate(sectionEvent -> gauge.fireUpdateEvent(new UpdateEvent(Test.this, EventType.REDRAW)))); 
//gauge.valueVisibleProperty().bind(toggle); clock = ClockBuilder.create() .skinType(ClockSkinType.DESIGN) //.prefSize(400, 400) //.onTimeEvent(e -> System.out.println(e.TYPE)) //.discreteSeconds(false) .locale(Locale.GERMANY) //.secondsVisible(true) //.dateVisible(true) //.customFont(Fonts.latoLight(10)) .shadowsEnabled(true) .running(true) .build(); lastTimerCall = System.nanoTime(); timer = new AnimationTimer() { @Override public void handle(long now) { if (now > lastTimerCall + 3_000_000_000l) { double v = RND.nextDouble() * gauge.getRange() + gauge.getMinValue(); value.set(v); //gauge.setValue(v); //System.out.println("MovingAverage over " + gauge.getAveragingWindow().size() + " values: " + gauge.getAverage() + " last value = " + v); //toggle.set(!toggle.get()); //System.out.println(gauge.isValueVisible()); //gauge.setValue(v); lastTimerCall = now; } } }; } @Override public void start(Stage stage) { StackPane pane = new StackPane(clock); pane.setPadding(new Insets(20)); LinearGradient gradient = new LinearGradient(0, 0, 0, pane.getLayoutBounds().getHeight(), false, CycleMethod.NO_CYCLE, new Stop(0.0, Color.rgb(38, 38, 38)), new Stop(1.0, Color.rgb(15, 15, 15))); //pane.setBackground(new Background(new BackgroundFill(gradient, CornerRadii.EMPTY, Insets.EMPTY))); //pane.setBackground(new Background(new BackgroundFill(Color.rgb(39,44,50), CornerRadii.EMPTY, Insets.EMPTY))); //pane.setBackground(new Background(new BackgroundFill(Color.WHITE, CornerRadii.EMPTY, Insets.EMPTY))); //pane.setBackground(new Background(new BackgroundFill(Gauge.DARK_COLOR, CornerRadii.EMPTY, Insets.EMPTY))); Scene scene = new Scene(pane); stage.setTitle("Medusa"); stage.setScene(scene); stage.show(); //gauge.setValue(105); // Calculate number of nodes calcNoOfNodes(pane); System.out.println(noOfNodes + " Nodes in SceneGraph"); timer.start(); //gauge.getSections().get(0).setStart(10); //gauge.getSections().get(0).setStop(90); } @Override public void stop() { System.exit(0); } // ******************** Misc ********************************************** private static void calcNoOfNodes(Node node) { if (node instanceof Parent) { if (((Parent) node).getChildrenUnmodifiable().size() != 0) { ObservableList<Node> tempChildren = ((Parent) node).getChildrenUnmodifiable(); noOfNodes += tempChildren.size(); for (Node n : tempChildren) { calcNoOfNodes(n); } } } } public static void main(String[] args) { launch(args); } }
Cosmetics
src/main/java/eu/hansolo/medusa/Test.java
Cosmetics
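The test harness above notes that "Calling bind() directly sets a value to gauge". A short sketch of that JavaFX property-binding behaviour, using plain SimpleDoubleProperty stand-ins for gauge.valueProperty() and the external value so it runs with only javafx.base on the classpath (no gauge or toolkit needed):

import javafx.beans.property.DoubleProperty;
import javafx.beans.property.SimpleDoubleProperty;

public class BindSketch {
    public static void main(String[] args) {
        DoubleProperty gaugeValue = new SimpleDoubleProperty(42); // stand-in for gauge.valueProperty()
        DoubleProperty value = new SimpleDoubleProperty(0);       // stand-in for the external value property

        // bind() immediately pushes the source's current value into the bound property...
        gaugeValue.bind(value);
        System.out.println(gaugeValue.get()); // 0.0, the old 42 is overwritten right away

        // ...and every later change propagates automatically, which is what the
        // AnimationTimer in the test relies on when it calls value.set(v).
        value.set(73.5);
        System.out.println(gaugeValue.get()); // 73.5

        // A bound property can no longer be set directly; doing so throws.
        try {
            gaugeValue.set(1);
        } catch (RuntimeException e) {
            System.out.println("cannot set a bound value: " + e.getMessage());
        }
    }
}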
Java
apache-2.0
6c8532315c23a8196e54282b08e10135c25ca0d0
0
TikhomirovSergey/java-client,TikhomirovSergey/java-client,SrinivasanTarget/java-client,saikrishna321/java-client,appium/java-client,SrinivasanTarget/java-client,TikhomirovSergey/java-client,SrinivasanTarget/java-client,SrinivasanTarget/java-client,TikhomirovSergey/java-client,saikrishna321/java-client,saikrishna321/java-client,saikrishna321/java-client,appium/java-client
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.appium.java_client.pagefactory; import io.appium.java_client.pagefactory.bys.ContentMappedBy; import io.appium.java_client.pagefactory.bys.ContentType; import io.appium.java_client.pagefactory.bys.builder.AppiumByBuilder; import io.appium.java_client.pagefactory.bys.builder.HowToUseSelectors; import org.openqa.selenium.By; import org.openqa.selenium.support.ByIdOrName; import org.openqa.selenium.support.CacheLookup; import org.openqa.selenium.support.FindAll; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Comparator; import java.util.HashMap; import java.util.Map; import java.util.Optional; public class DefaultElementByBuilder extends AppiumByBuilder { private static final String PRIORITY = "priority"; private static final String VALUE = "value"; private static final Class[] ANNOTATION_ARGUMENTS = new Class[] {}; private static final Object[] ANNOTATION_PARAMETERS = new Object[] {}; public DefaultElementByBuilder(String platform, String automation) { super(platform, automation); } private static void checkDisallowedAnnotationPairs(Annotation a1, Annotation a2) throws IllegalArgumentException { if (a1 != null && a2 != null) { throw new IllegalArgumentException( "If you use a '@" + a1.getClass().getSimpleName() + "' annotation, " + "you must not also use a '@" + a2.getClass().getSimpleName() + "' annotation"); } } private static By buildMobileBy(LocatorGroupStrategy locatorGroupStrategy, Annotation[] annotations) { if (annotations.length == 1) { return createBy(new Annotation[] {annotations[0]}, HowToUseSelectors.USE_ONE); } else { LocatorGroupStrategy strategy = Optional.ofNullable(locatorGroupStrategy) .orElse(LocatorGroupStrategy.CHAIN); if (strategy.equals(LocatorGroupStrategy.ALL_POSSIBLE)) { return createBy(annotations, HowToUseSelectors.USE_ANY); } return createBy(annotations, HowToUseSelectors.BUILD_CHAINED); } } @Override protected void assertValidAnnotations() { AnnotatedElement annotatedElement = annotatedElementContainer.getAnnotated(); FindBy findBy = annotatedElement.getAnnotation(FindBy.class); FindBys findBys = annotatedElement.getAnnotation(FindBys.class); checkDisallowedAnnotationPairs(findBy, findBys); FindAll findAll = annotatedElement.getAnnotation(FindAll.class); checkDisallowedAnnotationPairs(findBy, findAll); checkDisallowedAnnotationPairs(findBys, findAll); } @Override protected By buildDefaultBy() { AnnotatedElement annotatedElement = annotatedElementContainer.getAnnotated(); By defaultBy = null; FindBy findBy = annotatedElement.getAnnotation(FindBy.class); if (findBy != null) { defaultBy = super.buildByFromFindBy(findBy); } if (defaultBy == null) { FindBys 
findBys = annotatedElement.getAnnotation(FindBys.class); if (findBys != null) { defaultBy = super.buildByFromFindBys(findBys); } } if (defaultBy == null) { FindAll findAll = annotatedElement.getAnnotation(FindAll.class); if (findAll != null) { defaultBy = super.buildBysFromFindByOneOf(findAll); } } return defaultBy; } @Override protected By buildMobileNativeBy() { AnnotatedElement annotatedElement = annotatedElementContainer.getAnnotated(); HowToUseLocators howToUseLocators = annotatedElement.getAnnotation(HowToUseLocators.class); if (isSelendroidAutomation()) { SelendroidFindBy[] selendroidFindByArray = annotatedElement.getAnnotationsByType(SelendroidFindBy.class); //should be kept for some time SelendroidFindBys selendroidFindBys = annotatedElement.getAnnotation(SelendroidFindBys.class); SelendroidFindAll selendroidFindByAll = annotatedElement.getAnnotation(SelendroidFindAll.class); if (selendroidFindByArray != null && selendroidFindByArray.length == 1) { return createBy(new Annotation[] {selendroidFindByArray[0]}, HowToUseSelectors.USE_ONE); } if (selendroidFindBys != null) { return createBy(selendroidFindBys.value(), HowToUseSelectors.BUILD_CHAINED); } if (selendroidFindByAll != null) { return createBy(selendroidFindByAll.value(), HowToUseSelectors.USE_ANY); } /////////////////////////////////////// //code that supposed to be supported if (selendroidFindByArray != null && selendroidFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? howToUseLocators.selendroidAutomation() : null, selendroidFindByArray); } } if (isAndroid()) { AndroidFindBy[] androidFindByArray = annotatedElement.getAnnotationsByType(AndroidFindBy.class); //should be kept for some time AndroidFindBys androidFindBys = annotatedElement.getAnnotation(AndroidFindBys.class); AndroidFindAll androidFindAll = annotatedElement.getAnnotation(AndroidFindAll.class); if (androidFindByArray != null && androidFindByArray.length == 1) { return createBy(new Annotation[] {androidFindByArray[0]}, HowToUseSelectors.USE_ONE); } if (androidFindBys != null) { return createBy(androidFindBys.value(), HowToUseSelectors.BUILD_CHAINED); } if (androidFindAll != null) { return createBy(androidFindAll.value(), HowToUseSelectors.USE_ANY); } /////////////////////////////////////// //code that supposed to be supported if (androidFindByArray != null && androidFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? howToUseLocators.androidAutomation() : null, androidFindByArray); } } if (isIOSXcuit()) { iOSXCUITFindBy[] xCuitFindByArray = annotatedElement.getAnnotationsByType(iOSXCUITFindBy.class); if (xCuitFindByArray != null && xCuitFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? 
howToUseLocators.iOSXCUITAutomation() : null, xCuitFindByArray); } } if (isIOS()) { iOSFindBy[] iOSFindByArray = annotatedElement.getAnnotationsByType(iOSFindBy.class); //should be kept for some time iOSFindBys iOSFindBys = annotatedElement.getAnnotation(iOSFindBys.class); iOSFindAll iOSFindAll = annotatedElement.getAnnotation(iOSFindAll.class); if (iOSFindByArray != null && iOSFindByArray.length == 1) { return createBy(new Annotation[] {iOSFindByArray[0]}, HowToUseSelectors.USE_ONE); } if (iOSFindBys != null) { return createBy(iOSFindBys.value(), HowToUseSelectors.BUILD_CHAINED); } if (iOSFindAll != null) { return createBy(iOSFindAll.value(), HowToUseSelectors.USE_ANY); } /////////////////////////////////////// //code that supposed to be supported if (iOSFindByArray != null && iOSFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? howToUseLocators.iOSAutomation() : null, iOSFindByArray); } } if (isWindows()) { WindowsFindBy[] windowsFindByArray = annotatedElement.getAnnotationsByType(WindowsFindBy.class); if (windowsFindByArray != null && windowsFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? howToUseLocators.windowsAutomation() : null, windowsFindByArray); } } return null; } @Override public boolean isLookupCached() { AnnotatedElement annotatedElement = annotatedElementContainer.getAnnotated(); return (annotatedElement.getAnnotation(CacheLookup.class) != null); } private By returnMappedBy(By byDefault, By nativeAppBy) { Map<ContentType, By> contentMap = new HashMap<>(); contentMap.put(ContentType.HTML_OR_DEFAULT, byDefault); contentMap.put(ContentType.NATIVE_MOBILE_SPECIFIC, nativeAppBy); return new ContentMappedBy(contentMap); } @Override public By buildBy() { assertValidAnnotations(); By defaultBy = buildDefaultBy(); By mobileNativeBy = buildMobileNativeBy(); String idOrName = ((Field) annotatedElementContainer.getAnnotated()).getName(); if (defaultBy == null && mobileNativeBy == null) { defaultBy = new ByIdOrName(((Field) annotatedElementContainer.getAnnotated()).getName()); mobileNativeBy = new By.ById(idOrName); return returnMappedBy(defaultBy, mobileNativeBy); } if (defaultBy == null) { defaultBy = new ByIdOrName(((Field) annotatedElementContainer.getAnnotated()).getName()); return returnMappedBy(defaultBy, mobileNativeBy); } if (mobileNativeBy == null) { mobileNativeBy = defaultBy; return returnMappedBy(defaultBy, mobileNativeBy); } return returnMappedBy(defaultBy, mobileNativeBy); } private static class AnnotationComparator implements Comparator<Annotation> { @Override public int compare(Annotation o1, Annotation o2) { int priority1; int priority2; Method priority; Class<? extends Annotation> c1 = o1.getClass(); Class<? extends Annotation> c2 = o2.getClass(); if (!c1.equals(c2)) { throw new ClassCastException(String.format("Given annotations have different classes (%s, %s). " + "Annotations of the same classes are required.", c1.getName(), c2.getName())); } try { priority = c1.getMethod(PRIORITY, ANNOTATION_ARGUMENTS); } catch (NoSuchMethodException e) { throw new ClassCastException(String.format("Class %s has no '%s' method", c1.getName(), PRIORITY)); } try { priority1 = (int) priority.invoke(o1, ANNOTATION_PARAMETERS); priority2 = (int) priority.invoke(o2, ANNOTATION_PARAMETERS); if (priority2 > priority1) { return -1; } else if (priority2 < priority1){ return 1; } else { return 0; } } catch (IllegalAccessException|InvocationTargetException e) { throw new RuntimeException(e); } } } }
src/main/java/io/appium/java_client/pagefactory/DefaultElementByBuilder.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.appium.java_client.pagefactory; import io.appium.java_client.pagefactory.bys.ContentMappedBy; import io.appium.java_client.pagefactory.bys.ContentType; import io.appium.java_client.pagefactory.bys.builder.AppiumByBuilder; import io.appium.java_client.pagefactory.bys.builder.HowToUseSelectors; import org.openqa.selenium.By; import org.openqa.selenium.support.ByIdOrName; import org.openqa.selenium.support.CacheLookup; import org.openqa.selenium.support.FindAll; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.FindBys; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Field; import java.util.HashMap; import java.util.Map; import java.util.Optional; public class DefaultElementByBuilder extends AppiumByBuilder { public DefaultElementByBuilder(String platform, String automation) { super(platform, automation); } private static void checkDisallowedAnnotationPairs(Annotation a1, Annotation a2) throws IllegalArgumentException { if (a1 != null && a2 != null) { throw new IllegalArgumentException( "If you use a '@" + a1.getClass().getSimpleName() + "' annotation, " + "you must not also use a '@" + a2.getClass().getSimpleName() + "' annotation"); } } private static By buildMobileBy(LocatorGroupStrategy locatorGroupStrategy, Annotation[] annotations) { if (annotations.length == 1) { return createBy(new Annotation[] {annotations[0]}, HowToUseSelectors.USE_ONE); } else { LocatorGroupStrategy strategy = Optional.ofNullable(locatorGroupStrategy) .orElse(LocatorGroupStrategy.CHAIN); if (strategy.equals(LocatorGroupStrategy.ALL_POSSIBLE)) { return createBy(annotations, HowToUseSelectors.USE_ANY); } return createBy(annotations, HowToUseSelectors.BUILD_CHAINED); } } @Override protected void assertValidAnnotations() { AnnotatedElement annotatedElement = annotatedElementContainer.getAnnotated(); AndroidFindBy androidBy = annotatedElement.getAnnotation(AndroidFindBy.class); AndroidFindBys androidBys = annotatedElement.getAnnotation(AndroidFindBys.class); checkDisallowedAnnotationPairs(androidBy, androidBys); AndroidFindAll androidFindAll = annotatedElement.getAnnotation(AndroidFindAll.class); checkDisallowedAnnotationPairs(androidBy, androidFindAll); checkDisallowedAnnotationPairs(androidBys, androidFindAll); SelendroidFindBy selendroidBy = annotatedElement.getAnnotation(SelendroidFindBy.class); SelendroidFindBys selendroidBys = annotatedElement.getAnnotation(SelendroidFindBys.class); checkDisallowedAnnotationPairs(selendroidBy, selendroidBys); SelendroidFindAll selendroidFindAll = annotatedElement.getAnnotation(SelendroidFindAll.class); checkDisallowedAnnotationPairs(selendroidBy, selendroidFindAll); checkDisallowedAnnotationPairs(selendroidBys, selendroidFindAll); iOSFindBy iOSBy = annotatedElement.getAnnotation(iOSFindBy.class); iOSFindBys iOSBys = 
annotatedElement.getAnnotation(iOSFindBys.class); checkDisallowedAnnotationPairs(iOSBy, iOSBys); iOSFindAll iOSFindAll = annotatedElement.getAnnotation(iOSFindAll.class); checkDisallowedAnnotationPairs(iOSBy, iOSFindAll); checkDisallowedAnnotationPairs(iOSBys, iOSFindAll); FindBy findBy = annotatedElement.getAnnotation(FindBy.class); FindBys findBys = annotatedElement.getAnnotation(FindBys.class); checkDisallowedAnnotationPairs(findBy, findBys); FindAll findAll = annotatedElement.getAnnotation(FindAll.class); checkDisallowedAnnotationPairs(findBy, findAll); checkDisallowedAnnotationPairs(findBys, findAll); } @Override protected By buildDefaultBy() { AnnotatedElement annotatedElement = annotatedElementContainer.getAnnotated(); By defaultBy = null; FindBy findBy = annotatedElement.getAnnotation(FindBy.class); if (findBy != null) { defaultBy = super.buildByFromFindBy(findBy); } if (defaultBy == null) { FindBys findBys = annotatedElement.getAnnotation(FindBys.class); if (findBys != null) { defaultBy = super.buildByFromFindBys(findBys); } } if (defaultBy == null) { FindAll findAll = annotatedElement.getAnnotation(FindAll.class); if (findAll != null) { defaultBy = super.buildBysFromFindByOneOf(findAll); } } return defaultBy; } @Override protected By buildMobileNativeBy() { AnnotatedElement annotatedElement = annotatedElementContainer.getAnnotated(); HowToUseLocators howToUseLocators = annotatedElement.getAnnotation(HowToUseLocators.class); if (isSelendroidAutomation()) { SelendroidFindBy[] selendroidFindByArray = annotatedElement.getAnnotationsByType(SelendroidFindBy.class); //should be kept for some time SelendroidFindBys selendroidFindBys = annotatedElement.getAnnotation(SelendroidFindBys.class); SelendroidFindAll selendroidFindByAll = annotatedElement.getAnnotation(SelendroidFindAll.class); if (selendroidFindByArray != null && selendroidFindByArray.length == 1) { return createBy(new Annotation[] {selendroidFindByArray[0]}, HowToUseSelectors.USE_ONE); } if (selendroidFindBys != null) { return createBy(selendroidFindBys.value(), HowToUseSelectors.BUILD_CHAINED); } if (selendroidFindByAll != null) { return createBy(selendroidFindByAll.value(), HowToUseSelectors.USE_ANY); } /////////////////////////////////////// //code that supposed to be supported if (selendroidFindByArray != null && selendroidFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? howToUseLocators.selendroidAutomation() : null, selendroidFindByArray); } } if (isAndroid()) { AndroidFindBy[] androidFindByArray = annotatedElement.getAnnotationsByType(AndroidFindBy.class); //should be kept for some time AndroidFindBys androidFindBys = annotatedElement.getAnnotation(AndroidFindBys.class); AndroidFindAll androidFindAll = annotatedElement.getAnnotation(AndroidFindAll.class); if (androidFindByArray != null && androidFindByArray.length == 1) { return createBy(new Annotation[] {androidFindByArray[0]}, HowToUseSelectors.USE_ONE); } if (androidFindBys != null) { return createBy(androidFindBys.value(), HowToUseSelectors.BUILD_CHAINED); } if (androidFindAll != null) { return createBy(androidFindAll.value(), HowToUseSelectors.USE_ANY); } /////////////////////////////////////// //code that supposed to be supported if (androidFindByArray != null && androidFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? 
howToUseLocators.androidAutomation() : null, androidFindByArray); } } if (isIOSXcuit()) { iOSXCUITFindBy[] xCuitFindByArray = annotatedElement.getAnnotationsByType(iOSXCUITFindBy.class); if (xCuitFindByArray != null && xCuitFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? howToUseLocators.iOSXCUITAutomation() : null, xCuitFindByArray); } } if (isIOS()) { iOSFindBy[] iOSFindByArray = annotatedElement.getAnnotationsByType(iOSFindBy.class); //should be kept for some time iOSFindBys iOSFindBys = annotatedElement.getAnnotation(iOSFindBys.class); iOSFindAll iOSFindAll = annotatedElement.getAnnotation(iOSFindAll.class); if (iOSFindByArray != null && iOSFindByArray.length == 1) { return createBy(new Annotation[] {iOSFindByArray[0]}, HowToUseSelectors.USE_ONE); } if (iOSFindBys != null) { return createBy(iOSFindBys.value(), HowToUseSelectors.BUILD_CHAINED); } if (iOSFindAll != null) { return createBy(iOSFindAll.value(), HowToUseSelectors.USE_ANY); } /////////////////////////////////////// //code that supposed to be supported if (iOSFindByArray != null && iOSFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? howToUseLocators.iOSAutomation() : null, iOSFindByArray); } } if (isWindows()) { WindowsFindBy[] windowsFindByArray = annotatedElement.getAnnotationsByType(WindowsFindBy.class); if (windowsFindByArray != null && windowsFindByArray.length > 0) { return buildMobileBy(howToUseLocators != null ? howToUseLocators.windowsAutomation() : null, windowsFindByArray); } } return null; } @Override public boolean isLookupCached() { AnnotatedElement annotatedElement = annotatedElementContainer.getAnnotated(); return (annotatedElement.getAnnotation(CacheLookup.class) != null); } private By returnMappedBy(By byDefault, By nativeAppBy) { Map<ContentType, By> contentMap = new HashMap<>(); contentMap.put(ContentType.HTML_OR_DEFAULT, byDefault); contentMap.put(ContentType.NATIVE_MOBILE_SPECIFIC, nativeAppBy); return new ContentMappedBy(contentMap); } @Override public By buildBy() { assertValidAnnotations(); By defaultBy = buildDefaultBy(); By mobileNativeBy = buildMobileNativeBy(); String idOrName = ((Field) annotatedElementContainer.getAnnotated()).getName(); if (defaultBy == null && mobileNativeBy == null) { defaultBy = new ByIdOrName(((Field) annotatedElementContainer.getAnnotated()).getName()); mobileNativeBy = new By.ById(idOrName); return returnMappedBy(defaultBy, mobileNativeBy); } if (defaultBy == null) { defaultBy = new ByIdOrName(((Field) annotatedElementContainer.getAnnotated()).getName()); return returnMappedBy(defaultBy, mobileNativeBy); } if (mobileNativeBy == null) { mobileNativeBy = defaultBy; return returnMappedBy(defaultBy, mobileNativeBy); } return returnMappedBy(defaultBy, mobileNativeBy); } }
#565: Annotation comparator.
src/main/java/io/appium/java_client/pagefactory/DefaultElementByBuilder.java
#565: Annotation comparator.
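The commit adds an AnnotationComparator that orders annotations of the same type by the int returned from their priority() member, looked up reflectively. A minimal sketch of the same ordering idea; the Locator annotation is hypothetical, and because its type is known at compile time the comparator can call priority() directly instead of going through reflection:

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class PriorityComparatorSketch {

    // Hypothetical stand-in for a find-by style annotation that carries a priority() member.
    @Retention(RetentionPolicy.RUNTIME)
    @interface Locator {
        String value();

        int priority() default 0;
    }

    @Locator(value = "low", priority = 1)
    void a() {
    }

    @Locator(value = "high", priority = 5)
    void b() {
    }

    // Same ordering rule as the new AnnotationComparator: smaller priority first.
    static final Comparator<Locator> BY_PRIORITY = Comparator.comparingInt(Locator::priority);

    public static void main(String[] args) {
        List<Locator> locators = new ArrayList<>();
        for (Method m : PriorityComparatorSketch.class.getDeclaredMethods()) {
            Locator l = m.getAnnotation(Locator.class);
            if (l != null) {
                locators.add(l);
            }
        }
        locators.sort(BY_PRIORITY);
        locators.forEach(l -> System.out.println(l.value() + " -> " + l.priority()));
        // prints: low -> 1, then high -> 5
    }
}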
Java
apache-2.0
332e481d87c32fac39bb19a630a414d759b90400
0
Grasia/swellrt,Grasia/swellrt,P2Pvalue/swellrt,P2Pvalue/swellrt,Grasia/swellrt,P2Pvalue/swellrt,P2Pvalue/swellrt,Grasia/swellrt
package org.swellrt.beta.client.platform.java; import java.io.IOException; import java.nio.charset.Charset; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.ResponseHandler; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import org.swellrt.beta.client.ServiceContext; import org.swellrt.beta.client.rest.ServerOperation.Method; import org.swellrt.beta.client.rest.ServerOperationExecutor; import org.swellrt.beta.client.rest.ServiceOperation.OperationError; import org.swellrt.beta.client.rest.ServiceOperation.Options; import com.google.gson.Gson; public class JavaServerOperationExecutor extends ServerOperationExecutor { CloseableHttpClient hc = HttpClients.createDefault(); Gson gson = new Gson(); protected JavaServerOperationExecutor(ServiceContext context) { super(context); } @SuppressWarnings("unchecked") @Override protected void executeHTTP(Method method, String url, Header[] headers, String body, HTTPCallback httpCallback) throws Exception { HttpUriRequest hm = null; switch (method) { case GET: hm = new HttpGet(url); break; case POST: HttpPost post = new HttpPost(url); post.setEntity(new StringEntity(body)); hm = post; break; case DELETE: hm = new HttpDelete(url); break; default: httpCallback.onFailure(new IllegalStateException("HTTP method not implemented")); return; } for (int i = 0; i < headers.length; i++) { if (headers[i].value != null) { hm.addHeader(headers[i].name, headers[i].value); } } try { hc.execute(hm, new ResponseHandler<String>() { @Override public String handleResponse(HttpResponse response) throws ClientProtocolException, IOException { int status = response.getStatusLine().getStatusCode(); HttpEntity entity = response.getEntity(); String result = entity != null ? EntityUtils.toString(entity, Charset.forName("UTF-8")) : ""; httpCallback.onResponse(status, response.getStatusLine().getReasonPhrase(), result); return result; } }); } catch (ClientProtocolException e) { httpCallback.onFailure(e); } catch (IOException e) { httpCallback.onFailure(e); } } @Override protected OperationError parseServiceError(String json) { return gson.fromJson(json, OperationError.class); } @Override protected <O extends Options> String toJson(O options) { if (options != null) return gson.toJson(options); return null; } }
wave/src/main/java/org/swellrt/beta/client/platform/java/JavaServerOperationExecutor.java
package org.swellrt.beta.client.platform.java; import java.io.IOException; import java.nio.charset.Charset; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.ResponseHandler; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import org.swellrt.beta.client.ServiceContext; import org.swellrt.beta.client.rest.ServerOperation.Method; import org.swellrt.beta.client.rest.ServerOperationExecutor; import org.swellrt.beta.client.rest.ServiceOperation.OperationError; import org.swellrt.beta.client.rest.ServiceOperation.Options; import org.swellrt.beta.client.rest.ServiceOperation.Response; public class JavaServerOperationExecutor extends ServerOperationExecutor { CloseableHttpClient hc = HttpClients.createDefault(); protected JavaServerOperationExecutor(ServiceContext context) { super(context); } @SuppressWarnings("unchecked") @Override protected void executeHTTP(Method method, String url, Header[] headers, String body, HTTPCallback httpCallback) throws Exception { HttpUriRequest hm = null; switch (method) { case GET: hm = new HttpGet(url); break; case POST: HttpPost post = new HttpPost(url); post.setEntity(new StringEntity(body)); hm = post; break; case DELETE: hm = new HttpDelete(url); break; default: httpCallback.onFailure(new IllegalStateException("HTTP method not implemented")); return; } for (int i = 0; i < headers.length; i++) { if (headers[i].value != null) { hm.addHeader(headers[i].name, headers[i].value); } } try { hc.execute(hm, new ResponseHandler<String>() { @Override public String handleResponse(HttpResponse response) throws ClientProtocolException, IOException { int status = response.getStatusLine().getStatusCode(); HttpEntity entity = response.getEntity(); String result = entity != null ? EntityUtils.toString(entity, Charset.forName("UTF-8")) : ""; httpCallback.onResponse(status, response.getStatusLine().getReasonPhrase(), result); return result; } }); } catch (ClientProtocolException e) { httpCallback.onFailure(e); } catch (IOException e) { httpCallback.onFailure(e); } } @Override protected OperationError parseServiceError(String json) { // TODO Auto-generated method stub return null; } @Override protected <R extends Response> R parseResponse(String json) { // TODO Auto-generated method stub return null; } @Override protected <O extends Options> String toJson(O options) { // TODO Auto-generated method stub return null; } }
using Gson parsing/serialization for the Java op executor
wave/src/main/java/org/swellrt/beta/client/platform/java/JavaServerOperationExecutor.java
using Gson parsing/serialization for the Java op executor
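The change replaces the stubbed-out JSON handling with Gson: fromJson for the service error and toJson for the operation options. A small, self-contained sketch of that round trip, assuming Gson is on the classpath and using a hypothetical ErrorPayload class instead of SwellRT's real OperationError:

import com.google.gson.Gson;

public class GsonRoundTripSketch {

    // Hypothetical stand-in for OperationError: Gson maps JSON keys onto these fields by name.
    static class ErrorPayload {
        String error;
        String message;
    }

    public static void main(String[] args) {
        Gson gson = new Gson();

        // parse, as in parseServiceError(String json)
        ErrorPayload parsed = gson.fromJson(
                "{\"error\":\"NOT_LOGGED_IN\",\"message\":\"Session expired\"}", ErrorPayload.class);
        System.out.println(parsed.error + ": " + parsed.message);

        // serialize, as in toJson(O options)
        ErrorPayload out = new ErrorPayload();
        out.error = "INVALID_ARGUMENT";
        out.message = "Missing id";
        System.out.println(gson.toJson(out)); // {"error":"INVALID_ARGUMENT","message":"Missing id"}
    }
}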
Java
apache-2.0
0f0713aee7a6e4c2f97061b8e1833b6a9674a4c9
0
consulo/consulo-csharp,consulo/consulo-csharp,consulo/consulo-csharp
package org.mustbe.consulo.csharp.ide.highlight.util; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.mustbe.consulo.csharp.ide.highlight.quickFix.RenameQuickFix; import org.mustbe.consulo.csharp.lang.psi.CSharpConstructorDeclaration; import org.mustbe.consulo.csharp.lang.psi.CSharpTypeDeclaration; import com.intellij.codeInsight.daemon.impl.HighlightInfo; import com.intellij.codeInsight.daemon.impl.HighlightInfoType; import com.intellij.codeInsight.daemon.impl.quickfix.QuickFixAction; import com.intellij.openapi.util.Comparing; import com.intellij.psi.PsiElement; /** * @author VISTALL * @since 12.11.14 */ public class ConstructorHighlightUtil { @Nullable public static HighlightInfo checkConstructorDeclaration(@NotNull CSharpConstructorDeclaration declaration) { PsiElement nameIdentifier = declaration.getNameIdentifier(); if(nameIdentifier == null) { return null; } PsiElement parent = declaration.getParent(); if(!(parent instanceof CSharpTypeDeclaration)) { return null; } String expectedTypeName = ((CSharpTypeDeclaration) parent).getName(); if(expectedTypeName == null) { return null; } if(!Comparing.equal(expectedTypeName, nameIdentifier.getText())) { HighlightInfo.Builder builder = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR); builder = builder.descriptionAndTooltip("Expected method name"); builder = builder.range(nameIdentifier); HighlightInfo highlightInfo = builder.create(); QuickFixAction.registerQuickFixAction(highlightInfo, new RenameQuickFix(expectedTypeName, declaration)); return highlightInfo; } return null; } }
csharp-impl/src/org/mustbe/consulo/csharp/ide/highlight/util/ConstructorHighlightUtil.java
package org.mustbe.consulo.csharp.ide.highlight.util; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.mustbe.consulo.csharp.ide.highlight.quickFix.RenameQuickFix; import org.mustbe.consulo.csharp.lang.psi.CSharpConstructorDeclaration; import org.mustbe.consulo.csharp.lang.psi.CSharpTypeDeclaration; import com.intellij.codeInsight.daemon.impl.HighlightInfo; import com.intellij.codeInsight.daemon.impl.HighlightInfoType; import com.intellij.codeInsight.daemon.impl.quickfix.QuickFixAction; import com.intellij.openapi.util.Comparing; import com.intellij.psi.PsiElement; /** * @author VISTALL * @since 12.11.14 */ public class ConstructorHighlightUtil { @Nullable public static HighlightInfo checkConstructorDeclaration(@NotNull CSharpConstructorDeclaration declaration) { PsiElement nameIdentifier = declaration.getNameIdentifier(); if(nameIdentifier == null) { return null; } CSharpTypeDeclaration typeDeclaration = (CSharpTypeDeclaration) declaration.getParent(); String expectedTypeName = typeDeclaration.getName(); if(expectedTypeName == null) { return null; } if(!Comparing.equal(expectedTypeName, nameIdentifier.getText())) { HighlightInfo.Builder builder = HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR); builder = builder.descriptionAndTooltip("Expected method name"); builder = builder.range(nameIdentifier); HighlightInfo highlightInfo = builder.create(); QuickFixAction.registerQuickFixAction(highlightInfo, new RenameQuickFix(expectedTypeName, declaration)); return highlightInfo; } return null; } }
when the PSI is broken, the parent can be something other than a type declaration - fix CCE
csharp-impl/src/org/mustbe/consulo/csharp/ide/highlight/util/ConstructorHighlightUtil.java
when the PSI is broken, the parent can be something other than a type declaration - fix CCE
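The fix swaps an unchecked cast of declaration.getParent() for an instanceof check, so a broken PSI tree no longer triggers a ClassCastException. A tiny sketch of that guard pattern, with hypothetical Node/TypeNode stand-ins for the PSI classes:

public class InstanceofGuardSketch {

    // Hypothetical stand-ins for the PSI classes.
    static class Node {
        Node parent;
    }

    static class TypeNode extends Node {
        String name;
    }

    // Mirrors the fixed checkConstructorDeclaration: bail out with null instead of casting blindly.
    static String expectedTypeName(Node declaration) {
        Node parent = declaration.parent;
        if (!(parent instanceof TypeNode)) { // broken tree: the parent may not be a type at all
            return null;
        }
        return ((TypeNode) parent).name;
    }

    public static void main(String[] args) {
        Node broken = new Node();
        broken.parent = new Node(); // not a TypeNode
        System.out.println(expectedTypeName(broken)); // null, no ClassCastException

        TypeNode type = new TypeNode();
        type.name = "Foo";
        Node ok = new Node();
        ok.parent = type;
        System.out.println(expectedTypeName(ok)); // Foo
    }
}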
Java
apache-2.0
0304088b3171947b0920762131da58f18f5423c2
0
androidx/androidx,androidx/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,androidx/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,AndroidX/androidx
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.support.v4.view; import android.content.Context; import android.content.res.TypedArray; import android.database.DataSetObserver; import android.graphics.Canvas; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Parcel; import android.os.Parcelable; import android.os.SystemClock; import android.support.v4.os.ParcelableCompat; import android.support.v4.os.ParcelableCompatCreatorCallbacks; import android.support.v4.widget.EdgeEffectCompat; import android.util.AttributeSet; import android.util.Log; import android.view.FocusFinder; import android.view.Gravity; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.SoundEffectConstants; import android.view.VelocityTracker; import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; import android.view.ViewParent; import android.view.accessibility.AccessibilityEvent; import android.view.animation.Interpolator; import android.widget.Scroller; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; /** * Layout manager that allows the user to flip left and right * through pages of data. You supply an implementation of a * {@link PagerAdapter} to generate the pages that the view shows. * * <p>Note this class is currently under early design and * development. The API will likely change in later updates of * the compatibility library, requiring changes to the source code * of apps when they are compiled against the newer version.</p> */ public class ViewPager extends ViewGroup { private static final String TAG = "ViewPager"; private static final boolean DEBUG = false; private static final boolean USE_CACHE = false; private static final int DEFAULT_OFFSCREEN_PAGES = 1; private static final int MAX_SETTLE_DURATION = 600; // ms private static final int[] LAYOUT_ATTRS = new int[] { android.R.attr.layout_gravity }; static class ItemInfo { Object object; int position; boolean scrolling; } private static final Comparator<ItemInfo> COMPARATOR = new Comparator<ItemInfo>(){ @Override public int compare(ItemInfo lhs, ItemInfo rhs) { return lhs.position - rhs.position; }}; private static final Interpolator sInterpolator = new Interpolator() { public float getInterpolation(float t) { // _o(t) = t * t * ((tension + 1) * t + tension) // o(t) = _o(t - 1) + 1 t -= 1.0f; return t * t * t + 1.0f; } }; private final ArrayList<ItemInfo> mItems = new ArrayList<ItemInfo>(); private PagerAdapter mAdapter; private int mCurItem; // Index of currently displayed page. 
private int mRestoredCurItem = -1; private Parcelable mRestoredAdapterState = null; private ClassLoader mRestoredClassLoader = null; private Scroller mScroller; private PagerObserver mObserver; private int mPageMargin; private Drawable mMarginDrawable; private int mTopPageBounds; private int mBottomPageBounds; private int mChildWidthMeasureSpec; private int mChildHeightMeasureSpec; private boolean mInLayout; private boolean mScrollingCacheEnabled; private boolean mPopulatePending; private boolean mIsPopulating; private boolean mScrolling; private int mOffscreenPageLimit = DEFAULT_OFFSCREEN_PAGES; private boolean mIsBeingDragged; private boolean mIsUnableToDrag; private int mTouchSlop; private float mInitialMotionX; /** * Position of the last motion event. */ private float mLastMotionX; private float mLastMotionY; /** * ID of the active pointer. This is used to retain consistency during * drags/flings if multiple pointers are used. */ private int mActivePointerId = INVALID_POINTER; /** * Sentinel value for no current active pointer. * Used by {@link #mActivePointerId}. */ private static final int INVALID_POINTER = -1; /** * Determines speed during touch scrolling */ private VelocityTracker mVelocityTracker; private int mMinimumVelocity; private int mMaximumVelocity; private float mBaseLineFlingVelocity; private float mFlingVelocityInfluence; private boolean mFakeDragging; private long mFakeDragBeginTime; private EdgeEffectCompat mLeftEdge; private EdgeEffectCompat mRightEdge; private boolean mFirstLayout = true; private boolean mCalledSuper; private int mDecorChildCount; private OnPageChangeListener mOnPageChangeListener; private OnPageChangeListener mInternalPageChangeListener; private OnAdapterChangeListener mAdapterChangeListener; /** * Indicates that the pager is in an idle, settled state. The current page * is fully in view and no animation is in progress. */ public static final int SCROLL_STATE_IDLE = 0; /** * Indicates that the pager is currently being dragged by the user. */ public static final int SCROLL_STATE_DRAGGING = 1; /** * Indicates that the pager is in the process of settling to a final position. */ public static final int SCROLL_STATE_SETTLING = 2; private int mScrollState = SCROLL_STATE_IDLE; /** * Callback interface for responding to changing state of the selected page. */ public interface OnPageChangeListener { /** * This method will be invoked when the current page is scrolled, either as part * of a programmatically initiated smooth scroll or a user initiated touch scroll. * * @param position Position index of the first page currently being displayed. * Page position+1 will be visible if positionOffset is nonzero. * @param positionOffset Value from [0, 1) indicating the offset from the page at position. * @param positionOffsetPixels Value in pixels indicating the offset from position. */ public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels); /** * This method will be invoked when a new page becomes selected. Animation is not * necessarily complete. * * @param position Position index of the new selected page. */ public void onPageSelected(int position); /** * Called when the scroll state changes. Useful for discovering when the user * begins dragging, when the pager is automatically settling to the current page, * or when it is fully stopped/idle. * * @param state The new scroll state. 
* @see ViewPager#SCROLL_STATE_IDLE * @see ViewPager#SCROLL_STATE_DRAGGING * @see ViewPager#SCROLL_STATE_SETTLING */ public void onPageScrollStateChanged(int state); } /** * Simple implementation of the {@link OnPageChangeListener} interface with stub * implementations of each method. Extend this if you do not intend to override * every method of {@link OnPageChangeListener}. */ public static class SimpleOnPageChangeListener implements OnPageChangeListener { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { // This space for rent } @Override public void onPageSelected(int position) { // This space for rent } @Override public void onPageScrollStateChanged(int state) { // This space for rent } } /** * Used internally to monitor when adapters are switched. */ interface OnAdapterChangeListener { public void onAdapterChanged(PagerAdapter oldAdapter, PagerAdapter newAdapter); } /** * Used internally to tag special types of child views that should be added as * pager decorations by default. */ interface Decor {} public ViewPager(Context context) { super(context); initViewPager(); } public ViewPager(Context context, AttributeSet attrs) { super(context, attrs); initViewPager(); } void initViewPager() { setWillNotDraw(false); setDescendantFocusability(FOCUS_AFTER_DESCENDANTS); setFocusable(true); final Context context = getContext(); mScroller = new Scroller(context, sInterpolator); final ViewConfiguration configuration = ViewConfiguration.get(context); mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration); mMinimumVelocity = configuration.getScaledMinimumFlingVelocity(); mMaximumVelocity = configuration.getScaledMaximumFlingVelocity(); mLeftEdge = new EdgeEffectCompat(context); mRightEdge = new EdgeEffectCompat(context); float density = context.getResources().getDisplayMetrics().density; mBaseLineFlingVelocity = 2500.0f * density; mFlingVelocityInfluence = 0.4f; } private void setScrollState(int newState) { if (mScrollState == newState) { return; } mScrollState = newState; if (mOnPageChangeListener != null) { mOnPageChangeListener.onPageScrollStateChanged(newState); } } /** * Set a PagerAdapter that will supply views for this pager as needed. * * @param adapter Adapter to use */ public void setAdapter(PagerAdapter adapter) { if (mAdapter != null) { mAdapter.unregisterDataSetObserver(mObserver); mAdapter.startUpdate(this); for (int i = 0; i < mItems.size(); i++) { final ItemInfo ii = mItems.get(i); mAdapter.destroyItem(this, ii.position, ii.object); } mAdapter.finishUpdate(this); mItems.clear(); removeNonDecorViews(); mCurItem = 0; scrollTo(0, 0); } final PagerAdapter oldAdapter = mAdapter; mAdapter = adapter; if (mAdapter != null) { if (mObserver == null) { mObserver = new PagerObserver(); } mAdapter.registerDataSetObserver(mObserver); mPopulatePending = false; if (mRestoredCurItem >= 0) { mAdapter.restoreState(mRestoredAdapterState, mRestoredClassLoader); setCurrentItemInternal(mRestoredCurItem, false, true); mRestoredCurItem = -1; mRestoredAdapterState = null; mRestoredClassLoader = null; } else { populate(); } } if (mAdapterChangeListener != null && oldAdapter != adapter) { mAdapterChangeListener.onAdapterChanged(oldAdapter, adapter); } } private void removeNonDecorViews() { for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) { removeViewAt(i); i--; } } } /** * Retrieve the current adapter supplying pages. 
* * @return The currently registered PagerAdapter */ public PagerAdapter getAdapter() { return mAdapter; } void setOnAdapterChangeListener(OnAdapterChangeListener listener) { mAdapterChangeListener = listener; } /** * Set the currently selected page. If the ViewPager has already been through its first * layout there will be a smooth animated transition between the current item and the * specified item. * * @param item Item index to select */ public void setCurrentItem(int item) { mPopulatePending = false; setCurrentItemInternal(item, !mFirstLayout, false); } /** * Set the currently selected page. * * @param item Item index to select * @param smoothScroll True to smoothly scroll to the new item, false to transition immediately */ public void setCurrentItem(int item, boolean smoothScroll) { mPopulatePending = false; setCurrentItemInternal(item, smoothScroll, false); } public int getCurrentItem() { return mCurItem; } void setCurrentItemInternal(int item, boolean smoothScroll, boolean always) { setCurrentItemInternal(item, smoothScroll, always, 0); } void setCurrentItemInternal(int item, boolean smoothScroll, boolean always, int velocity) { if (mAdapter == null || mAdapter.getCount() <= 0) { setScrollingCacheEnabled(false); return; } if (!always && mCurItem == item && mItems.size() != 0) { setScrollingCacheEnabled(false); return; } if (item < 0) { item = 0; } else if (item >= mAdapter.getCount()) { item = mAdapter.getCount() - 1; } final int pageLimit = mOffscreenPageLimit; if (item > (mCurItem + pageLimit) || item < (mCurItem - pageLimit)) { // We are doing a jump by more than one page. To avoid // glitches, we want to keep all current pages in the view // until the scroll ends. for (int i=0; i<mItems.size(); i++) { mItems.get(i).scrolling = true; } } final boolean dispatchSelected = mCurItem != item; mCurItem = item; populate(); final int destX = (getWidth() + mPageMargin) * item; if (smoothScroll) { smoothScrollTo(destX, 0, velocity); if (dispatchSelected && mOnPageChangeListener != null) { mOnPageChangeListener.onPageSelected(item); } if (dispatchSelected && mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageSelected(item); } } else { if (dispatchSelected && mOnPageChangeListener != null) { mOnPageChangeListener.onPageSelected(item); } if (dispatchSelected && mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageSelected(item); } completeScroll(); scrollTo(destX, 0); } } /** * Set a listener that will be invoked whenever the page changes or is incrementally * scrolled. See {@link OnPageChangeListener}. * * @param listener Listener to set */ public void setOnPageChangeListener(OnPageChangeListener listener) { mOnPageChangeListener = listener; } /** * Set a separate OnPageChangeListener for internal use by the support library. * * @param listener Listener to set * @return The old listener that was set, if any. */ OnPageChangeListener setInternalPageChangeListener(OnPageChangeListener listener) { OnPageChangeListener oldListener = mInternalPageChangeListener; mInternalPageChangeListener = listener; return oldListener; } /** * Returns the number of pages that will be retained to either side of the * current page in the view hierarchy in an idle state. Defaults to 1. 
* * @return How many pages will be kept offscreen on either side * @see #setOffscreenPageLimit(int) */ public int getOffscreenPageLimit() { return mOffscreenPageLimit; } /** * Set the number of pages that should be retained to either side of the * current page in the view hierarchy in an idle state. Pages beyond this * limit will be recreated from the adapter when needed. * * <p>This is offered as an optimization. If you know in advance the number * of pages you will need to support or have lazy-loading mechanisms in place * on your pages, tweaking this setting can have benefits in perceived smoothness * of paging animations and interaction. If you have a small number of pages (3-4) * that you can keep active all at once, less time will be spent in layout for * newly created view subtrees as the user pages back and forth.</p> * * <p>You should keep this limit low, especially if your pages have complex layouts. * This setting defaults to 1.</p> * * @param limit How many pages will be kept offscreen in an idle state. */ public void setOffscreenPageLimit(int limit) { if (limit < DEFAULT_OFFSCREEN_PAGES) { Log.w(TAG, "Requested offscreen page limit " + limit + " too small; defaulting to " + DEFAULT_OFFSCREEN_PAGES); limit = DEFAULT_OFFSCREEN_PAGES; } if (limit != mOffscreenPageLimit) { mOffscreenPageLimit = limit; populate(); } } /** * Set the margin between pages. * * @param marginPixels Distance between adjacent pages in pixels * @see #getPageMargin() * @see #setPageMarginDrawable(Drawable) * @see #setPageMarginDrawable(int) */ public void setPageMargin(int marginPixels) { final int oldMargin = mPageMargin; mPageMargin = marginPixels; final int width = getWidth(); recomputeScrollPosition(width, width, marginPixels, oldMargin); requestLayout(); } /** * Return the margin between pages. * * @return The size of the margin in pixels */ public int getPageMargin() { return mPageMargin; } /** * Set a drawable that will be used to fill the margin between pages. * * @param d Drawable to display between pages */ public void setPageMarginDrawable(Drawable d) { mMarginDrawable = d; if (d != null) refreshDrawableState(); setWillNotDraw(d == null); invalidate(); } /** * Set a drawable that will be used to fill the margin between pages. * * @param resId Resource ID of a drawable to display between pages */ public void setPageMarginDrawable(int resId) { setPageMarginDrawable(getContext().getResources().getDrawable(resId)); } @Override protected boolean verifyDrawable(Drawable who) { return super.verifyDrawable(who) || who == mMarginDrawable; } @Override protected void drawableStateChanged() { super.drawableStateChanged(); final Drawable d = mMarginDrawable; if (d != null && d.isStateful()) { d.setState(getDrawableState()); } } // We want the duration of the page snap animation to be influenced by the distance that // the screen has to travel, however, we don't want this duration to be effected in a // purely linear fashion. Instead, we use this method to moderate the effect that the distance // of travel has on the overall snap duration. float distanceInfluenceForSnapDuration(float f) { f -= 0.5f; // center the values about 0. f *= 0.3f * Math.PI / 2.0f; return (float) Math.sin(f); } /** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. 
* * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis */ void smoothScrollTo(int x, int y) { smoothScrollTo(x, y, 0); } /** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. * * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis * @param velocity the velocity associated with a fling, if applicable. (0 otherwise) */ void smoothScrollTo(int x, int y, int velocity) { if (getChildCount() == 0) { // Nothing to do. setScrollingCacheEnabled(false); return; } int sx = getScrollX(); int sy = getScrollY(); int dx = x - sx; int dy = y - sy; if (dx == 0 && dy == 0) { completeScroll(); setScrollState(SCROLL_STATE_IDLE); return; } setScrollingCacheEnabled(true); mScrolling = true; setScrollState(SCROLL_STATE_SETTLING); final float pageDelta = (float) Math.abs(dx) / (getWidth() + mPageMargin); int duration = (int) (pageDelta * 100); velocity = Math.abs(velocity); if (velocity > 0) { duration += (duration / (velocity / mBaseLineFlingVelocity)) * mFlingVelocityInfluence; } else { duration += 100; } duration = Math.min(duration, MAX_SETTLE_DURATION); mScroller.startScroll(sx, sy, dx, dy, duration); invalidate(); } void addNewItem(int position, int index) { ItemInfo ii = new ItemInfo(); ii.position = position; ii.object = mAdapter.instantiateItem(this, position); if (index < 0) { mItems.add(ii); } else { mItems.add(index, ii); } } void dataSetChanged() { // This method only gets called if our observer is attached, so mAdapter is non-null. boolean needPopulate = mItems.size() < 3 && mItems.size() < mAdapter.getCount(); int newCurrItem = -1; boolean isUpdating = false; for (int i = 0; i < mItems.size(); i++) { final ItemInfo ii = mItems.get(i); final int newPos = mAdapter.getItemPosition(ii.object); if (newPos == PagerAdapter.POSITION_UNCHANGED) { continue; } if (newPos == PagerAdapter.POSITION_NONE) { mItems.remove(i); i--; if (!isUpdating) { mAdapter.startUpdate(this); isUpdating = true; } mAdapter.destroyItem(this, ii.position, ii.object); needPopulate = true; if (mCurItem == ii.position) { // Keep the current item in the valid range newCurrItem = Math.max(0, Math.min(mCurItem, mAdapter.getCount() - 1)); } continue; } if (ii.position != newPos) { if (ii.position == mCurItem) { // Our current item changed position. Follow it. newCurrItem = newPos; } ii.position = newPos; needPopulate = true; } } if (isUpdating) { mAdapter.finishUpdate(this); } Collections.sort(mItems, COMPARATOR); if (newCurrItem >= 0) { // TODO This currently causes a jump. setCurrentItemInternal(newCurrItem, false, true); needPopulate = true; } if (needPopulate) { populate(); requestLayout(); } } void populate() { if (mAdapter == null) { return; } // Bail now if we are waiting to populate. This is to hold off // on creating views from the time the user releases their finger to // fling to a new position until we have finished the scroll to // that position, avoiding glitches from happening at that point. if (mPopulatePending) { if (DEBUG) Log.i(TAG, "populate is pending, skipping for now..."); return; } // Also, don't populate until we are attached to a window. This is to // avoid trying to populate before we have restored our view hierarchy // state and conflicting with what is restored. 
if (getWindowToken() == null) { return; } mIsPopulating = true; mAdapter.startUpdate(this); final int pageLimit = mOffscreenPageLimit; final int startPos = Math.max(0, mCurItem - pageLimit); final int N = mAdapter.getCount(); final int endPos = Math.min(N-1, mCurItem + pageLimit); if (DEBUG) Log.v(TAG, "populating: startPos=" + startPos + " endPos=" + endPos); // Add and remove pages in the existing list. int lastPos = -1; for (int i=0; i<mItems.size(); i++) { ItemInfo ii = mItems.get(i); if ((ii.position < startPos || ii.position > endPos) && !ii.scrolling) { if (DEBUG) Log.i(TAG, "removing: " + ii.position + " @ " + i); mItems.remove(i); i--; mAdapter.destroyItem(this, ii.position, ii.object); } else if (lastPos < endPos && ii.position > startPos) { // The next item is outside of our range, but we have a gap // between it and the last item where we want to have a page // shown. Fill in the gap. lastPos++; if (lastPos < startPos) { lastPos = startPos; } while (lastPos <= endPos && lastPos < ii.position) { if (DEBUG) Log.i(TAG, "inserting: " + lastPos + " @ " + i); addNewItem(lastPos, i); lastPos++; i++; } } lastPos = ii.position; } // Add any new pages we need at the end. lastPos = mItems.size() > 0 ? mItems.get(mItems.size()-1).position : -1; if (lastPos < endPos) { lastPos++; lastPos = lastPos > startPos ? lastPos : startPos; while (lastPos <= endPos) { if (DEBUG) Log.i(TAG, "appending: " + lastPos); addNewItem(lastPos, -1); lastPos++; } } if (DEBUG) { Log.i(TAG, "Current page list:"); for (int i=0; i<mItems.size(); i++) { Log.i(TAG, "#" + i + ": page " + mItems.get(i).position); } } ItemInfo curItem = null; for (int i=0; i<mItems.size(); i++) { if (mItems.get(i).position == mCurItem) { curItem = mItems.get(i); break; } } mAdapter.setPrimaryItem(this, mCurItem, curItem != null ? curItem.object : null); mAdapter.finishUpdate(this); mIsPopulating = false; if (hasFocus()) { View currentFocused = findFocus(); ItemInfo ii = currentFocused != null ? 
infoForAnyChild(currentFocused) : null; if (ii == null || ii.position != mCurItem) { for (int i=0; i<getChildCount(); i++) { View child = getChildAt(i); ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { if (child.requestFocus(FOCUS_FORWARD)) { break; } } } } } } public static class SavedState extends BaseSavedState { int position; Parcelable adapterState; ClassLoader loader; public SavedState(Parcelable superState) { super(superState); } @Override public void writeToParcel(Parcel out, int flags) { super.writeToParcel(out, flags); out.writeInt(position); out.writeParcelable(adapterState, flags); } @Override public String toString() { return "FragmentPager.SavedState{" + Integer.toHexString(System.identityHashCode(this)) + " position=" + position + "}"; } public static final Parcelable.Creator<SavedState> CREATOR = ParcelableCompat.newCreator(new ParcelableCompatCreatorCallbacks<SavedState>() { @Override public SavedState createFromParcel(Parcel in, ClassLoader loader) { return new SavedState(in, loader); } @Override public SavedState[] newArray(int size) { return new SavedState[size]; } }); SavedState(Parcel in, ClassLoader loader) { super(in); if (loader == null) { loader = getClass().getClassLoader(); } position = in.readInt(); adapterState = in.readParcelable(loader); this.loader = loader; } } @Override public Parcelable onSaveInstanceState() { Parcelable superState = super.onSaveInstanceState(); SavedState ss = new SavedState(superState); ss.position = mCurItem; if (mAdapter != null) { ss.adapterState = mAdapter.saveState(); } return ss; } @Override public void onRestoreInstanceState(Parcelable state) { if (!(state instanceof SavedState)) { super.onRestoreInstanceState(state); return; } SavedState ss = (SavedState)state; super.onRestoreInstanceState(ss.getSuperState()); if (mAdapter != null) { mAdapter.restoreState(ss.adapterState, ss.loader); setCurrentItemInternal(ss.position, false, true); } else { mRestoredCurItem = ss.position; mRestoredAdapterState = ss.adapterState; mRestoredClassLoader = ss.loader; } } @Override public void addView(View child, int index, ViewGroup.LayoutParams params) { if (!checkLayoutParams(params)) { params = generateLayoutParams(params); } final LayoutParams lp = (LayoutParams) params; lp.isDecor |= child instanceof Decor; if (mInLayout) { if (lp != null && lp.isDecor) { throw new IllegalStateException("Cannot add pager decor view during layout"); } addViewInLayout(child, index, params); child.measure(mChildWidthMeasureSpec, mChildHeightMeasureSpec); } else { super.addView(child, index, params); } if (USE_CACHE) { if (child.getVisibility() != GONE) { child.setDrawingCacheEnabled(mScrollingCacheEnabled); } else { child.setDrawingCacheEnabled(false); } } } ItemInfo infoForChild(View child) { for (int i=0; i<mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (mAdapter.isViewFromObject(child, ii.object)) { return ii; } } return null; } ItemInfo infoForAnyChild(View child) { ViewParent parent; while ((parent=child.getParent()) != this) { if (parent == null || !(parent instanceof View)) { return null; } child = (View)parent; } return infoForChild(child); } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); mFirstLayout = true; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { // For simple implementation, or internal size is always 0. // We depend on the container to specify the layout size of // our view. 
We can't really know what it is since we will be // adding and removing different arbitrary views and do not // want the layout to change as this happens. setMeasuredDimension(getDefaultSize(0, widthMeasureSpec), getDefaultSize(0, heightMeasureSpec)); // Children are just made to fill our space. int childWidthSize = getMeasuredWidth() - getPaddingLeft() - getPaddingRight(); int childHeightSize = getMeasuredHeight() - getPaddingTop() - getPaddingBottom(); /* * Make sure all children have been properly measured. Decor views first. * Right now we cheat and make this less complicated by assuming decor * views won't intersect. We will pin to edges based on gravity. */ int size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp != null && lp.isDecor) { final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK; Log.d(TAG, "gravity: " + lp.gravity + " hgrav: " + hgrav + " vgrav: " + vgrav); int widthMode = MeasureSpec.AT_MOST; int heightMode = MeasureSpec.AT_MOST; boolean consumeVertical = vgrav == Gravity.TOP || vgrav == Gravity.BOTTOM; boolean consumeHorizontal = hgrav == Gravity.LEFT || hgrav == Gravity.RIGHT; if (consumeVertical) { widthMode = MeasureSpec.EXACTLY; } else if (consumeHorizontal) { heightMode = MeasureSpec.EXACTLY; } final int widthSpec = MeasureSpec.makeMeasureSpec(childWidthSize, widthMode); final int heightSpec = MeasureSpec.makeMeasureSpec(childHeightSize, heightMode); child.measure(widthSpec, heightSpec); if (consumeVertical) { childHeightSize -= child.getMeasuredHeight(); } else if (consumeHorizontal) { childWidthSize -= child.getMeasuredWidth(); } } } } mChildWidthMeasureSpec = MeasureSpec.makeMeasureSpec(childWidthSize, MeasureSpec.EXACTLY); mChildHeightMeasureSpec = MeasureSpec.makeMeasureSpec(childHeightSize, MeasureSpec.EXACTLY); // Make sure we have created all fragments that we need to have shown. mInLayout = true; populate(); mInLayout = false; // Page views next. size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { if (DEBUG) Log.v(TAG, "Measuring #" + i + " " + child + ": " + mChildWidthMeasureSpec); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp == null || !lp.isDecor) { child.measure(mChildWidthMeasureSpec, mChildHeightMeasureSpec); } } } } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); // Make sure scroll position is set correctly. if (w != oldw) { recomputeScrollPosition(w, oldw, mPageMargin, mPageMargin); } } private void recomputeScrollPosition(int width, int oldWidth, int margin, int oldMargin) { final int widthWithMargin = width + margin; if (oldWidth > 0) { final int oldScrollPos = getScrollX(); final int oldwwm = oldWidth + oldMargin; final int oldScrollItem = oldScrollPos / oldwwm; final float scrollOffset = (float) (oldScrollPos % oldwwm) / oldwwm; final int scrollPos = (int) ((oldScrollItem + scrollOffset) * widthWithMargin); scrollTo(scrollPos, getScrollY()); if (!mScroller.isFinished()) { // We now return to your regularly scheduled scroll, already in progress. 
final int newDuration = mScroller.getDuration() - mScroller.timePassed(); mScroller.startScroll(scrollPos, 0, mCurItem * widthWithMargin, 0, newDuration); } } else { int scrollPos = mCurItem * widthWithMargin; if (scrollPos != getScrollX()) { completeScroll(); scrollTo(scrollPos, getScrollY()); } } } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { mInLayout = true; populate(); mInLayout = false; final int count = getChildCount(); int width = r - l; int height = b - t; int paddingLeft = getPaddingLeft(); int paddingTop = getPaddingTop(); int paddingRight = getPaddingRight(); int paddingBottom = getPaddingBottom(); final int scrollX = getScrollX(); int decorCount = 0; for (int i = 0; i < count; i++) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { final LayoutParams lp = (LayoutParams) child.getLayoutParams(); ItemInfo ii; int childLeft = 0; int childTop = 0; if (lp.isDecor) { final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK; switch (hgrav) { default: childLeft = paddingLeft; break; case Gravity.LEFT: childLeft = paddingLeft; paddingLeft += child.getMeasuredWidth(); break; case Gravity.CENTER_HORIZONTAL: childLeft = Math.max((width - child.getMeasuredWidth()) / 2, paddingLeft); break; case Gravity.RIGHT: childLeft = width - paddingRight - child.getMeasuredWidth(); paddingRight += child.getMeasuredWidth(); break; } switch (vgrav) { default: childTop = paddingTop; break; case Gravity.TOP: childTop = paddingTop; paddingTop += child.getMeasuredHeight(); break; case Gravity.CENTER_VERTICAL: childTop = Math.max((height - child.getMeasuredHeight()) / 2, paddingTop); break; case Gravity.BOTTOM: childTop = height - paddingBottom - child.getMeasuredHeight(); paddingBottom += child.getMeasuredHeight(); break; } childLeft += scrollX; decorCount++; child.layout(childLeft, childTop, childLeft + child.getMeasuredWidth(), childTop + child.getMeasuredHeight()); } else if ((ii = infoForChild(child)) != null) { int loff = (width + mPageMargin) * ii.position; childLeft = paddingLeft + loff; childTop = paddingTop; if (DEBUG) Log.v(TAG, "Positioning #" + i + " " + child + " f=" + ii.object + ":" + childLeft + "," + childTop + " " + child.getMeasuredWidth() + "x" + child.getMeasuredHeight()); child.layout(childLeft, childTop, childLeft + child.getMeasuredWidth(), childTop + child.getMeasuredHeight()); } } } mTopPageBounds = paddingTop; mBottomPageBounds = height - paddingBottom; mDecorChildCount = decorCount; mFirstLayout = false; } @Override public void computeScroll() { if (DEBUG) Log.i(TAG, "computeScroll: finished=" + mScroller.isFinished()); if (!mScroller.isFinished()) { if (mScroller.computeScrollOffset()) { if (DEBUG) Log.i(TAG, "computeScroll: still scrolling"); int oldX = getScrollX(); int oldY = getScrollY(); int x = mScroller.getCurrX(); int y = mScroller.getCurrY(); if (oldX != x || oldY != y) { scrollTo(x, y); pageScrolled(x); } // Keep on drawing until the animation has finished. invalidate(); return; } } // Done with scroll, clean up state. 
completeScroll(); } private void pageScrolled(int xpos) { final int widthWithMargin = getWidth() + mPageMargin; final int position = xpos / widthWithMargin; final int offsetPixels = xpos % widthWithMargin; final float offset = (float) offsetPixels / widthWithMargin; mCalledSuper = false; onPageScrolled(position, offset, offsetPixels); if (!mCalledSuper) { throw new IllegalStateException( "onPageScrolled did not call superclass implementation"); } } /** * This method will be invoked when the current page is scrolled, either as part * of a programmatically initiated smooth scroll or a user initiated touch scroll. * If you override this method you must call through to the superclass implementation * (e.g. super.onPageScrolled(position, offset, offsetPixels)) before onPageScrolled * returns. * * @param position Position index of the first page currently being displayed. * Page position+1 will be visible if positionOffset is nonzero. * @param offset Value from [0, 1) indicating the offset from the page at position. * @param offsetPixels Value in pixels indicating the offset from position. */ protected void onPageScrolled(int position, float offset, int offsetPixels) { // Offset any decor views if needed - keep them on-screen at all times. if (mDecorChildCount > 0) { final int scrollX = getScrollX(); int paddingLeft = getPaddingLeft(); int paddingRight = getPaddingRight(); final int width = getWidth(); final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) continue; final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; int childLeft = 0; switch (hgrav) { default: childLeft = paddingLeft; break; case Gravity.LEFT: childLeft = paddingLeft; paddingLeft += child.getWidth(); break; case Gravity.CENTER_HORIZONTAL: childLeft = Math.max((width - child.getMeasuredWidth()) / 2, paddingLeft); break; case Gravity.RIGHT: childLeft = width - paddingRight - child.getMeasuredWidth(); paddingRight += child.getMeasuredWidth(); break; } childLeft += scrollX; final int childOffset = childLeft - child.getLeft(); if (childOffset != 0) { child.offsetLeftAndRight(childOffset); } } } if (mOnPageChangeListener != null) { mOnPageChangeListener.onPageScrolled(position, offset, offsetPixels); } if (mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageScrolled(position, offset, offsetPixels); } mCalledSuper = true; } private void completeScroll() { boolean needPopulate = mScrolling; if (needPopulate) { // Done with scroll, no longer want to cache view drawing. setScrollingCacheEnabled(false); mScroller.abortAnimation(); int oldX = getScrollX(); int oldY = getScrollY(); int x = mScroller.getCurrX(); int y = mScroller.getCurrY(); if (oldX != x || oldY != y) { scrollTo(x, y); } setScrollState(SCROLL_STATE_IDLE); } mPopulatePending = false; mScrolling = false; for (int i=0; i<mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (ii.scrolling) { needPopulate = true; ii.scrolling = false; } } if (needPopulate) { populate(); } } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { /* * This method JUST determines whether we want to intercept the motion. * If we return true, onMotionEvent will be called and we do the actual * scrolling there. */ final int action = ev.getAction() & MotionEventCompat.ACTION_MASK; // Always take care of the touch gesture being complete. 
if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) { // Release the drag. if (DEBUG) Log.v(TAG, "Intercept done!"); mIsBeingDragged = false; mIsUnableToDrag = false; mActivePointerId = INVALID_POINTER; if (mVelocityTracker != null) { mVelocityTracker.recycle(); mVelocityTracker = null; } return false; } // Nothing more to do here if we have decided whether or not we // are dragging. if (action != MotionEvent.ACTION_DOWN) { if (mIsBeingDragged) { if (DEBUG) Log.v(TAG, "Intercept returning true!"); return true; } if (mIsUnableToDrag) { if (DEBUG) Log.v(TAG, "Intercept returning false!"); return false; } } switch (action) { case MotionEvent.ACTION_MOVE: { /* * mIsBeingDragged == false, otherwise the shortcut would have caught it. Check * whether the user has moved far enough from his original down touch. */ /* * Locally do absolute value. mLastMotionY is set to the y value * of the down event. */ final int activePointerId = mActivePointerId; if (activePointerId == INVALID_POINTER) { // If we don't have a valid id, the touch down wasn't on content. break; } final int pointerIndex = MotionEventCompat.findPointerIndex(ev, activePointerId); final float x = MotionEventCompat.getX(ev, pointerIndex); final float dx = x - mLastMotionX; final float xDiff = Math.abs(dx); final float y = MotionEventCompat.getY(ev, pointerIndex); final float yDiff = Math.abs(y - mLastMotionY); final int scrollX = getScrollX(); if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff); if (canScroll(this, false, (int) dx, (int) x, (int) y)) { // Nested view has scrollable area under this point. Let it be handled there. mInitialMotionX = mLastMotionX = x; mLastMotionY = y; return false; } if (xDiff > mTouchSlop && xDiff > yDiff) { if (DEBUG) Log.v(TAG, "Starting drag!"); mIsBeingDragged = true; setScrollState(SCROLL_STATE_DRAGGING); mLastMotionX = x; setScrollingCacheEnabled(true); } else { if (yDiff > mTouchSlop) { // The finger has moved enough in the vertical // direction to be counted as a drag... abort // any attempt to drag horizontally, to work correctly // with children that have scrolling containers. if (DEBUG) Log.v(TAG, "Starting unable to drag!"); mIsUnableToDrag = true; } } break; } case MotionEvent.ACTION_DOWN: { /* * Remember location of down touch. * ACTION_DOWN always refers to pointer index 0. */ mLastMotionX = mInitialMotionX = ev.getX(); mLastMotionY = ev.getY(); mActivePointerId = MotionEventCompat.getPointerId(ev, 0); if (mScrollState == SCROLL_STATE_SETTLING) { // Let the user 'catch' the pager as it animates. mIsBeingDragged = true; mIsUnableToDrag = false; setScrollState(SCROLL_STATE_DRAGGING); } else { completeScroll(); mIsBeingDragged = false; mIsUnableToDrag = false; } if (DEBUG) Log.v(TAG, "Down at " + mLastMotionX + "," + mLastMotionY + " mIsBeingDragged=" + mIsBeingDragged + "mIsUnableToDrag=" + mIsUnableToDrag); break; } case MotionEventCompat.ACTION_POINTER_UP: onSecondaryPointerUp(ev); break; } if (!mIsBeingDragged) { // Track the velocity as long as we aren't dragging. // Once we start a real drag we will track in onTouchEvent. if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } mVelocityTracker.addMovement(ev); } /* * The only time we want to intercept motion events is if we are in the * drag mode. */ return mIsBeingDragged; } @Override public boolean onTouchEvent(MotionEvent ev) { if (mFakeDragging) { // A fake drag is in progress already, ignore this real one // but still eat the touch events. 
// (It is likely that the user is multi-touching the screen.) return true; } if (ev.getAction() == MotionEvent.ACTION_DOWN && ev.getEdgeFlags() != 0) { // Don't handle edge touches immediately -- they may actually belong to one of our // descendants. return false; } if (mAdapter == null || mAdapter.getCount() == 0) { // Nothing to present or scroll; nothing to touch. return false; } if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } mVelocityTracker.addMovement(ev); final int action = ev.getAction(); boolean needsInvalidate = false; switch (action & MotionEventCompat.ACTION_MASK) { case MotionEvent.ACTION_DOWN: { /* * If being flinged and user touches, stop the fling. isFinished * will be false if being flinged. */ completeScroll(); // Remember where the motion event started mLastMotionX = mInitialMotionX = ev.getX(); mActivePointerId = MotionEventCompat.getPointerId(ev, 0); break; } case MotionEvent.ACTION_MOVE: if (!mIsBeingDragged) { final int pointerIndex = MotionEventCompat.findPointerIndex(ev, mActivePointerId); final float x = MotionEventCompat.getX(ev, pointerIndex); final float xDiff = Math.abs(x - mLastMotionX); final float y = MotionEventCompat.getY(ev, pointerIndex); final float yDiff = Math.abs(y - mLastMotionY); if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff); if (xDiff > mTouchSlop && xDiff > yDiff) { if (DEBUG) Log.v(TAG, "Starting drag!"); mIsBeingDragged = true; mLastMotionX = x; setScrollState(SCROLL_STATE_DRAGGING); setScrollingCacheEnabled(true); } } if (mIsBeingDragged) { // Scroll to follow the motion event final int activePointerIndex = MotionEventCompat.findPointerIndex( ev, mActivePointerId); final float x = MotionEventCompat.getX(ev, activePointerIndex); final float deltaX = mLastMotionX - x; mLastMotionX = x; float oldScrollX = getScrollX(); float scrollX = oldScrollX + deltaX; final int width = getWidth(); final int widthWithMargin = width + mPageMargin; final int lastItemIndex = mAdapter.getCount() - 1; final float leftBound = Math.max(0, (mCurItem - 1) * widthWithMargin); final float rightBound = Math.min(mCurItem + 1, lastItemIndex) * widthWithMargin; if (scrollX < leftBound) { if (leftBound == 0) { float over = -scrollX; needsInvalidate = mLeftEdge.onPull(over / width); } scrollX = leftBound; } else if (scrollX > rightBound) { if (rightBound == lastItemIndex * widthWithMargin) { float over = scrollX - rightBound; needsInvalidate = mRightEdge.onPull(over / width); } scrollX = rightBound; } // Don't lose the rounded component mLastMotionX += scrollX - (int) scrollX; scrollTo((int) scrollX, getScrollY()); pageScrolled((int) scrollX); } break; case MotionEvent.ACTION_UP: if (mIsBeingDragged) { final VelocityTracker velocityTracker = mVelocityTracker; velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity); int initialVelocity = (int) VelocityTrackerCompat.getXVelocity( velocityTracker, mActivePointerId); mPopulatePending = true; final int widthWithMargin = getWidth() + mPageMargin; final int scrollX = getScrollX(); final int currentPage = scrollX / widthWithMargin; int nextPage = initialVelocity > 0 ? 
currentPage : currentPage + 1; setCurrentItemInternal(nextPage, true, true, initialVelocity); mActivePointerId = INVALID_POINTER; endDrag(); needsInvalidate = mLeftEdge.onRelease() | mRightEdge.onRelease(); } break; case MotionEvent.ACTION_CANCEL: if (mIsBeingDragged) { setCurrentItemInternal(mCurItem, true, true); mActivePointerId = INVALID_POINTER; endDrag(); needsInvalidate = mLeftEdge.onRelease() | mRightEdge.onRelease(); } break; case MotionEventCompat.ACTION_POINTER_DOWN: { final int index = MotionEventCompat.getActionIndex(ev); final float x = MotionEventCompat.getX(ev, index); mLastMotionX = x; mActivePointerId = MotionEventCompat.getPointerId(ev, index); break; } case MotionEventCompat.ACTION_POINTER_UP: onSecondaryPointerUp(ev); mLastMotionX = MotionEventCompat.getX(ev, MotionEventCompat.findPointerIndex(ev, mActivePointerId)); break; } if (needsInvalidate) { invalidate(); } return true; } @Override public void draw(Canvas canvas) { super.draw(canvas); boolean needsInvalidate = false; final int overScrollMode = ViewCompat.getOverScrollMode(this); if (overScrollMode == ViewCompat.OVER_SCROLL_ALWAYS || (overScrollMode == ViewCompat.OVER_SCROLL_IF_CONTENT_SCROLLS && mAdapter != null && mAdapter.getCount() > 1)) { if (!mLeftEdge.isFinished()) { final int restoreCount = canvas.save(); final int height = getHeight() - getPaddingTop() - getPaddingBottom(); canvas.rotate(270); canvas.translate(-height + getPaddingTop(), 0); mLeftEdge.setSize(height, getWidth()); needsInvalidate |= mLeftEdge.draw(canvas); canvas.restoreToCount(restoreCount); } if (!mRightEdge.isFinished()) { final int restoreCount = canvas.save(); final int width = getWidth(); final int height = getHeight() - getPaddingTop() - getPaddingBottom(); final int itemCount = mAdapter != null ? mAdapter.getCount() : 1; canvas.rotate(90); canvas.translate(-getPaddingTop(), -itemCount * (width + mPageMargin) + mPageMargin); mRightEdge.setSize(height, width); needsInvalidate |= mRightEdge.draw(canvas); canvas.restoreToCount(restoreCount); } } else { mLeftEdge.finish(); mRightEdge.finish(); } if (needsInvalidate) { // Keep animating invalidate(); } } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); // Draw the margin drawable if needed. if (mPageMargin > 0 && mMarginDrawable != null) { final int scrollX = getScrollX(); final int width = getWidth(); final int offset = scrollX % (width + mPageMargin); if (offset != 0) { // Pages fit completely when settled; we only need to draw when in between final int left = scrollX - offset + width; mMarginDrawable.setBounds(left, mTopPageBounds, left + mPageMargin, mBottomPageBounds); mMarginDrawable.draw(canvas); } } } /** * Start a fake drag of the pager. * * <p>A fake drag can be useful if you want to synchronize the motion of the ViewPager * with the touch scrolling of another view, while still letting the ViewPager * control the snapping motion and fling behavior. (e.g. parallax-scrolling tabs.) * Call {@link #fakeDragBy(float)} to simulate the actual drag motion. Call * {@link #endFakeDrag()} to complete the fake drag and fling as necessary. * * <p>During a fake drag the ViewPager will ignore all touch events. If a real drag * is already in progress, this method will return false. * * @return true if the fake drag began successfully, false if it could not be started. 
* * @see #fakeDragBy(float) * @see #endFakeDrag() */ public boolean beginFakeDrag() { if (mIsBeingDragged) { return false; } mFakeDragging = true; setScrollState(SCROLL_STATE_DRAGGING); mInitialMotionX = mLastMotionX = 0; if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } else { mVelocityTracker.clear(); } final long time = SystemClock.uptimeMillis(); final MotionEvent ev = MotionEvent.obtain(time, time, MotionEvent.ACTION_DOWN, 0, 0, 0); mVelocityTracker.addMovement(ev); ev.recycle(); mFakeDragBeginTime = time; return true; } /** * End a fake drag of the pager. * * @see #beginFakeDrag() * @see #fakeDragBy(float) */ public void endFakeDrag() { if (!mFakeDragging) { throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first."); } final VelocityTracker velocityTracker = mVelocityTracker; velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity); int initialVelocity = (int)VelocityTrackerCompat.getYVelocity( velocityTracker, mActivePointerId); mPopulatePending = true; if ((Math.abs(initialVelocity) > mMinimumVelocity) || Math.abs(mInitialMotionX-mLastMotionX) >= (getWidth()/3)) { if (mLastMotionX > mInitialMotionX) { setCurrentItemInternal(mCurItem-1, true, true); } else { setCurrentItemInternal(mCurItem+1, true, true); } } else { setCurrentItemInternal(mCurItem, true, true); } endDrag(); mFakeDragging = false; } /** * Fake drag by an offset in pixels. You must have called {@link #beginFakeDrag()} first. * * @param xOffset Offset in pixels to drag by. * @see #beginFakeDrag() * @see #endFakeDrag() */ public void fakeDragBy(float xOffset) { if (!mFakeDragging) { throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first."); } mLastMotionX += xOffset; float scrollX = getScrollX() - xOffset; final int width = getWidth(); final int widthWithMargin = width + mPageMargin; final float leftBound = Math.max(0, (mCurItem - 1) * widthWithMargin); final float rightBound = Math.min(mCurItem + 1, mAdapter.getCount() - 1) * widthWithMargin; if (scrollX < leftBound) { scrollX = leftBound; } else if (scrollX > rightBound) { scrollX = rightBound; } // Don't lose the rounded component mLastMotionX += scrollX - (int) scrollX; scrollTo((int) scrollX, getScrollY()); pageScrolled((int) scrollX); // Synthesize an event for the VelocityTracker. final long time = SystemClock.uptimeMillis(); final MotionEvent ev = MotionEvent.obtain(mFakeDragBeginTime, time, MotionEvent.ACTION_MOVE, mLastMotionX, 0, 0); mVelocityTracker.addMovement(ev); ev.recycle(); } /** * Returns true if a fake drag is in progress. * * @return true if currently in a fake drag, false otherwise. * * @see #beginFakeDrag() * @see #fakeDragBy(float) * @see #endFakeDrag() */ public boolean isFakeDragging() { return mFakeDragging; } private void onSecondaryPointerUp(MotionEvent ev) { final int pointerIndex = MotionEventCompat.getActionIndex(ev); final int pointerId = MotionEventCompat.getPointerId(ev, pointerIndex); if (pointerId == mActivePointerId) { // This was our active pointer going up. Choose a new // active pointer and adjust accordingly. final int newPointerIndex = pointerIndex == 0 ? 
1 : 0; mLastMotionX = MotionEventCompat.getX(ev, newPointerIndex); mActivePointerId = MotionEventCompat.getPointerId(ev, newPointerIndex); if (mVelocityTracker != null) { mVelocityTracker.clear(); } } } private void endDrag() { mIsBeingDragged = false; mIsUnableToDrag = false; if (mVelocityTracker != null) { mVelocityTracker.recycle(); mVelocityTracker = null; } } private void setScrollingCacheEnabled(boolean enabled) { if (mScrollingCacheEnabled != enabled) { mScrollingCacheEnabled = enabled; if (USE_CACHE) { final int size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { child.setDrawingCacheEnabled(enabled); } } } } } /** * Tests scrollability within child views of v given a delta of dx. * * @param v View to test for horizontal scrollability * @param checkV Whether the view v passed should itself be checked for scrollability (true), * or just its children (false). * @param dx Delta scrolled in pixels * @param x X coordinate of the active touch point * @param y Y coordinate of the active touch point * @return true if child views of v can be scrolled by delta of dx. */ protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) { if (v instanceof ViewGroup) { final ViewGroup group = (ViewGroup) v; final int scrollX = v.getScrollX(); final int scrollY = v.getScrollY(); final int count = group.getChildCount(); // Count backwards - let topmost views consume scroll distance first. for (int i = count - 1; i >= 0; i--) { // TODO: Add versioned support here for transformed views. // This will not work for transformed views in Honeycomb+ final View child = group.getChildAt(i); if (x + scrollX >= child.getLeft() && x + scrollX < child.getRight() && y + scrollY >= child.getTop() && y + scrollY < child.getBottom() && canScroll(child, true, dx, x + scrollX - child.getLeft(), y + scrollY - child.getTop())) { return true; } } } return checkV && ViewCompat.canScrollHorizontally(v, -dx); } @Override public boolean dispatchKeyEvent(KeyEvent event) { // Let the focused view and/or our descendants get the key first return super.dispatchKeyEvent(event) || executeKeyEvent(event); } /** * You can call this function yourself to have the scroll view perform * scrolling from a key event, just as if the event had been dispatched to * it by the view hierarchy. * * @param event The key event to execute. * @return Return true if the event was handled, else false. */ public boolean executeKeyEvent(KeyEvent event) { boolean handled = false; if (event.getAction() == KeyEvent.ACTION_DOWN) { switch (event.getKeyCode()) { case KeyEvent.KEYCODE_DPAD_LEFT: handled = arrowScroll(FOCUS_LEFT); break; case KeyEvent.KEYCODE_DPAD_RIGHT: handled = arrowScroll(FOCUS_RIGHT); break; case KeyEvent.KEYCODE_TAB: if (Build.VERSION.SDK_INT >= 11) { // The focus finder had a bug handling FOCUS_FORWARD and FOCUS_BACKWARD // before Android 3.0. Ignore the tab key on those devices. 
if (KeyEventCompat.hasNoModifiers(event)) { handled = arrowScroll(FOCUS_FORWARD); } else if (KeyEventCompat.hasModifiers(event, KeyEvent.META_SHIFT_ON)) { handled = arrowScroll(FOCUS_BACKWARD); } } break; } } return handled; } public boolean arrowScroll(int direction) { View currentFocused = findFocus(); if (currentFocused == this) currentFocused = null; boolean handled = false; View nextFocused = FocusFinder.getInstance().findNextFocus(this, currentFocused, direction); if (nextFocused != null && nextFocused != currentFocused) { if (direction == View.FOCUS_LEFT) { // If there is nothing to the left, or this is causing us to // jump to the right, then what we really want to do is page left. if (currentFocused != null && nextFocused.getLeft() >= currentFocused.getLeft()) { handled = pageLeft(); } else { handled = nextFocused.requestFocus(); } } else if (direction == View.FOCUS_RIGHT) { // If there is nothing to the right, or this is causing us to // jump to the left, then what we really want to do is page right. if (currentFocused != null && nextFocused.getLeft() <= currentFocused.getLeft()) { handled = pageRight(); } else { handled = nextFocused.requestFocus(); } } } else if (direction == FOCUS_LEFT || direction == FOCUS_BACKWARD) { // Trying to move left and nothing there; try to page. handled = pageLeft(); } else if (direction == FOCUS_RIGHT || direction == FOCUS_FORWARD) { // Trying to move right and nothing there; try to page. handled = pageRight(); } if (handled) { playSoundEffect(SoundEffectConstants.getContantForFocusDirection(direction)); } return handled; } boolean pageLeft() { if (mCurItem > 0) { setCurrentItem(mCurItem-1, true); return true; } return false; } boolean pageRight() { if (mAdapter != null && mCurItem < (mAdapter.getCount()-1)) { setCurrentItem(mCurItem+1, true); return true; } return false; } /** * We only want the current page that is being shown to be focusable. */ @Override public void addFocusables(ArrayList<View> views, int direction, int focusableMode) { final int focusableCount = views.size(); final int descendantFocusability = getDescendantFocusability(); if (descendantFocusability != FOCUS_BLOCK_DESCENDANTS) { for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { child.addFocusables(views, direction, focusableMode); } } } } // we add ourselves (if focusable) in all cases except for when we are // FOCUS_AFTER_DESCENDANTS and there are some descendants focusable. this is // to avoid the focus search finding layouts when a more precise search // among the focusable children would be more interesting. if ( descendantFocusability != FOCUS_AFTER_DESCENDANTS || // No focusable descendants (focusableCount == views.size())) { // Note that we can't call the superclass here, because it will // add all views in. So we need to do the same thing View does. if (!isFocusable()) { return; } if ((focusableMode & FOCUSABLES_TOUCH_MODE) == FOCUSABLES_TOUCH_MODE && isInTouchMode() && !isFocusableInTouchMode()) { return; } if (views != null) { views.add(this); } } } /** * We only want the current page that is being shown to be touchable. */ @Override public void addTouchables(ArrayList<View> views) { // Note that we don't call super.addTouchables(), which means that // we don't call View.addTouchables(). This is okay because a ViewPager // is itself not touchable. 
for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { child.addTouchables(views); } } } } /** * We only want the current page that is being shown to be focusable. */ @Override protected boolean onRequestFocusInDescendants(int direction, Rect previouslyFocusedRect) { int index; int increment; int end; int count = getChildCount(); if ((direction & FOCUS_FORWARD) != 0) { index = 0; increment = 1; end = count; } else { index = count - 1; increment = -1; end = -1; } for (int i = index; i != end; i += increment) { View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { if (child.requestFocus(direction, previouslyFocusedRect)) { return true; } } } } return false; } @Override public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) { // ViewPagers should only report accessibility info for the current page, // otherwise things get very confusing. // TODO: Should this note something about the paging container? final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { final ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem && child.dispatchPopulateAccessibilityEvent(event)) { return true; } } } return false; } @Override protected ViewGroup.LayoutParams generateDefaultLayoutParams() { return new LayoutParams(); } @Override protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return generateDefaultLayoutParams(); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p instanceof LayoutParams && super.checkLayoutParams(p); } @Override public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } private class PagerObserver extends DataSetObserver { @Override public void onChanged() { dataSetChanged(); } @Override public void onInvalidated() { dataSetChanged(); } } public static class LayoutParams extends ViewGroup.LayoutParams { /** * true if this view is a decoration on the pager itself and not * a view supplied by the adapter. */ public boolean isDecor; public int gravity; public LayoutParams() { super(FILL_PARENT, FILL_PARENT); } public LayoutParams(Context context, AttributeSet attrs) { super(context, attrs); final TypedArray a = context.obtainStyledAttributes(attrs, LAYOUT_ATTRS); gravity = a.getInteger(0, Gravity.NO_GRAVITY); a.recycle(); } } }
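The following is an illustrative usage sketch, not part of the file or commit above: it shows how client code of this era of the support library might drive the public API defined in ViewPager.java (setAdapter, setOnPageChangeListener, setOffscreenPageLimit, setPageMargin, setCurrentItem) together with a minimal View-based PagerAdapter. The package name and the classes DemoPagerActivity and ColorPagerAdapter are invented for the example and assume the View-container PagerAdapter callbacks available in this revision of the library.

/*
 * Illustrative sketch only; not part of ViewPager.java or this commit.
 * Assumes the View-based PagerAdapter callbacks of this support library era.
 * All names below (package, activity, adapter) are hypothetical.
 */
package com.example.pagerdemo;

import android.app.Activity;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.View;
import android.widget.TextView;

public class DemoPagerActivity extends Activity {
    private static final String TAG = "DemoPagerActivity";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        ViewPager pager = new ViewPager(this);
        pager.setAdapter(new ColorPagerAdapter());
        pager.setOffscreenPageLimit(1);  // keep one page cached on each side of the current page
        pager.setPageMargin(16);         // 16px gap between adjacent pages
        pager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
            @Override
            public void onPageSelected(int position) {
                Log.i(TAG, "Selected page " + position);
            }
        });
        setContentView(pager);
        pager.setCurrentItem(0, false);  // start on the first page without animating
    }

    /** Minimal adapter: each page is a colored TextView showing its index. */
    private static class ColorPagerAdapter extends PagerAdapter {
        private static final int[] COLORS = {
                Color.RED, Color.GREEN, Color.BLUE, Color.YELLOW };

        @Override
        public int getCount() {
            return COLORS.length;
        }

        @Override
        public Object instantiateItem(View container, int position) {
            // In this revision the adapter is responsible for adding the page view itself.
            TextView page = new TextView(container.getContext());
            page.setBackgroundColor(COLORS[position]);
            page.setText("Page " + position);
            ((ViewPager) container).addView(page);
            return page;
        }

        @Override
        public void destroyItem(View container, int position, Object object) {
            ((ViewPager) container).removeView((View) object);
        }

        @Override
        public boolean isViewFromObject(View view, Object object) {
            return view == object;
        }

        @Override
        public void startUpdate(View container) { /* no state to prepare */ }

        @Override
        public void finishUpdate(View container) { /* no pending transactions */ }

        @Override
        public Parcelable saveState() {
            return null;  // pages are cheap to recreate, nothing to save
        }

        @Override
        public void restoreState(Parcelable state, ClassLoader loader) { /* nothing saved */ }
    }
}

Because the adapter adds and removes page views directly on the container, setOffscreenPageLimit controls how many of these TextViews exist at once; raising it trades memory for smoother back-and-forth paging, as described in the setOffscreenPageLimit javadoc above.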
v4/java/android/support/v4/view/ViewPager.java
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.support.v4.view; import android.content.Context; import android.content.res.TypedArray; import android.database.DataSetObserver; import android.graphics.Canvas; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Parcel; import android.os.Parcelable; import android.os.SystemClock; import android.support.v4.os.ParcelableCompat; import android.support.v4.os.ParcelableCompatCreatorCallbacks; import android.support.v4.widget.EdgeEffectCompat; import android.util.AttributeSet; import android.util.Log; import android.view.FocusFinder; import android.view.Gravity; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.SoundEffectConstants; import android.view.VelocityTracker; import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; import android.view.ViewParent; import android.view.accessibility.AccessibilityEvent; import android.view.animation.Interpolator; import android.widget.Scroller; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; /** * Layout manager that allows the user to flip left and right * through pages of data. You supply an implementation of a * {@link PagerAdapter} to generate the pages that the view shows. * * <p>Note this class is currently under early design and * development. The API will likely change in later updates of * the compatibility library, requiring changes to the source code * of apps when they are compiled against the newer version.</p> */ public class ViewPager extends ViewGroup { private static final String TAG = "ViewPager"; private static final boolean DEBUG = false; private static final boolean USE_CACHE = false; private static final int DEFAULT_OFFSCREEN_PAGES = 1; private static final int MAX_SETTLE_DURATION = 600; // ms private static final int[] LAYOUT_ATTRS = new int[] { android.R.attr.layout_gravity }; static class ItemInfo { Object object; int position; boolean scrolling; } private static final Comparator<ItemInfo> COMPARATOR = new Comparator<ItemInfo>(){ @Override public int compare(ItemInfo lhs, ItemInfo rhs) { return lhs.position - rhs.position; }}; private static final Interpolator sInterpolator = new Interpolator() { public float getInterpolation(float t) { // _o(t) = t * t * ((tension + 1) * t + tension) // o(t) = _o(t - 1) + 1 t -= 1.0f; return t * t * t + 1.0f; } }; private final ArrayList<ItemInfo> mItems = new ArrayList<ItemInfo>(); private PagerAdapter mAdapter; private int mCurItem; // Index of currently displayed page. 
private int mRestoredCurItem = -1; private Parcelable mRestoredAdapterState = null; private ClassLoader mRestoredClassLoader = null; private Scroller mScroller; private PagerObserver mObserver; private int mPageMargin; private Drawable mMarginDrawable; private int mTopPageBounds; private int mBottomPageBounds; private int mChildWidthMeasureSpec; private int mChildHeightMeasureSpec; private boolean mInLayout; private boolean mScrollingCacheEnabled; private boolean mPopulatePending; private boolean mIsPopulating; private boolean mScrolling; private int mOffscreenPageLimit = DEFAULT_OFFSCREEN_PAGES; private boolean mIsBeingDragged; private boolean mIsUnableToDrag; private int mTouchSlop; private float mInitialMotionX; /** * Position of the last motion event. */ private float mLastMotionX; private float mLastMotionY; /** * ID of the active pointer. This is used to retain consistency during * drags/flings if multiple pointers are used. */ private int mActivePointerId = INVALID_POINTER; /** * Sentinel value for no current active pointer. * Used by {@link #mActivePointerId}. */ private static final int INVALID_POINTER = -1; /** * Determines speed during touch scrolling */ private VelocityTracker mVelocityTracker; private int mMinimumVelocity; private int mMaximumVelocity; private float mBaseLineFlingVelocity; private float mFlingVelocityInfluence; private boolean mFakeDragging; private long mFakeDragBeginTime; private EdgeEffectCompat mLeftEdge; private EdgeEffectCompat mRightEdge; private boolean mFirstLayout = true; private boolean mCalledSuper; private int mDecorChildCount; private OnPageChangeListener mOnPageChangeListener; private OnPageChangeListener mInternalPageChangeListener; private OnAdapterChangeListener mAdapterChangeListener; /** * Indicates that the pager is in an idle, settled state. The current page * is fully in view and no animation is in progress. */ public static final int SCROLL_STATE_IDLE = 0; /** * Indicates that the pager is currently being dragged by the user. */ public static final int SCROLL_STATE_DRAGGING = 1; /** * Indicates that the pager is in the process of settling to a final position. */ public static final int SCROLL_STATE_SETTLING = 2; private int mScrollState = SCROLL_STATE_IDLE; /** * Callback interface for responding to changing state of the selected page. */ public interface OnPageChangeListener { /** * This method will be invoked when the current page is scrolled, either as part * of a programmatically initiated smooth scroll or a user initiated touch scroll. * * @param position Position index of the first page currently being displayed. * Page position+1 will be visible if positionOffset is nonzero. * @param positionOffset Value from [0, 1) indicating the offset from the page at position. * @param positionOffsetPixels Value in pixels indicating the offset from position. */ public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels); /** * This method will be invoked when a new page becomes selected. Animation is not * necessarily complete. * * @param position Position index of the new selected page. */ public void onPageSelected(int position); /** * Called when the scroll state changes. Useful for discovering when the user * begins dragging, when the pager is automatically settling to the current page, * or when it is fully stopped/idle. * * @param state The new scroll state. 
* @see ViewPager#SCROLL_STATE_IDLE * @see ViewPager#SCROLL_STATE_DRAGGING * @see ViewPager#SCROLL_STATE_SETTLING */ public void onPageScrollStateChanged(int state); } /** * Simple implementation of the {@link OnPageChangeListener} interface with stub * implementations of each method. Extend this if you do not intend to override * every method of {@link OnPageChangeListener}. */ public static class SimpleOnPageChangeListener implements OnPageChangeListener { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { // This space for rent } @Override public void onPageSelected(int position) { // This space for rent } @Override public void onPageScrollStateChanged(int state) { // This space for rent } } /** * Used internally to monitor when adapters are switched. */ interface OnAdapterChangeListener { public void onAdapterChanged(PagerAdapter oldAdapter, PagerAdapter newAdapter); } /** * Used internally to tag special types of child views that should be added as * pager decorations by default. */ interface Decor {} public ViewPager(Context context) { super(context); initViewPager(); } public ViewPager(Context context, AttributeSet attrs) { super(context, attrs); initViewPager(); } void initViewPager() { setWillNotDraw(false); setDescendantFocusability(FOCUS_AFTER_DESCENDANTS); setFocusable(true); final Context context = getContext(); mScroller = new Scroller(context, sInterpolator); final ViewConfiguration configuration = ViewConfiguration.get(context); mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration); mMinimumVelocity = configuration.getScaledMinimumFlingVelocity(); mMaximumVelocity = configuration.getScaledMaximumFlingVelocity(); mLeftEdge = new EdgeEffectCompat(context); mRightEdge = new EdgeEffectCompat(context); float density = context.getResources().getDisplayMetrics().density; mBaseLineFlingVelocity = 2500.0f * density; mFlingVelocityInfluence = 0.4f; } private void setScrollState(int newState) { if (mScrollState == newState) { return; } mScrollState = newState; if (mOnPageChangeListener != null) { mOnPageChangeListener.onPageScrollStateChanged(newState); } } /** * Set a PagerAdapter that will supply views for this pager as needed. * * @param adapter Adapter to use */ public void setAdapter(PagerAdapter adapter) { if (mAdapter != null) { mAdapter.unregisterDataSetObserver(mObserver); mAdapter.startUpdate(this); for (int i = 0; i < mItems.size(); i++) { final ItemInfo ii = mItems.get(i); mAdapter.destroyItem(this, ii.position, ii.object); } mAdapter.finishUpdate(this); mItems.clear(); removeNonDecorViews(); mCurItem = 0; scrollTo(0, 0); } final PagerAdapter oldAdapter = mAdapter; mAdapter = adapter; if (mAdapter != null) { if (mObserver == null) { mObserver = new PagerObserver(); } mAdapter.registerDataSetObserver(mObserver); mPopulatePending = false; if (mRestoredCurItem >= 0) { mAdapter.restoreState(mRestoredAdapterState, mRestoredClassLoader); setCurrentItemInternal(mRestoredCurItem, false, true); mRestoredCurItem = -1; mRestoredAdapterState = null; mRestoredClassLoader = null; } else { populate(); } } if (mAdapterChangeListener != null && oldAdapter != adapter) { mAdapterChangeListener.onAdapterChanged(oldAdapter, adapter); } } private void removeNonDecorViews() { for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) { removeViewAt(i); i--; } } } /** * Retrieve the current adapter supplying pages. 
* * @return The currently registered PagerAdapter */ public PagerAdapter getAdapter() { return mAdapter; } void setOnAdapterChangeListener(OnAdapterChangeListener listener) { mAdapterChangeListener = listener; } /** * Set the currently selected page. If the ViewPager has already been through its first * layout there will be a smooth animated transition between the current item and the * specified item. * * @param item Item index to select */ public void setCurrentItem(int item) { mPopulatePending = false; setCurrentItemInternal(item, !mFirstLayout, false); } /** * Set the currently selected page. * * @param item Item index to select * @param smoothScroll True to smoothly scroll to the new item, false to transition immediately */ public void setCurrentItem(int item, boolean smoothScroll) { mPopulatePending = false; setCurrentItemInternal(item, smoothScroll, false); } public int getCurrentItem() { return mCurItem; } void setCurrentItemInternal(int item, boolean smoothScroll, boolean always) { setCurrentItemInternal(item, smoothScroll, always, 0); } void setCurrentItemInternal(int item, boolean smoothScroll, boolean always, int velocity) { if (mAdapter == null || mAdapter.getCount() <= 0) { setScrollingCacheEnabled(false); return; } if (!always && mCurItem == item && mItems.size() != 0) { setScrollingCacheEnabled(false); return; } if (item < 0) { item = 0; } else if (item >= mAdapter.getCount()) { item = mAdapter.getCount() - 1; } final int pageLimit = mOffscreenPageLimit; if (item > (mCurItem + pageLimit) || item < (mCurItem - pageLimit)) { // We are doing a jump by more than one page. To avoid // glitches, we want to keep all current pages in the view // until the scroll ends. for (int i=0; i<mItems.size(); i++) { mItems.get(i).scrolling = true; } } final boolean dispatchSelected = mCurItem != item; mCurItem = item; populate(); final int destX = (getWidth() + mPageMargin) * item; if (smoothScroll) { smoothScrollTo(destX, 0, velocity); if (dispatchSelected && mOnPageChangeListener != null) { mOnPageChangeListener.onPageSelected(item); } if (dispatchSelected && mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageSelected(item); } } else { if (dispatchSelected && mOnPageChangeListener != null) { mOnPageChangeListener.onPageSelected(item); } if (dispatchSelected && mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageSelected(item); } completeScroll(); scrollTo(destX, 0); } } /** * Set a listener that will be invoked whenever the page changes or is incrementally * scrolled. See {@link OnPageChangeListener}. * * @param listener Listener to set */ public void setOnPageChangeListener(OnPageChangeListener listener) { mOnPageChangeListener = listener; } /** * Set a separate OnPageChangeListener for internal use by the support library. * * @param listener Listener to set * @return The old listener that was set, if any. */ OnPageChangeListener setInternalPageChangeListener(OnPageChangeListener listener) { OnPageChangeListener oldListener = mInternalPageChangeListener; mInternalPageChangeListener = listener; return oldListener; } /** * Returns the number of pages that will be retained to either side of the * current page in the view hierarchy in an idle state. Defaults to 1. 
* * @return How many pages will be kept offscreen on either side * @see #setOffscreenPageLimit(int) */ public int getOffscreenPageLimit() { return mOffscreenPageLimit; } /** * Set the number of pages that should be retained to either side of the * current page in the view hierarchy in an idle state. Pages beyond this * limit will be recreated from the adapter when needed. * * <p>This is offered as an optimization. If you know in advance the number * of pages you will need to support or have lazy-loading mechanisms in place * on your pages, tweaking this setting can have benefits in perceived smoothness * of paging animations and interaction. If you have a small number of pages (3-4) * that you can keep active all at once, less time will be spent in layout for * newly created view subtrees as the user pages back and forth.</p> * * <p>You should keep this limit low, especially if your pages have complex layouts. * This setting defaults to 1.</p> * * @param limit How many pages will be kept offscreen in an idle state. */ public void setOffscreenPageLimit(int limit) { if (limit < DEFAULT_OFFSCREEN_PAGES) { Log.w(TAG, "Requested offscreen page limit " + limit + " too small; defaulting to " + DEFAULT_OFFSCREEN_PAGES); limit = DEFAULT_OFFSCREEN_PAGES; } if (limit != mOffscreenPageLimit) { mOffscreenPageLimit = limit; populate(); } } /** * Set the margin between pages. * * @param marginPixels Distance between adjacent pages in pixels * @see #getPageMargin() * @see #setPageMarginDrawable(Drawable) * @see #setPageMarginDrawable(int) */ public void setPageMargin(int marginPixels) { final int oldMargin = mPageMargin; mPageMargin = marginPixels; final int width = getWidth(); recomputeScrollPosition(width, width, marginPixels, oldMargin); requestLayout(); } /** * Return the margin between pages. * * @return The size of the margin in pixels */ public int getPageMargin() { return mPageMargin; } /** * Set a drawable that will be used to fill the margin between pages. * * @param d Drawable to display between pages */ public void setPageMarginDrawable(Drawable d) { mMarginDrawable = d; if (d != null) refreshDrawableState(); setWillNotDraw(d == null); invalidate(); } /** * Set a drawable that will be used to fill the margin between pages. * * @param resId Resource ID of a drawable to display between pages */ public void setPageMarginDrawable(int resId) { setPageMarginDrawable(getContext().getResources().getDrawable(resId)); } @Override protected boolean verifyDrawable(Drawable who) { return super.verifyDrawable(who) || who == mMarginDrawable; } @Override protected void drawableStateChanged() { super.drawableStateChanged(); final Drawable d = mMarginDrawable; if (d != null && d.isStateful()) { d.setState(getDrawableState()); } } // We want the duration of the page snap animation to be influenced by the distance that // the screen has to travel, however, we don't want this duration to be effected in a // purely linear fashion. Instead, we use this method to moderate the effect that the distance // of travel has on the overall snap duration. float distanceInfluenceForSnapDuration(float f) { f -= 0.5f; // center the values about 0. f *= 0.3f * Math.PI / 2.0f; return (float) Math.sin(f); } /** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. 
* * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis */ void smoothScrollTo(int x, int y) { smoothScrollTo(x, y, 0); } /** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. * * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis * @param velocity the velocity associated with a fling, if applicable. (0 otherwise) */ void smoothScrollTo(int x, int y, int velocity) { if (getChildCount() == 0) { // Nothing to do. setScrollingCacheEnabled(false); return; } int sx = getScrollX(); int sy = getScrollY(); int dx = x - sx; int dy = y - sy; if (dx == 0 && dy == 0) { completeScroll(); setScrollState(SCROLL_STATE_IDLE); return; } setScrollingCacheEnabled(true); mScrolling = true; setScrollState(SCROLL_STATE_SETTLING); final float pageDelta = (float) Math.abs(dx) / (getWidth() + mPageMargin); int duration = (int) (pageDelta * 100); velocity = Math.abs(velocity); if (velocity > 0) { duration += (duration / (velocity / mBaseLineFlingVelocity)) * mFlingVelocityInfluence; } else { duration += 100; } duration = Math.min(duration, MAX_SETTLE_DURATION); mScroller.startScroll(sx, sy, dx, dy, duration); invalidate(); } void addNewItem(int position, int index) { ItemInfo ii = new ItemInfo(); ii.position = position; ii.object = mAdapter.instantiateItem(this, position); if (index < 0) { mItems.add(ii); } else { mItems.add(index, ii); } } void dataSetChanged() { // This method only gets called if our observer is attached, so mAdapter is non-null. boolean needPopulate = mItems.size() < 3 && mItems.size() < mAdapter.getCount(); int newCurrItem = -1; boolean isUpdating = false; for (int i = 0; i < mItems.size(); i++) { final ItemInfo ii = mItems.get(i); final int newPos = mAdapter.getItemPosition(ii.object); if (newPos == PagerAdapter.POSITION_UNCHANGED) { continue; } if (newPos == PagerAdapter.POSITION_NONE) { mItems.remove(i); i--; if (!isUpdating) { mAdapter.startUpdate(this); isUpdating = true; } mAdapter.destroyItem(this, ii.position, ii.object); needPopulate = true; if (mCurItem == ii.position) { // Keep the current item in the valid range newCurrItem = Math.max(0, Math.min(mCurItem, mAdapter.getCount() - 1)); } continue; } if (ii.position != newPos) { if (ii.position == mCurItem) { // Our current item changed position. Follow it. newCurrItem = newPos; } ii.position = newPos; needPopulate = true; } } if (isUpdating) { mAdapter.finishUpdate(this); } Collections.sort(mItems, COMPARATOR); if (newCurrItem >= 0) { // TODO This currently causes a jump. setCurrentItemInternal(newCurrItem, false, true); needPopulate = true; } if (needPopulate) { populate(); requestLayout(); } } void populate() { if (mAdapter == null) { return; } // Bail now if we are waiting to populate. This is to hold off // on creating views from the time the user releases their finger to // fling to a new position until we have finished the scroll to // that position, avoiding glitches from happening at that point. if (mPopulatePending) { if (DEBUG) Log.i(TAG, "populate is pending, skipping for now..."); return; } // Also, don't populate until we are attached to a window. This is to // avoid trying to populate before we have restored our view hierarchy // state and conflicting with what is restored. 
if (getWindowToken() == null) { return; } mIsPopulating = true; mAdapter.startUpdate(this); final int pageLimit = mOffscreenPageLimit; final int startPos = Math.max(0, mCurItem - pageLimit); final int N = mAdapter.getCount(); final int endPos = Math.min(N-1, mCurItem + pageLimit); if (DEBUG) Log.v(TAG, "populating: startPos=" + startPos + " endPos=" + endPos); // Add and remove pages in the existing list. int lastPos = -1; for (int i=0; i<mItems.size(); i++) { ItemInfo ii = mItems.get(i); if ((ii.position < startPos || ii.position > endPos) && !ii.scrolling) { if (DEBUG) Log.i(TAG, "removing: " + ii.position + " @ " + i); mItems.remove(i); i--; mAdapter.destroyItem(this, ii.position, ii.object); } else if (lastPos < endPos && ii.position > startPos) { // The next item is outside of our range, but we have a gap // between it and the last item where we want to have a page // shown. Fill in the gap. lastPos++; if (lastPos < startPos) { lastPos = startPos; } while (lastPos <= endPos && lastPos < ii.position) { if (DEBUG) Log.i(TAG, "inserting: " + lastPos + " @ " + i); addNewItem(lastPos, i); lastPos++; i++; } } lastPos = ii.position; } // Add any new pages we need at the end. lastPos = mItems.size() > 0 ? mItems.get(mItems.size()-1).position : -1; if (lastPos < endPos) { lastPos++; lastPos = lastPos > startPos ? lastPos : startPos; while (lastPos <= endPos) { if (DEBUG) Log.i(TAG, "appending: " + lastPos); addNewItem(lastPos, -1); lastPos++; } } if (DEBUG) { Log.i(TAG, "Current page list:"); for (int i=0; i<mItems.size(); i++) { Log.i(TAG, "#" + i + ": page " + mItems.get(i).position); } } ItemInfo curItem = null; for (int i=0; i<mItems.size(); i++) { if (mItems.get(i).position == mCurItem) { curItem = mItems.get(i); break; } } mAdapter.setPrimaryItem(this, mCurItem, curItem != null ? curItem.object : null); mAdapter.finishUpdate(this); mIsPopulating = false; if (hasFocus()) { View currentFocused = findFocus(); ItemInfo ii = currentFocused != null ? 
infoForAnyChild(currentFocused) : null; if (ii == null || ii.position != mCurItem) { for (int i=0; i<getChildCount(); i++) { View child = getChildAt(i); ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { if (child.requestFocus(FOCUS_FORWARD)) { break; } } } } } } public static class SavedState extends BaseSavedState { int position; Parcelable adapterState; ClassLoader loader; public SavedState(Parcelable superState) { super(superState); } @Override public void writeToParcel(Parcel out, int flags) { super.writeToParcel(out, flags); out.writeInt(position); out.writeParcelable(adapterState, flags); } @Override public String toString() { return "FragmentPager.SavedState{" + Integer.toHexString(System.identityHashCode(this)) + " position=" + position + "}"; } public static final Parcelable.Creator<SavedState> CREATOR = ParcelableCompat.newCreator(new ParcelableCompatCreatorCallbacks<SavedState>() { @Override public SavedState createFromParcel(Parcel in, ClassLoader loader) { return new SavedState(in, loader); } @Override public SavedState[] newArray(int size) { return new SavedState[size]; } }); SavedState(Parcel in, ClassLoader loader) { super(in); if (loader == null) { loader = getClass().getClassLoader(); } position = in.readInt(); adapterState = in.readParcelable(loader); this.loader = loader; } } @Override public Parcelable onSaveInstanceState() { Parcelable superState = super.onSaveInstanceState(); SavedState ss = new SavedState(superState); ss.position = mCurItem; if (mAdapter != null) { ss.adapterState = mAdapter.saveState(); } return ss; } @Override public void onRestoreInstanceState(Parcelable state) { if (!(state instanceof SavedState)) { super.onRestoreInstanceState(state); return; } SavedState ss = (SavedState)state; super.onRestoreInstanceState(ss.getSuperState()); if (mAdapter != null) { mAdapter.restoreState(ss.adapterState, ss.loader); setCurrentItemInternal(ss.position, false, true); } else { mRestoredCurItem = ss.position; mRestoredAdapterState = ss.adapterState; mRestoredClassLoader = ss.loader; } } @Override public void addView(View child, int index, ViewGroup.LayoutParams params) { if (!checkLayoutParams(params)) { params = generateLayoutParams(params); } final LayoutParams lp = (LayoutParams) params; lp.isDecor |= child instanceof Decor; if (mInLayout) { if (lp != null && lp.isDecor) { throw new IllegalStateException("Cannot add pager decor view during layout"); } addViewInLayout(child, index, params); child.measure(mChildWidthMeasureSpec, mChildHeightMeasureSpec); } else { super.addView(child, index, params); } if (USE_CACHE) { if (child.getVisibility() != GONE) { child.setDrawingCacheEnabled(mScrollingCacheEnabled); } else { child.setDrawingCacheEnabled(false); } } } ItemInfo infoForChild(View child) { for (int i=0; i<mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (mAdapter.isViewFromObject(child, ii.object)) { return ii; } } return null; } ItemInfo infoForAnyChild(View child) { ViewParent parent; while ((parent=child.getParent()) != this) { if (parent == null || !(parent instanceof View)) { return null; } child = (View)parent; } return infoForChild(child); } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); mFirstLayout = true; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { // For simple implementation, or internal size is always 0. // We depend on the container to specify the layout size of // our view. 
We can't really know what it is since we will be // adding and removing different arbitrary views and do not // want the layout to change as this happens. setMeasuredDimension(getDefaultSize(0, widthMeasureSpec), getDefaultSize(0, heightMeasureSpec)); // Children are just made to fill our space. int childWidthSize = getMeasuredWidth() - getPaddingLeft() - getPaddingRight(); int childHeightSize = getMeasuredHeight() - getPaddingTop() - getPaddingBottom(); /* * Make sure all children have been properly measured. Decor views first. * Right now we cheat and make this less complicated by assuming decor * views won't intersect. We will pin to edges based on gravity. */ int size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp != null && lp.isDecor) { final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK; Log.d(TAG, "gravity: " + lp.gravity + " hgrav: " + hgrav + " vgrav: " + vgrav); int widthMode = MeasureSpec.AT_MOST; int heightMode = MeasureSpec.AT_MOST; boolean consumeVertical = vgrav == Gravity.TOP || vgrav == Gravity.BOTTOM; boolean consumeHorizontal = hgrav == Gravity.LEFT || hgrav == Gravity.RIGHT; if (consumeVertical) { widthMode = MeasureSpec.EXACTLY; } else if (consumeHorizontal) { heightMode = MeasureSpec.EXACTLY; } final int widthSpec = MeasureSpec.makeMeasureSpec(childWidthSize, widthMode); final int heightSpec = MeasureSpec.makeMeasureSpec(childHeightSize, heightMode); child.measure(widthSpec, heightSpec); if (consumeVertical) { childHeightSize -= child.getMeasuredHeight(); } else if (consumeHorizontal) { childWidthSize -= child.getMeasuredWidth(); } } } } mChildWidthMeasureSpec = MeasureSpec.makeMeasureSpec(childWidthSize, MeasureSpec.EXACTLY); mChildHeightMeasureSpec = MeasureSpec.makeMeasureSpec(childHeightSize, MeasureSpec.EXACTLY); // Make sure we have created all fragments that we need to have shown. mInLayout = true; populate(); mInLayout = false; // Page views next. size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { if (DEBUG) Log.v(TAG, "Measuring #" + i + " " + child + ": " + mChildWidthMeasureSpec); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp == null || !lp.isDecor) { child.measure(mChildWidthMeasureSpec, mChildHeightMeasureSpec); } } } } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); // Make sure scroll position is set correctly. if (w != oldw) { recomputeScrollPosition(w, oldw, mPageMargin, mPageMargin); } } private void recomputeScrollPosition(int width, int oldWidth, int margin, int oldMargin) { final int widthWithMargin = width + margin; if (oldWidth > 0) { final int oldScrollPos = getScrollX(); final int oldwwm = oldWidth + oldMargin; final int oldScrollItem = oldScrollPos / oldwwm; final float scrollOffset = (float) (oldScrollPos % oldwwm) / oldwwm; final int scrollPos = (int) ((oldScrollItem + scrollOffset) * widthWithMargin); scrollTo(scrollPos, getScrollY()); if (!mScroller.isFinished()) { // We now return to your regularly scheduled scroll, already in progress. 
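                // Restart the interrupted settle with only the time that was left,
                // so the animation still finishes on roughly its original schedule.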
final int newDuration = mScroller.getDuration() - mScroller.timePassed(); mScroller.startScroll(scrollPos, 0, mCurItem * widthWithMargin, 0, newDuration); } } else { int scrollPos = mCurItem * widthWithMargin; if (scrollPos != getScrollX()) { completeScroll(); scrollTo(scrollPos, getScrollY()); } } } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { mInLayout = true; populate(); mInLayout = false; final int count = getChildCount(); int width = r - l; int height = b - t; int paddingLeft = getPaddingLeft(); int paddingTop = getPaddingTop(); int paddingRight = getPaddingRight(); int paddingBottom = getPaddingBottom(); final int scrollX = getScrollX(); int decorCount = 0; for (int i = 0; i < count; i++) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { final LayoutParams lp = (LayoutParams) child.getLayoutParams(); ItemInfo ii; int childLeft = 0; int childTop = 0; if (lp.isDecor) { final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; final int vgrav = lp.gravity & Gravity.VERTICAL_GRAVITY_MASK; switch (hgrav) { default: childLeft = paddingLeft; break; case Gravity.LEFT: childLeft = paddingLeft; paddingLeft += child.getMeasuredWidth(); break; case Gravity.CENTER_HORIZONTAL: childLeft = Math.max((width - child.getMeasuredWidth()) / 2, paddingLeft); break; case Gravity.RIGHT: childLeft = width - paddingRight - child.getMeasuredWidth(); paddingRight += child.getMeasuredWidth(); break; } switch (vgrav) { default: childTop = paddingTop; break; case Gravity.TOP: childTop = paddingTop; paddingTop += child.getMeasuredHeight(); break; case Gravity.CENTER_VERTICAL: childTop = Math.max((height - child.getMeasuredHeight()) / 2, paddingTop); break; case Gravity.BOTTOM: childTop = height - paddingBottom - child.getMeasuredHeight(); paddingBottom += child.getMeasuredHeight(); break; } childLeft += scrollX; decorCount++; child.layout(childLeft, childTop, childLeft + child.getMeasuredWidth(), childTop + child.getMeasuredHeight()); } else if ((ii = infoForChild(child)) != null) { int loff = (width + mPageMargin) * ii.position; childLeft = paddingLeft + loff; childTop = paddingTop; if (DEBUG) Log.v(TAG, "Positioning #" + i + " " + child + " f=" + ii.object + ":" + childLeft + "," + childTop + " " + child.getMeasuredWidth() + "x" + child.getMeasuredHeight()); child.layout(childLeft, childTop, childLeft + child.getMeasuredWidth(), childTop + child.getMeasuredHeight()); } } } mTopPageBounds = paddingTop; mBottomPageBounds = height - paddingBottom; mDecorChildCount = decorCount; mFirstLayout = false; } @Override public void computeScroll() { if (DEBUG) Log.i(TAG, "computeScroll: finished=" + mScroller.isFinished()); if (!mScroller.isFinished()) { if (mScroller.computeScrollOffset()) { if (DEBUG) Log.i(TAG, "computeScroll: still scrolling"); int oldX = getScrollX(); int oldY = getScrollY(); int x = mScroller.getCurrX(); int y = mScroller.getCurrY(); if (oldX != x || oldY != y) { scrollTo(x, y); pageScrolled(x); } // Keep on drawing until the animation has finished. invalidate(); return; } } // Done with scroll, clean up state. 
completeScroll(); } private void pageScrolled(int xpos) { final int widthWithMargin = getWidth() + mPageMargin; final int position = xpos / widthWithMargin; final int offsetPixels = xpos % widthWithMargin; final float offset = (float) offsetPixels / widthWithMargin; mCalledSuper = false; onPageScrolled(position, offset, offsetPixels); if (!mCalledSuper) { throw new IllegalStateException( "onPageScrolled did not call superclass implementation"); } } /** * This method will be invoked when the current page is scrolled, either as part * of a programmatically initiated smooth scroll or a user initiated touch scroll. * If you override this method you must call through to the superclass implementation * (e.g. super.onPageScrolled(position, offset, offsetPixels)) before onPageScrolled * returns. * * @param position Position index of the first page currently being displayed. * Page position+1 will be visible if positionOffset is nonzero. * @param offset Value from [0, 1) indicating the offset from the page at position. * @param offsetPixels Value in pixels indicating the offset from position. */ protected void onPageScrolled(int position, float offset, int offsetPixels) { // Offset any decor views if needed - keep them on-screen at all times. if (mDecorChildCount > 0) { final int scrollX = getScrollX(); int paddingLeft = getPaddingLeft(); int paddingRight = getPaddingRight(); final int width = getWidth(); final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (!lp.isDecor) continue; final int hgrav = lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK; int childLeft = 0; switch (hgrav) { default: childLeft = paddingLeft; break; case Gravity.LEFT: childLeft = paddingLeft; paddingLeft += child.getWidth(); break; case Gravity.CENTER_HORIZONTAL: childLeft = Math.max((width - child.getMeasuredWidth()) / 2, paddingLeft); break; case Gravity.RIGHT: childLeft = width - paddingRight - child.getMeasuredWidth(); paddingRight += child.getMeasuredWidth(); break; } childLeft += scrollX; final int childOffset = childLeft - child.getLeft(); if (childOffset != 0) { child.offsetLeftAndRight(childOffset); } } } if (mOnPageChangeListener != null) { mOnPageChangeListener.onPageScrolled(position, offset, offsetPixels); } if (mInternalPageChangeListener != null) { mInternalPageChangeListener.onPageScrolled(position, offset, offsetPixels); } mCalledSuper = true; } private void completeScroll() { boolean needPopulate = mScrolling; if (needPopulate) { // Done with scroll, no longer want to cache view drawing. setScrollingCacheEnabled(false); mScroller.abortAnimation(); int oldX = getScrollX(); int oldY = getScrollY(); int x = mScroller.getCurrX(); int y = mScroller.getCurrY(); if (oldX != x || oldY != y) { scrollTo(x, y); } setScrollState(SCROLL_STATE_IDLE); } mPopulatePending = false; mScrolling = false; for (int i=0; i<mItems.size(); i++) { ItemInfo ii = mItems.get(i); if (ii.scrolling) { needPopulate = true; ii.scrolling = false; } } if (needPopulate) { populate(); } } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { /* * This method JUST determines whether we want to intercept the motion. * If we return true, onMotionEvent will be called and we do the actual * scrolling there. */ final int action = ev.getAction() & MotionEventCompat.ACTION_MASK; // Always take care of the touch gesture being complete. 
if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) { // Release the drag. if (DEBUG) Log.v(TAG, "Intercept done!"); mIsBeingDragged = false; mIsUnableToDrag = false; mActivePointerId = INVALID_POINTER; if (mVelocityTracker != null) { mVelocityTracker.recycle(); mVelocityTracker = null; } return false; } // Nothing more to do here if we have decided whether or not we // are dragging. if (action != MotionEvent.ACTION_DOWN) { if (mIsBeingDragged) { if (DEBUG) Log.v(TAG, "Intercept returning true!"); return true; } if (mIsUnableToDrag) { if (DEBUG) Log.v(TAG, "Intercept returning false!"); return false; } } switch (action) { case MotionEvent.ACTION_MOVE: { /* * mIsBeingDragged == false, otherwise the shortcut would have caught it. Check * whether the user has moved far enough from his original down touch. */ /* * Locally do absolute value. mLastMotionY is set to the y value * of the down event. */ final int activePointerId = mActivePointerId; if (activePointerId == INVALID_POINTER) { // If we don't have a valid id, the touch down wasn't on content. break; } final int pointerIndex = MotionEventCompat.findPointerIndex(ev, activePointerId); final float x = MotionEventCompat.getX(ev, pointerIndex); final float dx = x - mLastMotionX; final float xDiff = Math.abs(dx); final float y = MotionEventCompat.getY(ev, pointerIndex); final float yDiff = Math.abs(y - mLastMotionY); final int scrollX = getScrollX(); if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff); if (canScroll(this, false, (int) dx, (int) x, (int) y)) { // Nested view has scrollable area under this point. Let it be handled there. mInitialMotionX = mLastMotionX = x; mLastMotionY = y; return false; } if (xDiff > mTouchSlop && xDiff > yDiff) { if (DEBUG) Log.v(TAG, "Starting drag!"); mIsBeingDragged = true; setScrollState(SCROLL_STATE_DRAGGING); mLastMotionX = x; setScrollingCacheEnabled(true); } else { if (yDiff > mTouchSlop) { // The finger has moved enough in the vertical // direction to be counted as a drag... abort // any attempt to drag horizontally, to work correctly // with children that have scrolling containers. if (DEBUG) Log.v(TAG, "Starting unable to drag!"); mIsUnableToDrag = true; } } break; } case MotionEvent.ACTION_DOWN: { /* * Remember location of down touch. * ACTION_DOWN always refers to pointer index 0. */ mLastMotionX = mInitialMotionX = ev.getX(); mLastMotionY = ev.getY(); mActivePointerId = MotionEventCompat.getPointerId(ev, 0); if (mScrollState == SCROLL_STATE_SETTLING) { // Let the user 'catch' the pager as it animates. mIsBeingDragged = true; mIsUnableToDrag = false; setScrollState(SCROLL_STATE_DRAGGING); } else { completeScroll(); mIsBeingDragged = false; mIsUnableToDrag = false; } if (DEBUG) Log.v(TAG, "Down at " + mLastMotionX + "," + mLastMotionY + " mIsBeingDragged=" + mIsBeingDragged + "mIsUnableToDrag=" + mIsUnableToDrag); break; } case MotionEventCompat.ACTION_POINTER_UP: onSecondaryPointerUp(ev); break; } if (!mIsBeingDragged) { // Track the velocity as long as we aren't dragging. // Once we start a real drag we will track in onTouchEvent. if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } mVelocityTracker.addMovement(ev); } /* * The only time we want to intercept motion events is if we are in the * drag mode. */ return mIsBeingDragged; } @Override public boolean onTouchEvent(MotionEvent ev) { if (mFakeDragging) { // A fake drag is in progress already, ignore this real one // but still eat the touch events. 
// (It is likely that the user is multi-touching the screen.) return true; } if (ev.getAction() == MotionEvent.ACTION_DOWN && ev.getEdgeFlags() != 0) { // Don't handle edge touches immediately -- they may actually belong to one of our // descendants. return false; } if (mAdapter == null || mAdapter.getCount() == 0) { // Nothing to present or scroll; nothing to touch. return false; } if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } mVelocityTracker.addMovement(ev); final int action = ev.getAction(); boolean needsInvalidate = false; switch (action & MotionEventCompat.ACTION_MASK) { case MotionEvent.ACTION_DOWN: { /* * If being flinged and user touches, stop the fling. isFinished * will be false if being flinged. */ completeScroll(); // Remember where the motion event started mLastMotionX = mInitialMotionX = ev.getX(); mActivePointerId = MotionEventCompat.getPointerId(ev, 0); break; } case MotionEvent.ACTION_MOVE: if (!mIsBeingDragged) { final int pointerIndex = MotionEventCompat.findPointerIndex(ev, mActivePointerId); final float x = MotionEventCompat.getX(ev, pointerIndex); final float xDiff = Math.abs(x - mLastMotionX); final float y = MotionEventCompat.getY(ev, pointerIndex); final float yDiff = Math.abs(y - mLastMotionY); if (DEBUG) Log.v(TAG, "Moved x to " + x + "," + y + " diff=" + xDiff + "," + yDiff); if (xDiff > mTouchSlop && xDiff > yDiff) { if (DEBUG) Log.v(TAG, "Starting drag!"); mIsBeingDragged = true; mLastMotionX = x; setScrollState(SCROLL_STATE_DRAGGING); setScrollingCacheEnabled(true); } } if (mIsBeingDragged) { // Scroll to follow the motion event final int activePointerIndex = MotionEventCompat.findPointerIndex( ev, mActivePointerId); final float x = MotionEventCompat.getX(ev, activePointerIndex); final float deltaX = mLastMotionX - x; mLastMotionX = x; float oldScrollX = getScrollX(); float scrollX = oldScrollX + deltaX; final int width = getWidth(); final int widthWithMargin = width + mPageMargin; final int lastItemIndex = mAdapter.getCount() - 1; final float leftBound = Math.max(0, (mCurItem - 1) * widthWithMargin); final float rightBound = Math.min(mCurItem + 1, lastItemIndex) * widthWithMargin; if (scrollX < leftBound) { if (leftBound == 0) { float over = -scrollX; needsInvalidate = mLeftEdge.onPull(over / width); } scrollX = leftBound; } else if (scrollX > rightBound) { if (rightBound == lastItemIndex * widthWithMargin) { float over = scrollX - rightBound; needsInvalidate = mRightEdge.onPull(over / width); } scrollX = rightBound; } // Don't lose the rounded component mLastMotionX += scrollX - (int) scrollX; scrollTo((int) scrollX, getScrollY()); pageScrolled((int) scrollX); } break; case MotionEvent.ACTION_UP: if (mIsBeingDragged) { final VelocityTracker velocityTracker = mVelocityTracker; velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity); int initialVelocity = (int) VelocityTrackerCompat.getXVelocity( velocityTracker, mActivePointerId); mPopulatePending = true; final int widthWithMargin = getWidth() + mPageMargin; final int scrollX = getScrollX(); final int currentPage = scrollX / widthWithMargin; int nextPage = initialVelocity > 0 ? 
currentPage : currentPage + 1; setCurrentItemInternal(nextPage, true, true, initialVelocity); mActivePointerId = INVALID_POINTER; endDrag(); needsInvalidate = mLeftEdge.onRelease() | mRightEdge.onRelease(); } break; case MotionEvent.ACTION_CANCEL: if (mIsBeingDragged) { setCurrentItemInternal(mCurItem, true, true); mActivePointerId = INVALID_POINTER; endDrag(); needsInvalidate = mLeftEdge.onRelease() | mRightEdge.onRelease(); } break; case MotionEventCompat.ACTION_POINTER_DOWN: { final int index = MotionEventCompat.getActionIndex(ev); final float x = MotionEventCompat.getX(ev, index); mLastMotionX = x; mActivePointerId = MotionEventCompat.getPointerId(ev, index); break; } case MotionEventCompat.ACTION_POINTER_UP: onSecondaryPointerUp(ev); mLastMotionX = MotionEventCompat.getX(ev, MotionEventCompat.findPointerIndex(ev, mActivePointerId)); break; } if (needsInvalidate) { invalidate(); } return true; } @Override public void draw(Canvas canvas) { super.draw(canvas); boolean needsInvalidate = false; final int overScrollMode = ViewCompat.getOverScrollMode(this); if (overScrollMode == ViewCompat.OVER_SCROLL_ALWAYS || (overScrollMode == ViewCompat.OVER_SCROLL_IF_CONTENT_SCROLLS && mAdapter != null && mAdapter.getCount() > 1)) { if (!mLeftEdge.isFinished()) { final int restoreCount = canvas.save(); final int height = getHeight() - getPaddingTop() - getPaddingBottom(); canvas.rotate(270); canvas.translate(-height + getPaddingTop(), 0); mLeftEdge.setSize(height, getWidth()); needsInvalidate |= mLeftEdge.draw(canvas); canvas.restoreToCount(restoreCount); } if (!mRightEdge.isFinished()) { final int restoreCount = canvas.save(); final int width = getWidth(); final int height = getHeight() - getPaddingTop() - getPaddingBottom(); final int itemCount = mAdapter != null ? mAdapter.getCount() : 1; canvas.rotate(90); canvas.translate(-getPaddingTop(), -itemCount * (width + mPageMargin) + mPageMargin); mRightEdge.setSize(height, width); needsInvalidate |= mRightEdge.draw(canvas); canvas.restoreToCount(restoreCount); } } else { mLeftEdge.finish(); mRightEdge.finish(); } if (needsInvalidate) { // Keep animating invalidate(); } } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); // Draw the margin drawable if needed. if (mPageMargin > 0 && mMarginDrawable != null) { final int scrollX = getScrollX(); final int width = getWidth(); final int offset = scrollX % (width + mPageMargin); if (offset != 0) { // Pages fit completely when settled; we only need to draw when in between final int left = scrollX - offset + width; mMarginDrawable.setBounds(left, mTopPageBounds, left + mPageMargin, mBottomPageBounds); mMarginDrawable.draw(canvas); } } } /** * Start a fake drag of the pager. * * <p>A fake drag can be useful if you want to synchronize the motion of the ViewPager * with the touch scrolling of another view, while still letting the ViewPager * control the snapping motion and fling behavior. (e.g. parallax-scrolling tabs.) * Call {@link #fakeDragBy(float)} to simulate the actual drag motion. Call * {@link #endFakeDrag()} to complete the fake drag and fling as necessary. * * <p>During a fake drag the ViewPager will ignore all touch events. If a real drag * is already in progress, this method will return false. * * @return true if the fake drag began successfully, false if it could not be started. 
* * @see #fakeDragBy(float) * @see #endFakeDrag() */ public boolean beginFakeDrag() { if (mIsBeingDragged) { return false; } mFakeDragging = true; setScrollState(SCROLL_STATE_DRAGGING); mInitialMotionX = mLastMotionX = 0; if (mVelocityTracker == null) { mVelocityTracker = VelocityTracker.obtain(); } else { mVelocityTracker.clear(); } final long time = SystemClock.uptimeMillis(); final MotionEvent ev = MotionEvent.obtain(time, time, MotionEvent.ACTION_DOWN, 0, 0, 0); mVelocityTracker.addMovement(ev); ev.recycle(); mFakeDragBeginTime = time; return true; } /** * End a fake drag of the pager. * * @see #beginFakeDrag() * @see #fakeDragBy(float) */ public void endFakeDrag() { if (!mFakeDragging) { throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first."); } final VelocityTracker velocityTracker = mVelocityTracker; velocityTracker.computeCurrentVelocity(1000, mMaximumVelocity); int initialVelocity = (int)VelocityTrackerCompat.getYVelocity( velocityTracker, mActivePointerId); mPopulatePending = true; if ((Math.abs(initialVelocity) > mMinimumVelocity) || Math.abs(mInitialMotionX-mLastMotionX) >= (getWidth()/3)) { if (mLastMotionX > mInitialMotionX) { setCurrentItemInternal(mCurItem-1, true, true); } else { setCurrentItemInternal(mCurItem+1, true, true); } } else { setCurrentItemInternal(mCurItem, true, true); } endDrag(); mFakeDragging = false; } /** * Fake drag by an offset in pixels. You must have called {@link #beginFakeDrag()} first. * * @param xOffset Offset in pixels to drag by. * @see #beginFakeDrag() * @see #endFakeDrag() */ public void fakeDragBy(float xOffset) { if (!mFakeDragging) { throw new IllegalStateException("No fake drag in progress. Call beginFakeDrag first."); } mLastMotionX += xOffset; float scrollX = getScrollX() - xOffset; final int width = getWidth(); final int widthWithMargin = width + mPageMargin; final float leftBound = Math.max(0, (mCurItem - 1) * widthWithMargin); final float rightBound = Math.min(mCurItem + 1, mAdapter.getCount() - 1) * widthWithMargin; if (scrollX < leftBound) { scrollX = leftBound; } else if (scrollX > rightBound) { scrollX = rightBound; } // Don't lose the rounded component mLastMotionX += scrollX - (int) scrollX; scrollTo((int) scrollX, getScrollY()); pageScrolled((int) scrollX); // Synthesize an event for the VelocityTracker. final long time = SystemClock.uptimeMillis(); final MotionEvent ev = MotionEvent.obtain(mFakeDragBeginTime, time, MotionEvent.ACTION_MOVE, mLastMotionX, 0, 0); mVelocityTracker.addMovement(ev); ev.recycle(); } /** * Returns true if a fake drag is in progress. * * @return true if currently in a fake drag, false otherwise. * * @see #beginFakeDrag() * @see #fakeDragBy(float) * @see #endFakeDrag() */ public boolean isFakeDragging() { return mFakeDragging; } private void onSecondaryPointerUp(MotionEvent ev) { final int pointerIndex = MotionEventCompat.getActionIndex(ev); final int pointerId = MotionEventCompat.getPointerId(ev, pointerIndex); if (pointerId == mActivePointerId) { // This was our active pointer going up. Choose a new // active pointer and adjust accordingly. final int newPointerIndex = pointerIndex == 0 ? 
1 : 0; mLastMotionX = MotionEventCompat.getX(ev, newPointerIndex); mActivePointerId = MotionEventCompat.getPointerId(ev, newPointerIndex); if (mVelocityTracker != null) { mVelocityTracker.clear(); } } } private void endDrag() { mIsBeingDragged = false; mIsUnableToDrag = false; if (mVelocityTracker != null) { mVelocityTracker.recycle(); mVelocityTracker = null; } } private void setScrollingCacheEnabled(boolean enabled) { if (mScrollingCacheEnabled != enabled) { mScrollingCacheEnabled = enabled; if (USE_CACHE) { final int size = getChildCount(); for (int i = 0; i < size; ++i) { final View child = getChildAt(i); if (child.getVisibility() != GONE) { child.setDrawingCacheEnabled(enabled); } } } } } /** * Tests scrollability within child views of v given a delta of dx. * * @param v View to test for horizontal scrollability * @param checkV Whether the view v passed should itself be checked for scrollability (true), * or just its children (false). * @param dx Delta scrolled in pixels * @param x X coordinate of the active touch point * @param y Y coordinate of the active touch point * @return true if child views of v can be scrolled by delta of dx. */ protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) { if (v instanceof ViewGroup) { final ViewGroup group = (ViewGroup) v; final int scrollX = v.getScrollX(); final int scrollY = v.getScrollY(); final int count = group.getChildCount(); // Count backwards - let topmost views consume scroll distance first. for (int i = count - 1; i >= 0; i--) { // TODO: Add versioned support here for transformed views. // This will not work for transformed views in Honeycomb+ final View child = group.getChildAt(i); if (x + scrollX >= child.getLeft() && x + scrollX < child.getRight() && y + scrollY >= child.getTop() && y + scrollY < child.getBottom() && canScroll(child, true, dx, x + scrollX - child.getLeft(), y + scrollY - child.getTop())) { return true; } } } return checkV && ViewCompat.canScrollHorizontally(v, -dx); } @Override public boolean dispatchKeyEvent(KeyEvent event) { // Let the focused view and/or our descendants get the key first return super.dispatchKeyEvent(event) || executeKeyEvent(event); } /** * You can call this function yourself to have the scroll view perform * scrolling from a key event, just as if the event had been dispatched to * it by the view hierarchy. * * @param event The key event to execute. * @return Return true if the event was handled, else false. */ public boolean executeKeyEvent(KeyEvent event) { boolean handled = false; if (event.getAction() == KeyEvent.ACTION_DOWN) { switch (event.getKeyCode()) { case KeyEvent.KEYCODE_DPAD_LEFT: handled = arrowScroll(FOCUS_LEFT); break; case KeyEvent.KEYCODE_DPAD_RIGHT: handled = arrowScroll(FOCUS_RIGHT); break; case KeyEvent.KEYCODE_TAB: if (KeyEventCompat.hasNoModifiers(event)) { handled = arrowScroll(FOCUS_FORWARD); } else if (KeyEventCompat.hasModifiers(event, KeyEvent.META_SHIFT_ON)) { handled = arrowScroll(FOCUS_BACKWARD); } break; } } return handled; } public boolean arrowScroll(int direction) { View currentFocused = findFocus(); if (currentFocused == this) currentFocused = null; boolean handled = false; View nextFocused = FocusFinder.getInstance().findNextFocus(this, currentFocused, direction); if (nextFocused != null && nextFocused != currentFocused) { if (direction == View.FOCUS_LEFT) { // If there is nothing to the left, or this is causing us to // jump to the right, then what we really want to do is page left. 
if (currentFocused != null && nextFocused.getLeft() >= currentFocused.getLeft()) { handled = pageLeft(); } else { handled = nextFocused.requestFocus(); } } else if (direction == View.FOCUS_RIGHT) { // If there is nothing to the right, or this is causing us to // jump to the left, then what we really want to do is page right. if (currentFocused != null && nextFocused.getLeft() <= currentFocused.getLeft()) { handled = pageRight(); } else { handled = nextFocused.requestFocus(); } } } else if (direction == FOCUS_LEFT || direction == FOCUS_BACKWARD) { // Trying to move left and nothing there; try to page. handled = pageLeft(); } else if (direction == FOCUS_RIGHT || direction == FOCUS_FORWARD) { // Trying to move right and nothing there; try to page. handled = pageRight(); } if (handled) { playSoundEffect(SoundEffectConstants.getContantForFocusDirection(direction)); } return handled; } boolean pageLeft() { if (mCurItem > 0) { setCurrentItem(mCurItem-1, true); return true; } return false; } boolean pageRight() { if (mAdapter != null && mCurItem < (mAdapter.getCount()-1)) { setCurrentItem(mCurItem+1, true); return true; } return false; } /** * We only want the current page that is being shown to be focusable. */ @Override public void addFocusables(ArrayList<View> views, int direction, int focusableMode) { final int focusableCount = views.size(); final int descendantFocusability = getDescendantFocusability(); if (descendantFocusability != FOCUS_BLOCK_DESCENDANTS) { for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { child.addFocusables(views, direction, focusableMode); } } } } // we add ourselves (if focusable) in all cases except for when we are // FOCUS_AFTER_DESCENDANTS and there are some descendants focusable. this is // to avoid the focus search finding layouts when a more precise search // among the focusable children would be more interesting. if ( descendantFocusability != FOCUS_AFTER_DESCENDANTS || // No focusable descendants (focusableCount == views.size())) { // Note that we can't call the superclass here, because it will // add all views in. So we need to do the same thing View does. if (!isFocusable()) { return; } if ((focusableMode & FOCUSABLES_TOUCH_MODE) == FOCUSABLES_TOUCH_MODE && isInTouchMode() && !isFocusableInTouchMode()) { return; } if (views != null) { views.add(this); } } } /** * We only want the current page that is being shown to be touchable. */ @Override public void addTouchables(ArrayList<View> views) { // Note that we don't call super.addTouchables(), which means that // we don't call View.addTouchables(). This is okay because a ViewPager // is itself not touchable. for (int i = 0; i < getChildCount(); i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { child.addTouchables(views); } } } } /** * We only want the current page that is being shown to be focusable. 
*/ @Override protected boolean onRequestFocusInDescendants(int direction, Rect previouslyFocusedRect) { int index; int increment; int end; int count = getChildCount(); if ((direction & FOCUS_FORWARD) != 0) { index = 0; increment = 1; end = count; } else { index = count - 1; increment = -1; end = -1; } for (int i = index; i != end; i += increment) { View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem) { if (child.requestFocus(direction, previouslyFocusedRect)) { return true; } } } } return false; } @Override public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) { // ViewPagers should only report accessibility info for the current page, // otherwise things get very confusing. // TODO: Should this note something about the paging container? final int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); if (child.getVisibility() == VISIBLE) { final ItemInfo ii = infoForChild(child); if (ii != null && ii.position == mCurItem && child.dispatchPopulateAccessibilityEvent(event)) { return true; } } } return false; } @Override protected ViewGroup.LayoutParams generateDefaultLayoutParams() { return new LayoutParams(); } @Override protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return generateDefaultLayoutParams(); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p instanceof LayoutParams && super.checkLayoutParams(p); } @Override public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } private class PagerObserver extends DataSetObserver { @Override public void onChanged() { dataSetChanged(); } @Override public void onInvalidated() { dataSetChanged(); } } public static class LayoutParams extends ViewGroup.LayoutParams { /** * true if this view is a decoration on the pager itself and not * a view supplied by the adapter. */ public boolean isDecor; public int gravity; public LayoutParams() { super(FILL_PARENT, FILL_PARENT); } public LayoutParams(Context context, AttributeSet attrs) { super(context, attrs); final TypedArray a = context.obtainStyledAttributes(attrs, LAYOUT_ATTRS); gravity = a.getInteger(0, Gravity.NO_GRAVITY); a.recycle(); } } }
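The class above only defines the pager itself; driving it requires a PagerAdapter to supply page views and, usually, an OnPageChangeListener to react to selection changes. The sketch below is a minimal illustration and is not part of this source file or commit: it assumes the View-based PagerAdapter callbacks of this support library revision, and the class names PagerDemoActivity and ColorPagerAdapter are invented for the example.

import android.app.Activity;
import android.content.Context;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.widget.TextView;

public class PagerDemoActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        final ViewPager pager = new ViewPager(this);
        pager.setAdapter(new ColorPagerAdapter(this));
        pager.setOffscreenPageLimit(2);   // keep two pages alive on each side of the current one
        pager.setPageMargin(16);          // 16px gap drawn between adjacent pages
        pager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
            @Override
            public void onPageSelected(int position) {
                setTitle("Page " + (position + 1));
            }
        });
        setContentView(pager);
    }

    /** Trivial adapter that serves three colored text pages. */
    static class ColorPagerAdapter extends PagerAdapter {
        private static final int[] COLORS = { Color.RED, Color.GREEN, Color.BLUE };
        private final Context mContext;

        ColorPagerAdapter(Context context) {
            mContext = context;
        }

        @Override
        public int getCount() {
            return COLORS.length;
        }

        @Override
        public Object instantiateItem(View container, int position) {
            TextView page = new TextView(mContext);
            page.setBackgroundColor(COLORS[position]);
            page.setText("Page " + (position + 1));
            ((ViewPager) container).addView(page);
            return page;
        }

        @Override
        public void destroyItem(View container, int position, Object object) {
            ((ViewPager) container).removeView((View) object);
        }

        @Override
        public boolean isViewFromObject(View view, Object object) {
            return view == object;
        }

        // No-op lifecycle and state callbacks; older library revisions declare these abstract.
        @Override
        public void startUpdate(View container) { }

        @Override
        public void finishUpdate(View container) { }

        @Override
        public Parcelable saveState() {
            return null;
        }

        @Override
        public void restoreState(Parcelable state, ClassLoader loader) { }
    }
}

The setOffscreenPageLimit(2) and setPageMargin(16) calls are optional tuning; the defaults in this revision are one offscreen page and no margin.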
am 5de8804b: Fix bug 5570272 - Monkeys, older devices, and ViewPager * commit '5de8804b248eebaf7c29113ec80a0dff3a03c97a': Fix bug 5570272 - Monkeys, older devices, and ViewPager
v4/java/android/support/v4/view/ViewPager.java
am 5de8804b: Fix bug 5570272 - Monkeys, older devices, and ViewPager
Java
apache-2.0
e93a05258e9583add463eb0d82ec4c25bd9f282c
0
apache/felix-dev
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.felix.http.base.internal.handler; import java.io.Serializable; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.NoSuchElementException; import java.util.Set; import javax.servlet.ServletContext; import javax.servlet.http.HttpSession; import javax.servlet.http.HttpSessionBindingEvent; import javax.servlet.http.HttpSessionBindingListener; import javax.servlet.http.HttpSessionContext; import javax.servlet.http.HttpSessionEvent; import org.apache.felix.http.base.internal.context.ExtServletContext; import org.apache.felix.http.base.internal.service.HttpServiceFactory; /** * The session wrapper keeps track of the internal session, manages their attributes * separately and also handles session timeout. */ public class HttpSessionWrapper implements HttpSession { /** All special attributes are prefixed with this prefix. */ private static final String PREFIX = "org.apache.felix.http.session.context."; /** For each internal session, the attributes are prefixed with this followed by the context id */ private static final String ATTR_PREFIX = PREFIX + "attr."; /** The created time for the internal session (appended with context id) */ private static final String ATTR_CREATED = PREFIX + "created."; /** The last accessed time for the internal session (appended with context id) */ private static final String ATTR_LAST_ACCESSED = PREFIX + "lastaccessed."; /** The max inactive time (appended with context id) */ private static final String ATTR_MAX_INACTIVE = PREFIX + "maxinactive."; /** The underlying container session. */ private final HttpSession delegate; /** The corresponding servlet context. */ private final ExtServletContext context; /** The id for this session. */ private final String sessionId; /** The key prefix for attributes belonging to this session. */ private final String keyPrefix; /** Flag to handle the validity of this session. */ private volatile boolean isInvalid = false; /** The time this has been created. */ private final long created; /** The time this has been last accessed. */ private final long lastAccessed; /** The max timeout interval. */ private int maxTimeout; /** * Is this a new session? */ private final boolean isNew; public static boolean hasSession(final Long contextId, final HttpSession session) { final String sessionId = contextId == null ? 
String.valueOf(HttpServiceFactory.HTTP_SERVICE_CONTEXT_SERVICE_ID) : String.valueOf(contextId); return session.getAttribute(ATTR_CREATED + sessionId) != null; } public static Set<Long> getExpiredSessionContextIds(final HttpSession session) { final long now = System.currentTimeMillis(); final Set<Long> ids = new HashSet<Long>(); final Enumeration<String> names = session.getAttributeNames(); while ( names.hasMoreElements() ) { final String name = names.nextElement(); if ( name.startsWith(ATTR_LAST_ACCESSED) ) { final String id = name.substring(ATTR_LAST_ACCESSED.length()); final long lastAccess = (Long)session.getAttribute(name); final Integer maxTimeout = (Integer)session.getAttribute(ATTR_MAX_INACTIVE + id); if ( maxTimeout > 0 && lastAccess + maxTimeout < now ) { ids.add(Long.valueOf(id)); } } } return ids; } public static Set<Long> getSessionContextIds(final HttpSession session) { final Set<Long> ids = new HashSet<Long>(); final Enumeration<String> names = session.getAttributeNames(); while ( names.hasMoreElements() ) { final String name = names.nextElement(); if ( name.startsWith(ATTR_LAST_ACCESSED) ) { final String id = name.substring(ATTR_LAST_ACCESSED.length()); ids.add(Long.valueOf(id)); } } return ids; } /** * Creates a new {@link HttpSessionWrapper} instance. */ public HttpSessionWrapper(final Long contextId, final HttpSession session, final ExtServletContext context, final boolean terminate) { this.delegate = session; this.context = context; this.sessionId = contextId == null ? String.valueOf(HttpServiceFactory.HTTP_SERVICE_CONTEXT_SERVICE_ID) : String.valueOf(contextId); this.keyPrefix = contextId == null ? null : ATTR_PREFIX + this.sessionId + "."; if ( this.keyPrefix != null ) { final long now = System.currentTimeMillis(); if ( session.getAttribute(ATTR_CREATED + this.sessionId) == null ) { this.created = now; this.maxTimeout = session.getMaxInactiveInterval(); isNew = true; session.setAttribute(ATTR_CREATED + this.sessionId, this.created); session.setAttribute(ATTR_MAX_INACTIVE + this.sessionId, this.maxTimeout); if ( context.getHttpSessionListener() != null ) { context.getHttpSessionListener().sessionCreated(new HttpSessionEvent(this)); } } else { this.created = (Long)session.getAttribute(ATTR_CREATED + this.sessionId); this.maxTimeout = (Integer)session.getAttribute(ATTR_MAX_INACTIVE + this.sessionId); isNew = false; } this.lastAccessed = now; if ( !terminate ) { session.setAttribute(ATTR_LAST_ACCESSED + this.sessionId, this.lastAccessed); } } else { this.isNew = session.isNew(); this.lastAccessed = session.getLastAccessedTime(); this.created = session.getCreationTime(); } } /** * Helper method to get the real key within the real session. */ private String getKey(final String name) { return this.keyPrefix == null ? name : this.keyPrefix.concat(name); } /** * Check whether this session is still valid. 
* @throws IllegalStateException if session is not valid anymore */ private void checkInvalid() { if ( this.isInvalid ) { throw new IllegalStateException("Session is invalid."); } } @Override public Object getAttribute(final String name) { this.checkInvalid(); Object result = this.delegate.getAttribute(this.getKey(name)); if ( result instanceof SessionBindingValueListenerWrapper ) { result = ((SessionBindingValueListenerWrapper)result).getHttpSessionBindingListener(); } return result; } @Override public Enumeration<String> getAttributeNames() { this.checkInvalid(); final Enumeration<String> e = this.delegate.getAttributeNames(); return new Enumeration<String>() { String next = peek(); private String peek() { while ( e.hasMoreElements() ) { final String name = e.nextElement(); if ( keyPrefix == null && !name.startsWith(PREFIX) ) { return name; } if ( name.startsWith(keyPrefix)) { return name.substring(keyPrefix.length()); } } return null; } @Override public boolean hasMoreElements() { return next != null; } @Override public String nextElement() { if ( next == null ) { throw new NoSuchElementException(); } final String result = next; next = this.peek(); return result; } }; } @Override public long getCreationTime() { this.checkInvalid(); return this.created; } @Override public String getId() { this.checkInvalid(); return this.delegate.getId() + "-" + this.sessionId; } @Override public long getLastAccessedTime() { this.checkInvalid(); return this.lastAccessed; } @Override public int getMaxInactiveInterval() { // no validity check conforming to the javadocs return this.maxTimeout; } @Override public ServletContext getServletContext() { // no validity check conforming to the javadocs return this.context; } @Override public Object getValue(String name) { return this.getAttribute(name); } @Override public String[] getValueNames() { final List<String> names = new ArrayList<String>(); final Enumeration<String> e = this.getAttributeNames(); while ( e.hasMoreElements() ) { names.add(e.nextElement()); } return names.toArray(new String[names.size()]); } @Override public void invalidate() { this.checkInvalid(); if ( this.keyPrefix != null ) { this.delegate.removeAttribute(ATTR_CREATED + this.sessionId); this.delegate.removeAttribute(ATTR_LAST_ACCESSED + this.sessionId); this.delegate.removeAttribute(ATTR_MAX_INACTIVE + this.sessionId); final Enumeration<String> names = this.delegate.getAttributeNames(); while ( names.hasMoreElements() ) { final String name = names.nextElement(); if ( name.startsWith(this.keyPrefix) ) { this.removeAttribute(name.substring(this.keyPrefix.length())); } } } // if the session is empty we can invalidate final Enumeration<String> names = this.delegate.getAttributeNames(); if ( !names.hasMoreElements() ) { this.delegate.invalidate(); } if ( context.getHttpSessionListener() != null ) { context.getHttpSessionListener().sessionDestroyed(new HttpSessionEvent(this)); } this.isInvalid = true; } @Override public boolean isNew() { this.checkInvalid(); return this.isNew; } @Override public void putValue(final String name, final Object value) { this.setAttribute(name, value); } @Override public void removeAttribute(final String name) { this.checkInvalid(); final Object oldValue = this.getAttribute(name); if ( oldValue != null ) { this.delegate.removeAttribute(this.getKey(name)); if ( this.keyPrefix != null && oldValue instanceof HttpSessionBindingListener ) { ((HttpSessionBindingListener)oldValue).valueUnbound(new HttpSessionBindingEvent(this, name)); } if ( 
this.context.getHttpSessionAttributeListener() != null ) { this.context.getHttpSessionAttributeListener().attributeRemoved(new HttpSessionBindingEvent(this, name, oldValue)); } } } @Override public void removeValue(final String name) { this.removeAttribute(name); } @Override public void setAttribute(final String name, final Object value) { this.checkInvalid(); if ( value == null ) { this.removeAttribute(name); return; } final Object oldValue = this.getAttribute(name); // wrap http session binding listener to avoid container calling it! if ( this.keyPrefix != null && value instanceof HttpSessionBindingListener ) { this.delegate.setAttribute(this.getKey(name), new SessionBindingValueListenerWrapper((HttpSessionBindingListener)value)); } else { this.delegate.setAttribute(this.getKey(name), value); } if ( this.keyPrefix != null && value instanceof HttpSessionBindingListener ) { ((HttpSessionBindingListener)value).valueBound(new HttpSessionBindingEvent(this, name)); } if ( this.context.getHttpSessionAttributeListener() != null ) { if ( oldValue != null ) { this.context.getHttpSessionAttributeListener().attributeReplaced(new HttpSessionBindingEvent(this, name, oldValue)); } else { this.context.getHttpSessionAttributeListener().attributeAdded(new HttpSessionBindingEvent(this, name, value)); } } } @Override public void setMaxInactiveInterval(final int interval) { if ( this.delegate.getMaxInactiveInterval() < interval ) { this.delegate.setMaxInactiveInterval(interval); } if ( this.keyPrefix != null ) { this.maxTimeout = interval; this.delegate.setAttribute(ATTR_MAX_INACTIVE + this.sessionId, interval); } } @Override @SuppressWarnings("deprecation") public HttpSessionContext getSessionContext() { // no need to check validity conforming to the javadoc return this.delegate.getSessionContext(); } private static final class SessionBindingValueListenerWrapper implements Serializable { private static final long serialVersionUID = 4009563108883768425L; private final HttpSessionBindingListener listener; public SessionBindingValueListenerWrapper(final HttpSessionBindingListener listener) { this.listener = listener; } public HttpSessionBindingListener getHttpSessionBindingListener() { return listener; } } }
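The class comment in the file above describes how the wrapper keeps each context's session attributes separate inside one container session by prefixing the attribute keys. A minimal, self-contained sketch of that namespacing idea follows; the class name, the shared map standing in for the container session, and the demo main method are illustrative and not part of the Felix API.

import java.util.HashMap;
import java.util.Map;

// Minimal sketch of the attribute-namespacing idea used by HttpSessionWrapper:
// every context gets its own key prefix inside one shared attribute map, so two
// contexts can store the same attribute name without colliding.
public class ContextScopedAttributes {

    // Mirrors the ATTR_PREFIX + contextId + "." convention from the wrapper.
    private static final String ATTR_PREFIX = "org.apache.felix.http.session.context.attr.";

    private final Map<String, Object> delegate; // stands in for the container session
    private final String keyPrefix;

    public ContextScopedAttributes(Map<String, Object> delegate, long contextId) {
        this.delegate = delegate;
        this.keyPrefix = ATTR_PREFIX + contextId + ".";
    }

    private String key(String name) {
        return keyPrefix.concat(name);
    }

    public void setAttribute(String name, Object value) {
        delegate.put(key(name), value);
    }

    public Object getAttribute(String name) {
        return delegate.get(key(name));
    }

    public static void main(String[] args) {
        Map<String, Object> sharedSession = new HashMap<>();
        ContextScopedAttributes ctx1 = new ContextScopedAttributes(sharedSession, 1);
        ContextScopedAttributes ctx2 = new ContextScopedAttributes(sharedSession, 2);
        ctx1.setAttribute("user", "alice");
        ctx2.setAttribute("user", "bob");
        System.out.println(ctx1.getAttribute("user")); // alice
        System.out.println(ctx2.getAttribute("user")); // bob
    }
}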
http/base/src/main/java/org/apache/felix/http/base/internal/handler/HttpSessionWrapper.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.felix.http.base.internal.handler; import java.io.Serializable; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.NoSuchElementException; import java.util.Set; import javax.servlet.ServletContext; import javax.servlet.http.HttpSession; import javax.servlet.http.HttpSessionBindingEvent; import javax.servlet.http.HttpSessionBindingListener; import javax.servlet.http.HttpSessionContext; import javax.servlet.http.HttpSessionEvent; import org.apache.felix.http.base.internal.context.ExtServletContext; import org.apache.felix.http.base.internal.service.HttpServiceFactory; /** * The session wrapper keeps track of the internal session, manages their attributes * separately and also handles session timeout. */ public class HttpSessionWrapper implements HttpSession { /** All special attributes are prefixed with this prefix. */ private static final String PREFIX = "org.apache.felix.http.session.context."; /** For each internal session, the attributes are prefixed with this followed by the context id */ private static final String ATTR_PREFIX = PREFIX + "attr."; /** The created time for the internal session (appended with context id) */ private static final String ATTR_CREATED = PREFIX + "created."; /** The last accessed time for the internal session (appended with context id) */ private static final String ATTR_LAST_ACCESSED = PREFIX + "lastaccessed."; /** The max inactive time (appended with context id) */ private static final String ATTR_MAX_INACTIVE = PREFIX + "maxinactive."; /** The underlying container session. */ private final HttpSession delegate; /** The corresponding servlet context. */ private final ExtServletContext context; /** The id for this session. */ private final String sessionId; /** The key prefix for attributes belonging to this session. */ private final String keyPrefix; /** Flag to handle the validity of this session. */ private volatile boolean isInvalid = false; /** The time this has been created. */ private final long created; /** The time this has been last accessed. */ private final long lastAccessed; /** The max timeout interval. */ private int maxTimeout; /** * Is this a new session? */ private final boolean isNew; public static boolean hasSession(final Long contextId, final HttpSession session) { final String sessionId = contextId == null ? 
String.valueOf(HttpServiceFactory.HTTP_SERVICE_CONTEXT_SERVICE_ID) : String.valueOf(contextId); return session.getAttribute(ATTR_CREATED + sessionId) != null; } public static Set<Long> getExpiredSessionContextIds(final HttpSession session) { final long now = System.currentTimeMillis(); final Set<Long> ids = new HashSet<Long>(); final Enumeration<String> names = session.getAttributeNames(); while ( names.hasMoreElements() ) { final String name = names.nextElement(); if ( name.startsWith(ATTR_LAST_ACCESSED) ) { final String id = name.substring(ATTR_LAST_ACCESSED.length()); final long lastAccess = (Long)session.getAttribute(name); final Integer maxTimeout = (Integer)session.getAttribute(ATTR_MAX_INACTIVE + id); if ( lastAccess + maxTimeout < now ) { ids.add(Long.valueOf(id)); } } } return ids; } public static Set<Long> getSessionContextIds(final HttpSession session) { final Set<Long> ids = new HashSet<Long>(); final Enumeration<String> names = session.getAttributeNames(); while ( names.hasMoreElements() ) { final String name = names.nextElement(); if ( name.startsWith(ATTR_LAST_ACCESSED) ) { final String id = name.substring(ATTR_LAST_ACCESSED.length()); ids.add(Long.valueOf(id)); } } return ids; } /** * Creates a new {@link HttpSessionWrapper} instance. */ public HttpSessionWrapper(final Long contextId, final HttpSession session, final ExtServletContext context, final boolean terminate) { this.delegate = session; this.context = context; this.sessionId = contextId == null ? String.valueOf(HttpServiceFactory.HTTP_SERVICE_CONTEXT_SERVICE_ID) : String.valueOf(contextId); this.keyPrefix = contextId == null ? null : ATTR_PREFIX + this.sessionId + "."; if ( this.keyPrefix != null ) { final long now = System.currentTimeMillis(); if ( session.getAttribute(ATTR_CREATED + this.sessionId) == null ) { this.created = now; this.maxTimeout = session.getMaxInactiveInterval(); isNew = true; session.setAttribute(ATTR_CREATED + this.sessionId, this.created); session.setAttribute(ATTR_MAX_INACTIVE + this.sessionId, this.maxTimeout); if ( context.getHttpSessionListener() != null ) { context.getHttpSessionListener().sessionCreated(new HttpSessionEvent(this)); } } else { this.created = (Long)session.getAttribute(ATTR_CREATED + this.sessionId); this.maxTimeout = (Integer)session.getAttribute(ATTR_MAX_INACTIVE + this.sessionId); isNew = false; } this.lastAccessed = now; if ( !terminate ) { session.setAttribute(ATTR_LAST_ACCESSED + this.sessionId, this.lastAccessed); } } else { this.isNew = session.isNew(); this.lastAccessed = session.getLastAccessedTime(); this.created = session.getCreationTime(); } } /** * Helper method to get the real key within the real session. */ private String getKey(final String name) { return this.keyPrefix == null ? name : this.keyPrefix.concat(name); } /** * Check whether this session is still valid. 
* @throws IllegalStateException if session is not valid anymore */ private void checkInvalid() { if ( this.isInvalid ) { throw new IllegalStateException("Session is invalid."); } } @Override public Object getAttribute(final String name) { this.checkInvalid(); Object result = this.delegate.getAttribute(this.getKey(name)); if ( result instanceof SessionBindingValueListenerWrapper ) { result = ((SessionBindingValueListenerWrapper)result).getHttpSessionBindingListener(); } return result; } @Override public Enumeration<String> getAttributeNames() { this.checkInvalid(); final Enumeration<String> e = this.delegate.getAttributeNames(); return new Enumeration<String>() { String next = peek(); private String peek() { while ( e.hasMoreElements() ) { final String name = e.nextElement(); if ( keyPrefix == null && !name.startsWith(PREFIX) ) { return name; } if ( name.startsWith(keyPrefix)) { return name.substring(keyPrefix.length()); } } return null; } @Override public boolean hasMoreElements() { return next != null; } @Override public String nextElement() { if ( next == null ) { throw new NoSuchElementException(); } final String result = next; next = this.peek(); return result; } }; } @Override public long getCreationTime() { this.checkInvalid(); return this.created; } @Override public String getId() { this.checkInvalid(); return this.delegate.getId() + "-" + this.sessionId; } @Override public long getLastAccessedTime() { this.checkInvalid(); return this.lastAccessed; } @Override public int getMaxInactiveInterval() { // no validity check conforming to the javadocs return this.maxTimeout; } @Override public ServletContext getServletContext() { // no validity check conforming to the javadocs return this.context; } @Override public Object getValue(String name) { return this.getAttribute(name); } @Override public String[] getValueNames() { final List<String> names = new ArrayList<String>(); final Enumeration<String> e = this.getAttributeNames(); while ( e.hasMoreElements() ) { names.add(e.nextElement()); } return names.toArray(new String[names.size()]); } @Override public void invalidate() { this.checkInvalid(); if ( this.keyPrefix != null ) { this.delegate.removeAttribute(ATTR_CREATED + this.sessionId); this.delegate.removeAttribute(ATTR_LAST_ACCESSED + this.sessionId); this.delegate.removeAttribute(ATTR_MAX_INACTIVE + this.sessionId); final Enumeration<String> names = this.delegate.getAttributeNames(); while ( names.hasMoreElements() ) { final String name = names.nextElement(); if ( name.startsWith(this.keyPrefix) ) { this.removeAttribute(name.substring(this.keyPrefix.length())); } } } // if the session is empty we can invalidate final Enumeration<String> names = this.delegate.getAttributeNames(); if ( !names.hasMoreElements() ) { this.delegate.invalidate(); } if ( context.getHttpSessionListener() != null ) { context.getHttpSessionListener().sessionDestroyed(new HttpSessionEvent(this)); } this.isInvalid = true; } @Override public boolean isNew() { this.checkInvalid(); return this.isNew; } @Override public void putValue(final String name, final Object value) { this.setAttribute(name, value); } @Override public void removeAttribute(final String name) { this.checkInvalid(); final Object oldValue = this.getAttribute(name); if ( oldValue != null ) { this.delegate.removeAttribute(this.getKey(name)); if ( this.keyPrefix != null && oldValue instanceof HttpSessionBindingListener ) { ((HttpSessionBindingListener)oldValue).valueUnbound(new HttpSessionBindingEvent(this, name)); } if ( 
this.context.getHttpSessionAttributeListener() != null ) { this.context.getHttpSessionAttributeListener().attributeRemoved(new HttpSessionBindingEvent(this, name, oldValue)); } } } @Override public void removeValue(final String name) { this.removeAttribute(name); } @Override public void setAttribute(final String name, final Object value) { this.checkInvalid(); if ( value == null ) { this.removeAttribute(name); return; } final Object oldValue = this.getAttribute(name); // wrap http session binding listener to avoid container calling it! if ( this.keyPrefix != null && value instanceof HttpSessionBindingListener ) { this.delegate.setAttribute(this.getKey(name), new SessionBindingValueListenerWrapper((HttpSessionBindingListener)value)); } else { this.delegate.setAttribute(this.getKey(name), value); } if ( this.keyPrefix != null && value instanceof HttpSessionBindingListener ) { ((HttpSessionBindingListener)value).valueBound(new HttpSessionBindingEvent(this, name)); } if ( this.context.getHttpSessionAttributeListener() != null ) { if ( oldValue != null ) { this.context.getHttpSessionAttributeListener().attributeReplaced(new HttpSessionBindingEvent(this, name, oldValue)); } else { this.context.getHttpSessionAttributeListener().attributeAdded(new HttpSessionBindingEvent(this, name, value)); } } } @Override public void setMaxInactiveInterval(final int interval) { if ( this.delegate.getMaxInactiveInterval() < interval ) { this.delegate.setMaxInactiveInterval(interval); } if ( this.keyPrefix != null ) { this.maxTimeout = interval; this.delegate.setAttribute(ATTR_MAX_INACTIVE + this.sessionId, interval); } } @Override @SuppressWarnings("deprecation") public HttpSessionContext getSessionContext() { // no need to check validity conforming to the javadoc return this.delegate.getSessionContext(); } private static final class SessionBindingValueListenerWrapper implements Serializable { private static final long serialVersionUID = 4009563108883768425L; private final HttpSessionBindingListener listener; public SessionBindingValueListenerWrapper(final HttpSessionBindingListener listener) { this.listener = listener; } public HttpSessionBindingListener getHttpSessionBindingListener() { return listener; } } }
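The visible functional difference between this older version and the new_contents earlier in the record is the added maxTimeout > 0 guard in getExpiredSessionContextIds: the servlet API treats a non-positive max-inactive interval as "never expires", so without the guard such contexts would be reported as expired immediately. Below is a standalone sketch of the guarded test with illustrative names, keeping both values in milliseconds for simplicity; note that the original stores the interval straight from getMaxInactiveInterval(), which the servlet API defines in seconds, so how the units are reconciled there is left open here.

// Standalone sketch of the expiry check added by this commit: a non-positive
// max-inactive value means "never expires" rather than "already expired".
// Both time arguments are milliseconds here for simplicity.
public final class ExpiryCheck {

    private ExpiryCheck() {
    }

    static boolean isExpired(long lastAccessMillis, long maxInactiveMillis, long nowMillis) {
        return maxInactiveMillis > 0 && lastAccessMillis + maxInactiveMillis < nowMillis;
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println(isExpired(now - 10_000, 5_000, now)); // true: timed out
        System.out.println(isExpired(now - 10_000, 0, now));     // false: never expires
    }
}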
FELIX-4782 : Implement session handling git-svn-id: e057f57e93a604d3b43d277ae69bde5ebf332112@1681782 13f79535-47bb-0310-9956-ffa450edef68
http/base/src/main/java/org/apache/felix/http/base/internal/handler/HttpSessionWrapper.java
FELIX-4782 : Implement session handling
Java
bsd-3-clause
4ad1ab4fae911963e362cab4554da02c1e7fc272
0
fmapfmapfmap/onionoo-dev

package org.torproject.onionoo; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import javax.servlet.ServletContext; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; class NodeIndex { private String relaysPublishedString; public void setRelaysPublishedString(String relaysPublishedString) { this.relaysPublishedString = relaysPublishedString; } public String getRelaysPublishedString() { return relaysPublishedString; } private String bridgesPublishedString; public void setBridgesPublishedString(String bridgesPublishedString) { this.bridgesPublishedString = bridgesPublishedString; } public String getBridgesPublishedString() { return bridgesPublishedString; } private List<String> relaysByConsensusWeight; public void setRelaysByConsensusWeight( List<String> relaysByConsensusWeight) { this.relaysByConsensusWeight = relaysByConsensusWeight; } public List<String> getRelaysByConsensusWeight() { return relaysByConsensusWeight; } private Map<String, String> relayFingerprintSummaryLines; public void setRelayFingerprintSummaryLines( Map<String, String> relayFingerprintSummaryLines) { this.relayFingerprintSummaryLines = relayFingerprintSummaryLines; } public Map<String, String> getRelayFingerprintSummaryLines() { return this.relayFingerprintSummaryLines; } private Map<String, String> bridgeFingerprintSummaryLines; public void setBridgeFingerprintSummaryLines( Map<String, String> bridgeFingerprintSummaryLines) { this.bridgeFingerprintSummaryLines = bridgeFingerprintSummaryLines; } public Map<String, String> getBridgeFingerprintSummaryLines() { return this.bridgeFingerprintSummaryLines; } private Map<String, Set<String>> relaysByCountryCode = null; public void setRelaysByCountryCode( Map<String, Set<String>> relaysByCountryCode) { this.relaysByCountryCode = relaysByCountryCode; } public Map<String, Set<String>> getRelaysByCountryCode() { return relaysByCountryCode; } private Map<String, Set<String>> relaysByASNumber = null; public void setRelaysByASNumber( Map<String, Set<String>> relaysByASNumber) { this.relaysByASNumber = relaysByASNumber; } public Map<String, Set<String>> getRelaysByASNumber() { return relaysByASNumber; } private Map<String, Set<String>> relaysByFlag = null; public void setRelaysByFlag(Map<String, Set<String>> relaysByFlag) { this.relaysByFlag = relaysByFlag; } public Map<String, Set<String>> getRelaysByFlag() { return relaysByFlag; } private Map<String, Set<String>> bridgesByFlag = null; public void setBridgesByFlag(Map<String, Set<String>> bridgesByFlag) { this.bridgesByFlag = bridgesByFlag; } public Map<String, Set<String>> getBridgesByFlag() { return bridgesByFlag; } private Map<String, Set<String>> relaysByContact = null; public void setRelaysByContact( Map<String, Set<String>> relaysByContact) { this.relaysByContact = relaysByContact; } public Map<String, Set<String>> getRelaysByContact() { return relaysByContact; } private SortedMap<Integer, Set<String>> relaysByFirstSeenDays; public void setRelaysByFirstSeenDays( SortedMap<Integer, Set<String>> relaysByFirstSeenDays) { this.relaysByFirstSeenDays = relaysByFirstSeenDays; } public SortedMap<Integer, Set<String>> getRelaysByFirstSeenDays() { return relaysByFirstSeenDays; } private SortedMap<Integer, Set<String>> bridgesByFirstSeenDays; public void 
setBridgesByFirstSeenDays( SortedMap<Integer, Set<String>> bridgesByFirstSeenDays) { this.bridgesByFirstSeenDays = bridgesByFirstSeenDays; } public SortedMap<Integer, Set<String>> getBridgesByFirstSeenDays() { return bridgesByFirstSeenDays; } private SortedMap<Integer, Set<String>> relaysByLastSeenDays; public void setRelaysByLastSeenDays( SortedMap<Integer, Set<String>> relaysByLastSeenDays) { this.relaysByLastSeenDays = relaysByLastSeenDays; } public SortedMap<Integer, Set<String>> getRelaysByLastSeenDays() { return relaysByLastSeenDays; } private SortedMap<Integer, Set<String>> bridgesByLastSeenDays; public void setBridgesByLastSeenDays( SortedMap<Integer, Set<String>> bridgesByLastSeenDays) { this.bridgesByLastSeenDays = bridgesByLastSeenDays; } public SortedMap<Integer, Set<String>> getBridgesByLastSeenDays() { return bridgesByLastSeenDays; } } public class NodeIndexer implements ServletContextListener, Runnable { public void contextInitialized(ServletContextEvent contextEvent) { ServletContext servletContext = contextEvent.getServletContext(); File outDir = new File(servletContext.getInitParameter("outDir")); DocumentStore documentStore = ApplicationFactory.getDocumentStore(); documentStore.setOutDir(outDir); /* The servlet container created us, and we need to avoid that * ApplicationFactory creates another instance of us. */ ApplicationFactory.setNodeIndexer(this); this.startIndexing(); } public void contextDestroyed(ServletContextEvent contextEvent) { this.stopIndexing(); } private long lastIndexed = -1L; private NodeIndex latestNodeIndex = null; private Thread nodeIndexerThread = null; public synchronized long getLastIndexed(long timeoutMillis) { if (this.lastIndexed == -1L && this.nodeIndexerThread != null && timeoutMillis > 0L) { try { this.wait(timeoutMillis); } catch (InterruptedException e) { } } return this.lastIndexed; } public synchronized NodeIndex getLatestNodeIndex(long timeoutMillis) { if (this.latestNodeIndex == null && this.nodeIndexerThread != null && timeoutMillis > 0L) { try { this.wait(timeoutMillis); } catch (InterruptedException e) { } } return this.latestNodeIndex; } public synchronized void startIndexing() { if (this.nodeIndexerThread == null) { this.nodeIndexerThread = new Thread(this); this.nodeIndexerThread.setDaemon(true); this.nodeIndexerThread.start(); } } public void run() { while (this.nodeIndexerThread != null) { this.indexNodeStatuses(); try { Thread.sleep(DateTimeHelper.ONE_MINUTE); } catch (InterruptedException e) { } } } public synchronized void stopIndexing() { Thread indexerThread = this.nodeIndexerThread; this.nodeIndexerThread = null; indexerThread.interrupt(); } private void indexNodeStatuses() { long updateStatusMillis = -1L; DocumentStore documentStore = ApplicationFactory.getDocumentStore(); UpdateStatus updateStatus = documentStore.retrieve(UpdateStatus.class, false); if (updateStatus != null && updateStatus.getDocumentString() != null) { String updateString = updateStatus.getDocumentString(); try { updateStatusMillis = Long.parseLong(updateString.trim()); } catch (NumberFormatException e) { /* Handle below. 
*/ } } synchronized (this) { if (updateStatusMillis <= this.lastIndexed) { return; } } List<String> newRelaysByConsensusWeight = new ArrayList<String>(); Map<String, String> newRelayFingerprintSummaryLines = new HashMap<String, String>(), newBridgeFingerprintSummaryLines = new HashMap<String, String>(); Map<String, Set<String>> newRelaysByCountryCode = new HashMap<String, Set<String>>(), newRelaysByASNumber = new HashMap<String, Set<String>>(), newRelaysByFlag = new HashMap<String, Set<String>>(), newBridgesByFlag = new HashMap<String, Set<String>>(), newRelaysByContact = new HashMap<String, Set<String>>(); SortedMap<Integer, Set<String>> newRelaysByFirstSeenDays = new TreeMap<Integer, Set<String>>(), newBridgesByFirstSeenDays = new TreeMap<Integer, Set<String>>(), newRelaysByLastSeenDays = new TreeMap<Integer, Set<String>>(), newBridgesByLastSeenDays = new TreeMap<Integer, Set<String>>(); Set<NodeStatus> currentRelays = new HashSet<NodeStatus>(), currentBridges = new HashSet<NodeStatus>(); SortedSet<String> fingerprints = documentStore.list(NodeStatus.class, false); long relaysLastValidAfterMillis = 0L, bridgesLastPublishedMillis = 0L; for (String fingerprint : fingerprints) { NodeStatus node = documentStore.retrieve(NodeStatus.class, true, fingerprint); if (node.isRelay()) { relaysLastValidAfterMillis = Math.max( relaysLastValidAfterMillis, node.getLastSeenMillis()); currentRelays.add(node); } else { bridgesLastPublishedMillis = Math.max( bridgesLastPublishedMillis, node.getLastSeenMillis()); currentBridges.add(node); } } Time time = ApplicationFactory.getTime(); List<String> orderRelaysByConsensusWeight = new ArrayList<String>(); for (NodeStatus entry : currentRelays) { String fingerprint = entry.getFingerprint().toUpperCase(); String hashedFingerprint = entry.getHashedFingerprint(). 
toUpperCase(); entry.setRunning(entry.getLastSeenMillis() == relaysLastValidAfterMillis); String line = formatRelaySummaryLine(entry); newRelayFingerprintSummaryLines.put(fingerprint, line); newRelayFingerprintSummaryLines.put(hashedFingerprint, line); long consensusWeight = entry.getConsensusWeight(); orderRelaysByConsensusWeight.add(String.format("%020d %s", consensusWeight, fingerprint)); orderRelaysByConsensusWeight.add(String.format("%020d %s", consensusWeight, hashedFingerprint)); if (entry.getCountryCode() != null) { String countryCode = entry.getCountryCode(); if (!newRelaysByCountryCode.containsKey(countryCode)) { newRelaysByCountryCode.put(countryCode, new HashSet<String>()); } newRelaysByCountryCode.get(countryCode).add(fingerprint); newRelaysByCountryCode.get(countryCode).add(hashedFingerprint); } if (entry.getASNumber() != null) { String aSNumber = entry.getASNumber(); if (!newRelaysByASNumber.containsKey(aSNumber)) { newRelaysByASNumber.put(aSNumber, new HashSet<String>()); } newRelaysByASNumber.get(aSNumber).add(fingerprint); newRelaysByASNumber.get(aSNumber).add(hashedFingerprint); } for (String flag : entry.getRelayFlags()) { String flagLowerCase = flag.toLowerCase(); if (!newRelaysByFlag.containsKey(flagLowerCase)) { newRelaysByFlag.put(flagLowerCase, new HashSet<String>()); } newRelaysByFlag.get(flagLowerCase).add(fingerprint); newRelaysByFlag.get(flagLowerCase).add(hashedFingerprint); } int daysSinceFirstSeen = (int) ((time.currentTimeMillis() - entry.getFirstSeenMillis()) / DateTimeHelper.ONE_DAY); if (!newRelaysByFirstSeenDays.containsKey(daysSinceFirstSeen)) { newRelaysByFirstSeenDays.put(daysSinceFirstSeen, new HashSet<String>()); } newRelaysByFirstSeenDays.get(daysSinceFirstSeen).add(fingerprint); newRelaysByFirstSeenDays.get(daysSinceFirstSeen).add( hashedFingerprint); int daysSinceLastSeen = (int) ((time.currentTimeMillis() - entry.getLastSeenMillis()) / DateTimeHelper.ONE_DAY); if (!newRelaysByLastSeenDays.containsKey(daysSinceLastSeen)) { newRelaysByLastSeenDays.put(daysSinceLastSeen, new HashSet<String>()); } newRelaysByLastSeenDays.get(daysSinceLastSeen).add(fingerprint); newRelaysByLastSeenDays.get(daysSinceLastSeen).add( hashedFingerprint); String contact = entry.getContact(); if (!newRelaysByContact.containsKey(contact)) { newRelaysByContact.put(contact, new HashSet<String>()); } newRelaysByContact.get(contact).add(fingerprint); newRelaysByContact.get(contact).add(hashedFingerprint); } Collections.sort(orderRelaysByConsensusWeight); newRelaysByConsensusWeight = new ArrayList<String>(); for (String relay : orderRelaysByConsensusWeight) { newRelaysByConsensusWeight.add(relay.split(" ")[1]); } for (NodeStatus entry : currentBridges) { String hashedFingerprint = entry.getFingerprint().toUpperCase(); String hashedHashedFingerprint = entry.getHashedFingerprint(). 
toUpperCase(); entry.setRunning(entry.getRelayFlags().contains("Running") && entry.getLastSeenMillis() == bridgesLastPublishedMillis); String line = formatBridgeSummaryLine(entry); newBridgeFingerprintSummaryLines.put(hashedFingerprint, line); newBridgeFingerprintSummaryLines.put(hashedHashedFingerprint, line); for (String flag : entry.getRelayFlags()) { String flagLowerCase = flag.toLowerCase(); if (!newBridgesByFlag.containsKey(flagLowerCase)) { newBridgesByFlag.put(flagLowerCase, new HashSet<String>()); } newBridgesByFlag.get(flagLowerCase).add(hashedFingerprint); newBridgesByFlag.get(flagLowerCase).add( hashedHashedFingerprint); } int daysSinceFirstSeen = (int) ((time.currentTimeMillis() - entry.getFirstSeenMillis()) / DateTimeHelper.ONE_DAY); if (!newBridgesByFirstSeenDays.containsKey(daysSinceFirstSeen)) { newBridgesByFirstSeenDays.put(daysSinceFirstSeen, new HashSet<String>()); } newBridgesByFirstSeenDays.get(daysSinceFirstSeen).add( hashedFingerprint); newBridgesByFirstSeenDays.get(daysSinceFirstSeen).add( hashedHashedFingerprint); int daysSinceLastSeen = (int) ((time.currentTimeMillis() - entry.getLastSeenMillis()) / DateTimeHelper.ONE_DAY); if (!newBridgesByLastSeenDays.containsKey(daysSinceLastSeen)) { newBridgesByLastSeenDays.put(daysSinceLastSeen, new HashSet<String>()); } newBridgesByLastSeenDays.get(daysSinceLastSeen).add( hashedFingerprint); newBridgesByLastSeenDays.get(daysSinceLastSeen).add( hashedHashedFingerprint); } NodeIndex newNodeIndex = new NodeIndex(); newNodeIndex.setRelaysByConsensusWeight(newRelaysByConsensusWeight); newNodeIndex.setRelayFingerprintSummaryLines( newRelayFingerprintSummaryLines); newNodeIndex.setBridgeFingerprintSummaryLines( newBridgeFingerprintSummaryLines); newNodeIndex.setRelaysByCountryCode(newRelaysByCountryCode); newNodeIndex.setRelaysByASNumber(newRelaysByASNumber); newNodeIndex.setRelaysByFlag(newRelaysByFlag); newNodeIndex.setBridgesByFlag(newBridgesByFlag); newNodeIndex.setRelaysByContact(newRelaysByContact); newNodeIndex.setRelaysByFirstSeenDays(newRelaysByFirstSeenDays); newNodeIndex.setRelaysByLastSeenDays(newRelaysByLastSeenDays); newNodeIndex.setBridgesByFirstSeenDays(newBridgesByFirstSeenDays); newNodeIndex.setBridgesByLastSeenDays(newBridgesByLastSeenDays); newNodeIndex.setRelaysPublishedString(DateTimeHelper.format( relaysLastValidAfterMillis)); newNodeIndex.setBridgesPublishedString(DateTimeHelper.format( bridgesLastPublishedMillis)); synchronized (this) { this.lastIndexed = updateStatusMillis; this.latestNodeIndex = newNodeIndex; this.notifyAll(); } } private String formatRelaySummaryLine(NodeStatus entry) { String nickname = !entry.getNickname().equals("Unnamed") ? entry.getNickname() : null; String fingerprint = entry.getFingerprint(); String running = entry.getRunning() ? "true" : "false"; List<String> addresses = new ArrayList<String>(); addresses.add(entry.getAddress()); for (String orAddress : entry.getOrAddresses()) { addresses.add(orAddress); } for (String exitAddress : entry.getExitAddresses()) { if (!addresses.contains(exitAddress)) { addresses.add(exitAddress); } } StringBuilder addressesBuilder = new StringBuilder(); int written = 0; for (String address : addresses) { addressesBuilder.append((written++ > 0 ? "," : "") + "\"" + address.toLowerCase() + "\""); } return String.format("{%s\"f\":\"%s\",\"a\":[%s],\"r\":%s}", (nickname == null ? 
"" : "\"n\":\"" + nickname + "\","), fingerprint, addressesBuilder.toString(), running); } private String formatBridgeSummaryLine(NodeStatus entry) { String nickname = !entry.getNickname().equals("Unnamed") ? entry.getNickname() : null; String hashedFingerprint = entry.getFingerprint(); String running = entry.getRunning() ? "true" : "false"; return String.format("{%s\"h\":\"%s\",\"r\":%s}", (nickname == null ? "" : "\"n\":\"" + nickname + "\","), hashedFingerprint, running); } }
src/org/torproject/onionoo/NodeIndexer.java
package org.torproject.onionoo; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import javax.servlet.ServletContext; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; class NodeIndex { private String relaysPublishedString; public void setRelaysPublishedString(String relaysPublishedString) { this.relaysPublishedString = relaysPublishedString; } public String getRelaysPublishedString() { return relaysPublishedString; } private String bridgesPublishedString; public void setBridgesPublishedString(String bridgesPublishedString) { this.bridgesPublishedString = bridgesPublishedString; } public String getBridgesPublishedString() { return bridgesPublishedString; } private List<String> relaysByConsensusWeight; public void setRelaysByConsensusWeight( List<String> relaysByConsensusWeight) { this.relaysByConsensusWeight = relaysByConsensusWeight; } public List<String> getRelaysByConsensusWeight() { return relaysByConsensusWeight; } private Map<String, String> relayFingerprintSummaryLines; public void setRelayFingerprintSummaryLines( Map<String, String> relayFingerprintSummaryLines) { this.relayFingerprintSummaryLines = relayFingerprintSummaryLines; } public Map<String, String> getRelayFingerprintSummaryLines() { return this.relayFingerprintSummaryLines; } private Map<String, String> bridgeFingerprintSummaryLines; public void setBridgeFingerprintSummaryLines( Map<String, String> bridgeFingerprintSummaryLines) { this.bridgeFingerprintSummaryLines = bridgeFingerprintSummaryLines; } public Map<String, String> getBridgeFingerprintSummaryLines() { return this.bridgeFingerprintSummaryLines; } private Map<String, Set<String>> relaysByCountryCode = null; public void setRelaysByCountryCode( Map<String, Set<String>> relaysByCountryCode) { this.relaysByCountryCode = relaysByCountryCode; } public Map<String, Set<String>> getRelaysByCountryCode() { return relaysByCountryCode; } private Map<String, Set<String>> relaysByASNumber = null; public void setRelaysByASNumber( Map<String, Set<String>> relaysByASNumber) { this.relaysByASNumber = relaysByASNumber; } public Map<String, Set<String>> getRelaysByASNumber() { return relaysByASNumber; } private Map<String, Set<String>> relaysByFlag = null; public void setRelaysByFlag(Map<String, Set<String>> relaysByFlag) { this.relaysByFlag = relaysByFlag; } public Map<String, Set<String>> getRelaysByFlag() { return relaysByFlag; } private Map<String, Set<String>> bridgesByFlag = null; public void setBridgesByFlag(Map<String, Set<String>> bridgesByFlag) { this.bridgesByFlag = bridgesByFlag; } public Map<String, Set<String>> getBridgesByFlag() { return bridgesByFlag; } private Map<String, Set<String>> relaysByContact = null; public void setRelaysByContact( Map<String, Set<String>> relaysByContact) { this.relaysByContact = relaysByContact; } public Map<String, Set<String>> getRelaysByContact() { return relaysByContact; } private SortedMap<Integer, Set<String>> relaysByFirstSeenDays; public void setRelaysByFirstSeenDays( SortedMap<Integer, Set<String>> relaysByFirstSeenDays) { this.relaysByFirstSeenDays = relaysByFirstSeenDays; } public SortedMap<Integer, Set<String>> getRelaysByFirstSeenDays() { return relaysByFirstSeenDays; } private SortedMap<Integer, Set<String>> bridgesByFirstSeenDays; public void 
setBridgesByFirstSeenDays( SortedMap<Integer, Set<String>> bridgesByFirstSeenDays) { this.bridgesByFirstSeenDays = bridgesByFirstSeenDays; } public SortedMap<Integer, Set<String>> getBridgesByFirstSeenDays() { return bridgesByFirstSeenDays; } private SortedMap<Integer, Set<String>> relaysByLastSeenDays; public void setRelaysByLastSeenDays( SortedMap<Integer, Set<String>> relaysByLastSeenDays) { this.relaysByLastSeenDays = relaysByLastSeenDays; } public SortedMap<Integer, Set<String>> getRelaysByLastSeenDays() { return relaysByLastSeenDays; } private SortedMap<Integer, Set<String>> bridgesByLastSeenDays; public void setBridgesByLastSeenDays( SortedMap<Integer, Set<String>> bridgesByLastSeenDays) { this.bridgesByLastSeenDays = bridgesByLastSeenDays; } public SortedMap<Integer, Set<String>> getBridgesByLastSeenDays() { return bridgesByLastSeenDays; } } public class NodeIndexer implements ServletContextListener, Runnable { public void contextInitialized(ServletContextEvent contextEvent) { ServletContext servletContext = contextEvent.getServletContext(); File outDir = new File(servletContext.getInitParameter("outDir")); DocumentStore documentStore = ApplicationFactory.getDocumentStore(); documentStore.setOutDir(outDir); /* The servlet container created us, and we need to avoid that * ApplicationFactory creates another instance of us. */ ApplicationFactory.setNodeIndexer(this); this.startIndexing(); } public void contextDestroyed(ServletContextEvent contextEvent) { this.stopIndexing(); } private long lastIndexed = -1L; private NodeIndex latestNodeIndex = null; private Thread nodeIndexerThread = null; public synchronized long getLastIndexed(long timeoutMillis) { if (this.lastIndexed == 0L && this.nodeIndexerThread != null && timeoutMillis > 0L) { try { this.wait(timeoutMillis); } catch (InterruptedException e) { } } return this.lastIndexed; } public synchronized NodeIndex getLatestNodeIndex(long timeoutMillis) { if (this.latestNodeIndex == null && this.nodeIndexerThread != null && timeoutMillis > 0L) { try { this.wait(timeoutMillis); } catch (InterruptedException e) { } } return this.latestNodeIndex; } public synchronized void startIndexing() { if (this.nodeIndexerThread == null) { this.nodeIndexerThread = new Thread(this); this.nodeIndexerThread.setDaemon(true); this.nodeIndexerThread.start(); } } public void run() { while (this.nodeIndexerThread != null) { this.indexNodeStatuses(); try { Thread.sleep(DateTimeHelper.ONE_MINUTE); } catch (InterruptedException e) { } } } public synchronized void stopIndexing() { Thread indexerThread = this.nodeIndexerThread; this.nodeIndexerThread = null; indexerThread.interrupt(); } private void indexNodeStatuses() { long updateStatusMillis = -1L; DocumentStore documentStore = ApplicationFactory.getDocumentStore(); UpdateStatus updateStatus = documentStore.retrieve(UpdateStatus.class, false); if (updateStatus != null && updateStatus.getDocumentString() != null) { String updateString = updateStatus.getDocumentString(); try { updateStatusMillis = Long.parseLong(updateString.trim()); } catch (NumberFormatException e) { /* Handle below. 
*/ } } synchronized (this) { if (updateStatusMillis <= this.lastIndexed) { return; } } List<String> newRelaysByConsensusWeight = new ArrayList<String>(); Map<String, String> newRelayFingerprintSummaryLines = new HashMap<String, String>(), newBridgeFingerprintSummaryLines = new HashMap<String, String>(); Map<String, Set<String>> newRelaysByCountryCode = new HashMap<String, Set<String>>(), newRelaysByASNumber = new HashMap<String, Set<String>>(), newRelaysByFlag = new HashMap<String, Set<String>>(), newBridgesByFlag = new HashMap<String, Set<String>>(), newRelaysByContact = new HashMap<String, Set<String>>(); SortedMap<Integer, Set<String>> newRelaysByFirstSeenDays = new TreeMap<Integer, Set<String>>(), newBridgesByFirstSeenDays = new TreeMap<Integer, Set<String>>(), newRelaysByLastSeenDays = new TreeMap<Integer, Set<String>>(), newBridgesByLastSeenDays = new TreeMap<Integer, Set<String>>(); Set<NodeStatus> currentRelays = new HashSet<NodeStatus>(), currentBridges = new HashSet<NodeStatus>(); SortedSet<String> fingerprints = documentStore.list(NodeStatus.class, false); long relaysLastValidAfterMillis = 0L, bridgesLastPublishedMillis = 0L; for (String fingerprint : fingerprints) { NodeStatus node = documentStore.retrieve(NodeStatus.class, true, fingerprint); if (node.isRelay()) { relaysLastValidAfterMillis = Math.max( relaysLastValidAfterMillis, node.getLastSeenMillis()); currentRelays.add(node); } else { bridgesLastPublishedMillis = Math.max( bridgesLastPublishedMillis, node.getLastSeenMillis()); currentBridges.add(node); } } Time time = ApplicationFactory.getTime(); List<String> orderRelaysByConsensusWeight = new ArrayList<String>(); for (NodeStatus entry : currentRelays) { String fingerprint = entry.getFingerprint().toUpperCase(); String hashedFingerprint = entry.getHashedFingerprint(). 
toUpperCase(); entry.setRunning(entry.getLastSeenMillis() == relaysLastValidAfterMillis); String line = formatRelaySummaryLine(entry); newRelayFingerprintSummaryLines.put(fingerprint, line); newRelayFingerprintSummaryLines.put(hashedFingerprint, line); long consensusWeight = entry.getConsensusWeight(); orderRelaysByConsensusWeight.add(String.format("%020d %s", consensusWeight, fingerprint)); orderRelaysByConsensusWeight.add(String.format("%020d %s", consensusWeight, hashedFingerprint)); if (entry.getCountryCode() != null) { String countryCode = entry.getCountryCode(); if (!newRelaysByCountryCode.containsKey(countryCode)) { newRelaysByCountryCode.put(countryCode, new HashSet<String>()); } newRelaysByCountryCode.get(countryCode).add(fingerprint); newRelaysByCountryCode.get(countryCode).add(hashedFingerprint); } if (entry.getASNumber() != null) { String aSNumber = entry.getASNumber(); if (!newRelaysByASNumber.containsKey(aSNumber)) { newRelaysByASNumber.put(aSNumber, new HashSet<String>()); } newRelaysByASNumber.get(aSNumber).add(fingerprint); newRelaysByASNumber.get(aSNumber).add(hashedFingerprint); } for (String flag : entry.getRelayFlags()) { String flagLowerCase = flag.toLowerCase(); if (!newRelaysByFlag.containsKey(flagLowerCase)) { newRelaysByFlag.put(flagLowerCase, new HashSet<String>()); } newRelaysByFlag.get(flagLowerCase).add(fingerprint); newRelaysByFlag.get(flagLowerCase).add(hashedFingerprint); } int daysSinceFirstSeen = (int) ((time.currentTimeMillis() - entry.getFirstSeenMillis()) / DateTimeHelper.ONE_DAY); if (!newRelaysByFirstSeenDays.containsKey(daysSinceFirstSeen)) { newRelaysByFirstSeenDays.put(daysSinceFirstSeen, new HashSet<String>()); } newRelaysByFirstSeenDays.get(daysSinceFirstSeen).add(fingerprint); newRelaysByFirstSeenDays.get(daysSinceFirstSeen).add( hashedFingerprint); int daysSinceLastSeen = (int) ((time.currentTimeMillis() - entry.getLastSeenMillis()) / DateTimeHelper.ONE_DAY); if (!newRelaysByLastSeenDays.containsKey(daysSinceLastSeen)) { newRelaysByLastSeenDays.put(daysSinceLastSeen, new HashSet<String>()); } newRelaysByLastSeenDays.get(daysSinceLastSeen).add(fingerprint); newRelaysByLastSeenDays.get(daysSinceLastSeen).add( hashedFingerprint); String contact = entry.getContact(); if (!newRelaysByContact.containsKey(contact)) { newRelaysByContact.put(contact, new HashSet<String>()); } newRelaysByContact.get(contact).add(fingerprint); newRelaysByContact.get(contact).add(hashedFingerprint); } Collections.sort(orderRelaysByConsensusWeight); newRelaysByConsensusWeight = new ArrayList<String>(); for (String relay : orderRelaysByConsensusWeight) { newRelaysByConsensusWeight.add(relay.split(" ")[1]); } for (NodeStatus entry : currentBridges) { String hashedFingerprint = entry.getFingerprint().toUpperCase(); String hashedHashedFingerprint = entry.getHashedFingerprint(). 
toUpperCase(); entry.setRunning(entry.getRelayFlags().contains("Running") && entry.getLastSeenMillis() == bridgesLastPublishedMillis); String line = formatBridgeSummaryLine(entry); newBridgeFingerprintSummaryLines.put(hashedFingerprint, line); newBridgeFingerprintSummaryLines.put(hashedHashedFingerprint, line); for (String flag : entry.getRelayFlags()) { String flagLowerCase = flag.toLowerCase(); if (!newBridgesByFlag.containsKey(flagLowerCase)) { newBridgesByFlag.put(flagLowerCase, new HashSet<String>()); } newBridgesByFlag.get(flagLowerCase).add(hashedFingerprint); newBridgesByFlag.get(flagLowerCase).add( hashedHashedFingerprint); } int daysSinceFirstSeen = (int) ((time.currentTimeMillis() - entry.getFirstSeenMillis()) / DateTimeHelper.ONE_DAY); if (!newBridgesByFirstSeenDays.containsKey(daysSinceFirstSeen)) { newBridgesByFirstSeenDays.put(daysSinceFirstSeen, new HashSet<String>()); } newBridgesByFirstSeenDays.get(daysSinceFirstSeen).add( hashedFingerprint); newBridgesByFirstSeenDays.get(daysSinceFirstSeen).add( hashedHashedFingerprint); int daysSinceLastSeen = (int) ((time.currentTimeMillis() - entry.getLastSeenMillis()) / DateTimeHelper.ONE_DAY); if (!newBridgesByLastSeenDays.containsKey(daysSinceLastSeen)) { newBridgesByLastSeenDays.put(daysSinceLastSeen, new HashSet<String>()); } newBridgesByLastSeenDays.get(daysSinceLastSeen).add( hashedFingerprint); newBridgesByLastSeenDays.get(daysSinceLastSeen).add( hashedHashedFingerprint); } NodeIndex newNodeIndex = new NodeIndex(); newNodeIndex.setRelaysByConsensusWeight(newRelaysByConsensusWeight); newNodeIndex.setRelayFingerprintSummaryLines( newRelayFingerprintSummaryLines); newNodeIndex.setBridgeFingerprintSummaryLines( newBridgeFingerprintSummaryLines); newNodeIndex.setRelaysByCountryCode(newRelaysByCountryCode); newNodeIndex.setRelaysByASNumber(newRelaysByASNumber); newNodeIndex.setRelaysByFlag(newRelaysByFlag); newNodeIndex.setBridgesByFlag(newBridgesByFlag); newNodeIndex.setRelaysByContact(newRelaysByContact); newNodeIndex.setRelaysByFirstSeenDays(newRelaysByFirstSeenDays); newNodeIndex.setRelaysByLastSeenDays(newRelaysByLastSeenDays); newNodeIndex.setBridgesByFirstSeenDays(newBridgesByFirstSeenDays); newNodeIndex.setBridgesByLastSeenDays(newBridgesByLastSeenDays); newNodeIndex.setRelaysPublishedString(DateTimeHelper.format( relaysLastValidAfterMillis)); newNodeIndex.setBridgesPublishedString(DateTimeHelper.format( bridgesLastPublishedMillis)); synchronized (this) { this.lastIndexed = updateStatusMillis; this.latestNodeIndex = newNodeIndex; this.notifyAll(); } } private String formatRelaySummaryLine(NodeStatus entry) { String nickname = !entry.getNickname().equals("Unnamed") ? entry.getNickname() : null; String fingerprint = entry.getFingerprint(); String running = entry.getRunning() ? "true" : "false"; List<String> addresses = new ArrayList<String>(); addresses.add(entry.getAddress()); for (String orAddress : entry.getOrAddresses()) { addresses.add(orAddress); } for (String exitAddress : entry.getExitAddresses()) { if (!addresses.contains(exitAddress)) { addresses.add(exitAddress); } } StringBuilder addressesBuilder = new StringBuilder(); int written = 0; for (String address : addresses) { addressesBuilder.append((written++ > 0 ? "," : "") + "\"" + address.toLowerCase() + "\""); } return String.format("{%s\"f\":\"%s\",\"a\":[%s],\"r\":%s}", (nickname == null ? 
"" : "\"n\":\"" + nickname + "\","), fingerprint, addressesBuilder.toString(), running); } private String formatBridgeSummaryLine(NodeStatus entry) { String nickname = !entry.getNickname().equals("Unnamed") ? entry.getNickname() : null; String hashedFingerprint = entry.getFingerprint(); String running = entry.getRunning() ? "true" : "false"; return String.format("{%s\"h\":\"%s\",\"r\":%s}", (nickname == null ? "" : "\"n\":\"" + nickname + "\","), hashedFingerprint, running); } }
fixup! Create node index in background thread.
src/org/torproject/onionoo/NodeIndexer.java
fixup! Create node index in background thread.
Java
bsd-3-clause
853a8b2a239cd323c22874c9c8d6f5c4c0647cc7
0
xyberviri/com.xyberviri.amchat
package com.xyberviri.amchat; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.World; public class AMChatRadio { AMChatRadioManager amcRadMan; // Handle to Radio Manager private String varRadioName; // What is this Radio's Handle KXAN for example private String varRadioOwner; // Who owns me private Location varRadioLoc; // Where do i exist in 4d space. private int varRadioChannel; // What Channel are we transmitting on private int varRadioCode; // What Code are we using to encrypt chat, 0 is disabled. private String varRadioLinkPass; // What password does some one need to enter so they can join //we don't need to save any of these values //private double varRadioRange; // How far do i reach. //private int varRadioAntHt; // How Tall is my antenna. private boolean varRadioIsValid; // Is this radio valid? Can it Transmit? private boolean varRadioIsAdmin; // Is this a admin radio? private int varRadioABlocks; // Antenna blocks private int varRadioIBlocks; // Iron blocks private int varRadioGBlocks; // Gold blocks private int varRadioDBlocks; // Diamond private int varRadioOBlocks; // Obsidian //TODO: Admin Radio //TODO: Player Radio //TODO: Plugin Radio // Server Radios are like player radios but don't Require Range checks // They also don't have a physical tower and the location is set buy the admins // Encryption, Passwords and Range Checks are Disabled. // Block Validation is Disabled. // Admin List returns Server Operator List // Name Checks are Disabled private ArrayList<String> radioMembers; //List of people who are authorized to use me private ArrayList<String> radioAdmins; //List of Admins for this radio private ArrayList<AMChatRadio> radioNetwork; //These are other radios that are linked to me AMChatRadio(AMChatRadioManager amChatRadioManager){ this.amcRadMan = amChatRadioManager; this.radioMembers = new ArrayList<String>(); this.radioAdmins = new ArrayList<String>(); this.radioNetwork = new ArrayList<AMChatRadio>(); this.varRadioIsValid=false; // Is this radio valid? Can it Transmit? this.varRadioIsAdmin=false; // Is this a admin radio? this.varRadioABlocks=0; this.varRadioIBlocks=0; // Iron blocks this.varRadioGBlocks=0; // Gold blocks this.varRadioDBlocks=0; // Diamond this.varRadioOBlocks=0; // Obsidian } // //This is the handle that another Radio is using to send us a relay message // public void rRelay(AMChatRelayPacket amcRelayPacket){ // // } // // //Relay Chat Message, this has to be called from some where else. // //It will check the packet to ensure this is not spam. 
// private void sendRelay(AMChatRelayPacket amcRelayPacket){ // // } public void chkValid(){ if (varRadioIsAdmin){ setValid(true); } else { update(); } } private void update(){ //Reset block counts this.varRadioABlocks=0; // Antenna blocks this.varRadioIBlocks=0; // Iron blocks this.varRadioGBlocks=0; // Gold blocks this.varRadioDBlocks=0; // Diamond this.varRadioOBlocks=0; // Obsidian if(varRadioLoc.getBlock().getType().equals(Material.JUKEBOX)){ setValid(true); int x = varRadioLoc.getBlockX(); int y = varRadioLoc.getBlockY()+1; int z = varRadioLoc.getBlockZ(); World world = varRadioLoc.getWorld(); for(;y > varRadioLoc.getWorld().getMaxHeight();){ Material blockType = world.getBlockAt(x, y, z).getType(); if(blockType.equals(Material.IRON_FENCE)){ this.varRadioABlocks++; } else if(blockType.equals(Material.IRON_BLOCK)){ this.varRadioIBlocks++; } else if(blockType.equals(Material.GOLD_BLOCK)){ this.varRadioGBlocks++; } else if(blockType.equals(Material.DIAMOND_BLOCK)){ this.varRadioDBlocks++; } else if(blockType.equals(Material.OBSIDIAN)){ this.varRadioOBlocks++; } else{ break; } y++; }//for end } else { //We didn't find a radio block at the location were not valid. setValid(false); } } //Is this a valid Radio tower should we talk to it, does it work. public boolean isValid() {if (varRadioIsAdmin){return true;} else {return varRadioIsValid;}} public void setValid(boolean b){this.varRadioIsValid=b;} //Getter/Setter:Admin Flag public boolean isAdmin(){return varRadioIsAdmin;} public void setAdmin(boolean b){this.varRadioIsAdmin=b;} //Getter/Setter:Owner public String getOwner() {return varRadioOwner;} public void setOwner(String varRadioOwner) {this.varRadioOwner = varRadioOwner;} //Getter/Setter:Frequency public int getChan() {return varRadioChannel;} public void setChan(int varRadioChannel) {this.varRadioChannel = varRadioChannel;} //Getter/Setter:Code public int getCode() {return varRadioCode;} public void setCode(int varRadioCode) {this.varRadioCode = varRadioCode;} //Getter/Setter:Password public String getPass() {return varRadioLinkPass;} public void setPass(String varRadioLinkPass) {this.varRadioLinkPass = varRadioLinkPass;} public boolean chkPass(String varInputPass){return varRadioLinkPass.equals(varInputPass);} //Getter/Setter:Name public String getName() {return varRadioName;} public void setName(String varRadioName) {this.varRadioName = varRadioName;} //Getter/Setter:Location public Location getLoc() {return varRadioLoc;} public void setLoc(Location varRadioLoc) { this.varRadioLoc=varRadioLoc; } public void setLoc(World world,Double locX,Double locY,Double locZ) {this.varRadioLoc = new Location(world, locX, locY, locZ); } public void setLoc(String world,Double locX,Double locY,Double locZ) {this.varRadioLoc = new Location(this.amcRadMan.amcMain.getServer().getWorld(world), locX, locY, locZ);} public void setLoc(String world,String locX,String locY,String locZ) { this.varRadioLoc = new Location(this.amcRadMan.amcMain.getServer().getWorld(world),Double.valueOf(locX), Double.valueOf(locY), Double.valueOf(locZ)); } //Get/Set/Add/Del Members public ArrayList<String> getMembers() {return radioMembers;} public void setMembers(ArrayList<String>radioMembers){ this.radioMembers=radioMembers; } public boolean addMember(String radioMember) { if(!this.radioMembers.contains(radioMember)){ this.radioMembers.add(radioMember); return true; } return false; } public boolean delMember(String radioMember) { if (this.radioMembers.contains(radioMember)){ this.radioMembers.remove(radioMember); return 
true; } return false; } //Get/Set/Add/Del Admins public ArrayList<String> getAdmins(){return radioAdmins;} public void setAdmins(ArrayList<String>radioAdmins){ this.radioAdmins=radioAdmins; } public boolean addAdmin(String radioMember) { if(!this.radioAdmins.contains(radioMember)){ this.radioAdmins.add(radioMember); return true; } return false; } public boolean delAdmin(String radioMember) { if (this.radioAdmins.contains(radioMember)){ this.radioAdmins.remove(radioMember); return true; } return false; } //Get/Add/Del Network Partners. public boolean isRadioNeworkPartner(AMChatRadio otherRadio) { return radioNetwork.contains(otherRadio); } public boolean setRadioNetworkPartner(AMChatRadio otherRadio) { if (!radioNetwork.contains(otherRadio)){ this.radioNetwork.add(otherRadio); return true; } return false; } public boolean unsetRadioNetworkPartner(AMChatRadio otherRadio) { if (radioNetwork.contains(otherRadio)){ this.radioNetwork.remove(otherRadio); return true; } return false; } public Map<String, Object> getSettings() { Map<String, Object> radioSetting = new HashMap<String,Object>(); radioSetting.put("radio-id", varRadioName); radioSetting.put("owner", varRadioOwner); radioSetting.put("freq",varRadioChannel); radioSetting.put("code",this.varRadioCode); radioSetting.put("pass",this.varRadioLinkPass); radioSetting.put("locw",this.varRadioLoc.getWorld().getName()); radioSetting.put("locx",varRadioLoc.getX()); radioSetting.put("locy",varRadioLoc.getY()); radioSetting.put("locz",varRadioLoc.getZ()); radioSetting.put("admins",this.radioAdmins); radioSetting.put("members",this.radioMembers); radioSetting.put("radio-isadmin",varRadioIsAdmin); return radioSetting; } }//EOF
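The update() method in this version decides validity by requiring a JUKEBOX block at the radio's stored location and then counting IRON_FENCE, IRON_BLOCK, GOLD_BLOCK, DIAMOND_BLOCK and OBSIDIAN blocks in the column above it, stopping at the first block of any other type. As written, the loop condition y > getMaxHeight() is false when the scan starts below the world height limit, so the counters appear to stay at zero; an upward scan presumably wants y < getMaxHeight(). Below is a self-contained sketch of the counting logic with the Bukkit world lookup replaced by a plain list of material names; the class name and list-based model are illustrative only.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Self-contained sketch of the antenna scan in AMChatRadio.update(): starting
// one block above the jukebox, walk upward and tally the known tower materials,
// stopping at the first block that is not part of the tower.
public class AntennaScan {

    public static Map<String, Integer> countTower(List<String> columnAboveJukebox) {
        Map<String, Integer> counts = new LinkedHashMap<>();
        for (String material : columnAboveJukebox) {
            switch (material) {
                case "IRON_FENCE":
                case "IRON_BLOCK":
                case "GOLD_BLOCK":
                case "DIAMOND_BLOCK":
                case "OBSIDIAN":
                    counts.merge(material, 1, Integer::sum);
                    break;
                default:
                    return counts;              // first foreign block ends the tower
            }
        }
        return counts;
    }

    public static void main(String[] args) {
        List<String> column = List.of("IRON_BLOCK", "IRON_FENCE", "IRON_FENCE", "AIR");
        System.out.println(countTower(column)); // {IRON_BLOCK=1, IRON_FENCE=2}
    }
}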
src/com/xyberviri/amchat/AMChatRadio.java
package com.xyberviri.amchat; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.World; public class AMChatRadio { AMChatRadioManager amcRadMan; // Handle to Radio Manager private String varRadioName; // What is this Radio's Handle KXAN for example private String varRadioOwner; // Who owns me private Location varRadioLoc; // Where do i exist in 4d space. private int varRadioChannel; // What Channel are we transmitting on private int varRadioCode; // What Code are we using to encrypt chat, 0 is disabled. private String varRadioLinkPass; // What password does some one need to enter so they can join //we don't need to save any of these values //private double varRadioRange; // How far do i reach. //private int varRadioAntHt; // How Tall is my antenna. private boolean varRadioIsValid; // Is this radio valid? Can it Transmit? private boolean varRadioIsAdmin; // Is this a admin radio? private int varRadioABlocks; // Antenna blocks private int varRadioIBlocks; // Iron blocks private int varRadioGBlocks; // Gold blocks private int varRadioDBlocks; // Diamond private int varRadioOBlocks; // Obsidian //TODO: Admin Radio //TODO: Player Radio //TODO: Plugin Radio // Server Radios are like player radios but don't Require Range checks // They also don't have a physical tower and the location is set buy the admins // Encryption, Passwords and Range Checks are Disabled. // Block Validation is Disabled. // Admin List returns Server Operator List // Name Checks are Disabled private ArrayList<String> radioMembers; //List of people who are authorized to use me private ArrayList<String> radioAdmins; //List of Admins for this radio private ArrayList<AMChatRadio> radioNetwork; //These are other radios that are linked to me AMChatRadio(AMChatRadioManager amChatRadioManager){ this.amcRadMan = amChatRadioManager; this.radioMembers = new ArrayList<String>(); this.radioAdmins = new ArrayList<String>(); this.radioNetwork = new ArrayList<AMChatRadio>(); this.varRadioIsValid=false; // Is this radio valid? Can it Transmit? this.varRadioIsAdmin=false; // Is this a admin radio? this.varRadioABlocks=0; this.varRadioIBlocks=0; // Iron blocks this.varRadioGBlocks=0; // Gold blocks this.varRadioDBlocks=0; // Diamond this.varRadioOBlocks=0; // Obsidian } // //This is the handle that another Radio is using to send us a relay message // public void rRelay(AMChatRelayPacket amcRelayPacket){ // // } // // //Relay Chat Message, this has to be called from some where else. // //It will check the packet to ensure this is not spam. 
// private void sendRelay(AMChatRelayPacket amcRelayPacket){ // // } public void chkValid(){ if (varRadioIsAdmin){ setValid(true); } else { update(); } } private void update(){ setValid(false); this.varRadioABlocks=0; // Antenna blocks this.varRadioIBlocks=0; // Iron blocks this.varRadioGBlocks=0; // Gold blocks this.varRadioDBlocks=0; // Diamond this.varRadioOBlocks=0; // Obsidian int x = varRadioLoc.getBlockX(); int z = varRadioLoc.getBlockZ(); World world = varRadioLoc.getWorld(); for(int y = varRadioLoc.getBlockY(); y > varRadioLoc.getWorld().getMaxHeight();){ Material blockType = world.getBlockAt(x, y, z).getType(); if(blockType.equals(Material.JUKEBOX)){ //TODO:SignSearchRouting(); setValid(true); } else if(blockType.equals(Material.IRON_FENCE)){ this.varRadioABlocks++; } else if(blockType.equals(Material.IRON_BLOCK)){ this.varRadioIBlocks++; } else if(blockType.equals(Material.GOLD_BLOCK)){ this.varRadioGBlocks++; } else if(blockType.equals(Material.DIAMOND_BLOCK)){ this.varRadioDBlocks++; } else if(blockType.equals(Material.OBSIDIAN)){ this.varRadioOBlocks++; } else{ break; } y++; } } //Is this a valid Radio tower should we talk to it, does it work. public boolean isValid() {if (varRadioIsAdmin){return true;} else {return varRadioIsValid;}} public void setValid(boolean b){this.varRadioIsValid=b;} //Getter/Setter:Admin Flag public boolean isAdmin(){return varRadioIsAdmin;} public void setAdmin(boolean b){this.varRadioIsAdmin=b;} //Getter/Setter:Owner public String getOwner() {return varRadioOwner;} public void setOwner(String varRadioOwner) {this.varRadioOwner = varRadioOwner;} //Getter/Setter:Frequency public int getChan() {return varRadioChannel;} public void setChan(int varRadioChannel) {this.varRadioChannel = varRadioChannel;} //Getter/Setter:Code public int getCode() {return varRadioCode;} public void setCode(int varRadioCode) {this.varRadioCode = varRadioCode;} //Getter/Setter:Password public String getPass() {return varRadioLinkPass;} public void setPass(String varRadioLinkPass) {this.varRadioLinkPass = varRadioLinkPass;} public boolean chkPass(String varInputPass){return varRadioLinkPass.equals(varInputPass);} //Getter/Setter:Name public String getName() {return varRadioName;} public void setName(String varRadioName) {this.varRadioName = varRadioName;} //Getter/Setter:Location public Location getLoc() {return varRadioLoc;} public void setLoc(Location varRadioLoc) { this.varRadioLoc=varRadioLoc; } public void setLoc(World world,Double locX,Double locY,Double locZ) {this.varRadioLoc = new Location(world, locX, locY, locZ); } public void setLoc(String world,Double locX,Double locY,Double locZ) {this.varRadioLoc = new Location(this.amcRadMan.amcMain.getServer().getWorld(world), locX, locY, locZ);} public void setLoc(String world,String locX,String locY,String locZ) { this.varRadioLoc = new Location(this.amcRadMan.amcMain.getServer().getWorld(world),Double.valueOf(locX), Double.valueOf(locY), Double.valueOf(locZ)); } //Get/Set/Add/Del Members public ArrayList<String> getMembers() {return radioMembers;} public void setMembers(ArrayList<String>radioMembers){ this.radioMembers=radioMembers; } public boolean addMember(String radioMember) { if(!this.radioMembers.contains(radioMember)){ this.radioMembers.add(radioMember); return true; } return false; } public boolean delMember(String radioMember) { if (this.radioMembers.contains(radioMember)){ this.radioMembers.remove(radioMember); return true; } return false; } //Get/Set/Add/Del Admins public ArrayList<String> getAdmins(){return 
radioAdmins;} public void setAdmins(ArrayList<String>radioAdmins){ this.radioAdmins=radioAdmins; } public boolean addAdmin(String radioMember) { if(!this.radioAdmins.contains(radioMember)){ this.radioAdmins.add(radioMember); return true; } return false; } public boolean delAdmin(String radioMember) { if (this.radioAdmins.contains(radioMember)){ this.radioAdmins.remove(radioMember); return true; } return false; } //Get/Add/Del Network Partners. public boolean isRadioNeworkPartner(AMChatRadio otherRadio) { return radioNetwork.contains(otherRadio); } public boolean setRadioNetworkPartner(AMChatRadio otherRadio) { if (!radioNetwork.contains(otherRadio)){ this.radioNetwork.add(otherRadio); return true; } return false; } public boolean unsetRadioNetworkPartner(AMChatRadio otherRadio) { if (radioNetwork.contains(otherRadio)){ this.radioNetwork.remove(otherRadio); return true; } return false; } public Map<String, Object> getSettings() { Map<String, Object> radioSetting = new HashMap<String,Object>(); radioSetting.put("radio-id", varRadioName); radioSetting.put("owner", varRadioOwner); radioSetting.put("freq",varRadioChannel); radioSetting.put("code",this.varRadioCode); radioSetting.put("pass",this.varRadioLinkPass); radioSetting.put("locw",this.varRadioLoc.getWorld().getName()); radioSetting.put("locx",varRadioLoc.getX()); radioSetting.put("locy",varRadioLoc.getY()); radioSetting.put("locz",varRadioLoc.getZ()); radioSetting.put("admins",this.radioAdmins); radioSetting.put("members",this.radioMembers); radioSetting.put("radio-isadmin",varRadioIsAdmin); return radioSetting; } }//EOF
Updated update(); moved the initial jukebox check to the radio's location and made it the basis for starting validation.
src/com/xyberviri/amchat/AMChatRadio.java
Updated update(); moved the initial jukebox check to the radio's location and made it the basis for starting validation.
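For context on the commit message above: the pre-commit update() shown in the old contents walks the block column at the radio's location, flags the radio valid when it meets a jukebox, and counts iron-fence antenna sections and iron/gold/diamond/obsidian blocks until any other block ends the scan. The sketch below is a rough standalone rendering of that scan using a stand-in enum instead of Bukkit's World/Material types; the block names and loop bounds are simplified assumptions, so treat it as a reading aid, not the plugin's code.

import java.util.List;

// Illustrative stand-ins for the Bukkit Material values checked by the scan.
enum BlockKind { JUKEBOX, IRON_FENCE, IRON_BLOCK, GOLD_BLOCK, DIAMOND_BLOCK, OBSIDIAN, OTHER }

public class TowerScanSketch {

    // Walk the column above the radio base, counting iron-fence antenna
    // sections until a non-tower block terminates the scan.
    static int scanAntenna(List<BlockKind> column) {
        int antennaBlocks = 0;
        for (BlockKind block : column) {
            if (block == BlockKind.JUKEBOX) {
                continue; // in the plugin this is where the radio is flagged valid
            } else if (block == BlockKind.IRON_FENCE) {
                antennaBlocks++;
            } else if (block == BlockKind.IRON_BLOCK || block == BlockKind.GOLD_BLOCK
                    || block == BlockKind.DIAMOND_BLOCK || block == BlockKind.OBSIDIAN) {
                // counted in separate tallies by the plugin; ignored here
            } else {
                break; // anything else ends the tower
            }
        }
        return antennaBlocks;
    }

    public static void main(String[] args) {
        List<BlockKind> column = List.of(BlockKind.JUKEBOX, BlockKind.IRON_FENCE,
                BlockKind.IRON_FENCE, BlockKind.OTHER, BlockKind.IRON_FENCE);
        System.out.println(scanAntenna(column)); // 2, the scan stops at OTHER
    }
}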
Java
bsd-3-clause
7be532a7ed65cbf0c2d1b1258fae66f0ead69328
0
edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon
/* * $Id: DNSSupportTest.java,v 1.2 2004-02-03 21:31:40 troberts Exp $ */ /* Copyright (c) 2004 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.test; import java.io.*; import java.util.*; import java.net.*; import org.lockss.util.*; import org.lockss.test.*; /** */ public class DNSSupportTest extends LockssTestCase { // Tests that exercise multiple threads doing things that cause // DNS lookups. The reason for inculding these tests is that the // various JVMs we have used have had various problems in this // area. 
// Test parameters static int callTimeSlice = 10000; // NB - gethostbyname takes 150 seconds to time out an unresponsive // name server, so waitTimeSlice * numWaits needs to be a good deal // bigger than 150000 static int waitTimeSlice = 30000; static int numWaits = 10; static int numThreads = 5; static int delayMax = 0; static boolean enableDNS = true; // Set false to prevent actual DNS calls static boolean enablePrint = false; static boolean singleThreadDNS = false; static boolean useRealAddresses = true; static { enablePrint = System.getProperty("org.lockss.devtools.testdnssupport.enableprint", "false").equals("true"); enableDNS = System.getProperty("org.lockss.devtools.testdnssupport.enabledns", "true").equals("true"); singleThreadDNS = System.getProperty("java.net.inetaddress.singlethreaddns", "false").equals("true"); useRealAddresses = System.getProperty("java.net.inetaddress.userealaddresses", "true").equals("true"); try { delayMax = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.delaymax", "200")); } catch (NumberFormatException e) { // No action intended } try { callTimeSlice = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.calltimeslice", "10000")); } catch (NumberFormatException e) { // No action intended } try { numThreads = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.numthreads", "5")); } catch (NumberFormatException e) { // No action intended } try { numWaits = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.numwaits", "10")); } catch (NumberFormatException e) { // No action intended } try { waitTimeSlice = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.waittimeslice", "300000")); } catch (NumberFormatException e) { // No action intended } } static final int RANDOM_CHOICE = 0; static final int GET_ALL_BY_NAME = 1; static final int GET_BY_NAME = 2; static final int GET_LOCAL_HOST = 3; static final int GET_HOST_NAME = 4; static final int NUM_CHOICES = 5; static Random rand = new Random(System.currentTimeMillis()); static boolean anyInCall = false; static String[] names = { "www.lockss.org", "beta1.lockss.org", "beta2.lockss.org", "beta3.lockss.org", "beta4.lockss.org", "beta5.lockss.org", "beta6.lockss.org", "dev1.lockss.org", "dev2.lockss.org", "dev3.lockss.org", "dev4.lockss.org", "128.101.98.17", "128.103.151.241", "128.104.61.100", "128.118.88.75", "128.143.166.238", "128.163.226.19", "128.173.125.34", "128.187.233.114", "128.2.20.112", "128.210.126.253", "128.220.8.19", "128.227.228.239", "128.250.49.71", "128.253.51.42", "128.255.53.85", "128.32.238.64", "128.59.153.87", "129.133.36.186", "129.170.116.62", "129.174.55.28", "129.177.69.19", "129.186.11.214", "129.215.146.174", "129.22.96.62", "129.59.149.8", "129.69.235.7", "129.79.35.17", "130.132.21.8", "130.209.6.17", "130.233.216.6", "130.91.117.146", "131.111.163.151", "131.111.163.154", "131.193.154.81", "131.252.180.30", "132.246.153.17", "134.174.151.131", "134.197.60.201", "134.76.163.110", "137.120.22.129", "137.138.124.185", "139.80.59.42", "140.147.242.244", "141.161.91.7", "141.211.43.136", "142.150.192.44", "143.89.104.80", "145.18.84.80", "146.48.85.108", "146.6.140.9", "147.134.201.39", "148.137.188.242", "150.199.21.213", "152.1.190.9", "155.198.4.17", "157.142.66.26", "159.226.100.36", "160.36.180.55", "160.36.190.176", "160.36.192.220", "170.140.208.253", "170.140.208.43", "171.66.236.34", "171.66.236.35", "171.66.236.36", "171.66.236.38", "171.66.236.39", "171.66.236.51", 
"171.66.236.52", "171.66.236.53", "171.66.236.54", "171.66.236.55", "171.66.236.56", "172.17.8.122", "18.51.0.202", "192.16.197.238", "192.168.0.27", "193.136.149.42", "195.224.176.122", "199.75.75.172", "200.6.42.3", "204.121.6.38", "216.143.112.42", "35.8.222.234", }; int callsStarted = 0; int callsCompleted = 0; int callsExcepted = 0; int randomChoice(int range) { return (rand.nextInt(range)); } String chooseName() { if (useRealAddresses) { int i = randomChoice(names.length); return names[i]; } else { int i = randomChoice(1000000); return (i + ".test.pss.com"); } } synchronized void startCall(String target) { callsStarted++; if (enablePrint) System.err.println(Thread.currentThread().getName() + ": call " + target + " at " + (new Date()).toString()); } synchronized void completeCall(String target) { callsCompleted++; if (enablePrint) System.err.println(Thread.currentThread().getName() + ": return " + target + " at " + (new Date()).toString()); } synchronized void exceptCall(String target) { callsExcepted++; if (enablePrint) System.err.println(Thread.currentThread().getName() + ": except " + target + " at " + (new Date()).toString()); } class OneThreadOfTest implements Runnable { // Each test runs for callTimeSlice ms with numThreads // threads each doing a loop which waits for [0..delayMax] ms // then does one of GetAllByName(), getByName() and getLocalHost() // on a randomly chosen one of the array of sample names. It then // runs for a further waitTimeSlice ms during which no calls are issued // but some of the calls initiated during the first phase may return. // The test counts the number of calls and returns during both // phases. It cleans up by terminating all the threads // and determines success or failure according as the number of calls // is or is not equal to the number of returns. long ts; int dm; int choice; boolean keepGoing = true; boolean inCall; OneThreadOfTest(long t, int d, int c) { ts = t; dm = d; choice = c; inCall = false; } public void run() { long startTime = System.currentTimeMillis(); while ((System.currentTimeMillis() - startTime < ts) && keepGoing) { String name = chooseName(); if (name == null || name.length() <= 0) fail("bad name"); if (inCall) fail("inCall: " + name); int myChoice = ( choice == RANDOM_CHOICE ? 
((randomChoice(NUM_CHOICES-1)) +1 ) : choice ); startCall(name); switch (myChoice) { case RANDOM_CHOICE: fail("Choice error in test"); break; case GET_ALL_BY_NAME: anyInCall = inCall = true; if (enableDNS) try { InetAddress[] ia = InetAddress.getAllByName(name); } catch (UnknownHostException e) { exceptCall(name); } anyInCall = inCall = false; break; case GET_BY_NAME: anyInCall = inCall = true; if (enableDNS) try { InetAddress ia =InetAddress.getByName(name); } catch (UnknownHostException e) { exceptCall(name); } anyInCall = inCall = false; break; case GET_LOCAL_HOST: anyInCall = inCall = true; if (enableDNS) try { InetAddress ia = InetAddress.getLocalHost(); } catch (UnknownHostException e) { exceptCall(name); } anyInCall = inCall = false; break; case GET_HOST_NAME: anyInCall = inCall = true; if (enableDNS) try { if (Character.isDigit(name.charAt(0))) { /* Its a dotted quad */ InetAddress ia = InetAddress.getByName(name); String s = ia.getHostName(); } else { InetAddress ia = InetAddress.getByName(name); String s = ia.getHostName(); } } catch (UnknownHostException e) { exceptCall(name); } anyInCall = inCall = false; break; } completeCall(name); if (dm > 0) { long delay = randomChoice(dm); try { Thread.sleep(delay); } catch (InterruptedException e) { // No action intended } } } } public void pleaseStop() { keepGoing = false; } public boolean busy() { return inCall; } } void PerformTheTest(int numThr, int what, String prefix) { Thread thr[] = new Thread[numThr]; OneThreadOfTest otot[] = new OneThreadOfTest[numThr]; long startTime = System.currentTimeMillis(); callsStarted = 0; callsCompleted = 0; callsExcepted = 0; for (int i = 0; i < thr.length; i++) { otot[i] = new OneThreadOfTest(callTimeSlice, delayMax, what); thr[i] = new Thread(otot[i], prefix + i); thr[i].start(); } try { Thread.sleep(callTimeSlice); } catch (InterruptedException e) { // No action intended } for (int i = 0; i < thr.length; i++) { otot[i].pleaseStop(); } if (callsStarted <= 0) { fail("No calls started"); } while ((callsStarted > callsCompleted) && ((System.currentTimeMillis() - startTime) < (callTimeSlice + numWaits*waitTimeSlice))) { if (enablePrint) System.err.println("waiting for " + (callsStarted - callsCompleted) + " calls in " + prefix + " at " + (new Date()).toString()); try { Thread.sleep(waitTimeSlice); } catch (InterruptedException e) { // No action intended } } if (anyInCall) { DebugUtils.getInstance().threadDump(); TimerUtil.guaranteedSleep(1000); } for (int i = 0; i < thr.length; i++) { thr[i].stop(); thr[i] = null; } try { Thread.sleep(waitTimeSlice); } catch (InterruptedException e) { // No action intended } System.err.println("stop test " + prefix + " at " + (new Date()).toString() + " " + callsCompleted + "/" + callsStarted + " (" + callsExcepted + ")"); if (callsStarted != callsCompleted) { fail("calls started: " + callsStarted + " but calls completed: " + callsCompleted); } } public void testOneThreadAndGetAllByName() { PerformTheTest(1, GET_ALL_BY_NAME, "OneThreadAndGetAllByName_"); } public void testOneThreadAndGetByName() { PerformTheTest(1, GET_BY_NAME, "OneThreadAndGetByName_"); } public void dontTestOneThreadAndGetLocalHost() { PerformTheTest(1, GET_LOCAL_HOST, "OneThreadAndGetLocalHost_"); } public void dontTestOneThreadAndGetHostName() { PerformTheTest(1, GET_HOST_NAME, "OneThreadAndGetHostName_"); } public void dontTestOneThreadAndMixtureOfCalls() { PerformTheTest(1, RANDOM_CHOICE, "OneThreadAndRandomChoice_"); } public void testMultipleThreadsAndGetAllByName() { PerformTheTest(numThreads, 
GET_ALL_BY_NAME, "MultipleThreadsAndGetAllByName_"); } public void testMultipleThreadsAndGetByName() { PerformTheTest(numThreads, GET_BY_NAME, "MultipleThreadsAndGetByName_"); } public void dontTestMultipleThreadsAndGetLocalHost() { PerformTheTest(numThreads, GET_LOCAL_HOST, "MultipleThreadsAndGetLocalHost_"); } public void testMultipleThreadsAndGetHostName() { PerformTheTest(numThreads, GET_HOST_NAME, "MultipleThreadsAndGetHostName_"); } public void testMultipleThreadsAndMixtureOfCalls() { PerformTheTest(numThreads, RANDOM_CHOICE, "MultipleThreadsAndRandomChoice_"); } public static void main(String[] argv) { String[] testCaseList = {DNSSupportTest.class.getName()}; junit.textui.TestRunner.main(testCaseList); } }
test/src/org/lockss/test/DNSSupportTest.java
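The static initializer in the test above reads all of its tuning knobs (delay, time slices, thread and wait counts) from -D system properties, falling back to the compiled-in defaults when a value is missing or malformed. A compact standalone sketch of that pattern, using a hypothetical property name rather than one of the test's real keys:

// Sketch of property-driven test tuning with a safe fallback.
// The property name below is hypothetical, not one the test actually reads.
public class PropertyTuningSketch {

    static int intProperty(String name, int defaultValue) {
        try {
            return Integer.parseInt(System.getProperty(name, String.valueOf(defaultValue)));
        } catch (NumberFormatException e) {
            return defaultValue; // malformed value: keep the default
        }
    }

    public static void main(String[] args) {
        int threads = intProperty("example.dnstest.numthreads", 5);
        System.out.println("threads = " + threads);
    }
}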
/* * $Id: DNSSupportTest.java,v 1.1 2004-02-03 15:19:16 dshr Exp $ */ /* Copyright (c) 2004 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.devtools; import java.io.*; import java.util.*; import java.net.*; import org.lockss.util.*; import org.lockss.test.*; /** */ public class DNSSupportTest extends LockssTestCase { // Tests that exercise multiple threads doing things that cause // DNS lookups. The reason for inculding these tests is that the // various JVMs we have used have had various problems in this // area. 
// Test parameters static int callTimeSlice = 10000; // NB - gethostbyname takes 150 seconds to time out an unresponsive // name server, so waitTimeSlice * numWaits needs to be a good deal // bigger than 150000 static int waitTimeSlice = 30000; static int numWaits = 10; static int numThreads = 5; static int delayMax = 0; static boolean enableDNS = true; // Set false to prevent actual DNS calls static boolean enablePrint = false; static boolean singleThreadDNS = false; static boolean useRealAddresses = true; static { enablePrint = System.getProperty("org.lockss.devtools.testdnssupport.enableprint", "false").equals("true"); enableDNS = System.getProperty("org.lockss.devtools.testdnssupport.enabledns", "true").equals("true"); singleThreadDNS = System.getProperty("java.net.inetaddress.singlethreaddns", "false").equals("true"); useRealAddresses = System.getProperty("java.net.inetaddress.userealaddresses", "true").equals("true"); try { delayMax = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.delaymax", "200")); } catch (NumberFormatException e) { // No action intended } try { callTimeSlice = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.calltimeslice", "10000")); } catch (NumberFormatException e) { // No action intended } try { numThreads = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.numthreads", "5")); } catch (NumberFormatException e) { // No action intended } try { numWaits = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.numwaits", "10")); } catch (NumberFormatException e) { // No action intended } try { waitTimeSlice = Integer.parseInt(System.getProperty("org.lockss.devtools.testdnssupport.waittimeslice", "300000")); } catch (NumberFormatException e) { // No action intended } } static final int RANDOM_CHOICE = 0; static final int GET_ALL_BY_NAME = 1; static final int GET_BY_NAME = 2; static final int GET_LOCAL_HOST = 3; static final int GET_HOST_NAME = 4; static final int NUM_CHOICES = 5; static Random rand = new Random(System.currentTimeMillis()); static boolean anyInCall = false; static String[] names = { "www.lockss.org", "beta1.lockss.org", "beta2.lockss.org", "beta3.lockss.org", "beta4.lockss.org", "beta5.lockss.org", "beta6.lockss.org", "dev1.lockss.org", "dev2.lockss.org", "dev3.lockss.org", "dev4.lockss.org", "128.101.98.17", "128.103.151.241", "128.104.61.100", "128.118.88.75", "128.143.166.238", "128.163.226.19", "128.173.125.34", "128.187.233.114", "128.2.20.112", "128.210.126.253", "128.220.8.19", "128.227.228.239", "128.250.49.71", "128.253.51.42", "128.255.53.85", "128.32.238.64", "128.59.153.87", "129.133.36.186", "129.170.116.62", "129.174.55.28", "129.177.69.19", "129.186.11.214", "129.215.146.174", "129.22.96.62", "129.59.149.8", "129.69.235.7", "129.79.35.17", "130.132.21.8", "130.209.6.17", "130.233.216.6", "130.91.117.146", "131.111.163.151", "131.111.163.154", "131.193.154.81", "131.252.180.30", "132.246.153.17", "134.174.151.131", "134.197.60.201", "134.76.163.110", "137.120.22.129", "137.138.124.185", "139.80.59.42", "140.147.242.244", "141.161.91.7", "141.211.43.136", "142.150.192.44", "143.89.104.80", "145.18.84.80", "146.48.85.108", "146.6.140.9", "147.134.201.39", "148.137.188.242", "150.199.21.213", "152.1.190.9", "155.198.4.17", "157.142.66.26", "159.226.100.36", "160.36.180.55", "160.36.190.176", "160.36.192.220", "170.140.208.253", "170.140.208.43", "171.66.236.34", "171.66.236.35", "171.66.236.36", "171.66.236.38", "171.66.236.39", "171.66.236.51", 
"171.66.236.52", "171.66.236.53", "171.66.236.54", "171.66.236.55", "171.66.236.56", "172.17.8.122", "18.51.0.202", "192.16.197.238", "192.168.0.27", "193.136.149.42", "195.224.176.122", "199.75.75.172", "200.6.42.3", "204.121.6.38", "216.143.112.42", "35.8.222.234", }; int callsStarted = 0; int callsCompleted = 0; int callsExcepted = 0; int randomChoice(int range) { return (rand.nextInt(range)); } String chooseName() { if (useRealAddresses) { int i = randomChoice(names.length); return names[i]; } else { int i = randomChoice(1000000); return (i + ".test.pss.com"); } } synchronized void startCall(String target) { callsStarted++; if (enablePrint) System.err.println(Thread.currentThread().getName() + ": call " + target + " at " + (new Date()).toString()); } synchronized void completeCall(String target) { callsCompleted++; if (enablePrint) System.err.println(Thread.currentThread().getName() + ": return " + target + " at " + (new Date()).toString()); } synchronized void exceptCall(String target) { callsExcepted++; if (enablePrint) System.err.println(Thread.currentThread().getName() + ": except " + target + " at " + (new Date()).toString()); } class OneThreadOfTest implements Runnable { // Each test runs for callTimeSlice ms with numThreads // threads each doing a loop which waits for [0..delayMax] ms // then does one of GetAllByName(), getByName() and getLocalHost() // on a randomly chosen one of the array of sample names. It then // runs for a further waitTimeSlice ms during which no calls are issued // but some of the calls initiated during the first phase may return. // The test counts the number of calls and returns during both // phases. It cleans up by terminating all the threads // and determines success or failure according as the number of calls // is or is not equal to the number of returns. long ts; int dm; int choice; boolean keepGoing = true; boolean inCall; OneThreadOfTest(long t, int d, int c) { ts = t; dm = d; choice = c; inCall = false; } public void run() { long startTime = System.currentTimeMillis(); while ((System.currentTimeMillis() - startTime < ts) && keepGoing) { String name = chooseName(); if (name == null || name.length() <= 0) fail("bad name"); if (inCall) fail("inCall: " + name); int myChoice = ( choice == RANDOM_CHOICE ? 
((randomChoice(NUM_CHOICES-1)) +1 ) : choice ); startCall(name); switch (myChoice) { case RANDOM_CHOICE: fail("Choice error in test"); break; case GET_ALL_BY_NAME: anyInCall = inCall = true; if (enableDNS) try { InetAddress[] ia = InetAddress.getAllByName(name); } catch (UnknownHostException e) { exceptCall(name); } anyInCall = inCall = false; break; case GET_BY_NAME: anyInCall = inCall = true; if (enableDNS) try { InetAddress ia =InetAddress.getByName(name); } catch (UnknownHostException e) { exceptCall(name); } anyInCall = inCall = false; break; case GET_LOCAL_HOST: anyInCall = inCall = true; if (enableDNS) try { InetAddress ia = InetAddress.getLocalHost(); } catch (UnknownHostException e) { exceptCall(name); } anyInCall = inCall = false; break; case GET_HOST_NAME: anyInCall = inCall = true; if (enableDNS) try { if (Character.isDigit(name.charAt(0))) { /* Its a dotted quad */ InetAddress ia = InetAddress.getByName(name); String s = ia.getHostName(); } else { InetAddress ia = InetAddress.getByName(name); String s = ia.getHostName(); } } catch (UnknownHostException e) { exceptCall(name); } anyInCall = inCall = false; break; } completeCall(name); if (dm > 0) { long delay = randomChoice(dm); try { Thread.sleep(delay); } catch (InterruptedException e) { // No action intended } } } } public void pleaseStop() { keepGoing = false; } public boolean busy() { return inCall; } } void PerformTheTest(int numThr, int what, String prefix) { Thread thr[] = new Thread[numThr]; OneThreadOfTest otot[] = new OneThreadOfTest[numThr]; long startTime = System.currentTimeMillis(); callsStarted = 0; callsCompleted = 0; callsExcepted = 0; for (int i = 0; i < thr.length; i++) { otot[i] = new OneThreadOfTest(callTimeSlice, delayMax, what); thr[i] = new Thread(otot[i], prefix + i); thr[i].start(); } try { Thread.sleep(callTimeSlice); } catch (InterruptedException e) { // No action intended } for (int i = 0; i < thr.length; i++) { otot[i].pleaseStop(); } if (callsStarted <= 0) { fail("No calls started"); } while ((callsStarted > callsCompleted) && ((System.currentTimeMillis() - startTime) < (callTimeSlice + numWaits*waitTimeSlice))) { if (enablePrint) System.err.println("waiting for " + (callsStarted - callsCompleted) + " calls in " + prefix + " at " + (new Date()).toString()); try { Thread.sleep(waitTimeSlice); } catch (InterruptedException e) { // No action intended } } if (anyInCall) { DebugUtils.getInstance().threadDump(); TimerUtil.guaranteedSleep(1000); } for (int i = 0; i < thr.length; i++) { thr[i].stop(); thr[i] = null; } try { Thread.sleep(waitTimeSlice); } catch (InterruptedException e) { // No action intended } System.err.println("stop test " + prefix + " at " + (new Date()).toString() + " " + callsCompleted + "/" + callsStarted + " (" + callsExcepted + ")"); if (callsStarted != callsCompleted) { fail("calls started: " + callsStarted + " but calls completed: " + callsCompleted); } } public void testOneThreadAndGetAllByName() { PerformTheTest(1, GET_ALL_BY_NAME, "OneThreadAndGetAllByName_"); } public void testOneThreadAndGetByName() { PerformTheTest(1, GET_BY_NAME, "OneThreadAndGetByName_"); } public void dontTestOneThreadAndGetLocalHost() { PerformTheTest(1, GET_LOCAL_HOST, "OneThreadAndGetLocalHost_"); } public void dontTestOneThreadAndGetHostName() { PerformTheTest(1, GET_HOST_NAME, "OneThreadAndGetHostName_"); } public void dontTestOneThreadAndMixtureOfCalls() { PerformTheTest(1, RANDOM_CHOICE, "OneThreadAndRandomChoice_"); } public void testMultipleThreadsAndGetAllByName() { PerformTheTest(numThreads, 
GET_ALL_BY_NAME, "MultipleThreadsAndGetAllByName_"); } public void testMultipleThreadsAndGetByName() { PerformTheTest(numThreads, GET_BY_NAME, "MultipleThreadsAndGetByName_"); } public void dontTestMultipleThreadsAndGetLocalHost() { PerformTheTest(numThreads, GET_LOCAL_HOST, "MultipleThreadsAndGetLocalHost_"); } public void testMultipleThreadsAndGetHostName() { PerformTheTest(numThreads, GET_HOST_NAME, "MultipleThreadsAndGetHostName_"); } public void testMultipleThreadsAndMixtureOfCalls() { PerformTheTest(numThreads, RANDOM_CHOICE, "MultipleThreadsAndRandomChoice_"); } public static void main(String[] argv) { String[] testCaseList = {DNSSupportTest.class.getName()}; junit.textui.TestRunner.main(testCaseList); } }
fixed incorrect package

git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@2466 4f837ed2-42f5-46e7-a7a5-fa17313484d4
test/src/org/lockss/test/DNSSupportTest.java
fixed incorrect package
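The test above drives several threads through randomized InetAddress lookups and then compares the number of calls started with the number completed. Below is a much-reduced standalone sketch of that structure using an ExecutorService and AtomicInteger counters instead of the test's hand-rolled thread management; the host names, counts and timeout are placeholders, and as the test's own comment warns, real lookups against an unresponsive resolver can block for minutes.

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

// Reduced sketch of the "many threads doing DNS lookups, compare starts with
// completions" shape of DNSSupportTest. Hosts and counts are placeholders.
public class DnsStressSketch {
    public static void main(String[] args) throws InterruptedException {
        List<String> hosts = List.of("www.lockss.org", "localhost");
        AtomicInteger started = new AtomicInteger();
        AtomicInteger completed = new AtomicInteger();
        AtomicInteger failed = new AtomicInteger();

        ExecutorService pool = Executors.newFixedThreadPool(5);
        for (int i = 0; i < 20; i++) {
            String host = hosts.get(i % hosts.size());
            pool.submit(() -> {
                started.incrementAndGet();
                try {
                    InetAddress.getAllByName(host);   // the call under test
                } catch (UnknownHostException e) {
                    failed.incrementAndGet();         // failed lookups still complete
                } finally {
                    completed.incrementAndGet();
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(2, TimeUnit.MINUTES);
        System.out.println(started + " started, " + completed
                + " completed (" + failed + " failed)");
    }
}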
Java
bsd-3-clause
3599f719dc2351e0443396f15621e91a94a6244c
0
iig-uni-freiburg/SWAT20,iig-uni-freiburg/SWAT20,iig-uni-freiburg/SWAT20
package de.uni.freiburg.iig.telematik.swat.workbench; import java.awt.Dimension; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.io.File; import java.io.IOException; import javax.swing.BorderFactory; import javax.swing.ButtonGroup; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JPanel; import javax.swing.JRadioButton; import javax.swing.JToolBar; import javax.swing.SwingUtilities; import de.invation.code.toval.graphic.DisplayFrame; import de.invation.code.toval.graphic.FileNameChooser; import de.invation.code.toval.properties.PropertyException; import de.invation.code.toval.validate.ParameterException; import de.uni.freiburg.iig.telematik.sepia.graphic.AbstractGraphicalPTNet; import de.uni.freiburg.iig.telematik.sepia.petrinet.cpn.CPN; import de.uni.freiburg.iig.telematik.sepia.petrinet.ifnet.IFNet; import de.uni.freiburg.iig.telematik.sepia.petrinet.pt.PTNet; import de.uni.freiburg.iig.telematik.swat.editor.PTNetEditor; import de.uni.freiburg.iig.telematik.swat.workbench.SwatState.OperatingMode; import de.uni.freiburg.iig.telematik.swat.workbench.action.SaveActiveComponentAction; import de.uni.freiburg.iig.telematik.swat.workbench.action.SaveAllAction; import de.uni.freiburg.iig.telematik.swat.workbench.action.SwitchWorkingDirectoryAction; import de.uni.freiburg.iig.telematik.swat.workbench.listener.SwatStateListener; import de.uni.freiburg.iig.telematik.swat.workbench.properties.SwatProperties; /** * Model for Buttons. Holds buttons like "open", "save", ... With * {@link #getButtonPanel()} the buttons are available inside a {@link JPanel}. * Each button can be accesed through get(enum) * * @author richard * */ public class SwatToolbar extends JToolBar implements ActionListener, SwatStateListener { private static final long serialVersionUID = -4279345402764581310L; private static final String ACTION_COMMAND_EDIT_MODE = "editMode"; private static final String ACTION_COMMAND_ANALYSIS_MODE = "analysisMode"; private static int ICON_SIZE = 32; private static final int ICON_SPACING = 5; private JRadioButton rdbtnEdit = null; private JRadioButton rdbtnAnalysis = null; private JButton openButton = null; private SwatTabView tabView = null; private SwatTreeView treeView = null; public SwatToolbar(SwatTabView tabView, SwatTreeView treeView) { this.tabView = tabView; this.treeView = treeView; setFloatable(false); setRollover(true); setPreferredSize(new Dimension(200,ICON_SIZE+10)); add(new SwatToolbarButton(ToolbarButtonType.SAVE)); add(new SwatToolbarButton(ToolbarButtonType.SAVE_ALL)); add(getSwitchworkingDirectoryButton()); add(getNewPTNetButton()); add(getNewCPNButton()); add(getNewIFNetButton()); add(getEditRadioButton()); add(getAnalysisRadioButton()); ButtonGroup group = new ButtonGroup(); group.add(getAnalysisRadioButton()); group.add(getEditRadioButton()); getEditRadioButton().setSelected(true); try { SwatState.getInstance().addListener(this); } catch (ParameterException e) { // Cannot happen, since this is never null. } // try to get ICONSize try { ICON_SIZE = SwatProperties.getInstance().getIconSize(); } catch (Exception e) { // Cannot read property. 
Ignore and stay with default value } } private JRadioButton getAnalysisRadioButton(){ if(rdbtnAnalysis == null){ rdbtnAnalysis = new JRadioButton("Analyse"); rdbtnAnalysis.setMnemonic(KeyEvent.VK_A); rdbtnAnalysis.setActionCommand(ACTION_COMMAND_ANALYSIS_MODE); rdbtnAnalysis.addActionListener(this); } return rdbtnAnalysis; } private JRadioButton getEditRadioButton(){ if(rdbtnEdit == null){ rdbtnEdit = new JRadioButton("Edit"); rdbtnEdit.setMnemonic(KeyEvent.VK_E); rdbtnEdit.setActionCommand(ACTION_COMMAND_EDIT_MODE); rdbtnEdit.addActionListener(this); } return rdbtnEdit; } private JButton getSwitchworkingDirectoryButton(){ if (openButton == null) openButton = new SwatToolbarButton(ToolbarButtonType.SWITCH_DIRECTORY); // newButton.addActionListener(new // OpenWorkingDirectoryAction(SwingUtilities.getWindowAncestor(this))); return openButton; } private JButton getNewPTNetButton(){ //TODO Adjust Icon JButton newButton = new SwatToolbarButton(ToolbarButtonType.NEW); newButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String netName = requestFileName("Please choose a name for the new net:", "New P/T-Net"); if(netName != null){ try { File file = getAbsolutePathToWorkingDir(netName); PTNet newNet = new PTNet(); AbstractGraphicalPTNet<?, ?, ?, ?, ?, ?> test; } catch (PropertyException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (ParameterException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } //TODO Put net in components } } }); return newButton; } private JButton getNewCPNButton(){ //TODO Adjust Icon JButton newButton = new SwatToolbarButton(ToolbarButtonType.NEW); newButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String netName = requestFileName("Please choose a name for the new net:", "New P/T-Net"); if(netName != null){ CPN newNet = new CPN(); //TODO Put net in components } } }); return newButton; } private JButton getNewIFNetButton(){ JButton newButton = new SwatToolbarButton(ToolbarButtonType.NEW); newButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String netName = requestFileName("Please choose a name for the new net:", "New P/T-Net"); if(netName != null){ IFNet newNet = new IFNet(); try { // Generate corresponding file File file = getAbsolutePathToWorkingDir(netName); // TODO: PTNetEditor editor = new PTNetEditor(file); SwatComponents.getInstance().putIntoSwatComponent(editor.getNetContainer(), file); } catch (PropertyException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (ParameterException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } }); return newButton; } private String requestFileName(String message, String title){ return new FileNameChooser(SwingUtilities.getWindowAncestor(getParent()), message, title, false).requestInput(); } private File getAbsolutePathToWorkingDir(String name) throws PropertyException, ParameterException, IOException { File file = new File(SwatProperties.getInstance().getWorkingDirectory(), name); if (file.exists()) throw new ParameterException("File already exists"); //TODO: Validate, test if SWATComponent already contains net with same name... etc? 
return file; } @Override public void actionPerformed(ActionEvent e) { try { if (e.getActionCommand().equals(ACTION_COMMAND_ANALYSIS_MODE)) { SwatState.getInstance().setOperatingMode(SwatToolbar.this, OperatingMode.ANALYSIS_MODE); } else if (e.getActionCommand().equals(ACTION_COMMAND_EDIT_MODE)) { SwatState.getInstance().setOperatingMode(SwatToolbar.this, OperatingMode.EDIT_MODE); } } catch (ParameterException ex) { ex.printStackTrace(); } } public static void main(String[] args) { JPanel panel = new JPanel(); panel.add(new SwatToolbar(new SwatTabView(), new SwatTreeView())); new DisplayFrame(panel, true); } @Override public void operatingModeChanged() { switch(SwatState.getInstance().getOperatingMode()){ case ANALYSIS_MODE: getAnalysisRadioButton().setSelected(true); break; case EDIT_MODE: getEditRadioButton().setSelected(true); break; } repaint(); } public void addOpenActionListener(ActionListener listener) { getSwitchworkingDirectoryButton().addActionListener(listener); } private class SwatToolbarButton extends JButton{ private static final long serialVersionUID = 9184814296174960480L; private static final String iconNameFormat = "../resources/icons/%s/%s-%s.png"; public SwatToolbarButton(ToolbarButtonType type){ super(new ImageIcon(SwatToolbar.this.getClass().getResource( String.format(iconNameFormat, ICON_SIZE, type.toString().toLowerCase(), ICON_SIZE)))); setBorder(BorderFactory.createEmptyBorder(0, ICON_SPACING, 0, ICON_SPACING)); setBorderPainted(false); switch(type){ case IMPORT: break; case NEW: break; case OPEN: break; case SAVE: addActionListener(new SaveActiveComponentAction(tabView)); break; case SAVE_ALL: addActionListener(new SaveAllAction()); break; case SWITCH_DIRECTORY: addActionListener(new SwitchWorkingDirectoryAction(treeView, tabView)); break; } } } private enum ToolbarButtonType { NEW, SAVE, SAVE_ALL, OPEN, IMPORT, SWITCH_DIRECTORY; } // class openActionListener implements ActionListener { // // @Override // public void actionPerformed(ActionEvent e) { // WorkingDirectoryDialog dialog = new // WorkingDirectoryDialog(SwingUtilities.getWindowAncestor(SwatToolbar.this)); // String workingDirectory = dialog.getSimulationDirectory(); // try { // Update Properties and reload // SwatComponents.SwatProperties.getInstance().setWorkingDirectory(workingDirectory); // SwatProperties.getInstance().addKnownWorkingDirectory(workingDirectory); // SwatProperties.getInstance().store(); // SwatComponents.getInstance().reload(); // Inform TabView, etc... // tabView.removeAll(); // treeView.removeAndUpdateSwatComponents(); // } catch (ParameterException e2) { // JOptionPane.showMessageDialog(null, e2.getMessage(), // "Parameter Exception", JOptionPane.ERROR_MESSAGE); // e2.printStackTrace(); // } catch (IOException e3) { // JOptionPane.showMessageDialog(null, e3.getMessage(), "IO Exception", // JOptionPane.ERROR_MESSAGE); // e3.printStackTrace(); // } catch (PropertyException e1) { // JOptionPane.showMessageDialog(null, e1.getMessage(), // "Property Exception", JOptionPane.ERROR_MESSAGE); // e1.printStackTrace(); // } // // } // // } }
src/de/uni/freiburg/iig/telematik/swat/workbench/SwatToolbar.java
package de.uni.freiburg.iig.telematik.swat.workbench; import java.awt.Dimension; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.io.File; import java.io.IOException; import javax.swing.BorderFactory; import javax.swing.ButtonGroup; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JPanel; import javax.swing.JRadioButton; import javax.swing.JToolBar; import javax.swing.SwingUtilities; import de.invation.code.toval.graphic.DisplayFrame; import de.invation.code.toval.graphic.FileNameChooser; import de.invation.code.toval.properties.PropertyException; import de.invation.code.toval.validate.ParameterException; import de.uni.freiburg.iig.telematik.sepia.petrinet.cpn.CPN; import de.uni.freiburg.iig.telematik.sepia.petrinet.ifnet.IFNet; import de.uni.freiburg.iig.telematik.sepia.petrinet.pt.PTNet; import de.uni.freiburg.iig.telematik.swat.workbench.SwatState.OperatingMode; import de.uni.freiburg.iig.telematik.swat.workbench.action.SaveActiveComponentAction; import de.uni.freiburg.iig.telematik.swat.workbench.action.SaveAllAction; import de.uni.freiburg.iig.telematik.swat.workbench.action.SwitchWorkingDirectoryAction; import de.uni.freiburg.iig.telematik.swat.workbench.listener.SwatStateListener; import de.uni.freiburg.iig.telematik.swat.workbench.properties.SwatProperties; /** * Model for Buttons. Holds buttons like "open", "save", ... With * {@link #getButtonPanel()} the buttons are available inside a {@link JPanel}. * Each button can be accesed through get(enum) * * @author richard * */ public class SwatToolbar extends JToolBar implements ActionListener, SwatStateListener { private static final long serialVersionUID = -4279345402764581310L; private static final String ACTION_COMMAND_EDIT_MODE = "editMode"; private static final String ACTION_COMMAND_ANALYSIS_MODE = "analysisMode"; private static int ICON_SIZE = 32; private static final int ICON_SPACING = 5; private JRadioButton rdbtnEdit = null; private JRadioButton rdbtnAnalysis = null; private JButton openButton = null; private SwatTabView tabView = null; private SwatTreeView treeView = null; public SwatToolbar(SwatTabView tabView, SwatTreeView treeView) { this.tabView = tabView; this.treeView = treeView; setFloatable(false); setRollover(true); setPreferredSize(new Dimension(200,ICON_SIZE+10)); add(new SwatToolbarButton(ToolbarButtonType.SAVE)); add(new SwatToolbarButton(ToolbarButtonType.SAVE_ALL)); add(getSwitchworkingDirectoryButton()); add(getNewPTNetButton()); add(getNewCPNButton()); add(getNewIFNetButton()); add(getEditRadioButton()); add(getAnalysisRadioButton()); ButtonGroup group = new ButtonGroup(); group.add(getAnalysisRadioButton()); group.add(getEditRadioButton()); getEditRadioButton().setSelected(true); try { SwatState.getInstance().addListener(this); } catch (ParameterException e) { // Cannot happen, since this is never null. } // try to get ICONSize try { ICON_SIZE = SwatProperties.getInstance().getIconSize(); } catch (Exception e) { // Cannot read property. 
Ignore and stay with default value } } private JRadioButton getAnalysisRadioButton(){ if(rdbtnAnalysis == null){ rdbtnAnalysis = new JRadioButton("Analyse"); rdbtnAnalysis.setMnemonic(KeyEvent.VK_A); rdbtnAnalysis.setActionCommand(ACTION_COMMAND_ANALYSIS_MODE); rdbtnAnalysis.addActionListener(this); } return rdbtnAnalysis; } private JRadioButton getEditRadioButton(){ if(rdbtnEdit == null){ rdbtnEdit = new JRadioButton("Edit"); rdbtnEdit.setMnemonic(KeyEvent.VK_E); rdbtnEdit.setActionCommand(ACTION_COMMAND_EDIT_MODE); rdbtnEdit.addActionListener(this); } return rdbtnEdit; } private JButton getSwitchworkingDirectoryButton(){ if (openButton == null) openButton = new SwatToolbarButton(ToolbarButtonType.SWITCH_DIRECTORY); // newButton.addActionListener(new // OpenWorkingDirectoryAction(SwingUtilities.getWindowAncestor(this))); return openButton; } private JButton getNewPTNetButton(){ //TODO Adjust Icon JButton newButton = new SwatToolbarButton(ToolbarButtonType.NEW); newButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String netName = requestFileName("Please choose a name for the new net:", "New P/T-Net"); if(netName != null){ PTNet newNet = new PTNet(); //TODO Put net in components } } }); return newButton; } private JButton getNewCPNButton(){ //TODO Adjust Icon JButton newButton = new SwatToolbarButton(ToolbarButtonType.NEW); newButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String netName = requestFileName("Please choose a name for the new net:", "New P/T-Net"); if(netName != null){ CPN newNet = new CPN(); //TODO Put net in components } } }); return newButton; } private JButton getNewIFNetButton(){ //TODO Adjust Icon JButton newButton = new SwatToolbarButton(ToolbarButtonType.NEW); newButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String netName = requestFileName("Please choose a name for the new net:", "New P/T-Net"); if(netName != null){ IFNet newNet = new IFNet(); // Generate corresponding file try { File file = new File(SwatProperties.getInstance().getWorkingDirectory(), netName); // TODO: // SwatComponents.getInstance().putIntoSwatComponent(newNet, // file); } catch (PropertyException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (ParameterException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } }); return newButton; } private String requestFileName(String message, String title){ return new FileNameChooser(SwingUtilities.getWindowAncestor(getParent()), message, title, false).requestInput(); } @Override public void actionPerformed(ActionEvent e) { try { if (e.getActionCommand().equals(ACTION_COMMAND_ANALYSIS_MODE)) { SwatState.getInstance().setOperatingMode(SwatToolbar.this, OperatingMode.ANALYSIS_MODE); } else if (e.getActionCommand().equals(ACTION_COMMAND_EDIT_MODE)) { SwatState.getInstance().setOperatingMode(SwatToolbar.this, OperatingMode.EDIT_MODE); } } catch (ParameterException ex) { ex.printStackTrace(); } } public static void main(String[] args) { JPanel panel = new JPanel(); panel.add(new SwatToolbar(new SwatTabView(), new SwatTreeView())); new DisplayFrame(panel, true); } @Override public void operatingModeChanged() { switch(SwatState.getInstance().getOperatingMode()){ case ANALYSIS_MODE: getAnalysisRadioButton().setSelected(true); break; case EDIT_MODE: 
getEditRadioButton().setSelected(true); break; } repaint(); } public void addOpenActionListener(ActionListener listener) { getSwitchworkingDirectoryButton().addActionListener(listener); } private class SwatToolbarButton extends JButton{ private static final long serialVersionUID = 9184814296174960480L; private static final String iconNameFormat = "../resources/icons/%s/%s-%s.png"; public SwatToolbarButton(ToolbarButtonType type){ super(new ImageIcon(SwatToolbar.this.getClass().getResource( String.format(iconNameFormat, ICON_SIZE, type.toString().toLowerCase(), ICON_SIZE)))); setBorder(BorderFactory.createEmptyBorder(0, ICON_SPACING, 0, ICON_SPACING)); setBorderPainted(false); switch(type){ case IMPORT: break; case NEW: break; case OPEN: break; case SAVE: addActionListener(new SaveActiveComponentAction(tabView)); break; case SAVE_ALL: addActionListener(new SaveAllAction()); break; case SWITCH_DIRECTORY: addActionListener(new SwitchWorkingDirectoryAction(treeView, tabView)); break; } } } private enum ToolbarButtonType { NEW, SAVE, SAVE_ALL, OPEN, IMPORT, SWITCH_DIRECTORY; } // class openActionListener implements ActionListener { // // @Override // public void actionPerformed(ActionEvent e) { // WorkingDirectoryDialog dialog = new // WorkingDirectoryDialog(SwingUtilities.getWindowAncestor(SwatToolbar.this)); // String workingDirectory = dialog.getSimulationDirectory(); // try { // Update Properties and reload // SwatComponents.SwatProperties.getInstance().setWorkingDirectory(workingDirectory); // SwatProperties.getInstance().addKnownWorkingDirectory(workingDirectory); // SwatProperties.getInstance().store(); // SwatComponents.getInstance().reload(); // Inform TabView, etc... // tabView.removeAll(); // treeView.removeAndUpdateSwatComponents(); // } catch (ParameterException e2) { // JOptionPane.showMessageDialog(null, e2.getMessage(), // "Parameter Exception", JOptionPane.ERROR_MESSAGE); // e2.printStackTrace(); // } catch (IOException e3) { // JOptionPane.showMessageDialog(null, e3.getMessage(), "IO Exception", // JOptionPane.ERROR_MESSAGE); // e3.printStackTrace(); // } catch (PropertyException e1) { // JOptionPane.showMessageDialog(null, e1.getMessage(), // "Property Exception", JOptionPane.ERROR_MESSAGE); // e1.printStackTrace(); // } // // } // // } }
New IF-Net Button test
src/de/uni/freiburg/iig/telematik/swat/workbench/SwatToolbar.java
New IF-Net Button test
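The toolbar above builds its Edit/Analyse mode switch from lazily-initialized JRadioButton getters that are then placed in a ButtonGroup so only one mode can be selected at a time. The snippet below is a self-contained sketch of that lazy-getter plus ButtonGroup wiring; the labels, action commands and listeners are placeholders rather than the workbench's real ones.

import javax.swing.ButtonGroup;
import javax.swing.JFrame;
import javax.swing.JRadioButton;
import javax.swing.JToolBar;
import javax.swing.SwingUtilities;

// Sketch of lazily-initialized radio-button getters plus a ButtonGroup,
// mirroring the toolbar's mode switch. Labels and commands are placeholders.
public class ModeToolbarSketch {
    private JRadioButton editButton;
    private JRadioButton analysisButton;

    private JRadioButton getEditButton() {
        if (editButton == null) {
            editButton = new JRadioButton("Edit");
            editButton.setActionCommand("editMode");
            editButton.addActionListener(e -> System.out.println("edit mode"));
        }
        return editButton;
    }

    private JRadioButton getAnalysisButton() {
        if (analysisButton == null) {
            analysisButton = new JRadioButton("Analyse");
            analysisButton.setActionCommand("analysisMode");
            analysisButton.addActionListener(e -> System.out.println("analysis mode"));
        }
        return analysisButton;
    }

    private JToolBar buildToolbar() {
        JToolBar bar = new JToolBar();
        bar.add(getEditButton());
        bar.add(getAnalysisButton());
        ButtonGroup group = new ButtonGroup();   // makes the two buttons exclusive
        group.add(getEditButton());
        group.add(getAnalysisButton());
        getEditButton().setSelected(true);       // default to edit mode
        return bar;
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("Mode toolbar sketch");
            frame.add(new ModeToolbarSketch().buildToolbar());
            frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
            frame.pack();
            frame.setVisible(true);
        });
    }
}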
Java
mit
e17cf5dd82e3233bb6c00233c59bd0116ac0507b
0
SWE443-TeamRed/open-bank,SWE443-TeamRed/open-bank,SWE443-TeamRed/open-bank
package com.app.swe443.openbankapp; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBar; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.Gravity; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ListView; import android.widget.Toast; import com.android.volley.Request; import com.android.volley.RequestQueue; import com.android.volley.Response; import com.android.volley.VolleyError; import com.android.volley.toolbox.JsonObjectRequest; import com.android.volley.toolbox.StringRequest; import com.android.volley.toolbox.Volley; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.ArrayList; import static android.content.ContentValues.TAG; public class MainActivity extends AppCompatActivity implements HomeFrag.OnHomeFragMethodSelectedListener{ private DrawerLayout Drawer; private ActionBarDrawerToggle drawerToggle; private ListView drawerList; private Toolbar toolbar; public ActionBar actionBar; private Fragment home_fragment; private Fragment newhome_fragment; private Fragment users_fragment; private Fragment transaction_fragment; private Fragment open_account_fragment; private Fragment logout_fragment; private Fragment contacts_fragment; private FragmentManager fm; private FragmentTransaction transaction; private ArrayList<NavDrawerItem> navDrawerItems; private NavDrawerListAdapter adapter; private MockServerSingleton mockBankServer; String userID; String username; private ArrayList<AccountDisplay> userAccounts; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); //INITIALIZE LEFT ACTIONBAR //This sets the navigation drawer and the top actionbar. 
Drawer = (DrawerLayout) findViewById(R.id.drawer_layout); drawerList = (ListView) findViewById(R.id.left_drawer); toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); actionBar = getSupportActionBar(); drawerToggle = new ActionBarDrawerToggle(this, Drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawerToggle.setDrawerIndicatorEnabled(true); actionBar.setDisplayHomeAsUpEnabled(true); actionBar.setHomeButtonEnabled(true); Drawer.addDrawerListener(drawerToggle); drawerToggle.syncState(); fm = getSupportFragmentManager(); //Information to get when coming from login menu userID = getIntent().getStringExtra("userID"); username = getIntent().getStringExtra("username"); // //Information to obtain when coming from a previous view (AccountDetails) // /* // Get the account that these fragments will use // */ // Bundle extras = getIntent().getExtras(); // if(extras != null && extras.getString("balance") != null) { // type = extras.getString("type"); // balance = extras.getString("balance"); // accountnum = extras.getString("accountnum"); // username = extras.getString("username"); // userID = extras.getString("userID"); // System.out.println("Got username and id from loginactivity " + userID + " " + username); // } //Initlaize all necessary fragments for MainActivity initFragments(); //Draw the actionbar addDrawerItems(); getUserAccountsFromServer(); /* Add Tina's account set to MainActivity AccountDetails data structure ArrayList<Account> needed for account list in homepage */ } private void addDrawerItems() { navDrawerItems = new ArrayList<NavDrawerItem>(); String[] navMenuTitles = {"Home", "Update My Information", "Open Account", "Logout" }; // adding nav drawer items to array navDrawerItems.add(new NavDrawerItem(navMenuTitles[0].toString())); navDrawerItems.add(new NavDrawerItem(navMenuTitles[1].toString())); navDrawerItems.add(new NavDrawerItem(navMenuTitles[2].toString())); navDrawerItems.add(new NavDrawerItem(navMenuTitles[3].toString())); // setting the nav drawer list adapter adapter = new NavDrawerListAdapter(getApplicationContext(), navDrawerItems); View headerView = View.inflate(this, R.layout.navigation_drawer_header, null); drawerList.addHeaderView(headerView); drawerList.setAdapter(adapter); drawerList.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> a, View v, int position, long id) { System.out.println("CLICKED ON "+position); switch (position) { case 1: transaction = fm.beginTransaction(); transaction.replace(R.id.contentFragment, home_fragment); transaction.commit(); Drawer.closeDrawer(Gravity.LEFT); case 2: transaction = fm.beginTransaction(); transaction.replace(R.id.contentFragment, users_fragment); transaction.commit(); Drawer.closeDrawer(Gravity.LEFT); break; case 3: transaction = fm.beginTransaction(); transaction.replace(R.id.contentFragment, open_account_fragment); transaction.addToBackStack(null); transaction.commit(); Drawer.closeDrawer(Gravity.LEFT); break; case 4: Drawer.closeDrawer(Gravity.LEFT); Intent intent = new Intent(v.getContext(), LoginActivity.class); startActivity(intent); break; } } }); } public void backNavigation(View view){ System.out.println("BACKNAVIGATION FOR HEADER"); Drawer.closeDrawer(Gravity.START); } public void initFragments() { /********Home Fragment********/ home_fragment = new HomeFrag(); /********Open Account Fragment********/ open_account_fragment = new CreateBankAccountFrag(); /********Transaction Fragments********/ 
users_fragment = new UsersFrag(); open_account_fragment = new OpenAccountFrag(); } /* An account was clicked in the homepage, change the screen to display account specific tabs */ public void onAccountSelected(int id) { // The user selected the headline of an article from the HeadlinesFragment // Do something here to display that article getFragmentManager().popBackStack(); /* Send the selected account information to the AccountFrag to display the accounts details */ Intent intent = new Intent(this, AccountDetails.class); intent.putExtra("type", userAccounts.get(id).getdType()); intent.putExtra("accountnum", userAccounts.get(id).getdAccountnum()); intent.putExtra("balance", userAccounts.get(id).getdBalance()); intent.putExtra("username", username); intent.putExtra("userID", userID); startActivity(intent); } public ArrayList<AccountDisplay> getAccounts(){ return userAccounts; } public String getUsername(){ return username; } public boolean onOptionsItemSelected(MenuItem item){ System.out.println("ON OPTIONS SELECTED IN MAIN ACTIVITY "); return true; } @Override public void onBackPressed() { System.out.println("Logout by back press"); finish(); } public void getUserAccountsFromServer(){ StringRequest stringRequest; RequestQueue queue = Volley.newRequestQueue(getApplicationContext()); String url = "http://54.87.197.206:8080/SparkServer/api/v1/account?id="+userID; stringRequest = new StringRequest(Request.Method.GET, url, new Response.Listener<String>() { @Override public void onResponse(String response) { System.out.println("OBTAINED USER ACCOUNTS"); Toast.makeText(getApplicationContext(),response.toString(),Toast.LENGTH_LONG).show(); try { JSONArray obj = new JSONArray(response); userAccounts = getAccountsDisplays(obj); goToHomepage(); }catch(JSONException e){ e.printStackTrace(); Log.d(TAG,response); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { } }); System.out.println("REQUESTING USER ACCOUNTS"); queue.add(stringRequest); } public void goToHomepage(){ //Initiate homepage Fragment when app opens transaction = fm.beginTransaction(); transaction.replace(R.id.contentFragment, home_fragment);//, "Home_FRAGMENT"); transaction.addToBackStack(null); transaction.commit(); } //Read user's accounts from the server responce array public ArrayList<AccountDisplay> getAccountsDisplays(JSONArray response){ ArrayList<AccountDisplay> myDataset = new ArrayList<AccountDisplay>(); try { System.out.println("USER ACCOUNTS REPSONSE IS "+response); JSONArray accounts = (JSONArray) response.get(1); for(int i=0; i<accounts.length();i++){ try { JSONObject rec = accounts.getJSONObject(i); System.out.println("Got an account "+rec.toString()); String tempbalance = rec.getString("balance"); String temp2 = tempbalance.substring(0,tempbalance.length()-7); String decimal = ""; //TODO temporary fix. 
Look into resolving this issue further if(temp2.length() > 1) decimal = temp2.substring(temp2.length()-2,temp2.length()); else if(temp2.length()==1) decimal = temp2.substring(temp2.length()-1,temp2.length()); String whole = temp2.substring(0,temp2.length()-2); String type = rec.getString("accountType"); String accountnum = rec.getString("accountNumber"); myDataset.add(new AccountDisplay(type,accountnum,whole+"."+decimal)); }catch(JSONException e){ e.printStackTrace(); Log.d(TAG,response.toString()); } } }catch(JSONException e){ e.printStackTrace(); Log.d(TAG,response.toString()); } return myDataset; } } /* Helper class to organize attributes that will be dispalyed */ class AccountDisplay { private String dType; private String dAccountnum; private String dBalance; public AccountDisplay(String type, int num, int balance){ this.dType = type; this.dAccountnum = Integer.toString(num); this.dBalance = Integer.toString(balance); } public AccountDisplay(String type, String num, String balance){ this.dType = type; this.dAccountnum = num; this.dBalance = balance; //String decimal = balance.substring(0,balance.length()-3) } public String getdType() { return dType; } public void setdType(String dType) { this.dType = dType; } public String getdAccountnum() { return dAccountnum; } public void setdAccountnum(String dAccountnum) { this.dAccountnum = dAccountnum; } public String getdBalance() { return dBalance; } public void setdBalance(String dBalance) { this.dBalance = dBalance; } }
OpenBankApplication/app/src/main/java/com/app/swe443/openbankapp/MainActivity.java
package com.app.swe443.openbankapp; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBar; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.Gravity; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ListView; import android.widget.Toast; import com.android.volley.Request; import com.android.volley.RequestQueue; import com.android.volley.Response; import com.android.volley.VolleyError; import com.android.volley.toolbox.JsonObjectRequest; import com.android.volley.toolbox.StringRequest; import com.android.volley.toolbox.Volley; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.ArrayList; import static android.content.ContentValues.TAG; public class MainActivity extends AppCompatActivity implements HomeFrag.OnHomeFragMethodSelectedListener{ private DrawerLayout Drawer; private ActionBarDrawerToggle drawerToggle; private ListView drawerList; private Toolbar toolbar; public ActionBar actionBar; private Fragment home_fragment; private Fragment newhome_fragment; private Fragment users_fragment; private Fragment transaction_fragment; private Fragment open_account_fragment; private Fragment logout_fragment; private Fragment contacts_fragment; private FragmentManager fm; private FragmentTransaction transaction; private ArrayList<NavDrawerItem> navDrawerItems; private NavDrawerListAdapter adapter; private MockServerSingleton mockBankServer; String userID; String username; private ArrayList<AccountDisplay> userAccounts; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); //INITIALIZE LEFT ACTIONBAR //This sets the navigation drawer and the top actionbar. 
Drawer = (DrawerLayout) findViewById(R.id.drawer_layout); drawerList = (ListView) findViewById(R.id.left_drawer); toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); actionBar = getSupportActionBar(); drawerToggle = new ActionBarDrawerToggle(this, Drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawerToggle.setDrawerIndicatorEnabled(true); actionBar.setDisplayHomeAsUpEnabled(true); actionBar.setHomeButtonEnabled(true); Drawer.addDrawerListener(drawerToggle); drawerToggle.syncState(); fm = getSupportFragmentManager(); //Information to get when coming from login menu userID = getIntent().getStringExtra("userID"); username = getIntent().getStringExtra("username"); // //Information to obtain when coming from a previous view (AccountDetails) // /* // Get the account that these fragments will use // */ // Bundle extras = getIntent().getExtras(); // if(extras != null && extras.getString("balance") != null) { // type = extras.getString("type"); // balance = extras.getString("balance"); // accountnum = extras.getString("accountnum"); // username = extras.getString("username"); // userID = extras.getString("userID"); // System.out.println("Got username and id from loginactivity " + userID + " " + username); // } //Initlaize all necessary fragments for MainActivity initFragments(); //Draw the actionbar addDrawerItems(); getUserAccountsFromServer(); /* Add Tina's account set to MainActivity AccountDetails data structure ArrayList<Account> needed for account list in homepage */ } private void addDrawerItems() { navDrawerItems = new ArrayList<NavDrawerItem>(); String[] navMenuTitles = {"Home", "Update My Information", "Open Account", "Logout" }; // adding nav drawer items to array navDrawerItems.add(new NavDrawerItem(navMenuTitles[0].toString())); navDrawerItems.add(new NavDrawerItem(navMenuTitles[1].toString())); navDrawerItems.add(new NavDrawerItem(navMenuTitles[2].toString())); navDrawerItems.add(new NavDrawerItem(navMenuTitles[3].toString())); // setting the nav drawer list adapter adapter = new NavDrawerListAdapter(getApplicationContext(), navDrawerItems); View headerView = View.inflate(this, R.layout.navigation_drawer_header, null); drawerList.addHeaderView(headerView); drawerList.setAdapter(adapter); drawerList.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> a, View v, int position, long id) { System.out.println("CLICKED ON "+position); switch (position) { case 1: transaction = fm.beginTransaction(); transaction.replace(R.id.contentFragment, home_fragment); transaction.commit(); Drawer.closeDrawer(Gravity.LEFT); case 2: transaction = fm.beginTransaction(); transaction.replace(R.id.contentFragment, users_fragment); transaction.commit(); Drawer.closeDrawer(Gravity.LEFT); break; case 3: transaction = fm.beginTransaction(); transaction.replace(R.id.contentFragment, open_account_fragment); transaction.addToBackStack(null); transaction.commit(); Drawer.closeDrawer(Gravity.LEFT); break; case 4: Drawer.closeDrawer(Gravity.LEFT); Intent intent = new Intent(v.getContext(), LoginActivity.class); startActivity(intent); break; } } }); } public void backNavigation(View view){ System.out.println("BACKNAVIGATION FOR HEADER"); Drawer.closeDrawer(Gravity.START); } public void initFragments() { /********Home Fragment********/ home_fragment = new HomeFrag(); /********Open Account Fragment********/ open_account_fragment = new CreateBankAccountFrag(); /********Transaction Fragments********/ 
users_fragment = new UsersFrag(); open_account_fragment = new OpenAccountFrag(); } /* An account was clicked in the homepage, change the screen to display account specific tabs */ public void onAccountSelected(int id) { // The user selected the headline of an article from the HeadlinesFragment // Do something here to display that article getFragmentManager().popBackStack(); /* Send the selected account information to the AccountFrag to display the accounts details */ Intent intent = new Intent(this, AccountDetails.class); intent.putExtra("type", userAccounts.get(id).getdType()); intent.putExtra("accountnum", userAccounts.get(id).getdAccountnum()); intent.putExtra("balance", userAccounts.get(id).getdBalance()); intent.putExtra("username", username); intent.putExtra("userID", userID); startActivity(intent); } public ArrayList<AccountDisplay> getAccounts(){ return userAccounts; } public String getUsername(){ return username; } public boolean onOptionsItemSelected(MenuItem item){ System.out.println("ON OPTIONS SELECTED IN MAIN ACTIVITY "); return true; } @Override public void onBackPressed() { System.out.println("Logout by back press"); finish(); } public void getUserAccountsFromServer(){ StringRequest stringRequest; RequestQueue queue = Volley.newRequestQueue(getApplicationContext()); String url = "http://54.87.197.206:8080/SparkServer/api/v1/account?id="+userID; stringRequest = new StringRequest(Request.Method.GET, url, new Response.Listener<String>() { @Override public void onResponse(String response) { System.out.println("OBTAINED USER ACCOUNTS"); Toast.makeText(getApplicationContext(),response.toString(),Toast.LENGTH_LONG).show(); try { JSONArray obj = new JSONArray(response); userAccounts = getAccountsDisplays(obj); goToHomepage(); }catch(JSONException e){ e.printStackTrace(); Log.d(TAG,response); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { } }); System.out.println("REQUESTING USER ACCOUNTS"); queue.add(stringRequest); } public void goToHomepage(){ //Initiate homepage Fragment when app opens transaction = fm.beginTransaction(); transaction.replace(R.id.contentFragment, home_fragment);//, "Home_FRAGMENT"); transaction.addToBackStack(null); transaction.commit(); } //Read user's accounts from the server responce array public ArrayList<AccountDisplay> getAccountsDisplays(JSONArray response){ ArrayList<AccountDisplay> myDataset = new ArrayList<AccountDisplay>(); try { System.out.println("USER ACCOUNTS REPSONSE IS "+response); JSONArray accounts = (JSONArray) response.get(1); for(int i=0; i<accounts.length();i++){ try { JSONObject rec = accounts.getJSONObject(i); System.out.println("Got an account "+rec.toString()); String tempbalance = rec.getString("balance"); String temp2 = tempbalance.substring(0,tempbalance.length()-7); String decimal = temp2.substring(temp2.length()-2,temp2.length()); String whole = temp2.substring(0,temp2.length()-2); String type = rec.getString("accountType"); String accountnum = rec.getString("accountNumber"); myDataset.add(new AccountDisplay(type,accountnum,whole+"."+decimal)); }catch(JSONException e){ e.printStackTrace(); Log.d(TAG,response.toString()); } } }catch(JSONException e){ e.printStackTrace(); Log.d(TAG,response.toString()); } return myDataset; } } /* Helper class to organize attributes that will be dispalyed */ class AccountDisplay { private String dType; private String dAccountnum; private String dBalance; public AccountDisplay(String type, int num, int balance){ this.dType = type; this.dAccountnum = 
Integer.toString(num); this.dBalance = Integer.toString(balance); } public AccountDisplay(String type, String num, String balance){ this.dType = type; this.dAccountnum = num; this.dBalance = balance; //String decimal = balance.substring(0,balance.length()-3) } public String getdType() { return dType; } public void setdType(String dType) { this.dType = dType; } public String getdAccountnum() { return dAccountnum; } public void setdAccountnum(String dAccountnum) { this.dAccountnum = dAccountnum; } public String getdBalance() { return dBalance; } public void setdBalance(String dBalance) { this.dBalance = dBalance; } }
Committing to switch back to open_bank_server
OpenBankApplication/app/src/main/java/com/app/swe443/openbankapp/MainActivity.java
Committing to switch back to open_bank_server
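Editor's note: the commit above leaves a TODO on the balance parsing in getAccountsDisplays(), which hand-splits the trimmed string into whole and decimal parts and still misbehaves for very short values. The following is a minimal sketch of a length-safe alternative. The 7-character suffix and the reading of the remaining digits as hundredths are assumptions taken from the substring arithmetic in the commit, not a documented contract of the SparkServer API, and BalanceFormatter is a hypothetical helper, not part of the OpenBankApplication sources.

import java.math.BigDecimal;

// Hypothetical helper: converts the raw balance string into a displayable amount
// under the assumptions stated above (7-character trailing suffix, digits are hundredths).
public final class BalanceFormatter {

    private BalanceFormatter() {
    }

    public static String format(String rawBalance) {
        // Guard against null or strings too short to carry the assumed 7-character suffix.
        if (rawBalance == null || rawBalance.length() <= 7) {
            return "0.00";
        }
        // Drop the assumed suffix, then shift the decimal point two places left,
        // which covers the one- and two-digit cases the commit handles by hand.
        String digits = rawBalance.substring(0, rawBalance.length() - 7);
        return new BigDecimal(digits).movePointLeft(2).toPlainString();
    }

    public static void main(String[] args) {
        System.out.println(format("12345abcdefg")); // -> 123.45 (made-up input)
        System.out.println(format("5abcdefg"));     // -> 0.05
    }
}

Exception handling for non-numeric input is omitted for brevity; the point is only that a single BigDecimal shift replaces the length-dependent substring branches.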
Java
mit
cd8ac0662e8acb4eec43e1a02e48c6323af78e28
0
akonring/multibit-hd-modified,bitcoin-solutions/multibit-hd,oscarguindzberg/multibit-hd,oscarguindzberg/multibit-hd,akonring/multibit-hd-modified,akonring/multibit-hd-modified,bitcoin-solutions/multibit-hd,bitcoin-solutions/multibit-hd,oscarguindzberg/multibit-hd
package org.multibit.hd.brit.dto; import org.bitcoinj.core.ECKey; import org.bitcoinj.core.Utils; import org.bitcoinj.crypto.KeyCrypterScrypt; import com.google.common.base.Preconditions; import com.google.protobuf.ByteString; import org.bitcoinj.wallet.Protos; import org.spongycastle.asn1.sec.SECNamedCurves; import org.spongycastle.asn1.x9.X9ECParameters; import org.spongycastle.crypto.params.KeyParameter; import org.spongycastle.math.ec.ECPoint; import java.math.BigInteger; import java.util.Arrays; /** * <p>Data object to provide the following to BRIT wallet seed related classes:</p> * <ul> * <li>Creation of BRIT wallet id from seed</li> * </ul> * * @since 0.0.1 */ public class BRITWalletId { // The salt used in derivation of the britWalletId. // This is different from the similar process of deriving a WalletId (where the salt is 1) // This value is the 39,000,000th prime (http://primes.utm.edu/lists/small/millions/) which seemed like a nice number to use. private static final byte[] BRIT_WALLET_ID_SALT_USED_IN_SCRYPT = BigInteger.valueOf(735_632_797).toByteArray(); private final byte[] britWalletId; /** * Create a BRIT wallet id from the given seed. * This produces a BRIT wallet id from the seed using various trapdoor functions. * The seed is typically generated from the SeedPhraseGenerator#convertToSeed method. * * @param seed The seed to use in deriving the wallet id */ public BRITWalletId(byte[] seed) { Preconditions.checkNotNull(seed); BigInteger seedBigInteger = new BigInteger(1, seed); // Convert the seed to a BRIT wallet id using various trapdoor functions. // Scrypt - scrypt is run using the seedBigInteger.toString() as the 'credentials'. // This returns a byte array (normally used as an AES256 key but here passed on to more trapdoor functions). // The scrypt parameters used are the default, with a salt of BRIT_WALLET_ID_SALT_USED_IN_SCRYPT. Protos.ScryptParameters.Builder scryptParametersBuilder = Protos.ScryptParameters.newBuilder().setSalt(ByteString.copyFrom(BRIT_WALLET_ID_SALT_USED_IN_SCRYPT)); Protos.ScryptParameters scryptParameters = scryptParametersBuilder.build(); KeyCrypterScrypt keyCrypterScrypt = new KeyCrypterScrypt(scryptParameters); KeyParameter keyParameter = keyCrypterScrypt.deriveKey(seedBigInteger.toString()); byte[] derivedKey = keyParameter.getKey(); // Ensure that the seed is within the Bitcoin EC group. 
X9ECParameters params = SECNamedCurves.getByName("secp256k1"); BigInteger sizeOfGroup = params.getN(); BigInteger derivedKeyBigInteger = new BigInteger(1, derivedKey); derivedKeyBigInteger = derivedKeyBigInteger.mod(sizeOfGroup); // EC curve generator function used to convert the key just derived (a 'private key') to a 'public key' ECPoint point = ECKey.CURVE.getG().multiply(derivedKeyBigInteger); // Note the public key is not compressed byte[] publicKey = point.getEncoded(); // SHA256RIPE160 to generate final britWalletId bytes from the 'public key' britWalletId = Utils.sha256hash160(publicKey); } public BRITWalletId(String britWalletIdInHex) { britWalletId = Utils.parseAsHexOrBase58(britWalletIdInHex); } /** * @return the raw wallet id as a byte[] */ public byte[] getBytes() { byte[] copy = new byte[britWalletId.length]; System.arraycopy(britWalletId, 0, copy, 0, britWalletId.length); return copy; } @Override public String toString() { return Utils.HEX.encode(britWalletId); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; BRITWalletId that = (BRITWalletId) o; return Arrays.equals(britWalletId, that.britWalletId); } @Override public int hashCode() { return britWalletId != null ? Arrays.hashCode(britWalletId) : 0; } }
mbhd-brit/src/main/java/org/multibit/hd/brit/dto/BRITWalletId.java
package org.multibit.hd.brit.dto; import org.bitcoinj.core.ECKey; import org.bitcoinj.core.Utils; import org.bitcoinj.crypto.KeyCrypterScrypt; import com.google.common.base.Preconditions; import com.google.protobuf.ByteString; import org.bitcoinj.wallet.Protos; import org.spongycastle.asn1.sec.SECNamedCurves; import org.spongycastle.asn1.x9.X9ECParameters; import org.spongycastle.crypto.params.KeyParameter; import org.spongycastle.math.ec.ECPoint; import java.math.BigInteger; import java.util.Arrays; /** * <p>Data object to provide the following to BRIT wallet seed related classes:</p> * <ul> * <li>Creation of BRIT wallet id from seed</li> * </ul> * * @since 0.0.1 */ public class BRITWalletId { // The salt used in derivation of the britWalletId. // This is different from the similar process of deriving a WalletId (where the salt is 1) // This value is the 39,000,000th prime (http://primes.utm.edu/lists/small/millions/) which seemed like a nice number to use. private static final byte[] BRIT_WALLET_ID_SALT_USED_IN_SCRYPT = BigInteger.valueOf(735_632_797).toByteArray(); private final byte[] britWalletId; /** * Create a BRIT wallet id from the given seed. * This produces a BRIT wallet id from the seed using various trapdoor functions. * The seed is typically generated from the SeedPhraseGenerator#convertToSeed method. * * @param seed The seed to use in deriving the wallet id */ public BRITWalletId(byte[] seed) { Preconditions.checkNotNull(seed); BigInteger seedBigInteger = new BigInteger(1, seed); // Convert the seed to a BRIT wallet id using various trapdoor functions. // Scrypt - scrypt is run using the seedBigInteger.toString() as the 'credentials'. // This returns a byte array (normally used as an AES256 key but here passed on to more trapdoor functions). // The scrypt parameters used are the default, with a salt of BRIT_WALLET_ID_SALT_USED_IN_SCRYPT. Protos.ScryptParameters.Builder scryptParametersBuilder = Protos.ScryptParameters.newBuilder().setSalt(ByteString.copyFrom(BRIT_WALLET_ID_SALT_USED_IN_SCRYPT)); Protos.ScryptParameters scryptParameters = scryptParametersBuilder.build(); KeyCrypterScrypt keyCrypterScrypt = new KeyCrypterScrypt(scryptParameters); KeyParameter keyParameter = keyCrypterScrypt.deriveKey(seedBigInteger.toString()); byte[] derivedKey = keyParameter.getKey(); // Ensure that the seed is within the Bitcoin EC group. 
X9ECParameters params = SECNamedCurves.getByName("secp256k1"); BigInteger sizeOfGroup = params.getN(); BigInteger derivedKeyBigInteger = new BigInteger(1, derivedKey); derivedKeyBigInteger = derivedKeyBigInteger.mod(sizeOfGroup); // EC curve generator function used to convert the key just derived (a 'private key') to a 'public key' ECPoint point = ECKey.CURVE.getG().multiply(derivedKeyBigInteger); // Note the public key is not compressed byte[] publicKey = point.getEncoded(); // SHA256RIPE160 to generate final britWalletId bytes from the 'public key' britWalletId = Utils.sha256hash160(publicKey); } public BRITWalletId(String britWalletIdInHex) { britWalletId = Utils.parseAsHexOrBase58(britWalletIdInHex); } /** * @return the raw wallet id as a byte[] */ public byte[] getBytes() { return britWalletId; } @Override public String toString() { return Utils.HEX.encode(britWalletId); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; BRITWalletId that = (BRITWalletId) o; return Arrays.equals(britWalletId, that.britWalletId); } @Override public int hashCode() { return britWalletId != null ? Arrays.hashCode(britWalletId) : 0; } }
#323 Findbugs: Avoid exposing internal state
mbhd-brit/src/main/java/org/multibit/hd/brit/dto/BRITWalletId.java
#323 Findbugs: Avoid exposing internal state
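Editor's note: the change above replaces a direct return of the britWalletId array with an explicit System.arraycopy into a fresh buffer. The snippet below is a generic illustration of that defensive-copy idiom (the EI_EXPOSE_REP pattern Findbugs reports for returning a mutable field); the class and field names are placeholders, not multibit-hd code, and Arrays.copyOf is simply a shorter equivalent of the arraycopy used in the commit.

import java.util.Arrays;

// Placeholder class illustrating the defensive-copy idiom applied in the commit above.
final class ImmutableBytes {

    private final byte[] bytes;

    ImmutableBytes(byte[] bytes) {
        // Copy on the way in so later mutation of the caller's array cannot change this object.
        this.bytes = Arrays.copyOf(bytes, bytes.length);
    }

    byte[] getBytes() {
        // Copy on the way out so callers cannot rewrite internal state through the returned array.
        return Arrays.copyOf(bytes, bytes.length);
    }
}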
Java
mit
33bd93dc592cdb5c9a42e55e24abe924c4b3e50e
0
Nunnery/MythicDrops
package net.nunnerycode.bukkit.mythicdrops.api.enchantments; import org.bukkit.enchantments.Enchantment; /** * A class containing an {@link Enchantment} and a minimum and maximum level. */ public class MythicEnchantment { private final Enchantment enchantment; private final int minimumLevel; private final int maximumLevel; /** * Instantiate a new MythicEnchantment with an {@link Enchantment} and a minimum and maximum level. * * @param enchantment Enchantment to use * @param minimumLevel minimum level of Enchantment * @param maximumLevel maximum level of Enchantment */ public MythicEnchantment(Enchantment enchantment, int minimumLevel, int maximumLevel) { this.enchantment = enchantment; this.minimumLevel = Math.min(minimumLevel, maximumLevel); this.maximumLevel = Math.max(minimumLevel, maximumLevel); } /** * Gets the {@link Enchantment}. * * @return Enchantment */ public Enchantment getEnchantment() { return enchantment; } /** * Returns the range between the minimum and maximum levels of the Enchantment. * <br> * Equivalent of {@code {@link #getMaximumLevel()} - {@link #getMinimumLevel()} } * * @return range between the minimum and maximum levels */ public double getRange() { return getMaximumLevel() - getMinimumLevel(); } /** * Returns the maximum level of the Enchantment. * * @return maximum level */ public int getMaximumLevel() { return maximumLevel; } /** * Returns the minimum level of the Enchantment. * * @return minimum level */ public int getMinimumLevel() { return minimumLevel; } @Override public int hashCode() { int result; long temp; result = enchantment != null ? enchantment.hashCode() : 0; temp = Double.doubleToLongBits(minimumLevel); result = 31 * result + (int) (temp ^ (temp >>> 32)); temp = Double.doubleToLongBits(maximumLevel); result = 31 * result + (int) (temp ^ (temp >>> 32)); return result; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof MythicEnchantment)) return false; MythicEnchantment that = (MythicEnchantment) o; if (Double.compare(that.maximumLevel, maximumLevel) != 0) return false; if (Double.compare(that.minimumLevel, minimumLevel) != 0) return false; if (enchantment != null ? !enchantment.equals(that.enchantment) : that.enchantment != null) return false; return true; } }
MythicDrops/src/main/java/net/nunnerycode/bukkit/mythicdrops/api/enchantments/MythicEnchantment.java
package net.nunnerycode.bukkit.mythicdrops.api.enchantments; import org.bukkit.enchantments.Enchantment; public class MythicEnchantment { private final Enchantment enchantment; private final int minimumLevel; private final int maximumLevel; public MythicEnchantment(Enchantment enchantment, int minimumLevel, int maximumLevel) { this.enchantment = enchantment; this.minimumLevel = Math.min(minimumLevel, maximumLevel); this.maximumLevel = Math.max(minimumLevel, maximumLevel); } public Enchantment getEnchantment() { return enchantment; } public int getMinimumLevel() { return minimumLevel; } public int getMaximumLevel() { return maximumLevel; } public double getRange() { return getMaximumLevel() - getMinimumLevel(); } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof MythicEnchantment)) return false; MythicEnchantment that = (MythicEnchantment) o; if (Double.compare(that.maximumLevel, maximumLevel) != 0) return false; if (Double.compare(that.minimumLevel, minimumLevel) != 0) return false; if (enchantment != null ? !enchantment.equals(that.enchantment) : that.enchantment != null) return false; return true; } @Override public int hashCode() { int result; long temp; result = enchantment != null ? enchantment.hashCode() : 0; temp = Double.doubleToLongBits(minimumLevel); result = 31 * result + (int) (temp ^ (temp >>> 32)); temp = Double.doubleToLongBits(maximumLevel); result = 31 * result + (int) (temp ^ (temp >>> 32)); return result; } }
adding javadoc to MythicEnchantment
MythicDrops/src/main/java/net/nunnerycode/bukkit/mythicdrops/api/enchantments/MythicEnchantment.java
adding javadoc to MythicEnchantment
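Editor's note: the javadoc added above documents that the constructor normalizes swapped bounds and that getRange() is simply the maximum level minus the minimum level. The sketch below is a hedged usage example of that documented behaviour; the random level roll and the choice of Enchantment.DAMAGE_ALL are illustrations only and are not taken from the MythicDrops plugin.

import java.util.concurrent.ThreadLocalRandom;

import net.nunnerycode.bukkit.mythicdrops.api.enchantments.MythicEnchantment;
import org.bukkit.enchantments.Enchantment;

public class MythicEnchantmentExample {

    public static void main(String[] args) {
        // Bounds passed in the wrong order are swapped by the constructor.
        MythicEnchantment sharpness = new MythicEnchantment(Enchantment.DAMAGE_ALL, 5, 1);

        System.out.println(sharpness.getMinimumLevel()); // 1
        System.out.println(sharpness.getMaximumLevel()); // 5
        System.out.println(sharpness.getRange());        // 4.0

        // One plausible use of the range: roll an enchantment level between the bounds.
        int level = sharpness.getMinimumLevel()
                + ThreadLocalRandom.current().nextInt((int) sharpness.getRange() + 1);
        System.out.println("Rolled level " + level);
    }
}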
Java
mit
7d4cef17a65abe57d7a716f36ffdb0edf603bf76
0
nilsschmidt1337/ldparteditor,nilsschmidt1337/ldparteditor
/* MIT - License Copyright (c) 2012 - this year, Nils Schmidt Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.nschmidt.ldparteditor.data; import java.io.File; import java.io.FileNotFoundException; import java.io.UnsupportedEncodingException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; import java.util.Set; import java.util.regex.Pattern; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.custom.StyledText; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.lwjgl.opengl.GL11; import org.lwjgl.opengl.GL13; import org.lwjgl.util.vector.Vector3f; import org.nschmidt.ldparteditor.composites.Composite3D; import org.nschmidt.ldparteditor.composites.compositetab.CompositeTab; import org.nschmidt.ldparteditor.composites.compositetab.CompositeTabState; import org.nschmidt.ldparteditor.data.colour.GCChrome; import org.nschmidt.ldparteditor.data.colour.GCMatteMetal; import org.nschmidt.ldparteditor.data.colour.GCMetal; import org.nschmidt.ldparteditor.enums.MyLanguage; import org.nschmidt.ldparteditor.enums.View; import org.nschmidt.ldparteditor.helpers.composite3d.ViewIdleManager; import org.nschmidt.ldparteditor.helpers.math.HashBiMap; import org.nschmidt.ldparteditor.i18n.I18n; import org.nschmidt.ldparteditor.logger.NLogger; import org.nschmidt.ldparteditor.project.Project; import org.nschmidt.ldparteditor.resources.ResourceManager; import org.nschmidt.ldparteditor.shells.editor3d.Editor3DWindow; import org.nschmidt.ldparteditor.shells.editortext.EditorTextWindow; import org.nschmidt.ldparteditor.text.DatParser; import org.nschmidt.ldparteditor.text.HeaderState; import org.nschmidt.ldparteditor.text.LDParsingException; import org.nschmidt.ldparteditor.text.StringHelper; import org.nschmidt.ldparteditor.text.UTF8BufferedReader; import org.nschmidt.ldparteditor.text.UTF8PrintWriter; import org.nschmidt.ldparteditor.widgets.TreeItem; import org.nschmidt.ldparteditor.workbench.WorkbenchManager; /** * The DAT file class * * @author nils * */ public final class DatFile { private static final Pattern pattern = Pattern.compile("\r?\n|\r"); //$NON-NLS-1$ private final boolean readOnly; private boolean drawSelection = true; private final GData drawChainAnchor = new GDataInit(View.DUMMY_REFERENCE); private final HashBiMap<Integer, GData> drawPerLine = new HashBiMap<Integer, GData>(); private final HashMap<Integer, GData> 
copy_drawPerLine = new HashMap<Integer, GData>(); private static final GTexture CUBEMAP_TEXTURE = new GTexture(TexType.PLANAR, "cmap.png", null, 1, new Vector3f(1,0,0), new Vector3f(1,1,0), new Vector3f(1,1,1), 0, 0); //$NON-NLS-1$ private static final GDataTEX CUBEMAP = new GDataTEX(null, "", TexMeta.NEXT, CUBEMAP_TEXTURE); //$NON-NLS-1$ private static final GTexture CUBEMAP_MATTE_TEXTURE = new GTexture(TexType.PLANAR, "matte_metal.png", null, 2, new Vector3f(1,0,0), new Vector3f(1,1,0), new Vector3f(1,1,1), 0, 0); //$NON-NLS-1$ private static final GDataTEX CUBEMAP_MATTE = new GDataTEX(null, "", TexMeta.NEXT, CUBEMAP_MATTE_TEXTURE); //$NON-NLS-1$ private static final GTexture CUBEMAP_METAL_TEXTURE = new GTexture(TexType.PLANAR, "metal.png", null, 2, new Vector3f(1,0,0), new Vector3f(1,1,0), new Vector3f(1,1,1), 0, 0); //$NON-NLS-1$ private static final GDataTEX CUBEMAP_METAL = new GDataTEX(null, "", TexMeta.NEXT, CUBEMAP_METAL_TEXTURE); //$NON-NLS-1$ private final VertexManager vertices = new VertexManager(this); private Vertex nearestObjVertex1 = null; private Vertex nearestObjVertex2 = null; private Vertex objVertex1 = null; private Vertex objVertex2 = null; private Vertex objVertex3 = null; private Vertex objVertex4 = null; private boolean virtual; private boolean projectFile; private DatType type = DatType.PART; private long lastModified = 0; private String description; private String oldName; private String newName; private String text = ""; //$NON-NLS-1$ private String originalText = ""; //$NON-NLS-1$ private Date lastSavedOpened = new Date(); private GData drawChainTail = null; private Composite3D lastSelectedComposite = null; private static Composite3D lastHoveredComposite = null; private HistoryManager history = new HistoryManager(this); public DatFile(String path) { this.projectFile = true; this.oldName = path; this.newName = path; this.readOnly = false; this.setVirtual(true); this.setType(DatType.PART); } public DatFile(String path, String description, boolean isReadOnly, DatType type) { this.projectFile = false; this.description = description; this.oldName = path; this.newName = path; this.readOnly = isReadOnly; this.setVirtual(false); this.setType(type); } /** * Draw the DAT file on the Composite3D This method is not intended for * preview renderings, since its too mighty for it * * @param c3d */ public synchronized void draw(Composite3D c3d) { GDataCSG.resetCSG(); GData data2draw = drawChainAnchor; int renderMode = c3d.getRenderMode(); if (!c3d.isDrawingSolidMaterials() && renderMode != 5) vertices.draw(c3d); if (Editor3DWindow.getWindow().isAddingCondlines()) renderMode = 6; switch (renderMode) { case -1: // Wireframe break; case 0: // No BFC data2draw.draw(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { data2draw.draw(c3d); } break; case 1: // Random Colours data2draw.drawRandomColours(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { data2draw.drawRandomColours(c3d); } break; case 2: // Front-Backface BFC data2draw.drawBFC(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { switch (GData.accumClip) { case 0: data2draw.drawBFC(c3d); break; default: data2draw.draw(c3d); break; } } break; case 3: // Backface only BFC data2draw.drawBFC_backOnly(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { switch (GData.accumClip) { case 0: data2draw.drawBFC_backOnly(c3d); break; default: data2draw.draw(c3d); break; } 
} break; case 4: // Real BFC data2draw.drawBFC_Colour(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { switch (GData.accumClip) { case 0: data2draw.drawBFC_Colour(c3d); break; default: data2draw.draw(c3d); break; } } break; case 5: // Real BFC with texture mapping GL11.glEnable(GL11.GL_TEXTURE_2D); data2draw.drawBFC_Textured(c3d); vertices.fillVertexNormalCache(data2draw); data2draw.drawBFC_Textured(c3d); CUBEMAP.drawBFC_Textured(c3d); new GData3(new Vertex(0,0,0), new Vertex(1,0,0), new Vertex(1,1,0), View.DUMMY_REFERENCE, new GColour(0, 0, 0, 0, 0, new GCChrome())).drawBFC_Textured(c3d.getComposite3D()); CUBEMAP_MATTE.drawBFC_Textured(c3d); new GData3(new Vertex(0,0,0), new Vertex(1,0,0), new Vertex(1,1,0), View.DUMMY_REFERENCE, new GColour(0, 0, 0, 0, 0, new GCMatteMetal())).drawBFC_Textured(c3d.getComposite3D()); CUBEMAP_METAL.drawBFC_Textured(c3d); new GData3(new Vertex(0,0,0), new Vertex(1,0,0), new Vertex(1,1,0), View.DUMMY_REFERENCE, new GColour(0, 0, 0, 0, 0, new GCMetal())).drawBFC_Textured(c3d.getComposite3D()); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { data2draw.drawBFC_Textured(c3d); } vertices.clearVertexNormalCache(); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 0); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 2); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 4); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 8); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 16); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL11.glDisable(GL11.GL_TEXTURE_2D); break; case 6: // Special mode for "Add condlines" data2draw.drawWhileAddCondlines(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { data2draw.drawWhileAddCondlines(c3d); } break; default: break; } if (c3d.isDrawingSolidMaterials() && renderMode != 5) vertices.showHidden(); } public synchronized void getBFCorientationMap(HashMap<GData, Byte> bfcMap) { GDataCSG.resetCSG(); GData data2draw = drawChainAnchor; data2draw.getBFCorientationMap(bfcMap); while ((data2draw = data2draw.getNext()) != null) { data2draw.getBFCorientationMap(bfcMap); } } /** * @return the real filename from the file stored on disk */ public String getOldName() { return oldName; } /** * Sets the real filename of the file stored on disk * * @param oldName * the real filename */ public void setOldName(String oldName) { this.oldName = oldName; } /** * @return the new filename from the file to be stored. It's typically the * same as the old name. 
*/ public String getNewName() { return newName; } /** * Sets the new filename for the file to be stored * * @param newName * the new filename */ public void setNewName(String newName) { this.newName = newName; } public String getDescription() { return description; } public void setDescription(String d) { description = d; } /** * @return {@code true} if the file is read-only */ public boolean isReadOnly() { return readOnly; } /** * @return the text content of this dat file */ public String getText() { final boolean modified = vertices.isModified(); if (modified || Project.getUnsavedFiles().contains(this)) { if (modified) { StringBuilder sb = new StringBuilder(); GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null && data2draw.getNext() != null) { sb.append(data2draw.toString()); sb.append(StringHelper.getLineDelimiter()); } if (data2draw == null) { vertices.setModified(false, true); } else { sb.append(data2draw.toString()); text = sb.toString(); } } final GData descriptionline = drawChainAnchor.getNext(); if (descriptionline != null) { String descr = descriptionline.toString(); if (descr.length() > 1) descr = descr.substring(2); description = " - " + descr; //$NON-NLS-1$ } } else { parseForData(false); } return text; } public String getOriginalText() { return originalText; } public void setOriginalText(String ot) { setLastSavedOpened(new Date()); originalText = ot; } /** * @param text * the text content of this dat file to set */ public void setText(String text) { final GData descriptionline = drawChainAnchor.getNext(); if (descriptionline != null) { String descr = descriptionline.toString(); if (descr.length() > 1) descr = descr.substring(2); description = " - " + descr; //$NON-NLS-1$ } this.text = text; } /** * @return a list of codelines from this DAT file. * <br> This functions reads the contents from the harddrive if the file was not loaded before. 
* <br> The list will be empty if the file can't be read or can't be found */ public ArrayList<String> getSource() { ArrayList<String> result = new ArrayList<String>(); if (originalText.isEmpty() && new File(this.getOldName()).exists()) { try { UTF8BufferedReader reader = new UTF8BufferedReader(this.getOldName()); while (true) { String line2 = reader.readLine(); if (line2 == null) { break; } result.add(line2); } reader.close(); } catch (FileNotFoundException e) { } catch (LDParsingException e) { } catch (UnsupportedEncodingException e) {} } else { GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null) { result.add(data2draw.toString()); } } return result; } /** * Parses the opened dat file for errors and correct data (in realtime, only * when opened in text editor) * * @param compositeText * @param hints * @param warnings * @param errors */ public void parseForErrorAndData(StyledText compositeText, int startOffset_pos, int endOffset_pos, int length, String insertedText, String replacedText, TreeItem hints, TreeItem warnings, TreeItem errors) { HeaderState.state().setState(HeaderState._99_DONE); Set<String> alreadyParsed = new HashSet<String>(); alreadyParsed.add(getShortName()); GData anchorData = drawChainAnchor; GData targetData = null; long start = System.currentTimeMillis(); int startLine = compositeText.getLineAtOffset(startOffset_pos); int startOffset = compositeText.getOffsetAtLine(startLine); int endLine = compositeText.getLineAtOffset(endOffset_pos); int endOffset = compositeText.getOffsetAtLine(endLine) + compositeText.getLine(endLine).length(); startLine++; endLine++; boolean tailRemoved = false; // Dispose overwritten content (and so the connected 3D info) final int rlength = replacedText.length(); if (rlength > 0) { // Difficult, because the old text was overwritten >= 1 old line // change final int newLineCount = endLine - startLine + 1; final int affectedOldLineCount = StringHelper.countOccurences(StringHelper.getLineDelimiter(), replacedText) + 1; final int oldEndLine = startLine + affectedOldLineCount - 1; // Set the anchor GData linkedDraw = drawPerLine.getValue(startLine); if (linkedDraw != null) { GData newAnchor = linkedDraw.getBefore(); if (newAnchor != null) anchorData = newAnchor; } // Set the target GData linkedDraw2 = drawPerLine.getValue(oldEndLine); if (linkedDraw2 != null) { targetData = linkedDraw2.getNext(); } // Remove overwritten content int actionStartLine = startLine; for (int i = 0; i < affectedOldLineCount; i++) { tailRemoved = vertices.remove(drawPerLine.getValue(actionStartLine)) | tailRemoved; drawPerLine.removeByKey(actionStartLine); actionStartLine++; } if (affectedOldLineCount != newLineCount && !drawPerLine.isEmpty()) { // Update references at the tail int diff = newLineCount - affectedOldLineCount; actionStartLine = oldEndLine + 1; GData data; while ((data = drawPerLine.getValue(actionStartLine)) != null) { copy_drawPerLine.put(actionStartLine + diff, data); drawPerLine.removeByKey(actionStartLine); actionStartLine++; } for (Integer i : copy_drawPerLine.keySet()) { GData nd = copy_drawPerLine.get(i); drawPerLine.put(i, nd); } copy_drawPerLine.clear(); } } else if (length > 0) { // Easy, because only new text was inserted = 1 old line change int newLineCount = endLine - startLine; // Insertion within one line if (startLine == endLine) { // The target data is the next data from the old line GData linkedDraw = drawPerLine.getValue(startLine); if (linkedDraw != null) { targetData = linkedDraw.getNext(); // And the anchor 
data is the data before the old line GData newAnchor = linkedDraw.getBefore(); if (newAnchor != null) anchorData = newAnchor; // And the old line data has to be removed tailRemoved = vertices.remove(drawPerLine.getValue(startLine)) | tailRemoved; drawPerLine.removeByKey(startLine); } } else { // The target data is the next data from the old line GData linkedDraw = drawPerLine.getValue(startLine); if (linkedDraw != null) { targetData = linkedDraw.getNext(); // And the anchor data is the data before the old line GData newAnchor = linkedDraw.getBefore(); if (newAnchor != null) { anchorData = newAnchor; } // And the old line data has to be moved tailRemoved = vertices.remove(drawPerLine.getValue(startLine)) | tailRemoved; drawPerLine.removeByKey(startLine); int lcount = compositeText.getLineCount() - newLineCount + 1; for (int i = startLine + 1; i < lcount; i++) { copy_drawPerLine.put(i + newLineCount, drawPerLine.getValue(i)); drawPerLine.removeByKey(i); } for (Integer i : copy_drawPerLine.keySet()) { GData nd = copy_drawPerLine.get(i); drawPerLine.put(i, nd); } copy_drawPerLine.clear(); } } } NLogger.debug(getClass(), "Time after OpenGL data change: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ warnings.removeWithinPosition(compositeText, startOffset, endOffset, length - rlength); errors.removeWithinPosition(compositeText, startOffset, endOffset, length - rlength); int offset = compositeText.getLineDelimiter().length(); int position = startOffset; ArrayList<ParsingResult> results; // Clear the cache.. GData.parsedLines.clear(); GData.CACHE_parsedFilesSource.clear(); GData.CACHE_warningsAndErrors.clear(); String line; GData gdata; for (int lineNumber = startLine; lineNumber < endLine + 1; lineNumber++) { line = compositeText.getLine(lineNumber - 1); if (isNotBlank(line)) { results = DatParser.parseLine(line, lineNumber, 0, 0.5f, 0.5f, 0.5f, 1.1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, false, alreadyParsed, true); gdata = results.get(0).getGraphicalData(); if (gdata == null) { gdata = new GData0(line); } else { gdata.setText(line); GData.CACHE_warningsAndErrors.put(gdata, results); } anchorData.setNext(gdata); anchorData = gdata; drawPerLine.put(lineNumber, gdata); for (ParsingResult result : results) { switch (result.getTypeNumber()) { case ResultType.WARN: // Warning { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(warnings, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_warning.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } break; case ResultType.ERROR: // Error { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(errors, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_error.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } break; default: // Hint break; } } } else { gdata = new GData0(line); anchorData.setNext(gdata); anchorData 
= gdata; drawPerLine.put(lineNumber, gdata); } position += line.length() + offset; } anchorData.setNext(targetData); // Get tail if (tailRemoved || drawChainTail == null) { drawChainTail = anchorData; } hints.sortItems(); warnings.sortItems(); errors.sortItems(); hints.getParent().build(); if (DatParser.isUpatePngImages()) { Editor3DWindow.getWindow().updateBgPictureTab(); DatParser.setUpatePngImages(false); } NLogger.debug(getClass(), "Total time to parse: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ vertices.validateState(); NLogger.debug(getClass(), "Total time to parse + validate: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } /** * Parses the opened dat file for errors and correct data (in realtime, only * when opened in text editor) * * @param compositeText * @param hints * @param warnings * @param errors */ public void parseForError(StyledText compositeText, int startOffset_pos, int endOffset_pos, int length, String insertedText, String replacedText, TreeItem hints, TreeItem warnings, TreeItem errors, boolean unselectBgPicture) { if (compositeText.getText().isEmpty()) { return; } Set<String> alreadyParsed = new HashSet<String>(); alreadyParsed.add(getShortName()); long start = System.currentTimeMillis(); int startLine = compositeText.getLineAtOffset(startOffset_pos); int startOffset = compositeText.getOffsetAtLine(startLine); int endLine = compositeText.getLineAtOffset(endOffset_pos); int endOffset = compositeText.getOffsetAtLine(endLine) + compositeText.getLine(endLine).length(); startLine++; endLine++; int rlength = replacedText.length(); warnings.removeWithinPosition(compositeText, startOffset, endOffset, length - rlength); errors.removeWithinPosition(compositeText, startOffset, endOffset, length - rlength); int offset = StringHelper.getLineDelimiter().length(); int position = startOffset; ArrayList<ParsingResult> results; // Clear the cache.. 
GData.parsedLines.clear(); GData.CACHE_parsedFilesSource.clear(); String line; for (int lineNumber = startLine; lineNumber < endLine + 1; lineNumber++) { line = compositeText.getLine(lineNumber - 1); if (isNotBlank(line)) { GData gd = drawPerLine.getValue(lineNumber); results = GData.CACHE_warningsAndErrors.get(gd); if (results == null) { results = DatParser.parseLine(line, lineNumber, 0, 0.5f, 0.5f, 0.5f, 1.1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, true, alreadyParsed, true); GData.CACHE_warningsAndErrors.put(gd, results); } for (ParsingResult result : results) { switch (result.getTypeNumber()) { case ResultType.WARN: // Warning { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(warnings, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_warning.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } break; case ResultType.ERROR: // Error { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(errors, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_error.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } break; default: // Hint break; } } } position += line.length() + offset; } if (unselectBgPicture) { vertices.setSelectedBgPicture(null); vertices.setSelectedBgPictureIndex(0); Editor3DWindow.getWindow().updateBgPictureTab(); } hints.sortItems(); warnings.sortItems(); errors.sortItems(); hints.getParent().build(); NLogger.debug(getClass(), "Total time to parse (error check only): {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ vertices.validateState(); NLogger.debug(getClass(), "Total time to parse + validate: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } private boolean isNotBlank(String str) { int strLen; if (str == null || (strLen = str.length()) == 0) { return false; } for (int i = 0; i < strLen; i++) { if (Character.isWhitespace(str.charAt(i)) == false) { return true; } } return false; } public void parseForData(boolean addHistory) { Project.getParsedFiles().add(this); Set<String> alreadyParsed = new HashSet<String>(); alreadyParsed.add(getShortName()); String[] lines; if (Project.getUnsavedFiles().contains(this) ) { lines = pattern.split(text, -1); if (lines.length == 0) { lines = new String[]{""}; //$NON-NLS-1$ } } else { StringBuilder sb = new StringBuilder(); ArrayList<String> lines2 = new ArrayList<String>(4096); try { UTF8BufferedReader reader = new UTF8BufferedReader(this.getOldName()); String line = reader.readLine(); if (line != null) { sb.append(line); lines2.add(line); while (true) { String line2 = reader.readLine(); if (line2 == null) { break; } sb.append(StringHelper.getLineDelimiter()); sb.append(line2); lines2.add(line2); } } else { lines2.add(""); //$NON-NLS-1$ } reader.close(); lastModified = new File(getOldName()).lastModified(); } catch (FileNotFoundException e) { } catch (LDParsingException e) { } catch 
(UnsupportedEncodingException e) {} lines = lines2.toArray(new String[lines2.size()]); setLastSavedOpened(new Date()); originalText = sb.toString(); text = originalText; } GData anchorData = drawChainAnchor; GData targetData = null; ArrayList<ParsingResult> results; // Parse header { HeaderState h = new HeaderState(); HeaderState.setState(h); int lineNumber = 1; for (String line : lines) { if (isNotBlank(line)) { if (!line.trim().startsWith("0")) { //$NON-NLS-1$ break; } DatParser.parseLine(line, lineNumber, 0, 0f, 0f, 0f, 1.1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, true, alreadyParsed, false); } lineNumber++; } } HeaderState.state().setState(HeaderState._99_DONE); // Clear the cache.. GData.parsedLines.clear(); GData.CACHE_parsedFilesSource.clear(); drawPerLine.clear(); vertices.clear(); // The vertex structure needs a re-build GData gdata; int lineNumber = 1; for (String line : lines) { if (isNotBlank(line)) { results = DatParser.parseLine(line, lineNumber, 0, 0.5f, 0.5f, 0.5f, 1.1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, false, alreadyParsed, false); gdata = results.get(0).getGraphicalData(); if (gdata == null) { gdata = new GData0(line); } else { gdata.setText(line); } anchorData.setNext(gdata); anchorData = gdata; drawPerLine.put(lineNumber, gdata); } else { gdata = new GData0(line); anchorData.setNext(gdata); anchorData = gdata; drawPerLine.put(lineNumber, gdata); } lineNumber++; } anchorData.setNext(targetData); drawChainTail = anchorData; final GData descriptionline = drawChainAnchor.getNext(); if (descriptionline != null) { String descr = descriptionline.toString(); if (descr.length() > 1) descr = descr.substring(2); description = " - " + descr; //$NON-NLS-1$ } if (addHistory) addHistory(); } public void parseForHints(StyledText compositeText, TreeItem hints) { Set<String> alreadyParsed = new HashSet<String>(); alreadyParsed.add(getShortName()); long start = System.currentTimeMillis(); HeaderState h = new HeaderState(); HeaderState.setState(h); hints.removeAll(); int offset = StringHelper.getLineDelimiter().length(); int position = 0; int lc = compositeText.getLineCount(); ArrayList<ParsingResult> results; lc++; for (int lineNumber = 1; lineNumber < lc; lineNumber++) { String line = compositeText.getLine(lineNumber - 1); if (isNotBlank(line)) { if (!line.trim().startsWith("0")) { //$NON-NLS-1$ HeaderState.state().setState(HeaderState._99_DONE); break; } results = DatParser.parseLine(line, lineNumber, 0, 0f, 0f, 0f, 1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, true, alreadyParsed, false); for (ParsingResult result : results) { if (result.getTypeNumber() == ResultType.HINT) { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(hints, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_info.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } } } position += line.length() + offset; } { h = HeaderState.state(); results = new ArrayList<ParsingResult>(); if (!h.hasTITLE()) results.add(new ParsingResult(I18n.DATFILE_MissingTitle, "[H00] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasNAME()) results.add(new ParsingResult(I18n.DATFILE_MissingFileName, "[H10] " + 
I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasAUTHOR()) results.add(new ParsingResult(I18n.DATFILE_MissingAuthor, "[H20] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasTYPE()) results.add(new ParsingResult(I18n.DATFILE_MissingPartType, "[H30] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasLICENSE()) results.add(new ParsingResult(I18n.DATFILE_MissingLicense, "[H40] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasBFC()) results.add(new ParsingResult(I18n.DATFILE_MissingBFC, "[H60] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ int fakeLine = -1; for (ParsingResult result : results) { TreeItem trtmNewTreeitem = new TreeItem(hints, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_info.png")); //$NON-NLS-1$ trtmNewTreeitem.setText(new String[] { result.getMessage(), "---", result.getType() }); //$NON-NLS-1$ trtmNewTreeitem.setData(fakeLine); trtmNewTreeitem.setVisible(false); fakeLine--; } } hints.sortItems(); HeaderState.state().setState(HeaderState._99_DONE); NLogger.debug(getClass(), "Total time to parse header: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } public HashBiMap<Integer, GData> getDrawPerLine() { return drawPerLine.copy(); } public HashBiMap<Integer, GData> getDrawPerLine_NOCLONE() { return drawPerLine; } /* * (non-Javadoc) * * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + (oldName == null ? 0 : oldName.hashCode()); return result; } /* * (non-Javadoc) * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; DatFile other = (DatFile) obj; if (oldName == null) { if (other.oldName != null) return false; } else if (!oldName.equals(other.oldName)) return false; return true; } /** * @return the type (1 = Part, 2 = Subpart, 3 = Primitive, 4 = * Hi-Res-Primitive) */ public DatType getType() { return type; } /** * @param type * the type to set (1 = Part, 2 = Subpart, 3 = Primitive, 4 = * Hi-Res-Primitive) */ public void setType(DatType type) { this.type = type; } public VertexManager getVertexManager() { return vertices; } public GData getDrawChainTail() { if (drawChainTail == null) { GData gd = drawChainAnchor; do { drawChainTail = gd; } while ((gd = gd.getNext()) != null); } return drawChainTail; } public GData getDrawChainStart() { return drawChainAnchor; } public void setDrawChainTail(GData drawChainTail) { this.drawChainTail = drawChainTail; } public boolean isVirtual() { return virtual; } private void setVirtual(boolean virtual) { this.virtual = virtual; } public void addToTailOrInsertAfterCursor(GData gdata) { if (Editor3DWindow.getWindow().isInsertingAtCursorPosition()) { insertAfterCursor(gdata); } else { addToTail(gdata); } } public void addToTailOrInsertAfterCursorReset(GData gdata) { if (Editor3DWindow.getWindow().isInsertingAtCursorPosition()) { insertAfterCursor(gdata); } else { addToTail(gdata); } } public void insertAfterCursor(GData gdata) { // The feature is only available when the 3D view and the text editor view are synchronized! 
if (!WorkbenchManager.getUserSettingState().getSyncWithTextEditor().get()) { addToTail(gdata); return; } for (EditorTextWindow w : Project.getOpenTextWindows()) { for (CTabItem t : w.getTabFolder().getItems()) { CompositeTabState state = ((CompositeTab) t).getState(); if (this.equals(state.getFileNameObj())) { StyledText st = ((CompositeTab) t).getTextComposite(); int s1 = st.getSelectionRange().x; if (s1 > -1) { int line = st.getLineAtOffset(s1) + 1; GData target = null; target = drawPerLine.getValue(line); if (target != null) { boolean doReplace = false; boolean insertEmptyLine = true; if (target.type() == 0) { doReplace = !StringHelper.isNotBlank(target.toString()); } if (doReplace) { GData next = target.getNext(); if (next != null && next.type() == 0) { insertEmptyLine = StringHelper.isNotBlank(next.toString()); } replaceComment(target, gdata); if (insertEmptyLine) insertAfter(gdata, new GData0("")); //$NON-NLS-1$ } else { insertAfter(target, gdata); insertAfter(gdata, new GData0("")); //$NON-NLS-1$ } state.setSync(true); try { if (doReplace) { if (insertEmptyLine) { int offset = st.getOffsetAtLine(line - 1); st.setSelection(offset, offset + target.toString().length()); st.insert(gdata.toString() + StringHelper.getLineDelimiter()); offset += StringHelper.getLineDelimiter().length() + gdata.toString().length(); st.setSelection(offset, offset); } else { int offset = st.getOffsetAtLine(line - 1); st.setSelection(offset, offset + target.toString().length()); st.insert(gdata.toString()); offset += StringHelper.getLineDelimiter().length() + gdata.toString().length(); st.setSelection(offset, offset); } } else { int offset = st.getOffsetAtLine(line - 1) + target.toString().length() + StringHelper.getLineDelimiter().length(); st.setSelection(offset, offset); st.insert(StringHelper.getLineDelimiter() + gdata.toString()); offset += StringHelper.getLineDelimiter().length() + gdata.toString().length(); st.setSelection(offset, offset); } } catch (IllegalArgumentException iae) { } state.setSync(false); } return; } } } } addToTail(gdata); } public void addToTail(GData gdata) { Integer lineNumber = drawPerLine.keySet().size() + 1; drawPerLine.put(lineNumber, gdata); GData tail = drawPerLine.getValue(lineNumber - 1); if (tail == null) { drawChainTail = null; tail = getDrawChainTail(); } tail.setNext(gdata); drawChainTail = gdata; } public void insertAfter(GData target, GData gdata) { GData tail = drawPerLine.getValue(drawPerLine.keySet().size()); if (tail == null) { drawChainTail = null; tail = getDrawChainTail(); } if (target.equals(tail)) { addToTail(gdata); return; } GData next = target.getNext(); target.setNext(gdata); gdata.setNext(next); drawPerLine.clear(); int i = 1; for (GData start = drawChainAnchor.getNext(); start != null; start = start.getNext()) { drawPerLine.put(i, start); i++; } } public void replaceComment(GData target, GData gdata) { if (target.type() != 0) return; GData tail = drawPerLine.getValue(drawPerLine.keySet().size()); if (tail == null) { drawChainTail = null; tail = getDrawChainTail(); } GData next = target.getNext(); GData before = target.getBefore(); before.setNext(gdata); gdata.setNext(next); if (target.equals(tail)) { drawChainTail = gdata; } drawPerLine.put(drawPerLine.getKey(target), gdata); target.derefer(); } public Vertex getNearestObjVertex1() { return nearestObjVertex1; } public void setNearestObjVertex1(Vertex nearestObjVertex1) { this.nearestObjVertex1 = nearestObjVertex1; } public Vertex getNearestObjVertex2() { return nearestObjVertex2; } public void 
setNearestObjVertex2(Vertex nearestObjVertex2) { this.nearestObjVertex2 = nearestObjVertex2; } public Vertex getObjVertex1() { return objVertex1; } public void setObjVertex1(Vertex objVertex1) { this.objVertex1 = objVertex1; } public Vertex getObjVertex2() { return objVertex2; } public void setObjVertex2(Vertex objVertex2) { this.objVertex2 = objVertex2; } public Vertex getObjVertex3() { return objVertex3; } public void setObjVertex3(Vertex objVertex3) { this.objVertex3 = objVertex3; } public Vertex getObjVertex4() { return objVertex4; } public void setObjVertex4(Vertex objVertex4) { this.objVertex4 = objVertex4; } public void disposeData() { history.deleteHistory(); text = ""; //$NON-NLS-1$ vertices.setModified(false, true); vertices.clear(); Set<Integer> lineNumbers = drawPerLine.keySet(); for (Integer lineNumber : lineNumbers) { drawPerLine.getValue(lineNumber).derefer(); } drawPerLine.clear(); copy_drawPerLine.clear(); drawChainAnchor.setNext(null); Project.getParsedFiles().remove(this); } @Override public String toString() { return oldName; } public String getShortName() { String shortFilename = new File(newName).getName(); shortFilename = shortFilename.toLowerCase(Locale.ENGLISH); try { shortFilename = shortFilename.replaceAll("\\\\", File.separator); //$NON-NLS-1$ } catch (Exception e) { // Workaround for windows OS / JVM BUG shortFilename = shortFilename.replace("\\\\", File.separator); //$NON-NLS-1$ } if (type.equals(DatType.SUBPART)) { shortFilename = "S" + File.separator + shortFilename; //$NON-NLS-1$ } else if (type.equals(DatType.PRIMITIVE8)) { shortFilename = "8" + File.separator + shortFilename; //$NON-NLS-1$ } else if (type.equals(DatType.PRIMITIVE48)) { shortFilename = "48" + File.separator + shortFilename; //$NON-NLS-1$ } return shortFilename; } public boolean isProjectFile() { return projectFile; } public boolean save() { if (readOnly) { // Don't save read only files! return true; } boolean deleteFirst = oldName.equals(newName); try { if (deleteFirst) { File oldFile = new File(oldName); if (oldFile.exists()) { if (checkFileCollision(oldFile)) { return true; } oldFile.delete(); } } else { File newFile = new File(newName); if (newFile.exists()) { if (checkFileCollision(newFile)) { return true; } } File oldFile = new File(oldName); if (oldFile.exists()) { if (oldFile.lastModified() == lastModified) { oldFile.delete(); } } } UTF8PrintWriter r = new UTF8PrintWriter(newName); ArrayList<String> lines = new ArrayList<String>(); lines.addAll(Arrays.asList(text.split("\r?\n|\r", -1))); //$NON-NLS-1$ if (lines.isEmpty()) lines.add(""); //$NON-NLS-1$ for (String line : lines) { r.println(line); } r.flush(); r.close(); if (!deleteFirst) { File oldFile = new File(oldName); if (oldFile.exists()) oldFile.delete(); } // File was saved. It is not virtual anymore. 
setVirtual(false); originalText = text; oldName = newName; setLastSavedOpened(new Date()); lastModified = new File(getNewName()).lastModified(); Project.removeUnsavedFile(this); HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows()); for (EditorTextWindow win : windows) { win.updateTabWithDatfile(this); } return true; } catch (Exception ex) { return false; } } public boolean saveForced() { try { File newFile = new File(newName); if (newFile.exists()) { newFile.delete(); } UTF8PrintWriter r = new UTF8PrintWriter(newName); ArrayList<String> lines = new ArrayList<String>(); lines.addAll(Arrays.asList(text.split("\r?\n|\r", -1))); //$NON-NLS-1$ if (lines.isEmpty()) lines.add(""); //$NON-NLS-1$ for (String line : lines) { r.println(line); } r.flush(); r.close(); // File was saved. It is not virtual anymore. setVirtual(false); originalText = text; oldName = newName; setLastSavedOpened(new Date()); lastModified = new File(getNewName()).lastModified(); Project.removeUnsavedFile(this); HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows()); for (EditorTextWindow win : windows) { win.updateTabWithDatfile(this); } return true; } catch (Exception ex) { return false; } } public boolean saveAs(String newName) { try { File newFile = new File(newName); if (newFile.exists()) { newFile.delete(); } UTF8PrintWriter r = new UTF8PrintWriter(newName); ArrayList<String> lines = new ArrayList<String>(); lines.addAll(Arrays.asList(text.split("\r?\n|\r", -1))); //$NON-NLS-1$ if (lines.isEmpty()) lines.add(""); //$NON-NLS-1$ // Write the new "0 Name: " if (lines.size() > 1) { final Pattern WHITESPACE = Pattern.compile("\\s+"); //$NON-NLS-1$ final int maxDetectionLines = Math.min(10, lines.size()); // 1. Detect the file type String folderPrefix = ""; //$NON-NLS-1$ for (int i = 0; i < maxDetectionLines; i++) { String tLine = WHITESPACE.matcher(lines.get(i)).replaceAll(" ").trim(); //$NON-NLS-1$ if (tLine.startsWith("0 !LDRAW_ORG")) { //$NON-NLS-1$ String typeSuffix = ""; //$NON-NLS-1$ String path = newFile.getParent(); if (path.endsWith(File.separator + "S") || path.endsWith(File.separator + "s")) { //$NON-NLS-1$ //$NON-NLS-2$ typeSuffix = "Unofficial_Subpart"; //$NON-NLS-1$ folderPrefix = "S\\"; //$NON-NLS-1$ } else if (path.endsWith(File.separator + "P" + File.separator + "48") || path.endsWith(File.separator + "p" + File.separator + "48")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ typeSuffix = "Unofficial_48_Primitive"; //$NON-NLS-1$ folderPrefix = "P\\"; //$NON-NLS-1$ } else if (path.endsWith(File.separator + "P" + File.separator + "8") || path.endsWith(File.separator + "p" + File.separator + "8")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ typeSuffix = "Unofficial_8_Primitive"; //$NON-NLS-1$ folderPrefix = "P\\"; //$NON-NLS-1$ } else if (path.endsWith(File.separator + "P") || path.endsWith(File.separator + "p")) { //$NON-NLS-1$ //$NON-NLS-2$ typeSuffix = "Unofficial_Primitive"; //$NON-NLS-1$ folderPrefix = "P\\"; //$NON-NLS-1$ } if (!"".equals(typeSuffix)) { //$NON-NLS-1$ lines.set(i, "0 !LDRAW_ORG " + typeSuffix); //$NON-NLS-1$ } break; } } // 2. 
Set the new name for (int i = 0; i < maxDetectionLines; i++) { String tLine = WHITESPACE.matcher(lines.get(i)).replaceAll(" ").trim(); //$NON-NLS-1$ if (tLine.startsWith("0 Name:")) { //$NON-NLS-1$ lines.set(i, "0 Name: " + folderPrefix + newFile.getName()); //$NON-NLS-1$ break; } } } for (String line : lines) { r.println(line); } r.flush(); r.close(); return true; } catch (Exception ex) { return false; } } private boolean checkFileCollision(File theFile) { if (theFile.lastModified() > lastModified) { MessageBox messageBox = new MessageBox(Editor3DWindow.getWindow().getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.CANCEL | SWT.NO); messageBox.setText(I18n.DIALOG_ModifiedTitle); Object[] messageArguments = {getShortName(), getLastSavedOpened()}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DIALOG_Modified); messageBox.setMessage(formatter.format(messageArguments)); int result2 = messageBox.open(); if (result2 == SWT.CANCEL) { return true; } else if (result2 == SWT.YES) { Project.removeUnsavedFile(this); parseForData(true); Editor3DWindow.getWindow().updateTree_unsavedEntries(); HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows()); for (EditorTextWindow win : windows) { win.updateTabWithDatfile(this); } return true; } } return false; } public Date getLastSavedOpened() { return lastSavedOpened; } private void setLastSavedOpened(Date lastSavedOpened) { this.lastSavedOpened = lastSavedOpened; } public long getLastModified() { return lastModified; } public void setLastModified(long lastModified) { this.lastModified = lastModified; } public void updateLastModified() { if (oldName.equals(newName)) { File oldFile = new File(oldName); if (oldFile.exists()) { lastModified = oldFile.lastModified(); } } else { File newFile = new File(newName); if (newFile.exists()) { if (checkFileCollision(newFile)) { lastModified = newFile.lastModified(); } } } } public String getSourceText() { StringBuilder source = new StringBuilder(); if (originalText.isEmpty()) { try { UTF8BufferedReader reader = new UTF8BufferedReader(this.getOldName()); String line = reader.readLine(); if (line != null) { source.append(line); while (true) { String line2 = reader.readLine(); if (line2 == null) { break; } source.append(StringHelper.getLineDelimiter()); source.append(line2); } } reader.close(); } catch (FileNotFoundException e) { } catch (LDParsingException e) { } catch (UnsupportedEncodingException e) {} } else { GData data2draw = drawChainAnchor; if ((data2draw = data2draw.getNext()) != null) { source.append(data2draw.toString()); } if (data2draw != null) { while ((data2draw = data2draw.getNext()) != null) { source.append(StringHelper.getLineDelimiter()); source.append(data2draw.toString()); } } } return source.toString(); } public String getTextDirect() { return text; } public boolean hasNoBackgroundPictures() { GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null) { if (data2draw.type() == 10) return false; } return true; } public int getBackgroundPictureCount() { int count = 0; GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null) { if (data2draw.type() == 10) count++; } return count; } public GDataPNG getBackgroundPicture(int index) { int count = 0; GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null) { if (data2draw.type() == 10) { if (count == index) return (GDataPNG) data2draw; count++; } } return null; } 
public Composite3D getLastSelectedComposite() { return lastSelectedComposite; } public void setLastSelectedComposite(Composite3D lastSelectedComposite) { this.lastSelectedComposite = lastSelectedComposite; } public boolean isDrawSelection() { return drawSelection; } public void setDrawSelection(boolean drawSelection) { this.drawSelection = drawSelection; } public void setProjectFile(boolean projectFile) { this.projectFile = projectFile; } public HistoryManager getHistory() { return history; } public void setHistory(HistoryManager history) { this.history = history; } public void addHistory() { NLogger.debug(getClass(), "Added history entry for {0}", getShortName()); //$NON-NLS-1$ final long start = System.currentTimeMillis(); final int objCount = drawPerLine.size(); GData[] backup = new GData[objCount]; boolean[] backupSelection = new boolean[objCount]; String[] backupHideShowState = null; int count = 0; GData data2draw = drawChainAnchor; Set<GData> sd = vertices.getSelectedData(); if (vertices.hiddenData.size() > 0) { vertices.cleanupHiddenData(); backupHideShowState = new String[vertices.hiddenData.size()]; int i = 0; for (GData g : vertices.hiddenData) { backupHideShowState[i] = g.getNiceString(); i++; } } while (count < objCount) { data2draw = data2draw.getNext(); backup[count] = data2draw; backupSelection[count] = sd.contains(data2draw); count++; } Vertex[] backupSelectedVertices = vertices.getSelectedVertices().toArray(new Vertex[vertices.getSelectedVertices().size()]); Vertex[] backupHiddenVertices = vertices.getHiddenVertices().toArray(new Vertex[vertices.getHiddenVertices().size()]); history.pushHistory( null, -1, -1, backup, backupSelection, backupHideShowState, backupSelectedVertices, backupHiddenVertices, -1 ); NLogger.debug(getClass(), "Total time to backup history: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } public void addHistory(String text, int selectionStart, int selectionEnd, int topIndex) { final long start = System.currentTimeMillis(); NLogger.debug(getClass(), "Added history entry for {0}", getShortName()); //$NON-NLS-1$ history.pushHistory( text, selectionStart, selectionEnd, null, null, null, null, null, topIndex ); NLogger.debug(getClass(), "Total time to backup history: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } public void undo(final Shell sh) { history.undo(sh); } public void redo(final Shell sh) { history.redo(sh); } public static Composite3D getLastHoveredComposite() { return lastHoveredComposite; } public static void setLastHoveredComposite(Composite3D lastHoveredComposite) { DatFile.lastHoveredComposite = lastHoveredComposite; } }
src/org/nschmidt/ldparteditor/data/DatFile.java
/* MIT - License Copyright (c) 2012 - this year, Nils Schmidt Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.nschmidt.ldparteditor.data; import java.io.File; import java.io.FileNotFoundException; import java.io.UnsupportedEncodingException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; import java.util.Set; import java.util.regex.Pattern; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.custom.StyledText; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.lwjgl.opengl.GL11; import org.lwjgl.opengl.GL13; import org.lwjgl.util.vector.Vector3f; import org.nschmidt.ldparteditor.composites.Composite3D; import org.nschmidt.ldparteditor.composites.compositetab.CompositeTab; import org.nschmidt.ldparteditor.composites.compositetab.CompositeTabState; import org.nschmidt.ldparteditor.data.colour.GCChrome; import org.nschmidt.ldparteditor.data.colour.GCMatteMetal; import org.nschmidt.ldparteditor.data.colour.GCMetal; import org.nschmidt.ldparteditor.enums.MyLanguage; import org.nschmidt.ldparteditor.enums.View; import org.nschmidt.ldparteditor.helpers.composite3d.ViewIdleManager; import org.nschmidt.ldparteditor.helpers.math.HashBiMap; import org.nschmidt.ldparteditor.i18n.I18n; import org.nschmidt.ldparteditor.logger.NLogger; import org.nschmidt.ldparteditor.project.Project; import org.nschmidt.ldparteditor.resources.ResourceManager; import org.nschmidt.ldparteditor.shells.editor3d.Editor3DWindow; import org.nschmidt.ldparteditor.shells.editortext.EditorTextWindow; import org.nschmidt.ldparteditor.text.DatParser; import org.nschmidt.ldparteditor.text.HeaderState; import org.nschmidt.ldparteditor.text.LDParsingException; import org.nschmidt.ldparteditor.text.StringHelper; import org.nschmidt.ldparteditor.text.UTF8BufferedReader; import org.nschmidt.ldparteditor.text.UTF8PrintWriter; import org.nschmidt.ldparteditor.widgets.TreeItem; import org.nschmidt.ldparteditor.workbench.WorkbenchManager; /** * The DAT file class * * @author nils * */ public final class DatFile { private static final Pattern pattern = Pattern.compile("\r?\n|\r"); //$NON-NLS-1$ private final boolean readOnly; private boolean drawSelection = true; private final GData drawChainAnchor = new GDataInit(View.DUMMY_REFERENCE); private final HashBiMap<Integer, GData> drawPerLine = new HashBiMap<Integer, GData>(); private final HashMap<Integer, GData> 
copy_drawPerLine = new HashMap<Integer, GData>(); private static final GTexture CUBEMAP_TEXTURE = new GTexture(TexType.PLANAR, "cmap.png", null, 1, new Vector3f(1,0,0), new Vector3f(1,1,0), new Vector3f(1,1,1), 0, 0); //$NON-NLS-1$ private static final GDataTEX CUBEMAP = new GDataTEX(null, "", TexMeta.NEXT, CUBEMAP_TEXTURE); //$NON-NLS-1$ private static final GTexture CUBEMAP_MATTE_TEXTURE = new GTexture(TexType.PLANAR, "matte_metal.png", null, 2, new Vector3f(1,0,0), new Vector3f(1,1,0), new Vector3f(1,1,1), 0, 0); //$NON-NLS-1$ private static final GDataTEX CUBEMAP_MATTE = new GDataTEX(null, "", TexMeta.NEXT, CUBEMAP_MATTE_TEXTURE); //$NON-NLS-1$ private static final GTexture CUBEMAP_METAL_TEXTURE = new GTexture(TexType.PLANAR, "metal.png", null, 2, new Vector3f(1,0,0), new Vector3f(1,1,0), new Vector3f(1,1,1), 0, 0); //$NON-NLS-1$ private static final GDataTEX CUBEMAP_METAL = new GDataTEX(null, "", TexMeta.NEXT, CUBEMAP_METAL_TEXTURE); //$NON-NLS-1$ private final VertexManager vertices = new VertexManager(this); private Vertex nearestObjVertex1 = null; private Vertex nearestObjVertex2 = null; private Vertex objVertex1 = null; private Vertex objVertex2 = null; private Vertex objVertex3 = null; private Vertex objVertex4 = null; private boolean virtual; private boolean projectFile; private DatType type = DatType.PART; private long lastModified = 0; private String description; private String oldName; private String newName; private String text = ""; //$NON-NLS-1$ private String originalText = ""; //$NON-NLS-1$ private Date lastSavedOpened = new Date(); private GData drawChainTail = null; private Composite3D lastSelectedComposite = null; private static Composite3D lastHoveredComposite = null; private HistoryManager history = new HistoryManager(this); public DatFile(String path) { this.projectFile = true; this.oldName = path; this.newName = path; this.readOnly = false; this.setVirtual(true); this.setType(DatType.PART); } public DatFile(String path, String description, boolean isReadOnly, DatType type) { this.projectFile = false; this.description = description; this.oldName = path; this.newName = path; this.readOnly = isReadOnly; this.setVirtual(false); this.setType(type); } /** * Draw the DAT file on the Composite3D This method is not intended for * preview renderings, since its too mighty for it * * @param c3d */ public synchronized void draw(Composite3D c3d) { GDataCSG.resetCSG(); GData data2draw = drawChainAnchor; int renderMode = c3d.getRenderMode(); if (!c3d.isDrawingSolidMaterials() && renderMode != 5) vertices.draw(c3d); if (Editor3DWindow.getWindow().isAddingCondlines()) renderMode = 6; switch (renderMode) { case -1: // Wireframe break; case 0: // No BFC data2draw.draw(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { data2draw.draw(c3d); } break; case 1: // Random Colours data2draw.drawRandomColours(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { data2draw.drawRandomColours(c3d); } break; case 2: // Front-Backface BFC data2draw.drawBFC(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { switch (GData.accumClip) { case 0: data2draw.drawBFC(c3d); break; default: data2draw.draw(c3d); break; } } break; case 3: // Backface only BFC data2draw.drawBFC_backOnly(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { switch (GData.accumClip) { case 0: data2draw.drawBFC_backOnly(c3d); break; default: data2draw.draw(c3d); break; } 
} break; case 4: // Real BFC data2draw.drawBFC_Colour(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { switch (GData.accumClip) { case 0: data2draw.drawBFC_Colour(c3d); break; default: data2draw.draw(c3d); break; } } break; case 5: // Real BFC with texture mapping GL11.glEnable(GL11.GL_TEXTURE_2D); data2draw.drawBFC_Textured(c3d); vertices.fillVertexNormalCache(data2draw); data2draw.drawBFC_Textured(c3d); CUBEMAP.drawBFC_Textured(c3d); new GData3(new Vertex(0,0,0), new Vertex(1,0,0), new Vertex(1,1,0), View.DUMMY_REFERENCE, new GColour(0, 0, 0, 0, 0, new GCChrome())).drawBFC_Textured(c3d.getComposite3D()); CUBEMAP_MATTE.drawBFC_Textured(c3d); new GData3(new Vertex(0,0,0), new Vertex(1,0,0), new Vertex(1,1,0), View.DUMMY_REFERENCE, new GColour(0, 0, 0, 0, 0, new GCMatteMetal())).drawBFC_Textured(c3d.getComposite3D()); CUBEMAP_METAL.drawBFC_Textured(c3d); new GData3(new Vertex(0,0,0), new Vertex(1,0,0), new Vertex(1,1,0), View.DUMMY_REFERENCE, new GColour(0, 0, 0, 0, 0, new GCMetal())).drawBFC_Textured(c3d.getComposite3D()); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { data2draw.drawBFC_Textured(c3d); } vertices.clearVertexNormalCache(); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 0); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 2); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 4); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 8); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL13.glActiveTexture(GL13.GL_TEXTURE0 + 16); GL11.glBindTexture(GL11.GL_TEXTURE_2D, 0); GL11.glDisable(GL11.GL_TEXTURE_2D); break; case 6: // Special mode for "Add condlines" data2draw.drawWhileAddCondlines(c3d); while ((data2draw = data2draw.getNext()) != null && !ViewIdleManager.pause[0].get()) { data2draw.drawWhileAddCondlines(c3d); } break; default: break; } if (c3d.isDrawingSolidMaterials() && renderMode != 5) vertices.showHidden(); } public synchronized void getBFCorientationMap(HashMap<GData, Byte> bfcMap) { GDataCSG.resetCSG(); GData data2draw = drawChainAnchor; data2draw.getBFCorientationMap(bfcMap); while ((data2draw = data2draw.getNext()) != null) { data2draw.getBFCorientationMap(bfcMap); } } /** * @return the real filename from the file stored on disk */ public String getOldName() { return oldName; } /** * Sets the real filename of the file stored on disk * * @param oldName * the real filename */ public void setOldName(String oldName) { this.oldName = oldName; } /** * @return the new filename from the file to be stored. It's typically the * same as the old name. 
*/ public String getNewName() { return newName; } /** * Sets the new filename for the file to be stored * * @param newName * the new filename */ public void setNewName(String newName) { this.newName = newName; } public String getDescription() { return description; } public void setDescription(String d) { description = d; } /** * @return {@code true} if the file is read-only */ public boolean isReadOnly() { return readOnly; } /** * @return the text content of this dat file */ public String getText() { final boolean modified = vertices.isModified(); if (modified || Project.getUnsavedFiles().contains(this)) { if (modified) { StringBuilder sb = new StringBuilder(); GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null && data2draw.getNext() != null) { sb.append(data2draw.toString()); sb.append(StringHelper.getLineDelimiter()); } if (data2draw == null) { vertices.setModified(false, true); } else { sb.append(data2draw.toString()); text = sb.toString(); } } final GData descriptionline = drawChainAnchor.getNext(); if (descriptionline != null) { String descr = descriptionline.toString(); if (descr.length() > 1) descr = descr.substring(2); description = " - " + descr; //$NON-NLS-1$ } } else { parseForData(false); } return text; } public String getOriginalText() { return originalText; } public void setOriginalText(String ot) { setLastSavedOpened(new Date()); originalText = ot; } /** * @param text * the text content of this dat file to set */ public void setText(String text) { final GData descriptionline = drawChainAnchor.getNext(); if (descriptionline != null) { String descr = descriptionline.toString(); if (descr.length() > 1) descr = descr.substring(2); description = " - " + descr; //$NON-NLS-1$ } this.text = text; } /** * @return a list of codelines from this DAT file. * <br> This functions reads the contents from the harddrive if the file was not loaded before. 
* <br> The list will be empty if the file can't be read or can't be found */ public ArrayList<String> getSource() { ArrayList<String> result = new ArrayList<String>(); if (originalText.isEmpty() && new File(this.getOldName()).exists()) { try { UTF8BufferedReader reader = new UTF8BufferedReader(this.getOldName()); while (true) { String line2 = reader.readLine(); if (line2 == null) { break; } result.add(line2); } reader.close(); } catch (FileNotFoundException e) { } catch (LDParsingException e) { } catch (UnsupportedEncodingException e) {} } else { GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null) { result.add(data2draw.toString()); } } return result; } /** * Parses the opened dat file for errors and correct data (in realtime, only * when opened in text editor) * * @param compositeText * @param hints * @param warnings * @param errors */ public void parseForErrorAndData(StyledText compositeText, int startOffset_pos, int endOffset_pos, int length, String insertedText, String replacedText, TreeItem hints, TreeItem warnings, TreeItem errors) { HeaderState.state().setState(HeaderState._99_DONE); Set<String> alreadyParsed = new HashSet<String>(); alreadyParsed.add(getShortName()); GData anchorData = drawChainAnchor; GData targetData = null; long start = System.currentTimeMillis(); int startLine = compositeText.getLineAtOffset(startOffset_pos); int startOffset = compositeText.getOffsetAtLine(startLine); int endLine = compositeText.getLineAtOffset(endOffset_pos); int endOffset = compositeText.getOffsetAtLine(endLine) + compositeText.getLine(endLine).length(); startLine++; endLine++; boolean tailRemoved = false; // Dispose overwritten content (and so the connected 3D info) final int rlength = replacedText.length(); if (rlength > 0) { // Difficult, because the old text was overwritten >= 1 old line // change final int newLineCount = endLine - startLine + 1; final int affectedOldLineCount = StringHelper.countOccurences(StringHelper.getLineDelimiter(), replacedText) + 1; final int oldEndLine = startLine + affectedOldLineCount - 1; // Set the anchor GData linkedDraw = drawPerLine.getValue(startLine); if (linkedDraw != null) { GData newAnchor = linkedDraw.getBefore(); if (newAnchor != null) anchorData = newAnchor; } // Set the target GData linkedDraw2 = drawPerLine.getValue(oldEndLine); if (linkedDraw2 != null) { targetData = linkedDraw2.getNext(); } // Remove overwritten content int actionStartLine = startLine; for (int i = 0; i < affectedOldLineCount; i++) { tailRemoved = vertices.remove(drawPerLine.getValue(actionStartLine)) | tailRemoved; drawPerLine.removeByKey(actionStartLine); actionStartLine++; } if (affectedOldLineCount != newLineCount && !drawPerLine.isEmpty()) { // Update references at the tail int diff = newLineCount - affectedOldLineCount; actionStartLine = oldEndLine + 1; GData data; while ((data = drawPerLine.getValue(actionStartLine)) != null) { copy_drawPerLine.put(actionStartLine + diff, data); drawPerLine.removeByKey(actionStartLine); actionStartLine++; } for (Integer i : copy_drawPerLine.keySet()) { GData nd = copy_drawPerLine.get(i); drawPerLine.put(i, nd); } copy_drawPerLine.clear(); } } else if (length > 0) { // Easy, because only new text was inserted = 1 old line change int newLineCount = endLine - startLine; // Insertion within one line if (startLine == endLine) { // The target data is the next data from the old line GData linkedDraw = drawPerLine.getValue(startLine); if (linkedDraw != null) { targetData = linkedDraw.getNext(); // And the anchor 
data is the data before the old line GData newAnchor = linkedDraw.getBefore(); if (newAnchor != null) anchorData = newAnchor; // And the old line data has to be removed tailRemoved = vertices.remove(drawPerLine.getValue(startLine)) | tailRemoved; drawPerLine.removeByKey(startLine); } } else { // The target data is the next data from the old line GData linkedDraw = drawPerLine.getValue(startLine); if (linkedDraw != null) { targetData = linkedDraw.getNext(); // And the anchor data is the data before the old line GData newAnchor = linkedDraw.getBefore(); if (newAnchor != null) { anchorData = newAnchor; } // And the old line data has to be moved tailRemoved = vertices.remove(drawPerLine.getValue(startLine)) | tailRemoved; drawPerLine.removeByKey(startLine); int lcount = compositeText.getLineCount() - newLineCount + 1; for (int i = startLine + 1; i < lcount; i++) { copy_drawPerLine.put(i + newLineCount, drawPerLine.getValue(i)); drawPerLine.removeByKey(i); } for (Integer i : copy_drawPerLine.keySet()) { GData nd = copy_drawPerLine.get(i); drawPerLine.put(i, nd); } copy_drawPerLine.clear(); } } } NLogger.debug(getClass(), "Time after OpenGL data change: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ warnings.removeWithinPosition(compositeText, startOffset, endOffset, length - rlength); errors.removeWithinPosition(compositeText, startOffset, endOffset, length - rlength); int offset = compositeText.getLineDelimiter().length(); int position = startOffset; ArrayList<ParsingResult> results; // Clear the cache.. GData.parsedLines.clear(); GData.CACHE_parsedFilesSource.clear(); GData.CACHE_warningsAndErrors.clear(); String line; GData gdata; for (int lineNumber = startLine; lineNumber < endLine + 1; lineNumber++) { line = compositeText.getLine(lineNumber - 1); if (isNotBlank(line)) { results = DatParser.parseLine(line, lineNumber, 0, 0.5f, 0.5f, 0.5f, 1.1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, false, alreadyParsed, true); gdata = results.get(0).getGraphicalData(); if (gdata == null) { gdata = new GData0(line); } else { gdata.setText(line); GData.CACHE_warningsAndErrors.put(gdata, results); } anchorData.setNext(gdata); anchorData = gdata; drawPerLine.put(lineNumber, gdata); for (ParsingResult result : results) { switch (result.getTypeNumber()) { case ResultType.WARN: // Warning { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(warnings, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_warning.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } break; case ResultType.ERROR: // Error { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(errors, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_error.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } break; default: // Hint break; } } } else { gdata = new GData0(line); anchorData.setNext(gdata); anchorData 
= gdata; drawPerLine.put(lineNumber, gdata); } position += line.length() + offset; } anchorData.setNext(targetData); // Get tail if (tailRemoved || drawChainTail == null) { drawChainTail = anchorData; } hints.sortItems(); warnings.sortItems(); errors.sortItems(); hints.getParent().build(); if (DatParser.isUpatePngImages()) { Editor3DWindow.getWindow().updateBgPictureTab(); DatParser.setUpatePngImages(false); } NLogger.debug(getClass(), "Total time to parse: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ vertices.validateState(); NLogger.debug(getClass(), "Total time to parse + validate: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } /** * Parses the opened dat file for errors and correct data (in realtime, only * when opened in text editor) * * @param compositeText * @param hints * @param warnings * @param errors */ public void parseForError(StyledText compositeText, int startOffset_pos, int endOffset_pos, int length, String insertedText, String replacedText, TreeItem hints, TreeItem warnings, TreeItem errors, boolean unselectBgPicture) { if (compositeText.getText().isEmpty()) { return; } Set<String> alreadyParsed = new HashSet<String>(); alreadyParsed.add(getShortName()); long start = System.currentTimeMillis(); int startLine = compositeText.getLineAtOffset(startOffset_pos); int startOffset = compositeText.getOffsetAtLine(startLine); int endLine = compositeText.getLineAtOffset(endOffset_pos); int endOffset = compositeText.getOffsetAtLine(endLine) + compositeText.getLine(endLine).length(); startLine++; endLine++; int rlength = replacedText.length(); warnings.removeWithinPosition(compositeText, startOffset, endOffset, length - rlength); errors.removeWithinPosition(compositeText, startOffset, endOffset, length - rlength); int offset = StringHelper.getLineDelimiter().length(); int position = startOffset; ArrayList<ParsingResult> results; // Clear the cache.. 
GData.parsedLines.clear(); GData.CACHE_parsedFilesSource.clear(); String line; for (int lineNumber = startLine; lineNumber < endLine + 1; lineNumber++) { line = compositeText.getLine(lineNumber - 1); if (isNotBlank(line)) { GData gd = drawPerLine.getValue(lineNumber); results = GData.CACHE_warningsAndErrors.get(gd); if (results == null) { results = DatParser.parseLine(line, lineNumber, 0, 0.5f, 0.5f, 0.5f, 1.1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, true, alreadyParsed, true); GData.CACHE_warningsAndErrors.put(gd, results); } for (ParsingResult result : results) { switch (result.getTypeNumber()) { case ResultType.WARN: // Warning { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(warnings, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_warning.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } break; case ResultType.ERROR: // Error { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(errors, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_error.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } break; default: // Hint break; } } } position += line.length() + offset; } if (unselectBgPicture) { vertices.setSelectedBgPicture(null); vertices.setSelectedBgPictureIndex(0); Editor3DWindow.getWindow().updateBgPictureTab(); } hints.sortItems(); warnings.sortItems(); errors.sortItems(); hints.getParent().build(); NLogger.debug(getClass(), "Total time to parse (error check only): {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ vertices.validateState(); NLogger.debug(getClass(), "Total time to parse + validate: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } private boolean isNotBlank(String str) { int strLen; if (str == null || (strLen = str.length()) == 0) { return false; } for (int i = 0; i < strLen; i++) { if (Character.isWhitespace(str.charAt(i)) == false) { return true; } } return false; } public void parseForData(boolean addHistory) { Project.getParsedFiles().add(this); Set<String> alreadyParsed = new HashSet<String>(); alreadyParsed.add(getShortName()); String[] lines; if (Project.getUnsavedFiles().contains(this) ) { lines = pattern.split(text, -1); if (lines.length == 0) { lines = new String[]{""}; //$NON-NLS-1$ } } else { StringBuilder sb = new StringBuilder(); ArrayList<String> lines2 = new ArrayList<String>(4096); try { UTF8BufferedReader reader = new UTF8BufferedReader(this.getOldName()); String line = reader.readLine(); if (line != null) { sb.append(line); lines2.add(line); while (true) { String line2 = reader.readLine(); if (line2 == null) { break; } sb.append(StringHelper.getLineDelimiter()); sb.append(line2); lines2.add(line2); } } else { lines2.add(""); //$NON-NLS-1$ } reader.close(); lastModified = new File(getOldName()).lastModified(); } catch (FileNotFoundException e) { } catch (LDParsingException e) { } catch 
(UnsupportedEncodingException e) {} lines = lines2.toArray(new String[lines2.size()]); setLastSavedOpened(new Date()); originalText = sb.toString(); text = originalText; } GData anchorData = drawChainAnchor; GData targetData = null; ArrayList<ParsingResult> results; // Parse header { HeaderState h = new HeaderState(); HeaderState.setState(h); int lineNumber = 1; for (String line : lines) { if (isNotBlank(line)) { if (!line.trim().startsWith("0")) { //$NON-NLS-1$ break; } DatParser.parseLine(line, lineNumber, 0, 0f, 0f, 0f, 1.1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, true, alreadyParsed, false); } lineNumber++; } } HeaderState.state().setState(HeaderState._99_DONE); // Clear the cache.. GData.parsedLines.clear(); GData.CACHE_parsedFilesSource.clear(); drawPerLine.clear(); vertices.clear(); // The vertex structure needs a re-build GData gdata; int lineNumber = 1; for (String line : lines) { if (isNotBlank(line)) { results = DatParser.parseLine(line, lineNumber, 0, 0.5f, 0.5f, 0.5f, 1.1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, false, alreadyParsed, false); gdata = results.get(0).getGraphicalData(); if (gdata == null) { gdata = new GData0(line); } else { gdata.setText(line); } anchorData.setNext(gdata); anchorData = gdata; drawPerLine.put(lineNumber, gdata); } else { gdata = new GData0(line); anchorData.setNext(gdata); anchorData = gdata; drawPerLine.put(lineNumber, gdata); } lineNumber++; } anchorData.setNext(targetData); drawChainTail = anchorData; final GData descriptionline = drawChainAnchor.getNext(); if (descriptionline != null) { String descr = descriptionline.toString(); if (descr.length() > 1) descr = descr.substring(2); description = " - " + descr; //$NON-NLS-1$ } if (addHistory) addHistory(); } public void parseForHints(StyledText compositeText, TreeItem hints) { Set<String> alreadyParsed = new HashSet<String>(); alreadyParsed.add(getShortName()); long start = System.currentTimeMillis(); HeaderState h = new HeaderState(); HeaderState.setState(h); hints.removeAll(); int offset = StringHelper.getLineDelimiter().length(); int position = 0; int lc = compositeText.getLineCount(); ArrayList<ParsingResult> results; lc++; for (int lineNumber = 1; lineNumber < lc; lineNumber++) { String line = compositeText.getLine(lineNumber - 1); if (isNotBlank(line)) { if (!line.trim().startsWith("0")) { //$NON-NLS-1$ HeaderState.state().setState(HeaderState._99_DONE); break; } results = DatParser.parseLine(line, lineNumber, 0, 0f, 0f, 0f, 1f, View.DUMMY_REFERENCE, View.ID, View.ACCURATE_ID, this, true, alreadyParsed, false); for (ParsingResult result : results) { if (result.getTypeNumber() == ResultType.HINT) { Object[] messageArguments = {lineNumber, position}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DATFILE_Line); TreeItem trtmNewTreeitem = new TreeItem(hints, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_info.png")); //$NON-NLS-1$ trtmNewTreeitem.setVisible(false); trtmNewTreeitem.setText(new String[] { result.getMessage(), formatter.format(messageArguments), result.getType() }); trtmNewTreeitem.setData(position); } } } position += line.length() + offset; } { h = HeaderState.state(); results = new ArrayList<ParsingResult>(); if (!h.hasTITLE()) results.add(new ParsingResult(I18n.DATFILE_MissingTitle, "[H00] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasNAME()) results.add(new ParsingResult(I18n.DATFILE_MissingFileName, "[H10] " + 
I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasAUTHOR()) results.add(new ParsingResult(I18n.DATFILE_MissingAuthor, "[H20] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasTYPE()) results.add(new ParsingResult(I18n.DATFILE_MissingPartType, "[H30] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasLICENSE()) results.add(new ParsingResult(I18n.DATFILE_MissingLicense, "[H40] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ if (!h.hasBFC()) results.add(new ParsingResult(I18n.DATFILE_MissingBFC, "[H60] " + I18n.DATFILE_HeaderHint, ResultType.HINT)); //$NON-NLS-1$ int fakeLine = -1; for (ParsingResult result : results) { TreeItem trtmNewTreeitem = new TreeItem(hints, SWT.NONE); trtmNewTreeitem.setImage(ResourceManager.getImage("icon16_info.png")); //$NON-NLS-1$ trtmNewTreeitem.setText(new String[] { result.getMessage(), "---", result.getType() }); //$NON-NLS-1$ trtmNewTreeitem.setData(fakeLine); trtmNewTreeitem.setVisible(false); fakeLine--; } } hints.sortItems(); HeaderState.state().setState(HeaderState._99_DONE); NLogger.debug(getClass(), "Total time to parse header: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } public HashBiMap<Integer, GData> getDrawPerLine() { return drawPerLine.copy(); } public HashBiMap<Integer, GData> getDrawPerLine_NOCLONE() { return drawPerLine; } /* * (non-Javadoc) * * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + (oldName == null ? 0 : oldName.hashCode()); return result; } /* * (non-Javadoc) * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; DatFile other = (DatFile) obj; if (oldName == null) { if (other.oldName != null) return false; } else if (!oldName.equals(other.oldName)) return false; return true; } /** * @return the type (1 = Part, 2 = Subpart, 3 = Primitive, 4 = * Hi-Res-Primitive) */ public DatType getType() { return type; } /** * @param type * the type to set (1 = Part, 2 = Subpart, 3 = Primitive, 4 = * Hi-Res-Primitive) */ public void setType(DatType type) { this.type = type; } public VertexManager getVertexManager() { return vertices; } public GData getDrawChainTail() { if (drawChainTail == null) { GData gd = drawChainAnchor; do { drawChainTail = gd; } while ((gd = gd.getNext()) != null); } return drawChainTail; } public GData getDrawChainStart() { return drawChainAnchor; } public void setDrawChainTail(GData drawChainTail) { this.drawChainTail = drawChainTail; } public boolean isVirtual() { return virtual; } private void setVirtual(boolean virtual) { this.virtual = virtual; } public void addToTailOrInsertAfterCursor(GData gdata) { if (Editor3DWindow.getWindow().isInsertingAtCursorPosition()) { insertAfterCursor(gdata); } else { addToTail(gdata); } } public void addToTailOrInsertAfterCursorReset(GData gdata) { if (Editor3DWindow.getWindow().isInsertingAtCursorPosition()) { insertAfterCursor(gdata); } else { addToTail(gdata); } } public void insertAfterCursor(GData gdata) { // The feature is only available when the 3D view and the text editor view are synchronized! 
if (!WorkbenchManager.getUserSettingState().getSyncWithTextEditor().get()) { addToTail(gdata); return; } for (EditorTextWindow w : Project.getOpenTextWindows()) { for (CTabItem t : w.getTabFolder().getItems()) { CompositeTabState state = ((CompositeTab) t).getState(); if (this.equals(state.getFileNameObj())) { StyledText st = ((CompositeTab) t).getTextComposite(); int s1 = st.getSelectionRange().x; if (s1 > -1) { int line = st.getLineAtOffset(s1) + 1; GData target = null; target = drawPerLine.getValue(line); if (target != null) { boolean doReplace = false; boolean insertEmptyLine = true; if (target.type() == 0) { doReplace = !StringHelper.isNotBlank(target.toString()); } if (doReplace) { GData next = target.getNext(); if (next != null && next.type() == 0) { insertEmptyLine = StringHelper.isNotBlank(next.toString()); } replaceComment(target, gdata); if (insertEmptyLine) insertAfter(gdata, new GData0("")); //$NON-NLS-1$ } else { insertAfter(target, gdata); insertAfter(gdata, new GData0("")); //$NON-NLS-1$ } state.setSync(true); try { if (doReplace) { if (insertEmptyLine) { int offset = st.getOffsetAtLine(line - 1); st.setSelection(offset, offset + target.toString().length()); st.insert(gdata.toString() + StringHelper.getLineDelimiter()); offset += StringHelper.getLineDelimiter().length() + gdata.toString().length(); st.setSelection(offset, offset); } else { int offset = st.getOffsetAtLine(line - 1); st.setSelection(offset, offset + target.toString().length()); st.insert(gdata.toString()); offset += StringHelper.getLineDelimiter().length() + gdata.toString().length(); st.setSelection(offset, offset); } } else { int offset = st.getOffsetAtLine(line - 1) + target.toString().length() + StringHelper.getLineDelimiter().length(); st.setSelection(offset, offset); st.insert(StringHelper.getLineDelimiter() + gdata.toString()); offset += StringHelper.getLineDelimiter().length() + gdata.toString().length(); st.setSelection(offset, offset); } } catch (IllegalArgumentException iae) { } state.setSync(false); } return; } } } } addToTail(gdata); } public void addToTail(GData gdata) { Integer lineNumber = drawPerLine.keySet().size() + 1; drawPerLine.put(lineNumber, gdata); GData tail = drawPerLine.getValue(lineNumber - 1); if (tail == null) { drawChainTail = null; tail = getDrawChainTail(); } tail.setNext(gdata); drawChainTail = gdata; } public void insertAfter(GData target, GData gdata) { GData tail = drawPerLine.getValue(drawPerLine.keySet().size()); if (tail == null) { drawChainTail = null; tail = getDrawChainTail(); } if (target.equals(tail)) { addToTail(gdata); return; } GData next = target.getNext(); target.setNext(gdata); gdata.setNext(next); drawPerLine.clear(); int i = 1; for (GData start = drawChainAnchor.getNext(); start != null; start = start.getNext()) { drawPerLine.put(i, start); i++; } } public void replaceComment(GData target, GData gdata) { if (target.type() != 0) return; GData tail = drawPerLine.getValue(drawPerLine.keySet().size()); if (tail == null) { drawChainTail = null; tail = getDrawChainTail(); } GData next = target.getNext(); GData before = target.getBefore(); before.setNext(gdata); gdata.setNext(next); if (target.equals(tail)) { drawChainTail = gdata; } drawPerLine.put(drawPerLine.getKey(target), gdata); target.derefer(); } public Vertex getNearestObjVertex1() { return nearestObjVertex1; } public void setNearestObjVertex1(Vertex nearestObjVertex1) { this.nearestObjVertex1 = nearestObjVertex1; } public Vertex getNearestObjVertex2() { return nearestObjVertex2; } public void 
setNearestObjVertex2(Vertex nearestObjVertex2) { this.nearestObjVertex2 = nearestObjVertex2; } public Vertex getObjVertex1() { return objVertex1; } public void setObjVertex1(Vertex objVertex1) { this.objVertex1 = objVertex1; } public Vertex getObjVertex2() { return objVertex2; } public void setObjVertex2(Vertex objVertex2) { this.objVertex2 = objVertex2; } public Vertex getObjVertex3() { return objVertex3; } public void setObjVertex3(Vertex objVertex3) { this.objVertex3 = objVertex3; } public Vertex getObjVertex4() { return objVertex4; } public void setObjVertex4(Vertex objVertex4) { this.objVertex4 = objVertex4; } public void disposeData() { history.deleteHistory(); text = ""; //$NON-NLS-1$ vertices.setModified(false, true); vertices.clear(); Set<Integer> lineNumbers = drawPerLine.keySet(); for (Integer lineNumber : lineNumbers) { drawPerLine.getValue(lineNumber).derefer(); } drawPerLine.clear(); copy_drawPerLine.clear(); drawChainAnchor.setNext(null); Project.getParsedFiles().remove(this); } @Override public String toString() { return oldName; } public String getShortName() { String shortFilename = new File(newName).getName(); shortFilename = shortFilename.toLowerCase(Locale.ENGLISH); try { shortFilename = shortFilename.replaceAll("\\\\", File.separator); //$NON-NLS-1$ } catch (Exception e) { // Workaround for windows OS / JVM BUG shortFilename = shortFilename.replace("\\\\", File.separator); //$NON-NLS-1$ } if (type.equals(DatType.SUBPART)) { shortFilename = "S" + File.separator + shortFilename; //$NON-NLS-1$ } else if (type.equals(DatType.PRIMITIVE8)) { shortFilename = "8" + File.separator + shortFilename; //$NON-NLS-1$ } else if (type.equals(DatType.PRIMITIVE48)) { shortFilename = "48" + File.separator + shortFilename; //$NON-NLS-1$ } return shortFilename; } public boolean isProjectFile() { return projectFile; } public boolean save() { if (readOnly) { // Don't save read only files! return true; } boolean deleteFirst = oldName.equals(newName); try { if (deleteFirst) { File oldFile = new File(oldName); if (oldFile.exists()) { if (checkFileCollision(oldFile)) { return true; } oldFile.delete(); } } else { File newFile = new File(newName); if (newFile.exists()) { if (checkFileCollision(newFile)) { return true; } } File oldFile = new File(oldName); if (oldFile.exists()) { if (oldFile.lastModified() == lastModified) { oldFile.delete(); } } } UTF8PrintWriter r = new UTF8PrintWriter(newName); ArrayList<String> lines = new ArrayList<String>(); lines.addAll(Arrays.asList(text.split("\r?\n|\r", -1))); //$NON-NLS-1$ if (lines.isEmpty()) lines.add(""); //$NON-NLS-1$ for (String line : lines) { r.println(line); } r.flush(); r.close(); if (!deleteFirst) { File oldFile = new File(oldName); if (oldFile.exists()) oldFile.delete(); } // File was saved. It is not virtual anymore. 
setVirtual(false); originalText = text; oldName = newName; setLastSavedOpened(new Date()); lastModified = new File(getNewName()).lastModified(); Project.removeUnsavedFile(this); HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows()); for (EditorTextWindow win : windows) { win.updateTabWithDatfile(this); } return true; } catch (Exception ex) { return false; } } public boolean saveForced() { try { File newFile = new File(newName); if (newFile.exists()) { newFile.delete(); } UTF8PrintWriter r = new UTF8PrintWriter(newName); ArrayList<String> lines = new ArrayList<String>(); lines.addAll(Arrays.asList(text.split("\r?\n|\r", -1))); //$NON-NLS-1$ if (lines.isEmpty()) lines.add(""); //$NON-NLS-1$ for (String line : lines) { r.println(line); } r.flush(); r.close(); // File was saved. It is not virtual anymore. setVirtual(false); originalText = text; oldName = newName; setLastSavedOpened(new Date()); lastModified = new File(getNewName()).lastModified(); Project.removeUnsavedFile(this); HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows()); for (EditorTextWindow win : windows) { win.updateTabWithDatfile(this); } return true; } catch (Exception ex) { return false; } } public boolean saveAs(String newName) { try { File newFile = new File(newName); if (newFile.exists()) { newFile.delete(); } UTF8PrintWriter r = new UTF8PrintWriter(newName); ArrayList<String> lines = new ArrayList<String>(); lines.addAll(Arrays.asList(text.split("\r?\n|\r", -1))); //$NON-NLS-1$ if (lines.isEmpty()) lines.add(""); //$NON-NLS-1$ // Write the new "0 Name: " if (lines.size() > 1) { final Pattern WHITESPACE = Pattern.compile("\\s+"); //$NON-NLS-1$ final int maxDetectionLines = Math.min(10, lines.size()); // 1. Detect the file type String folderPrefix = ""; //$NON-NLS-1$ for (int i = 0; i < maxDetectionLines; i++) { String tLine = WHITESPACE.matcher(lines.get(i)).replaceAll(" ").trim(); //$NON-NLS-1$ if (tLine.startsWith("0 !LDRAW_ORG")) { //$NON-NLS-1$ String typeSuffix = ""; //$NON-NLS-1$ String path = newFile.getParent(); if (path.endsWith(File.separator + "S") || path.endsWith(File.separator + "s")) { //$NON-NLS-1$ //$NON-NLS-2$ typeSuffix = "Unofficial_Subpart"; //$NON-NLS-1$ folderPrefix = "S\\"; //$NON-NLS-1$ } else if (path.endsWith(File.separator + "P" + File.separator + "48") || path.endsWith(File.separator + "p" + File.separator + "48")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ typeSuffix = "Unofficial_48_Primitive"; //$NON-NLS-1$ folderPrefix = "P\\"; //$NON-NLS-1$ } else if (path.endsWith(File.separator + "P" + File.separator + "8") || path.endsWith(File.separator + "p" + File.separator + "8")) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ typeSuffix = "Unofficial_8_Primitive"; //$NON-NLS-1$ folderPrefix = "P\\"; //$NON-NLS-1$ } else if (path.endsWith(File.separator + "P") || path.endsWith(File.separator + "p")) { //$NON-NLS-1$ //$NON-NLS-2$ typeSuffix = "Unofficial_Primitive"; //$NON-NLS-1$ folderPrefix = "P\\"; //$NON-NLS-1$ } if (!"".equals(typeSuffix)) { //$NON-NLS-1$ lines.set(i, "0 !LDRAW_ORG " + typeSuffix); //$NON-NLS-1$ } break; } } // 2. 
Set the new name for (int i = 0; i < maxDetectionLines; i++) { String tLine = WHITESPACE.matcher(lines.get(i)).replaceAll(" ").trim(); //$NON-NLS-1$ if (tLine.startsWith("0 Name:")) { //$NON-NLS-1$ lines.set(i, "0 Name: " + folderPrefix + newFile.getName()); //$NON-NLS-1$ break; } } } for (String line : lines) { r.println(line); } r.flush(); r.close(); return true; } catch (Exception ex) { return false; } } private boolean checkFileCollision(File theFile) { if (theFile.lastModified() > lastModified) { MessageBox messageBox = new MessageBox(Editor3DWindow.getWindow().getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.CANCEL | SWT.NO); messageBox.setText(I18n.DIALOG_ModifiedTitle); Object[] messageArguments = {getShortName(), getLastSavedOpened()}; MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$ formatter.setLocale(MyLanguage.LOCALE); formatter.applyPattern(I18n.DIALOG_Modified); messageBox.setMessage(formatter.format(messageArguments)); int result2 = messageBox.open(); if (result2 == SWT.CANCEL) { return true; } else if (result2 == SWT.YES) { Project.removeUnsavedFile(this); parseForData(true); Editor3DWindow.getWindow().updateTree_unsavedEntries(); HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows()); for (EditorTextWindow win : windows) { win.updateTabWithDatfile(this); } return true; } } return false; } public Date getLastSavedOpened() { return lastSavedOpened; } private void setLastSavedOpened(Date lastSavedOpened) { this.lastSavedOpened = lastSavedOpened; } public long getLastModified() { return lastModified; } public void setLastModified(long lastModified) { this.lastModified = lastModified; } public void updateLastModified() { if (oldName.equals(newName)) { File oldFile = new File(oldName); if (oldFile.exists()) { lastModified = oldFile.lastModified(); } } else { File newFile = new File(newName); if (newFile.exists()) { if (checkFileCollision(newFile)) { lastModified = newFile.lastModified(); } } } } public String getSourceText() { StringBuilder source = new StringBuilder(); if (originalText.isEmpty()) { try { UTF8BufferedReader reader = new UTF8BufferedReader(this.getOldName()); String line = reader.readLine(); if (line != null) { source.append(line); while (true) { String line2 = reader.readLine(); if (line2 == null) { break; } source.append(StringHelper.getLineDelimiter()); source.append(line2); } } reader.close(); } catch (FileNotFoundException e) { } catch (LDParsingException e) { } catch (UnsupportedEncodingException e) {} } else { GData data2draw = drawChainAnchor; if ((data2draw = data2draw.getNext()) != null) { source.append(data2draw.toString()); } if (data2draw != null) { while ((data2draw = data2draw.getNext()) != null) { source.append(StringHelper.getLineDelimiter()); source.append(data2draw.toString()); } } } return source.toString(); } public String getTextDirect() { return text; } public boolean hasNoBackgroundPictures() { GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null) { if (data2draw.type() == 10) return false; } return true; } public int getBackgroundPictureCount() { int count = 0; GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null) { if (data2draw.type() == 10) count++; } return count; } public GDataPNG getBackgroundPicture(int index) { int count = 0; GData data2draw = drawChainAnchor; while ((data2draw = data2draw.getNext()) != null) { if (data2draw.type() == 10) { if (count == index) return (GDataPNG) data2draw; count++; } } return null; } 
public Composite3D getLastSelectedComposite() { return lastSelectedComposite; } public void setLastSelectedComposite(Composite3D lastSelectedComposite) { this.lastSelectedComposite = lastSelectedComposite; } public boolean isDrawSelection() { return drawSelection; } public void setDrawSelection(boolean drawSelection) { this.drawSelection = drawSelection; } public void setProjectFile(boolean projectFile) { this.projectFile = projectFile; } public HistoryManager getHistory() { return history; } public void setHistory(HistoryManager history) { this.history = history; } public void addHistory() { NLogger.debug(getClass(), "Added history entry for {0}", getShortName()); //$NON-NLS-1$ final long start = System.currentTimeMillis(); final int objCount = drawPerLine.size(); GData[] backup = new GData[objCount]; boolean[] backupSelection = new boolean[objCount]; String[] backupHideShowState = null; int count = 0; GData data2draw = drawChainAnchor; Set<GData> sd = vertices.getSelectedData(); if (vertices.hiddenData.size() > 0) { vertices.cleanupHiddenData(); backupHideShowState = new String[vertices.hiddenData.size()]; int i = 0; for (GData g : vertices.hiddenData) { backupHideShowState[i] = g.getNiceString(); i++; } while (count < objCount) { data2draw = data2draw.getNext(); backup[count] = data2draw; backupSelection[count] = sd.contains(data2draw); count++; } } else { while (count < objCount) { data2draw = data2draw.getNext(); backup[count] = data2draw; backupSelection[count] = sd.contains(data2draw); count++; } } Vertex[] backupSelectedVertices = vertices.getSelectedVertices().toArray(new Vertex[vertices.getSelectedVertices().size()]); Vertex[] backupHiddenVertices = vertices.getHiddenVertices().toArray(new Vertex[vertices.getHiddenVertices().size()]); history.pushHistory( null, -1, -1, backup, backupSelection, backupHideShowState, backupSelectedVertices, backupHiddenVertices, -1 ); NLogger.debug(getClass(), "Total time to backup history: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } public void addHistory(String text, int selectionStart, int selectionEnd, int topIndex) { final long start = System.currentTimeMillis(); NLogger.debug(getClass(), "Added history entry for {0}", getShortName()); //$NON-NLS-1$ history.pushHistory( text, selectionStart, selectionEnd, null, null, null, null, null, topIndex ); NLogger.debug(getClass(), "Total time to backup history: {0} ms", System.currentTimeMillis() - start); //$NON-NLS-1$ } public void undo(final Shell sh) { history.undo(sh); } public void redo(final Shell sh) { history.redo(sh); } public static Composite3D getLastHoveredComposite() { return lastHoveredComposite; } public static void setLastHoveredComposite(Composite3D lastHoveredComposite) { DatFile.lastHoveredComposite = lastHoveredComposite; } }
Prepared fix for issue #195.
src/org/nschmidt/ldparteditor/data/DatFile.java
Prepared fix for issue #195.
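The DatFile.addHistory() code in the record above backs up the current draw chain by copying every element and its selection flag into parallel arrays before pushing them onto the HistoryManager. A minimal, self-contained sketch of that snapshot-style undo pattern follows; the names SimpleHistory and Snapshot are illustrative only, are not part of LDPartEditor, and Java 16+ records are used for brevity.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import java.util.Set;

final class SimpleHistory<T> {

    /** One snapshot: the data in draw order plus a parallel selection-flag array. */
    record Snapshot<E>(List<E> data, boolean[] selected) {}

    private final Deque<Snapshot<T>> undoStack = new ArrayDeque<>();

    /** Mirrors the backup loop in addHistory(): copy references and selection state. */
    void push(List<T> current, Set<T> selection) {
        List<T> backup = new ArrayList<>(current);        // like the GData[] backup array
        boolean[] selected = new boolean[backup.size()];  // like backupSelection
        for (int i = 0; i < backup.size(); i++) {
            selected[i] = selection.contains(backup.get(i));
        }
        undoStack.push(new Snapshot<>(backup, selected));
    }

    /** Returns the most recent snapshot, or null when there is nothing to undo. */
    Snapshot<T> undo() {
        return undoStack.poll();
    }
}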
Java
mit
a17c05ea759f7e70deeff631290422f7f3f5327a
0
Ziver/zutil,Ziver/zutil,Ziver/zutil
/* * Copyright (c) 2015 Ziver * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package zutil.parser; import zutil.log.LogUtil; import zutil.struct.MutableInt; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; /** * Class for generating dynamic text/code from set data. * The syntax is similar to the javascript mustache library. * * <br /><br /> * Supported tags: * <ul> * <li><b> {{key}} </b><br> * <b> {{obj.attr}} </b><br> * Will be replaced with the string from the key.</li> * <li><b> {{#key}}...{{/key}} </b><br> * <b> {{#obj.attr}}...{{/obj.attr}} </b><br> * Will display content between the tags if: * key is defined, * if the key references a list the content will be iterated * for every element, the element can be referenced by the tag {{.}}, * if key is a true boolean (false will not display content).</li> * <li><b> {{^key}}</b><br> * <b> {{^obj.attr}}...{{/obj.attr}} </b><br> * A negative condition, will display content if: * the key is undefined, * the key is a empty list, * the key is a false boolean.</li> * <li><b>{{! ignore me }}</b><br> * Comment, will be ignored.</li> * </ul> * * TODO: {{> file}}: include file * TODO: {{=<% %>=}}: change delimiter * * @author Ziver koc */ public class Templator { private static final Logger log = LogUtil.getLogger(); private HashMap<String,Object> data; private TemplateEntity tmplRoot; public Templator(String tmpl){ this.data = new HashMap<String, Object>(); parseTemplate(tmpl); } public void set(String key, Object data){ this.data.put(key, data); } /** * Will clear all data attributes */ public void clear(){ data.clear(); } public String compile(){ StringBuilder str = new StringBuilder(); if(tmplRoot != null) tmplRoot.compile(str); return str.toString(); } /** * Will pare or re-parse the source template. */ private void parseTemplate(String tmpl){ tmplRoot = parseTemplate(new TemplateNode(), tmpl, new MutableInt(), null); } private TemplateNode parseTemplate(TemplateNode root, String tmpl, MutableInt m, String parentTag){ StringBuilder data = new StringBuilder(); boolean tagOpen = false; for(; m.i<tmpl.length(); ++m.i){ char c = tmpl.charAt(m.i); String d = ""+ c + (m.i+1<tmpl.length() ? 
tmpl.charAt(m.i+1) : ' '); switch( d ){ case "{{": root.add(new TemplateStaticString(data.toString())); data.delete(0, data.length()); tagOpen = true; ++m.i; break; case "}}": if(!tagOpen){ // Tag not opened, incorrect enclosure data.append(c); continue; } tagOpen = false; ++m.i; String tagName = data.toString(); data.delete(0, data.length()); switch(tagName.charAt(0)) { case '#': // Condition ++m.i; root.add(parseTemplate(new TemplateCondition(tagName.substring(1)), tmpl, m, tagName)); break; case '^': // Negative condition ++m.i; root.add(parseTemplate(new TemplateNegativeCondition(tagName.substring(1)), tmpl, m, tagName)); break; case '/': // End tag // Is this tag closing the parent? if(parentTag != null && tagName.endsWith(parentTag.substring(1))) return root; log.severe("Closing non-opened tag: {{" + tagName + "}}"); root.add(new TemplateStaticString("{{"+tagName+"}}")); break; case '!': // Comment break; default: root.add(new TemplateDataAttribute(tagName)); } break; default: data.append(c); break; } } if(tagOpen) // Incomplete tag, insert it as normal text data.insert(0, "{{"); if(data.length() > 0) // Still some text left, add to node root.add(new TemplateStaticString(data.toString())); // If we get to this point means that this node is incorrectly close // or this is the end of the file, so we convert it to a normal node if(parentTag != null) { root = new TemplateNode(root); String tagName = "{{"+parentTag+"}}"; log.severe("Missing closure of tag: " + tagName); root.addFirst(new TemplateStaticString(tagName)); } return root; } /**************************** Template Helper Classes *************************************/ protected interface TemplateEntity { public void compile(StringBuilder str); } protected class TemplateNode implements TemplateEntity { private List<TemplateEntity> entities; public TemplateNode(){ this.entities = new ArrayList<TemplateEntity>(); } public TemplateNode(TemplateNode node){ this.entities = node.entities; } public void addFirst(TemplateEntity s){ entities.add(0, s); } public void add(TemplateEntity s){ entities.add(s); } public void compile(StringBuilder str) { for(TemplateEntity sec : entities) sec.compile(str); } } protected class TemplateCondition extends TemplateNode { private TemplateDataAttribute attrib; public TemplateCondition(String key){ this.attrib = new TemplateDataAttribute(key); } public void compile(StringBuilder str) { Object obj = attrib.getObject(); if(obj != null) { if(obj instanceof Boolean){ if ((Boolean) obj) super.compile(str); } else if(obj instanceof Iterable){ for(Object o : (Iterable)obj){ // Iterate through the whole list set(".", o); super.compile(str); } set(".", null); } else super.compile(str); } } } protected class TemplateNegativeCondition extends TemplateNode { private TemplateDataAttribute attrib; public TemplateNegativeCondition(String key){ this.attrib = new TemplateDataAttribute(key); } public void compile(StringBuilder str) { Object obj = attrib.getObject(); if(obj == null) super.compile(str); else { if(obj instanceof Boolean) { if ( ! 
(Boolean) obj) super.compile(str); } else if(obj instanceof Collection) { if (((Collection) obj).isEmpty()) super.compile(str); } } } } protected class TemplateStaticString implements TemplateEntity { private String text; public TemplateStaticString(String text){ this.text = text; } public void compile(StringBuilder str) { str.append(text); } } protected class TemplateDataAttribute implements TemplateEntity { private String tag; private String key; private String attrib; public TemplateDataAttribute(String tag){ this.tag = tag; String[] s = tag.trim().split("\\.", 2); this.key = s[0]; if(s.length > 1) this.attrib = s[1]; } public Object getObject(){ if (data.containsKey(tag)) return data.get(tag); else if (data.containsKey(key)) { if (attrib != null) { Object obj = getFieldValue(data.get(key), attrib); if(obj != null) return obj; } else return data.get(key); } return null; } protected Object getFieldValue(Object obj, String attrib){ try { for (Field field : obj.getClass().getDeclaredFields()) { if(field.getName().equals(attrib)) { field.setAccessible(true); return field.get(obj); } } }catch (IllegalAccessException e){ log.log(Level.WARNING, null, e); } return null; } public void compile(StringBuilder str) { Object obj = getObject(); if(obj != null) str.append(obj.toString()); else str.append("{{").append(tag).append("}}"); } } }
src/zutil/parser/Templator.java
/* * Copyright (c) 2015 Ziver * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package zutil.parser; import zutil.log.LogUtil; import zutil.struct.MutableInt; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; /** * Class for generating dynamic text/code from set data. * The syntax is similar to the javascript mustache library. * * <br /><br /> * Supported tags: * <ul> * <li><b> {{key}} </b><br> * <b> {{obj.attr}} </b><br> * Will be replaced with the string from the key.</li> * <li><b> {{#key}}...{{/key}} </b><br> * <b> {{#obj.attr}}...{{/obj.attr}} </b><br> * Will display content between the tags if: * key is defined, * if the key references a list the content will be iterated * for every element, the element can be referenced by the tag {{.}}, * if key is a true boolean (false will not display content).</li> * <li><b> {{^key}}</b><br> * <b> {{^obj.attr}}...{{/obj.attr}} </b><br> * A negative condition, will display content if: * the key is undefined, * the key is a empty list, * the key is a false boolean.</li> * <li><b>{{! ignore me }}</b><br> * Comment, will be ignored.</li> * </ul> * * TODO: {{#key}}: support for boolean * TODO: {{> file}}: include file * TODO: {{=<% %>=}}: change delimiter * * @author Ziver koc */ public class Templator { private static final Logger log = LogUtil.getLogger(); private HashMap<String,Object> data; private TemplateEntity tmplRoot; public Templator(String tmpl){ this.data = new HashMap<String, Object>(); parseTemplate(tmpl); } public void set(String key, Object data){ this.data.put(key, data); } /** * Will clear all data attributes */ public void clear(){ data.clear(); } public String compile(){ StringBuilder str = new StringBuilder(); if(tmplRoot != null) tmplRoot.compile(str); return str.toString(); } /** * Will pare or re-parse the source template. */ private void parseTemplate(String tmpl){ tmplRoot = parseTemplate(new TemplateNode(), tmpl, new MutableInt(), null); } private TemplateNode parseTemplate(TemplateNode root, String tmpl, MutableInt m, String parentTag){ StringBuilder data = new StringBuilder(); boolean tagOpen = false; for(; m.i<tmpl.length(); ++m.i){ char c = tmpl.charAt(m.i); String d = ""+ c + (m.i+1<tmpl.length() ? 
tmpl.charAt(m.i+1) : ' '); switch( d ){ case "{{": root.add(new TemplateStaticString(data.toString())); data.delete(0, data.length()); tagOpen = true; ++m.i; break; case "}}": if(!tagOpen){ // Tag not opened, incorrect enclosure data.append(c); continue; } tagOpen = false; ++m.i; String tagName = data.toString(); data.delete(0, data.length()); switch(tagName.charAt(0)) { case '#': // Condition ++m.i; root.add(parseTemplate(new TemplateCondition(tagName.substring(1)), tmpl, m, tagName)); break; case '^': // Negative condition ++m.i; root.add(parseTemplate(new TemplateNegativeCondition(tagName.substring(1)), tmpl, m, tagName)); break; case '/': // End tag // Is this tag closing the parent? if(parentTag != null && tagName.endsWith(parentTag.substring(1))) return root; log.severe("Closing non-opened tag: {{" + tagName + "}}"); root.add(new TemplateStaticString("{{"+tagName+"}}")); break; case '!': // Comment break; default: root.add(new TemplateDataAttribute(tagName)); } break; default: data.append(c); break; } } if(tagOpen) // Incomplete tag, insert it as normal text data.insert(0, "{{"); if(data.length() > 0) // Still some text left, add to node root.add(new TemplateStaticString(data.toString())); // If we get to this point means that this node is incorrectly close // or this is the end of the file, so we convert it to a normal node if(parentTag != null) { root = new TemplateNode(root); String tagName = "{{"+parentTag+"}}"; log.severe("Missing closure of tag: " + tagName); root.addFirst(new TemplateStaticString(tagName)); } return root; } /**************************** Template Helper Classes *************************************/ protected interface TemplateEntity { public void compile(StringBuilder str); } protected class TemplateNode implements TemplateEntity { private List<TemplateEntity> entities; public TemplateNode(){ this.entities = new ArrayList<TemplateEntity>(); } public TemplateNode(TemplateNode node){ this.entities = node.entities; } public void addFirst(TemplateEntity s){ entities.add(0, s); } public void add(TemplateEntity s){ entities.add(s); } public void compile(StringBuilder str) { for(TemplateEntity sec : entities) sec.compile(str); } } protected class TemplateCondition extends TemplateNode { private TemplateDataAttribute attrib; public TemplateCondition(String key){ this.attrib = new TemplateDataAttribute(key); } public void compile(StringBuilder str) { Object obj = attrib.getObject(); if(obj != null) { if(obj instanceof Boolean){ if ((Boolean) obj) super.compile(str); } else if(obj instanceof Iterable){ for(Object o : (Iterable)obj){ // Iterate through the whole list set(".", o); super.compile(str); } set(".", null); } else super.compile(str); } } } protected class TemplateNegativeCondition extends TemplateNode { private TemplateDataAttribute attrib; public TemplateNegativeCondition(String key){ this.attrib = new TemplateDataAttribute(key); } public void compile(StringBuilder str) { Object obj = attrib.getObject(); if(obj == null) super.compile(str); else { if(obj instanceof Boolean) { if ( ! 
(Boolean) obj) super.compile(str); } else if(obj instanceof Collection) { if (((Collection) obj).isEmpty()) super.compile(str); } } } } protected class TemplateStaticString implements TemplateEntity { private String text; public TemplateStaticString(String text){ this.text = text; } public void compile(StringBuilder str) { str.append(text); } } protected class TemplateDataAttribute implements TemplateEntity { private String tag; private String key; private String attrib; public TemplateDataAttribute(String tag){ this.tag = tag; String[] s = tag.trim().split("\\.", 2); this.key = s[0]; if(s.length > 1) this.attrib = s[1]; } public Object getObject(){ if (data.containsKey(tag)) return data.get(tag); else if (data.containsKey(key)) { if (attrib != null) { Object obj = getFieldValue(data.get(key), attrib); if(obj != null) return obj; } else return data.get(key); } return null; } protected Object getFieldValue(Object obj, String attrib){ try { for (Field field : obj.getClass().getDeclaredFields()) { if(field.getName().equals(attrib)) { field.setAccessible(true); return field.get(obj); } } }catch (IllegalAccessException e){ log.log(Level.WARNING, null, e); } return null; } public void compile(StringBuilder str) { Object obj = getObject(); if(obj != null) str.append(obj.toString()); else str.append("{{").append(tag).append("}}"); } } }
Updated comment
src/zutil/parser/Templator.java
Updated comment
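As a quick illustration of the Templator API documented in the record above (the constructor takes the template string, set() binds data, compile() renders it), a minimal usage sketch follows; the expected output is inferred from the parser code shown and is not taken from the repository's tests.

import java.util.Arrays;

import zutil.parser.Templator;

public class TemplatorUsageSketch {
    public static void main(String[] args) {
        Templator tmpl = new Templator("Hello {{name}}!{{#items}} item: {{.}}{{/items}}");
        tmpl.set("name", "World");
        tmpl.set("items", Arrays.asList("a", "b"));
        // Expected output based on the parser above: "Hello World! item: a item: b"
        System.out.println(tmpl.compile());
    }
}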
Java
agpl-3.0
652a77957437fb6f65c94269ef02c9e262ccf8d1
0
kkronenb/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/kfs,kkronenb/kfs,ua-eas/kfs-devops-automation-fork,ua-eas/kfs-devops-automation-fork,ua-eas/kfs,quikkian-ua-devops/kfs,bhutchinson/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,smith750/kfs,smith750/kfs,UniversityOfHawaii/kfs,ua-eas/kfs,quikkian-ua-devops/will-financials,ua-eas/kfs,smith750/kfs,bhutchinson/kfs,kkronenb/kfs,quikkian-ua-devops/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,UniversityOfHawaii/kfs,ua-eas/kfs-devops-automation-fork,ua-eas/kfs,kuali/kfs,quikkian-ua-devops/will-financials,kuali/kfs,kuali/kfs,quikkian-ua-devops/will-financials,bhutchinson/kfs,smith750/kfs,kkronenb/kfs,ua-eas/kfs-devops-automation-fork,ua-eas/kfs,kuali/kfs,bhutchinson/kfs,kuali/kfs
/* * Copyright 2008 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.module.ar.document.service.impl; import java.sql.Date; import java.sql.Timestamp; import java.text.MessageFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.time.DateUtils; import org.kuali.kfs.module.ar.ArConstants; import org.kuali.kfs.module.ar.ArKeyConstants; import org.kuali.kfs.module.ar.businessobject.AccountsReceivableDocumentHeader; import org.kuali.kfs.module.ar.businessobject.Customer; import org.kuali.kfs.module.ar.businessobject.CustomerAddress; import org.kuali.kfs.module.ar.businessobject.CustomerBillingStatement; import org.kuali.kfs.module.ar.businessobject.CustomerInvoiceDetail; import org.kuali.kfs.module.ar.businessobject.CustomerInvoiceRecurrenceDetails; import org.kuali.kfs.module.ar.businessobject.InvoicePaidApplied; import org.kuali.kfs.module.ar.businessobject.NonInvoicedDistribution; import org.kuali.kfs.module.ar.businessobject.OrganizationOptions; import org.kuali.kfs.module.ar.document.CustomerInvoiceDocument; import org.kuali.kfs.module.ar.document.dataaccess.CustomerInvoiceDocumentDao; import org.kuali.kfs.module.ar.document.service.AccountsReceivableDocumentHeaderService; import org.kuali.kfs.module.ar.document.service.CustomerAddressService; import org.kuali.kfs.module.ar.document.service.CustomerInvoiceDetailService; import org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService; import org.kuali.kfs.module.ar.document.service.InvoicePaidAppliedService; import org.kuali.kfs.module.ar.document.service.NonInvoicedDistributionService; import org.kuali.kfs.module.ar.document.service.ReceivableAccountingLineService; import org.kuali.kfs.module.ar.report.util.CustomerStatementResultHolder; import org.kuali.kfs.sys.KFSConstants; import org.kuali.kfs.sys.KFSPropertyConstants; import org.kuali.kfs.sys.businessobject.ChartOrgHolder; import org.kuali.kfs.sys.businessobject.FinancialSystemDocumentHeader; import org.kuali.kfs.sys.service.FinancialSystemUserService; import org.kuali.kfs.sys.service.UniversityDateService; import org.kuali.kfs.sys.util.KfsDateUtils; import org.kuali.rice.core.api.config.property.ConfigurationService; import org.kuali.rice.core.api.datetime.DateTimeService; import org.kuali.rice.core.api.util.type.KualiDecimal; import org.kuali.rice.coreservice.framework.parameter.ParameterService; import org.kuali.rice.kew.api.WorkflowDocument; import org.kuali.rice.kew.api.action.ActionTaken; import org.kuali.rice.kew.api.exception.WorkflowException; import org.kuali.rice.kim.api.identity.PersonService; import org.kuali.rice.kim.api.identity.principal.Principal; import org.kuali.rice.kim.api.services.KimApiServiceLocator; import org.kuali.rice.krad.bo.Note; import org.kuali.rice.krad.dao.DocumentDao; import 
org.kuali.rice.krad.document.Document; import org.kuali.rice.krad.exception.InfrastructureException; import org.kuali.rice.krad.service.BusinessObjectService; import org.kuali.rice.krad.service.DocumentService; import org.kuali.rice.krad.service.NoteService; import org.kuali.rice.krad.util.GlobalVariables; import org.kuali.rice.krad.util.ObjectUtils; import org.springframework.transaction.annotation.Transactional; @Transactional public class CustomerInvoiceDocumentServiceImpl implements CustomerInvoiceDocumentService { protected static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(CustomerInvoiceDocumentServiceImpl.class); protected AccountsReceivableDocumentHeaderService accountsReceivableDocumentHeaderService; protected BusinessObjectService businessObjectService; protected CustomerAddressService customerAddressService; protected CustomerInvoiceDetailService customerInvoiceDetailService; protected CustomerInvoiceDocumentDao customerInvoiceDocumentDao; protected CustomerInvoiceRecurrenceDetails customerInvoiceRecurrenceDetails; protected ConfigurationService configurationService; protected DateTimeService dateTimeService; protected DocumentService documentService; protected DocumentDao documentDao; protected FinancialSystemUserService financialSystemUserService; protected InvoicePaidAppliedService<CustomerInvoiceDetail> invoicePaidAppliedService; protected NonInvoicedDistributionService nonInvoicedDistributionService; protected ParameterService parameterService; protected PersonService personService; protected ReceivableAccountingLineService receivableAccountingLineService; protected UniversityDateService universityDateService; protected NoteService noteService; /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#convertDiscountsToPaidApplieds(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public void convertDiscountsToPaidApplieds(CustomerInvoiceDocument invoice) { // this needs a little explanation. we have to calculate manually // whether we've written off the whole thing, because the regular // code uses the invoice paid applieds to discount, but since those // are added but not committed in this transaction, they're also not // visible in this transaction, so we do it manually. 
KualiDecimal openAmount = invoice.getOpenAmount(); String invoiceNumber = invoice.getDocumentNumber(); List<CustomerInvoiceDetail> discounts = invoice.getDiscounts(); // retrieve the number of current paid applieds, so we dont have item number overlap Integer paidAppliedItemNumber = 0; for (CustomerInvoiceDetail discount : discounts) { // if credit amount is zero, do nothing if (KualiDecimal.ZERO.equals(discount.getAmount())) { continue; } if (paidAppliedItemNumber == 0) { paidAppliedItemNumber = invoicePaidAppliedService.getNumberOfInvoicePaidAppliedsForInvoiceDetail(invoiceNumber, discount.getInvoiceItemNumber()); } // create and save the paidApplied InvoicePaidApplied invoicePaidApplied = new InvoicePaidApplied(); invoicePaidApplied.setDocumentNumber(invoiceNumber); invoicePaidApplied.setPaidAppliedItemNumber(paidAppliedItemNumber++); invoicePaidApplied.setFinancialDocumentReferenceInvoiceNumber(invoiceNumber); invoicePaidApplied.setInvoiceItemNumber(discount.getInvoiceItemNumber()); invoicePaidApplied.setUniversityFiscalYear(universityDateService.getCurrentFiscalYear()); invoicePaidApplied.setUniversityFiscalPeriodCode(universityDateService.getCurrentUniversityDate().getUniversityFiscalAccountingPeriod()); invoicePaidApplied.setInvoiceItemAppliedAmount(discount.getAmount().abs()); openAmount = openAmount.subtract(discount.getAmount().abs()); businessObjectService.save(invoicePaidApplied); } // if its open, but now with a zero openamount, then close it if (KualiDecimal.ZERO.equals(openAmount)) { invoice.setOpenInvoiceIndicator(false); invoice.setClosedDate(dateTimeService.getCurrentSqlDate()); documentService.updateDocument(invoice); } } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllOpenCustomerInvoiceDocuments() */ @Override public Collection<CustomerInvoiceDocument> getAllOpenCustomerInvoiceDocuments() { return getAllOpenCustomerInvoiceDocuments(true); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllOpenCustomerInvoiceDocumentsWithoutWorkflow() */ @Override public Collection<CustomerInvoiceDocument> getAllOpenCustomerInvoiceDocumentsWithoutWorkflow() { return getAllOpenCustomerInvoiceDocuments(false); } /** * @param includeWorkflowHeaders * @return */ public Collection<CustomerInvoiceDocument> getAllOpenCustomerInvoiceDocuments(boolean includeWorkflowHeaders) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // retrieve the set of documents without workflow headers invoices = customerInvoiceDocumentDao.getAllOpen(); // if we dont need workflow headers, then we're done if (!includeWorkflowHeaders) { return invoices; } // make a list of necessary workflow docs to retrieve List<String> documentHeaderIds = new ArrayList<String>(); for (CustomerInvoiceDocument invoice : invoices) { documentHeaderIds.add(invoice.getDocumentNumber()); } // get all of our docs with full workflow headers List<CustomerInvoiceDocument> docs = new ArrayList<CustomerInvoiceDocument>(); try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds) ) { docs.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { throw new RuntimeException("Unable to retrieve Customer Invoice Documents", e); } return docs; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#attachWorkflowHeadersToTheInvoices(java.util.Collection) */ @Override public Collection<CustomerInvoiceDocument> 
attachWorkflowHeadersToTheInvoices(Collection<CustomerInvoiceDocument> invoices) { List<CustomerInvoiceDocument> docs = new ArrayList<CustomerInvoiceDocument>(); if (invoices == null || invoices.isEmpty()) { return docs; } // make a list of necessary workflow docs to retrieve List<String> documentHeaderIds = new ArrayList<String>(); for (CustomerInvoiceDocument invoice : invoices) { documentHeaderIds.add(invoice.getDocumentNumber()); } // get all of our docs with full workflow headers try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds) ) { docs.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { throw new RuntimeException("Unable to retrieve Customer Invoice Documents", e); } return docs; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenInvoiceDocumentsByCustomerNumber(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getOpenInvoiceDocumentsByCustomerNumber(String customerNumber) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // customer number is not required to be populated, so we need to check that it's not null first if (StringUtils.isNotEmpty(customerNumber)) { // trim and force-caps the customer number customerNumber = customerNumber.trim().toUpperCase(); } invoices.addAll(customerInvoiceDocumentDao.getOpenByCustomerNumber(customerNumber)); return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenInvoiceDocumentsByCustomerNameByCustomerType(java.lang.String, * java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getOpenInvoiceDocumentsByCustomerNameByCustomerType(String customerName, String customerTypeCode) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // trim and force-caps the customer name customerName = StringUtils.replace(customerName, KFSConstants.WILDCARD_CHARACTER, KFSConstants.PERCENTAGE_SIGN); customerName = customerName.trim(); if (customerName.indexOf("%") < 0) { customerName += "%"; } // trim and force-caps customerTypeCode = customerTypeCode.trim().toUpperCase(); invoices.addAll(customerInvoiceDocumentDao.getOpenByCustomerNameByCustomerType(customerName, customerTypeCode)); return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenInvoiceDocumentsByCustomerName(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getOpenInvoiceDocumentsByCustomerName(String customerName) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // trim and force-caps the customer name customerName = StringUtils.replace(customerName, KFSConstants.WILDCARD_CHARACTER, KFSConstants.PERCENTAGE_SIGN); customerName = customerName.trim(); if (customerName.indexOf("%") < 0) { customerName += "%"; } invoices.addAll(customerInvoiceDocumentDao.getOpenByCustomerName(customerName)); return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenInvoiceDocumentsByCustomerType(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getOpenInvoiceDocumentsByCustomerType(String customerTypeCode) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // trim and force-caps customerTypeCode = customerTypeCode.trim().toUpperCase(); 
invoices.addAll(customerInvoiceDocumentDao.getOpenByCustomerType(customerTypeCode)); return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDetailsForCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public Collection<CustomerInvoiceDetail> getCustomerInvoiceDetailsForCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { return getCustomerInvoiceDetailsForCustomerInvoiceDocument(customerInvoiceDocument.getDocumentNumber()); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDetailsForCustomerInvoiceDocumentWithCaching(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public Collection<CustomerInvoiceDetail> getCustomerInvoiceDetailsForCustomerInvoiceDocumentWithCaching(CustomerInvoiceDocument customerInvoiceDocument) { return customerInvoiceDetailService.getCustomerInvoiceDetailsForInvoiceWithCaching(customerInvoiceDocument.getDocumentNumber()); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDetailsForCustomerInvoiceDocument(java.lang.String) */ @Override public Collection<CustomerInvoiceDetail> getCustomerInvoiceDetailsForCustomerInvoiceDocument(String customerInvoiceDocumentNumber) { return customerInvoiceDetailService.getCustomerInvoiceDetailsForInvoice(customerInvoiceDocumentNumber); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenAmountForCustomerInvoiceDocument(java.lang.String) */ @Override public KualiDecimal getOpenAmountForCustomerInvoiceDocument(String customerInvoiceDocumentNumber) { if (null == customerInvoiceDocumentNumber) { return null; } return getOpenAmountForCustomerInvoiceDocument(getInvoiceByInvoiceDocumentNumber(customerInvoiceDocumentNumber)); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenAmountForCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public KualiDecimal getOpenAmountForCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { KualiDecimal total = new KualiDecimal(0); if (customerInvoiceDocument.isOpenInvoiceIndicator()) { Collection<CustomerInvoiceDetail> customerInvoiceDetails = customerInvoiceDocument.getCustomerInvoiceDetailsWithoutDiscounts(); for (CustomerInvoiceDetail detail : customerInvoiceDetails) { // note that we're now dealing with conditionally applying discounts // depending on whether the doc is saved or approved one level down, // in the CustomerInvoiceDetail.getAmountOpen() detail.setCustomerInvoiceDocument(customerInvoiceDocument); total = total.add(detail.getAmountOpen()); } } return total; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOriginalTotalAmountForCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public KualiDecimal getOriginalTotalAmountForCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { LOG.info("\n\n\n\t\t invoice: " + customerInvoiceDocument.getDocumentNumber() + "\n\t\t 111111111 HEADER TOTAL AMOUNT (should be null): " + customerInvoiceDocument.getFinancialSystemDocumentHeader().getFinancialDocumentTotalAmount() + "\n\n"); customerInvoiceDocument.getDocumentNumber(); HashMap criteria = new HashMap(); criteria.put(KFSPropertyConstants.DOCUMENT_NUMBER, 
customerInvoiceDocument.getDocumentHeader().getDocumentTemplateNumber()); FinancialSystemDocumentHeader financialSystemDocumentHeader = businessObjectService.findByPrimaryKey(FinancialSystemDocumentHeader.class, criteria); KualiDecimal originalTotalAmount = KualiDecimal.ZERO; originalTotalAmount = financialSystemDocumentHeader.getFinancialDocumentTotalAmount(); LOG.info("\n\n\n\t\t invoice: " + customerInvoiceDocument.getDocumentNumber() + "\n\t\t 333333333333 HEADER TOTAL AMOUNT (should be set now): " + customerInvoiceDocument.getFinancialSystemDocumentHeader().getFinancialDocumentTotalAmount() + "\n\n"); return originalTotalAmount; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getInvoicesByCustomerNumber(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getCustomerInvoiceDocumentsByCustomerNumber(String customerNumber) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); Map<String, String> fieldValues = new HashMap<String, String>(); fieldValues.put("customerNumber", customerNumber); Collection<AccountsReceivableDocumentHeader> documentHeaders = businessObjectService.findMatching(AccountsReceivableDocumentHeader.class, fieldValues); List<String> documentHeaderIds = new ArrayList<String>(); for (AccountsReceivableDocumentHeader header : documentHeaders) { documentHeaderIds.add(header.getDocumentHeader().getDocumentNumber()); } if (0 < documentHeaderIds.size()) { try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds) ) { invoices.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { LOG.error("getCustomerInvoiceDocumentsByCustomerNumber " + customerNumber + " failed", e); } } return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerByOrganizationInvoiceNumber(java.lang.String) */ @Override public Customer getCustomerByOrganizationInvoiceNumber(String organizationInvoiceNumber) { CustomerInvoiceDocument invoice = getInvoiceByOrganizationInvoiceNumber(organizationInvoiceNumber); return invoice.getAccountsReceivableDocumentHeader().getCustomer(); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getInvoiceByOrganizationInvoiceNumber(java.lang.String) */ @Override public CustomerInvoiceDocument getInvoiceByOrganizationInvoiceNumber(String organizationInvoiceNumber) { return customerInvoiceDocumentDao.getInvoiceByOrganizationInvoiceNumber(organizationInvoiceNumber); } /** * @param invoiceDocumentNumber * @return */ @Override public Customer getCustomerByInvoiceDocumentNumber(String invoiceDocumentNumber) { CustomerInvoiceDocument invoice = getInvoiceByInvoiceDocumentNumber(invoiceDocumentNumber); return invoice.getAccountsReceivableDocumentHeader().getCustomer(); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getInvoiceByInvoiceDocumentNumber(java.lang.String) */ @Override public CustomerInvoiceDocument getInvoiceByInvoiceDocumentNumber(String invoiceDocumentNumber) { return customerInvoiceDocumentDao.getInvoiceByInvoiceDocumentNumber(invoiceDocumentNumber); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPrintableCustomerInvoiceDocumentsByInitiatorPrincipalName(java.lang.String) */ @Override public List<CustomerInvoiceDocument> getPrintableCustomerInvoiceDocumentsByInitiatorPrincipalName(String initiatorPrincipalName) { if 
(StringUtils.isBlank(initiatorPrincipalName)) { throw new IllegalArgumentException("The parameter [initiatorPrincipalName] passed in was null or blank."); } // IMPORTANT NOTES ABOUT THIS METHOD // // This method behaves differently than the other invoice printing methods. This is // because there's no way from within KFS to do a direct DB call to get all the invoices // you want. This is because workflow holds the document initiator, and you cant guarantee // that in a given implementation that you have access to that other db. It could be on // another box in another network, and you only have web-services access to the Rice box. // // Given that, we try to minimize the resource hit of this call as much as possible. First // we retrieve all invoices that havent been printed (ie, dont have a print date) and that // are marked for the USER print queue. At any given time that should be a manageable number of // documents. // // Then we walk through them, retrieve the full workflow-populated version of it, and only // return the ones that match the initiator. // // This isnt as performant a solution as the other getPrintableCustomerInvoiceBy... // methods, but its the best we can do in this release, and it should be manageable. // // attempt to retrieve the initiator person specified, and puke if not found Principal initiator = KimApiServiceLocator.getIdentityService().getPrincipalByPrincipalName(initiatorPrincipalName); if (initiator == null) { throw new IllegalArgumentException("The parameter value for initiatorPrincipalName [" + initiatorPrincipalName + "] passed in doesnt map to a person."); } // retrieve all the ready-to-print docs in the user-queue for all users List<String> printableUserQueueDocNumbers = customerInvoiceDocumentDao.getPrintableCustomerInvoiceDocumentNumbersFromUserQueue(); // get all the documents that might be right, but this set includes documents generated // by the wrong user List<CustomerInvoiceDocument> customerInvoiceDocumentsSuperSet = new ArrayList<CustomerInvoiceDocument>(); if (printableUserQueueDocNumbers.size() > 0) { try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, printableUserQueueDocNumbers) ) { customerInvoiceDocumentsSuperSet.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { throw new RuntimeException("Unable to retrieve Customer Invoice Documents", e); } } else { customerInvoiceDocumentsSuperSet = new ArrayList<CustomerInvoiceDocument>(); } // filter only the ones initiated by the correct user List<CustomerInvoiceDocument> customerInvoiceDocuments = new ArrayList<CustomerInvoiceDocument>(); for (CustomerInvoiceDocument superSetDocument : customerInvoiceDocumentsSuperSet) { if ( StringUtils.equalsIgnoreCase(superSetDocument.getDocumentHeader().getWorkflowDocument().getInitiatorPrincipalId(), initiator.getPrincipalId())) { customerInvoiceDocuments.add(superSetDocument); } } return customerInvoiceDocuments; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPrintableCustomerInvoiceDocumentsByBillingChartAndOrg(java.lang.String, * java.lang.String) */ @Override public List<CustomerInvoiceDocument> getPrintableCustomerInvoiceDocumentsByBillingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getPrintableCustomerInvoiceDocumentNumbersByBillingChartAndOrg(chartOfAccountsCode, organizationCode); return 
getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } protected List<CustomerInvoiceDocument> getCustomerInvoiceDocumentsByDocumentNumbers( List<String> documentHeaderIds ) { List<CustomerInvoiceDocument> customerInvoiceDocuments = new ArrayList<CustomerInvoiceDocument>(documentHeaderIds.size()); if (documentHeaderIds != null && !documentHeaderIds.isEmpty()) { try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds) ) { customerInvoiceDocuments.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { throw new RuntimeException("Unable to retrieve Customer Invoice Documents", e); } } return customerInvoiceDocuments; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPrintableCustomerInvoiceDocumentsForBillingStatementByBillingChartAndOrg(java.lang.String, * java.lang.String) */ @Override public List<CustomerInvoiceDocument> getPrintableCustomerInvoiceDocumentsForBillingStatementByBillingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getPrintableCustomerInvoiceDocumentNumbersForBillingStatementByBillingChartAndOrg(chartOfAccountsCode, organizationCode); List<CustomerInvoiceDocument> customerInvoiceDocuments = new ArrayList<CustomerInvoiceDocument>(); if (documentHeaderIds != null && !documentHeaderIds.isEmpty()) { try { for (Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds)) { customerInvoiceDocuments.add((CustomerInvoiceDocument) doc); } } catch (WorkflowException e) { throw new InfrastructureException("Unable to retrieve Customer Invoice Documents", e); } } return customerInvoiceDocuments; } /** * @see org.kuali.module.ar.service.CustomerInvoiceDocumentService#getCustomerInvoiceDocumentsByCustomerNumber(java.lang.String) */ @Override public List<CustomerInvoiceDocument> getPrintableCustomerInvoiceDocumentsByProcessingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getPrintableCustomerInvoiceDocumentNumbersByProcessingChartAndOrg(chartOfAccountsCode, organizationCode); return getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDocumentsByAccountNumber(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getCustomerInvoiceDocumentsByAccountNumber(String accountNumber) { List<String> documentHeaderIds = customerInvoiceDetailService.getCustomerInvoiceDocumentNumbersByAccountNumber(accountNumber); return getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDocumentsByBillingChartAndOrg(java.lang.String, * java.lang.String) */ @Override public List<CustomerInvoiceDocument> getCustomerInvoiceDocumentsByBillingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getCustomerInvoiceDocumentNumbersByBillingChartAndOrg(chartOfAccountsCode, organizationCode); return getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } /** * @see org.kuali.module.ar.service.CustomerInvoiceDocumentService#getCustomerInvoiceDocumentsByCustomerNumber(java.lang.String) */ @Override public List<CustomerInvoiceDocument> 
getCustomerInvoiceDocumentsByProcessingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getCustomerInvoiceDocumentNumbersByProcessingChartAndOrg(chartOfAccountsCode, organizationCode); return getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } /** * Refactor to have all the setters in here. * * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#setupDefaultValuesForNewCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public void setupDefaultValuesForNewCustomerInvoiceDocument(CustomerInvoiceDocument document) { setupBasicDefaultValuesForCustomerInvoiceDocument(document); // set up the default values for the AR DOC Header AccountsReceivableDocumentHeader accountsReceivableDocumentHeader = accountsReceivableDocumentHeaderService.getNewAccountsReceivableDocumentHeaderForCurrentUser(); accountsReceivableDocumentHeader.setDocumentNumber(document.getDocumentNumber()); document.setAccountsReceivableDocumentHeader(accountsReceivableDocumentHeader); // set up the primary key for AR_INV_RCURRNC_DTL_T CustomerInvoiceRecurrenceDetails recurrenceDetails = new CustomerInvoiceRecurrenceDetails(); recurrenceDetails.setInvoiceNumber(document.getDocumentNumber()); // recurrenceDetails.setCustomerNumber(document.getCustomer().getCustomerNumber()); document.setCustomerInvoiceRecurrenceDetails(recurrenceDetails); Map<String, String> criteria = new HashMap<String, String>(); criteria.put(KFSPropertyConstants.CHART_OF_ACCOUNTS_CODE, document.getBillByChartOfAccountCode()); criteria.put(KFSPropertyConstants.ORGANIZATION_CODE, document.getBilledByOrganizationCode()); OrganizationOptions organizationOptions = businessObjectService.findByPrimaryKey(OrganizationOptions.class, criteria); if (ObjectUtils.isNotNull(organizationOptions)) { document.setPrintInvoiceIndicator(organizationOptions.getPrintInvoiceIndicator()); document.setInvoiceTermsText(organizationOptions.getOrganizationPaymentTermsText()); } // If document is using receivable option, set receivable accounting line for customer invoice document String receivableOffsetOption = parameterService.getParameterValueAsString(CustomerInvoiceDocument.class, ArConstants.GLPE_RECEIVABLE_OFFSET_GENERATION_METHOD); boolean isUsingReceivableFAU = ArConstants.GLPE_RECEIVABLE_OFFSET_GENERATION_METHOD_FAU.equals(receivableOffsetOption); if (isUsingReceivableFAU) { receivableAccountingLineService.setReceivableAccountingLineForCustomerInvoiceDocument(document); } } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#loadCustomerAddressesForCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public void loadCustomerAddressesForCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { // if address identifier is provided, try to refresh customer address data if (ObjectUtils.isNotNull(customerInvoiceDocument.getAccountsReceivableDocumentHeader())) { CustomerAddress customerShipToAddress = customerAddressService.getByPrimaryKey(customerInvoiceDocument.getAccountsReceivableDocumentHeader().getCustomerNumber(), customerInvoiceDocument.getCustomerShipToAddressIdentifier()); CustomerAddress customerBillToAddress = customerAddressService.getByPrimaryKey(customerInvoiceDocument.getAccountsReceivableDocumentHeader().getCustomerNumber(), customerInvoiceDocument.getCustomerBillToAddressIdentifier()); if 
(ObjectUtils.isNotNull(customerShipToAddress)) { customerInvoiceDocument.setCustomerShipToAddress(customerShipToAddress); customerInvoiceDocument.setCustomerShipToAddressOnInvoice(customerShipToAddress); } if (ObjectUtils.isNotNull(customerBillToAddress)) { customerInvoiceDocument.setCustomerBillToAddress(customerBillToAddress); customerInvoiceDocument.setCustomerBillToAddressOnInvoice(customerBillToAddress); } } } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#setupDefaultValuesForCopiedCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public void setupDefaultValuesForCopiedCustomerInvoiceDocument(CustomerInvoiceDocument document) { setupBasicDefaultValuesForCustomerInvoiceDocument(document); // Save customer number since it will get overwritten when we retrieve the accounts receivable document header from service String customerNumber = document.getAccountsReceivableDocumentHeader().getCustomerNumber(); // Set up the default values for the AR DOC Header AccountsReceivableDocumentHeader accountsReceivableDocumentHeader = accountsReceivableDocumentHeaderService.getNewAccountsReceivableDocumentHeaderForCurrentUser(); accountsReceivableDocumentHeader.setDocumentNumber(document.getDocumentNumber()); accountsReceivableDocumentHeader.setCustomerNumber(customerNumber); document.setAccountsReceivableDocumentHeader(accountsReceivableDocumentHeader); // set up the primary key for AR_INV_RCURRNC_DTL_T CustomerInvoiceRecurrenceDetails recurrenceDetails = new CustomerInvoiceRecurrenceDetails(); recurrenceDetails.setInvoiceNumber(document.getDocumentNumber()); // recurrenceDetails.setCustomerNumber(document.getAccountsReceivableDocumentHeader().getCustomerNumber()); document.setCustomerInvoiceRecurrenceDetails(recurrenceDetails); // make open invoice indicator to true document.setOpenInvoiceIndicator(true); document.setPrintDate(null); document.setBillingDate(dateTimeService.getCurrentSqlDateMidnight()); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getNonInvoicedDistributionsForInvoice(java.lang.String) */ @Override public Collection<NonInvoicedDistribution> getNonInvoicedDistributionsForInvoice(String documentNumber) { return nonInvoicedDistributionService.getNonInvoicedDistributionsForInvoice(documentNumber); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getNonInvoicedTotalForInvoice(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public KualiDecimal getNonInvoicedTotalForInvoice(CustomerInvoiceDocument invoice) { Collection<NonInvoicedDistribution> payments = this.nonInvoicedDistributionService.getNonInvoicedDistributionsForInvoice(invoice); KualiDecimal total = new KualiDecimal(0); for (NonInvoicedDistribution payment : payments) { total = total.add(payment.getFinancialDocumentLineAmount()); } return total; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getNonInvoicedTotalForInvoice(java.lang.String) */ @Override public KualiDecimal getNonInvoicedTotalForInvoice(String documentNumber) { return getNonInvoicedTotalForInvoice(getInvoiceByInvoiceDocumentNumber(documentNumber)); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPaidAppliedTotalForInvoice(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public KualiDecimal getPaidAppliedTotalForInvoice(CustomerInvoiceDocument invoice) { Collection<InvoicePaidApplied> payments = 
invoicePaidAppliedService.getInvoicePaidAppliedsForInvoice(invoice); KualiDecimal total = new KualiDecimal(0); for (InvoicePaidApplied payment : payments) { total = total.add(payment.getInvoiceItemAppliedAmount()); } return total; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPaidAppliedTotalForInvoice(java.lang.String) */ @Override public KualiDecimal getPaidAppliedTotalForInvoice(String documentNumber) { return getPaidAppliedTotalForInvoice(getInvoiceByInvoiceDocumentNumber(documentNumber)); } /** * @param document */ protected void setupBasicDefaultValuesForCustomerInvoiceDocument(CustomerInvoiceDocument document) { ChartOrgHolder currentUser = financialSystemUserService.getPrimaryOrganization(GlobalVariables.getUserSession().getPerson(), ArConstants.AR_NAMESPACE_CODE); if (currentUser != null) { document.setBillByChartOfAccountCode(currentUser.getChartOfAccountsCode()); document.setBilledByOrganizationCode(currentUser.getOrganizationCode()); } document.setInvoiceDueDate(getDefaultInvoiceDueDate()); document.setOpenInvoiceIndicator(true); } /** * This method sets due date equal to todays date +30 days by default * * @param dateTimeService */ protected Date getDefaultInvoiceDueDate() { Calendar cal = dateTimeService.getCurrentCalendar(); cal.add(Calendar.DATE, 30); Date sqlDueDate = null; try { sqlDueDate = dateTimeService.convertToSqlDate(new Timestamp(cal.getTime().getTime())); } catch (ParseException e) { // TODO: throw an error here, but don't die } return sqlDueDate; } @Override public void closeCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { customerInvoiceDocument.setOpenInvoiceIndicator(false); customerInvoiceDocument.setClosedDate(dateTimeService.getCurrentSqlDate()); businessObjectService.save(customerInvoiceDocument); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#updateReportedDate(String) */ @Override public void updateReportedDate(String docNumber) { HashMap<String, String> criteria = new HashMap<String, String>(); criteria.put("documentNumber", docNumber); CustomerInvoiceDocument customerInvoiceDocument = businessObjectService.findByPrimaryKey(CustomerInvoiceDocument.class, criteria); Date reportedDate = dateTimeService.getCurrentSqlDate(); if (ObjectUtils.isNotNull(customerInvoiceDocument)) { customerInvoiceDocument.setReportedDate(reportedDate); businessObjectService.save(customerInvoiceDocument); } } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#updateReportedInvoiceInfo(CustomerStatementResultHolder) */ @Override public void updateReportedInvoiceInfo(CustomerStatementResultHolder data) { HashMap<String, String> criteria = new HashMap<String, String>(); criteria.put("customerNumber", data.getCustomerNumber()); CustomerBillingStatement customerBillingStatement = businessObjectService.findByPrimaryKey(CustomerBillingStatement.class, criteria); if (ObjectUtils.isNotNull(customerBillingStatement)) { customerBillingStatement.setPreviouslyBilledAmount(data.getCurrentBilledAmount()); customerBillingStatement.setReportedDate(dateTimeService.getCurrentSqlDate()); } else { customerBillingStatement = new CustomerBillingStatement(); customerBillingStatement.setCustomerNumber(data.getCustomerNumber()); customerBillingStatement.setPreviouslyBilledAmount(data.getCurrentBilledAmount()); customerBillingStatement.setReportedDate(dateTimeService.getCurrentSqlDate()); } businessObjectService.save(customerBillingStatement); } public 
CustomerInvoiceDocumentDao getCustomerInvoiceDocumentDao() { return customerInvoiceDocumentDao; } public void setCustomerInvoiceDocumentDao(CustomerInvoiceDocumentDao customerInvoiceDocumentDao) { this.customerInvoiceDocumentDao = customerInvoiceDocumentDao; } public DocumentService getDocumentService() { return documentService; } public void setDocumentService(DocumentService documentService) { this.documentService = documentService; } public BusinessObjectService getBusinessObjectService() { return businessObjectService; } public void setBusinessObjectService(BusinessObjectService businessObjectService) { this.businessObjectService = businessObjectService; } public DateTimeService getDateTimeService() { return dateTimeService; } public void setDateTimeService(DateTimeService dateTimeService) { this.dateTimeService = dateTimeService; } public ReceivableAccountingLineService getReceivableAccountingLineService() { return receivableAccountingLineService; } public void setReceivableAccountingLineService(ReceivableAccountingLineService receivableAccountingLineService) { this.receivableAccountingLineService = receivableAccountingLineService; } public AccountsReceivableDocumentHeaderService getAccountsReceivableDocumentHeaderService() { return accountsReceivableDocumentHeaderService; } public void setAccountsReceivableDocumentHeaderService(AccountsReceivableDocumentHeaderService accountsReceivableDocumentHeaderService) { this.accountsReceivableDocumentHeaderService = accountsReceivableDocumentHeaderService; } public CustomerAddressService getCustomerAddressService() { return customerAddressService; } public void setCustomerAddressService(CustomerAddressService customerAddressService) { this.customerAddressService = customerAddressService; } public void setDocumentDao(DocumentDao documentDao) { this.documentDao = documentDao; } public void setInvoicePaidAppliedService(InvoicePaidAppliedService invoicePaidAppliedService) { this.invoicePaidAppliedService = invoicePaidAppliedService; } public void setNonInvoicedDistributionService(NonInvoicedDistributionService nonInvoicedDistributionService) { this.nonInvoicedDistributionService = nonInvoicedDistributionService; } public void setCustomerInvoiceDetailService(CustomerInvoiceDetailService customerInvoiceDetailService) { this.customerInvoiceDetailService = customerInvoiceDetailService; } public void setUniversityDateService(UniversityDateService universityDateService) { this.universityDateService = universityDateService; } public void setNoteService(NoteService noteService) { this.noteService = noteService; } /** * @return Returns the personService. 
*/ public PersonService getPersonService() { return personService; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#checkIfInvoiceNumberIsFinal(java.lang.String) */ @Override public boolean checkIfInvoiceNumberIsFinal(String invDocumentNumber) { boolean isSuccess = true; if (StringUtils.isBlank(invDocumentNumber)) { isSuccess &= false; } else { CustomerInvoiceDocument customerInvoiceDocument = getInvoiceByInvoiceDocumentNumber(invDocumentNumber); if (ObjectUtils.isNull(customerInvoiceDocument)) { isSuccess &= false; } else { Document doc = null; try { doc = documentService.getByDocumentHeaderId(invDocumentNumber); } catch (WorkflowException e) { isSuccess &= false; } if (ObjectUtils.isNull(doc) || ObjectUtils.isNull(doc.getDocumentHeader()) || doc.getDocumentHeader().getWorkflowDocument() == null || !(doc.getDocumentHeader().getWorkflowDocument().isApproved() || doc.getDocumentHeader().getWorkflowDocument().isProcessed())) { isSuccess &= false; } } } return isSuccess; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllAgingInvoiceDocumentsByBilling(java.util.List, java.util.List, java.lang.Integer) */ @Override public Collection<CustomerInvoiceDocument> getAllAgingInvoiceDocumentsByBilling(List<String> charts, List<String> organizations, Integer invoiceAge) { Date invoiceBillingDateFrom = null; Date invoiceBillingDateTo = this.getPastDate(invoiceAge - 1) ; return customerInvoiceDocumentDao.getAllAgingInvoiceDocumentsByBilling(charts, organizations, invoiceBillingDateFrom, invoiceBillingDateTo); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllAgingInvoiceDocumentsByAccounts(java.util.List, java.util.List, java.lang.Integer) */ @Override public Collection<CustomerInvoiceDocument> getAllAgingInvoiceDocumentsByAccounts(List<String> charts, List<String> accounts, Integer invoiceAge) { Date invoiceBillingDateFrom = null; Date invoiceBillingDateTo = this.getPastDate(invoiceAge - 1) ; return customerInvoiceDocumentDao.getAllAgingInvoiceDocumentsByAccounts(charts, accounts, invoiceBillingDateFrom, invoiceBillingDateTo); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllAgingInvoiceDocumentsByProcessing(java.util.List, java.util.List, java.lang.Integer) */ @Override public Collection<CustomerInvoiceDocument> getAllAgingInvoiceDocumentsByProcessing(List<String> charts, List<String> organizations, Integer invoiceAge) { Date invoiceBillingDateFrom = null; Date invoiceBillingDateTo = this.getPastDate(invoiceAge - 1) ; return customerInvoiceDocumentDao.getAllAgingInvoiceDocumentsByProcessing(charts, organizations, invoiceBillingDateFrom, invoiceBillingDateTo); } /** * get the date before the given amount of days */ protected Date getPastDate(Integer amount){ Integer pastDateAmount = -1 * amount; java.util.Date today = this.getDateTimeService().getCurrentDate(); java.util.Date pastDate = DateUtils.addDays(today, pastDateAmount); return KfsDateUtils.convertToSqlDate(pastDate); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllAgingInvoiceDocumentsByCustomerTypes(java.util.List, java.lang.Integer, java.sql.Date) */ @Override public Collection<CustomerInvoiceDocument> getAllAgingInvoiceDocumentsByCustomerTypes(List<String> customerTypes, Integer invoiceAge, Date invoiceDueDateFrom) { Date pastDate = this.getPastDate(invoiceAge - 1) ; Date invoiceDueDateTo = 
KfsDateUtils.convertToSqlDate(DateUtils.addDays(pastDate, 1)); LOG.info("invoiceDueDateTo" + invoiceDueDateTo); return customerInvoiceDocumentDao.getAllAgingInvoiceDocumentsByCustomerTypes(customerTypes, invoiceDueDateFrom, invoiceDueDateTo); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#addCloseNote */ @Override public void addCloseNote(CustomerInvoiceDocument documentToClose, WorkflowDocument closingDocument) { if (!documentToClose.isOpenInvoiceIndicator()) { // If it already is closed, no need to add a note return; } String principalName = "Unknown"; List<ActionTaken> actionsTaken = closingDocument.getActionsTaken(); if(ObjectUtils.isNotNull(actionsTaken)){ ActionTaken completeAction = actionsTaken.get(0); for(ActionTaken action : actionsTaken){ // we're looking for the person who completed the closing document, aren't we? if(new String("C").equals(action.getActionTaken().getCode())){ principalName = getPersonService().getPerson(action.getPrincipalId()).getName(); } } } final String noteTextPattern = getConfigurationService().getPropertyValueAsString(ArKeyConstants.INVOICE_CLOSE_NOTE_TEXT); Object[] arguments = { principalName, closingDocument.getDocumentTypeName(), closingDocument.getDocumentId() }; String noteText = MessageFormat.format(noteTextPattern, arguments); Note note = getDocumentService().createNoteFromDocument(documentToClose, noteText); note.setAuthorUniversalIdentifier(KimApiServiceLocator.getIdentityService().getPrincipalByPrincipalName(KFSConstants.SYSTEM_USER).getPrincipalId()); documentToClose.addNote(noteService.save(note)); } public void setParameterService(ParameterService parameterService) { this.parameterService = parameterService; } public void setPersonService(PersonService personService) { this.personService = personService; } public FinancialSystemUserService getFinancialSystemUserService() { return financialSystemUserService; } public void setFinancialSystemUserService(FinancialSystemUserService financialSystemUserService) { this.financialSystemUserService = financialSystemUserService; } public ConfigurationService getConfigurationService() { return configurationService; } public void setConfigurationService(ConfigurationService configurationService) { this.configurationService = configurationService; } }
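For reference, the aging lookups near the end of the service above build their date window from getPastDate(invoiceAge - 1), and the customer-type variant pushes the due-date upper bound one day further. Below is a minimal, self-contained sketch of that window arithmetic using java.time; the class and method names are illustrative only and are not part of the KFS service.

import java.sql.Date;
import java.time.LocalDate;

// Illustrative sketch of the aging date-window arithmetic used by the
// getAllAgingInvoiceDocumentsBy... methods above; not part of the KFS codebase.
public class AgingWindowSketch {

    // Mirrors getPastDate(amount): today minus the given number of days,
    // returned as java.sql.Date.
    static Date pastDate(int amount) {
        return Date.valueOf(LocalDate.now().minusDays(amount));
    }

    public static void main(String[] args) {
        int invoiceAge = 30; // hypothetical aging bucket, in days

        // Upper bound used by the billing/account/processing aging queries.
        Date invoiceBillingDateTo = pastDate(invoiceAge - 1);

        // Upper due-date bound used by the customer-type query: one day later.
        Date invoiceDueDateTo = Date.valueOf(invoiceBillingDateTo.toLocalDate().plusDays(1));

        System.out.println("invoiceBillingDateTo = " + invoiceBillingDateTo);
        System.out.println("invoiceDueDateTo     = " + invoiceDueDateTo);
    }
}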
work/src/org/kuali/kfs/module/ar/document/service/impl/CustomerInvoiceDocumentServiceImpl.java
/* * Copyright 2008 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.module.ar.document.service.impl; import java.sql.Date; import java.sql.Timestamp; import java.text.MessageFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.time.DateUtils; import org.kuali.kfs.module.ar.ArConstants; import org.kuali.kfs.module.ar.ArKeyConstants; import org.kuali.kfs.module.ar.businessobject.AccountsReceivableDocumentHeader; import org.kuali.kfs.module.ar.businessobject.Customer; import org.kuali.kfs.module.ar.businessobject.CustomerAddress; import org.kuali.kfs.module.ar.businessobject.CustomerBillingStatement; import org.kuali.kfs.module.ar.businessobject.CustomerInvoiceDetail; import org.kuali.kfs.module.ar.businessobject.CustomerInvoiceRecurrenceDetails; import org.kuali.kfs.module.ar.businessobject.InvoicePaidApplied; import org.kuali.kfs.module.ar.businessobject.NonInvoicedDistribution; import org.kuali.kfs.module.ar.businessobject.OrganizationOptions; import org.kuali.kfs.module.ar.document.CustomerInvoiceDocument; import org.kuali.kfs.module.ar.document.dataaccess.CustomerInvoiceDocumentDao; import org.kuali.kfs.module.ar.document.service.AccountsReceivableDocumentHeaderService; import org.kuali.kfs.module.ar.document.service.CustomerAddressService; import org.kuali.kfs.module.ar.document.service.CustomerInvoiceDetailService; import org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService; import org.kuali.kfs.module.ar.document.service.InvoicePaidAppliedService; import org.kuali.kfs.module.ar.document.service.NonInvoicedDistributionService; import org.kuali.kfs.module.ar.document.service.ReceivableAccountingLineService; import org.kuali.kfs.module.ar.report.util.CustomerStatementResultHolder; import org.kuali.kfs.sys.KFSConstants; import org.kuali.kfs.sys.KFSPropertyConstants; import org.kuali.kfs.sys.businessobject.ChartOrgHolder; import org.kuali.kfs.sys.businessobject.FinancialSystemDocumentHeader; import org.kuali.kfs.sys.service.FinancialSystemUserService; import org.kuali.kfs.sys.service.UniversityDateService; import org.kuali.kfs.sys.util.KfsDateUtils; import org.kuali.rice.core.api.config.property.ConfigurationService; import org.kuali.rice.core.api.datetime.DateTimeService; import org.kuali.rice.core.api.util.type.KualiDecimal; import org.kuali.rice.coreservice.framework.parameter.ParameterService; import org.kuali.rice.kew.api.WorkflowDocument; import org.kuali.rice.kew.api.action.ActionTaken; import org.kuali.rice.kew.api.action.WorkflowDocumentActionsService; import org.kuali.rice.kew.api.document.WorkflowDocumentService; import org.kuali.rice.kew.api.exception.WorkflowException; import org.kuali.rice.kim.api.identity.PersonService; import org.kuali.rice.kim.api.identity.principal.Principal; import 
org.kuali.rice.kim.api.services.KimApiServiceLocator; import org.kuali.rice.krad.bo.Note; import org.kuali.rice.krad.dao.DocumentDao; import org.kuali.rice.krad.document.Document; import org.kuali.rice.krad.exception.InfrastructureException; import org.kuali.rice.krad.service.BusinessObjectService; import org.kuali.rice.krad.service.DocumentService; import org.kuali.rice.krad.service.NoteService; import org.kuali.rice.krad.util.GlobalVariables; import org.kuali.rice.krad.util.ObjectUtils; import org.springframework.transaction.annotation.Transactional; @Transactional public class CustomerInvoiceDocumentServiceImpl implements CustomerInvoiceDocumentService { protected static org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(CustomerInvoiceDocumentServiceImpl.class); protected AccountsReceivableDocumentHeaderService accountsReceivableDocumentHeaderService; protected BusinessObjectService businessObjectService; protected CustomerAddressService customerAddressService; protected CustomerInvoiceDetailService customerInvoiceDetailService; protected CustomerInvoiceDocumentDao customerInvoiceDocumentDao; protected CustomerInvoiceRecurrenceDetails customerInvoiceRecurrenceDetails; protected ConfigurationService configurationService; protected DateTimeService dateTimeService; protected DocumentService documentService; protected DocumentDao documentDao; protected FinancialSystemUserService financialSystemUserService; protected InvoicePaidAppliedService<CustomerInvoiceDetail> invoicePaidAppliedService; protected NonInvoicedDistributionService nonInvoicedDistributionService; protected ParameterService parameterService; protected PersonService personService; protected ReceivableAccountingLineService receivableAccountingLineService; protected UniversityDateService universityDateService; protected NoteService noteService; /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#convertDiscountsToPaidApplieds(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public void convertDiscountsToPaidApplieds(CustomerInvoiceDocument invoice) { // this needs a little explanation. we have to calculate manually // whether we've written off the whole thing, because the regular // code uses the invoice paid applieds to discount, but since those // are added but not committed in this transaction, they're also not // visible in this transaction, so we do it manually. 
KualiDecimal openAmount = invoice.getOpenAmount(); String invoiceNumber = invoice.getDocumentNumber(); List<CustomerInvoiceDetail> discounts = invoice.getDiscounts(); // retrieve the number of current paid applieds, so we dont have item number overlap Integer paidAppliedItemNumber = 0; for (CustomerInvoiceDetail discount : discounts) { // if credit amount is zero, do nothing if (KualiDecimal.ZERO.equals(discount.getAmount())) { continue; } if (paidAppliedItemNumber == 0) { paidAppliedItemNumber = invoicePaidAppliedService.getNumberOfInvoicePaidAppliedsForInvoiceDetail(invoiceNumber, discount.getInvoiceItemNumber()); } // create and save the paidApplied InvoicePaidApplied invoicePaidApplied = new InvoicePaidApplied(); invoicePaidApplied.setDocumentNumber(invoiceNumber); invoicePaidApplied.setPaidAppliedItemNumber(paidAppliedItemNumber++); invoicePaidApplied.setFinancialDocumentReferenceInvoiceNumber(invoiceNumber); invoicePaidApplied.setInvoiceItemNumber(discount.getInvoiceItemNumber()); invoicePaidApplied.setUniversityFiscalYear(universityDateService.getCurrentFiscalYear()); invoicePaidApplied.setUniversityFiscalPeriodCode(universityDateService.getCurrentUniversityDate().getUniversityFiscalAccountingPeriod()); invoicePaidApplied.setInvoiceItemAppliedAmount(discount.getAmount().abs()); openAmount = openAmount.subtract(discount.getAmount().abs()); businessObjectService.save(invoicePaidApplied); } // if its open, but now with a zero openamount, then close it if (KualiDecimal.ZERO.equals(openAmount)) { invoice.setOpenInvoiceIndicator(false); invoice.setClosedDate(dateTimeService.getCurrentSqlDate()); documentService.updateDocument(invoice); } } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllOpenCustomerInvoiceDocuments() */ @Override public Collection<CustomerInvoiceDocument> getAllOpenCustomerInvoiceDocuments() { return getAllOpenCustomerInvoiceDocuments(true); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllOpenCustomerInvoiceDocumentsWithoutWorkflow() */ @Override public Collection<CustomerInvoiceDocument> getAllOpenCustomerInvoiceDocumentsWithoutWorkflow() { return getAllOpenCustomerInvoiceDocuments(false); } /** * @param includeWorkflowHeaders * @return */ public Collection<CustomerInvoiceDocument> getAllOpenCustomerInvoiceDocuments(boolean includeWorkflowHeaders) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // retrieve the set of documents without workflow headers invoices = customerInvoiceDocumentDao.getAllOpen(); // if we dont need workflow headers, then we're done if (!includeWorkflowHeaders) { return invoices; } // make a list of necessary workflow docs to retrieve List<String> documentHeaderIds = new ArrayList<String>(); for (CustomerInvoiceDocument invoice : invoices) { documentHeaderIds.add(invoice.getDocumentNumber()); } // get all of our docs with full workflow headers List<CustomerInvoiceDocument> docs = new ArrayList<CustomerInvoiceDocument>(); try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds) ) { docs.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { throw new RuntimeException("Unable to retrieve Customer Invoice Documents", e); } return docs; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#attachWorkflowHeadersToTheInvoices(java.util.Collection) */ @Override public Collection<CustomerInvoiceDocument> 
attachWorkflowHeadersToTheInvoices(Collection<CustomerInvoiceDocument> invoices) { List<CustomerInvoiceDocument> docs = new ArrayList<CustomerInvoiceDocument>(); if (invoices == null || invoices.isEmpty()) { return docs; } // make a list of necessary workflow docs to retrieve List<String> documentHeaderIds = new ArrayList<String>(); for (CustomerInvoiceDocument invoice : invoices) { documentHeaderIds.add(invoice.getDocumentNumber()); } // get all of our docs with full workflow headers try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds) ) { docs.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { throw new RuntimeException("Unable to retrieve Customer Invoice Documents", e); } return docs; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenInvoiceDocumentsByCustomerNumber(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getOpenInvoiceDocumentsByCustomerNumber(String customerNumber) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // customer number is not required to be populated, so we need to check that it's not null first if (StringUtils.isNotEmpty(customerNumber)) { // trim and force-caps the customer number customerNumber = customerNumber.trim().toUpperCase(); } invoices.addAll(customerInvoiceDocumentDao.getOpenByCustomerNumber(customerNumber)); return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenInvoiceDocumentsByCustomerNameByCustomerType(java.lang.String, * java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getOpenInvoiceDocumentsByCustomerNameByCustomerType(String customerName, String customerTypeCode) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // trim and force-caps the customer name customerName = StringUtils.replace(customerName, KFSConstants.WILDCARD_CHARACTER, KFSConstants.PERCENTAGE_SIGN); customerName = customerName.trim(); if (customerName.indexOf("%") < 0) { customerName += "%"; } // trim and force-caps customerTypeCode = customerTypeCode.trim().toUpperCase(); invoices.addAll(customerInvoiceDocumentDao.getOpenByCustomerNameByCustomerType(customerName, customerTypeCode)); return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenInvoiceDocumentsByCustomerName(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getOpenInvoiceDocumentsByCustomerName(String customerName) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // trim and force-caps the customer name customerName = StringUtils.replace(customerName, KFSConstants.WILDCARD_CHARACTER, KFSConstants.PERCENTAGE_SIGN); customerName = customerName.trim(); if (customerName.indexOf("%") < 0) { customerName += "%"; } invoices.addAll(customerInvoiceDocumentDao.getOpenByCustomerName(customerName)); return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenInvoiceDocumentsByCustomerType(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getOpenInvoiceDocumentsByCustomerType(String customerTypeCode) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); // trim and force-caps customerTypeCode = customerTypeCode.trim().toUpperCase(); 
invoices.addAll(customerInvoiceDocumentDao.getOpenByCustomerType(customerTypeCode)); return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDetailsForCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public Collection<CustomerInvoiceDetail> getCustomerInvoiceDetailsForCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { return getCustomerInvoiceDetailsForCustomerInvoiceDocument(customerInvoiceDocument.getDocumentNumber()); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDetailsForCustomerInvoiceDocumentWithCaching(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public Collection<CustomerInvoiceDetail> getCustomerInvoiceDetailsForCustomerInvoiceDocumentWithCaching(CustomerInvoiceDocument customerInvoiceDocument) { return customerInvoiceDetailService.getCustomerInvoiceDetailsForInvoiceWithCaching(customerInvoiceDocument.getDocumentNumber()); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDetailsForCustomerInvoiceDocument(java.lang.String) */ @Override public Collection<CustomerInvoiceDetail> getCustomerInvoiceDetailsForCustomerInvoiceDocument(String customerInvoiceDocumentNumber) { return customerInvoiceDetailService.getCustomerInvoiceDetailsForInvoice(customerInvoiceDocumentNumber); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenAmountForCustomerInvoiceDocument(java.lang.String) */ @Override public KualiDecimal getOpenAmountForCustomerInvoiceDocument(String customerInvoiceDocumentNumber) { if (null == customerInvoiceDocumentNumber) { return null; } return getOpenAmountForCustomerInvoiceDocument(getInvoiceByInvoiceDocumentNumber(customerInvoiceDocumentNumber)); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOpenAmountForCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public KualiDecimal getOpenAmountForCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { KualiDecimal total = new KualiDecimal(0); if (customerInvoiceDocument.isOpenInvoiceIndicator()) { Collection<CustomerInvoiceDetail> customerInvoiceDetails = customerInvoiceDocument.getCustomerInvoiceDetailsWithoutDiscounts(); for (CustomerInvoiceDetail detail : customerInvoiceDetails) { // note that we're now dealing with conditionally applying discounts // depending on whether the doc is saved or approved one level down, // in the CustomerInvoiceDetail.getAmountOpen() detail.setCustomerInvoiceDocument(customerInvoiceDocument); total = total.add(detail.getAmountOpen()); } } return total; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getOriginalTotalAmountForCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public KualiDecimal getOriginalTotalAmountForCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { LOG.info("\n\n\n\t\t invoice: " + customerInvoiceDocument.getDocumentNumber() + "\n\t\t 111111111 HEADER TOTAL AMOUNT (should be null): " + customerInvoiceDocument.getFinancialSystemDocumentHeader().getFinancialDocumentTotalAmount() + "\n\n"); customerInvoiceDocument.getDocumentNumber(); HashMap criteria = new HashMap(); criteria.put(KFSPropertyConstants.DOCUMENT_NUMBER, 
customerInvoiceDocument.getDocumentHeader().getDocumentTemplateNumber()); FinancialSystemDocumentHeader financialSystemDocumentHeader = businessObjectService.findByPrimaryKey(FinancialSystemDocumentHeader.class, criteria); KualiDecimal originalTotalAmount = KualiDecimal.ZERO; originalTotalAmount = financialSystemDocumentHeader.getFinancialDocumentTotalAmount(); LOG.info("\n\n\n\t\t invoice: " + customerInvoiceDocument.getDocumentNumber() + "\n\t\t 333333333333 HEADER TOTAL AMOUNT (should be set now): " + customerInvoiceDocument.getFinancialSystemDocumentHeader().getFinancialDocumentTotalAmount() + "\n\n"); return originalTotalAmount; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getInvoicesByCustomerNumber(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getCustomerInvoiceDocumentsByCustomerNumber(String customerNumber) { Collection<CustomerInvoiceDocument> invoices = new ArrayList<CustomerInvoiceDocument>(); Map<String, String> fieldValues = new HashMap<String, String>(); fieldValues.put("customerNumber", customerNumber); Collection<AccountsReceivableDocumentHeader> documentHeaders = businessObjectService.findMatching(AccountsReceivableDocumentHeader.class, fieldValues); List<String> documentHeaderIds = new ArrayList<String>(); for (AccountsReceivableDocumentHeader header : documentHeaders) { documentHeaderIds.add(header.getDocumentHeader().getDocumentNumber()); } if (0 < documentHeaderIds.size()) { try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds) ) { invoices.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { LOG.error("getCustomerInvoiceDocumentsByCustomerNumber " + customerNumber + " failed", e); } } return invoices; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerByOrganizationInvoiceNumber(java.lang.String) */ @Override public Customer getCustomerByOrganizationInvoiceNumber(String organizationInvoiceNumber) { CustomerInvoiceDocument invoice = getInvoiceByOrganizationInvoiceNumber(organizationInvoiceNumber); return invoice.getAccountsReceivableDocumentHeader().getCustomer(); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getInvoiceByOrganizationInvoiceNumber(java.lang.String) */ @Override public CustomerInvoiceDocument getInvoiceByOrganizationInvoiceNumber(String organizationInvoiceNumber) { return customerInvoiceDocumentDao.getInvoiceByOrganizationInvoiceNumber(organizationInvoiceNumber); } /** * @param invoiceDocumentNumber * @return */ @Override public Customer getCustomerByInvoiceDocumentNumber(String invoiceDocumentNumber) { CustomerInvoiceDocument invoice = getInvoiceByInvoiceDocumentNumber(invoiceDocumentNumber); return invoice.getAccountsReceivableDocumentHeader().getCustomer(); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getInvoiceByInvoiceDocumentNumber(java.lang.String) */ @Override public CustomerInvoiceDocument getInvoiceByInvoiceDocumentNumber(String invoiceDocumentNumber) { return customerInvoiceDocumentDao.getInvoiceByInvoiceDocumentNumber(invoiceDocumentNumber); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPrintableCustomerInvoiceDocumentsByInitiatorPrincipalName(java.lang.String) */ @Override public List<CustomerInvoiceDocument> getPrintableCustomerInvoiceDocumentsByInitiatorPrincipalName(String initiatorPrincipalName) { if 
(StringUtils.isBlank(initiatorPrincipalName)) { throw new IllegalArgumentException("The parameter [initiatorPrincipalName] passed in was null or blank."); } // IMPORTANT NOTES ABOUT THIS METHOD // // This method behaves differently than the other invoice printing methods. This is // because there's no way from within KFS to do a direct DB call to get all the invoices // you want. This is because workflow holds the document initiator, and you cant guarantee // that in a given implementation that you have access to that other db. It could be on // another box in another network, and you only have web-services access to the Rice box. // // Given that, we try to minimize the resource hit of this call as much as possible. First // we retrieve all invoices that havent been printed (ie, dont have a print date) and that // are marked for the USER print queue. At any given time that should be a manageable number of // documents. // // Then we walk through them, retrieve the full workflow-populated version of it, and only // return the ones that match the initiator. // // This isnt as performant a solution as the other getPrintableCustomerInvoiceBy... // methods, but its the best we can do in this release, and it should be manageable. // // attempt to retrieve the initiator person specified, and puke if not found Principal initiator = KimApiServiceLocator.getIdentityService().getPrincipalByPrincipalName(initiatorPrincipalName); if (initiator == null) { throw new IllegalArgumentException("The parameter value for initiatorPrincipalName [" + initiatorPrincipalName + "] passed in doesnt map to a person."); } // retrieve all the ready-to-print docs in the user-queue for all users List<String> printableUserQueueDocNumbers = customerInvoiceDocumentDao.getPrintableCustomerInvoiceDocumentNumbersFromUserQueue(); // get all the documents that might be right, but this set includes documents generated // by the wrong user List<CustomerInvoiceDocument> customerInvoiceDocumentsSuperSet = new ArrayList<CustomerInvoiceDocument>(); if (printableUserQueueDocNumbers.size() > 0) { try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, printableUserQueueDocNumbers) ) { customerInvoiceDocumentsSuperSet.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { throw new RuntimeException("Unable to retrieve Customer Invoice Documents", e); } } else { customerInvoiceDocumentsSuperSet = new ArrayList<CustomerInvoiceDocument>(); } // filter only the ones initiated by the correct user List<CustomerInvoiceDocument> customerInvoiceDocuments = new ArrayList<CustomerInvoiceDocument>(); for (CustomerInvoiceDocument superSetDocument : customerInvoiceDocumentsSuperSet) { if ( StringUtils.equalsIgnoreCase(superSetDocument.getDocumentHeader().getWorkflowDocument().getInitiatorPrincipalId(), initiator.getPrincipalId())) { customerInvoiceDocuments.add(superSetDocument); } } return customerInvoiceDocuments; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPrintableCustomerInvoiceDocumentsByBillingChartAndOrg(java.lang.String, * java.lang.String) */ @Override public List<CustomerInvoiceDocument> getPrintableCustomerInvoiceDocumentsByBillingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getPrintableCustomerInvoiceDocumentNumbersByBillingChartAndOrg(chartOfAccountsCode, organizationCode); return 
getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } protected List<CustomerInvoiceDocument> getCustomerInvoiceDocumentsByDocumentNumbers( List<String> documentHeaderIds ) { List<CustomerInvoiceDocument> customerInvoiceDocuments = new ArrayList<CustomerInvoiceDocument>(documentHeaderIds.size()); if (documentHeaderIds != null && !documentHeaderIds.isEmpty()) { try { for ( Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds) ) { customerInvoiceDocuments.add( (CustomerInvoiceDocument) doc ); } } catch (WorkflowException e) { throw new RuntimeException("Unable to retrieve Customer Invoice Documents", e); } } return customerInvoiceDocuments; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPrintableCustomerInvoiceDocumentsForBillingStatementByBillingChartAndOrg(java.lang.String, * java.lang.String) */ @Override public List<CustomerInvoiceDocument> getPrintableCustomerInvoiceDocumentsForBillingStatementByBillingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getPrintableCustomerInvoiceDocumentNumbersForBillingStatementByBillingChartAndOrg(chartOfAccountsCode, organizationCode); List<CustomerInvoiceDocument> customerInvoiceDocuments = new ArrayList<CustomerInvoiceDocument>(); if (documentHeaderIds != null && !documentHeaderIds.isEmpty()) { try { for (Document doc : documentService.getDocumentsByListOfDocumentHeaderIds(CustomerInvoiceDocument.class, documentHeaderIds)) { customerInvoiceDocuments.add((CustomerInvoiceDocument) doc); } } catch (WorkflowException e) { throw new InfrastructureException("Unable to retrieve Customer Invoice Documents", e); } } return customerInvoiceDocuments; } /** * @see org.kuali.module.ar.service.CustomerInvoiceDocumentService#getCustomerInvoiceDocumentsByCustomerNumber(java.lang.String) */ @Override public List<CustomerInvoiceDocument> getPrintableCustomerInvoiceDocumentsByProcessingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getPrintableCustomerInvoiceDocumentNumbersByProcessingChartAndOrg(chartOfAccountsCode, organizationCode); return getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDocumentsByAccountNumber(java.lang.String) */ @Override public Collection<CustomerInvoiceDocument> getCustomerInvoiceDocumentsByAccountNumber(String accountNumber) { List<String> documentHeaderIds = customerInvoiceDetailService.getCustomerInvoiceDocumentNumbersByAccountNumber(accountNumber); return getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getCustomerInvoiceDocumentsByBillingChartAndOrg(java.lang.String, * java.lang.String) */ @Override public List<CustomerInvoiceDocument> getCustomerInvoiceDocumentsByBillingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getCustomerInvoiceDocumentNumbersByBillingChartAndOrg(chartOfAccountsCode, organizationCode); return getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } /** * @see org.kuali.module.ar.service.CustomerInvoiceDocumentService#getCustomerInvoiceDocumentsByCustomerNumber(java.lang.String) */ @Override public List<CustomerInvoiceDocument> 
getCustomerInvoiceDocumentsByProcessingChartAndOrg(String chartOfAccountsCode, String organizationCode) { List<String> documentHeaderIds = customerInvoiceDocumentDao.getCustomerInvoiceDocumentNumbersByProcessingChartAndOrg(chartOfAccountsCode, organizationCode); return getCustomerInvoiceDocumentsByDocumentNumbers(documentHeaderIds); } /** * Refactor to have all the setters in here. * * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#setupDefaultValuesForNewCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public void setupDefaultValuesForNewCustomerInvoiceDocument(CustomerInvoiceDocument document) { setupBasicDefaultValuesForCustomerInvoiceDocument(document); // set up the default values for the AR DOC Header AccountsReceivableDocumentHeader accountsReceivableDocumentHeader = accountsReceivableDocumentHeaderService.getNewAccountsReceivableDocumentHeaderForCurrentUser(); accountsReceivableDocumentHeader.setDocumentNumber(document.getDocumentNumber()); document.setAccountsReceivableDocumentHeader(accountsReceivableDocumentHeader); // set up the primary key for AR_INV_RCURRNC_DTL_T CustomerInvoiceRecurrenceDetails recurrenceDetails = new CustomerInvoiceRecurrenceDetails(); recurrenceDetails.setInvoiceNumber(document.getDocumentNumber()); // recurrenceDetails.setCustomerNumber(document.getCustomer().getCustomerNumber()); document.setCustomerInvoiceRecurrenceDetails(recurrenceDetails); Map<String, String> criteria = new HashMap<String, String>(); criteria.put(KFSPropertyConstants.CHART_OF_ACCOUNTS_CODE, document.getBillByChartOfAccountCode()); criteria.put(KFSPropertyConstants.ORGANIZATION_CODE, document.getBilledByOrganizationCode()); OrganizationOptions organizationOptions = businessObjectService.findByPrimaryKey(OrganizationOptions.class, criteria); if (ObjectUtils.isNotNull(organizationOptions)) { document.setPrintInvoiceIndicator(organizationOptions.getPrintInvoiceIndicator()); document.setInvoiceTermsText(organizationOptions.getOrganizationPaymentTermsText()); } // If document is using receivable option, set receivable accounting line for customer invoice document String receivableOffsetOption = parameterService.getParameterValueAsString(CustomerInvoiceDocument.class, ArConstants.GLPE_RECEIVABLE_OFFSET_GENERATION_METHOD); boolean isUsingReceivableFAU = ArConstants.GLPE_RECEIVABLE_OFFSET_GENERATION_METHOD_FAU.equals(receivableOffsetOption); if (isUsingReceivableFAU) { receivableAccountingLineService.setReceivableAccountingLineForCustomerInvoiceDocument(document); } } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#loadCustomerAddressesForCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public void loadCustomerAddressesForCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { // if address identifier is provided, try to refresh customer address data if (ObjectUtils.isNotNull(customerInvoiceDocument.getAccountsReceivableDocumentHeader())) { CustomerAddress customerShipToAddress = customerAddressService.getByPrimaryKey(customerInvoiceDocument.getAccountsReceivableDocumentHeader().getCustomerNumber(), customerInvoiceDocument.getCustomerShipToAddressIdentifier()); CustomerAddress customerBillToAddress = customerAddressService.getByPrimaryKey(customerInvoiceDocument.getAccountsReceivableDocumentHeader().getCustomerNumber(), customerInvoiceDocument.getCustomerBillToAddressIdentifier()); if 
(ObjectUtils.isNotNull(customerShipToAddress)) { customerInvoiceDocument.setCustomerShipToAddress(customerShipToAddress); customerInvoiceDocument.setCustomerShipToAddressOnInvoice(customerShipToAddress); } if (ObjectUtils.isNotNull(customerBillToAddress)) { customerInvoiceDocument.setCustomerBillToAddress(customerBillToAddress); customerInvoiceDocument.setCustomerBillToAddressOnInvoice(customerBillToAddress); } } } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#setupDefaultValuesForCopiedCustomerInvoiceDocument(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public void setupDefaultValuesForCopiedCustomerInvoiceDocument(CustomerInvoiceDocument document) { setupBasicDefaultValuesForCustomerInvoiceDocument(document); // Save customer number since it will get overwritten when we retrieve the accounts receivable document header from service String customerNumber = document.getAccountsReceivableDocumentHeader().getCustomerNumber(); // Set up the default values for the AR DOC Header AccountsReceivableDocumentHeader accountsReceivableDocumentHeader = accountsReceivableDocumentHeaderService.getNewAccountsReceivableDocumentHeaderForCurrentUser(); accountsReceivableDocumentHeader.setDocumentNumber(document.getDocumentNumber()); accountsReceivableDocumentHeader.setCustomerNumber(customerNumber); document.setAccountsReceivableDocumentHeader(accountsReceivableDocumentHeader); // set up the primary key for AR_INV_RCURRNC_DTL_T CustomerInvoiceRecurrenceDetails recurrenceDetails = new CustomerInvoiceRecurrenceDetails(); recurrenceDetails.setInvoiceNumber(document.getDocumentNumber()); // recurrenceDetails.setCustomerNumber(document.getAccountsReceivableDocumentHeader().getCustomerNumber()); document.setCustomerInvoiceRecurrenceDetails(recurrenceDetails); // make open invoice indicator to true document.setOpenInvoiceIndicator(true); document.setPrintDate(null); document.setBillingDate(dateTimeService.getCurrentSqlDateMidnight()); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getNonInvoicedDistributionsForInvoice(java.lang.String) */ @Override public Collection<NonInvoicedDistribution> getNonInvoicedDistributionsForInvoice(String documentNumber) { return nonInvoicedDistributionService.getNonInvoicedDistributionsForInvoice(documentNumber); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getNonInvoicedTotalForInvoice(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public KualiDecimal getNonInvoicedTotalForInvoice(CustomerInvoiceDocument invoice) { Collection<NonInvoicedDistribution> payments = this.nonInvoicedDistributionService.getNonInvoicedDistributionsForInvoice(invoice); KualiDecimal total = new KualiDecimal(0); for (NonInvoicedDistribution payment : payments) { total = total.add(payment.getFinancialDocumentLineAmount()); } return total; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getNonInvoicedTotalForInvoice(java.lang.String) */ @Override public KualiDecimal getNonInvoicedTotalForInvoice(String documentNumber) { return getNonInvoicedTotalForInvoice(getInvoiceByInvoiceDocumentNumber(documentNumber)); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPaidAppliedTotalForInvoice(org.kuali.kfs.module.ar.document.CustomerInvoiceDocument) */ @Override public KualiDecimal getPaidAppliedTotalForInvoice(CustomerInvoiceDocument invoice) { Collection<InvoicePaidApplied> payments = 
invoicePaidAppliedService.getInvoicePaidAppliedsForInvoice(invoice); KualiDecimal total = new KualiDecimal(0); for (InvoicePaidApplied payment : payments) { total = total.add(payment.getInvoiceItemAppliedAmount()); } return total; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getPaidAppliedTotalForInvoice(java.lang.String) */ @Override public KualiDecimal getPaidAppliedTotalForInvoice(String documentNumber) { return getPaidAppliedTotalForInvoice(getInvoiceByInvoiceDocumentNumber(documentNumber)); } /** * @param document */ protected void setupBasicDefaultValuesForCustomerInvoiceDocument(CustomerInvoiceDocument document) { ChartOrgHolder currentUser = financialSystemUserService.getPrimaryOrganization(GlobalVariables.getUserSession().getPerson(), ArConstants.AR_NAMESPACE_CODE); if (currentUser != null) { document.setBillByChartOfAccountCode(currentUser.getChartOfAccountsCode()); document.setBilledByOrganizationCode(currentUser.getOrganizationCode()); } document.setInvoiceDueDate(getDefaultInvoiceDueDate()); document.setOpenInvoiceIndicator(true); } /** * This method sets due date equal to todays date +30 days by default * * @param dateTimeService */ protected Date getDefaultInvoiceDueDate() { Calendar cal = dateTimeService.getCurrentCalendar(); cal.add(Calendar.DATE, 30); Date sqlDueDate = null; try { sqlDueDate = dateTimeService.convertToSqlDate(new Timestamp(cal.getTime().getTime())); } catch (ParseException e) { // TODO: throw an error here, but don't die } return sqlDueDate; } @Override public void closeCustomerInvoiceDocument(CustomerInvoiceDocument customerInvoiceDocument) { customerInvoiceDocument.setOpenInvoiceIndicator(false); customerInvoiceDocument.setClosedDate(dateTimeService.getCurrentSqlDate()); businessObjectService.save(customerInvoiceDocument); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#updateReportedDate(String) */ @Override public void updateReportedDate(String docNumber) { HashMap<String, String> criteria = new HashMap<String, String>(); criteria.put("documentNumber", docNumber); CustomerInvoiceDocument customerInvoiceDocument = businessObjectService.findByPrimaryKey(CustomerInvoiceDocument.class, criteria); Date reportedDate = dateTimeService.getCurrentSqlDate(); if (ObjectUtils.isNotNull(customerInvoiceDocument)) { customerInvoiceDocument.setReportedDate(reportedDate); businessObjectService.save(customerInvoiceDocument); } } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#updateReportedInvoiceInfo(CustomerStatementResultHolder) */ @Override public void updateReportedInvoiceInfo(CustomerStatementResultHolder data) { HashMap<String, String> criteria = new HashMap<String, String>(); criteria.put("customerNumber", data.getCustomerNumber()); CustomerBillingStatement customerBillingStatement = businessObjectService.findByPrimaryKey(CustomerBillingStatement.class, criteria); if (ObjectUtils.isNotNull(customerBillingStatement)) { customerBillingStatement.setPreviouslyBilledAmount(data.getCurrentBilledAmount()); customerBillingStatement.setReportedDate(dateTimeService.getCurrentSqlDate()); } else { customerBillingStatement = new CustomerBillingStatement(); customerBillingStatement.setCustomerNumber(data.getCustomerNumber()); customerBillingStatement.setPreviouslyBilledAmount(data.getCurrentBilledAmount()); customerBillingStatement.setReportedDate(dateTimeService.getCurrentSqlDate()); } businessObjectService.save(customerBillingStatement); } public 
CustomerInvoiceDocumentDao getCustomerInvoiceDocumentDao() { return customerInvoiceDocumentDao; } public void setCustomerInvoiceDocumentDao(CustomerInvoiceDocumentDao customerInvoiceDocumentDao) { this.customerInvoiceDocumentDao = customerInvoiceDocumentDao; } public DocumentService getDocumentService() { return documentService; } public void setDocumentService(DocumentService documentService) { this.documentService = documentService; } public BusinessObjectService getBusinessObjectService() { return businessObjectService; } public void setBusinessObjectService(BusinessObjectService businessObjectService) { this.businessObjectService = businessObjectService; } public DateTimeService getDateTimeService() { return dateTimeService; } public void setDateTimeService(DateTimeService dateTimeService) { this.dateTimeService = dateTimeService; } public ReceivableAccountingLineService getReceivableAccountingLineService() { return receivableAccountingLineService; } public void setReceivableAccountingLineService(ReceivableAccountingLineService receivableAccountingLineService) { this.receivableAccountingLineService = receivableAccountingLineService; } public AccountsReceivableDocumentHeaderService getAccountsReceivableDocumentHeaderService() { return accountsReceivableDocumentHeaderService; } public void setAccountsReceivableDocumentHeaderService(AccountsReceivableDocumentHeaderService accountsReceivableDocumentHeaderService) { this.accountsReceivableDocumentHeaderService = accountsReceivableDocumentHeaderService; } public CustomerAddressService getCustomerAddressService() { return customerAddressService; } public void setCustomerAddressService(CustomerAddressService customerAddressService) { this.customerAddressService = customerAddressService; } public void setDocumentDao(DocumentDao documentDao) { this.documentDao = documentDao; } public void setInvoicePaidAppliedService(InvoicePaidAppliedService invoicePaidAppliedService) { this.invoicePaidAppliedService = invoicePaidAppliedService; } public void setNonInvoicedDistributionService(NonInvoicedDistributionService nonInvoicedDistributionService) { this.nonInvoicedDistributionService = nonInvoicedDistributionService; } public void setCustomerInvoiceDetailService(CustomerInvoiceDetailService customerInvoiceDetailService) { this.customerInvoiceDetailService = customerInvoiceDetailService; } public void setUniversityDateService(UniversityDateService universityDateService) { this.universityDateService = universityDateService; } public void setNoteService(NoteService noteService) { this.noteService = noteService; } /** * @return Returns the personService. 
*/ public PersonService getPersonService() { return personService; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#checkIfInvoiceNumberIsFinal(java.lang.String) */ @Override public boolean checkIfInvoiceNumberIsFinal(String invDocumentNumber) { boolean isSuccess = true; if (StringUtils.isBlank(invDocumentNumber)) { isSuccess &= false; } else { CustomerInvoiceDocument customerInvoiceDocument = getInvoiceByInvoiceDocumentNumber(invDocumentNumber); if (ObjectUtils.isNull(customerInvoiceDocument)) { isSuccess &= false; } else { Document doc = null; try { doc = documentService.getByDocumentHeaderId(invDocumentNumber); } catch (WorkflowException e) { isSuccess &= false; } if (ObjectUtils.isNull(doc) || ObjectUtils.isNull(doc.getDocumentHeader()) || doc.getDocumentHeader().getWorkflowDocument() == null || !(doc.getDocumentHeader().getWorkflowDocument().isApproved() || doc.getDocumentHeader().getWorkflowDocument().isProcessed())) { isSuccess &= false; } } } return isSuccess; } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllAgingInvoiceDocumentsByBilling(java.util.List, java.util.List, java.lang.Integer) */ @Override public Collection<CustomerInvoiceDocument> getAllAgingInvoiceDocumentsByBilling(List<String> charts, List<String> organizations, Integer invoiceAge) { Date invoiceBillingDateFrom = null; Date invoiceBillingDateTo = this.getPastDate(invoiceAge - 1) ; return customerInvoiceDocumentDao.getAllAgingInvoiceDocumentsByBilling(charts, organizations, invoiceBillingDateFrom, invoiceBillingDateTo); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllAgingInvoiceDocumentsByAccounts(java.util.List, java.util.List, java.lang.Integer) */ @Override public Collection<CustomerInvoiceDocument> getAllAgingInvoiceDocumentsByAccounts(List<String> charts, List<String> accounts, Integer invoiceAge) { Date invoiceBillingDateFrom = null; Date invoiceBillingDateTo = this.getPastDate(invoiceAge - 1) ; return customerInvoiceDocumentDao.getAllAgingInvoiceDocumentsByAccounts(charts, accounts, invoiceBillingDateFrom, invoiceBillingDateTo); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllAgingInvoiceDocumentsByProcessing(java.util.List, java.util.List, java.lang.Integer) */ @Override public Collection<CustomerInvoiceDocument> getAllAgingInvoiceDocumentsByProcessing(List<String> charts, List<String> organizations, Integer invoiceAge) { Date invoiceBillingDateFrom = null; Date invoiceBillingDateTo = this.getPastDate(invoiceAge - 1) ; return customerInvoiceDocumentDao.getAllAgingInvoiceDocumentsByProcessing(charts, organizations, invoiceBillingDateFrom, invoiceBillingDateTo); } /** * get the date before the given amount of days */ protected Date getPastDate(Integer amount){ Integer pastDateAmount = -1 * amount; java.util.Date today = this.getDateTimeService().getCurrentDate(); java.util.Date pastDate = DateUtils.addDays(today, pastDateAmount); return KfsDateUtils.convertToSqlDate(pastDate); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#getAllAgingInvoiceDocumentsByCustomerTypes(java.util.List, java.lang.Integer, java.sql.Date) */ @Override public Collection<CustomerInvoiceDocument> getAllAgingInvoiceDocumentsByCustomerTypes(List<String> customerTypes, Integer invoiceAge, Date invoiceDueDateFrom) { Date pastDate = this.getPastDate(invoiceAge - 1) ; Date invoiceDueDateTo = 
KfsDateUtils.convertToSqlDate(DateUtils.addDays(pastDate, 1)); LOG.info("invoiceDueDateTo" + invoiceDueDateTo); return customerInvoiceDocumentDao.getAllAgingInvoiceDocumentsByCustomerTypes(customerTypes, invoiceDueDateFrom, invoiceDueDateTo); } /** * @see org.kuali.kfs.module.ar.document.service.CustomerInvoiceDocumentService#addCloseNote */ @Override public void addCloseNote(CustomerInvoiceDocument documentToClose, WorkflowDocument closingDocument) { if (!documentToClose.isOpenInvoiceIndicator()) { // If it already is closed, no need to add a note return; } String principalName = "Unknown"; List<ActionTaken> actionsTaken = closingDocument.getActionsTaken(); if(ObjectUtils.isNotNull(actionsTaken)){ ActionTaken completeAction = actionsTaken.get(0); for(ActionTaken action : actionsTaken){ // we're looking for the person who completed the closing document, aren't we? if(new String("C").equals(action.getActionTaken().getCode())){ principalName = SpringContext.getBean(PersonService.class).getPerson(action.getPrincipalId()).getName(); } } } final String noteTextPattern = SpringContext.getBean(ConfigurationService.class).getPropertyValueAsString(ArKeyConstants.INVOICE_CLOSE_NOTE_TEXT); Object[] arguments = { principalName, closingDocument.getDocumentTypeName(), closingDocument.getDocumentId() }; String noteText = MessageFormat.format(noteTextPattern, arguments); final String noteTextPattern = getConfigurationService().getPropertyValueAsString(ArKeyConstants.INVOICE_CLOSE_NOTE_TEXT); Object[] arguments = { principalName, closingDocumentTypeCode, closingDocumentNumber }; String noteText = MessageFormat.format(noteTextPattern, arguments); Note note = getDocumentService().createNoteFromDocument(documentToClose, noteText); note.setAuthorUniversalIdentifier(KimApiServiceLocator.getIdentityService().getPrincipalByPrincipalName(KFSConstants.SYSTEM_USER).getPrincipalId()); documentToClose.addNote(noteService.save(note)); } public void setParameterService(ParameterService parameterService) { this.parameterService = parameterService; } public void setPersonService(PersonService personService) { this.personService = personService; } public FinancialSystemUserService getFinancialSystemUserService() { return financialSystemUserService; } public void setFinancialSystemUserService(FinancialSystemUserService financialSystemUserService) { this.financialSystemUserService = financialSystemUserService; } public ConfigurationService getConfigurationService() { return configurationService; } public void setConfigurationService(ConfigurationService configurationService) { this.configurationService = configurationService; } }
Fix compile errors from merge
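The compile errors this commit removes sit in addCloseNote: the merge kept both halves of a conflict, so noteTextPattern, arguments, and noteText are each declared twice in the same scope, the second arguments initializer references closingDocumentTypeCode and closingDocumentNumber, which do not exist in the method, and SpringContext is referenced without a corresponding import. Condensed from the two file versions above (surrounding method body omitted), the broken block and its resolution look like this:

// old_contents (does not compile): both sides of the merge conflict survived
final String noteTextPattern = SpringContext.getBean(ConfigurationService.class)
        .getPropertyValueAsString(ArKeyConstants.INVOICE_CLOSE_NOTE_TEXT);
Object[] arguments = { principalName, closingDocument.getDocumentTypeName(), closingDocument.getDocumentId() };
String noteText = MessageFormat.format(noteTextPattern, arguments);
final String noteTextPattern = getConfigurationService()                  // error: noteTextPattern is already defined
        .getPropertyValueAsString(ArKeyConstants.INVOICE_CLOSE_NOTE_TEXT);
Object[] arguments = { principalName, closingDocumentTypeCode, closingDocumentNumber }; // error: duplicate local, unresolved symbols
String noteText = MessageFormat.format(noteTextPattern, arguments);       // error: noteText is already defined

// new_contents (compiles): one declaration of each, using the injected services
final String noteTextPattern = getConfigurationService()
        .getPropertyValueAsString(ArKeyConstants.INVOICE_CLOSE_NOTE_TEXT);
Object[] arguments = { principalName, closingDocument.getDocumentTypeName(), closingDocument.getDocumentId() };
String noteText = MessageFormat.format(noteTextPattern, arguments);

The same resolution also swaps the SpringContext.getBean(PersonService.class) lookup for the injected getPersonService() accessor.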
work/src/org/kuali/kfs/module/ar/document/service/impl/CustomerInvoiceDocumentServiceImpl.java
Fix compile errors from merge
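One more detail from the invoice service in this record: getDefaultInvoiceDueDate() simply adds 30 days to the current date and converts the result to java.sql.Date. A standalone restatement of that arithmetic follows, with the DateTimeService plumbing omitted and a hypothetical class name.

import java.sql.Date;
import java.util.Calendar;

// Restates the "+30 days" default due date from getDefaultInvoiceDueDate();
// the class name is hypothetical and the original's DateTimeService and
// ParseException handling are intentionally left out.
public class DefaultDueDateSketch {

    static Date defaultInvoiceDueDate() {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DATE, 30);              // today + 30 days
        return new Date(cal.getTimeInMillis());  // as java.sql.Date
    }

    public static void main(String[] args) {
        System.out.println("default invoice due date: " + defaultInvoiceDueDate());
    }
}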
Java
agpl-3.0
899743cfd310894a9a15d4921dd8640dc3a10232
0
VoltDB/voltdb,deerwalk/voltdb,migue/voltdb,zuowang/voltdb,VoltDB/voltdb,zuowang/voltdb,VoltDB/voltdb,ingted/voltdb,creative-quant/voltdb,creative-quant/voltdb,migue/voltdb,wolffcm/voltdb,kumarrus/voltdb,deerwalk/voltdb,creative-quant/voltdb,wolffcm/voltdb,migue/voltdb,kumarrus/voltdb,ingted/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,zuowang/voltdb,flybird119/voltdb,deerwalk/voltdb,zuowang/voltdb,wolffcm/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,migue/voltdb,wolffcm/voltdb,migue/voltdb,paulmartel/voltdb,ingted/voltdb,flybird119/voltdb,deerwalk/voltdb,flybird119/voltdb,VoltDB/voltdb,zuowang/voltdb,paulmartel/voltdb,kumarrus/voltdb,paulmartel/voltdb,kumarrus/voltdb,kumarrus/voltdb,paulmartel/voltdb,ingted/voltdb,migue/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,flybird119/voltdb,creative-quant/voltdb,creative-quant/voltdb,flybird119/voltdb,paulmartel/voltdb,kumarrus/voltdb,migue/voltdb,paulmartel/voltdb,paulmartel/voltdb,creative-quant/voltdb,ingted/voltdb,zuowang/voltdb,ingted/voltdb,wolffcm/voltdb,ingted/voltdb,ingted/voltdb,creative-quant/voltdb,flybird119/voltdb,kumarrus/voltdb,deerwalk/voltdb,creative-quant/voltdb,simonzhangsm/voltdb,flybird119/voltdb,kumarrus/voltdb,simonzhangsm/voltdb,wolffcm/voltdb,flybird119/voltdb,wolffcm/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,zuowang/voltdb,migue/voltdb,zuowang/voltdb,paulmartel/voltdb,wolffcm/voltdb,simonzhangsm/voltdb
/* This file is part of VoltDB. * Copyright (C) 2008-2013 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.utils; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; import org.supercsv.cellprocessor.Optional; import org.supercsv.cellprocessor.ift.CellProcessor; import org.voltcore.logging.VoltLogger; import org.voltdb.CLIConfig; import org.voltdb.VoltTable; import org.voltdb.VoltType; import org.voltdb.client.Client; import org.voltdb.client.ClientConfig; import org.voltdb.client.ClientFactory; import org.voltdb.client.ClientResponse; import org.voltdb.client.ProcedureCallback; import au.com.bytecode.opencsv_voltpatches.CSVParser; import au.com.bytecode.opencsv_voltpatches.CSVReader; /** * CSVLoader is a simple utility to load data from a CSV formatted file to a * table (or pass it to any stored proc, but ignoring any result other than the * success code.). 
*/ public class CSVLoader { public static String pathInvalidrowfile = ""; public static String pathReportfile = "csvloaderReport.log"; public static String pathLogfile = "csvloaderLog.log"; protected static final VoltLogger m_log = new VoltLogger("CONSOLE"); private static final AtomicLong inCount = new AtomicLong(0); private static final AtomicLong outCount = new AtomicLong(0); private static final int reportEveryNRows = 10000; private static final int waitSeconds = 10; private static CSVConfig config = null; private static long latency = 0; private static long start = 0; private static boolean standin = false; private static BufferedWriter out_invaliderowfile; private static BufferedWriter out_logfile; private static BufferedWriter out_reportfile; private static String insertProcedure = ""; private static Map<Long, String[]> errorInfo = new TreeMap<Long, String[]>(); private static int columnCnt = 0; private static boolean isProcExist = false; private static Map <VoltType, String> blankValues = new HashMap<VoltType, String>(); static { blankValues.put(VoltType.NUMERIC, "0"); blankValues.put(VoltType.TINYINT, "0"); blankValues.put(VoltType.SMALLINT, "0"); blankValues.put(VoltType.INTEGER, "0"); blankValues.put(VoltType.BIGINT, "0"); blankValues.put(VoltType.FLOAT, "0.0"); blankValues.put(VoltType.TIMESTAMP, "0"); blankValues.put(VoltType.STRING, ""); blankValues.put(VoltType.DECIMAL, "0"); blankValues.put(VoltType.VARBINARY, ""); } private static List <VoltType> typeList = new ArrayList<VoltType>(); private static final class MyCallback implements ProcedureCallback { private final long m_lineNum; private final CSVConfig m_config; private final String m_rowdata; MyCallback(long lineNumber, CSVConfig cfg, String rowdata) { m_lineNum = lineNumber; m_config = cfg; m_rowdata = rowdata; } @Override public void clientCallback(ClientResponse response) throws Exception { if (response.getStatus() != ClientResponse.SUCCESS) { m_log.error( response.getStatusString() ); synchronized (errorInfo) { if (!errorInfo.containsKey(m_lineNum)) { String[] info = { m_rowdata, response.getStatusString() }; errorInfo.put(m_lineNum, info); } if (errorInfo.size() >= m_config.maxerrors) { m_log.error("The number of Failure row data exceeds " + m_config.maxerrors); produceFiles(); close_cleanup(); System.exit(-1); } } return; } long currentCount = inCount.incrementAndGet(); if (currentCount % reportEveryNRows == 0) { m_log.info( "Inserted " + currentCount + " rows" ); } } } private static class CSVConfig extends CLIConfig { @Option(shortOpt = "f", desc = "location of CSV input file") String file = ""; @Option(shortOpt = "p", desc = "procedure name to insert the data into the database") String procedure = ""; @Option(desc = "maximum rows to be read from the CSV file") int limitrows = Integer.MAX_VALUE; @Option(shortOpt = "r", desc = "directory path for report files") String reportdir = System.getProperty("user.dir"); @Option(shortOpt = "m", desc = "maximum errors allowed") int maxerrors = 100; @Option(desc = "different ways to handle blank items: {error|null|empty} (default: error)") String blank = "error"; @Option(desc = "delimiter to use for separating entries") char separator = CSVParser.DEFAULT_SEPARATOR; @Option(desc = "character to use for quoted elements (default: \")") char quotechar = CSVParser.DEFAULT_QUOTE_CHARACTER; @Option(desc = "character to use for escaping a separator or quote (default: \\)") char escape = CSVParser.DEFAULT_ESCAPE_CHARACTER; @Option(desc = "require all input values to be enclosed in 
quotation marks", hasArg = false) boolean strictquotes = CSVParser.DEFAULT_STRICT_QUOTES; @Option(desc = "number of lines to skip before inserting rows into the database") int skip = CSVReader.DEFAULT_SKIP_LINES; @Option(desc = "do not allow whitespace between values and separators", hasArg = false) boolean nowhitespace = !CSVParser.DEFAULT_IGNORE_LEADING_WHITESPACE; @Option(shortOpt = "s", desc = "list of servers to connect to (default: localhost)") String servers = "localhost"; @Option(desc = "username when connecting to the servers") String user = ""; @Option(desc = "password to use when connecting to servers") String password = ""; @Option(desc = "port to use when connecting to database (default: 21212)") int port = Client.VOLTDB_SERVER_PORT; @AdditionalArgs(desc = "insert the data into database by TABLENAME.insert procedure by default") String table = ""; @Override public void validate() { if (maxerrors < 0) exitWithMessageAndUsage("abortfailurecount must be >=0"); if (procedure.equals("") && table.equals("")) exitWithMessageAndUsage("procedure name or a table name required"); if (!procedure.equals("") && !table.equals("")) exitWithMessageAndUsage("Only a procedure name or a table name required, pass only one please"); if (skip < 0) exitWithMessageAndUsage("skipline must be >= 0"); if (limitrows > Integer.MAX_VALUE) exitWithMessageAndUsage("limitrows to read must be < " + Integer.MAX_VALUE); if (port < 0) exitWithMessageAndUsage("port number must be >= 0"); if ((blank.equalsIgnoreCase("error") || blank.equalsIgnoreCase("null") || blank.equalsIgnoreCase("empty")) == false) exitWithMessageAndUsage("blank configuration specified must be one of {error|null|empty}"); } @Override public void printUsage() { System.out .println("Usage: csvloader [args] tablename"); System.out .println(" csvloader [args] -p procedurename"); super.printUsage(); } } /** * Sets up the processors used for the SuperCSV Reader. 
* * @return the cell processors */ private static CellProcessor[] getProcessors() { final CellProcessor[] processors = new CellProcessor[columnCnt]; Arrays.fill( processors, new Optional()); return processors; } public static void main(String[] args) throws IOException, InterruptedException { start = System.currentTimeMillis(); int waits = 0; int shortWaits = 0; CSVConfig cfg = new CSVConfig(); cfg.parse(CSVLoader.class.getName(), args); config = cfg; configuration(); CSVReader csvReader = null; try { if (CSVLoader.standin) csvReader = new CSVReader(new BufferedReader( new InputStreamReader(System.in)), config.separator, config.quotechar, config.escape, config.skip, config.strictquotes, config.nowhitespace); else csvReader = new CSVReader(new FileReader(config.file), config.separator, config.quotechar, config.escape, config.skip, config.strictquotes, config.nowhitespace); } catch (FileNotFoundException e) { m_log.error("CSV file '" + config.file + "' could not be found."); System.exit(-1); } assert(csvReader != null); // Split server list String[] serverlist = config.servers.split(","); // Create connection ClientConfig c_config = new ClientConfig(config.user, config.password); c_config.setProcedureCallTimeout(0); // Set procedure call to infinite // timeout, see ENG-2670 Client csvClient = null; try { csvClient = CSVLoader.getClient(c_config, serverlist, config.port); } catch (Exception e) { m_log.error("Error to connect to the servers:" + config.servers); close_cleanup(); System.exit(-1); } assert(csvClient != null); try { ProcedureCallback cb = null; boolean lastOK = true; String line[] = null; while ((config.limitrows-- > 0) && (line = csvReader.readNext()) != null) { outCount.incrementAndGet(); boolean queued = false; while (queued == false) { StringBuilder linedata = new StringBuilder(); for (int i = 0; i < line.length; i++) { linedata.append("\"" + line[i] + "\""); if (i != line.length - 1) linedata.append(","); } String[] correctedLine = line; cb = new MyCallback(outCount.get(), config, linedata.toString()); String lineCheckResult; if ((lineCheckResult = checkparams_trimspace(correctedLine, columnCnt)) != null) { synchronized (errorInfo) { if (!errorInfo.containsKey(outCount.get())) { String[] info = { linedata.toString(), lineCheckResult }; errorInfo.put(outCount.get(), info); } if (errorInfo.size() >= config.maxerrors) { m_log.error("The number of Failure row data exceeds " + config.maxerrors); produceFiles(); close_cleanup(); System.exit(-1); } } break; } queued = csvClient.callProcedure(cb, insertProcedure, (Object[]) correctedLine); if (queued == false) { ++waits; if (lastOK == false) { ++shortWaits; } Thread.sleep(waitSeconds); } lastOK = queued; } } csvClient.drain(); } catch (Exception e) { e.printStackTrace(); } m_log.info("Inserted " + outCount.get() + " and acknowledged " + inCount.get() + " rows (final)"); if (waits > 0) { m_log.info("Waited " + waits + " times"); if (shortWaits > 0) { m_log.info( "Waited too briefly? " + shortWaits + " times" ); } } produceFiles(); close_cleanup(); csvReader.close(); csvClient.close(); } private static String checkparams_trimspace(String[] slot, int columnCnt) { if (slot.length == 1 && slot[0].equals("")) { return "Error: blank line"; } if (slot.length != columnCnt) { return "Error: Incorrect number of columns. " + slot.length + " found, " + columnCnt + " expected."; } for (int i = 0; i < slot.length; i++) { // trim white space in this line. 
slot[i] = slot[i].trim(); // treat NULL, \N and "\N" as actual null value if ((slot[i]).equals("NULL") || slot[i].equals(VoltTable.CSV_NULL) || !config.strictquotes && slot[i].equals(VoltTable.QUOTED_CSV_NULL)) slot[i] = null; else if (slot[i].equals("")) { if (config.blank.equalsIgnoreCase("null") ) slot[i] = null; else if (config.blank.equalsIgnoreCase("empty")) slot[i] = blankValues.get(typeList.get(i)); } } return null; } private static void configuration() throws IOException, InterruptedException { if (config.file.equals("")) standin = true; if (!config.table.equals("")) { insertProcedure = config.table.toUpperCase() + ".insert"; } else { insertProcedure = config.procedure; } if (!config.reportdir.endsWith("/")) config.reportdir += "/"; try { File dir = new File(config.reportdir); if (!dir.exists()) { dir.mkdirs(); } } catch (Exception x) { m_log.error(x.getMessage(), x); System.exit(-1); } String myinsert = insertProcedure; myinsert = myinsert.replaceAll("\\.", "_"); pathInvalidrowfile = config.reportdir + "csvloader_" + myinsert + "_" + "invalidrows.csv"; pathLogfile = config.reportdir + "csvloader_" + myinsert + "_" + "log.log"; pathReportfile = config.reportdir + "csvloader_" + myinsert + "_" + "report.log"; try { out_invaliderowfile = new BufferedWriter(new FileWriter( pathInvalidrowfile)); out_logfile = new BufferedWriter(new FileWriter(pathLogfile)); out_reportfile = new BufferedWriter(new FileWriter(pathReportfile)); } catch (IOException e) { m_log.error(e.getMessage()); System.exit(-1); } // Split server list String[] serverlist = config.servers.split(","); // Create connection ClientConfig c_config = new ClientConfig(config.user, config.password); c_config.setProcedureCallTimeout(0); // Set procedure call to infinite // timeout, see ENG-2670 Client csvClient = null; try { csvClient = CSVLoader.getClient(c_config, serverlist, config.port); } catch (Exception e) { m_log.error("Error to connect to the servers:" + config.servers); close_cleanup(); System.exit(-1); } assert(csvClient != null); try { columnCnt = 0; VoltTable procInfo = null; isProcExist = false; try { procInfo = csvClient.callProcedure("@SystemCatalog", "PROCEDURECOLUMNS").getResults()[0]; while (procInfo.advanceRow()) { if (insertProcedure.matches((String) procInfo.get( "PROCEDURE_NAME", VoltType.STRING))) { columnCnt++; isProcExist = true; String typeStr = (String)procInfo.get("TYPE_NAME", VoltType.STRING); typeList.add(VoltType.typeFromString(typeStr)); } } } catch (Exception e) { m_log.error(e.getMessage(), e); close_cleanup(); System.exit(-1); } if (isProcExist == false) { m_log.error("No matching insert procedure available"); close_cleanup(); System.exit(-1); } } catch (Exception e) { e.printStackTrace(); } csvClient.close(); } private static Client getClient(ClientConfig config, String[] servers, int port) throws Exception { final Client client = ClientFactory.createClient(config); for (String server : servers) client.createConnection(server.trim(), port); return client; } private static void produceFiles() { latency = System.currentTimeMillis() - start; m_log.info("CSVLoader elapsed: " + latency / 1000F + " seconds"); int bulkflush = 300; // by default right now try { long linect = 0; for (Long irow : errorInfo.keySet()) { String info[] = errorInfo.get(irow); if (info.length != 2) System.out .println("internal error, information is not enough"); linect++; out_invaliderowfile.write(info[0] + "\n"); String message = "Invalid input on line " + irow + ".\n Contents:" + info[0]; m_log.error(message); 
out_logfile.write(message + "\n " + info[1] + "\n"); if (linect % bulkflush == 0) { out_invaliderowfile.flush(); out_logfile.flush(); } } // Get elapsed time in seconds float elapsedTimeSec = latency / 1000F; out_reportfile.write("csvloader elaspsed: " + elapsedTimeSec + " seconds\n"); out_reportfile.write("Number of rows read from input: " + outCount.get() + "\n"); out_reportfile.write("Number of rows successfully inserted: " + inCount.get() + "\n"); // if prompted msg changed, change it also for test case out_reportfile.write("Number of rows that could not be inserted: " + errorInfo.size() + "\n"); out_reportfile.write("CSVLoader rate: " + outCount.get() / elapsedTimeSec + " row/s\n"); m_log.info("invalid row file is generated to:" + pathInvalidrowfile); m_log.info("log file is generated to:" + pathLogfile); m_log.info("report file is generated to:" + pathReportfile); out_invaliderowfile.flush(); out_logfile.flush(); out_reportfile.flush(); } catch (FileNotFoundException e) { m_log.error("CSV report directory '" + config.reportdir + "' does not exist."); } catch (Exception x) { m_log.error(x.getMessage()); } } private static void close_cleanup() throws IOException, InterruptedException { inCount.set(0); outCount.set(0); errorInfo.clear(); typeList.clear(); out_invaliderowfile.close(); out_logfile.close(); out_reportfile.close(); } }
src/frontend/org/voltdb/utils/CSVLoader.java
/* This file is part of VoltDB. * Copyright (C) 2008-2013 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.utils; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; import org.voltcore.logging.VoltLogger; import org.voltdb.CLIConfig; import org.voltdb.VoltTable; import org.voltdb.VoltType; import org.voltdb.client.Client; import org.voltdb.client.ClientConfig; import org.voltdb.client.ClientFactory; import org.voltdb.client.ClientResponse; import org.voltdb.client.ProcedureCallback; import au.com.bytecode.opencsv_voltpatches.CSVParser; import au.com.bytecode.opencsv_voltpatches.CSVReader; /** * CSVLoader is a simple utility to load data from a CSV formatted file to a * table (or pass it to any stored proc, but ignoring any result other than the * success code.). */ public class CSVLoader { public static String pathInvalidrowfile = ""; public static String pathReportfile = "csvloaderReport.log"; public static String pathLogfile = "csvloaderLog.log"; protected static final VoltLogger m_log = new VoltLogger("CONSOLE"); private static final AtomicLong inCount = new AtomicLong(0); private static final AtomicLong outCount = new AtomicLong(0); private static final int reportEveryNRows = 10000; private static final int waitSeconds = 10; private static CSVConfig config = null; private static long latency = 0; private static long start = 0; private static boolean standin = false; private static BufferedWriter out_invaliderowfile; private static BufferedWriter out_logfile; private static BufferedWriter out_reportfile; private static String insertProcedure = ""; private static Map<Long, String[]> errorInfo = new TreeMap<Long, String[]>(); private static Map <VoltType, String> blankValues = new HashMap<VoltType, String>(); static { blankValues.put(VoltType.NUMERIC, "0"); blankValues.put(VoltType.TINYINT, "0"); blankValues.put(VoltType.SMALLINT, "0"); blankValues.put(VoltType.INTEGER, "0"); blankValues.put(VoltType.BIGINT, "0"); blankValues.put(VoltType.FLOAT, "0.0"); blankValues.put(VoltType.TIMESTAMP, "0"); blankValues.put(VoltType.STRING, ""); blankValues.put(VoltType.DECIMAL, "0"); blankValues.put(VoltType.VARBINARY, ""); } private static List <VoltType> typeList = new ArrayList<VoltType>(); private static final class MyCallback implements ProcedureCallback { private final long m_lineNum; private final CSVConfig m_config; private final String m_rowdata; MyCallback(long lineNumber, CSVConfig cfg, String rowdata) { m_lineNum = lineNumber; m_config = cfg; m_rowdata = rowdata; } @Override public void clientCallback(ClientResponse 
response) throws Exception { if (response.getStatus() != ClientResponse.SUCCESS) { m_log.error( response.getStatusString() ); synchronized (errorInfo) { if (!errorInfo.containsKey(m_lineNum)) { String[] info = { m_rowdata, response.getStatusString() }; errorInfo.put(m_lineNum, info); } if (errorInfo.size() >= m_config.maxerrors) { m_log.error("The number of Failure row data exceeds " + m_config.maxerrors); produceFiles(); close_cleanup(); System.exit(-1); } } return; } long currentCount = inCount.incrementAndGet(); if (currentCount % reportEveryNRows == 0) { m_log.info( "Inserted " + currentCount + " rows" ); } } } private static class CSVConfig extends CLIConfig { @Option(shortOpt = "f", desc = "location of CSV input file") String file = ""; @Option(shortOpt = "p", desc = "procedure name to insert the data into the database") String procedure = ""; @Option(desc = "maximum rows to be read from the CSV file") int limitrows = Integer.MAX_VALUE; @Option(shortOpt = "r", desc = "directory path for report files") String reportdir = System.getProperty("user.dir"); @Option(shortOpt = "m", desc = "maximum errors allowed") int maxerrors = 100; @Option(desc = "different ways to handle blank items: {error|null|empty} (default: error)") String blank = "error"; @Option(desc = "delimiter to use for separating entries") char separator = CSVParser.DEFAULT_SEPARATOR; @Option(desc = "character to use for quoted elements (default: \")") char quotechar = CSVParser.DEFAULT_QUOTE_CHARACTER; @Option(desc = "character to use for escaping a separator or quote (default: \\)") char escape = CSVParser.DEFAULT_ESCAPE_CHARACTER; @Option(desc = "require all input values to be enclosed in quotation marks", hasArg = false) boolean strictquotes = CSVParser.DEFAULT_STRICT_QUOTES; @Option(desc = "number of lines to skip before inserting rows into the database") int skip = CSVReader.DEFAULT_SKIP_LINES; @Option(desc = "do not allow whitespace between values and separators", hasArg = false) boolean nowhitespace = !CSVParser.DEFAULT_IGNORE_LEADING_WHITESPACE; @Option(shortOpt = "s", desc = "list of servers to connect to (default: localhost)") String servers = "localhost"; @Option(desc = "username when connecting to the servers") String user = ""; @Option(desc = "password to use when connecting to servers") String password = ""; @Option(desc = "port to use when connecting to database (default: 21212)") int port = Client.VOLTDB_SERVER_PORT; @AdditionalArgs(desc = "insert the data into database by TABLENAME.insert procedure by default") String table = ""; @Override public void validate() { if (maxerrors < 0) exitWithMessageAndUsage("abortfailurecount must be >=0"); if (procedure.equals("") && table.equals("")) exitWithMessageAndUsage("procedure name or a table name required"); if (!procedure.equals("") && !table.equals("")) exitWithMessageAndUsage("Only a procedure name or a table name required, pass only one please"); if (skip < 0) exitWithMessageAndUsage("skipline must be >= 0"); if (limitrows > Integer.MAX_VALUE) exitWithMessageAndUsage("limitrows to read must be < " + Integer.MAX_VALUE); if (port < 0) exitWithMessageAndUsage("port number must be >= 0"); if ((blank.equalsIgnoreCase("error") || blank.equalsIgnoreCase("null") || blank.equalsIgnoreCase("empty")) == false) exitWithMessageAndUsage("blank configuration specified must be one of {error|null|empty}"); } @Override public void printUsage() { System.out .println("Usage: csvloader [args] tablename"); System.out .println(" csvloader [args] -p procedurename"); 
super.printUsage(); } } public static void main(String[] args) throws IOException, InterruptedException { start = System.currentTimeMillis(); int waits = 0; int shortWaits = 0; CSVConfig cfg = new CSVConfig(); cfg.parse(CSVLoader.class.getName(), args); config = cfg; configuration(); CSVReader csvReader = null; try { if (CSVLoader.standin) csvReader = new CSVReader(new BufferedReader( new InputStreamReader(System.in)), config.separator, config.quotechar, config.escape, config.skip, config.strictquotes, config.nowhitespace); else csvReader = new CSVReader(new FileReader(config.file), config.separator, config.quotechar, config.escape, config.skip, config.strictquotes, config.nowhitespace); } catch (FileNotFoundException e) { m_log.error("CSV file '" + config.file + "' could not be found."); System.exit(-1); } assert(csvReader != null); // Split server list String[] serverlist = config.servers.split(","); // Create connection ClientConfig c_config = new ClientConfig(config.user, config.password); c_config.setProcedureCallTimeout(0); // Set procedure call to infinite // timeout, see ENG-2670 Client csvClient = null; try { csvClient = CSVLoader.getClient(c_config, serverlist, config.port); } catch (Exception e) { m_log.error("Error to connect to the servers:" + config.servers); close_cleanup(); System.exit(-1); } assert(csvClient != null); try { ProcedureCallback cb = null; boolean lastOK = true; String line[] = null; int columnCnt = 0; VoltTable procInfo = null; boolean isProcExist = false; try { procInfo = csvClient.callProcedure("@SystemCatalog", "PROCEDURECOLUMNS").getResults()[0]; while (procInfo.advanceRow()) { if (insertProcedure.matches((String) procInfo.get( "PROCEDURE_NAME", VoltType.STRING))) { columnCnt++; isProcExist = true; String typeStr = (String)procInfo.get("TYPE_NAME", VoltType.STRING); typeList.add(VoltType.typeFromString(typeStr)); } } } catch (Exception e) { m_log.error(e.getMessage(), e); close_cleanup(); System.exit(-1); } if (isProcExist == false) { m_log.error("No matching insert procedure available"); close_cleanup(); System.exit(-1); } while ((config.limitrows-- > 0) && (line = csvReader.readNext()) != null) { outCount.incrementAndGet(); boolean queued = false; while (queued == false) { StringBuilder linedata = new StringBuilder(); for (int i = 0; i < line.length; i++) { linedata.append("\"" + line[i] + "\""); if (i != line.length - 1) linedata.append(","); } String[] correctedLine = line; cb = new MyCallback(outCount.get(), config, linedata.toString()); String lineCheckResult; if ((lineCheckResult = checkparams_trimspace(correctedLine, columnCnt)) != null) { synchronized (errorInfo) { if (!errorInfo.containsKey(outCount.get())) { String[] info = { linedata.toString(), lineCheckResult }; errorInfo.put(outCount.get(), info); } if (errorInfo.size() >= config.maxerrors) { m_log.error("The number of Failure row data exceeds " + config.maxerrors); produceFiles(); close_cleanup(); System.exit(-1); } } break; } queued = csvClient.callProcedure(cb, insertProcedure, (Object[]) correctedLine); if (queued == false) { ++waits; if (lastOK == false) { ++shortWaits; } Thread.sleep(waitSeconds); } lastOK = queued; } } csvClient.drain(); } catch (Exception e) { e.printStackTrace(); } m_log.info("Inserted " + outCount.get() + " and acknowledged " + inCount.get() + " rows (final)"); if (waits > 0) { m_log.info("Waited " + waits + " times"); if (shortWaits > 0) { m_log.info( "Waited too briefly? 
" + shortWaits + " times" ); } } produceFiles(); close_cleanup(); csvReader.close(); csvClient.close(); } private static String checkparams_trimspace(String[] slot, int columnCnt) { if (slot.length == 1 && slot[0].equals("")) { return "Error: blank line"; } if (slot.length != columnCnt) { return "Error: Incorrect number of columns. " + slot.length + " found, " + columnCnt + " expected."; } for (int i = 0; i < slot.length; i++) { // trim white space in this line. slot[i] = slot[i].trim(); // treat NULL, \N and "\N" as actual null value if ((slot[i]).equals("NULL") || slot[i].equals(VoltTable.CSV_NULL) || !config.strictquotes && slot[i].equals(VoltTable.QUOTED_CSV_NULL)) slot[i] = null; else if (slot[i].equals("")) { if (config.blank.equalsIgnoreCase("null") ) slot[i] = null; else if (config.blank.equalsIgnoreCase("empty")) slot[i] = blankValues.get(typeList.get(i)); } } return null; } private static void configuration() { if (config.file.equals("")) standin = true; if (!config.table.equals("")) { insertProcedure = config.table.toUpperCase() + ".insert"; } else { insertProcedure = config.procedure; } if (!config.reportdir.endsWith("/")) config.reportdir += "/"; try { File dir = new File(config.reportdir); if (!dir.exists()) { dir.mkdirs(); } } catch (Exception x) { m_log.error(x.getMessage(), x); System.exit(-1); } String myinsert = insertProcedure; myinsert = myinsert.replaceAll("\\.", "_"); pathInvalidrowfile = config.reportdir + "csvloader_" + myinsert + "_" + "invalidrows.csv"; pathLogfile = config.reportdir + "csvloader_" + myinsert + "_" + "log.log"; pathReportfile = config.reportdir + "csvloader_" + myinsert + "_" + "report.log"; try { out_invaliderowfile = new BufferedWriter(new FileWriter( pathInvalidrowfile)); out_logfile = new BufferedWriter(new FileWriter(pathLogfile)); out_reportfile = new BufferedWriter(new FileWriter(pathReportfile)); } catch (IOException e) { m_log.error(e.getMessage()); System.exit(-1); } } private static Client getClient(ClientConfig config, String[] servers, int port) throws Exception { final Client client = ClientFactory.createClient(config); for (String server : servers) client.createConnection(server.trim(), port); return client; } private static void produceFiles() { latency = System.currentTimeMillis() - start; m_log.info("CSVLoader elapsed: " + latency / 1000F + " seconds"); int bulkflush = 300; // by default right now try { long linect = 0; for (Long irow : errorInfo.keySet()) { String info[] = errorInfo.get(irow); if (info.length != 2) System.out .println("internal error, information is not enough"); linect++; out_invaliderowfile.write(info[0] + "\n"); String message = "Invalid input on line " + irow + ".\n Contents:" + info[0]; m_log.error(message); out_logfile.write(message + "\n " + info[1] + "\n"); if (linect % bulkflush == 0) { out_invaliderowfile.flush(); out_logfile.flush(); } } // Get elapsed time in seconds float elapsedTimeSec = latency / 1000F; out_reportfile.write("csvloader elaspsed: " + elapsedTimeSec + " seconds\n"); out_reportfile.write("Number of rows read from input: " + outCount.get() + "\n"); out_reportfile.write("Number of rows successfully inserted: " + inCount.get() + "\n"); // if prompted msg changed, change it also for test case out_reportfile.write("Number of rows that could not be inserted: " + errorInfo.size() + "\n"); out_reportfile.write("CSVLoader rate: " + outCount.get() / elapsedTimeSec + " row/s\n"); m_log.info("invalid row file is generated to:" + pathInvalidrowfile); m_log.info("log file is generated to:" + 
pathLogfile); m_log.info("report file is generated to:" + pathReportfile); out_invaliderowfile.flush(); out_logfile.flush(); out_reportfile.flush(); } catch (FileNotFoundException e) { m_log.error("CSV report directory '" + config.reportdir + "' does not exist."); } catch (Exception x) { m_log.error(x.getMessage()); } } private static void close_cleanup() throws IOException, InterruptedException { inCount.set(0); outCount.set(0); errorInfo.clear(); typeList.clear(); out_invaliderowfile.close(); out_logfile.close(); out_reportfile.close(); } }
step 1: getProcessors() added to CSVLoader; next step: switch to SuperCSV listReader
src/frontend/org/voltdb/utils/CSVLoader.java
step 1: getProcessors() added to CSVLoader; next step: switch to SuperCSV listReader
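The commit message for this record notes a planned follow-up: switching from the opencsv-based CSVReader to SuperCSV's list reader, driven by the getProcessors() array of Optional cell processors added in this step. A minimal sketch of what that reader loop could look like is below; the file name, column count, and use of CsvPreference.STANDARD_PREFERENCE are assumptions for illustration, not the project's actual follow-up code.

import java.io.FileReader;
import java.util.Arrays;
import java.util.List;

import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.io.CsvListReader;
import org.supercsv.prefs.CsvPreference;

public class SuperCsvListReaderSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical inputs: the file path and column count are placeholders.
        String file = "input.csv";
        int columnCnt = 3;

        // Same idea as CSVLoader.getProcessors(): one lenient Optional per column.
        CellProcessor[] processors = new CellProcessor[columnCnt];
        Arrays.fill(processors, new Optional());

        // CsvListReader returns each row as a List, applying the processors.
        try (CsvListReader reader = new CsvListReader(new FileReader(file),
                CsvPreference.STANDARD_PREFERENCE)) {
            List<Object> row;
            while ((row = reader.read(processors)) != null) {
                // A real loader would hand row.toArray() to callProcedure(...).
                System.out.println(row);
            }
        }
    }
}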
Java
agpl-3.0
6caa96343f8240df2c5edad5a64fa64f51d52036
0
after-the-sunrise/cryptotrader,after-the-sunrise/cryptotrader
package com.after_sunrise.cryptocurrency.cryptotrader.service.template; import com.after_sunrise.cryptocurrency.cryptotrader.framework.Adviser.Advice; import com.after_sunrise.cryptocurrency.cryptotrader.framework.*; import com.after_sunrise.cryptocurrency.cryptotrader.framework.Context.Key; import com.after_sunrise.cryptocurrency.cryptotrader.framework.Instruction.CancelInstruction; import com.after_sunrise.cryptocurrency.cryptotrader.framework.Instruction.CreateInstruction; import com.google.common.annotations.VisibleForTesting; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.*; import static java.lang.Boolean.TRUE; import static java.math.BigDecimal.ONE; import static java.math.BigDecimal.ZERO; import static java.math.RoundingMode.DOWN; import static java.math.RoundingMode.UP; import static java.util.Collections.emptyList; import static java.util.Optional.ofNullable; import static org.apache.commons.lang3.math.NumberUtils.INTEGER_ZERO; /** * @author takanori.takase * @version 0.0.1 */ @Slf4j public class TemplateInstructor implements Instructor { private static final List<Order> EMPTY = emptyList(); private static final BigDecimal EPSILON = ONE.movePointLeft(SCALE); private final String id; public TemplateInstructor(String id) { this.id = id; } @Override public String get() { return id; } @Override public List<Instruction> instruct(Context context, Request request, Advice advice) { List<CreateInstruction> creates = new ArrayList<>(); creates.addAll(createBuys(context, request, advice)); creates.addAll(createSells(context, request, advice)); return merge(creates, createCancels(context, request)); } @VisibleForTesting Map<CancelInstruction, Order> createCancels(Context context, Request request) { Key key = Key.from(request); List<Order> orders = ofNullable(context.listActiveOrders(key)).orElse(EMPTY); Map<CancelInstruction, Order> cancels = new IdentityHashMap<>(); orders.stream() .filter(Objects::nonNull) .filter(o -> StringUtils.isNotEmpty(o.getId())) .filter(o -> TRUE.equals(o.getActive())) .forEach(o -> cancels.put(CancelInstruction.builder().id(o.getId()).build(), o)); cancels.forEach((k, v) -> log.trace("Cancel candidate : {}", v)); return cancels; } @VisibleForTesting List<CreateInstruction> createBuys(Context context, Request request, Advice adv) { List<BigDecimal> s = splitSize(context, request, adv.getBuyLimitSize()); List<BigDecimal> p = splitPrice(context, request, adv.getBuyLimitPrice(), s.size(), EPSILON.negate()); List<CreateInstruction> instructions = new ArrayList<>(s.size()); for (int i = 0; i < s.size(); i++) { instructions.add(CreateInstruction.builder().price(p.get(i)).size(s.get(i)).build()); } instructions.forEach((v) -> log.trace("Buy candidate : {}", v)); return instructions; } @VisibleForTesting List<CreateInstruction> createSells(Context context, Request request, Advice adv) { List<BigDecimal> s = splitSize(context, request, adv.getSellLimitSize()); List<BigDecimal> p = splitPrice(context, request, adv.getSellLimitPrice(), s.size(), EPSILON); List<CreateInstruction> instructions = new ArrayList<>(s.size()); for (int i = 0; i < s.size(); i++) { BigDecimal size = s.get(i) == null ? 
null : s.get(i).negate(); instructions.add(CreateInstruction.builder().price(p.get(i)).size(size).build()); } instructions.forEach((v) -> log.trace("Sell candidate : {}", v)); return instructions; } private List<BigDecimal> splitSize(Context context, Request request, BigDecimal value) { Key key = Key.from(request); BigDecimal total = context.roundLotSize(key, value, DOWN); if (total == null || total.signum() <= 0) { return emptyList(); } BigDecimal unit = Optional.ofNullable(context.roundLotSize(key, EPSILON, UP)).orElse(total); BigDecimal split = request.getTradingSplit().setScale(INTEGER_ZERO, DOWN).max(ONE); BigDecimal fair = total.divide(split, SCALE, DOWN); BigDecimal floor = Optional.ofNullable(context.roundLotSize(key, fair, DOWN)).orElse(ZERO); BigDecimal residual = total.subtract(split.multiply(floor)); List<BigDecimal> results = new ArrayList<>(split.intValue()); for (int i = 0; i < split.intValue(); i++) { BigDecimal slice = floor; if (residual.compareTo(unit) >= 0) { slice = slice.add(unit); residual = residual.subtract(unit); } if (slice.signum() <= 0) { continue; } results.add(slice); } return results; } private List<BigDecimal> splitPrice(Context context, Request request, BigDecimal value, int size, BigDecimal delta) { Key key = Key.from(request); List<BigDecimal> values = new ArrayList<>(size); RoundingMode mode = delta.signum() >= 0 ? UP : DOWN; values.add(context.roundTickSize(key, value, mode)); for (int i = 1; i < size; i++) { BigDecimal previous = values.get(i - 1); BigDecimal adjusted = previous == null ? null : previous.add(delta); BigDecimal rounded = context.roundTickSize(key, adjusted, mode); values.add(rounded == null ? previous : rounded); } return values; } @VisibleForTesting List<Instruction> merge(List<CreateInstruction> creates, Map<CancelInstruction, Order> cancels) { Map<CancelInstruction, Order> remainingCancels = new IdentityHashMap<>(cancels); List<CreateInstruction> remainingCreates = new ArrayList<>(creates); Iterator<CreateInstruction> createItr = remainingCreates.iterator(); while (createItr.hasNext()) { CreateInstruction create = createItr.next(); Iterator<Map.Entry<CancelInstruction, Order>> cancelItr = remainingCancels.entrySet().iterator(); while (cancelItr.hasNext()) { Map.Entry<CancelInstruction, Order> entry = cancelItr.next(); if (isDifferent(entry.getValue().getOrderPrice(), create.getPrice())) { continue; } if (isDifferent(entry.getValue().getRemainingQuantity(), create.getSize())) { continue; } log.trace("Netting cancel/create : {} - {}", create, entry.getKey()); cancelItr.remove(); createItr.remove(); break; } } List<Instruction> instructions = new ArrayList<>(); instructions.addAll(remainingCancels.keySet()); instructions.addAll(remainingCreates); instructions.forEach(v -> log.trace("Merged candidate : {}", v)); return instructions; } private boolean isDifferent(BigDecimal v1, BigDecimal v2) { return v1 == null || v2 == null || v1.compareTo(v2) != 0; } }
src/main/java/com/after_sunrise/cryptocurrency/cryptotrader/service/template/TemplateInstructor.java
package com.after_sunrise.cryptocurrency.cryptotrader.service.template; import com.after_sunrise.cryptocurrency.cryptotrader.framework.Adviser.Advice; import com.after_sunrise.cryptocurrency.cryptotrader.framework.*; import com.after_sunrise.cryptocurrency.cryptotrader.framework.Context.Key; import com.after_sunrise.cryptocurrency.cryptotrader.framework.Instruction.CancelInstruction; import com.after_sunrise.cryptocurrency.cryptotrader.framework.Instruction.CreateInstruction; import com.google.common.annotations.VisibleForTesting; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import java.math.BigDecimal; import java.util.*; import static java.lang.Boolean.TRUE; import static java.math.BigDecimal.ONE; import static java.math.BigDecimal.ZERO; import static java.math.RoundingMode.DOWN; import static java.math.RoundingMode.UP; import static java.util.Collections.emptyList; import static java.util.Optional.ofNullable; import static org.apache.commons.lang3.math.NumberUtils.INTEGER_ZERO; /** * @author takanori.takase * @version 0.0.1 */ @Slf4j public class TemplateInstructor implements Instructor { private static final List<Order> EMPTY = emptyList(); private static final BigDecimal EPSILON = ONE.movePointLeft(SCALE); private final String id; public TemplateInstructor(String id) { this.id = id; } @Override public String get() { return id; } @Override public List<Instruction> instruct(Context context, Request request, Advice advice) { List<CreateInstruction> creates = new ArrayList<>(); creates.addAll(createBuys(context, request, advice)); creates.addAll(createSells(context, request, advice)); return merge(creates, createCancels(context, request)); } @VisibleForTesting Map<CancelInstruction, Order> createCancels(Context context, Request request) { Key key = Key.from(request); List<Order> orders = ofNullable(context.listActiveOrders(key)).orElse(EMPTY); Map<CancelInstruction, Order> cancels = new IdentityHashMap<>(); orders.stream() .filter(Objects::nonNull) .filter(o -> StringUtils.isNotEmpty(o.getId())) .filter(o -> TRUE.equals(o.getActive())) .forEach(o -> cancels.put(CancelInstruction.builder().id(o.getId()).build(), o)); cancels.forEach((k, v) -> log.trace("Cancel candidate : {}", v)); return cancels; } @VisibleForTesting List<CreateInstruction> createBuys(Context context, Request request, Advice adv) { List<BigDecimal> s = splitSize(context, request, adv.getBuyLimitSize()); List<BigDecimal> p = splitPrice(context, request, adv.getBuyLimitPrice(), s.size(), EPSILON.negate()); List<CreateInstruction> instructions = new ArrayList<>(s.size()); for (int i = 0; i < s.size(); i++) { instructions.add(CreateInstruction.builder().price(p.get(i)).size(s.get(i)).build()); } instructions.forEach((v) -> log.trace("Buy candidate : {}", v)); return instructions; } @VisibleForTesting List<CreateInstruction> createSells(Context context, Request request, Advice adv) { List<BigDecimal> s = splitSize(context, request, adv.getSellLimitSize()); List<BigDecimal> p = splitPrice(context, request, adv.getSellLimitPrice(), s.size(), EPSILON); List<CreateInstruction> instructions = new ArrayList<>(s.size()); for (int i = 0; i < s.size(); i++) { BigDecimal size = s.get(i) == null ? 
null : s.get(i).negate(); instructions.add(CreateInstruction.builder().price(p.get(i)).size(size).build()); } instructions.forEach((v) -> log.trace("Sell candidate : {}", v)); return instructions; } private List<BigDecimal> splitSize(Context context, Request request, BigDecimal value) { Key key = Key.from(request); BigDecimal total = context.roundLotSize(key, value, DOWN); if (total == null || total.signum() <= 0) { return emptyList(); } BigDecimal unit = Optional.ofNullable(context.roundLotSize(key, EPSILON, UP)).orElse(total); BigDecimal split = request.getTradingSplit().setScale(INTEGER_ZERO, DOWN).max(ONE); BigDecimal fair = total.divide(split, SCALE, DOWN); BigDecimal floor = Optional.ofNullable(context.roundLotSize(key, fair, DOWN)).orElse(ZERO); BigDecimal residual = total.subtract(split.multiply(floor)); List<BigDecimal> results = new ArrayList<>(split.intValue()); for (int i = 0; i < split.intValue(); i++) { BigDecimal slice = floor; if (residual.compareTo(unit) >= 0) { slice = slice.add(unit); residual = residual.subtract(unit); } if (slice.signum() <= 0) { continue; } results.add(slice); } return results; } private List<BigDecimal> splitPrice(Context context, Request request, BigDecimal value, int size, BigDecimal delta) { List<BigDecimal> values = new ArrayList<>(size); BigDecimal previous = value; Key key = Key.from(request); for (int i = 0; i < size; i++) { BigDecimal raw = previous == null ? null : previous.add(delta); BigDecimal rounded = context.roundTickSize(key, raw, delta.signum() >= 0 ? UP : DOWN); values.add(rounded == null ? previous : rounded); previous = rounded; } return values; } @VisibleForTesting List<Instruction> merge(List<CreateInstruction> creates, Map<CancelInstruction, Order> cancels) { Map<CancelInstruction, Order> remainingCancels = new IdentityHashMap<>(cancels); List<CreateInstruction> remainingCreates = new ArrayList<>(creates); Iterator<CreateInstruction> createItr = remainingCreates.iterator(); while (createItr.hasNext()) { CreateInstruction create = createItr.next(); Iterator<Map.Entry<CancelInstruction, Order>> cancelItr = remainingCancels.entrySet().iterator(); while (cancelItr.hasNext()) { Map.Entry<CancelInstruction, Order> entry = cancelItr.next(); if (isDifferent(entry.getValue().getOrderPrice(), create.getPrice())) { continue; } if (isDifferent(entry.getValue().getRemainingQuantity(), create.getSize())) { continue; } log.trace("Netting cancel/create : {} - {}", create, entry.getKey()); cancelItr.remove(); createItr.remove(); break; } } List<Instruction> instructions = new ArrayList<>(); instructions.addAll(remainingCancels.keySet()); instructions.addAll(remainingCreates); instructions.forEach(v -> log.trace("Merged candidate : {}", v)); return instructions; } private boolean isDifferent(BigDecimal v1, BigDecimal v2) { return v1 == null || v2 == null || v1.compareTo(v2) != 0; } }
Fixed price rounding.
src/main/java/com/after_sunrise/cryptocurrency/cryptotrader/service/template/TemplateInstructor.java
Fixed price rounding.
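The one-line message "Fixed price rounding." corresponds to the splitPrice change in this record: the first price is now rounded once in the direction implied by the delta's sign, and each subsequent price is derived from the previous rounded value. A small self-contained sketch of directional tick rounding with BigDecimal is shown below; the tick size, sample prices, and the roundToTick helper are illustrative assumptions, not the project's Context.roundTickSize implementation.

import java.math.BigDecimal;
import java.math.RoundingMode;

public class TickRoundingSketch {

    // Hypothetical helper: round a price to a multiple of the tick size
    // in the given direction (UP when walking prices up, DOWN when walking down).
    static BigDecimal roundToTick(BigDecimal value, BigDecimal tick, RoundingMode mode) {
        return value.divide(tick, 0, mode).multiply(tick);
    }

    public static void main(String[] args) {
        BigDecimal tick = new BigDecimal("0.5");
        BigDecimal delta = new BigDecimal("0.000000001"); // tiny positive step, like EPSILON

        // Round the first price once, then derive each next price from the
        // previous *rounded* value, mirroring the fixed splitPrice loop.
        BigDecimal previous = roundToTick(new BigDecimal("100.2"), tick, RoundingMode.UP);
        System.out.println(previous); // 100.5
        for (int i = 1; i < 3; i++) {
            BigDecimal next = roundToTick(previous.add(delta), tick, RoundingMode.UP);
            System.out.println(next); // 101.0, then 101.5: each strictly above the last
            previous = next;
        }
    }
}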
Java
agpl-3.0
e471a7cd3a73f6f465dd31265b12436ea351176b
0
aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2016 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2016 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <[email protected]> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.core.test.elasticsearch; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import org.apache.commons.io.FileUtils; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeBuilder; import org.junit.rules.ExternalResource; /** * This class starts up an embedded Elasticsearch node for use in integration * tests. * * @author Seth */ public class JUnitElasticsearchServer extends ExternalResource { private Node m_node; private Path m_temporaryDirectory; @Override public void before() throws Exception { m_temporaryDirectory = Files.createTempDirectory("elasticsearch-data"); ImmutableSettings.Builder elasticsearchSettings = ImmutableSettings.settingsBuilder() // By default, the service will listen on a free port from 9200 to 9300 //.put("http.enabled", "false") //network.publish_host: 192.168.0.1 // Randomize the cluster.name so that Elasticsearch instances don't cluster with each other during tests. // Use the same method as org.opennms.core.test.db.TemporaryDatabasePostgreSQL. .put("cluster.name", String.format("opennms_test_%s_%06d_%s", System.currentTimeMillis(), System.nanoTime(), Math.abs(this.hashCode()))) .put("path.data", m_temporaryDirectory); m_node = NodeBuilder.nodeBuilder() //.local(true) .settings(elasticsearchSettings.build()) .node(); } public Client getClient() { return m_node.client(); } @Override public void after() { m_node.close(); try { FileUtils.deleteDirectory(new File(m_temporaryDirectory.toUri())); } catch (IOException e) { throw new RuntimeException("Could not delete data directory of embedded elasticsearch server", e); } } }
core/test-api/elasticsearch/src/main/java/org/opennms/core/test/elasticsearch/JUnitElasticsearchServer.java
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2016 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2016 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <[email protected]> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.core.test.elasticsearch; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import org.apache.commons.io.FileUtils; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeBuilder; import org.junit.rules.ExternalResource; /** * This class starts up an embedded Elasticsearch node for use in integration * tests. * * @author Seth */ public class JUnitElasticsearchServer extends ExternalResource { private Node m_node; private Path m_temporaryDirectory; @Override public void before() throws Exception { m_temporaryDirectory = Files.createTempDirectory("elasticsearch-data"); ImmutableSettings.Builder elasticsearchSettings = ImmutableSettings.settingsBuilder() // By default, the service will listen on a free port from 9200 to 9300 //.put("http.enabled", "false") //network.publish_host: 192.168.0.1 .put("cluster.name", "opennms") .put("path.data", m_temporaryDirectory); m_node = NodeBuilder.nodeBuilder() //.local(true) .settings(elasticsearchSettings.build()) .node(); } public Client getClient() { return m_node.client(); } @Override public void after() { m_node.close(); try { FileUtils.deleteDirectory(new File(m_temporaryDirectory.toUri())); } catch (IOException e) { throw new RuntimeException("Could not delete data directory of embedded elasticsearch server", e); } } }
[bamboo] Automated branch merge (from foundation-2017:d9b39d5b5914d91870e3d8f9c192d339a7e6b77a)
core/test-api/elasticsearch/src/main/java/org/opennms/core/test/elasticsearch/JUnitElasticsearchServer.java
[bamboo] Automated branch merge (from foundation-2017:d9b39d5b5914d91870e3d8f9c192d339a7e6b77a)
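The merged change in this record replaces the fixed "opennms" cluster.name with a per-run unique name so concurrently running embedded Elasticsearch test nodes cannot discover and join each other. A minimal sketch of that naming scheme, detached from the Elasticsearch API, is below; the prefix string is carried over from the record and everything else is an assumption for illustration.

public class UniqueClusterNameSketch {

    // Build a cluster name that is effectively unique per test run, following the
    // same recipe as the change above (millis + nanos + an instance hash).
    static String uniqueClusterName(Object owner) {
        return String.format("opennms_test_%s_%06d_%s",
                System.currentTimeMillis(),
                System.nanoTime(),
                Math.abs(owner.hashCode()));
    }

    public static void main(String[] args) {
        // Two "test runs" started back to back still get different names,
        // so their embedded nodes will not cluster together.
        System.out.println(uniqueClusterName(new Object()));
        System.out.println(uniqueClusterName(new Object()));
    }
}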
Java
lgpl-2.1
576c71c633ddbf0868ce1a2e5bca3b4aaee128ec
0
xwiki/xwiki-enterprise,xwiki/xwiki-enterprise
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package com.xpn.xwiki.it.selenium; import junit.framework.Test; import com.xpn.xwiki.it.selenium.framework.AbstractXWikiTestCase; import com.xpn.xwiki.it.selenium.framework.AlbatrossSkinExecutor; import com.xpn.xwiki.it.selenium.framework.XWikiTestSuite; /** * Verify the caching features of XWiki. * * @version $Id: $ */ public class CacheTest extends AbstractXWikiTestCase { public static Test suite() { XWikiTestSuite suite = new XWikiTestSuite("Verify the caching features of XWiki"); suite.addTestSuite(CacheTest.class, AlbatrossSkinExecutor.class); return suite; } protected void setUp() throws Exception { super.setUp(); loginAsAdmin(); } /** * Tests that the document dates are always of the type java.util.Date, as hibernate returns * java.sql.Timestamp, which is not entirely compatible with java.util.Date. When the cache * storage is enabled, this problem isn't detected until the document is removed from the cache. */ public void testDateClass() { createPage("Main", "TestDateClass", "$xwiki.flushCache()\n$xwiki.getDocument(\"Main.WebHome\").date.class"); waitForCondition("selenium.page().bodyText().indexOf('java.util.Date')!=-1;"); } }
distribution-test/selenium-tests/src/test/it/com/xpn/xwiki/it/selenium/CacheTest.java
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package com.xpn.xwiki.it.selenium; import junit.framework.Test; import com.xpn.xwiki.it.selenium.framework.AbstractXWikiTestCase; import com.xpn.xwiki.it.selenium.framework.AlbatrossSkinExecutor; import com.xpn.xwiki.it.selenium.framework.XWikiTestSuite; /** * Verify the caching features of XWiki. * * @version $Id: $ */ public class CacheTest extends AbstractXWikiTestCase { public static Test suite() { XWikiTestSuite suite = new XWikiTestSuite("Verify the caching features of XWiki"); suite.addTestSuite(CacheTest.class, AlbatrossSkinExecutor.class); return suite; } protected void setUp() throws Exception { super.setUp(); loginAsAdmin(); } /** * Tests that the document dates are always of the type java.util.Date, as hibernate returns * java.sql.Timestamp, which is not entirely compatible with java.util.Date. When the cache * storage is enabled, this problem isn't detected until the document is removed from the cache. */ public void testDateClass() { createPage("Main", "TestDateClass", "$xwiki.flushCache()\n$xwiki.getDocument(\"Main.WebHome\").date.class"); waitForCondition("selenium.page().bodyText().indexOf('java.util.Date')!=-1;"); } /** * Tests that $context.setCacheDuration don't breaks links * XWIKI-2672 */ public void testCacheDuration() { createPage("Main", "CacheDuration", "$context.setCacheDuration(1800)\n http://some:123/link"); assertTextPresent("http://some:123/link"); open("Main", "CacheDuration"); assertTextPresent("http://some:123/link"); } }
XWIKI-2672: new parseTemplate breaks setCacheDuration; replace selenium test by a unit test. git-svn-id: cf27bad30c6b7316185bdac65b014e8c16cd40b6@12734 f329d543-caf0-0310-9063-dda96c69346f
distribution-test/selenium-tests/src/test/it/com/xpn/xwiki/it/selenium/CacheTest.java
XWIKI-2672: new parseTemplate breaks setCacheDuration; replace selenium test by a unit test.
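The commit message for this record says the selenium check for XWIKI-2672 (setCacheDuration breaking links) was replaced by a unit test, but that unit test itself is not part of the record. Purely to illustrate the intent, a hypothetical JUnit 3 style test is sketched below; the renderContent helper and its behaviour are invented for the example and are not part of the XWiki test API.

import junit.framework.TestCase;

/**
 * Hypothetical unit-level replacement for the removed selenium test:
 * rendering content that calls $context.setCacheDuration(1800) must still
 * emit the literal link text.
 */
public class CacheDurationSketchTest extends TestCase {

    // Invented helper standing in for whatever rendering entry point the
    // real unit test would use; it is not an actual XWiki API.
    private String renderContent(String content) {
        // For the sketch, pretend rendering is an identity transform.
        return content;
    }

    public void testCacheDurationDoesNotBreakLinks() {
        String rendered = renderContent(
            "$context.setCacheDuration(1800)\n http://some:123/link");
        assertTrue(rendered.contains("http://some:123/link"));
    }
}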
Java
lgpl-2.1
dec635a9a4aa3e32fb18d9bf4f0342302dae30d6
0
threerings/narya,threerings/narya,threerings/narya,threerings/narya,threerings/narya
// // $Id: Table.java,v 1.13 2003/03/23 02:20:49 mdb Exp $ package com.threerings.parlor.data; import com.samskivert.util.StringUtil; import com.threerings.presents.dobj.DSet; import com.threerings.parlor.data.ParlorCodes; import com.threerings.parlor.game.GameConfig; /** * This class represents a table that is being used to matchmake a game by * the Parlor services. */ public class Table implements DSet.Entry, ParlorCodes { /** The unique identifier for this table. */ public Integer tableId; /** The object id of the lobby object with which this table is * associated. */ public int lobbyOid; /** The oid of the game that was created from this table or -1 if the * table is still in matchmaking mode. */ public int gameOid = -1; /** An array of the usernames of the occupants of this table (some * slots may not be filled). */ public String[] occupants; /** The body oids of the occupants of this table. (This is not * propagated to remote instances.) */ public transient int[] bodyOids; /** The game config for the game that is being matchmade. This config * instance will also implement {@link TableConfig}. */ public GameConfig config; /** * Creates a new table instance, and assigns it the next monotonically * increasing table id. The supplied config instance must implement * {@link TableConfig} or a <code>ClassCastException</code> will be * thrown. * * @param lobbyOid the object id of the lobby in which this table is * to live. * @param config the configuration of the game being matchmade by this * table. */ public Table (int lobbyOid, GameConfig config) { // assign a unique table id tableId = new Integer(++_tableIdCounter); // keep track of our lobby oid this.lobbyOid = lobbyOid; // keep a casted reference around _tconfig = (TableConfig)config; this.config = config; // make room for the maximum number of players occupants = new String[_tconfig.getMaximumPlayers()]; bodyOids = new int[occupants.length]; } /** * Constructs a blank table instance, suitable for unserialization. */ public Table () { } /** * A convenience function for accessing the table id as an int. */ public int getTableId () { return tableId.intValue(); } /** * Once a table is ready to play (see {@link #readyToStart}), the * players array can be fetched using this method. It will return an * array containing the usernames of all of the players in the game, * sized properly and with each player in the appropriate position. */ public String[] getPlayers () { // count up the players int pcount = 0; for (int i = 0; i < occupants.length; i++) { if (!StringUtil.blank(occupants[i])) { pcount++; } } // create and populate the players array String[] players = new String[pcount]; pcount = 0; for (int i = 0; i < occupants.length; i++) { if (!StringUtil.blank(occupants[i])) { players[pcount++] = occupants[i]; } } return players; } /** * Requests to seat the specified user at the specified position in * this table. * * @param position the position in which to seat the user. * @param username the username of the user to be set. * @param bodyOid the body object id of the user to be set. * * @return null if the user was successfully seated, a string error * code explaining the failure if the user was not able to be seated * at that position. 
*/ public String setOccupant (int position, String username, int bodyOid) { // find out how many positions we have for occupation int maxpos = _tconfig.getDesiredPlayers(); // if there is no desired number of players, use the max if (maxpos == -1) { maxpos = _tconfig.getMaximumPlayers(); } // make sure the requested position is a valid one if (position >= maxpos) { return INVALID_TABLE_POSITION; } // make sure the requested position is not already occupied if (!StringUtil.blank(occupants[position])) { return TABLE_POSITION_OCCUPIED; } // otherwise all is well, stick 'em in occupants[position] = username; bodyOids[position] = bodyOid; return null; } /** * Requests that the specified user be removed from their seat at this * table. * * @return true if the user was seated at the table and has now been * removed, false if the user was never seated at the table in the * first place. */ public boolean clearOccupant (String username) { for (int i = 0; i < occupants.length; i++) { if (username.equals(occupants[i])) { occupants[i] = ""; bodyOids[i] = 0; return true; } } return false; } /** * Requests that the user identified by the specified body object id * be removed from their seat at this table. * * @return true if the user was seated at the table and has now been * removed, false if the user was never seated at the table in the * first place. */ public boolean clearOccupant (int bodyOid) { for (int i = 0; i < bodyOids.length; i++) { if (bodyOid == bodyOids[i]) { occupants[i] = ""; bodyOids[i] = 0; return true; } } return false; } /** * Returns true if this table has occupants in all of the desired * positions and should be started. */ public boolean readyToStart () { int need = _tconfig.getDesiredPlayers(); if (need == -1) { need = _tconfig.getMaximumPlayers(); } // make sure the first "need" players are filled in for (int i = 0; i < need; i++) { if (StringUtil.blank(occupants[i])) { return false; } } return true; } /** * Returns true if there is no one sitting at this table. */ public boolean isEmpty () { for (int i = 0; i < occupants.length; i++) { if (!StringUtil.blank(occupants[i])) { return false; } } return true; } /** * Returns true if this table is in play, false if it is still being * matchmade. */ public boolean inPlay () { return gameOid != -1; } // documentation inherited public Comparable getKey () { return tableId; } /** * Returns true if this table is equal to the supplied object (which * must be a table with the same table id). */ public boolean equals (Object other) { if (other != null && other instanceof Table) { return ((Table)other).tableId.equals(tableId); } else { return false; } } /** * Generates a string representation of this table instance. */ public String toString () { return "[tableId=" + tableId + ", lobbyOid=" + lobbyOid + ", gameOid=" + gameOid + ", occupants=" + StringUtil.toString(occupants) + ", bodyOids=" + StringUtil.toString(bodyOids) + ", config=" + config + "]"; } /** A casted reference of our game config object. */ protected transient TableConfig _tconfig; /** A counter for assigning table ids. */ protected static int _tableIdCounter = 0; }
src/java/com/threerings/parlor/data/Table.java
// // $Id: Table.java,v 1.12 2002/12/20 23:41:26 mdb Exp $ package com.threerings.parlor.data; import com.samskivert.util.StringUtil; import com.threerings.presents.dobj.DSet; import com.threerings.parlor.data.ParlorCodes; import com.threerings.parlor.game.GameConfig; /** * This class represents a table that is being used to matchmake a game by * the Parlor services. */ public class Table implements DSet.Entry, ParlorCodes { /** The unique identifier for this table. */ public Integer tableId; /** The object id of the lobby object with which this table is * associated. */ public int lobbyOid; /** The oid of the game that was created from this table or -1 if the * table is still in matchmaking mode. */ public int gameOid = -1; /** An array of the usernames of the occupants of this table (some * slots may not be filled). */ public String[] occupants; /** The body oids of the occupants of this table. (This is not * propagated to remote instances.) */ public transient int[] bodyOids; /** The game config for the game that is being matchmade. This config * instance will also implement {@link TableConfig}. */ public GameConfig config; /** * Creates a new table instance, and assigns it the next monotonically * increasing table id. The supplied config instance must implement * {@link TableConfig} or a <code>ClassCastException</code> will be * thrown. * * @param lobbyOid the object id of the lobby in which this table is * to live. * @param config the configuration of the game being matchmade by this * table. */ public Table (int lobbyOid, GameConfig config) { // assign a unique table id tableId = new Integer(++_tableIdCounter); // keep track of our lobby oid this.lobbyOid = lobbyOid; // keep a casted reference around _tconfig = (TableConfig)config; this.config = config; // make room for the maximum number of players occupants = new String[_tconfig.getMaximumPlayers()]; bodyOids = new int[occupants.length]; } /** * Constructs a blank table instance, suitable for unserialization. */ public Table () { } /** * A convenience function for accessing the table id as an int. */ public int getTableId () { return tableId.intValue(); } /** * Once a table is ready to play (see {@link #readyToStart}), the * players array can be fetched using this method. It will return an * array containing the usernames of all of the players in the game, * sized properly and with each player in the appropriate position. */ public String[] getPlayers () { // count up the players int pcount = 0; for (int i = 0; i < occupants.length; i++) { if (!StringUtil.blank(occupants[i])) { pcount++; } } // create and populate the players array String[] players = new String[pcount]; pcount = 0; for (int i = 0; i < occupants.length; i++) { if (!StringUtil.blank(occupants[i])) { players[pcount++] = occupants[i]; } } return players; } /** * Requests to seat the specified user at the specified position in * this table. * * @param position the position in which to seat the user. * @param username the username of the user to be set. * @param bodyOid the body object id of the user to be set. * * @return null if the user was successfully seated, a string error * code explaining the failure if the user was not able to be seated * at that position. 
*/ public String setOccupant (int position, String username, int bodyOid) { // find out how many positions we have for occupation int maxpos = _tconfig.getDesiredPlayers(); // if there is no desired number of players, use the max if (maxpos == -1) { maxpos = _tconfig.getMaximumPlayers(); } // make sure the requested position is a valid one if (position >= maxpos) { return INVALID_TABLE_POSITION; } // make sure the requested position is not already occupied if (!StringUtil.blank(occupants[position])) { return TABLE_POSITION_OCCUPIED; } // otherwise all is well, stick 'em in occupants[position] = username; bodyOids[position] = bodyOid; return null; } /** * Requests that the specified user be removed from their seat at this * table. * * @return true if the user was seated at the table and has now been * removed, false if the user was never seated at the table in the * first place. */ public boolean clearOccupant (String username) { for (int i = 0; i < occupants.length; i++) { if (username.equals(occupants[i])) { occupants[i] = ""; bodyOids[i] = 0; return true; } } return false; } /** * Requests that the user identified by the specified body object id * be removed from their seat at this table. * * @return true if the user was seated at the table and has now been * removed, false if the user was never seated at the table in the * first place. */ public boolean clearOccupant (int bodyOid) { for (int i = 0; i < bodyOids.length; i++) { if (bodyOid == bodyOids[i]) { occupants[i] = ""; bodyOids[i] = 0; return true; } } return false; } /** * Returns true if this table has occupants in all of the desired * positions and should be started. */ public boolean readyToStart () { int need = _tconfig.getDesiredPlayers(); if (need == -1) { need = _tconfig.getMaximumPlayers(); } // make sure the first "need" players are filled in for (int i = 0; i < need; i++) { if (StringUtil.blank(occupants[i])) { return false; } } return true; } /** * Returns true if there is no one sitting at this table. */ public boolean isEmpty () { for (int i = 0; i < occupants.length; i++) { if (!StringUtil.blank(occupants[i])) { return false; } } return true; } /** * Returns true if this table is in play, false if it is still being * matchmade. */ public boolean inPlay () { return gameOid != -1; } // documentation inherited public Comparable getKey () { return tableId; } /** * Returns true if this table is equal to the supplied object (which * must be a table with the same table id). */ public boolean equals (Object other) { if (other != null && other instanceof Table) { return ((Table)other).tableId.equals(tableId); } else { return false; } } /** * Generates a string representation of this table instance. */ public String toString () { return "[tableId=" + tableId + ", lobbyOid=" + lobbyOid + ", gameOid=" + gameOid + ", occupants=" + StringUtil.toString(occupants) + ", bodyOids=" + StringUtil.toString(bodyOids) + ", config=" + config + "]"; } /** A casted reference of our game config object. */ protected TableConfig _tconfig; /** A counter for assigning table ids. */ protected static int _tableIdCounter = 0; }
That feller needs to be transient. git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@2317 542714f4-19e9-0310-aa3c-eee0fc999fb1
src/java/com/threerings/parlor/data/Table.java
That feller needs to be transient.
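Editor's note: the flattened new_contents/old_contents above differ by a single field declaration, shown side by side below for readability. The code is excerpted from the record itself; only the comments are editorial.

    // old_contents: the casted config reference is an ordinary field
    protected TableConfig _tconfig;

    // new_contents: marked transient, so this derived duplicate of the public
    // `config` field is not serialized/propagated along with the table
    protected transient TableConfig _tconfig;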
Java
apache-2.0
c520567676de4ace31c7080faa43d92a208067c7
0
heiko-braun/wildfly-swarm-1,gastaldi/wildfly-swarm,heiko-braun/wildfly-swarm-1,bobmcwhirter/wildfly-swarm,kenfinnigan/wildfly-swarm,jamezp/wildfly-swarm,wildfly-swarm/wildfly-swarm,kenfinnigan/wildfly-swarm,heiko-braun/wildfly-swarm-1,jamesnetherton/wildfly-swarm,Ladicek/wildfly-swarm,gastaldi/wildfly-swarm,emag/wildfly-swarm,wildfly-swarm/wildfly-swarm-core,wildfly-swarm/wildfly-swarm-core,juangon/wildfly-swarm,nelsongraca/wildfly-swarm,jamezp/wildfly-swarm,jamesnetherton/wildfly-swarm,nelsongraca/wildfly-swarm,christian-posta/wildfly-swarm,nelsongraca/wildfly-swarm,wildfly-swarm/wildfly-swarm-core,gastaldi/wildfly-swarm,wildfly-swarm/wildfly-swarm,heiko-braun/wildfly-swarm-1,wildfly-swarm/wildfly-swarm-core,juangon/wildfly-swarm,Ladicek/wildfly-swarm,jamesnetherton/wildfly-swarm,juangon/wildfly-swarm,nelsongraca/wildfly-swarm,Ladicek/wildfly-swarm,kenfinnigan/wildfly-swarm,jamesnetherton/wildfly-swarm,christian-posta/wildfly-swarm,emag/wildfly-swarm,bobmcwhirter/wildfly-swarm,juangon/wildfly-swarm,nelsongraca/wildfly-swarm,heiko-braun/wildfly-swarm-1,jamesnetherton/wildfly-swarm,wildfly-swarm/wildfly-swarm-core,Ladicek/wildfly-swarm,bobmcwhirter/wildfly-swarm,emag/wildfly-swarm,christian-posta/wildfly-swarm,Ladicek/wildfly-swarm,wildfly-swarm/wildfly-swarm,jamezp/wildfly-swarm,gastaldi/wildfly-swarm,wildfly-swarm/wildfly-swarm,emag/wildfly-swarm,bobmcwhirter/wildfly-swarm,jamezp/wildfly-swarm,wildfly-swarm/wildfly-swarm,christian-posta/wildfly-swarm,christian-posta/wildfly-swarm,jamezp/wildfly-swarm,kenfinnigan/wildfly-swarm,bobmcwhirter/wildfly-swarm,juangon/wildfly-swarm,emag/wildfly-swarm,gastaldi/wildfly-swarm,kenfinnigan/wildfly-swarm
/** * Copyright 2015-2016 Red Hat, Inc, and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.swarm.plugin.gradle; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.gradle.api.Action; import org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.DependencySet; import org.gradle.api.artifacts.ResolvedDependency; import org.gradle.api.artifacts.repositories.MavenArtifactRepository; import org.gradle.api.internal.artifacts.DefaultExcludeRule; import org.gradle.api.internal.artifacts.dependencies.DefaultDependencyArtifact; import org.gradle.api.internal.artifacts.dependencies.DefaultExternalModuleDependency; import org.gradle.api.internal.artifacts.dependencies.DefaultProjectDependency; import org.gradle.api.internal.project.DefaultProjectAccessListener; import org.gradle.api.internal.project.ProjectInternal; import org.wildfly.swarm.tools.ArtifactResolvingHelper; import org.wildfly.swarm.tools.ArtifactSpec; /** * @author Bob McWhirter */ public class GradleArtifactResolvingHelper implements ArtifactResolvingHelper { private final Project project; Map<String, Project> projects; public GradleArtifactResolvingHelper(Project project) { this.project = project; this.projects = project.getRootProject().getAllprojects().stream().collect(Collectors.toMap(p -> p.getGroup() + ":" + p.getName() + ":" + p.getVersion(), p -> p)); this.project.getRepositories().maven(new Action<MavenArtifactRepository>() { @Override public void execute(MavenArtifactRepository repo) { repo.setName("jboss-public"); repo.setUrl("http://repository.jboss.org/nexus/content/groups/public/"); } }); } @Override public ArtifactSpec resolve(final ArtifactSpec spec) { if (spec.file != null) { return spec; } final Iterator<ResolvedDependency> iterator = doResolve(new HashSet<>(Collections.singletonList(spec))).iterator(); if (iterator.hasNext()) { spec.file = iterator.next() .getModuleArtifacts() .iterator().next() .getFile(); return spec; } return null; } @Override public Set<ArtifactSpec> resolveAll(final Set<ArtifactSpec> specs) throws Exception { if (specs.isEmpty()) { return specs; } final Set<ArtifactSpec> resolvedSpecs = new HashSet<>(); doResolve(specs).forEach(dep -> dep.getModuleArtifacts() .forEach(artifact -> resolvedSpecs .add(new ArtifactSpec(dep.getConfiguration(), dep.getModuleGroup(), artifact.getName(), dep.getModuleVersion(), artifact.getExtension(), artifact.getClassifier(), artifact.getFile())))); return resolvedSpecs.stream() .filter(a -> !"system".equals(a.scope)) .collect(Collectors.toSet()); } private Set<ResolvedDependency> doResolve(final Collection<ArtifactSpec> deps) { final Configuration config = this.project.getConfigurations().detachedConfiguration(); final DependencySet dependencySet = config.getDependencies(); deps.stream() .forEach(spec -> { if 
(projects.containsKey(spec.groupId() + ":" + spec.artifactId() + ":" + spec.version())) { dependencySet.add(new DefaultProjectDependency((ProjectInternal) projects.get(spec.groupId() + ":" + spec.artifactId() + ":" + spec.version()), new DefaultProjectAccessListener(), false)); } else { final DefaultExternalModuleDependency d = new DefaultExternalModuleDependency(spec.groupId(), spec.artifactId(), spec.version()); final DefaultDependencyArtifact da = new DefaultDependencyArtifact(spec.artifactId(), spec.type(), spec.type(), spec.classifier(), null); d.addArtifact(da); d.getExcludeRules().add(new DefaultExcludeRule()); dependencySet.add(d); } }); return config.getResolvedConfiguration().getFirstLevelModuleDependencies(); } }
src/main/java/org/wildfly/swarm/plugin/gradle/GradleArtifactResolvingHelper.java
/** * Copyright 2015-2016 Red Hat, Inc, and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.swarm.plugin.gradle; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.stream.Collectors; import org.gradle.api.Action; import org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.DependencySet; import org.gradle.api.artifacts.ResolvedDependency; import org.gradle.api.artifacts.repositories.MavenArtifactRepository; import org.gradle.api.internal.artifacts.DefaultExcludeRule; import org.gradle.api.internal.artifacts.dependencies.DefaultDependencyArtifact; import org.gradle.api.internal.artifacts.dependencies.DefaultExternalModuleDependency; import org.wildfly.swarm.tools.ArtifactResolvingHelper; import org.wildfly.swarm.tools.ArtifactSpec; /** * @author Bob McWhirter */ public class GradleArtifactResolvingHelper implements ArtifactResolvingHelper { private final Project project; public GradleArtifactResolvingHelper(Project project) { this.project = project; this.project.getRepositories().maven(new Action<MavenArtifactRepository>() { @Override public void execute(MavenArtifactRepository repo) { repo.setName("jboss-public"); repo.setUrl("http://repository.jboss.org/nexus/content/groups/public/"); } }); } @Override public ArtifactSpec resolve(final ArtifactSpec spec) { if (spec.file != null) { return spec; } final Iterator<ResolvedDependency> iterator = doResolve(new HashSet<>(Collections.singletonList(spec))).iterator(); if (iterator.hasNext()) { spec.file = iterator.next() .getModuleArtifacts() .iterator().next() .getFile(); return spec; } return null; } @Override public Set<ArtifactSpec> resolveAll(final Set<ArtifactSpec> specs) throws Exception { if (specs.isEmpty()) { return specs; } final Set<ArtifactSpec> resolvedSpecs = new HashSet<>(); doResolve(specs).forEach(dep -> dep.getAllModuleArtifacts() .forEach(artifact -> resolvedSpecs .add(new ArtifactSpec(dep.getConfiguration(), dep.getModuleGroup(), artifact.getName(), dep.getModuleVersion(), artifact.getExtension(), artifact.getClassifier(), artifact.getFile())))); return resolvedSpecs.stream() .filter(a -> !"system".equals(a.scope)) .collect(Collectors.toSet()); } private Set<ResolvedDependency> doResolve(final Collection<ArtifactSpec> deps) { final Configuration config = this.project.getConfigurations().detachedConfiguration(); final DependencySet dependencySet = config.getDependencies(); deps.forEach(spec -> { final DefaultExternalModuleDependency d = new DefaultExternalModuleDependency(spec.groupId(), spec.artifactId(), spec.version()); final DefaultDependencyArtifact da = new DefaultDependencyArtifact(spec.artifactId(), spec.type(), spec.type(), spec.classifier(), null); d.addArtifact(da); d.getExcludeRules().add(new DefaultExcludeRule()); dependencySet.add(d); }); return config.getResolvedConfiguration().getFirstLevelModuleDependencies(); } }
Add multi-project support to GradleArtifactResolvingHelper
src/main/java/org/wildfly/swarm/plugin/gradle/GradleArtifactResolvingHelper.java
Add multi-project support to GradleArtifactResolvingHelper
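Editor's note: the substance of this commit is easier to see as an excerpt than in the flattened file bodies above. The sketch below is condensed from the record's new_contents and is not a standalone compilable unit; the `key` local is editorial shorthand for the "group:name:version" concatenation the record repeats inline.

    // The resolver now indexes every project in the build by "group:name:version"...
    this.projects = project.getRootProject().getAllprojects().stream()
            .collect(Collectors.toMap(p -> p.getGroup() + ":" + p.getName() + ":" + p.getVersion(), p -> p));

    // ...and doResolve() emits a project dependency when a spec matches a subproject,
    // falling back to the previous external-module path otherwise.
    String key = spec.groupId() + ":" + spec.artifactId() + ":" + spec.version();
    if (projects.containsKey(key)) {
        dependencySet.add(new DefaultProjectDependency(
                (ProjectInternal) projects.get(key), new DefaultProjectAccessListener(), false));
    }
    else {
        final DefaultExternalModuleDependency d =
                new DefaultExternalModuleDependency(spec.groupId(), spec.artifactId(), spec.version());
        d.addArtifact(new DefaultDependencyArtifact(spec.artifactId(), spec.type(), spec.type(), spec.classifier(), null));
        d.getExcludeRules().add(new DefaultExcludeRule());
        dependencySet.add(d);
    }

The record also shows resolveAll() switching from dep.getAllModuleArtifacts() to dep.getModuleArtifacts() as part of the same change.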
Java
apache-2.0
01dda2a8a9a3c6db2c9cdf3586b484c2fd7f8650
0
debezium/debezium,debezium/debezium,jpechane/debezium,jpechane/debezium,debezium/debezium,jpechane/debezium,debezium/debezium,jpechane/debezium
/* * Copyright Debezium Authors. * * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package io.debezium.connector.postgresql; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.kafka.connect.errors.ConnectException; import org.postgresql.core.BaseConnection; import org.postgresql.core.TypeInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.debezium.connector.postgresql.connection.PostgresConnection; import io.debezium.util.Collect; /** * A registry of types supported by a PostgreSQL instance. Allows lookup of the types according to * type name or OID. * * @author Jiri Pechanec * */ public class TypeRegistry { private static final Logger LOGGER = LoggerFactory.getLogger(TypeRegistry.class); public static final String TYPE_NAME_GEOGRAPHY = "geography"; public static final String TYPE_NAME_GEOMETRY = "geometry"; public static final String TYPE_NAME_CITEXT = "citext"; public static final String TYPE_NAME_HSTORE = "hstore"; public static final String TYPE_NAME_LTREE = "ltree"; public static final String TYPE_NAME_HSTORE_ARRAY = "_hstore"; public static final String TYPE_NAME_GEOGRAPHY_ARRAY = "_geography"; public static final String TYPE_NAME_GEOMETRY_ARRAY = "_geometry"; public static final String TYPE_NAME_CITEXT_ARRAY = "_citext"; public static final String TYPE_NAME_LTREE_ARRAY = "_ltree"; public static final int NO_TYPE_MODIFIER = -1; public static final int UNKNOWN_LENGTH = -1; // PostgreSQL driver reports user-defined Domain types as Types.DISTINCT public static final int DOMAIN_TYPE = Types.DISTINCT; private static final String CATEGORY_ARRAY = "A"; private static final String CATEGORY_ENUM = "E"; private static final String SQL_NON_ARRAY_TYPES = "SELECT t.oid AS oid, t.typname AS name, t.typbasetype AS parentoid, t.typtypmod as modifiers, t.typcategory as category " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON (t.typnamespace = n.oid) " + "WHERE n.nspname != 'pg_toast' AND t.typcategory <> 'A'"; private static final String SQL_ARRAY_TYPES = "SELECT t.oid AS oid, t.typname AS name, t.typelem AS element, t.typbasetype AS parentoid, t.typtypmod as modifiers, t.typcategory as category " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON (t.typnamespace = n.oid) " + "WHERE n.nspname != 'pg_toast' AND t.typcategory = 'A'"; private static final String SQL_NAME_LOOKUP = "SELECT t.oid as oid, t.typname AS name, t.typelem AS element, t.typbasetype AS parentoid, t.typtypmod AS modifiers, t.typcategory as category " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON (t.typnamespace = n.oid) " + "WHERE n.nspname != 'pg_toast' AND t.typname = ?"; private static final String SQL_OID_LOOKUP = "SELECT t.oid as oid, t.typname AS name, t.typelem AS element, t.typbasetype AS parentoid, t.typtypmod AS modifiers, t.typcategory as category " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON (t.typnamespace = n.oid) " + "WHERE n.nspname != 'pg_toast' AND t.oid = ?"; private static final String SQL_ENUM_VALUES_LOOKUP = "select t.enumlabel as enum_value " + "FROM pg_catalog.pg_enum t " + "WHERE t.enumtypid=? 
ORDER BY t.enumsortorder"; private static final Map<String, String> LONG_TYPE_NAMES = Collections.unmodifiableMap(getLongTypeNames()); private static Map<String, String> getLongTypeNames() { Map<String, String> longTypeNames = new HashMap<>(); longTypeNames.put("bigint", "int8"); longTypeNames.put("bit varying", "varbit"); longTypeNames.put("boolean", "bool"); longTypeNames.put("character", "bpchar"); longTypeNames.put("character varying", "varchar"); longTypeNames.put("double precision", "float8"); longTypeNames.put("integer", "int4"); longTypeNames.put("real", "float4"); longTypeNames.put("smallint", "int2"); longTypeNames.put("timestamp without time zone", "timestamp"); longTypeNames.put("timestamp with time zone", "timestamptz"); longTypeNames.put("time without time zone", "time"); longTypeNames.put("time with time zone", "timetz"); return longTypeNames; } private final Map<String, PostgresType> nameToType = new HashMap<>(); private final Map<Integer, PostgresType> oidToType = new HashMap<>(); private final PostgresConnection connection; private int geometryOid = Integer.MIN_VALUE; private int geographyOid = Integer.MIN_VALUE; private int citextOid = Integer.MIN_VALUE; private int hstoreOid = Integer.MIN_VALUE; private int ltreeOid = Integer.MIN_VALUE; private int hstoreArrayOid = Integer.MIN_VALUE; private int geometryArrayOid = Integer.MIN_VALUE; private int geographyArrayOid = Integer.MIN_VALUE; private int citextArrayOid = Integer.MIN_VALUE; private int ltreeArrayOid = Integer.MIN_VALUE; public TypeRegistry(PostgresConnection connection) { this.connection = connection; prime(); } private void addType(PostgresType type) { oidToType.put(type.getOid(), type); nameToType.put(type.getName(), type); if (TYPE_NAME_GEOMETRY.equals(type.getName())) { geometryOid = type.getOid(); } else if (TYPE_NAME_GEOGRAPHY.equals(type.getName())) { geographyOid = type.getOid(); } else if (TYPE_NAME_CITEXT.equals(type.getName())) { citextOid = type.getOid(); } else if (TYPE_NAME_HSTORE.equals(type.getName())) { hstoreOid = type.getOid(); } else if (TYPE_NAME_LTREE.equals(type.getName())) { ltreeOid = type.getOid(); } else if (TYPE_NAME_HSTORE_ARRAY.equals(type.getName())) { hstoreArrayOid = type.getOid(); } else if (TYPE_NAME_GEOMETRY_ARRAY.equals(type.getName())) { geometryArrayOid = type.getOid(); } else if (TYPE_NAME_GEOGRAPHY_ARRAY.equals(type.getName())) { geographyArrayOid = type.getOid(); } else if (TYPE_NAME_CITEXT_ARRAY.equals(type.getName())) { citextArrayOid = type.getOid(); } else if (TYPE_NAME_LTREE_ARRAY.equals(type.getName())) { ltreeArrayOid = type.getOid(); } } /** * * @param oid - PostgreSQL OID * @return type associated with the given OID */ public PostgresType get(int oid) { PostgresType r = oidToType.get(oid); if (r == null) { r = resolveUnknownType(oid); if (r == null) { LOGGER.warn("Unknown OID {} requested", oid); r = PostgresType.UNKNOWN; } } return r; } /** * * @param name - PostgreSQL type name * @return type associated with the given type name */ public PostgresType get(String name) { switch (name) { case "serial": name = "int4"; break; case "smallserial": name = "int2"; break; case "bigserial": name = "int8"; break; } String[] parts = name.split("\\."); if (parts.length > 1) { name = parts[1]; } if (name.charAt(0) == '"') { name = name.substring(1, name.length() - 1); } PostgresType r = nameToType.get(name); if (r == null) { r = resolveUnknownType(name); if (r == null) { LOGGER.warn("Unknown type named {} requested", name); r = PostgresType.UNKNOWN; } } return r; } /** * * 
@return OID for {@code GEOMETRY} type of this PostgreSQL instance */ public int geometryOid() { return geometryOid; } /** * * @return OID for {@code GEOGRAPHY} type of this PostgreSQL instance */ public int geographyOid() { return geographyOid; } /** * * @return OID for {@code CITEXT} type of this PostgreSQL instance */ public int citextOid() { return citextOid; } /** * * @return OID for {@code HSTORE} type of this PostgreSQL instance */ public int hstoreOid() { return hstoreOid; } /** * * @return OID for {@code LTREE} type of this PostgreSQL instance */ public int ltreeOid() { return ltreeOid; } /** * * @return OID for array of {@code HSTORE} type of this PostgreSQL instance */ public int hstoreArrayOid() { return hstoreArrayOid; } /** * * @return OID for array of {@code GEOMETRY} type of this PostgreSQL instance */ public int geometryArrayOid() { return geometryArrayOid; } /** * * @return OID for array of {@code GEOGRAPHY} type of this PostgreSQL instance */ public int geographyArrayOid() { return geographyArrayOid; } /** * * @return OID for array of {@code CITEXT} type of this PostgreSQL instance */ public int citextArrayOid() { return citextArrayOid; } /** * * @return OID for array of {@code LTREE} type of this PostgreSQL instance */ public int ltreeArrayOid() { return ltreeArrayOid; } /** * Converts a type name in long (readable) format like <code>boolean</code> to s standard * data type name like <code>bool</code>. * * @param typeName - a type name in long format * @return - the type name in standardized format */ public static String normalizeTypeName(String typeName) { return LONG_TYPE_NAMES.getOrDefault(typeName, typeName); } /** * Prime the {@link TypeRegistry} with all existing database types */ private void prime() { Connection pgConnection = null; try { pgConnection = connection.connection(); final TypeInfo typeInfo = ((BaseConnection) pgConnection).getTypeInfo(); final SqlTypeMapper sqlTypeMapper = new SqlTypeMapper(pgConnection, typeInfo); try (final Statement statement = pgConnection.createStatement()) { // Read non-array types try (final ResultSet rs = statement.executeQuery(SQL_NON_ARRAY_TYPES)) { final List<PostgresType.Builder> delayResolvedBuilders = new ArrayList<>(); while (rs.next()) { // Coerce long to int so large unsigned values are represented as signed // Same technique is used in TypeInfoCache final int oid = (int) rs.getLong("oid"); final int parentTypeOid = (int) rs.getLong("parentoid"); final int modifiers = (int) rs.getLong("modifiers"); String typeName = rs.getString("name"); String category = rs.getString("category"); PostgresType.Builder builder = new PostgresType.Builder( this, typeName, oid, sqlTypeMapper.getSqlType(typeName), modifiers, typeInfo); if (CATEGORY_ENUM.equals(category)) { builder = builder.enumValues(resolveEnumValues(pgConnection, oid)); } // If the type does have have a base type, we can build/add immediately. if (parentTypeOid == 0) { addType(builder.build()); continue; } // For types with base type mappings, they need to be delayed. 
builder = builder.parentType(parentTypeOid); delayResolvedBuilders.add(builder); } // Resolve delayed builders for (PostgresType.Builder builder : delayResolvedBuilders) { addType(builder.build()); } } // Read array types try (final ResultSet rs = statement.executeQuery(SQL_ARRAY_TYPES)) { final List<PostgresType.Builder> delayResolvedBuilders = new ArrayList<>(); while (rs.next()) { // int2vector and oidvector will not be treated as arrays final int oid = (int) rs.getLong("oid"); final int parentTypeOid = (int) rs.getLong("parentoid"); final int modifiers = (int) rs.getLong("modifiers"); String typeName = rs.getString("name"); PostgresType.Builder builder = new PostgresType.Builder( this, typeName, oid, sqlTypeMapper.getSqlType(typeName), modifiers, typeInfo); builder = builder.elementType((int) rs.getLong("element")); // If the type doesnot have a base type, we can build/add immediately if (parentTypeOid == 0) { addType(builder.build()); continue; } // For types with base type mappings, they need to be delayed. builder = builder.parentType(parentTypeOid); delayResolvedBuilders.add(builder); } // Resolve delayed builders for (PostgresType.Builder builder : delayResolvedBuilders) { addType(builder.build()); } } } } catch (SQLException e) { if (pgConnection == null) { throw new ConnectException("Could not create PG connection", e); } else { throw new ConnectException("Could not initialize type registry", e); } } } private PostgresType resolveUnknownType(String name) { try { LOGGER.trace("Type '{}' not cached, attempting to lookup from database.", name); final Connection connection = this.connection.connection(); try (final PreparedStatement statement = connection.prepareStatement(SQL_NAME_LOOKUP)) { statement.setString(1, name); return loadType(connection, statement); } } catch (SQLException e) { throw new ConnectException("Database connection failed during resolving unknown type", e); } } private PostgresType resolveUnknownType(int lookupOid) { try { LOGGER.trace("Type OID '{}' not cached, attempting to lookup from database.", lookupOid); final Connection connection = this.connection.connection(); try (final PreparedStatement statement = connection.prepareStatement(SQL_OID_LOOKUP)) { statement.setInt(1, lookupOid); return loadType(connection, statement); } } catch (SQLException e) { throw new ConnectException("Database connection failed during resolving unknown type", e); } } private PostgresType loadType(Connection connection, PreparedStatement statement) throws SQLException { final TypeInfo typeInfo = ((BaseConnection) connection).getTypeInfo(); final SqlTypeMapper sqlTypeMapper = new SqlTypeMapper(connection, typeInfo); try (final ResultSet rs = statement.executeQuery()) { while (rs.next()) { final int oid = (int) rs.getLong("oid"); final int parentTypeOid = (int) rs.getLong("parentoid"); final int modifiers = (int) rs.getLong("modifiers"); String typeName = rs.getString("name"); String category = rs.getString("category"); PostgresType.Builder builder = new PostgresType.Builder( this, typeName, oid, sqlTypeMapper.getSqlType(typeName), modifiers, typeInfo); if (CATEGORY_ENUM.equals(category)) { builder = builder.enumValues(resolveEnumValues(connection, oid)); } else if (CATEGORY_ARRAY.equals(category)) { builder = builder.elementType((int) rs.getLong("element")); } PostgresType result = builder.parentType(parentTypeOid).build(); addType(result); return result; } } return null; } private List<String> resolveEnumValues(Connection pgConnection, int enumOid) throws SQLException { 
List<String> enumValues = new ArrayList<>(); try (final PreparedStatement enumStatement = pgConnection.prepareStatement(SQL_ENUM_VALUES_LOOKUP)) { enumStatement.setInt(1, enumOid); try (final ResultSet enumRs = enumStatement.executeQuery()) { while (enumRs.next()) { enumValues.add(enumRs.getString("enum_value")); } } } return enumValues.isEmpty() ? null : enumValues; } /** * Allows to obtain the SQL type corresponding to PG types. This uses a custom statement instead of going through * {@link PgDatabaseMetaData#getTypeInfo()} as the latter causes N+1 SELECTs, making it very slow on installations * with many custom types. * * @author Gunnar Morling * @see DBZ-899 */ private static class SqlTypeMapper { /** * Based on org.postgresql.jdbc.TypeInfoCache.getSQLType(String). To emulate the original statement's behavior * (which works for single types only), PG's DISTINCT ON extension is used to just return the first entry should a * type exist in multiple schemas. */ private static final String SQL_TYPE_DETAILS = "SELECT DISTINCT ON (typname) typname, typinput='array_in'::regproc, typtype, sp.r, pg_type.oid " + " FROM pg_catalog.pg_type " + " LEFT " + " JOIN (select ns.oid as nspoid, ns.nspname, r.r " + " from pg_namespace as ns " // -- go with older way of unnesting array to be compatible with 8.0 + " join ( select s.r, (current_schemas(false))[s.r] as nspname " + " from generate_series(1, array_upper(current_schemas(false), 1)) as s(r) ) as r " + " using ( nspname ) " + " ) as sp " + " ON sp.nspoid = typnamespace " + " ORDER BY typname, sp.r, pg_type.oid;"; private final TypeInfo typeInfo; private final Set<String> preloadedSqlTypes; private final Map<String, Integer> sqlTypesByPgTypeNames; private SqlTypeMapper(Connection db, TypeInfo typeInfo) throws SQLException { this.typeInfo = typeInfo; this.preloadedSqlTypes = Collect.unmodifiableSet(typeInfo.getPGTypeNamesWithSQLTypes()); this.sqlTypesByPgTypeNames = getSqlTypes(db, typeInfo); } public int getSqlType(String typeName) throws SQLException { boolean isCoreType = preloadedSqlTypes.contains(typeName); // obtain core types such as bool, int2 etc. from the driver, as it correctly maps these types to the JDBC // type codes. Also those values are cached in TypeInfoCache. if (isCoreType) { return typeInfo.getSQLType(typeName); } if (typeName.endsWith("[]")) { return Types.ARRAY; } // get custom type mappings from the map which was built up with a single query else { try { return sqlTypesByPgTypeNames.get(typeName); } catch (Exception e) { LOGGER.warn("Failed to obtain SQL type information for type {} via custom statement, falling back to TypeInfo#getSQLType()", typeName, e); return typeInfo.getSQLType(typeName); } } } /** * Builds up a map of SQL (JDBC) types by PG type name; contains only values for non-core types. */ private static Map<String, Integer> getSqlTypes(Connection db, TypeInfo typeInfo) throws SQLException { Map<String, Integer> sqlTypesByPgTypeNames = new HashMap<>(); try (final Statement statement = db.createStatement()) { try (final ResultSet rs = statement.executeQuery(SQL_TYPE_DETAILS)) { while (rs.next()) { int type; boolean isArray = rs.getBoolean(2); String typtype = rs.getString(3); if (isArray) { type = Types.ARRAY; } else if ("c".equals(typtype)) { type = Types.STRUCT; } else if ("d".equals(typtype)) { type = Types.DISTINCT; } else if ("e".equals(typtype)) { type = Types.VARCHAR; } else { type = Types.OTHER; } sqlTypesByPgTypeNames.put(rs.getString(1), type); } } } return sqlTypesByPgTypeNames; } } }
debezium-connector-postgres/src/main/java/io/debezium/connector/postgresql/TypeRegistry.java
/* * Copyright Debezium Authors. * * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package io.debezium.connector.postgresql; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.kafka.connect.errors.ConnectException; import org.postgresql.core.BaseConnection; import org.postgresql.core.TypeInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.debezium.connector.postgresql.connection.PostgresConnection; import io.debezium.util.Collect; /** * A registry of types supported by a PostgreSQL instance. Allows lookup of the types according to * type name or OID. * * @author Jiri Pechanec * */ public class TypeRegistry { private static final Logger LOGGER = LoggerFactory.getLogger(TypeRegistry.class); public static final String TYPE_NAME_GEOGRAPHY = "geography"; public static final String TYPE_NAME_GEOMETRY = "geometry"; public static final String TYPE_NAME_CITEXT = "citext"; public static final String TYPE_NAME_HSTORE = "hstore"; public static final String TYPE_NAME_LTREE = "ltree"; public static final String TYPE_NAME_HSTORE_ARRAY = "_hstore"; public static final String TYPE_NAME_GEOGRAPHY_ARRAY = "_geography"; public static final String TYPE_NAME_GEOMETRY_ARRAY = "_geometry"; public static final String TYPE_NAME_CITEXT_ARRAY = "_citext"; public static final String TYPE_NAME_LTREE_ARRAY = "_ltree"; public static final int NO_TYPE_MODIFIER = -1; public static final int UNKNOWN_LENGTH = -1; // PostgreSQL driver reports user-defined Domain types as Types.DISTINCT public static final int DOMAIN_TYPE = Types.DISTINCT; private static final String CATEGORY_ARRAY = "A"; private static final String CATEGORY_ENUM = "E"; private static final String SQL_NON_ARRAY_TYPES = "SELECT t.oid AS oid, t.typname AS name, t.typbasetype AS parentoid, t.typtypmod as modifiers, t.typcategory as category " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON (t.typnamespace = n.oid) " + "WHERE n.nspname != 'pg_toast' AND t.typcategory <> 'A'"; private static final String SQL_ARRAY_TYPES = "SELECT t.oid AS oid, t.typname AS name, t.typelem AS element, t.typbasetype AS parentoid, t.typtypmod as modifiers, t.typcategory as category " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON (t.typnamespace = n.oid) " + "WHERE n.nspname != 'pg_toast' AND t.typcategory = 'A'"; private static final String SQL_NAME_LOOKUP = "SELECT t.oid as oid, t.typname AS name, t.typelem AS element, t.typbasetype AS parentoid, t.typtypmod AS modifiers, t.typcategory as category " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON (t.typnamespace = n.oid) " + "WHERE n.nspname != 'pg_toast' AND t.typname = ?"; private static final String SQL_OID_LOOKUP = "SELECT t.oid as oid, t.typname AS name, t.typelem AS element, t.typbasetype AS parentoid, t.typtypmod AS modifiers, t.typcategory as category " + "FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON (t.typnamespace = n.oid) " + "WHERE n.nspname != 'pg_toast' AND t.oid = ?"; private static final String SQL_ENUM_VALUES_LOOKUP = "select t.enumlabel as enum_value " + "FROM pg_catalog.pg_enum t " + "WHERE t.enumtypid=? 
ORDER BY t.enumsortorder"; private static final Map<String, String> LONG_TYPE_NAMES = Collections.unmodifiableMap(getLongTypeNames()); private static Map<String, String> getLongTypeNames() { Map<String, String> longTypeNames = new HashMap<>(); longTypeNames.put("bigint", "int8"); longTypeNames.put("bit varying", "varbit"); longTypeNames.put("boolean", "bool"); longTypeNames.put("character", "bpchar"); longTypeNames.put("character varying", "varchar"); longTypeNames.put("double precision", "float8"); longTypeNames.put("integer", "int4"); longTypeNames.put("real", "float4"); longTypeNames.put("smallint", "int2"); longTypeNames.put("timestamp without time zone", "timestamp"); longTypeNames.put("timestamp with time zone", "timestamptz"); longTypeNames.put("time without time zone", "time"); longTypeNames.put("time with time zone", "timetz"); return longTypeNames; } private final Map<String, PostgresType> nameToType = new HashMap<>(); private final Map<Integer, PostgresType> oidToType = new HashMap<>(); private final PostgresConnection connection; private int geometryOid = Integer.MIN_VALUE; private int geographyOid = Integer.MIN_VALUE; private int citextOid = Integer.MIN_VALUE; private int hstoreOid = Integer.MIN_VALUE; private int ltreeOid = Integer.MIN_VALUE; private int hstoreArrayOid = Integer.MIN_VALUE; private int geometryArrayOid = Integer.MIN_VALUE; private int geographyArrayOid = Integer.MIN_VALUE; private int citextArrayOid = Integer.MIN_VALUE; private int ltreeArrayOid = Integer.MIN_VALUE; public TypeRegistry(PostgresConnection connection) { this.connection = connection; prime(); } private void addType(PostgresType type) { oidToType.put(type.getOid(), type); nameToType.put(type.getName(), type); if (TYPE_NAME_GEOMETRY.equals(type.getName())) { geometryOid = type.getOid(); } else if (TYPE_NAME_GEOGRAPHY.equals(type.getName())) { geographyOid = type.getOid(); } else if (TYPE_NAME_CITEXT.equals(type.getName())) { citextOid = type.getOid(); } else if (TYPE_NAME_HSTORE.equals(type.getName())) { hstoreOid = type.getOid(); } else if (TYPE_NAME_LTREE.equals(type.getName())) { ltreeOid = type.getOid(); } else if (TYPE_NAME_HSTORE_ARRAY.equals(type.getName())) { hstoreArrayOid = type.getOid(); } else if (TYPE_NAME_GEOMETRY_ARRAY.equals(type.getName())) { geometryArrayOid = type.getOid(); } else if (TYPE_NAME_GEOGRAPHY_ARRAY.equals(type.getName())) { geographyArrayOid = type.getOid(); } else if (TYPE_NAME_CITEXT_ARRAY.equals(type.getName())) { citextArrayOid = type.getOid(); } else if (TYPE_NAME_LTREE_ARRAY.equals(type.getName())) { ltreeArrayOid = type.getOid(); } } /** * * @param oid - PostgreSQL OID * @return type associated with the given OID */ public PostgresType get(int oid) { PostgresType r = oidToType.get(oid); if (r == null) { r = resolveUnknownType(oid); if (r == null) { LOGGER.warn("Unknown OID {} requested", oid); r = PostgresType.UNKNOWN; } } return r; } /** * * @param name - PostgreSQL type name * @return type associated with the given type name */ public PostgresType get(String name) { switch (name) { case "serial": name = "int4"; break; case "smallserial": name = "int2"; break; case "bigserial": name = "int8"; break; } String[] parts = name.split("\\."); if (parts.length > 1) { name = parts[1]; } if (name.charAt(0) == '"') { name = name.substring(1, name.length() - 1); } PostgresType r = nameToType.get(name); if (r == null) { r = resolveUnknownType(name); if (r == null) { LOGGER.warn("Unknown type named {} requested", name); r = PostgresType.UNKNOWN; } } return r; } /** * * 
@return OID for {@code GEOMETRY} type of this PostgreSQL instance */ public int geometryOid() { return geometryOid; } /** * * @return OID for {@code GEOGRAPHY} type of this PostgreSQL instance */ public int geographyOid() { return geographyOid; } /** * * @return OID for {@code CITEXT} type of this PostgreSQL instance */ public int citextOid() { return citextOid; } /** * * @return OID for {@code HSTORE} type of this PostgreSQL instance */ public int hstoreOid() { return hstoreOid; } /** * * @return OID for {@code LTREE} type of this PostgreSQL instance */ public int ltreeOid() { return ltreeOid; } /** * * @return OID for array of {@code HSTORE} type of this PostgreSQL instance */ public int hstoreArrayOid() { return hstoreArrayOid; } /** * * @return OID for array of {@code GEOMETRY} type of this PostgreSQL instance */ public int geometryArrayOid() { return geometryArrayOid; } /** * * @return OID for array of {@code GEOGRAPHY} type of this PostgreSQL instance */ public int geographyArrayOid() { return geographyArrayOid; } /** * * @return OID for array of {@code CITEXT} type of this PostgreSQL instance */ public int citextArrayOid() { return citextArrayOid; } /** * * @return OID for array of {@code LTREE} type of this PostgreSQL instance */ public int ltreeArrayOid() { return ltreeArrayOid; } /** * Converts a type name in long (readable) format like <code>boolean</code> to s standard * data type name like <code>bool</code>. * * @param typeName - a type name in long format * @return - the type name in standardized format */ public static String normalizeTypeName(String typeName) { return LONG_TYPE_NAMES.getOrDefault(typeName, typeName); } /** * Prime the {@link TypeRegistry} with all existing database types */ private void prime() { Connection pgConnection = null; try { pgConnection = connection.connection(); final TypeInfo typeInfo = ((BaseConnection) pgConnection).getTypeInfo(); final SqlTypeMapper sqlTypeMapper = new SqlTypeMapper(pgConnection, typeInfo); try (final Statement statement = pgConnection.createStatement()) { // Read non-array types try (final ResultSet rs = statement.executeQuery(SQL_NON_ARRAY_TYPES)) { final List<PostgresType.Builder> delayResolvedBuilders = new ArrayList<>(); while (rs.next()) { // Coerce long to int so large unsigned values are represented as signed // Same technique is used in TypeInfoCache final int oid = (int) rs.getLong("oid"); final int parentTypeOid = (int) rs.getLong("parentoid"); final int modifiers = (int) rs.getLong("modifiers"); String typeName = rs.getString("name"); String category = rs.getString("category"); PostgresType.Builder builder = new PostgresType.Builder( this, typeName, oid, sqlTypeMapper.getSqlType(typeName), modifiers, typeInfo); if (CATEGORY_ENUM.equals(category)) { builder = builder.enumValues(resolveEnumValues(pgConnection, oid)); } // If the type does have have a base type, we can build/add immediately. if (parentTypeOid == 0) { addType(builder.build()); continue; } // For types with base type mappings, they need to be delayed. 
builder = builder.parentType(parentTypeOid); delayResolvedBuilders.add(builder); } // Resolve delayed builders for (PostgresType.Builder builder : delayResolvedBuilders) { addType(builder.build()); } } // Read array types try (final ResultSet rs = statement.executeQuery(SQL_ARRAY_TYPES)) { final List<PostgresType.Builder> delayResolvedBuilders = new ArrayList<>(); while (rs.next()) { // int2vector and oidvector will not be treated as arrays final int oid = (int) rs.getLong("oid"); final int parentTypeOid = (int) rs.getLong("parentoid"); final int modifiers = (int) rs.getLong("modifiers"); String typeName = rs.getString("name"); PostgresType.Builder builder = new PostgresType.Builder( this, typeName, oid, sqlTypeMapper.getSqlType(typeName), modifiers, typeInfo); builder = builder.elementType((int) rs.getLong("element")); // If the type doesnot have a base type, we can build/add immediately if (parentTypeOid == 0) { addType(builder.build()); continue; } // For types with base type mappings, they need to be delayed. builder = builder.parentType(parentTypeOid); delayResolvedBuilders.add(builder); } // Resolve delayed builders for (PostgresType.Builder builder : delayResolvedBuilders) { addType(builder.build()); } } } } catch (SQLException e) { if (pgConnection == null) { throw new ConnectException("Could not create PG connection", e); } else { throw new ConnectException("Could not initialize type registry", e); } } } private PostgresType resolveUnknownType(String name) { try { LOGGER.trace("Type '{}' not cached, attempting to lookup from database.", name); final Connection connection = this.connection.connection(); final TypeInfo typeInfo = ((BaseConnection) connection).getTypeInfo(); final SqlTypeMapper sqlTypeMapper = new SqlTypeMapper(connection, typeInfo); try (final PreparedStatement statement = connection.prepareStatement(SQL_NAME_LOOKUP)) { statement.setString(1, name); try (final ResultSet rs = statement.executeQuery()) { while (rs.next()) { final int oid = (int) rs.getLong("oid"); final int parentTypeOid = (int) rs.getLong("parentoid"); final int modifiers = (int) rs.getLong("modifiers"); String typeName = rs.getString("name"); String category = rs.getString("category"); PostgresType.Builder builder = new PostgresType.Builder( this, typeName, oid, sqlTypeMapper.getSqlType(typeName), modifiers, typeInfo); if (CATEGORY_ENUM.equals(category)) { builder = builder.enumValues(resolveEnumValues(connection, oid)); } else if (CATEGORY_ARRAY.equals(category)) { builder = builder.elementType((int) rs.getLong("element")); } PostgresType result = builder.parentType(parentTypeOid).build(); addType(result); return result; } } } } catch (SQLException e) { throw new ConnectException("Database connection failed during resolving unknown type", e); } return null; } private PostgresType resolveUnknownType(int lookupOid) { try { LOGGER.trace("Type OID '{}' not cached, attempting to lookup from database.", lookupOid); final Connection connection = this.connection.connection(); final TypeInfo typeInfo = ((BaseConnection) connection).getTypeInfo(); final SqlTypeMapper sqlTypeMapper = new SqlTypeMapper(connection, typeInfo); try (final PreparedStatement statement = connection.prepareStatement(SQL_OID_LOOKUP)) { statement.setInt(1, lookupOid); try (final ResultSet rs = statement.executeQuery()) { while (rs.next()) { final int oid = (int) rs.getLong("oid"); final int parentTypeOid = (int) rs.getLong("parentoid"); final int modifiers = (int) rs.getLong("modifiers"); String typeName = rs.getString("name"); String 
category = rs.getString("category"); PostgresType.Builder builder = new PostgresType.Builder( this, typeName, oid, sqlTypeMapper.getSqlType(typeName), modifiers, typeInfo); if (CATEGORY_ENUM.equals(category)) { builder = builder.enumValues(resolveEnumValues(connection, oid)); } else if (CATEGORY_ARRAY.equals(category)) { builder = builder.elementType((int) rs.getLong("element")); } PostgresType result = builder.parentType(parentTypeOid).build(); addType(result); return result; } } } } catch (SQLException e) { throw new ConnectException("Database connection failed during resolving unknown type", e); } return null; } private List<String> resolveEnumValues(Connection pgConnection, int enumOid) throws SQLException { List<String> enumValues = new ArrayList<>(); try (final PreparedStatement enumStatement = pgConnection.prepareStatement(SQL_ENUM_VALUES_LOOKUP)) { enumStatement.setInt(1, enumOid); try (final ResultSet enumRs = enumStatement.executeQuery()) { while (enumRs.next()) { enumValues.add(enumRs.getString("enum_value")); } } } return enumValues.isEmpty() ? null : enumValues; } /** * Allows to obtain the SQL type corresponding to PG types. This uses a custom statement instead of going through * {@link PgDatabaseMetaData#getTypeInfo()} as the latter causes N+1 SELECTs, making it very slow on installations * with many custom types. * * @author Gunnar Morling * @see DBZ-899 */ private static class SqlTypeMapper { /** * Based on org.postgresql.jdbc.TypeInfoCache.getSQLType(String). To emulate the original statement's behavior * (which works for single types only), PG's DISTINCT ON extension is used to just return the first entry should a * type exist in multiple schemas. */ private static final String SQL_TYPE_DETAILS = "SELECT DISTINCT ON (typname) typname, typinput='array_in'::regproc, typtype, sp.r, pg_type.oid " + " FROM pg_catalog.pg_type " + " LEFT " + " JOIN (select ns.oid as nspoid, ns.nspname, r.r " + " from pg_namespace as ns " // -- go with older way of unnesting array to be compatible with 8.0 + " join ( select s.r, (current_schemas(false))[s.r] as nspname " + " from generate_series(1, array_upper(current_schemas(false), 1)) as s(r) ) as r " + " using ( nspname ) " + " ) as sp " + " ON sp.nspoid = typnamespace " + " ORDER BY typname, sp.r, pg_type.oid;"; private final TypeInfo typeInfo; private final Set<String> preloadedSqlTypes; private final Map<String, Integer> sqlTypesByPgTypeNames; private SqlTypeMapper(Connection db, TypeInfo typeInfo) throws SQLException { this.typeInfo = typeInfo; this.preloadedSqlTypes = Collect.unmodifiableSet(typeInfo.getPGTypeNamesWithSQLTypes()); this.sqlTypesByPgTypeNames = getSqlTypes(db, typeInfo); } public int getSqlType(String typeName) throws SQLException { boolean isCoreType = preloadedSqlTypes.contains(typeName); // obtain core types such as bool, int2 etc. from the driver, as it correctly maps these types to the JDBC // type codes. Also those values are cached in TypeInfoCache. if (isCoreType) { return typeInfo.getSQLType(typeName); } if (typeName.endsWith("[]")) { return Types.ARRAY; } // get custom type mappings from the map which was built up with a single query else { try { return sqlTypesByPgTypeNames.get(typeName); } catch (Exception e) { LOGGER.warn("Failed to obtain SQL type information for type {} via custom statement, falling back to TypeInfo#getSQLType()", typeName, e); return typeInfo.getSQLType(typeName); } } } /** * Builds up a map of SQL (JDBC) types by PG type name; contains only values for non-core types. 
*/ private static Map<String, Integer> getSqlTypes(Connection db, TypeInfo typeInfo) throws SQLException { Map<String, Integer> sqlTypesByPgTypeNames = new HashMap<>(); try (final Statement statement = db.createStatement()) { try (final ResultSet rs = statement.executeQuery(SQL_TYPE_DETAILS)) { while (rs.next()) { int type; boolean isArray = rs.getBoolean(2); String typtype = rs.getString(3); if (isArray) { type = Types.ARRAY; } else if ("c".equals(typtype)) { type = Types.STRUCT; } else if ("d".equals(typtype)) { type = Types.DISTINCT; } else if ("e".equals(typtype)) { type = Types.VARCHAR; } else { type = Types.OTHER; } sqlTypesByPgTypeNames.put(rs.getString(1), type); } } } return sqlTypesByPgTypeNames; } } }
DBZ-2038 Load unknown postgres types by name and oid using same logic
debezium-connector-postgres/src/main/java/io/debezium/connector/postgresql/TypeRegistry.java
DBZ-2038 Load unknown postgres types by name and oid using same logic
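Editor's note: condensed from the record's new_contents (trace logging omitted; not a standalone compilable unit). The change replaces the duplicated row-to-type mapping in the two resolveUnknownType overloads with delegation to a single loadType(Connection, PreparedStatement) helper, so name- and OID-based lookups share the same logic.

    private PostgresType resolveUnknownType(String name) {
        try {
            final Connection connection = this.connection.connection();
            try (final PreparedStatement statement = connection.prepareStatement(SQL_NAME_LOOKUP)) {
                statement.setString(1, name);
                return loadType(connection, statement);   // shared mapping of the result row
            }
        }
        catch (SQLException e) {
            throw new ConnectException("Database connection failed during resolving unknown type", e);
        }
    }

    private PostgresType resolveUnknownType(int lookupOid) {
        try {
            final Connection connection = this.connection.connection();
            try (final PreparedStatement statement = connection.prepareStatement(SQL_OID_LOOKUP)) {
                statement.setInt(1, lookupOid);
                return loadType(connection, statement);   // same helper, different lookup query
            }
        }
        catch (SQLException e) {
            throw new ConnectException("Database connection failed during resolving unknown type", e);
        }
    }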
Java
apache-2.0
c5e315990c9751a028a195547fddf33cfac8e330
0
jdillon/gshell,jdillon/gshell,jdillon/gshell,jdillon/gshell
/* * Copyright (C) 2009 the original author(s). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sonatype.gshell.shell; import jline.console.Completer; import jline.console.ConsoleReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sonatype.gshell.branding.Branding; import org.sonatype.gshell.command.IO; import org.sonatype.gshell.console.Console; import org.sonatype.gshell.console.ConsoleErrorHandler; import org.sonatype.gshell.console.ConsolePrompt; import org.sonatype.gshell.console.ConsoleTask; import org.sonatype.gshell.event.EventAware; import org.sonatype.gshell.event.EventManager; import org.sonatype.gshell.execute.CommandExecutor; import org.sonatype.gshell.io.Closer; import org.sonatype.gshell.io.StreamJack; import org.sonatype.gshell.notification.ExitNotification; import org.sonatype.gshell.util.Arguments; import org.sonatype.gshell.vars.VariableNames; import org.sonatype.gshell.vars.Variables; import org.sonatype.gshell.vars.VariablesImpl; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicReference; /** * The default {@link Shell} component. * * @author <a href="mailto:[email protected]">Jason Dillon</a> * @since 2.0 */ public class ShellImpl implements Shell, VariableNames { private final Logger log = LoggerFactory.getLogger(getClass()); private final Branding branding; private final CommandExecutor executor; private IO io; private final Variables variables; private final ShellHistory history; private List<Completer> completers; private ConsolePrompt prompt; private ConsoleErrorHandler errorHandler; private boolean opened; // // TODO: Maybe these should be set in variables? More supportable than adding new methods for little features like this. // private boolean loadProfileScripts = true; private boolean loadInteractiveScripts = true; public ShellImpl(final EventManager eventManager, final CommandExecutor executor, final Branding branding, final IO io, final Variables variables) throws IOException { assert eventManager != null; assert executor != null; assert branding != null; // io and variables may be null this.executor = executor; this.branding = branding; this.io = io != null ? io : new IO(); this.variables = variables != null ? 
variables : new VariablesImpl(); if (variables instanceof EventAware) { ((EventAware) variables).setEventManager(eventManager); } this.history = new ShellHistory(new File(branding.getUserContextDir(), branding.getHistoryFileName())); } public Branding getBranding() { return branding; } public IO getIo() { return io; } public Variables getVariables() { return variables; } public History getHistory() { return history; } public void setPrompt(final ConsolePrompt prompt) { this.prompt = prompt; } public void setErrorHandler(final ConsoleErrorHandler errorHandler) { this.errorHandler = errorHandler; } public void setCompleters(final List<Completer> completers) { this.completers = completers; } public void setCompleters(final Completer... completers) { assert completers != null; setCompleters(Arrays.asList(completers)); } public boolean isLoadProfileScripts() { return loadProfileScripts; } public void setLoadProfileScripts(boolean enable) { this.loadProfileScripts = enable; } public boolean isLoadInteractiveScripts() { return loadInteractiveScripts; } public void setLoadInteractiveScripts(boolean enable) { this.loadInteractiveScripts = enable; } public synchronized boolean isOpened() { return opened; } public synchronized void close() { opened = false; } private synchronized void ensureOpened() { if (!opened) { try { open(); } catch (Exception e) { throw new RuntimeException(e); } } } private synchronized void open() throws Exception { log.debug("Opening"); StreamJack.maybeInstall(); // Customize the shell branding.customize(this); opened = true; log.debug("Opened"); // Do this after we are marked as opened loadProfileScripts(); } public boolean isInteractive() { return true; } // FIXME: History should still be appended if not running inside of a JLineConsole public Object execute(final String line) throws Exception { ensureOpened(); return executor.execute(this, line); } public Object execute(final String command, final Object[] args) throws Exception { ensureOpened(); return executor.execute(this, command, args); } public Object execute(final Object... args) throws Exception { ensureOpened(); return executor.execute(this, args); } public void run(final Object... 
args) throws Exception { assert args != null; ensureOpened(); log.debug("Starting interactive console; args: {}", Arguments.toStringArray(args)); loadInteractiveScripts(); // Setup 2 final refs to allow our executor to pass stuff back to us final AtomicReference<ExitNotification> exitNotifHolder = new AtomicReference<ExitNotification>(); Callable<ConsoleTask> taskFactory = new Callable<ConsoleTask>() { public ConsoleTask call() throws Exception { return new ConsoleTask() { @Override public boolean doExecute(final String input) throws Exception { try { // result is saved to LAST_RESULT via the CommandExecutor ShellImpl.this.execute(input); } catch (ExitNotification n) { exitNotifHolder.set(n); return false; } return true; } }; } }; IO io = getIo(); Console console = new Console(io, taskFactory, history, loadBindings()); if (prompt != null) { console.setPrompt(prompt); } if (errorHandler != null) { console.setErrorHandler(errorHandler); } if (completers != null && !completers.isEmpty()) { for (Completer completer : completers) { console.addCompleter(completer); } } if (!io.isQuiet()) { renderWelcomeMessage(io); } // Check if there are args, and run them and then enter interactive if (args.length != 0) { execute(args); } // HACK: We have to replace the IO with the consoles so that children use the piped input final IO lastIo = io; this.io = console.getIo(); final Shell lastShell = ShellHolder.set(this); try { console.run(); } finally { this.io = lastIo; ShellHolder.set(lastShell); } if (!io.isQuiet()) { renderGoodbyeMessage(io); } // If any exit notification occurred while running, then puke it up ExitNotification n = exitNotifHolder.get(); if (n != null) { throw n; } } private InputStream loadBindings() throws IOException { File file = new File(branding.getUserContextDir(), ConsoleReader.JLINE_KEYBINDINGS); if (!file.exists() || !file.isFile()) { file = new File(branding.getShellContextDir(), ConsoleReader.JLINE_KEYBINDINGS); if (!file.exists() || file.isFile()) { try { String fileName = System.getProperty(ConsoleReader.JLINE_KEYBINDINGS); if (fileName != null) { file = new File(fileName); } if (!file.exists() || file.isFile()) { file = new File(branding.getUserHomeDir(), ConsoleReader.JLINEBINDINGS_PROPERTIES); } } catch (Exception e) { log.warn("Failed to load key-bindings", e); } } } InputStream bindings; if (file.exists() && file.isFile() && file.canRead()) { log.debug("Using bindings from file: {}", file); bindings = new BufferedInputStream(new FileInputStream(file)); } else { log.trace("Using default bindings"); bindings = io.getTerminal().getDefaultBindings(); } return bindings; } private void renderMessage(final IO io, final String msg) { assert io != null; if (msg != null) { io.out.println(msg); io.out.flush(); } } protected void renderWelcomeMessage(final IO io) { renderMessage(io, branding.getWelcomeMessage()); } protected void renderGoodbyeMessage(final IO io) { renderMessage(io, branding.getGoodbyeMessage()); } // Script Loader protected void loadProfileScripts() throws Exception { if (!isLoadProfileScripts()) return; String fileName = branding.getProfileScriptName(); loadSharedScript(fileName); loadUserScript(fileName); } protected void loadInteractiveScripts() throws Exception { if (!isLoadInteractiveScripts()) return; String fileName = branding.getInteractiveScriptName(); loadSharedScript(fileName); loadUserScript(fileName); } protected void loadScript(final File file) throws Exception { assert file != null; log.debug("Loading script: {}", file); BufferedReader reader = new 
BufferedReader(new FileReader(file)); try { String line; while ((line = reader.readLine()) != null) { execute(line); } } finally { Closer.close(reader); } } protected void loadUserScript(final String fileName) throws Exception { assert fileName != null; File file = new File(branding.getUserContextDir(), fileName); if (file.exists()) { loadScript(file); } else { log.trace("User script is not present: {}", file); } } protected void loadSharedScript(final String fileName) throws Exception { assert fileName != null; File file = new File(branding.getShellContextDir(), fileName); if (file.exists()) { loadScript(file); } else { log.trace("Shared script is not present: {}", file); } } }
gshell-core/src/main/java/org/sonatype/gshell/shell/ShellImpl.java
/* * Copyright (C) 2009 the original author(s). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sonatype.gshell.shell; import jline.console.Completer; import jline.console.ConsoleReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sonatype.gshell.branding.Branding; import org.sonatype.gshell.command.IO; import org.sonatype.gshell.console.Console; import org.sonatype.gshell.console.ConsoleErrorHandler; import org.sonatype.gshell.console.ConsolePrompt; import org.sonatype.gshell.console.ConsoleTask; import org.sonatype.gshell.event.EventAware; import org.sonatype.gshell.event.EventManager; import org.sonatype.gshell.execute.CommandExecutor; import org.sonatype.gshell.io.Closer; import org.sonatype.gshell.io.StreamJack; import org.sonatype.gshell.notification.ExitNotification; import org.sonatype.gshell.util.Arguments; import org.sonatype.gshell.vars.VariableNames; import org.sonatype.gshell.vars.Variables; import org.sonatype.gshell.vars.VariablesImpl; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicReference; /** * The default {@link Shell} component. * * @author <a href="mailto:[email protected]">Jason Dillon</a> * @since 2.0 */ public class ShellImpl implements Shell, VariableNames { private final Logger log = LoggerFactory.getLogger(getClass()); private final Branding branding; private final CommandExecutor executor; private IO io; private final Variables variables; private final ShellHistory history; private List<Completer> completers; private ConsolePrompt prompt; private ConsoleErrorHandler errorHandler; private boolean opened; // // TODO: Maybe these should be set in variables? More supportable than adding new methods for little features like this. // private boolean loadProfileScripts = true; private boolean loadInteractiveScripts = true; public ShellImpl(final EventManager eventManager, final CommandExecutor executor, final Branding branding, final IO io, final Variables variables) throws IOException { assert eventManager != null; assert executor != null; assert branding != null; // io and variables may be null this.executor = executor; this.branding = branding; this.io = io != null ? io : new IO(); this.variables = variables != null ? 
variables : new VariablesImpl(); if (variables instanceof EventAware) { ((EventAware) variables).setEventManager(eventManager); } this.history = new ShellHistory(new File(branding.getUserContextDir(), branding.getHistoryFileName())); } public Branding getBranding() { return branding; } public IO getIo() { return io; } public Variables getVariables() { return variables; } public History getHistory() { return history; } public void setPrompt(final ConsolePrompt prompt) { this.prompt = prompt; } public void setErrorHandler(final ConsoleErrorHandler errorHandler) { this.errorHandler = errorHandler; } public void setCompleters(final List<Completer> completers) { this.completers = completers; } public void setCompleters(final Completer... completers) { assert completers != null; setCompleters(Arrays.asList(completers)); } public boolean isLoadProfileScripts() { return loadProfileScripts; } public void setLoadProfileScripts(boolean enable) { this.loadProfileScripts = enable; } public boolean isLoadInteractiveScripts() { return loadInteractiveScripts; } public void setLoadInteractiveScripts(boolean enable) { this.loadInteractiveScripts = enable; } public synchronized boolean isOpened() { return opened; } public synchronized void close() { opened = false; } private synchronized void ensureOpened() { if (!opened) { try { open(); } catch (Exception e) { throw new RuntimeException(e); } } } private synchronized void open() throws Exception { log.debug("Opening"); StreamJack.maybeInstall(); // Customize the shell branding.customize(this); // Load profile scripts loadProfileScripts(); opened = true; log.debug("Opened"); } public boolean isInteractive() { return true; } // FIXME: History should still be appended if not running inside of a JLineConsole public Object execute(final String line) throws Exception { ensureOpened(); return executor.execute(this, line); } public Object execute(final String command, final Object[] args) throws Exception { ensureOpened(); return executor.execute(this, command, args); } public Object execute(final Object... args) throws Exception { ensureOpened(); return executor.execute(this, args); } public void run(final Object... 
args) throws Exception { assert args != null; ensureOpened(); log.debug("Starting interactive console; args: {}", Arguments.toStringArray(args)); loadInteractiveScripts(); // Setup 2 final refs to allow our executor to pass stuff back to us final AtomicReference<ExitNotification> exitNotifHolder = new AtomicReference<ExitNotification>(); Callable<ConsoleTask> taskFactory = new Callable<ConsoleTask>() { public ConsoleTask call() throws Exception { return new ConsoleTask() { @Override public boolean doExecute(final String input) throws Exception { try { // result is saved to LAST_RESULT via the CommandExecutor ShellImpl.this.execute(input); } catch (ExitNotification n) { exitNotifHolder.set(n); return false; } return true; } }; } }; IO io = getIo(); Console console = new Console(io, taskFactory, history, loadBindings()); if (prompt != null) { console.setPrompt(prompt); } if (errorHandler != null) { console.setErrorHandler(errorHandler); } if (completers != null && !completers.isEmpty()) { for (Completer completer : completers) { console.addCompleter(completer); } } if (!io.isQuiet()) { renderWelcomeMessage(io); } // Check if there are args, and run them and then enter interactive if (args.length != 0) { execute(args); } // HACK: We have to replace the IO with the consoles so that children use the piped input final IO lastIo = io; this.io = console.getIo(); final Shell lastShell = ShellHolder.set(this); try { console.run(); } finally { this.io = lastIo; ShellHolder.set(lastShell); } if (!io.isQuiet()) { renderGoodbyeMessage(io); } // If any exit notification occurred while running, then puke it up ExitNotification n = exitNotifHolder.get(); if (n != null) { throw n; } } private InputStream loadBindings() throws IOException { File file = new File(branding.getUserContextDir(), ConsoleReader.JLINE_KEYBINDINGS); if (!file.exists() || !file.isFile()) { file = new File(branding.getShellContextDir(), ConsoleReader.JLINE_KEYBINDINGS); if (!file.exists() || file.isFile()) { try { String fileName = System.getProperty(ConsoleReader.JLINE_KEYBINDINGS); if (fileName != null) { file = new File(fileName); } if (!file.exists() || file.isFile()) { file = new File(branding.getUserHomeDir(), ConsoleReader.JLINEBINDINGS_PROPERTIES); } } catch (Exception e) { log.warn("Failed to load key-bindings", e); } } } InputStream bindings; if (file.exists() && file.isFile() && file.canRead()) { log.debug("Using bindings from file: {}", file); bindings = new BufferedInputStream(new FileInputStream(file)); } else { log.trace("Using default bindings"); bindings = io.getTerminal().getDefaultBindings(); } return bindings; } private void renderMessage(final IO io, final String msg) { assert io != null; if (msg != null) { io.out.println(msg); io.out.flush(); } } protected void renderWelcomeMessage(final IO io) { renderMessage(io, branding.getWelcomeMessage()); } protected void renderGoodbyeMessage(final IO io) { renderMessage(io, branding.getGoodbyeMessage()); } // Script Loader protected void loadProfileScripts() throws Exception { if (!isLoadProfileScripts()) return; String fileName = branding.getProfileScriptName(); loadSharedScript(fileName); loadUserScript(fileName); } protected void loadInteractiveScripts() throws Exception { if (!isLoadInteractiveScripts()) return; String fileName = branding.getInteractiveScriptName(); loadSharedScript(fileName); loadUserScript(fileName); } protected void loadScript(final File file) throws Exception { assert file != null; log.debug("Loading script: {}", file); BufferedReader reader = new 
BufferedReader(new FileReader(file)); try { String line; while ((line = reader.readLine()) != null) { execute(line); } } finally { Closer.close(reader); } } protected void loadUserScript(final String fileName) throws Exception { assert fileName != null; File file = new File(branding.getUserContextDir(), fileName); if (file.exists()) { loadScript(file); } else { log.trace("User script is not present: {}", file); } } protected void loadSharedScript(final String fileName) throws Exception { assert fileName != null; File file = new File(branding.getShellContextDir(), fileName); if (file.exists()) { loadScript(file); } else { log.trace("Shared script is not present: {}", file); } } }
Load profile scripts after we are marked as opened, else we get nasty stack overflows
gshell-core/src/main/java/org/sonatype/gshell/shell/ShellImpl.java
Load profile scripts after we are marked as opened, else we get nasty stack overflows
Java
apache-2.0
e691cb47abf4237eb1eb22fc71cf443f6bb6ed42
0
osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi
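The commit above moves loadProfileScripts() after the opened flag is set because execute() routes through ensureOpened(): while the flag is still false, every line of the profile script re-enters open() and the recursion overflows the stack. A minimal, self-contained sketch of that guard pattern, using invented class and method names rather than the actual GShell types:

// Minimal sketch of the startup ordering fixed above; the class and method
// names are illustrative, not the actual GShell types.
public class LazyShell {

    private boolean opened;

    private synchronized void ensureOpened() {
        if (!opened) {
            open();
        }
    }

    private synchronized void open() {
        // Mark the shell as opened *before* running startup scripts. If this
        // flag were set afterwards, every execute() call made while loading
        // the profile script would re-enter ensureOpened() -> open() and
        // recurse until the stack overflows.
        opened = true;
        loadProfileScripts();
    }

    private void loadProfileScripts() {
        // Each script line goes back through the public entry point.
        execute("echo profile loaded");
    }

    public Object execute(final String line) {
        ensureOpened();
        return line; // stand-in for real command execution
    }

    public static void main(String[] args) {
        System.out.println(new LazyShell().execute("version"));
    }
}

Running main prints the command back once; swapping the two statements in open() reproduces the StackOverflowError the commit message describes.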
/* * Copyright (c) OSGi Alliance (2005, 2008). All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.osgi.service.deploymentadmin; import java.net.URL; import org.osgi.framework.Bundle; import org.osgi.framework.ServiceReference; import org.osgi.framework.Version; /** * The <code>DeploymentPackage</code> object represents a deployment package * (already installed or being currently processed). A Deployment Package groups * resources as a unit of management. A deployment package is something that can * be installed, updated, and uninstalled as a unit. A deployment package is a * reified concept, like a bundle, in an OSGi Service Platform. It is not known * by the OSGi Framework, but it is managed by the Deployment Admin service. A * deployment package is a stream of resources (including bundles) which, once * processed, will result in new artifacts (effects on the system) being added * to the OSGi platform. These new artifacts can include installed Bundles, new * configuration objects added to the Configuration Admin service, new Wire * objects added to the Wire Admin service, or changed system properties, etc. * All the changes caused by the processing of a deployment package are * persistently associated with the deployment package, so that they can be * appropriately cleaned up when the deployment package is uninstalled. There is * a strict no overlap rule imposed on deployment packages. Two deployment * packages are not allowed to create or manipulate the same artifact. * Obviously, this means that a bundle cannot be in two different deployment * packages. Any violation of this no overlap rule is considered an error and * the install or update of the offending deployment package must be aborted. * <p> * * The Deployment Admin service should do as much as possible to ensure * transactionality. It means that if a deployment package installation, update * or removal (uninstall) fails all the side effects caused by the process * should be disappeared and the system should be in the state in which it was * before the process. * <p> * * If a deployment package is being updated the old version is visible through * the <code>DeploymentPackage</code> interface until the update process ends. * After the package is updated the updated version is visible and the old one * is not accessible any more. */ public interface DeploymentPackage { /** * The name of the Deployment Package. This name is the same name as that * specified in the DeploymentPackage-SymbolicName Manifest header. */ String EVENT_DEPLOYMENTPACKAGE_NAME = "deploymentpackage.name"; /** * The human readable name of the DP localized to the default locale. */ String EVENT_DEPLOYMENTPACKAGE_READABLENAME = "deploymentpackage.readablename"; /** * The currently installed version of the Deployment Package. 
The attribute * is not present, if no version is installed: * <ul> * <li>in the INSTALL event, when an installDeploymentPackage was called * and no earlier version is present * <li> in the COMPLETE event after the _successfully_ completing an * uninstallDeploymentPackage call * </ul> * The value for this event must be a Version object. */ String EVENT_DEPLOYMENTPACKAGE_CURRENTVERSION = "deploymentpackage.currentversion"; /** * The version of DP after the successful completion of the install * operation (used in INSTALL event only). * * The value for this event must be a Version object. */ String EVENT_DEPLOYMENTPACKAGE_NEXTVERSION = "deploymentpackage.nextversion"; /** * Gives back the state of the deployment package whether it is stale or * not). After uninstall of a deployment package it becomes stale. Any * active method calls to a stale deployment package raise * {@link IllegalStateException}. Active methods are the following: * <p> * * <ul> * <li>{@link #getBundle(String)}</li> * <li>{@link #getResourceProcessor(String)}</li> * <li>{@link #uninstall()}</li> * <li>{@link #uninstallForced()}</li> * </ul> * * @return <code>true</code> if the deployment package is stale. * <code>false</code> otherwise * @see #uninstall * @see #uninstallForced */ boolean isStale(); /** * Returns the Deployment Package Symbolic Name of the package. * * @return The name of the deployment package. It cannot be null. */ String getName(); /** * Returns the Deployment Package human readable name. * * This method returns the localized human readable name as set with the * <code>DeploymentPackage-Name</code> manifest header using the default * locale. If no header is set, this method will return <code>null</code>. * * @return The human readable name of the deployment package or * <code>null</code> if header is not set. */ String getDisplayName(); /** * Returns the version of the deployment package. * * @return version of the deployment package. It cannot be null. */ Version getVersion(); /** * Returns an array of {@link BundleInfo} objects representing the bundles * specified in the manifest of this deployment package. Its size is equal * to the number of the bundles in the deployment package. * * @return array of <code>BundleInfo</code> objects * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action */ BundleInfo[] getBundleInfos(); /** * Returns a URL pointing to an image that represents the icon for this * Deployment Package. * * The <code>DeploymentPackage-Icon</code> header can set an icon for the * the deployment package. This method returns an absolute URL that is * defined by this header. The Deployment Admin service must provide this * icon as a local resource. That is, the Deployment Admin must make a local * copy of the default locale. The returned <code>URL</code>'s must point * to a local resource. * * @return An absolute URL to a local (device resident) image resource or * <code>null</code> if not found */ URL getIcon(); /** * Returns the bundle instance, which is part of this deployment package, * that corresponds to the bundle's symbolic name passed in the * <code>symbolicName</code> parameter. This method will return null for * request for bundles that are not part of this deployment package. * <p> * * As this instance is transient (i.e. 
a bundle can be removed at any time * because of the dynamic nature of the OSGi platform), this method may also * return null if the bundle is part of this deployment package, but is not * currently defined to the framework. * * @param symbolicName * the symbolic name of the requested bundle * @return The <code>Bundle</code> instance for a given bundle symbolic * name. * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action * @throws IllegalStateException * if the package is stale */ Bundle getBundle(String symbolicName); /** * Returns an array of strings representing the resources (including * bundles) that are specified in the manifest of this deployment package. A * string element of the array is the same as the value of the "Name" * attribute in the manifest. The array contains the bundles as well. * <p> * * E.g. if the "Name" section of the resource (or individual-section as the * <a * href="http://java.sun.com/j2se/1.4.2/docs/guide/jar/jar.html#Manifest%20Specification">Manifest * Specification</a> calls it) in the manifest is the following * * <pre> * Name: foo/readme.txt * Resource-Processor: foo.rp * </pre> * * then the corresponding array element is the "foo/readme.txt" string. * <p> * * @return The string array corresponding to resources. It cannot be null * but its length can be zero. * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action */ String[] getResources(); /** * At the time of deployment, resource processor service instances are * located to resources contained in a deployment package. * <p> * * This call returns a service reference to the corresponding service * instance. If the resource is not part of the deployment package or this * call is made during deployment, prior to the locating of the service to * process a given resource, null will be returned. Services can be updated * after a deployment package has been deployed. In this event, this call * will return a reference to the updated service, not to the instance that * was used at deployment time. * * @param resource * the name of the resource (it is the same as the value of the * "Name" attribute in the deployment package's manifest) * @return resource processor for the resource or <code>null</cpde>. * @throws SecurityException if the caller doesn't have the appropriate {@link DeploymentAdminPermission} * with "metadata" action * @throws IllegalStateException if the package is stale */ ServiceReference getResourceProcessor(String resource); /** * Returns the requested deployment package manifest header from the main * section. Header names are case insensitive. If the header doesn't exist * it returns null. * <p> * * If the header is localized then the localized value is returned (see OSGi * Service Platform, Mobile Specification Release 4 - Localization related * chapters). * * @param header * the requested header * @return the value of the header or <code>null</code> if the header does * not exist * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action */ String getHeader(String header); /** * Returns the requested deployment package manifest header from the name * section determined by the resource parameter. Header names are case * insensitive. If the resource or the header doesn't exist it returns null. 
* <p> * * If the header is localized then the localized value is returned (see OSGi * Service Platform, Mobile Specification Release 4 - Localization related * chapters). * * @param resource * the name of the resource (it is the same as the value of the * "Name" attribute in the deployment package's manifest) * @param header * the requested header * @return the value of the header or <code>null</code> if the resource or * the header doesn't exist * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action */ String getResourceHeader(String resource, String header); /** * Uninstalls the deployment package. After uninstallation, the deployment * package object becomes stale. This can be checked by using * {@link #isStale()}, which will return <code>true</code> when stale. * <p> * * @throws DeploymentException * if the deployment package could not be successfully * uninstalled. For detailed error code description see * {@link DeploymentException}. * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission}("&lt;filter&gt;", * "uninstall") permission. * @throws IllegalStateException * if the package is stale */ void uninstall() throws DeploymentException; /** * This method is called to completely uninstall a deployment package, which * couldn't be uninstalled using traditional means ({@link #uninstall()}) * due to exceptions. After uninstallation, the deployment package object * becomes stale. This can be checked by using {@link #isStale()}, which * will return <code>true</code> when stale. * <p> * * The method forces removal of the Deployment Package from the repository * maintained by the Deployment Admin service. This method follows the same * steps as {@link #uninstall}. However, any errors or the absence of * Resource Processor services are ignored, they must not cause a roll back. * These errors should be logged. * * @return true if the operation was successful * @throws DeploymentException * only {@link DeploymentException#CODE_TIMEOUT} and * {@link DeploymentException#CODE_CANCELLED} can be thrown. For * detailed error code description see * {@link DeploymentException}. * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission}("&lt;filter&gt;", * "uninstall_forced") permission. * @throws IllegalStateException * if the package is stale */ boolean uninstallForced() throws DeploymentException; /** * Returns a hash code value for the object. * * @return a hash code value for this object */ int hashCode(); /** * Indicates whether some other object is "equal to" this one. Two * deployment packages are equal if they have the same deployment package * symbolicname and version. * * @param other * the reference object with which to compare. * @return true if this object is the same as the obj argument; false * otherwise. */ boolean equals(Object other); }
org.osgi.service.deploymentadmin/src/org/osgi/service/deploymentadmin/DeploymentPackage.java
/* * Copyright (c) OSGi Alliance (2005, 2008). All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.osgi.service.deploymentadmin; import java.net.URL; import org.osgi.framework.Bundle; import org.osgi.framework.ServiceReference; import org.osgi.framework.Version; /** * The <code>DeploymentPackage</code> object represents a deployment package * (already installed or being currently processed). A Deployment Package groups * resources as a unit of management. A deployment package is something that can * be installed, updated, and uninstalled as a unit. A deployment package is a * reified concept, like a bundle, in an OSGi Service Platform. It is not known * by the OSGi Framework, but it is managed by the Deployment Admin service. A * deployment package is a stream of resources (including bundles) which, once * processed, will result in new artifacts (effects on the system) being added * to the OSGi platform. These new artifacts can include installed Bundles, new * configuration objects added to the Configuration Admin service, new Wire * objects added to the Wire Admin service, or changed system properties, etc. * All the changes caused by the processing of a deployment package are * persistently associated with the deployment package, so that they can be * appropriately cleaned up when the deployment package is uninstalled. There is * a strict no overlap rule imposed on deployment packages. Two deployment * packages are not allowed to create or manipulate the same artifact. * Obviously, this means that a bundle cannot be in two different deployment * packages. Any violation of this no overlap rule is considered an error and * the install or update of the offending deployment package must be aborted. * <p> * * The Deployment Admin service should do as much as possible to ensure * transactionality. It means that if a deployment package installation, update * or removal (uninstall) fails all the side effects caused by the process * should be disappeared and the system should be in the state in which it was * before the process. * <p> * * If a deployment package is being updated the old version is visible through * the <code>DeploymentPackage</code> interface until the update process ends. * After the package is updated the updated version is visible and the old one * is not accessible any more. */ public interface DeploymentPackage { /** * The currently installed version of the Deployment Package. The attribute * is not present, if no version is installed: * <ul> * <li>in the INSTALL event, when an installDeploymentPackage was called * and no earlier version is present * <li> in the COMPLETE event after the _successfully_ completing an * uninstallDeploymentPackage call * </ul> * The value for this event must be a Version object. */ String EVENT_DEPLOYMENTPACKAGE_CURRENTVERSION = "deploymentpackage.currentversion"; /** * The version of DP after the successful completion of the install * operation (used in INSTALL event only). * * The value for this event must be a Version object. 
*/ String EVENT_DEPLOYMENTPACKAGE_NEXTVERSION = "deploymentpackage.nextversion"; /** * The human readable name of the DP localized to the default locale. */ String EVENT_DEPLOYMENTPACKAGE_READABLENAME = "deploymentpackage.readablename"; /** * Gives back the state of the deployment package whether it is stale or * not). After uninstall of a deployment package it becomes stale. Any * active method calls to a stale deployment package raise * {@link IllegalStateException}. Active methods are the following: * <p> * * <ul> * <li>{@link #getBundle(String)}</li> * <li>{@link #getResourceProcessor(String)}</li> * <li>{@link #uninstall()}</li> * <li>{@link #uninstallForced()}</li> * </ul> * * @return <code>true</code> if the deployment package is stale. * <code>false</code> otherwise * @see #uninstall * @see #uninstallForced */ boolean isStale(); /** * Returns the Deployment Package Symbolic Name of the package. * * @return The name of the deployment package. It cannot be null. */ String getName(); /** * Returns the Deployment Package human readable name. * * This method returns the localized human readable name as set with the * <code>DeploymentPackage-Name</code> manifest header using the default * locale. If no header is set, this method will return <code>null</code>. * * @return The human readable name of the deployment package or * <code>null</code> if header is not set. */ String getDisplayName(); /** * Returns the version of the deployment package. * * @return version of the deployment package. It cannot be null. */ Version getVersion(); /** * Returns an array of {@link BundleInfo} objects representing the bundles * specified in the manifest of this deployment package. Its size is equal * to the number of the bundles in the deployment package. * * @return array of <code>BundleInfo</code> objects * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action */ BundleInfo[] getBundleInfos(); /** * Returns a URL pointing to an image that represents the icon for this * Deployment Package. * * The <code>DeploymentPackage-Icon</code> header can set an icon for the * the deployment package. This method returns an absolute URL that is * defined by this header. The Deployment Admin service must provide this * icon as a local resource. That is, the Deployment Admin must make a local * copy of the default locale. The returned <code>URL</code>'s must point * to a local resource. * * @return An absolute URL to a local (device resident) image resource or * <code>null</code> if not found */ URL getIcon(); /** * Returns the bundle instance, which is part of this deployment package, * that corresponds to the bundle's symbolic name passed in the * <code>symbolicName</code> parameter. This method will return null for * request for bundles that are not part of this deployment package. * <p> * * As this instance is transient (i.e. a bundle can be removed at any time * because of the dynamic nature of the OSGi platform), this method may also * return null if the bundle is part of this deployment package, but is not * currently defined to the framework. * * @param symbolicName * the symbolic name of the requested bundle * @return The <code>Bundle</code> instance for a given bundle symbolic * name. 
* @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action * @throws IllegalStateException * if the package is stale */ Bundle getBundle(String symbolicName); /** * Returns an array of strings representing the resources (including * bundles) that are specified in the manifest of this deployment package. A * string element of the array is the same as the value of the "Name" * attribute in the manifest. The array contains the bundles as well. * <p> * * E.g. if the "Name" section of the resource (or individual-section as the * <a * href="http://java.sun.com/j2se/1.4.2/docs/guide/jar/jar.html#Manifest%20Specification">Manifest * Specification</a> calls it) in the manifest is the following * * <pre> * Name: foo/readme.txt * Resource-Processor: foo.rp * </pre> * * then the corresponding array element is the "foo/readme.txt" string. * <p> * * @return The string array corresponding to resources. It cannot be null * but its length can be zero. * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action */ String[] getResources(); /** * At the time of deployment, resource processor service instances are * located to resources contained in a deployment package. * <p> * * This call returns a service reference to the corresponding service * instance. If the resource is not part of the deployment package or this * call is made during deployment, prior to the locating of the service to * process a given resource, null will be returned. Services can be updated * after a deployment package has been deployed. In this event, this call * will return a reference to the updated service, not to the instance that * was used at deployment time. * * @param resource * the name of the resource (it is the same as the value of the * "Name" attribute in the deployment package's manifest) * @return resource processor for the resource or <code>null</cpde>. * @throws SecurityException if the caller doesn't have the appropriate {@link DeploymentAdminPermission} * with "metadata" action * @throws IllegalStateException if the package is stale */ ServiceReference getResourceProcessor(String resource); /** * Returns the requested deployment package manifest header from the main * section. Header names are case insensitive. If the header doesn't exist * it returns null. * <p> * * If the header is localized then the localized value is returned (see OSGi * Service Platform, Mobile Specification Release 4 - Localization related * chapters). * * @param header * the requested header * @return the value of the header or <code>null</code> if the header does * not exist * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action */ String getHeader(String header); /** * Returns the requested deployment package manifest header from the name * section determined by the resource parameter. Header names are case * insensitive. If the resource or the header doesn't exist it returns null. * <p> * * If the header is localized then the localized value is returned (see OSGi * Service Platform, Mobile Specification Release 4 - Localization related * chapters). 
* * @param resource * the name of the resource (it is the same as the value of the * "Name" attribute in the deployment package's manifest) * @param header * the requested header * @return the value of the header or <code>null</code> if the resource or * the header doesn't exist * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission} with "metadata" action */ String getResourceHeader(String resource, String header); /** * Uninstalls the deployment package. After uninstallation, the deployment * package object becomes stale. This can be checked by using * {@link #isStale()}, which will return <code>true</code> when stale. * <p> * * @throws DeploymentException * if the deployment package could not be successfully * uninstalled. For detailed error code description see * {@link DeploymentException}. * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission}("&lt;filter&gt;", * "uninstall") permission. * @throws IllegalStateException * if the package is stale */ void uninstall() throws DeploymentException; /** * This method is called to completely uninstall a deployment package, which * couldn't be uninstalled using traditional means ({@link #uninstall()}) * due to exceptions. After uninstallation, the deployment package object * becomes stale. This can be checked by using {@link #isStale()}, which * will return <code>true</code> when stale. * <p> * * The method forces removal of the Deployment Package from the repository * maintained by the Deployment Admin service. This method follows the same * steps as {@link #uninstall}. However, any errors or the absence of * Resource Processor services are ignored, they must not cause a roll back. * These errors should be logged. * * @return true if the operation was successful * @throws DeploymentException * only {@link DeploymentException#CODE_TIMEOUT} and * {@link DeploymentException#CODE_CANCELLED} can be thrown. For * detailed error code description see * {@link DeploymentException}. * @throws SecurityException * if the caller doesn't have the appropriate * {@link DeploymentAdminPermission}("&lt;filter&gt;", * "uninstall_forced") permission. * @throws IllegalStateException * if the package is stale */ boolean uninstallForced() throws DeploymentException; /** * Returns a hash code value for the object. * * @return a hash code value for this object */ int hashCode(); /** * Indicates whether some other object is "equal to" this one. Two * deployment packages are equal if they have the same deployment package * symbolicname and version. * * @param other * the reference object with which to compare. * @return true if this object is the same as the obj argument; false * otherwise. */ boolean equals(Object other); }
#566 Added header definitions for deployment package events
org.osgi.service.deploymentadmin/src/org/osgi/service/deploymentadmin/DeploymentPackage.java
#566 Added header definitions for deployment package events
Java
apache-2.0
2ef847bdde554017f05375db72dc6908d7730961
0
bitzl/openwayback,zubairkhatri/openwayback,MohammedElsayyed/openwayback,nlnwa/openwayback,nla/openwayback,JesseWeinstein/openwayback,efundamentals/openwayback,kris-sigur/openwayback,bitzl/openwayback,efundamentals/openwayback,iipc/openwayback,iipc/openwayback,efundamentals/openwayback,bitzl/openwayback,JesseWeinstein/openwayback,emijrp/openwayback,SpiralsSeminaire/openwayback,JesseWeinstein/openwayback,MohammedElsayyed/openwayback,efundamentals/openwayback,SpiralsSeminaire/openwayback,SpiralsSeminaire/openwayback,JesseWeinstein/openwayback,JesseWeinstein/openwayback,nla/openwayback,zubairkhatri/openwayback,nla/openwayback,bitzl/openwayback,ukwa/openwayback,nlnwa/openwayback,nlnwa/openwayback,nla/openwayback,emijrp/openwayback,emijrp/openwayback,emijrp/openwayback,nlnwa/openwayback,iipc/openwayback,zubairkhatri/openwayback,kris-sigur/openwayback,kris-sigur/openwayback,kris-sigur/openwayback,chasehd/openwayback,nla/openwayback,chasehd/openwayback,MohammedElsayyed/openwayback,kris-sigur/openwayback,nlnwa/openwayback,efundamentals/openwayback,emijrp/openwayback,ukwa/openwayback,SpiralsSeminaire/openwayback,SpiralsSeminaire/openwayback,ukwa/openwayback,chasehd/openwayback,bitzl/openwayback,zubairkhatri/openwayback
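The header constants added in the commit above are meant to be read from Deployment Admin events. A hedged sketch of an Event Admin handler that consumes them; only the DeploymentPackage constants and the Event/EventHandler APIs come from the sources shown, while the handler class itself and its println output are made up for illustration:

// Hedged sketch: an Event Admin handler reading the properties defined above.
// The class and its output are illustrative, not part of the specification.
import org.osgi.framework.Version;
import org.osgi.service.deploymentadmin.DeploymentPackage;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventHandler;

public class DeploymentEventLogger implements EventHandler {

    public void handleEvent(final Event event) {
        // Symbolic name and localized readable name of the package in the event.
        final String name = (String) event
                .getProperty(DeploymentPackage.EVENT_DEPLOYMENTPACKAGE_NAME);
        final String readableName = (String) event
                .getProperty(DeploymentPackage.EVENT_DEPLOYMENTPACKAGE_READABLENAME);

        // Versions are delivered as org.osgi.framework.Version objects and may
        // be absent, e.g. there is no current version on a first install.
        final Version current = (Version) event
                .getProperty(DeploymentPackage.EVENT_DEPLOYMENTPACKAGE_CURRENTVERSION);
        final Version next = (Version) event
                .getProperty(DeploymentPackage.EVENT_DEPLOYMENTPACKAGE_NEXTVERSION);

        System.out.println(event.getTopic() + ": " + name + " (" + readableName
                + ") " + current + " -> " + next);
    }
}

Registering the handler as an EventHandler service with an event.topics property for the deployment topics is left out here, since that wiring depends on the container.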
/* * This file is part of the Wayback archival access software * (http://archive-access.sourceforge.net/projects/wayback/). * * Licensed to the Internet Archive (IA) by one or more individual * contributors. * * The IA licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.archive.wayback.accesspoint; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Properties; import javax.servlet.ServletContext; import org.archive.wayback.ExceptionRenderer; import org.archive.wayback.QueryRenderer; import org.archive.wayback.ReplayDispatcher; import org.archive.wayback.RequestParser; import org.archive.wayback.ResultURIConverter; import org.archive.wayback.UrlCanonicalizer; import org.archive.wayback.accesscontrol.CompositeExclusionFilterFactory; import org.archive.wayback.accesscontrol.ExclusionFilterFactory; import org.archive.wayback.accesscontrol.oracleclient.OraclePolicyService; import org.archive.wayback.accesspoint.proxy.ProxyAccessPoint; import org.archive.wayback.core.WaybackRequest; import org.archive.wayback.replay.html.ContextResultURIConverterFactory; import org.archive.wayback.util.operator.BooleanOperator; import org.archive.wayback.webapp.AccessPoint; import org.archive.wayback.webapp.CustomResultFilterFactory; import org.archive.wayback.webapp.LiveWebRedirector; import org.archive.wayback.webapp.WaybackCollection; /** * Sub-AccessPoint managed by {@link CompositeAccessPoint}. * * TODO: Strictly speaking this is not an <i>Adapter</i>. It is an * {@link AccessPoint} extended with a capability to inherit/override * parent's configuration. 
* */ public class AccessPointAdapter extends AccessPoint { private CompositeAccessPoint composite; private AccessPointConfig config; // private ExclusionFilterFactory exclusionFactory; private ResultURIConverter cacheUriConverter; private Properties props = null; private boolean switchable = false; public AccessPointAdapter(CompositeAccessPoint baseAccessPoint, AccessPointConfig config) { this.composite = baseAccessPoint; this.config = config; // this.exclusionFactory = null; this.switchable = true; initMergedProps(); } public AccessPointAdapter(String accessPointName, CompositeAccessPoint baseAccessPoint) { this.composite = baseAccessPoint; // this.exclusionFactory = null; this.config = baseAccessPoint.getAccessPointConfigs().getAccessPointConfigs().get(accessPointName); this.switchable = false; initMergedProps(); } protected void initMergedProps() { this.props = new Properties(); // First put the generic ones if (composite.getConfigs() != null) { props.putAll(composite.getConfigs()); } // Now, the custom ones for this config if (config.getConfigs() != null) { props.putAll(config.getConfigs()); } } public CompositeAccessPoint getBaseAccessPoint() { return composite; } public boolean isProxyMode() { return composite.isProxyEnabled(); } public boolean isProxySwitchable() { return switchable && isProxyMode(); } public String getSwitchCollPath() { return ProxyAccessPoint.SWITCH_COLLECTION_PATH; } public AccessPointConfig getAccessPointConfig() { return config; } @Override public List<String> getFileIncludePrefixes() { return config.getFileIncludePrefixes(); } @Override public List<String> getFileExcludePrefixes() { return config.getFileExcludePrefixes(); } @Override public Properties getConfigs() { return props; } @Override public String getAccessPointPath() { return config.getBeanName(); } protected String getPrefix(String basePrefix) { if (isProxyMode()) { return basePrefix; } else { return basePrefix + config.getBeanName() + "/"; } } @Override public String getStaticPrefix() { return composite.getStaticPrefix(); } @Override public String getReplayPrefix() { return getPrefix(composite.getReplayPrefix()); } @Override public String getQueryPrefix() { return getPrefix(composite.getQueryPrefix()); } @Override public boolean isExactHostMatch() { return composite.isExactHostMatch(); } @Override public boolean isExactSchemeMatch() { return composite.isExactSchemeMatch(); } @Override public boolean isUseAnchorWindow() { return composite.isUseAnchorWindow(); } @Override public boolean isServeStatic() { return composite.isServeStatic(); } @Override public ServletContext getServletContext() { return composite.getServletContext(); } @Override public LiveWebRedirector getLiveWebRedirector() { return composite.getLiveWebRedirector(); } @Override public String getLiveWebPrefix() { return composite.getLiveWebPrefix(); } @Override public String getInterstitialJsp() { return composite.getInterstitialJsp(); } @Override public Locale getLocale() { return composite.getLocale(); } @Override public List<String> getFilePatterns() { return composite.getFilePatterns(); } @Override public WaybackCollection getCollection() { if (config.getCollection() != null) { return config.getCollection(); } else { return composite.getCollection(); } } @Override public ExceptionRenderer getException() { return composite.getException(); } @Override public QueryRenderer getQuery() { return composite.getQuery(); } @Override public RequestParser getParser() { RequestParser requestParser = config.getRequestParser(); if (requestParser != 
null) { return requestParser; } else { return composite.getParser(); } } @Override public ReplayDispatcher getReplay() { return composite.getReplay(); } @Override public ResultURIConverter getUriConverter() { if (cacheUriConverter == null) { ContextResultURIConverterFactory factory = composite.getUriConverterFactory(); if (factory != null) { cacheUriConverter = factory.getContextConverter(getReplayPrefix()); } else { cacheUriConverter = composite.getUriConverter(); } } return cacheUriConverter; } @Override public BooleanOperator<WaybackRequest> getAuthentication() { return composite.getAuthentication(); } @Override public String getRefererAuth() { return composite.getRefererAuth(); } @Override public boolean isBounceToReplayPrefix() { return composite.isBounceToReplayPrefix(); } @Override public boolean isBounceToQueryPrefix() { return composite.isBounceToQueryPrefix(); } @Override public long getEmbargoMS() { return composite.getEmbargoMS(); } @Override public boolean isForceCleanQueries() { // Setting this to false to allow custom handling of adapter access // points return false; } @Override public CustomResultFilterFactory getFilterFactory() { return composite.getFilterFactory(); } @Override public UrlCanonicalizer getSelfRedirectCanonicalizer() { return composite.getSelfRedirectCanonicalizer(); } @Override public boolean isRequestAuth() { return composite.isRequestAuth(); } @Override public int getMaxRedirectAttempts() { return composite.getMaxRedirectAttempts(); } @Override public boolean isTimestampSearch() { return composite.isTimestampSearch(); } @Override public String getPerfStatsHeader() { return composite.getPerfStatsHeader(); } @Override public String getWarcFileHeader() { return composite.getWarcFileHeader(); } @Override public int getQueryCollapseTime() { return composite.getQueryCollapseTime(); } // deprecated members @Deprecated public boolean hasExclusions() { return (composite.getStaticExclusions() != null) || (composite.getOracleUrl() != null); } @SuppressWarnings("deprecation") @Override public ExclusionFilterFactory getExclusionFactory() { // if deprecated properties are not set, forward to new method. ExclusionFilterFactory factory = composite.getExclusionFactory(); // drop following if ... section when migration completes if (factory == null && hasExclusions()) { // emulate old behavior OraclePolicyService oracleFilterFactory = new OraclePolicyService(); oracleFilterFactory.setOracleUrl(composite.getOracleUrl()); ArrayList<ExclusionFilterFactory> staticExclusions = composite.getStaticExclusions(); if (staticExclusions == null) { factory = oracleFilterFactory; } else { CompositeExclusionFilterFactory compFactory = new CompositeExclusionFilterFactory(); ArrayList<ExclusionFilterFactory> members = new ArrayList<ExclusionFilterFactory>(staticExclusions); members.add(oracleFilterFactory); compFactory.setFactories(members); factory = compFactory; } setExclusionFactory(factory); } return factory; } }
wayback-core/src/main/java/org/archive/wayback/accesspoint/AccessPointAdapter.java
/* * This file is part of the Wayback archival access software * (http://archive-access.sourceforge.net/projects/wayback/). * * Licensed to the Internet Archive (IA) by one or more individual * contributors. * * The IA licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.archive.wayback.accesspoint; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Properties; import javax.servlet.ServletContext; import org.archive.wayback.ExceptionRenderer; import org.archive.wayback.QueryRenderer; import org.archive.wayback.ReplayDispatcher; import org.archive.wayback.RequestParser; import org.archive.wayback.ResultURIConverter; import org.archive.wayback.UrlCanonicalizer; import org.archive.wayback.accesscontrol.CompositeExclusionFilterFactory; import org.archive.wayback.accesscontrol.ExclusionFilterFactory; import org.archive.wayback.accesscontrol.oracleclient.OraclePolicyService; import org.archive.wayback.accesspoint.proxy.ProxyAccessPoint; import org.archive.wayback.core.WaybackRequest; import org.archive.wayback.replay.html.ContextResultURIConverterFactory; import org.archive.wayback.util.operator.BooleanOperator; import org.archive.wayback.webapp.AccessPoint; import org.archive.wayback.webapp.CustomResultFilterFactory; import org.archive.wayback.webapp.LiveWebRedirector; import org.archive.wayback.webapp.WaybackCollection; /** * Sub-AccessPoint managed by {@link CompositeAccessPoint}. * * TODO: Strictly speaking this is not an <i>Adapter</i>. It is an * {@link AccessPoint} extended with a capability to inherit/override * parent's configuration. 
* */ public class AccessPointAdapter extends AccessPoint { private CompositeAccessPoint composite; private AccessPointConfig config; // private ExclusionFilterFactory exclusionFactory; private ResultURIConverter cacheUriConverter; private Properties props = null; private boolean switchable = false; public AccessPointAdapter(CompositeAccessPoint baseAccessPoint, AccessPointConfig config) { this.composite = baseAccessPoint; this.config = config; // this.exclusionFactory = null; this.switchable = true; initMergedProps(); } public AccessPointAdapter(String accessPointName, CompositeAccessPoint baseAccessPoint) { this.composite = baseAccessPoint; // this.exclusionFactory = null; this.config = baseAccessPoint.getAccessPointConfigs().getAccessPointConfigs().get(accessPointName); this.switchable = false; initMergedProps(); } protected void initMergedProps() { this.props = new Properties(); // First put the generic ones if (composite.getConfigs() != null) { props.putAll(composite.getConfigs()); } // Now, the custom ones for this config if (config.getConfigs() != null) { props.putAll(config.getConfigs()); } } public CompositeAccessPoint getBaseAccessPoint() { return composite; } public boolean isProxyMode() { return composite.isProxyEnabled(); } public boolean isProxySwitchable() { return switchable && isProxyMode(); } public String getSwitchCollPath() { return ProxyAccessPoint.SWITCH_COLLECTION_PATH; } public AccessPointConfig getAccessPointConfig() { return config; } @Override public List<String> getFileIncludePrefixes() { return config.getFileIncludePrefixes(); } @Override public List<String> getFileExcludePrefixes() { return config.getFileExcludePrefixes(); } @Override public Properties getConfigs() { return props; } @Override public String getAccessPointPath() { return config.getBeanName(); } protected String getPrefix(String basePrefix) { if (isProxyMode()) { return basePrefix; } else { return basePrefix + config.getBeanName() + "/"; } } @Override public String getStaticPrefix() { return composite.getStaticPrefix(); } @Override public String getReplayPrefix() { return getPrefix(composite.getReplayPrefix()); } @Override public String getQueryPrefix() { return getPrefix(composite.getQueryPrefix()); } @Override public boolean isExactHostMatch() { return composite.isExactHostMatch(); } @Override public boolean isExactSchemeMatch() { return composite.isExactSchemeMatch(); } @Override public boolean isUseAnchorWindow() { return composite.isUseAnchorWindow(); } @Override public boolean isServeStatic() { return composite.isServeStatic(); } @Override public ServletContext getServletContext() { return composite.getServletContext(); } @Override public LiveWebRedirector getLiveWebRedirector() { return composite.getLiveWebRedirector(); } @Override public String getLiveWebPrefix() { return composite.getLiveWebPrefix(); } @Override public String getInterstitialJsp() { return composite.getInterstitialJsp(); } @Override public Locale getLocale() { return composite.getLocale(); } @Override public List<String> getFilePatterns() { return composite.getFilePatterns(); } @Override public WaybackCollection getCollection() { if (config.getCollection() != null) { return config.getCollection(); } else { return composite.getCollection(); } } @Override public ExceptionRenderer getException() { return composite.getException(); } @Override public QueryRenderer getQuery() { return composite.getQuery(); } @Override public RequestParser getParser() { RequestParser requestParser = config.getRequestParser(); if (requestParser != 
null) { return requestParser; } else { return composite.getParser(); } } @Override public ReplayDispatcher getReplay() { return composite.getReplay(); } @Override public ResultURIConverter getUriConverter() { if (cacheUriConverter == null) { ContextResultURIConverterFactory factory = composite.getUriConverterFactory(); if (factory != null) { cacheUriConverter = factory.getContextConverter(getReplayPrefix()); } else { cacheUriConverter = composite.getUriConverter(); } } return cacheUriConverter; } @Override public BooleanOperator<WaybackRequest> getAuthentication() { return composite.getAuthentication(); } @Override public String getRefererAuth() { return composite.getRefererAuth(); } @Override public boolean isBounceToReplayPrefix() { return composite.isBounceToReplayPrefix(); } @Override public boolean isBounceToQueryPrefix() { return composite.isBounceToQueryPrefix(); } @Override public long getEmbargoMS() { return composite.getEmbargoMS(); } @Override public boolean isForceCleanQueries() { // Setting this to false to allow custom handling of adapter access // points return false; } @Override public CustomResultFilterFactory getFilterFactory() { return composite.getFilterFactory(); } @Override public UrlCanonicalizer getSelfRedirectCanonicalizer() { return composite.getSelfRedirectCanonicalizer(); } @Override public boolean isRequestAuth() { return composite.isRequestAuth(); } @Override public int getMaxRedirectAttempts() { return composite.getMaxRedirectAttempts(); } @Override public boolean isTimestampSearch() { return composite.isTimestampSearch(); } @Override public String getPerfStatsHeader() { return composite.getPerfStatsHeader(); } @Override public String getWarcFileHeader() { return composite.getWarcFileHeader(); } @Override public int getQueryCollapseTime() { return composite.getQueryCollapseTime(); } // deprecated members @Deprecated public boolean hasExclusions() { return (composite.getStaticExclusions() != null) || (composite.getOracleUrl() != null); } // drop this override when migration is complete. @SuppressWarnings("deprecation") @Override public ExclusionFilterFactory getExclusionFactory() { // if deprecated properties are not set, forward to new method. ExclusionFilterFactory factory = getExclusionFactory(); if (factory == null && hasExclusions()) { // emulate old behavior OraclePolicyService oracleFilterFactory = new OraclePolicyService(); oracleFilterFactory.setOracleUrl(composite.getOracleUrl()); ArrayList<ExclusionFilterFactory> staticExclusions = composite.getStaticExclusions(); if (staticExclusions == null) { factory = oracleFilterFactory; } else { CompositeExclusionFilterFactory compFactory = new CompositeExclusionFilterFactory(); ArrayList<ExclusionFilterFactory> members = new ArrayList<ExclusionFilterFactory>(staticExclusions); members.add(oracleFilterFactory); compFactory.setFactories(members); factory = compFactory; } setExclusionFactory(factory); } return factory; } }
FIX: AccessPointAdapter.getExclusionFactory() not delegating to composite.
wayback-core/src/main/java/org/archive/wayback/accesspoint/AccessPointAdapter.java
FIX: AccessPointAdapter.getExclusionFactory() not delegating to composite.
Java
apache-2.0
10b689d486ad65807adaa9ee59419658973c8426
0
stefan-ziel/Activiti,stefan-ziel/Activiti,stefan-ziel/Activiti,stefan-ziel/Activiti
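The fix above replaces a self-call inside getExclusionFactory() with a call on the wrapped composite access point; the old override invoked itself and recursed. A small stand-alone sketch of that delegation rule, with invented names rather than the Wayback classes:

// Minimal sketch of the delegation rule behind the fix above, with invented
// names instead of the Wayback classes.
public class AdapterDelegationSketch {

    interface Settings {
        String exclusionPolicy();
    }

    static class BaseSettings implements Settings {
        public String exclusionPolicy() {
            return "oracle";
        }
    }

    static class SettingsAdapter implements Settings {
        private final Settings composite;

        SettingsAdapter(final Settings composite) {
            this.composite = composite;
        }

        public String exclusionPolicy() {
            // Correct: forward to the wrapped instance. Writing
            // "return exclusionPolicy();" here would call this same method
            // again and fail with a StackOverflowError, which is the bug the
            // commit message describes.
            return composite.exclusionPolicy();
        }
    }

    public static void main(String[] args) {
        System.out.println(new SettingsAdapter(new BaseSettings()).exclusionPolicy());
    }
}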
package org.activiti.rest.exception;

import org.activiti.engine.ActivitiException;

public class ActivitiConflictException extends ActivitiException {

  private static final long serialVersionUID = 1L;

  public ActivitiConflictException(String message) {
    super(message);
  }

  public ActivitiConflictException(String message, Throwable cause) {
    super(message, cause);
  }
}
modules/activiti-rest/src/main/java/org/activiti/rest/exception/ActivitiConflictException.java
package org.activiti.rest.exception;

import org.activiti.engine.ActivitiException;

public class ActivitiConflictException extends ActivitiException {

  private static final long serialVersionUID = 1L;

  public ActivitiConflictException(String message) {
    super(message);
  }

  public ActivitiConflictException(String message, Throwable cause) {
    super(message, cause);
  }
}
correcting indentation
modules/activiti-rest/src/main/java/org/activiti/rest/exception/ActivitiConflictException.java
correcting indentation
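As a brief aside, here is a hypothetical usage sketch for the exception above; the import is taken from the record, but the TaskClaimExample class, its method, and the message text are illustrative and not part of the Activiti codebase.

// Hypothetical caller; names and message text are illustrative only.
import org.activiti.rest.exception.ActivitiConflictException;

public class TaskClaimExample {
    public void claimTask(String taskId, boolean alreadyClaimed) {
        if (alreadyClaimed) {
            // The class extends ActivitiException, so existing handlers keep working;
            // a REST layer would typically translate this into an HTTP 409 Conflict.
            throw new ActivitiConflictException("Task " + taskId + " is already claimed");
        }
        // ... normal claim logic would go here ...
    }
}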
Java
apache-2.0
748be251b438d229ca729519bbd150cbac9f9a1b
0
boalang/compiler,boalang/compiler,boalang/compiler,boalang/compiler,boalang/compiler
/* * Copyright 2017, Hridesh Rajan, Robert Dyer, * Iowa State University of Science and Technology * and Bowling Green State University * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boa.functions; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.MapFile; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper.Context; import org.eclipse.jdt.core.dom.AST; import org.eclipse.jdt.core.dom.ASTParser; import org.eclipse.jdt.core.JavaCore; import com.google.protobuf.CodedInputStream; import com.google.protobuf.InvalidProtocolBufferException; import boa.datagen.DefaultProperties; import boa.datagen.util.JavaErrorCheckVisitor; import boa.datagen.util.JavaVisitor; import boa.types.Ast.*; import boa.types.Code.CodeRepository; import boa.types.Code.Revision; import boa.types.Diff.ChangedFile; import boa.types.Issues.IssueRepository; import boa.types.Issues.IssuesRoot; import boa.types.Shared.ChangeKind; import boa.types.Shared.Person; import boa.types.Toplevel.Project; /** * Boa functions for working with ASTs. * * @author rdyer */ public class BoaAstIntrinsics { @SuppressWarnings("rawtypes") static Context context; private static MapFile.Reader map, commentsMap, issuesMap; private static final Revision emptyRevision; static { Revision.Builder rb = Revision.newBuilder(); rb.setCommitDate(0); Person.Builder pb = Person.newBuilder(); pb.setUsername(""); rb.setCommitter(pb); rb.setId(""); rb.setLog(""); emptyRevision = rb.build(); } private static MapFile.Reader commitMap; public static enum COMMITCOUNTER { GETS_ATTEMPTED, GETS_SUCCEED, GETS_FAILED, GETS_FAIL_MISSING, GETS_FAIL_BADPROTOBUF, GETS_FAIL_BADLOC, }; public static enum ASTCOUNTER { GETS_ATTEMPTED, GETS_SUCCEED, GETS_FAILED, GETS_FAIL_MISSING, GETS_FAIL_BADPROTOBUF, GETS_FAIL_BADLOC, }; @FunctionSpec(name = "url", returnType = "string", formalParameters = { "ChangedFile" }) public static String changedfileToString(final ChangedFile f) { return f.getKey() + "!!" + f.getName(); } private static final ASTRoot emptyAst = ASTRoot.newBuilder().build(); private static final CommentsRoot emptyComments = CommentsRoot.newBuilder().build(); private static final IssuesRoot emptyIssues = IssuesRoot.newBuilder().build(); /** * Given a ChangedFile, return the AST for that file at that revision. 
* * @param f the ChangedFile to get a snapshot of the AST for * @return the AST, or an empty AST on any sort of error */ @SuppressWarnings("unchecked") @FunctionSpec(name = "getast", returnType = "ASTRoot", formalParameters = { "ChangedFile" }) public static ASTRoot getast(final ChangedFile f) { if (!f.getAst()) return emptyAst; context.getCounter(ASTCOUNTER.GETS_ATTEMPTED).increment(1); if (map == null) openMap(); try { final BytesWritable value = new BytesWritable(); if (map.get(new LongWritable(f.getKey()), value) == null) { context.getCounter(ASTCOUNTER.GETS_FAIL_MISSING).increment(1); } else { final CodedInputStream _stream = CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()); // defaults to 64, really big ASTs require more _stream.setRecursionLimit(Integer.MAX_VALUE); final ASTRoot root = ASTRoot.parseFrom(_stream); context.getCounter(ASTCOUNTER.GETS_SUCCEED).increment(1); return root; } } catch (final InvalidProtocolBufferException e) { e.printStackTrace(); context.getCounter(ASTCOUNTER.GETS_FAIL_BADPROTOBUF).increment(1); } catch (final IOException e) { e.printStackTrace(); context.getCounter(ASTCOUNTER.GETS_FAIL_MISSING).increment(1); } catch (final RuntimeException e) { e.printStackTrace(); context.getCounter(ASTCOUNTER.GETS_FAIL_MISSING).increment(1); } catch (final Error e) { e.printStackTrace(); context.getCounter(ASTCOUNTER.GETS_FAIL_BADPROTOBUF).increment(1); } System.err.println("error with ast: " + f.getKey() + " from " + f.getName()); context.getCounter(ASTCOUNTER.GETS_FAILED).increment(1); return emptyAst; } @SuppressWarnings("unchecked") static Revision getRevision(long key) { context.getCounter(COMMITCOUNTER.GETS_ATTEMPTED).increment(1); if (commitMap == null) openCommitMap(); try { final BytesWritable value = new BytesWritable(); if (commitMap.get(new LongWritable(key), value) == null) { context.getCounter(COMMITCOUNTER.GETS_FAIL_MISSING).increment(1); } else { final CodedInputStream _stream = CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()); // defaults to 64, really big ASTs require more _stream.setRecursionLimit(Integer.MAX_VALUE); final Revision root = Revision.parseFrom(_stream); context.getCounter(COMMITCOUNTER.GETS_SUCCEED).increment(1); return root; } } catch (final InvalidProtocolBufferException e) { e.printStackTrace(); context.getCounter(COMMITCOUNTER.GETS_FAIL_BADPROTOBUF).increment(1); } catch (final IOException e) { e.printStackTrace(); context.getCounter(COMMITCOUNTER.GETS_FAIL_MISSING).increment(1); } catch (final RuntimeException e) { e.printStackTrace(); context.getCounter(COMMITCOUNTER.GETS_FAIL_MISSING).increment(1); } catch (final Error e) { e.printStackTrace(); context.getCounter(COMMITCOUNTER.GETS_FAIL_BADPROTOBUF).increment(1); } System.err.println("error with revision: " + key); context.getCounter(COMMITCOUNTER.GETS_FAILED).increment(1); return emptyRevision; } /** * Given a ChangedFile, return the comments for that file at that revision. 
* * @param f the ChangedFile to get a snapshot of the comments for * @return the comments list, or an empty list on any sort of error */ @FunctionSpec(name = "getcomments", returnType = "CommentsRoot", formalParameters = { "ChangedFile" }) public static CommentsRoot getcomments(final ChangedFile f) { // since we know only certain kinds have comments, filter before looking up final ChangedFile.FileKind kind = f.getKind(); if (kind != ChangedFile.FileKind.SOURCE_JAVA_ERROR && kind != ChangedFile.FileKind.SOURCE_JAVA_JLS2 && kind != ChangedFile.FileKind.SOURCE_JAVA_JLS3 && kind != ChangedFile.FileKind.SOURCE_JAVA_JLS4 && kind != ChangedFile.FileKind.SOURCE_JAVA_JLS8) return emptyComments; final String rowName = f.getKey() + "!!" + f.getName(); if (commentsMap == null) openCommentMap(); try { final BytesWritable value = new BytesWritable(); if (commentsMap.get(new Text(rowName), value) != null) { final CodedInputStream _stream = CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()); final CommentsRoot root = CommentsRoot.parseFrom(_stream); return root; } } catch (final InvalidProtocolBufferException e) { e.printStackTrace(); } catch (final IOException e) { e.printStackTrace(); } catch (final RuntimeException e) { e.printStackTrace(); } catch (final Error e) { e.printStackTrace(); } System.err.println("error with comments: " + rowName); return emptyComments; } /** * Given an IssueRepository, return the issues. * * @param f the IssueRepository to get issues for * @return the issues list, or an empty list on any sort of error */ @FunctionSpec(name = "getissues", returnType = "IssuesRoot", formalParameters = { "IssueRepository" }) public static IssuesRoot getissues(final IssueRepository f) { if (issuesMap == null) openIssuesMap(); try { final BytesWritable value = new BytesWritable(); if (issuesMap.get(new Text(f.getKey()), value) != null) { final CodedInputStream _stream = CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()); final IssuesRoot root = IssuesRoot.parseFrom(_stream); return root; } } catch (final InvalidProtocolBufferException e) { e.printStackTrace(); } catch (final IOException e) { e.printStackTrace(); } catch (final RuntimeException e) { e.printStackTrace(); } catch (final Error e) { e.printStackTrace(); } System.err.println("error with issues: " + f.getKey()); return emptyIssues; } @SuppressWarnings("rawtypes") public static void setup(final Context context) { BoaAstIntrinsics.context = context; } private static void openMap() { try { final Configuration conf = context.getConfiguration(); final FileSystem fs; final Path p; if (DefaultProperties.localDataPath != null) { p = new Path(DefaultProperties.localDataPath, "ast"); fs = FileSystem.getLocal(conf); } else { p = new Path( context.getConfiguration().get("fs.default.name", "hdfs://boa-njt/"), new Path( conf.get("boa.ast.dir", conf.get("boa.input.dir", "repcache/live")), new Path("ast") ) ); fs = FileSystem.get(conf); } map = new MapFile.Reader(fs, p.toString(), conf); } catch (final Exception e) { e.printStackTrace(); } } private static void openCommentMap() { try { final Configuration conf = context.getConfiguration(); final FileSystem fs; final Path p; if (DefaultProperties.localDataPath != null) { p = new Path(DefaultProperties.localDataPath, "comments"); fs = FileSystem.getLocal(conf); } else { p = new Path( context.getConfiguration().get("fs.default.name", "hdfs://boa-njt/"), new Path( conf.get("boa.comments.dir", conf.get("boa.input.dir", "repcache/live")), new Path("comments") ) ); fs = 
FileSystem.get(conf); } commentsMap = new MapFile.Reader(fs, p.toString(), conf); } catch (final Exception e) { e.printStackTrace(); } } private static void openIssuesMap() { try { final Configuration conf = context.getConfiguration(); final FileSystem fs; final Path p; if (DefaultProperties.localDataPath != null) { p = new Path(DefaultProperties.localDataPath, "issues"); fs = FileSystem.getLocal(conf); } else { p = new Path( context.getConfiguration().get("fs.default.name", "hdfs://boa-njt/"), new Path( conf.get("boa.issues.dir", conf.get("boa.input.dir", "repcache/live")), new Path("issues") ) ); fs = FileSystem.get(conf); } issuesMap = new MapFile.Reader(fs, p.toString(), conf); } catch (final Exception e) { e.printStackTrace(); } } private static void openCommitMap() { try { final Configuration conf = context.getConfiguration(); final FileSystem fs; final Path p; if (DefaultProperties.localDataPath != null) { p = new Path(DefaultProperties.localDataPath, "commit"); fs = FileSystem.getLocal(conf); } else { p = new Path(context.getConfiguration().get("fs.default.name", "hdfs://boa-njt/"), new Path(conf.get("boa.ast.dir", conf.get("boa.input.dir", "repcache/live")), new Path("commit"))); fs = FileSystem.get(conf); } commitMap = new MapFile.Reader(fs, p.toString(), conf); } catch (final Exception e) { e.printStackTrace(); } } @SuppressWarnings("rawtypes") public static void cleanup(final Context context) { closeMap(); closeCommentMap(); closeIssuesMap(); closeCommitMap(); } private static void closeMap() { if (map != null) try { map.close(); } catch (final IOException e) { e.printStackTrace(); } map = null; } private static void closeCommentMap() { if (commentsMap != null) try { commentsMap.close(); } catch (final IOException e) { e.printStackTrace(); } commentsMap = null; } private static void closeIssuesMap() { if (issuesMap != null) try { issuesMap.close(); } catch (final IOException e) { e.printStackTrace(); } issuesMap = null; } private static void closeCommitMap() { if (commitMap != null) try { commitMap.close(); } catch (final IOException e) { e.printStackTrace(); } commitMap = null; } @FunctionSpec(name = "type_name", returnType = "string", formalParameters = { "string" }) public static String type_name(final String s) { // first, normalize the string final String t = s.replaceAll("<\\s+", "<") .replaceAll(",\\s+", ", ") .replaceAll("\\s*>\\s*", ">") .replaceAll("\\s*&\\s*", " & ") .replaceAll("\\s*\\|\\s*", " | "); if (!t.contains(".")) return t; /* * Remove qualifiers from anywhere in the string... * * SomeType => SomeType * foo.SomeType => SomeType * foo.bar.SomeType => SomeType * SomeType<T> => SomeType<T> * SomeType<T, S> => SomeType<T, S> * SomeType<foo.bar.T, S> => SomeType<T, S> * SomeType<T, foo.bar.S> => SomeType<T, S> * foo.bar.SomeType<T, foo.bar.S<bar.Q>> => SomeType<T, S<Q>> * SomeType|foo.Bar => SomeType|Bar * foo<T>.bar<T> => foo<T>.bar<T> */ return t.replaceAll("[^\\s,<>|]+\\.([^\\s\\[.,><|]+)", "$1"); } /** * A visitor that returns the total number of AST nodes. 
*/ public final static BoaCountingVisitor lenVisitor = new BoaCountingVisitor() { /** {@inheritDoc} */ @Override protected boolean defaultPreVisit() { count++; return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final Project node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final CodeRepository node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final Revision node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final ChangedFile node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final ASTRoot node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final Person node) throws Exception { return true; } }; /** * */ public static class SnapshotVisitor extends BoaCollectingVisitor<String, ChangedFile> { private long timestamp; private String[] kinds; public SnapshotVisitor initialize(final long timestamp, final String... kinds) { initialize(new HashMap<String, ChangedFile>()); this.timestamp = timestamp; this.kinds = kinds; return this; } /** {@inheritDoc} */ @Override protected boolean preVisit(final Revision node) throws Exception { return node.getCommitDate() <= timestamp; } /** {@inheritDoc} */ @Override protected boolean preVisit(final ChangedFile node) throws Exception { if (node.getChange() == ChangeKind.DELETED) { map.remove(node.getName()); return false; } boolean filter = kinds.length > 0; if (filter) { final String kindName = node.getKind().name(); for (final String kind : kinds) if (kindName.startsWith(kind)) { filter = false; break; } } if (!filter) map.put(node.getName(), node); return false; } } public final static SnapshotVisitor snapshot = new SnapshotVisitor(); /////////////////////////////// // Literal testing functions */ /////////////////////////////// /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is an integer literal. * * The test is a simplified grammar, based on the one from: * https://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-3.10 * * DecimalNumeral: * [0-9] [lL]? * [1-9] [0-9] ([0-9_]* [0-9])? [lL]? * [1-9] [_]+ [0-9] ([0-9_]* [0-9])? [lL]? * * HexNumeral: * 0 [xX] [0-9a-fA-F] ([0-9a-fA-F_]* [0-9a-fA-F])? [lL]? * * OctalNumeral: * 0 [_]* [0-7] ([0-7_]* [0-7])? [lL]? * * BinaryNumeral: * 0 [bB] [01] ([01_]* [01])? [lL]? * * If any of these match, it returns <code>true</code>. Otherwise it * returns <code>false</code>. * * @param e the expression to test * @return true if the expression is an integer literal, otherwise false */ @FunctionSpec(name = "isintlit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isIntLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; if (e.getLiteral().matches("^[0-9][lL]?$")) return true; if (e.getLiteral().matches("^[1-9][0-9]([0-9_]*[0-9])?[lL]?$")) return true; if (e.getLiteral().matches("^[1-9][_]+[0-9]([0-9_]*[0-9])?[lL]?$")) return true; if (e.getLiteral().matches("^0[xX][0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?[lL]?$")) return true; if (e.getLiteral().matches("^0[_]*[0-7]([0-7_]*[0-7])?[lL]?$")) return true; return e.getLiteral().matches("^0[bB][01]([01_]*[01])?[lL]?$"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a float literal. 
* * The test is a simplified grammar, based on the one from: * https://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-3.10 * * DecimalFloatingPointLiteral: * [0-9] ([0-9_]* [0-9])? \\. ([0-9] ([0-9_]* [0-9])?)? ([eE] [+-]? [0-9] ([0-9_]* [0-9])?)? [fFdD]? * \\. [0-9] ([0-9_]* [0-9])? ([eE] [+-]? [0-9] ([0-9_]* [0-9])?)? [fFdD]? * [0-9] ([0-9_]* [0-9])? [eE] [+-]? [0-9] ([0-9_]* [0-9])? [fFdD]? * [0-9] ([0-9_]* [0-9])? ([eE] [+-]? [0-9] ([0-9_]* [0-9])?)? [fFdD] * * HexadecimalFloatingPointLiteral: * 0 [Xx] [0-9a-fA-F] ([0-9a-fA-F_]* [0-9a-fA-F])? \\.? [pP] [+-]? [0-9] ([0-9_]* [0-9])? [fFdD]? * 0 [Xx] ([0-9a-fA-F] ([0-9a-fA-F_]* [0-9a-fA-F])?)? \\. [0-9a-fA-F] ([0-9a-fA-F_]* [0-9a-fA-F])? [pP] [+-]? [0-9] ([0-9_]* [0-9])? [fFdD]? * * @param e the expression to test * @return true if the expression is a char literal, otherwise false */ @FunctionSpec(name = "isfloatlit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isFloatLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; if (e.getLiteral().matches("^[0-9]([0-9_]*[0-9])?\\.([0-9]([0-9_]*[0-9])?)?([eE][+-]?[0-9]([0-9_]*[0-9])?)?[fFdD]?$")) return true; if (e.getLiteral().matches("^\\.[0-9]([0-9_]*[0-9])?([eE][+-]?[0-9]([0-9_]*[0-9])?)?[fFdD]?$")) return true; if (e.getLiteral().matches("^[0-9]([0-9_]*[0-9])?[eE][+-]?[0-9]([0-9_]*[0-9])?[fFdD]?$")) return true; if (e.getLiteral().matches("^[0-9]([0-9_]*[0-9])?([eE][+-]?[0-9]([0-9_]*[0-9])?)?[fFdD]$")) return true; if (e.getLiteral().matches("^0[Xx][0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?\\.?[pP][+-]?[0-9]([0-9_]*[0-9])?[fFdD]?$")) return true; return e.getLiteral().matches("^0[Xx]([0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?)?\\.[0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?[pP][+-]?[0-9]([0-9_]*[0-9])?[fFdD]?$"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a char literal. * * @param e the expression to test * @return true if the expression is a char literal, otherwise false */ @FunctionSpec(name = "ischarlit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isCharLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().startsWith("'"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a string literal. * * @param e the expression to test * @return true if the expression is a string literal, otherwise false */ @FunctionSpec(name = "isstringlit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isStringLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().startsWith("\""); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a type literal. 
* * @param e the expression to test * @return true if the expression is a type literal, otherwise false */ @FunctionSpec(name = "istypelit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isTypeLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().endsWith(".class"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a bool literal. * * @param e the expression to test * @return true if the expression is a bool literal, otherwise false */ @FunctionSpec(name = "isboollit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isBoolLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().equals("true") || e.getLiteral().equals("false"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a null literal. * * @param e the expression to test * @return true if the expression is a null literal, otherwise false */ @FunctionSpec(name = "isnulllit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isNullLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().equals("null"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and the literal matches the string <code>lit</code>. * * @param e the expression to test * @return true if the expression is a string literal, otherwise false */ @FunctionSpec(name = "isliteral", returnType = "bool", formalParameters = { "Expression", "string" }) public static boolean isLiteral(final Expression e, final String lit) throws Exception { return e.getKind() == Expression.ExpressionKind.LITERAL && e.hasLiteral() && e.getLiteral().equals(lit); } ////////////////////////////// // Collect Annotations Used // ////////////////////////////// private static class AnnotationCollectingVisitor extends BoaCollectingVisitor<String,Long> { @Override protected boolean preVisit(Modifier node) { if (node.getKind() == Modifier.ModifierKind.ANNOTATION) { final String name = BoaAstIntrinsics.type_name(node.getAnnotationName()); final long count = map.containsKey(name) ? 
map.get(name) : 0; map.put(name, count + 1); } return true; } } private static AnnotationCollectingVisitor annotationCollectingVisitor = new AnnotationCollectingVisitor(); @FunctionSpec(name = "collect_annotations", returnType = "map[string] of int", formalParameters = { "ASTRoot", "map[string] of int" }) public static HashMap<String,Long> collect_annotations(final ASTRoot f, final HashMap<String,Long> map) throws Exception { annotationCollectingVisitor.initialize(map).visit(f); return annotationCollectingVisitor.map; } /////////////////////////// // Collect Generics Used // /////////////////////////// private static class GenericsCollectingVisitor extends BoaCollectingVisitor<String,Long> { @Override protected boolean preVisit(Type node) { // FIXME /* try { parseGenericType(BoaAstIntrinsics.type_name(node.getName()).trim(), map); } catch (final StackOverflowError e) { System.err.println("STACK ERR: " + node.getName() + " -> " + BoaAstIntrinsics.type_name(node.getName()).trim()); } */ return true; } } private static GenericsCollectingVisitor genericsCollectingVisitor = new GenericsCollectingVisitor(); @FunctionSpec(name = "collect_generic_types", returnType = "map[string] of int", formalParameters = { "ASTRoot", "map[string] of int" }) public static HashMap<String,Long> collect_generic_types(final ASTRoot f, final HashMap<String,Long> map) throws Exception { genericsCollectingVisitor.initialize(map).visit(f); return genericsCollectingVisitor.map; } @SuppressWarnings("unused") private static void parseGenericType(final String name, final HashMap<String,Long> counts) { if (!name.contains("<") || name.startsWith("<")) return; if (name.contains("|")) { for (final String s : name.split("\\|")) parseGenericType(s.trim(), counts); return; } if (name.contains("&")) { int count = 0; int last = 0; for (int i = 0; i < name.length(); i++) switch (name.charAt(i)) { case '<': count++; break; case '>': count--; break; case '&': if (count == 0) { parseGenericType(name.substring(last, i).trim(), counts); last = i + 1; } break; default: break; } parseGenericType(name.substring(last).trim(), counts); return; } foundType(name, counts); int start = name.indexOf("<"); final Stack<Integer> starts = new Stack<Integer>(); int lastStart = start + 1; for (int i = lastStart; i < name.lastIndexOf(">"); i++) switch (name.charAt(i)) { case '<': starts.push(lastStart); lastStart = i + 1; break; case '>': if (!starts.empty()) foundType(name.substring(starts.pop(), i + 1).trim(), counts); break; case '&': case '|': case ',': case ' ': case '.': case '\t': lastStart = i + 1; break; default: break; } } private static void foundType(final String name, final HashMap<String,Long> counts) { final String type = name.endsWith("...") ? name.substring(0, name.length() - 3).trim() : name.trim(); final long count = counts.containsKey(type) ? counts.get(type) : 0; counts.put(type, count + 1); String rawType = type.substring(0, type.indexOf("<")).trim(); if (!type.endsWith(">")) rawType += type.substring(type.lastIndexOf(">") + 1).trim(); final long rawCount = counts.containsKey(rawType) ? 
counts.get(rawType) : 0; counts.put(rawType, rawCount + 1); } static int indent = 0; private static String indent() { String s = ""; for (int i = 0; i < indent; i++) s += "\t"; return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "ASTRoot" }) public static String prettyprint(final ASTRoot r) { if (r == null) return ""; String s = ""; for (final Namespace n : r.getNamespacesList()) s += prettyprint(n); return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Namespace" }) public static String prettyprint(final Namespace n) { if (n == null) return ""; String s = ""; if (n.getName().length() > 0) { if (n.getModifiersCount() > 0) s += prettyprint(n.getModifiersList()) + " "; s += indent() + "package " + n.getName() + ";\n"; } for (final String i : n.getImportsList()) s += indent() + "import " + i + "\n"; for (final Declaration d : n.getDeclarationsList()) s += prettyprint(d); return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Declaration" }) public static String prettyprint(final Declaration d) { if (d == null) return ""; String s = indent() + prettyprint(d.getModifiersList()) + " "; switch (d.getKind()) { case INTERFACE: s += "interface " + d.getName(); if (d.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < d.getGenericParametersCount(); i++) { if (i != 0) s += ", "; s += prettyprint(d.getGenericParameters(i)); } s += ">"; } if (d.getParentsCount() > 0) { s += " extends "; for (int i = 0; i < d.getParentsCount(); i++) { if (i != 0) s += ", "; s += prettyprint(d.getParents(i)); } } s += " {\n"; break; case ANONYMOUS: break; case ENUM: s += "enum " + d.getName(); break; case ANNOTATION: s += "@interface class " + d.getName(); if (d.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < d.getGenericParametersCount(); i++) { if (i != 0) s += ", "; s += prettyprint(d.getGenericParameters(i)); } s += ">"; } if (d.getParentsCount() > 0) { int i = 0; if (d.getParents(i).getKind() == TypeKind.CLASS) s += " extends " + prettyprint(d.getParents(i++)); if (i < d.getParentsCount()) { s += " implements "; for (int j = i; i < d.getParentsCount(); i++) { if (i != j) s += ", "; s += prettyprint(d.getParents(i)); } } } break; default: case CLASS: s += "class " + d.getName(); if (d.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < d.getGenericParametersCount(); i++) { if (i != 0) s += ", "; s += prettyprint(d.getGenericParameters(i)); } s += ">"; } if (d.getParentsCount() > 0) { int i = 0; if (d.getParents(i).getKind() == TypeKind.CLASS) s += " extends " + prettyprint(d.getParents(i++)); if (i < d.getParentsCount()) { s += " implements "; for (int j = i; i < d.getParentsCount(); i++) { if (i != j) s += ", "; s += prettyprint(d.getParents(i)); } } } break; } s += " {\n"; indent++; for (final Variable v : d.getFieldsList()) s += indent() + prettyprint(v) + ";\n"; for (final Method m : d.getMethodsList()) s += prettyprint(m); for (final Declaration d2 : d.getNestedDeclarationsList()) s += prettyprint(d2); indent--; s += indent() + "}\n"; return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Type" }) public static String prettyprint(final Type t) { if (t == null) return ""; return t.getName(); } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Method" }) public static String prettyprint(final Method m) { if (m == null) return ""; String s = indent(); for (int i = 0; i < 
m.getModifiersCount(); i++) s += prettyprint(m.getModifiers(i)) + " "; if (m.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < m.getGenericParametersCount(); i++) { if (i > 0) s += ", "; s += prettyprint(m.getGenericParameters(i)); } s += "> "; } s += prettyprint(m.getReturnType()) + " " + m.getName() + "("; for (int i = 0; i < m.getArgumentsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(m.getArguments(i)); } s += ") "; if (m.getExceptionTypesCount() > 0) { s += "throws "; for (int i = 0; i < m.getExceptionTypesCount(); i++) s += prettyprint(m.getExceptionTypes(i)) + " "; } s += "\n"; for (int i = 0; i < m.getStatementsCount(); i++) s += indent() + prettyprint(m.getStatements(i)) + "\n"; return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Variable" }) public static String prettyprint(final Variable v) { if (v == null) return ""; String s = prettyprint(v.getModifiersList()) + prettyprint(v.getVariableType()) + " " + v.getName() + "("+ prettyprint(v.getExpressions(0)) +")"; if (v.hasInitializer()) s += " = " + prettyprint(v.getInitializer()); return s; } private static String prettyprint(final List<Modifier> mods) { String s = ""; for (final Modifier m : mods) s += prettyprint(m) + " "; return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Statement" }) public static String prettyprint(final Statement stmt) { if (stmt == null) return ""; String s = ""; switch (stmt.getKind()) { case EMPTY: return ";"; case BLOCK: s += "{\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "}"; return s; case RETURN: s += "return"; if (stmt.getExpressionsCount() > 0) s += " " + prettyprint(stmt.getExpressions(0)); s += ";"; return s; case BREAK: s += "break"; if (stmt.getExpressionsCount() > 0) s += " " + prettyprint(stmt.getExpressions(0)); s += ";"; return s; case CONTINUE: s += "continue"; if (stmt.getExpressionsCount() > 0) s += " " + prettyprint(stmt.getExpressions(0)); s += ";"; return s; case ASSERT: s += "assert "; s += prettyprint(stmt.getConditions(0)); if (stmt.getExpressionsCount() > 0) s += " " + prettyprint(stmt.getExpressions(0)); s += ";"; return s; case LABEL: return prettyprint(stmt.getExpressions(0)) + ": " + prettyprint(stmt.getStatements(0)); case CASE: return "case " + prettyprint(stmt.getExpressions(0)) + ":"; case DEFAULT: return "default:"; case EXPRESSION: return prettyprint(stmt.getExpressions(0)) + ";"; case TYPEDECL: return prettyprint(stmt.getTypeDeclaration()); case SYNCHRONIZED: s += "synchronized () {\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += "}"; return s; case CATCH: s += indent() + "catch ("; s += prettyprint(stmt.getVariableDeclaration()); s += ") {\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "}"; return s; case FINALLY: s += indent() + "finally {\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "}"; return s; case TRY: s += "try"; if (stmt.getInitializationsCount() > 0) { s += "("; for (int i = 0; i < stmt.getInitializationsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(stmt.getInitializations(i)); } s += ")"; } s += " "; for (int i = 0; i < 
stmt.getStatementsCount(); i++) { s += prettyprint(stmt.getStatements(i)) + "\n"; } return s; case FOR: s += "for ("; if (stmt.hasVariableDeclaration()) { s += prettyprint(stmt.getVariableDeclaration()) + " : " + prettyprint(stmt.getConditions(0)); } else { for (int i = 0; i < stmt.getInitializationsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(stmt.getInitializations(i)); } s += "; " + prettyprint(stmt.getConditions(0)) + "; "; for (int i = 0; i < stmt.getUpdatesCount(); i++) { if (i > 0) s += ", "; s += prettyprint(stmt.getUpdates(i)); } } s += ")\n"; indent++; s += indent() + prettyprint(stmt.getStatements(0)) + "\n"; indent--; return s; case DO: s += "do\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "while (" + prettyprint(stmt.getConditions(0)) + ");"; return s; case WHILE: s += "while (" + prettyprint(stmt.getConditions(0)) + ") {\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "}"; return s; case IF: s += "if (" + prettyprint(stmt.getConditions(0)) + ")\n"; indent++; s += indent() + prettyprint(stmt.getStatements(0)) + "\n"; indent--; if (stmt.getStatementsCount() > 1) { s += indent() + "else\n"; indent++; s += indent() + prettyprint(stmt.getStatements(1)) + "\n"; indent--; } return s; case SWITCH: s += "switch (" + prettyprint(stmt.getExpressions(0)) + ") {"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += "}"; return s; case THROW: return "throw " + prettyprint(stmt.getExpressions(0)) + ";"; default: return s; } } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Expression" }) public static String prettyprint(final Expression e) { if (e == null) return ""; String s = ""; switch (e.getKind()) { case OP_ADD: if (e.getExpressionsCount() == 1) return ppPrefix("+", e); return ppInfix("+", e.getExpressionsList()); case OP_SUB: if (e.getExpressionsCount() == 1) return ppPrefix("-", e); return ppInfix("-", e.getExpressionsList()); case LOGICAL_AND: return "(" + ppInfix("&&", e.getExpressionsList()) + ")"; case LOGICAL_OR: return "(" + ppInfix("||", e.getExpressionsList()) + ")"; case EQ: return ppInfix("==", e.getExpressionsList()); case NEQ: return ppInfix("!=", e.getExpressionsList()); case LT: return ppInfix("<", e.getExpressionsList()); case GT: return ppInfix(">", e.getExpressionsList()); case LTEQ: return ppInfix("<=", e.getExpressionsList()); case GTEQ: return ppInfix(">=", e.getExpressionsList()); case OP_DIV: return ppInfix("/", e.getExpressionsList()); case OP_MULT: return ppInfix("*", e.getExpressionsList()); case OP_MOD: return ppInfix("%", e.getExpressionsList()); case BIT_AND: return ppInfix("&", e.getExpressionsList()); case BIT_OR: return ppInfix("|", e.getExpressionsList()); case BIT_XOR: return ppInfix("^", e.getExpressionsList()); case BIT_LSHIFT: return ppInfix("<<", e.getExpressionsList()); case BIT_RSHIFT: return ppInfix(">>", e.getExpressionsList()); case BIT_UNSIGNEDRSHIFT: return ppInfix(">>>", e.getExpressionsList()); case ASSIGN: return ppInfix("=", e.getExpressionsList()); case ASSIGN_ADD: return ppInfix("+=", e.getExpressionsList()); case ASSIGN_SUB: return ppInfix("-=", e.getExpressionsList()); case ASSIGN_MULT: return ppInfix("*=", e.getExpressionsList()); case ASSIGN_DIV: return ppInfix("/=", e.getExpressionsList()); case 
ASSIGN_MOD: return ppInfix("%=", e.getExpressionsList()); case ASSIGN_BITXOR: return ppInfix("^=", e.getExpressionsList()); case ASSIGN_BITAND: return ppInfix("&=", e.getExpressionsList()); case ASSIGN_BITOR: return ppInfix("|=", e.getExpressionsList()); case ASSIGN_LSHIFT: return ppInfix("<<=", e.getExpressionsList()); case ASSIGN_RSHIFT: return ppInfix(">>=", e.getExpressionsList()); case ASSIGN_UNSIGNEDRSHIFT: return ppInfix(">>>=", e.getExpressionsList()); case LOGICAL_NOT: return ppPrefix("!", e); case BIT_NOT: return ppPrefix("~", e); case OP_DEC: if (e.getIsPostfix()) return ppPostfix("--", e); return ppPrefix("--", e); case OP_INC: if (e.getIsPostfix()) return ppPostfix("++", e); return ppPrefix("++", e); case PAREN: return "(" + prettyprint(e.getExpressions(0)) + ")"; case LITERAL: return e.getLiteral(); case VARACCESS: for (int i = 0; i < e.getExpressionsCount(); i++) s += prettyprint(e.getExpressions(i)) + "."; s += e.getVariable(); return s; case CAST: return "(" + e.getNewType().getName() + ")" + prettyprint(e.getExpressions(0)); case CONDITIONAL: return prettyprint(e.getExpressions(0)) + " ? " + prettyprint(e.getExpressions(1)) + " : " + prettyprint(e.getExpressions(2)); case NULLCOALESCE: return prettyprint(e.getExpressions(0)) + " ?? " + prettyprint(e.getExpressions(1)); case METHODCALL: for (int i = 0; i < e.getExpressionsCount(); i++) s += prettyprint(e.getExpressions(i)) + "."; if (e.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < e.getGenericParametersCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getGenericParameters(i)); } s += ">"; } s += e.getMethod() + "("; for (int i = 0; i < e.getMethodArgsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getMethodArgs(i)); } s += ")"; return s; case TYPECOMPARE: return prettyprint(e.getExpressions(0)) + " instanceof " + prettyprint(e.getNewType()); case NEWARRAY: s += "new "; s += prettyprint(e.getNewType()); for (int i = 0; i < e.getExpressionsCount(); i++) s += prettyprint(e.getExpressions(i)); return s; case NEW: s += "new "; s += prettyprint(e.getNewType()); if (e.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < e.getGenericParametersCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getGenericParameters(i)); } s += ">"; } s += "("; for (int i = 0; i < e.getMethodArgsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getMethodArgs(i)); } s += ")"; if (e.hasAnonDeclaration()) s += prettyprint(e.getAnonDeclaration()); return s; case ARRAYACCESS: return prettyprint(e.getExpressions(0)) + "[" + prettyprint(e.getExpressions(1)) + "]"; case ARRAYINIT: s += "{"; for (int i = 0; i < e.getExpressionsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getExpressions(i)); } s += "}"; return s; case ANNOTATION: return prettyprint(e.getAnnotation()); case VARDECL: for (int i = 0; i < e.getVariableDecls(0).getModifiersCount(); i++) s += prettyprint(e.getVariableDecls(0).getModifiers(i)) + " "; s += prettyprint(e.getVariableDecls(0).getVariableType()) + " "; for (int i = 0; i < e.getVariableDeclsCount(); i++) { if (i > 0) s += ", "; s += e.getVariableDecls(i).getName(); if (e.getVariableDecls(i).hasInitializer()) s += " = " + prettyprint(e.getVariableDecls(i).getInitializer()); } return s; // TODO case METHOD_REFERENCE: // TODO case LAMBDA: s += "("; for (int i = 0; i < e.getVariableDeclsCount(); i++) { if (i > 0) s += ", "; s += e.getVariableDecls(i).getVariableType().getName() + " " + e.getVariableDecls(i).getName(); } s += ") -> "; if (e.getStatementsCount() != 0) s += 
prettyprint(e.getStatements(0)); if (e.getExpressionsCount() != 0) s += prettyprint(e.getExpressions(0)); default: return s; } } private static String ppPrefix(final String op, final Expression e) { return op + prettyprint(e.getExpressions(0)); } private static String ppPostfix(final String op, final Expression e) { return prettyprint(e.getExpressions(0)) + op; } private static String ppInfix(final String op, final List<Expression> exps) { StringBuilder s = new StringBuilder(); s.append(prettyprint(exps.get(0))); for (int i = 1; i < exps.size(); i++) { s.append(" "); s.append(op); s.append(" "); s.append(prettyprint(exps.get(i))); } return s.toString(); } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Modifier" }) public static String prettyprint(final Modifier m) { if (m == null) return ""; String s = ""; switch (m.getKind()) { case OTHER: return m.getOther(); case VISIBILITY: switch (m.getVisibility()) { case PUBLIC: return "public"; case PRIVATE: return "private"; case PROTECTED: return "protected"; case NAMESPACE: return "namespace"; default: return s; } case ANNOTATION: s = "@" + m.getAnnotationName(); if (m.getAnnotationMembersCount() > 0) s += "("; for (int i = 0; i < m.getAnnotationMembersCount(); i++) { if (i > 0) s += ", "; s += m.getAnnotationMembers(i) + " = " + prettyprint(m.getAnnotationValues(i)); } if (m.getAnnotationMembersCount() > 0) s += ")"; return s; case FINAL: return "final"; case STATIC: return "static"; case SYNCHRONIZED: return "synchronized"; case ABSTRACT: return "abstract"; default: return s; } } /** * Converts a string expression into an AST. * * @param s the string to parse/convert * @return the AST representation of the string */ @FunctionSpec(name = "parseexpression", returnType = "Expression", formalParameters = { "string" }) public static Expression parseexpression(final String s) { final ASTParser parser = ASTParser.newParser(AST.JLS8); parser.setKind(ASTParser.K_EXPRESSION); parser.setSource(s.toCharArray()); @SuppressWarnings("rawtypes") final Map options = JavaCore.getOptions(); JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options); parser.setCompilerOptions(options); try { final org.eclipse.jdt.core.dom.Expression e = (org.eclipse.jdt.core.dom.Expression) parser.createAST(null); final JavaVisitor visitor = new JavaVisitor(s); e.accept(visitor); return visitor.getExpression(); } catch (final Exception e) { // do nothing } final Expression.Builder eb = Expression.newBuilder(); eb.setKind(Expression.ExpressionKind.OTHER); return eb.build(); } /** * Converts a string into an AST. 
* * @param s the string to parse/convert * @return the AST representation of the string */ @FunctionSpec(name = "parse", returnType = "ASTRoot", formalParameters = { "string" }) public static ASTRoot parse(final String s) { final ASTParser parser = ASTParser.newParser(AST.JLS8); parser.setKind(ASTParser.K_COMPILATION_UNIT); parser.setSource(s.toCharArray()); @SuppressWarnings("rawtypes") final Map options = JavaCore.getOptions(); JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options); parser.setCompilerOptions(options); final ASTRoot.Builder ast = ASTRoot.newBuilder(); try { final org.eclipse.jdt.core.dom.CompilationUnit cu = (org.eclipse.jdt.core.dom.CompilationUnit) parser.createAST(null); final JavaErrorCheckVisitor errorCheck = new JavaErrorCheckVisitor(); cu.accept(errorCheck); if (!errorCheck.hasError) { final JavaVisitor visitor = new JavaVisitor(s); ast.addNamespaces(visitor.getNamespaces(cu)); } } catch (final Exception e) { // do nothing } return ast.build(); } }
src/java/boa/functions/BoaAstIntrinsics.java
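Among the helpers in the file above, the qualifier-stripping step at the end of type_name() is easy to misread, so here is a standalone, runnable sketch (the class and method names are illustrative, not part of the Boa sources) that applies the same replaceAll pattern to two of the input/output pairs documented in the source comment.

// Illustrative demo of the regex used by type_name(); not part of the Boa sources.
public class TypeNameRegexDemo {
    static String stripQualifiers(final String t) {
        // Remove package qualifiers anywhere in an already-normalized type string.
        return t.replaceAll("[^\\s,<>|]+\\.([^\\s\\[.,><|]+)", "$1");
    }

    public static void main(String[] args) {
        // Expected (per the source comment): SomeType<T, S<Q>>
        System.out.println(stripQualifiers("foo.bar.SomeType<T, foo.bar.S<bar.Q>>"));
        // Expected (per the source comment): SomeType|Bar
        System.out.println(stripQualifiers("SomeType|foo.Bar"));
    }
}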
/* * Copyright 2017, Hridesh Rajan, Robert Dyer, * Iowa State University of Science and Technology * and Bowling Green State University * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boa.functions; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.MapFile; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper.Context; import org.eclipse.jdt.core.dom.AST; import org.eclipse.jdt.core.dom.ASTParser; import org.eclipse.jdt.core.JavaCore; import com.google.protobuf.CodedInputStream; import com.google.protobuf.InvalidProtocolBufferException; import boa.datagen.DefaultProperties; import boa.datagen.util.JavaErrorCheckVisitor; import boa.datagen.util.JavaVisitor; import boa.types.Ast.*; import boa.types.Code.CodeRepository; import boa.types.Code.Revision; import boa.types.Diff.ChangedFile; import boa.types.Issues.IssueRepository; import boa.types.Issues.IssuesRoot; import boa.types.Shared.ChangeKind; import boa.types.Shared.Person; import boa.types.Toplevel.Project; /** * Boa functions for working with ASTs. * * @author rdyer */ public class BoaAstIntrinsics { @SuppressWarnings("rawtypes") static Context context; private static MapFile.Reader map, commentsMap, issuesMap; private static final Revision emptyRevision; static { Revision.Builder rb = Revision.newBuilder(); rb.setCommitDate(0); Person.Builder pb = Person.newBuilder(); pb.setUsername(""); rb.setCommitter(pb); rb.setId(""); rb.setLog(""); emptyRevision = rb.build(); } private static MapFile.Reader commitMap; public static enum COMMITCOUNTER { GETS_ATTEMPTED, GETS_SUCCEED, GETS_FAILED, GETS_FAIL_MISSING, GETS_FAIL_BADPROTOBUF, GETS_FAIL_BADLOC, }; public static enum ASTCOUNTER { GETS_ATTEMPTED, GETS_SUCCEED, GETS_FAILED, GETS_FAIL_MISSING, GETS_FAIL_BADPROTOBUF, GETS_FAIL_BADLOC, }; @FunctionSpec(name = "url", returnType = "string", formalParameters = { "ChangedFile" }) public static String changedfileToString(final ChangedFile f) { return f.getKey() + "!!" + f.getName(); } private static final ASTRoot emptyAst = ASTRoot.newBuilder().build(); private static final CommentsRoot emptyComments = CommentsRoot.newBuilder().build(); private static final IssuesRoot emptyIssues = IssuesRoot.newBuilder().build(); /** * Given a ChangedFile, return the AST for that file at that revision. 
* * @param f the ChangedFile to get a snapshot of the AST for * @return the AST, or an empty AST on any sort of error */ @SuppressWarnings("unchecked") @FunctionSpec(name = "getast", returnType = "ASTRoot", formalParameters = { "ChangedFile" }) public static ASTRoot getast(final ChangedFile f) { if (!f.getAst()) return emptyAst; context.getCounter(ASTCOUNTER.GETS_ATTEMPTED).increment(1); if (map == null) openMap(); try { final BytesWritable value = new BytesWritable(); if (map.get(new LongWritable(f.getKey()), value) == null) { context.getCounter(ASTCOUNTER.GETS_FAIL_MISSING).increment(1); } else { final CodedInputStream _stream = CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()); // defaults to 64, really big ASTs require more _stream.setRecursionLimit(Integer.MAX_VALUE); final ASTRoot root = ASTRoot.parseFrom(_stream); context.getCounter(ASTCOUNTER.GETS_SUCCEED).increment(1); return root; } } catch (final InvalidProtocolBufferException e) { e.printStackTrace(); context.getCounter(ASTCOUNTER.GETS_FAIL_BADPROTOBUF).increment(1); } catch (final IOException e) { e.printStackTrace(); context.getCounter(ASTCOUNTER.GETS_FAIL_MISSING).increment(1); } catch (final RuntimeException e) { e.printStackTrace(); context.getCounter(ASTCOUNTER.GETS_FAIL_MISSING).increment(1); } catch (final Error e) { e.printStackTrace(); context.getCounter(ASTCOUNTER.GETS_FAIL_BADPROTOBUF).increment(1); } System.err.println("error with ast: " + f.getKey() + " from " + f.getName()); context.getCounter(ASTCOUNTER.GETS_FAILED).increment(1); return emptyAst; } @SuppressWarnings("unchecked") static Revision getRevision(long key) { context.getCounter(COMMITCOUNTER.GETS_ATTEMPTED).increment(1); if (commitMap == null) openCommitMap(); try { final BytesWritable value = new BytesWritable(); if (commitMap.get(new LongWritable(key), value) == null) { context.getCounter(COMMITCOUNTER.GETS_FAIL_MISSING).increment(1); } else { final CodedInputStream _stream = CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()); // defaults to 64, really big ASTs require more _stream.setRecursionLimit(Integer.MAX_VALUE); final Revision root = Revision.parseFrom(_stream); context.getCounter(COMMITCOUNTER.GETS_SUCCEED).increment(1); return root; } } catch (final InvalidProtocolBufferException e) { e.printStackTrace(); context.getCounter(COMMITCOUNTER.GETS_FAIL_BADPROTOBUF).increment(1); } catch (final IOException e) { e.printStackTrace(); context.getCounter(COMMITCOUNTER.GETS_FAIL_MISSING).increment(1); } catch (final RuntimeException e) { e.printStackTrace(); context.getCounter(COMMITCOUNTER.GETS_FAIL_MISSING).increment(1); } catch (final Error e) { e.printStackTrace(); context.getCounter(COMMITCOUNTER.GETS_FAIL_BADPROTOBUF).increment(1); } System.err.println("error with revision: " + key); context.getCounter(COMMITCOUNTER.GETS_FAILED).increment(1); return emptyRevision; } /** * Given a ChangedFile, return the comments for that file at that revision. 
* * @param f the ChangedFile to get a snapshot of the comments for * @return the comments list, or an empty list on any sort of error */ @FunctionSpec(name = "getcomments", returnType = "CommentsRoot", formalParameters = { "ChangedFile" }) public static CommentsRoot getcomments(final ChangedFile f) { // since we know only certain kinds have comments, filter before looking up final ChangedFile.FileKind kind = f.getKind(); if (kind != ChangedFile.FileKind.SOURCE_JAVA_ERROR && kind != ChangedFile.FileKind.SOURCE_JAVA_JLS2 && kind != ChangedFile.FileKind.SOURCE_JAVA_JLS3 && kind != ChangedFile.FileKind.SOURCE_JAVA_JLS4 && kind != ChangedFile.FileKind.SOURCE_JAVA_JLS8) return emptyComments; final String rowName = f.getKey() + "!!" + f.getName(); if (commentsMap == null) openCommentMap(); try { final BytesWritable value = new BytesWritable(); if (commentsMap.get(new Text(rowName), value) != null) { final CodedInputStream _stream = CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()); final CommentsRoot root = CommentsRoot.parseFrom(_stream); return root; } } catch (final InvalidProtocolBufferException e) { e.printStackTrace(); } catch (final IOException e) { e.printStackTrace(); } catch (final RuntimeException e) { e.printStackTrace(); } catch (final Error e) { e.printStackTrace(); } System.err.println("error with comments: " + rowName); return emptyComments; } /** * Given an IssueRepository, return the issues. * * @param f the IssueRepository to get issues for * @return the issues list, or an empty list on any sort of error */ @FunctionSpec(name = "getissues", returnType = "IssuesRoot", formalParameters = { "IssueRepository" }) public static IssuesRoot getissues(final IssueRepository f) { if (issuesMap == null) openIssuesMap(); try { final BytesWritable value = new BytesWritable(); if (issuesMap.get(new Text(f.getKey()), value) != null) { final CodedInputStream _stream = CodedInputStream.newInstance(value.getBytes(), 0, value.getLength()); final IssuesRoot root = IssuesRoot.parseFrom(_stream); return root; } } catch (final InvalidProtocolBufferException e) { e.printStackTrace(); } catch (final IOException e) { e.printStackTrace(); } catch (final RuntimeException e) { e.printStackTrace(); } catch (final Error e) { e.printStackTrace(); } System.err.println("error with issues: " + f.getKey()); return emptyIssues; } @SuppressWarnings("rawtypes") public static void setup(final Context context) { BoaAstIntrinsics.context = context; } private static void openMap() { try { final Configuration conf = context.getConfiguration(); final FileSystem fs; final Path p; if (DefaultProperties.localDataPath != null) { p = new Path(DefaultProperties.localDataPath, "ast"); fs = FileSystem.getLocal(conf); } else { p = new Path( context.getConfiguration().get("fs.default.name", "hdfs://boa-njt/"), new Path( conf.get("boa.ast.dir", conf.get("boa.input.dir", "repcache/live")), new Path("ast") ) ); fs = FileSystem.get(conf); } map = new MapFile.Reader(fs, p.toString(), conf); } catch (final Exception e) { e.printStackTrace(); } } private static void openCommentMap() { try { final Configuration conf = context.getConfiguration(); final FileSystem fs; final Path p; if (DefaultProperties.localDataPath != null) { p = new Path(DefaultProperties.localDataPath, "comments"); fs = FileSystem.getLocal(conf); } else { p = new Path( context.getConfiguration().get("fs.default.name", "hdfs://boa-njt/"), new Path( conf.get("boa.comments.dir", conf.get("boa.input.dir", "repcache/live")), new Path("comments") ) ); fs = 
FileSystem.get(conf); } commentsMap = new MapFile.Reader(fs, p.toString(), conf); } catch (final Exception e) { e.printStackTrace(); } } private static void openIssuesMap() { try { final Configuration conf = context.getConfiguration(); final FileSystem fs; final Path p; if (DefaultProperties.localDataPath != null) { p = new Path(DefaultProperties.localDataPath, "issues"); fs = FileSystem.getLocal(conf); } else { p = new Path( context.getConfiguration().get("fs.default.name", "hdfs://boa-njt/"), new Path( conf.get("boa.issues.dir", conf.get("boa.input.dir", "repcache/live")), new Path("issues") ) ); fs = FileSystem.get(conf); } issuesMap = new MapFile.Reader(fs, p.toString(), conf); } catch (final Exception e) { e.printStackTrace(); } } private static void openCommitMap() { try { final Configuration conf = context.getConfiguration(); final FileSystem fs; final Path p; if (DefaultProperties.localDataPath != null) { p = new Path(DefaultProperties.localDataPath, "commit"); fs = FileSystem.getLocal(conf); } else { p = new Path(context.getConfiguration().get("fs.default.name", "hdfs://boa-njt/"), new Path(conf.get("boa.ast.dir", conf.get("boa.input.dir", "repcache/live")), new Path("commit"))); fs = FileSystem.get(conf); } commitMap = new MapFile.Reader(fs, p.toString(), conf); } catch (final Exception e) { e.printStackTrace(); } } @SuppressWarnings("rawtypes") public static void cleanup(final Context context) { closeMap(); closeCommentMap(); closeIssuesMap(); closeCommitMap(); } private static void closeMap() { if (map != null) try { map.close(); } catch (final IOException e) { e.printStackTrace(); } map = null; } private static void closeCommentMap() { if (commentsMap != null) try { commentsMap.close(); } catch (final IOException e) { e.printStackTrace(); } commentsMap = null; } private static void closeIssuesMap() { if (issuesMap != null) try { issuesMap.close(); } catch (final IOException e) { e.printStackTrace(); } issuesMap = null; } private static void closeCommitMap() { if (commitMap != null) try { commitMap.close(); } catch (final IOException e) { e.printStackTrace(); } commitMap = null; } @FunctionSpec(name = "type_name", returnType = "string", formalParameters = { "string" }) public static String type_name(final String s) { // first, normalize the string final String t = s.replaceAll("<\\s+", "<") .replaceAll(",\\s+", ", ") .replaceAll("\\s*>\\s*", ">") .replaceAll("\\s*&\\s*", " & ") .replaceAll("\\s*\\|\\s*", " | "); if (!t.contains(".")) return t; /* * Remove qualifiers from anywhere in the string... * * SomeType => SomeType * foo.SomeType => SomeType * foo.bar.SomeType => SomeType * SomeType<T> => SomeType<T> * SomeType<T, S> => SomeType<T, S> * SomeType<foo.bar.T, S> => SomeType<T, S> * SomeType<T, foo.bar.S> => SomeType<T, S> * foo.bar.SomeType<T, foo.bar.S<bar.Q>> => SomeType<T, S<Q>> * SomeType|foo.Bar => SomeType|Bar * foo<T>.bar<T> => foo<T>.bar<T> */ return t.replaceAll("[^\\s,<>|]+\\.([^\\s\\[.,><|]+)", "$1"); } /** * A visitor that returns the total number of AST nodes. 
*/ public final static BoaCountingVisitor lenVisitor = new BoaCountingVisitor() { /** {@inheritDoc} */ @Override protected boolean defaultPreVisit() { count++; return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final Project node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final CodeRepository node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final Revision node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final ChangedFile node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final ASTRoot node) throws Exception { return true; } /** {@inheritDoc} */ @Override protected boolean preVisit(final Person node) throws Exception { return true; } }; /** * */ public static class SnapshotVisitor extends BoaCollectingVisitor<String, ChangedFile> { private long timestamp; private String[] kinds; public SnapshotVisitor initialize(final long timestamp, final String... kinds) { initialize(new HashMap<String, ChangedFile>()); this.timestamp = timestamp; this.kinds = kinds; return this; } /** {@inheritDoc} */ @Override protected boolean preVisit(final Revision node) throws Exception { return node.getCommitDate() <= timestamp; } /** {@inheritDoc} */ @Override protected boolean preVisit(final ChangedFile node) throws Exception { if (node.getChange() == ChangeKind.DELETED) { map.remove(node.getName()); return false; } boolean filter = kinds.length > 0; if (filter) { final String kindName = node.getKind().name(); for (final String kind : kinds) if (kindName.startsWith(kind)) { filter = false; break; } } if (!filter) map.put(node.getName(), node); return false; } } public final static SnapshotVisitor snapshot = new SnapshotVisitor(); /////////////////////////////// // Literal testing functions */ /////////////////////////////// /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is an integer literal. * * The test is a simplified grammar, based on the one from: * https://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-3.10 * * DecimalNumeral: * [0-9] [lL]? * [1-9] [0-9] ([0-9_]* [0-9])? [lL]? * [1-9] [_]+ [0-9] ([0-9_]* [0-9])? [lL]? * * HexNumeral: * 0 [xX] [0-9a-fA-F] ([0-9a-fA-F_]* [0-9a-fA-F])? [lL]? * * OctalNumeral: * 0 [_]* [0-7] ([0-7_]* [0-7])? [lL]? * * BinaryNumeral: * 0 [bB] [01] ([01_]* [01])? [lL]? * * If any of these match, it returns <code>true</code>. Otherwise it * returns <code>false</code>. * * @param e the expression to test * @return true if the expression is an integer literal, otherwise false */ @FunctionSpec(name = "isintlit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isIntLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; if (e.getLiteral().matches("^[0-9][lL]?$")) return true; if (e.getLiteral().matches("^[1-9][0-9]([0-9_]*[0-9])?[lL]?$")) return true; if (e.getLiteral().matches("^[1-9][_]+[0-9]([0-9_]*[0-9])?[lL]?$")) return true; if (e.getLiteral().matches("^0[xX][0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?[lL]?$")) return true; if (e.getLiteral().matches("^0[_]*[0-7]([0-7_]*[0-7])?[lL]?$")) return true; return e.getLiteral().matches("^0[bB][01]([01_]*[01])?[lL]?$"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a float literal. 
* * The test is a simplified grammar, based on the one from: * https://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-3.10 * * DecimalFloatingPointLiteral: * [0-9] ([0-9_]* [0-9])? \\. ([0-9] ([0-9_]* [0-9])?)? ([eE] [+-]? [0-9] ([0-9_]* [0-9])?)? [fFdD]? * \\. [0-9] ([0-9_]* [0-9])? ([eE] [+-]? [0-9] ([0-9_]* [0-9])?)? [fFdD]? * [0-9] ([0-9_]* [0-9])? [eE] [+-]? [0-9] ([0-9_]* [0-9])? [fFdD]? * [0-9] ([0-9_]* [0-9])? ([eE] [+-]? [0-9] ([0-9_]* [0-9])?)? [fFdD] * * HexadecimalFloatingPointLiteral: * 0 [Xx] [0-9a-fA-F] ([0-9a-fA-F_]* [0-9a-fA-F])? \\.? [pP] [+-]? [0-9] ([0-9_]* [0-9])? [fFdD]? * 0 [Xx] ([0-9a-fA-F] ([0-9a-fA-F_]* [0-9a-fA-F])?)? \\. [0-9a-fA-F] ([0-9a-fA-F_]* [0-9a-fA-F])? [pP] [+-]? [0-9] ([0-9_]* [0-9])? [fFdD]? * * @param e the expression to test * @return true if the expression is a char literal, otherwise false */ @FunctionSpec(name = "isfloatlit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isFloatLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; if (e.getLiteral().matches("^[0-9]([0-9_]*[0-9])?\\.([0-9]([0-9_]*[0-9])?)?([eE][+-]?[0-9]([0-9_]*[0-9])?)?[fFdD]?$")) return true; if (e.getLiteral().matches("^\\.[0-9]([0-9_]*[0-9])?([eE][+-]?[0-9]([0-9_]*[0-9])?)?[fFdD]?$")) return true; if (e.getLiteral().matches("^[0-9]([0-9_]*[0-9])?[eE][+-]?[0-9]([0-9_]*[0-9])?[fFdD]?$")) return true; if (e.getLiteral().matches("^[0-9]([0-9_]*[0-9])?([eE][+-]?[0-9]([0-9_]*[0-9])?)?[fFdD]$")) return true; if (e.getLiteral().matches("^0[Xx][0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?\\.?[pP][+-]?[0-9]([0-9_]*[0-9])?[fFdD]?$")) return true; return e.getLiteral().matches("^0[Xx]([0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?)?\\.[0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?[pP][+-]?[0-9]([0-9_]*[0-9])?[fFdD]?$"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a char literal. * * @param e the expression to test * @return true if the expression is a char literal, otherwise false */ @FunctionSpec(name = "ischarlit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isCharLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().startsWith("'"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a string literal. * * @param e the expression to test * @return true if the expression is a string literal, otherwise false */ @FunctionSpec(name = "isstringlit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isStringLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().startsWith("\""); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a type literal. 
* * @param e the expression to test * @return true if the expression is a type literal, otherwise false */ @FunctionSpec(name = "istypelit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isTypeLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().endsWith(".class"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a bool literal. * * @param e the expression to test * @return true if the expression is a bool literal, otherwise false */ @FunctionSpec(name = "isboollit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isBoolLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().equals("true") || e.getLiteral().equals("false"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and is a null literal. * * @param e the expression to test * @return true if the expression is a null literal, otherwise false */ @FunctionSpec(name = "isnulllit", returnType = "bool", formalParameters = { "Expression" }) public static boolean isNullLit(final Expression e) throws Exception { if (e.getKind() != Expression.ExpressionKind.LITERAL) return false; if (!e.hasLiteral()) return false; return e.getLiteral().equals("null"); } /** * Returns <code>true</code> if the expression <code>e</code> is of kind * <code>LITERAL</code> and the literal matches the string <code>lit</code>. * * @param e the expression to test * @return true if the expression is a string literal, otherwise false */ @FunctionSpec(name = "isliteral", returnType = "bool", formalParameters = { "Expression", "string" }) public static boolean isLiteral(final Expression e, final String lit) throws Exception { return e.getKind() == Expression.ExpressionKind.LITERAL && e.hasLiteral() && e.getLiteral().equals(lit); } ////////////////////////////// // Collect Annotations Used // ////////////////////////////// private static class AnnotationCollectingVisitor extends BoaCollectingVisitor<String,Long> { @Override protected boolean preVisit(Modifier node) { if (node.getKind() == Modifier.ModifierKind.ANNOTATION) { final String name = BoaAstIntrinsics.type_name(node.getAnnotationName()); final long count = map.containsKey(name) ? 
map.get(name) : 0; map.put(name, count + 1); } return true; } } private static AnnotationCollectingVisitor annotationCollectingVisitor = new AnnotationCollectingVisitor(); @FunctionSpec(name = "collect_annotations", returnType = "map[string] of int", formalParameters = { "ASTRoot", "map[string] of int" }) public static HashMap<String,Long> collect_annotations(final ASTRoot f, final HashMap<String,Long> map) throws Exception { annotationCollectingVisitor.initialize(map).visit(f); return annotationCollectingVisitor.map; } /////////////////////////// // Collect Generics Used // /////////////////////////// private static class GenericsCollectingVisitor extends BoaCollectingVisitor<String,Long> { @Override protected boolean preVisit(Type node) { // FIXME /* try { parseGenericType(BoaAstIntrinsics.type_name(node.getName()).trim(), map); } catch (final StackOverflowError e) { System.err.println("STACK ERR: " + node.getName() + " -> " + BoaAstIntrinsics.type_name(node.getName()).trim()); } */ return true; } } private static GenericsCollectingVisitor genericsCollectingVisitor = new GenericsCollectingVisitor(); @FunctionSpec(name = "collect_generic_types", returnType = "map[string] of int", formalParameters = { "ASTRoot", "map[string] of int" }) public static HashMap<String,Long> collect_generic_types(final ASTRoot f, final HashMap<String,Long> map) throws Exception { genericsCollectingVisitor.initialize(map).visit(f); return genericsCollectingVisitor.map; } @SuppressWarnings("unused") private static void parseGenericType(final String name, final HashMap<String,Long> counts) { if (!name.contains("<") || name.startsWith("<")) return; if (name.contains("|")) { for (final String s : name.split("\\|")) parseGenericType(s.trim(), counts); return; } if (name.contains("&")) { int count = 0; int last = 0; for (int i = 0; i < name.length(); i++) switch (name.charAt(i)) { case '<': count++; break; case '>': count--; break; case '&': if (count == 0) { parseGenericType(name.substring(last, i).trim(), counts); last = i + 1; } break; default: break; } parseGenericType(name.substring(last).trim(), counts); return; } foundType(name, counts); int start = name.indexOf("<"); final Stack<Integer> starts = new Stack<Integer>(); int lastStart = start + 1; for (int i = lastStart; i < name.lastIndexOf(">"); i++) switch (name.charAt(i)) { case '<': starts.push(lastStart); lastStart = i + 1; break; case '>': if (!starts.empty()) foundType(name.substring(starts.pop(), i + 1).trim(), counts); break; case '&': case '|': case ',': case ' ': case '.': case '\t': lastStart = i + 1; break; default: break; } } private static void foundType(final String name, final HashMap<String,Long> counts) { final String type = name.endsWith("...") ? name.substring(0, name.length() - 3).trim() : name.trim(); final long count = counts.containsKey(type) ? counts.get(type) : 0; counts.put(type, count + 1); String rawType = type.substring(0, type.indexOf("<")).trim(); if (!type.endsWith(">")) rawType += type.substring(type.lastIndexOf(">") + 1).trim(); final long rawCount = counts.containsKey(rawType) ? 
counts.get(rawType) : 0; counts.put(rawType, rawCount + 1); } static int indent = 0; private static String indent() { String s = ""; for (int i = 0; i < indent; i++) s += "\t"; return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "ASTRoot" }) public static String prettyprint(final ASTRoot r) { if (r == null) return ""; String s = ""; for (final Namespace n : r.getNamespacesList()) s += prettyprint(n); return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Namespace" }) public static String prettyprint(final Namespace n) { if (n == null) return ""; String s = ""; if (n.getName().length() > 0) { if (n.getModifiersCount() > 0) s += prettyprint(n.getModifiersList()) + " "; s += indent() + "package " + n.getName() + ";\n"; } for (final String i : n.getImportsList()) s += indent() + "import " + i + "\n"; for (final Declaration d : n.getDeclarationsList()) s += prettyprint(d); return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Declaration" }) public static String prettyprint(final Declaration d) { if (d == null) return ""; String s = indent() + prettyprint(d.getModifiersList()) + " "; switch (d.getKind()) { case INTERFACE: s += "interface " + d.getName(); if (d.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < d.getGenericParametersCount(); i++) { if (i != 0) s += ", "; s += prettyprint(d.getGenericParameters(i)); } s += ">"; } if (d.getParentsCount() > 0) { s += " extends "; for (int i = 0; i < d.getParentsCount(); i++) { if (i != 0) s += ", "; s += prettyprint(d.getParents(i)); } } s += " {\n"; break; case ANONYMOUS: break; case ENUM: s += "enum " + d.getName(); break; case ANNOTATION: s += "@interface class " + d.getName(); if (d.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < d.getGenericParametersCount(); i++) { if (i != 0) s += ", "; s += prettyprint(d.getGenericParameters(i)); } s += ">"; } if (d.getParentsCount() > 0) { int i = 0; if (d.getParents(i).getKind() == TypeKind.CLASS) s += " extends " + prettyprint(d.getParents(i++)); if (i < d.getParentsCount()) { s += " implements "; for (int j = i; i < d.getParentsCount(); i++) { if (i != j) s += ", "; s += prettyprint(d.getParents(i)); } } } break; default: case CLASS: s += "class " + d.getName(); if (d.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < d.getGenericParametersCount(); i++) { if (i != 0) s += ", "; s += prettyprint(d.getGenericParameters(i)); } s += ">"; } if (d.getParentsCount() > 0) { int i = 0; if (d.getParents(i).getKind() == TypeKind.CLASS) s += " extends " + prettyprint(d.getParents(i++)); if (i < d.getParentsCount()) { s += " implements "; for (int j = i; i < d.getParentsCount(); i++) { if (i != j) s += ", "; s += prettyprint(d.getParents(i)); } } } break; } s += " {\n"; indent++; for (final Variable v : d.getFieldsList()) s += indent() + prettyprint(v) + ";\n"; for (final Method m : d.getMethodsList()) s += prettyprint(m); for (final Declaration d2 : d.getNestedDeclarationsList()) s += prettyprint(d2); indent--; s += indent() + "}\n"; return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Type" }) public static String prettyprint(final Type t) { if (t == null) return ""; return t.getName(); } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Method" }) public static String prettyprint(final Method m) { if (m == null) return ""; String s = indent(); for (int i = 0; i < 
m.getModifiersCount(); i++) s += prettyprint(m.getModifiers(i)) + " "; if (m.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < m.getGenericParametersCount(); i++) { if (i > 0) s += ", "; s += prettyprint(m.getGenericParameters(i)); } s += "> "; } s += prettyprint(m.getReturnType()) + " " + m.getName() + "("; for (int i = 0; i < m.getArgumentsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(m.getArguments(i)); } s += ") "; if (m.getExceptionTypesCount() > 0) { s += "throws "; for (int i = 0; i < m.getExceptionTypesCount(); i++) s += prettyprint(m.getExceptionTypes(i)) + " "; } s += "\n"; for (int i = 0; i < m.getStatementsCount(); i++) s += indent() + prettyprint(m.getStatements(i)) + "\n"; return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Variable" }) public static String prettyprint(final Variable v) { if (v == null) return ""; String s = prettyprint(v.getModifiersList()) + prettyprint(v.getVariableType()) + " " + v.getName() + "("+ prettyprint(v.getExpressions(0)) +")"; if (v.hasInitializer()) s += " = " + prettyprint(v.getInitializer()); return s; } private static String prettyprint(final List<Modifier> mods) { String s = ""; for (final Modifier m : mods) s += prettyprint(m) + " "; return s; } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Statement" }) public static String prettyprint(final Statement stmt) { if (stmt == null) return ""; String s = ""; switch (stmt.getKind()) { case EMPTY: return ";"; case BLOCK: s += "{\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "}"; return s; case RETURN: s += "return"; if (stmt.getExpressionsCount() > 0) s += " " + prettyprint(stmt.getExpressions(0)); s += ";"; return s; case BREAK: s += "break"; if (stmt.getExpressionsCount() > 0) s += " " + prettyprint(stmt.getExpressions(0)); s += ";"; return s; case CONTINUE: s += "continue"; if (stmt.getExpressionsCount() > 0) s += " " + prettyprint(stmt.getExpressions(0)); s += ";"; return s; case ASSERT: s += "assert "; s += prettyprint(stmt.getConditions(0)); if (stmt.getExpressionsCount() > 0) s += " " + prettyprint(stmt.getExpressions(0)); s += ";"; return s; case LABEL: return prettyprint(stmt.getExpressions(0)) + ": " + prettyprint(stmt.getStatements(0)); case CASE: return "case " + prettyprint(stmt.getExpressions(0)) + ":"; case DEFAULT: return "default:"; case EXPRESSION: return prettyprint(stmt.getExpressions(0)) + ";"; case TYPEDECL: return prettyprint(stmt.getTypeDeclaration()); case SYNCHRONIZED: s += "synchronized () {\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += "}"; return s; case CATCH: s += indent() + "catch ("; s += prettyprint(stmt.getVariableDeclaration()); s += ") {\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "}"; return s; case FINALLY: s += indent() + "finally {\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "}"; return s; case TRY: s += "try"; if (stmt.getInitializationsCount() > 0) { s += "("; for (int i = 0; i < stmt.getInitializationsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(stmt.getInitializations(i)); } s += ")"; } s += " "; for (int i = 0; i < 
stmt.getStatementsCount(); i++) { s += prettyprint(stmt.getStatements(i)) + "\n"; } return s; case FOR: s += "for ("; if (stmt.hasVariableDeclaration()) { s += prettyprint(stmt.getVariableDeclaration()) + " : " + prettyprint(stmt.getConditions(0)); } else { for (int i = 0; i < stmt.getInitializationsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(stmt.getInitializations(i)); } s += "; " + prettyprint(stmt.getConditions(0)) + "; "; for (int i = 0; i < stmt.getUpdatesCount(); i++) { if (i > 0) s += ", "; s += prettyprint(stmt.getUpdates(i)); } } s += ")\n"; indent++; s += indent() + prettyprint(stmt.getStatements(0)) + "\n"; indent--; return s; case DO: s += "do\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "while (" + prettyprint(stmt.getConditions(0)) + ");"; return s; case WHILE: s += "while (" + prettyprint(stmt.getConditions(0)) + ") {\n"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += indent() + "}"; return s; case IF: s += "if (" + prettyprint(stmt.getConditions(0)) + ")\n"; indent++; s += indent() + prettyprint(stmt.getStatements(0)) + "\n"; indent--; if (stmt.getStatementsCount() > 1) { s += indent() + "else\n"; indent++; s += indent() + prettyprint(stmt.getStatements(1)) + "\n"; indent--; } return s; case SWITCH: s += "switch (" + prettyprint(stmt.getExpressions(0)) + ") {"; indent++; for (int i = 0; i < stmt.getStatementsCount(); i++) s += indent() + prettyprint(stmt.getStatements(i)) + "\n"; indent--; s += "}"; return s; case THROW: return "throw " + prettyprint(stmt.getExpressions(0)) + ";"; default: return s; } } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Expression" }) public static String prettyprint(final Expression e) { if (e == null) return ""; String s = ""; switch (e.getKind()) { case OP_ADD: if (e.getExpressionsCount() == 1) return ppPrefix("+", e); return ppInfix("+", e.getExpressionsList()); case OP_SUB: if (e.getExpressionsCount() == 1) return ppPrefix("-", e); return ppInfix("-", e.getExpressionsList()); case LOGICAL_AND: return "(" + ppInfix("&&", e.getExpressionsList()) + ")"; case LOGICAL_OR: return "(" + ppInfix("||", e.getExpressionsList()) + ")"; case EQ: return ppInfix("==", e.getExpressionsList()); case NEQ: return ppInfix("!=", e.getExpressionsList()); case LT: return ppInfix("<", e.getExpressionsList()); case GT: return ppInfix(">", e.getExpressionsList()); case LTEQ: return ppInfix("<=", e.getExpressionsList()); case GTEQ: return ppInfix(">=", e.getExpressionsList()); case OP_DIV: return ppInfix("/", e.getExpressionsList()); case OP_MULT: return ppInfix("*", e.getExpressionsList()); case OP_MOD: return ppInfix("%", e.getExpressionsList()); case BIT_AND: return ppInfix("&", e.getExpressionsList()); case BIT_OR: return ppInfix("|", e.getExpressionsList()); case BIT_XOR: return ppInfix("^", e.getExpressionsList()); case BIT_LSHIFT: return ppInfix("<<", e.getExpressionsList()); case BIT_RSHIFT: return ppInfix(">>", e.getExpressionsList()); case BIT_UNSIGNEDRSHIFT: return ppInfix(">>>", e.getExpressionsList()); case ASSIGN: return ppInfix("=", e.getExpressionsList()); case ASSIGN_ADD: return ppInfix("+=", e.getExpressionsList()); case ASSIGN_SUB: return ppInfix("-=", e.getExpressionsList()); case ASSIGN_MULT: return ppInfix("*=", e.getExpressionsList()); case ASSIGN_DIV: return ppInfix("/=", e.getExpressionsList()); case 
ASSIGN_MOD: return ppInfix("%=", e.getExpressionsList()); case ASSIGN_BITXOR: return ppInfix("^=", e.getExpressionsList()); case ASSIGN_BITAND: return ppInfix("&=", e.getExpressionsList()); case ASSIGN_BITOR: return ppInfix("|=", e.getExpressionsList()); case ASSIGN_LSHIFT: return ppInfix("<<=", e.getExpressionsList()); case ASSIGN_RSHIFT: return ppInfix(">>=", e.getExpressionsList()); case ASSIGN_UNSIGNEDRSHIFT: return ppInfix(">>>=", e.getExpressionsList()); case LOGICAL_NOT: return ppPrefix("!", e); case BIT_NOT: return ppPrefix("~", e); case OP_DEC: if (e.getIsPostfix()) return ppPostfix("--", e); return ppPrefix("--", e); case OP_INC: if (e.getIsPostfix()) return ppPostfix("++", e); return ppPrefix("++", e); case PAREN: return "(" + prettyprint(e.getExpressions(0)) + ")"; case LITERAL: return e.getLiteral(); case VARACCESS: for (int i = 0; i < e.getExpressionsCount(); i++) s += prettyprint(e.getExpressions(i)) + "."; s += e.getVariable(); return s; case CAST: return "(" + e.getNewType().getName() + ")" + prettyprint(e.getExpressions(0)); case CONDITIONAL: return prettyprint(e.getExpressions(0)) + " ? " + prettyprint(e.getExpressions(1)) + " : " + prettyprint(e.getExpressions(2)); case NULLCOALESCE: return prettyprint(e.getExpressions(0)) + " ?? " + prettyprint(e.getExpressions(1)); case METHODCALL: for (int i = 0; i < e.getExpressionsCount(); i++) s += prettyprint(e.getExpressions(i)) + "."; if (e.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < e.getGenericParametersCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getGenericParameters(i)); } s += ">"; } s += e.getMethod() + "("; for (int i = 0; i < e.getMethodArgsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getMethodArgs(i)); } s += ")"; return s; case TYPECOMPARE: return prettyprint(e.getExpressions(0)) + " instanceof " + prettyprint(e.getNewType()); case NEWARRAY: s += "new "; s += prettyprint(e.getNewType()); for (int i = 0; i < e.getExpressionsCount(); i++) s += prettyprint(e.getExpressions(i)); return s; case NEW: s += "new "; s += prettyprint(e.getNewType()); if (e.getGenericParametersCount() > 0) { s += "<"; for (int i = 0; i < e.getGenericParametersCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getGenericParameters(i)); } s += ">"; } s += "("; for (int i = 0; i < e.getMethodArgsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getMethodArgs(i)); } s += ")"; if (e.hasAnonDeclaration()) s += prettyprint(e.getAnonDeclaration()); return s; case ARRAYACCESS: return prettyprint(e.getExpressions(0)) + "[" + prettyprint(e.getExpressions(1)) + "]"; case ARRAYINIT: s += "{"; for (int i = 0; i < e.getExpressionsCount(); i++) { if (i > 0) s += ", "; s += prettyprint(e.getExpressions(i)); } s += "}"; return s; case ANNOTATION: return prettyprint(e.getAnnotation()); case VARDECL: for (int i = 0; i < e.getVariableDecls(0).getModifiersCount(); i++) s += prettyprint(e.getVariableDecls(0).getModifiers(i)) + " "; s += prettyprint(e.getVariableDecls(0).getVariableType()) + " "; for (int i = 0; i < e.getVariableDeclsCount(); i++) { if (i > 0) s += ", "; s += e.getVariableDecls(i).getName(); if (e.getVariableDecls(i).hasInitializer()) s += " = " + prettyprint(e.getVariableDecls(i).getInitializer()); } return s; // TODO case METHOD_REFERENCE: // TODO case LAMBDA: default: return s; } } private static String ppPrefix(final String op, final Expression e) { return op + prettyprint(e.getExpressions(0)); } private static String ppPostfix(final String op, final Expression e) { return 
prettyprint(e.getExpressions(0)) + op; } private static String ppInfix(final String op, final List<Expression> exps) { StringBuilder s = new StringBuilder(); s.append(prettyprint(exps.get(0))); for (int i = 1; i < exps.size(); i++) { s.append(" "); s.append(op); s.append(" "); s.append(prettyprint(exps.get(i))); } return s.toString(); } @FunctionSpec(name = "prettyprint", returnType = "string", formalParameters = { "Modifier" }) public static String prettyprint(final Modifier m) { if (m == null) return ""; String s = ""; switch (m.getKind()) { case OTHER: return m.getOther(); case VISIBILITY: switch (m.getVisibility()) { case PUBLIC: return "public"; case PRIVATE: return "private"; case PROTECTED: return "protected"; case NAMESPACE: return "namespace"; default: return s; } case ANNOTATION: s = "@" + m.getAnnotationName(); if (m.getAnnotationMembersCount() > 0) s += "("; for (int i = 0; i < m.getAnnotationMembersCount(); i++) { if (i > 0) s += ", "; s += m.getAnnotationMembers(i) + " = " + prettyprint(m.getAnnotationValues(i)); } if (m.getAnnotationMembersCount() > 0) s += ")"; return s; case FINAL: return "final"; case STATIC: return "static"; case SYNCHRONIZED: return "synchronized"; case ABSTRACT: return "abstract"; default: return s; } } /** * Converts a string expression into an AST. * * @param s the string to parse/convert * @return the AST representation of the string */ @FunctionSpec(name = "parseexpression", returnType = "Expression", formalParameters = { "string" }) public static Expression parseexpression(final String s) { final ASTParser parser = ASTParser.newParser(AST.JLS8); parser.setKind(ASTParser.K_EXPRESSION); parser.setSource(s.toCharArray()); @SuppressWarnings("rawtypes") final Map options = JavaCore.getOptions(); JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options); parser.setCompilerOptions(options); try { final org.eclipse.jdt.core.dom.Expression e = (org.eclipse.jdt.core.dom.Expression) parser.createAST(null); final JavaVisitor visitor = new JavaVisitor(s); e.accept(visitor); return visitor.getExpression(); } catch (final Exception e) { // do nothing } final Expression.Builder eb = Expression.newBuilder(); eb.setKind(Expression.ExpressionKind.OTHER); return eb.build(); } /** * Converts a string into an AST. * * @param s the string to parse/convert * @return the AST representation of the string */ @FunctionSpec(name = "parse", returnType = "ASTRoot", formalParameters = { "string" }) public static ASTRoot parse(final String s) { final ASTParser parser = ASTParser.newParser(AST.JLS8); parser.setKind(ASTParser.K_COMPILATION_UNIT); parser.setSource(s.toCharArray()); @SuppressWarnings("rawtypes") final Map options = JavaCore.getOptions(); JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options); parser.setCompilerOptions(options); final ASTRoot.Builder ast = ASTRoot.newBuilder(); try { final org.eclipse.jdt.core.dom.CompilationUnit cu = (org.eclipse.jdt.core.dom.CompilationUnit) parser.createAST(null); final JavaErrorCheckVisitor errorCheck = new JavaErrorCheckVisitor(); cu.accept(errorCheck); if (!errorCheck.hasError) { final JavaVisitor visitor = new JavaVisitor(s); ast.addNamespaces(visitor.getNamespaces(cu)); } } catch (final Exception e) { // do nothing } return ast.build(); } }
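The type_name intrinsic above strips package qualifiers purely by string rewriting, and its own comment lists the expected mappings. The following standalone sketch is illustrative only (the class and method names are invented, and it is not part of the Boa sources); it applies the same normalization and replacement steps to three of those documented inputs so the regex behaviour can be checked in isolation.

// Illustrative sketch: mirrors the regex pipeline of BoaAstIntrinsics.type_name
// on examples taken from its own comment block. Not part of the Boa code base.
public class TypeNameDemo {
    static String typeName(final String s) {
        // normalize whitespace around generics, intersections and unions
        final String t = s.replaceAll("<\\s+", "<")
                .replaceAll(",\\s+", ", ")
                .replaceAll("\\s*>\\s*", ">")
                .replaceAll("\\s*&\\s*", " & ")
                .replaceAll("\\s*\\|\\s*", " | ");
        if (!t.contains("."))
            return t;
        // drop every package qualifier, keeping only the trailing simple name
        return t.replaceAll("[^\\s,<>|]+\\.([^\\s\\[.,><|]+)", "$1");
    }

    public static void main(final String[] args) {
        // expected outputs are the ones documented in type_name's comment
        System.out.println(typeName("foo.bar.SomeType"));                      // SomeType
        System.out.println(typeName("SomeType<foo.bar.T, S>"));                // SomeType<T, S>
        System.out.println(typeName("foo.bar.SomeType<T, foo.bar.S<bar.Q>>")); // SomeType<T, S<Q>>
    }
}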
prettyprint lambda expression
src/java/boa/functions/BoaAstIntrinsics.java
prettyprint lambda expression
Java
apache-2.0
8b7992ab9277561b2e5c97207324b6152b796a6a
0
drewnoakes/metadata-extractor,ycaihua/metadata-extractor,Nadahar/metadata-extractor,veggiespam/metadata-extractor,wswenyue/metadata-extractor,RoyZeng/metadata-extractor,Widen/metadata-extractor,rcketscientist/metadata-extractor,PaytonGarland/metadata-extractor
/* * Copyright 2002-2014 Drew Noakes * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * More information about this project is available at: * * https://drewnoakes.com/code/exif/ * https://github.com/drewnoakes/metadata-extractor */ package com.drew.metadata.exif; import com.drew.imaging.jpeg.JpegSegmentMetadataReader; import com.drew.imaging.jpeg.JpegSegmentType; import com.drew.imaging.tiff.TiffProcessingException; import com.drew.imaging.tiff.TiffReader; import com.drew.lang.ByteArrayReader; import com.drew.lang.RandomAccessReader; import com.drew.lang.Rational; import com.drew.lang.SequentialByteArrayReader; import com.drew.lang.annotations.NotNull; import com.drew.metadata.Directory; import com.drew.metadata.Metadata; import com.drew.metadata.exif.makernotes.*; import com.drew.metadata.iptc.IptcReader; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Set; /** * Decodes Exif binary data, populating a {@link Metadata} object with tag values in {@link ExifSubIFDDirectory}, * {@link ExifThumbnailDirectory}, {@link ExifInteropDirectory}, {@link GpsDirectory} and one of the many camera makernote directories. * * @author Drew Noakes https://drewnoakes.com */ public class ExifReader implements JpegSegmentMetadataReader { /** The number of bytes used per format descriptor. */ @NotNull private static final int[] BYTES_PER_FORMAT = { 0, 1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8 }; /** The number of formats known. */ private static final int MAX_FORMAT_CODE = 12; // Format types // TODO use an enum for these? /** An 8-bit unsigned integer. */ private static final int FMT_BYTE = 1; /** A fixed-length character string. */ private static final int FMT_STRING = 2; /** An unsigned 16-bit integer. */ private static final int FMT_USHORT = 3; /** An unsigned 32-bit integer. */ private static final int FMT_ULONG = 4; private static final int FMT_URATIONAL = 5; /** An 8-bit signed integer. */ private static final int FMT_SBYTE = 6; private static final int FMT_UNDEFINED = 7; /** A signed 16-bit integer. */ private static final int FMT_SSHORT = 8; /** A signed 32-bit integer. */ private static final int FMT_SLONG = 9; private static final int FMT_SRATIONAL = 10; /** A 32-bit floating point number. */ private static final int FMT_SINGLE = 11; /** A 64-bit floating point number. */ private static final int FMT_DOUBLE = 12; /** * The offset at which the TIFF data actually starts. This may be necessary when, for example, processing * JPEG Exif data from APP0 which has a 6-byte preamble before starting the TIFF data. 
*/ private static final String JPEG_EXIF_SEGMENT_PREAMBLE = "Exif\0\0"; private boolean _storeThumbnailBytes = true; public boolean isStoreThumbnailBytes() { return _storeThumbnailBytes; } public void setStoreThumbnailBytes(boolean storeThumbnailBytes) { _storeThumbnailBytes = storeThumbnailBytes; } @NotNull public Iterable<JpegSegmentType> getSegmentTypes() { return Arrays.asList(JpegSegmentType.APP1); } public boolean canProcess(@NotNull final byte[] segmentBytes, @NotNull final JpegSegmentType segmentType) { return segmentBytes.length >= JPEG_EXIF_SEGMENT_PREAMBLE.length() && new String(segmentBytes, 0, JPEG_EXIF_SEGMENT_PREAMBLE.length()).equalsIgnoreCase(JPEG_EXIF_SEGMENT_PREAMBLE); } public void extract(@NotNull final byte[] segmentBytes, @NotNull final Metadata metadata, @NotNull final JpegSegmentType segmentType) { if (segmentBytes == null) throw new NullPointerException("segmentBytes cannot be null"); if (metadata == null) throw new NullPointerException("metadata cannot be null"); if (segmentType == null) throw new NullPointerException("segmentType cannot be null"); try { ByteArrayReader reader = new ByteArrayReader(segmentBytes); // // Check for the header preamble // try { if (!reader.getString(0, JPEG_EXIF_SEGMENT_PREAMBLE.length()).equals(JPEG_EXIF_SEGMENT_PREAMBLE)) { // TODO what do to with this error state? System.err.println("Invalid JPEG Exif segment preamble"); return; } } catch (IOException e) { // TODO what do to with this error state? e.printStackTrace(System.err); return; } // // Read the TIFF-formatted Exif data // new TiffReader().processTiff( reader, new ExifTiffHandler(metadata, _storeThumbnailBytes), JPEG_EXIF_SEGMENT_PREAMBLE.length() ); } catch (TiffProcessingException e) { // TODO what do to with this error state? e.printStackTrace(System.err); } catch (IOException e) { // TODO what do to with this error state? e.printStackTrace(System.err); } } /** * Performs the Exif data extraction on a TIFF/RAW, adding found values to the specified * instance of {@link Metadata}. * * @param reader The {@link RandomAccessReader} from which TIFF data should be read. * @param metadata The Metadata object into which extracted values should be merged. */ @Deprecated public void extractTiff(@NotNull final RandomAccessReader reader, @NotNull final Metadata metadata) { final ExifIFD0Directory directory = metadata.getOrCreateDirectory(ExifIFD0Directory.class); try { extractTiff(reader, metadata, directory, 0); } catch (IOException e) { directory.addError("IO problem: " + e.getMessage()); } } @Deprecated private static void extractTiff(@NotNull final RandomAccessReader reader, @NotNull final Metadata metadata, @NotNull final Directory firstDirectory, final int tiffHeaderOffset) throws IOException { // this should be either "MM" or "II" String byteOrderIdentifier = reader.getString(tiffHeaderOffset, 2); if ("MM".equals(byteOrderIdentifier)) { reader.setMotorolaByteOrder(true); } else if ("II".equals(byteOrderIdentifier)) { reader.setMotorolaByteOrder(false); } else { firstDirectory.addError("Unclear distinction between Motorola/Intel byte ordering: " + byteOrderIdentifier); return; } // Check the next two values for correctness. 
final int tiffMarker = reader.getUInt16(2 + tiffHeaderOffset); final int standardTiffMarker = 0x002A; final int olympusRawTiffMarker = 0x4F52; // for ORF files final int panasonicRawTiffMarker = 0x0055; // for RW2 files if (tiffMarker != standardTiffMarker && tiffMarker != olympusRawTiffMarker && tiffMarker != panasonicRawTiffMarker) { firstDirectory.addError("Unexpected TIFF marker after byte order identifier: 0x" + Integer.toHexString(tiffMarker)); return; } int firstIfdOffset = reader.getInt32(4 + tiffHeaderOffset) + tiffHeaderOffset; // David Ekholm sent a digital camera image that has this problem // TODO getLength should be avoided as it causes RandomAccessStreamReader to read to the end of the stream if (firstIfdOffset >= reader.getLength() - 1) { firstDirectory.addError("First Exif directory offset is beyond end of Exif data segment"); // First directory normally starts 14 bytes in -- try it here and catch another error in the worst case firstIfdOffset = 14; } Set<Integer> processedIfdOffsets = new HashSet<Integer>(); processIFD(firstDirectory, processedIfdOffsets, firstIfdOffset, tiffHeaderOffset, metadata, reader); // after the extraction process, if we have the correct tags, we may be able to store thumbnail information ExifThumbnailDirectory thumbnailDirectory = metadata.getDirectory(ExifThumbnailDirectory.class); if (thumbnailDirectory != null && thumbnailDirectory.containsTag(ExifThumbnailDirectory.TAG_THUMBNAIL_COMPRESSION)) { Integer offset = thumbnailDirectory.getInteger(ExifThumbnailDirectory.TAG_THUMBNAIL_OFFSET); Integer length = thumbnailDirectory.getInteger(ExifThumbnailDirectory.TAG_THUMBNAIL_LENGTH); if (offset != null && length != null) { try { byte[] thumbnailData = reader.getBytes(tiffHeaderOffset + offset, length); thumbnailDirectory.setThumbnailData(thumbnailData); } catch (IOException ex) { firstDirectory.addError("Invalid thumbnail data specification: " + ex.getMessage()); } } } } /** * Processes a TIFF IFD, storing tag values in the specified {@link Directory}. 
* <p/> * IFD Header: * <ul> * <li><b>2 bytes</b> number of tags</li> * </ul> * Tag structure: * <ul> * <li><b>2 bytes</b> tag type</li> * <li><b>2 bytes</b> format code (values 1 to 12, inclusive)</li> * <li><b>4 bytes</b> component count</li> * <li><b>4 bytes</b> inline value, or offset pointer if too large to fit in four bytes</li> * </ul> * * @param directory the {@link Directory} to write extracted values into * @param processedIfdOffsets the set of visited IFD offsets, to avoid revisiting the same IFD in an endless loop * @param ifdOffset the offset within <code>reader</code> at which the IFD data starts * @param tiffHeaderOffset the offset within <code>reader</code> at which the TIFF header starts */ @Deprecated private static void processIFD(@NotNull final Directory directory, @NotNull final Set<Integer> processedIfdOffsets, final int ifdOffset, final int tiffHeaderOffset, @NotNull final Metadata metadata, @NotNull final RandomAccessReader reader) throws IOException { // check for directories we've already visited to avoid stack overflows when recursive/cyclic directory structures exist if (processedIfdOffsets.contains(Integer.valueOf(ifdOffset))) return; // remember that we've visited this directory so that we don't visit it again later processedIfdOffsets.add(ifdOffset); if (ifdOffset >= reader.getLength() || ifdOffset < 0) { directory.addError("Ignored IFD marked to start outside data segment"); return; } // First two bytes in the IFD are the number of tags in this directory int dirTagCount = reader.getUInt16(ifdOffset); int dirLength = (2 + (12 * dirTagCount) + 4); if (dirLength + ifdOffset > reader.getLength()) { directory.addError("Illegally sized IFD"); return; } // Handle each tag in this directory for (int tagNumber = 0; tagNumber < dirTagCount; tagNumber++) { final int tagOffset = calculateTagOffset(ifdOffset, tagNumber); // 2 bytes for the tag type final int tagType = reader.getUInt16(tagOffset); // 2 bytes for the format code final int formatCode = reader.getUInt16(tagOffset + 2); if (formatCode < 1 || formatCode > MAX_FORMAT_CODE) { // This error suggests that we are processing at an incorrect index and will generate // rubbish until we go out of bounds (which may be a while). Exit now. directory.addError("Invalid TIFF tag format code: " + formatCode); return; } // 4 bytes dictate the number of components in this tag's data final int componentCount = reader.getInt32(tagOffset + 4); if (componentCount < 0) { directory.addError("Negative TIFF tag component count"); continue; } // each component may have more than one byte... calculate the total number of bytes final int byteCount = componentCount * BYTES_PER_FORMAT[formatCode]; final int tagValueOffset; if (byteCount > 4) { // If it's bigger than 4 bytes, the dir entry contains an offset. // dirEntryOffset must be passed, as some makernote implementations (e.g. Fujifilm) incorrectly use an // offset relative to the start of the makernote itself, not the TIFF segment. 
final int offsetVal = reader.getInt32(tagOffset + 8); if (offsetVal + byteCount > reader.getLength()) { // Bogus pointer offset and / or byteCount value directory.addError("Illegal TIFF tag pointer offset"); continue; } tagValueOffset = tiffHeaderOffset + offsetVal; } else { // 4 bytes or less and value is in the dir entry itself tagValueOffset = tagOffset + 8; } if (tagValueOffset < 0 || tagValueOffset > reader.getLength()) { directory.addError("Illegal TIFF tag pointer offset"); continue; } // Check that this tag isn't going to allocate outside the bounds of the data array. // This addresses an uncommon OutOfMemoryError. if (byteCount < 0 || tagValueOffset + byteCount > reader.getLength()) { directory.addError("Illegal number of bytes for TIFF tag data: " + byteCount); continue; } // // Special handling for certain known tags that point to or contain other chunks of data to be processed // if (tagType == ExifIFD0Directory.TAG_EXIF_SUB_IFD_OFFSET && directory instanceof ExifIFD0Directory) { if (byteCount != 4) { directory.addError("Exif SubIFD Offset tag should have a component count of four (bytes) for the offset."); } else { final int subDirOffset = tiffHeaderOffset + reader.getInt32(tagValueOffset); processIFD(metadata.getOrCreateDirectory(ExifSubIFDDirectory.class), processedIfdOffsets, subDirOffset, tiffHeaderOffset, metadata, reader); } } else if (tagType == ExifSubIFDDirectory.TAG_INTEROP_OFFSET && directory instanceof ExifSubIFDDirectory) { if (byteCount != 4) { directory.addError("Exif Interop Offset tag should have a component count of four (bytes) for the offset."); } else { final int subDirOffset = tiffHeaderOffset + reader.getInt32(tagValueOffset); processIFD(metadata.getOrCreateDirectory(ExifInteropDirectory.class), processedIfdOffsets, subDirOffset, tiffHeaderOffset, metadata, reader); } } else if (tagType == ExifIFD0Directory.TAG_GPS_INFO_OFFSET && directory instanceof ExifIFD0Directory) { if (byteCount != 4) { directory.addError("Exif GPS Info Offset tag should have a component count of four (bytes) for the offset."); } else { final int subDirOffset = tiffHeaderOffset + reader.getInt32(tagValueOffset); processIFD(metadata.getOrCreateDirectory(GpsDirectory.class), processedIfdOffsets, subDirOffset, tiffHeaderOffset, metadata, reader); } } else if (tagType == ExifSubIFDDirectory.TAG_MAKERNOTE && directory instanceof ExifSubIFDDirectory) { // The makernote tag contains the encoded makernote data directly. // Pass the offset to this tag's value. Manufacturer/Model-specific logic will be used to // determine the correct offset for further processing. 
processMakernote(tagValueOffset, processedIfdOffsets, tiffHeaderOffset, metadata, reader); } else if (tagType == ExifSubIFDDirectory.TAG_IPTC_NAA && directory instanceof ExifIFD0Directory) { // NOTE Adobe sets type 4 for IPTC instead of 7 final byte[] c = reader.getBytes(tagValueOffset, byteCount); new IptcReader().extract(new SequentialByteArrayReader(c), metadata, c.length); } else { processTag(directory, tagType, tagValueOffset, componentCount, formatCode, reader); } } // at the end of each IFD is an optional link to the next IFD final int finalTagOffset = calculateTagOffset(ifdOffset, dirTagCount); int nextDirectoryOffset = reader.getInt32(finalTagOffset); if (nextDirectoryOffset != 0) { nextDirectoryOffset += tiffHeaderOffset; if (nextDirectoryOffset >= reader.getLength()) { // Last 4 bytes of IFD reference another IFD with an address that is out of bounds // Note this could have been caused by jhead 1.3 cropping too much return; } else if (nextDirectoryOffset < ifdOffset) { // Last 4 bytes of IFD reference another IFD with an address that is before the start of this directory return; } // TODO in Exif, the only known 'follower' IFD is the thumbnail one, however this may not be the case final ExifThumbnailDirectory nextDirectory = metadata.getOrCreateDirectory(ExifThumbnailDirectory.class); processIFD(nextDirectory, processedIfdOffsets, nextDirectoryOffset, tiffHeaderOffset, metadata, reader); } } @Deprecated private static void processMakernote(final int makernoteOffset, final @NotNull Set<Integer> processedIfdOffsets, final int tiffHeaderOffset, final @NotNull Metadata metadata, final @NotNull RandomAccessReader reader) throws IOException { // Determine the camera model and makernote format Directory ifd0Directory = metadata.getDirectory(ExifIFD0Directory.class); if (ifd0Directory == null) return; String cameraMake = ifd0Directory.getString(ExifIFD0Directory.TAG_MAKE); final String firstThreeChars = reader.getString(makernoteOffset, 3); final String firstFourChars = reader.getString(makernoteOffset, 4); final String firstFiveChars = reader.getString(makernoteOffset, 5); final String firstSixChars = reader.getString(makernoteOffset, 6); final String firstSevenChars = reader.getString(makernoteOffset, 7); final String firstEightChars = reader.getString(makernoteOffset, 8); final String firstTwelveChars = reader.getString(makernoteOffset, 12); boolean byteOrderBefore = reader.isMotorolaByteOrder(); if ("OLYMP".equals(firstFiveChars) || "EPSON".equals(firstFiveChars) || "AGFA".equals(firstFourChars)) { // Olympus Makernote // Epson and Agfa use Olympus makernote standard: http://www.ozhiker.com/electronics/pjmt/jpeg_info/ processIFD(metadata.getOrCreateDirectory(OlympusMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, tiffHeaderOffset, metadata, reader); } else if (cameraMake != null && cameraMake.trim().toUpperCase().startsWith("NIKON")) { if ("Nikon".equals(firstFiveChars)) { /* There are two scenarios here: * Type 1: ** * :0000: 4E 69 6B 6F 6E 00 01 00-05 00 02 00 02 00 06 00 Nikon........... * :0010: 00 00 EC 02 00 00 03 00-03 00 01 00 00 00 06 00 ................ * Type 3: ** * :0000: 4E 69 6B 6F 6E 00 02 00-00 00 4D 4D 00 2A 00 00 Nikon....MM.*... 
* :0010: 00 08 00 1E 00 01 00 07-00 00 00 04 30 32 30 30 ............0200 */ switch (reader.getUInt8(makernoteOffset + 6)) { case 1: processIFD(metadata.getOrCreateDirectory(NikonType1MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, tiffHeaderOffset, metadata, reader); break; case 2: processIFD(metadata.getOrCreateDirectory(NikonType2MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 18, makernoteOffset + 10, metadata, reader); break; default: ifd0Directory.addError("Unsupported Nikon makernote data ignored."); break; } } else { // The IFD begins with the first Makernote byte (no ASCII name). This occurs with CoolPix 775, E990 and D1 models. processIFD(metadata.getOrCreateDirectory(NikonType2MakernoteDirectory.class), processedIfdOffsets, makernoteOffset, tiffHeaderOffset, metadata, reader); } } else if ("SONY CAM".equals(firstEightChars) || "SONY DSC".equals(firstEightChars)) { processIFD(metadata.getOrCreateDirectory(SonyType1MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 12, tiffHeaderOffset, metadata, reader); } else if ("SEMC MS\u0000\u0000\u0000\u0000\u0000".equals(firstTwelveChars)) { // force MM for this directory reader.setMotorolaByteOrder(true); // skip 12 byte header + 2 for "MM" + 6 processIFD(metadata.getOrCreateDirectory(SonyType6MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 20, tiffHeaderOffset, metadata, reader); } else if ("SIGMA\u0000\u0000\u0000".equals(firstEightChars) || "FOVEON\u0000\u0000".equals(firstEightChars)) { processIFD(metadata.getOrCreateDirectory(SigmaMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 10, tiffHeaderOffset, metadata, reader); } else if ("KDK".equals(firstThreeChars)) { reader.setMotorolaByteOrder(firstSevenChars.equals("KDK INFO")); processKodakMakernote(metadata.getOrCreateDirectory(KodakMakernoteDirectory.class), makernoteOffset, reader); } else if ("Canon".equalsIgnoreCase(cameraMake)) { processIFD(metadata.getOrCreateDirectory(CanonMakernoteDirectory.class), processedIfdOffsets, makernoteOffset, tiffHeaderOffset, metadata, reader); } else if (cameraMake != null && cameraMake.toUpperCase().startsWith("CASIO")) { if ("QVC\u0000\u0000\u0000".equals(firstSixChars)) { processIFD(metadata.getOrCreateDirectory(CasioType2MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 6, tiffHeaderOffset, metadata, reader); } else { processIFD(metadata.getOrCreateDirectory(CasioType1MakernoteDirectory.class), processedIfdOffsets, makernoteOffset, tiffHeaderOffset, metadata, reader); } } else if ("FUJIFILM".equals(firstEightChars) || "Fujifilm".equalsIgnoreCase(cameraMake)) { // Note that this also applies to certain Leica cameras, such as the Digilux-4.3 reader.setMotorolaByteOrder(false); // the 4 bytes after "FUJIFILM" in the makernote point to the start of the makernote // IFD, though the offset is relative to the start of the makernote, not the TIFF // header (like everywhere else) int ifdStart = makernoteOffset + reader.getInt32(makernoteOffset + 8); processIFD(metadata.getOrCreateDirectory(FujifilmMakernoteDirectory.class), processedIfdOffsets, ifdStart, makernoteOffset, metadata, reader); } else if (cameraMake != null && cameraMake.toUpperCase().startsWith("MINOLTA")) { // Cases seen with the model starting with MINOLTA in capitals seem to have a valid Olympus makernote // area that commences immediately. 
processIFD(metadata.getOrCreateDirectory(OlympusMakernoteDirectory.class), processedIfdOffsets, makernoteOffset, tiffHeaderOffset, metadata, reader); } else if ("KYOCERA".equals(firstSevenChars)) { // http://www.ozhiker.com/electronics/pjmt/jpeg_info/kyocera_mn.html processIFD(metadata.getOrCreateDirectory(KyoceraMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 22, tiffHeaderOffset, metadata, reader); } else if ("LEICA".equals(firstFiveChars)) { reader.setMotorolaByteOrder(false); if ("Leica Camera AG".equals(cameraMake)) { processIFD(metadata.getOrCreateDirectory(LeicaMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, tiffHeaderOffset, metadata, reader); } else if ("LEICA".equals(cameraMake)) { // Some Leica cameras use Panasonic makernote tags processIFD(metadata.getOrCreateDirectory(PanasonicMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, tiffHeaderOffset, metadata, reader); } } else if ("Panasonic\u0000\u0000\u0000".equals(reader.getString(makernoteOffset, 12))) { // NON-Standard TIFF IFD Data using Panasonic Tags. There is no Next-IFD pointer after the IFD // Offsets are relative to the start of the TIFF header at the beginning of the EXIF segment // more information here: http://www.ozhiker.com/electronics/pjmt/jpeg_info/panasonic_mn.html processIFD(metadata.getOrCreateDirectory(PanasonicMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 12, tiffHeaderOffset, metadata, reader); } else if ("AOC\u0000".equals(firstFourChars)) { // NON-Standard TIFF IFD Data using Casio Type 2 Tags // IFD has no Next-IFD pointer at end of IFD, and // Offsets are relative to the start of the current IFD tag, not the TIFF header // Observed for: // - Pentax ist D processIFD(metadata.getOrCreateDirectory(CasioType2MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 6, makernoteOffset, metadata, reader); } else if (cameraMake != null && (cameraMake.toUpperCase().startsWith("PENTAX") || cameraMake.toUpperCase().startsWith("ASAHI"))) { // NON-Standard TIFF IFD Data using Pentax Tags // IFD has no Next-IFD pointer at end of IFD, and // Offsets are relative to the start of the current IFD tag, not the TIFF header // Observed for: // - PENTAX Optio 330 // - PENTAX Optio 430 processIFD(metadata.getOrCreateDirectory(PentaxMakernoteDirectory.class), processedIfdOffsets, makernoteOffset, makernoteOffset, metadata, reader); // } else if ("KC".equals(firstTwoChars) || "MINOL".equals(firstFiveChars) || "MLY".equals(firstThreeChars) || "+M+M+M+M".equals(firstEightChars)) { // // This Konica data is not understood. Header identified in accordance with information at this site: // // http://www.ozhiker.com/electronics/pjmt/jpeg_info/minolta_mn.html // // TODO add support for minolta/konica cameras // exifDirectory.addError("Unsupported Konica/Minolta data ignored."); } else if ("SANYO\0\1\0".equals(firstEightChars)) { processIFD(metadata.getOrCreateDirectory(SanyoMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, makernoteOffset, metadata, reader); } else { // The makernote is not comprehended by this library. // If you are reading this and believe a particular camera's image should be processed, get in touch. } reader.setMotorolaByteOrder(byteOrderBefore); } @Deprecated private static void processKodakMakernote(@NotNull final KodakMakernoteDirectory directory, final int tagValueOffset, @NotNull final RandomAccessReader reader) { // Kodak's makernote is not in IFD format. It has values at fixed offsets. 
int dataOffset = tagValueOffset + 8; try { directory.setString(KodakMakernoteDirectory.TAG_KODAK_MODEL, reader.getString(dataOffset, 8)); directory.setInt(KodakMakernoteDirectory.TAG_QUALITY, reader.getUInt8(dataOffset + 9)); directory.setInt(KodakMakernoteDirectory.TAG_BURST_MODE, reader.getUInt8(dataOffset + 10)); directory.setInt(KodakMakernoteDirectory.TAG_IMAGE_WIDTH, reader.getUInt16(dataOffset + 12)); directory.setInt(KodakMakernoteDirectory.TAG_IMAGE_HEIGHT, reader.getUInt16(dataOffset + 14)); directory.setInt(KodakMakernoteDirectory.TAG_YEAR_CREATED, reader.getUInt16(dataOffset + 16)); directory.setByteArray(KodakMakernoteDirectory.TAG_MONTH_DAY_CREATED, reader.getBytes(dataOffset + 18, 2)); directory.setByteArray(KodakMakernoteDirectory.TAG_TIME_CREATED, reader.getBytes(dataOffset + 20, 4)); directory.setInt(KodakMakernoteDirectory.TAG_BURST_MODE_2, reader.getUInt16(dataOffset + 24)); directory.setInt(KodakMakernoteDirectory.TAG_SHUTTER_MODE, reader.getUInt8(dataOffset + 27)); directory.setInt(KodakMakernoteDirectory.TAG_METERING_MODE, reader.getUInt8(dataOffset + 28)); directory.setInt(KodakMakernoteDirectory.TAG_SEQUENCE_NUMBER, reader.getUInt8(dataOffset + 29)); directory.setInt(KodakMakernoteDirectory.TAG_F_NUMBER, reader.getUInt16(dataOffset + 30)); directory.setLong(KodakMakernoteDirectory.TAG_EXPOSURE_TIME, reader.getUInt32(dataOffset + 32)); directory.setInt(KodakMakernoteDirectory.TAG_EXPOSURE_COMPENSATION, reader.getInt16(dataOffset + 36)); directory.setInt(KodakMakernoteDirectory.TAG_FOCUS_MODE, reader.getUInt8(dataOffset + 56)); directory.setInt(KodakMakernoteDirectory.TAG_WHITE_BALANCE, reader.getUInt8(dataOffset + 64)); directory.setInt(KodakMakernoteDirectory.TAG_FLASH_MODE, reader.getUInt8(dataOffset + 92)); directory.setInt(KodakMakernoteDirectory.TAG_FLASH_FIRED, reader.getUInt8(dataOffset + 93)); directory.setInt(KodakMakernoteDirectory.TAG_ISO_SETTING, reader.getUInt16(dataOffset + 94)); directory.setInt(KodakMakernoteDirectory.TAG_ISO, reader.getUInt16(dataOffset + 96)); directory.setInt(KodakMakernoteDirectory.TAG_TOTAL_ZOOM, reader.getUInt16(dataOffset + 98)); directory.setInt(KodakMakernoteDirectory.TAG_DATE_TIME_STAMP, reader.getUInt16(dataOffset + 100)); directory.setInt(KodakMakernoteDirectory.TAG_COLOR_MODE, reader.getUInt16(dataOffset + 102)); directory.setInt(KodakMakernoteDirectory.TAG_DIGITAL_ZOOM, reader.getUInt16(dataOffset + 104)); directory.setInt(KodakMakernoteDirectory.TAG_SHARPNESS, reader.getInt8(dataOffset + 107)); } catch (IOException ex) { directory.addError("Error processing Kodak makernote data: " + ex.getMessage()); } } @Deprecated private static void processTag(@NotNull final Directory directory, final int tagType, final int tagValueOffset, final int componentCount, final int formatCode, @NotNull final RandomAccessReader reader) throws IOException { // Directory simply stores raw values // The display side uses a Descriptor class per directory to turn the raw values into 'pretty' descriptions switch (formatCode) { case FMT_UNDEFINED: // this includes exif user comments directory.setByteArray(tagType, reader.getBytes(tagValueOffset, componentCount)); break; case FMT_STRING: String string = reader.getNullTerminatedString(tagValueOffset, componentCount); directory.setString(tagType, string); break; case FMT_SRATIONAL: if (componentCount == 1) { directory.setRational(tagType, new Rational(reader.getInt32(tagValueOffset), reader.getInt32(tagValueOffset + 4))); } else if (componentCount > 1) { Rational[] rationals = new 
Rational[componentCount]; for (int i = 0; i < componentCount; i++) rationals[i] = new Rational(reader.getInt32(tagValueOffset + (8 * i)), reader.getInt32(tagValueOffset + 4 + (8 * i))); directory.setRationalArray(tagType, rationals); } break; case FMT_URATIONAL: if (componentCount == 1) { directory.setRational(tagType, new Rational(reader.getUInt32(tagValueOffset), reader.getUInt32(tagValueOffset + 4))); } else if (componentCount > 1) { Rational[] rationals = new Rational[componentCount]; for (int i = 0; i < componentCount; i++) rationals[i] = new Rational(reader.getUInt32(tagValueOffset + (8 * i)), reader.getUInt32(tagValueOffset + 4 + (8 * i))); directory.setRationalArray(tagType, rationals); } break; case FMT_SINGLE: if (componentCount == 1) { directory.setFloat(tagType, reader.getFloat32(tagValueOffset)); } else { float[] floats = new float[componentCount]; for (int i = 0; i < componentCount; i++) floats[i] = reader.getFloat32(tagValueOffset + (i * 4)); directory.setFloatArray(tagType, floats); } break; case FMT_DOUBLE: if (componentCount == 1) { directory.setDouble(tagType, reader.getDouble64(tagValueOffset)); } else { double[] doubles = new double[componentCount]; for (int i = 0; i < componentCount; i++) doubles[i] = reader.getDouble64(tagValueOffset + (i * 4)); directory.setDoubleArray(tagType, doubles); } break; // // Note that all integral types are stored as int32 internally (the largest supported by TIFF) // case FMT_SBYTE: if (componentCount == 1) { directory.setInt(tagType, reader.getInt8(tagValueOffset)); } else { int[] bytes = new int[componentCount]; for (int i = 0; i < componentCount; i++) bytes[i] = reader.getInt8(tagValueOffset + i); directory.setIntArray(tagType, bytes); } break; case FMT_BYTE: if (componentCount == 1) { directory.setInt(tagType, reader.getUInt8(tagValueOffset)); } else { int[] bytes = new int[componentCount]; for (int i = 0; i < componentCount; i++) bytes[i] = reader.getUInt8(tagValueOffset + i); directory.setIntArray(tagType, bytes); } break; case FMT_USHORT: if (componentCount == 1) { int i = reader.getUInt16(tagValueOffset); directory.setInt(tagType, i); } else { int[] ints = new int[componentCount]; for (int i = 0; i < componentCount; i++) ints[i] = reader.getUInt16(tagValueOffset + (i * 2)); directory.setIntArray(tagType, ints); } break; case FMT_SSHORT: if (componentCount == 1) { int i = reader.getInt16(tagValueOffset); directory.setInt(tagType, i); } else { int[] ints = new int[componentCount]; for (int i = 0; i < componentCount; i++) ints[i] = reader.getInt16(tagValueOffset + (i * 2)); directory.setIntArray(tagType, ints); } break; case FMT_SLONG: case FMT_ULONG: // NOTE 'long' in this case means 32 bit, not 64 if (componentCount == 1) { int i = reader.getInt32(tagValueOffset); directory.setInt(tagType, i); } else { int[] ints = new int[componentCount]; for (int i = 0; i < componentCount; i++) ints[i] = reader.getInt32(tagValueOffset + (i * 4)); directory.setIntArray(tagType, ints); } break; default: directory.addError("Unknown format code " + formatCode + " for tag " + tagType); } } /** * Determine the offset at which a given InteropArray entry begins within the specified IFD. * * @param ifdStartOffset the offset at which the IFD starts * @param entryNumber the zero-based entry number */ @Deprecated private static int calculateTagOffset(int ifdStartOffset, int entryNumber) { // add 2 bytes for the tag count // each entry is 12 bytes, so we skip 12 * the number seen so far return ifdStartOffset + 2 + (12 * entryNumber); } }
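The processIFD comment above fixes the on-disk layout of an IFD: a 2-byte tag count followed by 12-byte entries (2-byte tag type, 2-byte format code in the range 1 to 12, 4-byte component count, 4-byte inline value or offset pointer), which is exactly what calculateTagOffset and BYTES_PER_FORMAT encode. The sketch below makes that layout concrete. It is a hypothetical standalone helper, not a metadata-extractor API, and it assumes Motorola (big-endian) byte order for brevity.

// Illustrative sketch of the 12-byte IFD entry layout described above.
// Hypothetical helper, not part of metadata-extractor; assumes big-endian byte order.
public class IfdEntryDemo {
    // bytes per component for format codes 1..12, as in BYTES_PER_FORMAT above
    private static final int[] BYTES_PER_FORMAT = { 0, 1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8 };

    static int uint16(final byte[] b, final int off) {
        return ((b[off] & 0xFF) << 8) | (b[off + 1] & 0xFF);
    }

    static int int32(final byte[] b, final int off) {
        return ((b[off] & 0xFF) << 24) | ((b[off + 1] & 0xFF) << 16)
             | ((b[off + 2] & 0xFF) << 8) | (b[off + 3] & 0xFF);
    }

    /** Offset of entry number n: skip the 2-byte tag count, then 12 bytes per earlier entry. */
    static int tagOffset(final int ifdStart, final int n) {
        return ifdStart + 2 + 12 * n;
    }

    static void describeEntry(final byte[] tiff, final int ifdStart, final int n) {
        final int off = tagOffset(ifdStart, n);
        final int tagType = uint16(tiff, off);           // 2 bytes: tag id
        final int formatCode = uint16(tiff, off + 2);    // 2 bytes: format code, 1..12
        final int componentCount = int32(tiff, off + 4); // 4 bytes: component count
        if (formatCode < 1 || formatCode > 12) {         // same guard as MAX_FORMAT_CODE above
            System.out.println("invalid format code " + formatCode);
            return;
        }
        final int byteCount = componentCount * BYTES_PER_FORMAT[formatCode];
        // final 4 bytes hold the value itself if it fits, otherwise an offset from the TIFF header
        final boolean inline = byteCount <= 4;
        System.out.printf("tag 0x%04X format %d count %d (%s)%n",
                tagType, formatCode, componentCount, inline ? "inline value" : "offset to value");
    }
}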
Source/com/drew/metadata/exif/ExifReader.java
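The ExifReader shown above locates each 12-byte IFD entry with calculateTagOffset (2 bytes of tag count, then 12 bytes per entry) and sizes its payload as componentCount * BYTES_PER_FORMAT[formatCode]; payloads larger than 4 bytes are stored out of line behind an offset pointer. The following is a minimal standalone sketch of that arithmetic, not library code (the class name IfdEntrySketch and the sample values are illustrative only):

// Minimal sketch of the IFD entry arithmetic used by ExifReader; names and sample values are illustrative.
public final class IfdEntrySketch {

    // Bytes per component for format codes 1..12, as declared in ExifReader (index 0 unused).
    private static final int[] BYTES_PER_FORMAT = { 0, 1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8 };

    /** Offset of the zero-based entryNumber-th entry: skip the 2-byte tag count, then 12 bytes per entry. */
    static int entryOffset(int ifdStartOffset, int entryNumber) {
        return ifdStartOffset + 2 + (12 * entryNumber);
    }

    /** Total payload size; when it exceeds 4 bytes the entry holds an offset pointer, not the value itself. */
    static int payloadByteCount(int formatCode, int componentCount) {
        return componentCount * BYTES_PER_FORMAT[formatCode];
    }

    public static void main(String[] args) {
        int offset = entryOffset(8, 3);        // fourth entry of an IFD that starts at offset 8 -> 46
        int bytes  = payloadByteCount(5, 1);   // one unsigned rational (FMT_URATIONAL) -> 8 bytes
        System.out.println("entry offset=" + offset + ", payload=" + bytes
                + " bytes, inlined=" + (bytes <= 4));
    }
}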
/* * Copyright 2002-2014 Drew Noakes * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * More information about this project is available at: * * https://drewnoakes.com/code/exif/ * https://github.com/drewnoakes/metadata-extractor */ package com.drew.metadata.exif; import com.drew.imaging.jpeg.JpegSegmentMetadataReader; import com.drew.imaging.jpeg.JpegSegmentType; import com.drew.imaging.tiff.TiffProcessingException; import com.drew.imaging.tiff.TiffReader; import com.drew.lang.ByteArrayReader; import com.drew.lang.RandomAccessReader; import com.drew.lang.Rational; import com.drew.lang.annotations.NotNull; import com.drew.metadata.Directory; import com.drew.metadata.Metadata; import com.drew.metadata.exif.makernotes.*; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Set; /** * Decodes Exif binary data, populating a {@link Metadata} object with tag values in {@link ExifSubIFDDirectory}, * {@link ExifThumbnailDirectory}, {@link ExifInteropDirectory}, {@link GpsDirectory} and one of the many camera makernote directories. * * @author Drew Noakes https://drewnoakes.com */ public class ExifReader implements JpegSegmentMetadataReader { /** The number of bytes used per format descriptor. */ @NotNull private static final int[] BYTES_PER_FORMAT = { 0, 1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8 }; /** The number of formats known. */ private static final int MAX_FORMAT_CODE = 12; // Format types // TODO use an enum for these? /** An 8-bit unsigned integer. */ private static final int FMT_BYTE = 1; /** A fixed-length character string. */ private static final int FMT_STRING = 2; /** An unsigned 16-bit integer. */ private static final int FMT_USHORT = 3; /** An unsigned 32-bit integer. */ private static final int FMT_ULONG = 4; private static final int FMT_URATIONAL = 5; /** An 8-bit signed integer. */ private static final int FMT_SBYTE = 6; private static final int FMT_UNDEFINED = 7; /** A signed 16-bit integer. */ private static final int FMT_SSHORT = 8; /** A signed 32-bit integer. */ private static final int FMT_SLONG = 9; private static final int FMT_SRATIONAL = 10; /** A 32-bit floating point number. */ private static final int FMT_SINGLE = 11; /** A 64-bit floating point number. */ private static final int FMT_DOUBLE = 12; /** * The offset at which the TIFF data actually starts. This may be necessary when, for example, processing * JPEG Exif data from APP0 which has a 6-byte preamble before starting the TIFF data. 
*/ private static final String JPEG_EXIF_SEGMENT_PREAMBLE = "Exif\0\0"; private boolean _storeThumbnailBytes = true; public boolean isStoreThumbnailBytes() { return _storeThumbnailBytes; } public void setStoreThumbnailBytes(boolean storeThumbnailBytes) { _storeThumbnailBytes = storeThumbnailBytes; } @NotNull public Iterable<JpegSegmentType> getSegmentTypes() { return Arrays.asList(JpegSegmentType.APP1); } public boolean canProcess(@NotNull final byte[] segmentBytes, @NotNull final JpegSegmentType segmentType) { return segmentBytes.length >= JPEG_EXIF_SEGMENT_PREAMBLE.length() && new String(segmentBytes, 0, JPEG_EXIF_SEGMENT_PREAMBLE.length()).equalsIgnoreCase(JPEG_EXIF_SEGMENT_PREAMBLE); } public void extract(@NotNull final byte[] segmentBytes, @NotNull final Metadata metadata, @NotNull final JpegSegmentType segmentType) { if (segmentBytes == null) throw new NullPointerException("segmentBytes cannot be null"); if (metadata == null) throw new NullPointerException("metadata cannot be null"); if (segmentType == null) throw new NullPointerException("segmentType cannot be null"); try { ByteArrayReader reader = new ByteArrayReader(segmentBytes); // // Check for the header preamble // try { if (!reader.getString(0, JPEG_EXIF_SEGMENT_PREAMBLE.length()).equals(JPEG_EXIF_SEGMENT_PREAMBLE)) { // TODO what do to with this error state? System.err.println("Invalid JPEG Exif segment preamble"); return; } } catch (IOException e) { // TODO what do to with this error state? e.printStackTrace(System.err); return; } // // Read the TIFF-formatted Exif data // new TiffReader().processTiff( reader, new ExifTiffHandler(metadata, _storeThumbnailBytes), JPEG_EXIF_SEGMENT_PREAMBLE.length() ); } catch (TiffProcessingException e) { // TODO what do to with this error state? e.printStackTrace(System.err); } catch (IOException e) { // TODO what do to with this error state? e.printStackTrace(System.err); } } /** * Performs the Exif data extraction on a TIFF/RAW, adding found values to the specified * instance of {@link Metadata}. * * @param reader The {@link RandomAccessReader} from which TIFF data should be read. * @param metadata The Metadata object into which extracted values should be merged. */ @Deprecated public void extractTiff(@NotNull final RandomAccessReader reader, @NotNull final Metadata metadata) { final ExifIFD0Directory directory = metadata.getOrCreateDirectory(ExifIFD0Directory.class); try { extractTiff(reader, metadata, directory, 0); } catch (IOException e) { directory.addError("IO problem: " + e.getMessage()); } } @Deprecated private static void extractTiff(@NotNull final RandomAccessReader reader, @NotNull final Metadata metadata, @NotNull final Directory firstDirectory, final int tiffHeaderOffset) throws IOException { // this should be either "MM" or "II" String byteOrderIdentifier = reader.getString(tiffHeaderOffset, 2); if ("MM".equals(byteOrderIdentifier)) { reader.setMotorolaByteOrder(true); } else if ("II".equals(byteOrderIdentifier)) { reader.setMotorolaByteOrder(false); } else { firstDirectory.addError("Unclear distinction between Motorola/Intel byte ordering: " + byteOrderIdentifier); return; } // Check the next two values for correctness. 
final int tiffMarker = reader.getUInt16(2 + tiffHeaderOffset); final int standardTiffMarker = 0x002A; final int olympusRawTiffMarker = 0x4F52; // for ORF files final int panasonicRawTiffMarker = 0x0055; // for RW2 files if (tiffMarker != standardTiffMarker && tiffMarker != olympusRawTiffMarker && tiffMarker != panasonicRawTiffMarker) { firstDirectory.addError("Unexpected TIFF marker after byte order identifier: 0x" + Integer.toHexString(tiffMarker)); return; } int firstIfdOffset = reader.getInt32(4 + tiffHeaderOffset) + tiffHeaderOffset; // David Ekholm sent a digital camera image that has this problem // TODO getLength should be avoided as it causes RandomAccessStreamReader to read to the end of the stream if (firstIfdOffset >= reader.getLength() - 1) { firstDirectory.addError("First Exif directory offset is beyond end of Exif data segment"); // First directory normally starts 14 bytes in -- try it here and catch another error in the worst case firstIfdOffset = 14; } Set<Integer> processedIfdOffsets = new HashSet<Integer>(); processIFD(firstDirectory, processedIfdOffsets, firstIfdOffset, tiffHeaderOffset, metadata, reader); // after the extraction process, if we have the correct tags, we may be able to store thumbnail information ExifThumbnailDirectory thumbnailDirectory = metadata.getDirectory(ExifThumbnailDirectory.class); if (thumbnailDirectory != null && thumbnailDirectory.containsTag(ExifThumbnailDirectory.TAG_THUMBNAIL_COMPRESSION)) { Integer offset = thumbnailDirectory.getInteger(ExifThumbnailDirectory.TAG_THUMBNAIL_OFFSET); Integer length = thumbnailDirectory.getInteger(ExifThumbnailDirectory.TAG_THUMBNAIL_LENGTH); if (offset != null && length != null) { try { byte[] thumbnailData = reader.getBytes(tiffHeaderOffset + offset, length); thumbnailDirectory.setThumbnailData(thumbnailData); } catch (IOException ex) { firstDirectory.addError("Invalid thumbnail data specification: " + ex.getMessage()); } } } } /** * Processes a TIFF IFD, storing tag values in the specified {@link Directory}. 
* <p/> * IFD Header: * <ul> * <li><b>2 bytes</b> number of tags</li> * </ul> * Tag structure: * <ul> * <li><b>2 bytes</b> tag type</li> * <li><b>2 bytes</b> format code (values 1 to 12, inclusive)</li> * <li><b>4 bytes</b> component count</li> * <li><b>4 bytes</b> inline value, or offset pointer if too large to fit in four bytes</li> * </ul> * * @param directory the {@link Directory} to write extracted values into * @param processedIfdOffsets the set of visited IFD offsets, to avoid revisiting the same IFD in an endless loop * @param ifdOffset the offset within <code>reader</code> at which the IFD data starts * @param tiffHeaderOffset the offset within <code>reader</code> at which the TIFF header starts */ @Deprecated private static void processIFD(@NotNull final Directory directory, @NotNull final Set<Integer> processedIfdOffsets, final int ifdOffset, final int tiffHeaderOffset, @NotNull final Metadata metadata, @NotNull final RandomAccessReader reader) throws IOException { // check for directories we've already visited to avoid stack overflows when recursive/cyclic directory structures exist if (processedIfdOffsets.contains(Integer.valueOf(ifdOffset))) return; // remember that we've visited this directory so that we don't visit it again later processedIfdOffsets.add(ifdOffset); if (ifdOffset >= reader.getLength() || ifdOffset < 0) { directory.addError("Ignored IFD marked to start outside data segment"); return; } // First two bytes in the IFD are the number of tags in this directory int dirTagCount = reader.getUInt16(ifdOffset); int dirLength = (2 + (12 * dirTagCount) + 4); if (dirLength + ifdOffset > reader.getLength()) { directory.addError("Illegally sized IFD"); return; } // Handle each tag in this directory for (int tagNumber = 0; tagNumber < dirTagCount; tagNumber++) { final int tagOffset = calculateTagOffset(ifdOffset, tagNumber); // 2 bytes for the tag type final int tagType = reader.getUInt16(tagOffset); // 2 bytes for the format code final int formatCode = reader.getUInt16(tagOffset + 2); if (formatCode < 1 || formatCode > MAX_FORMAT_CODE) { // This error suggests that we are processing at an incorrect index and will generate // rubbish until we go out of bounds (which may be a while). Exit now. directory.addError("Invalid TIFF tag format code: " + formatCode); return; } // 4 bytes dictate the number of components in this tag's data final int componentCount = reader.getInt32(tagOffset + 4); if (componentCount < 0) { directory.addError("Negative TIFF tag component count"); continue; } // each component may have more than one byte... calculate the total number of bytes final int byteCount = componentCount * BYTES_PER_FORMAT[formatCode]; final int tagValueOffset; if (byteCount > 4) { // If it's bigger than 4 bytes, the dir entry contains an offset. // dirEntryOffset must be passed, as some makernote implementations (e.g. Fujifilm) incorrectly use an // offset relative to the start of the makernote itself, not the TIFF segment. 
final int offsetVal = reader.getInt32(tagOffset + 8); if (offsetVal + byteCount > reader.getLength()) { // Bogus pointer offset and / or byteCount value directory.addError("Illegal TIFF tag pointer offset"); continue; } tagValueOffset = tiffHeaderOffset + offsetVal; } else { // 4 bytes or less and value is in the dir entry itself tagValueOffset = tagOffset + 8; } if (tagValueOffset < 0 || tagValueOffset > reader.getLength()) { directory.addError("Illegal TIFF tag pointer offset"); continue; } // Check that this tag isn't going to allocate outside the bounds of the data array. // This addresses an uncommon OutOfMemoryError. if (byteCount < 0 || tagValueOffset + byteCount > reader.getLength()) { directory.addError("Illegal number of bytes for TIFF tag data: " + byteCount); continue; } // // Special handling for certain known tags that point to or contain other chunks of data to be processed // if (tagType == ExifIFD0Directory.TAG_EXIF_SUB_IFD_OFFSET && directory instanceof ExifIFD0Directory) { if (byteCount != 4) { directory.addError("Exif SubIFD Offset tag should have a component count of four (bytes) for the offset."); } else { final int subDirOffset = tiffHeaderOffset + reader.getInt32(tagValueOffset); processIFD(metadata.getOrCreateDirectory(ExifSubIFDDirectory.class), processedIfdOffsets, subDirOffset, tiffHeaderOffset, metadata, reader); } } else if (tagType == ExifSubIFDDirectory.TAG_INTEROP_OFFSET && directory instanceof ExifSubIFDDirectory) { if (byteCount != 4) { directory.addError("Exif Interop Offset tag should have a component count of four (bytes) for the offset."); } else { final int subDirOffset = tiffHeaderOffset + reader.getInt32(tagValueOffset); processIFD(metadata.getOrCreateDirectory(ExifInteropDirectory.class), processedIfdOffsets, subDirOffset, tiffHeaderOffset, metadata, reader); } } else if (tagType == ExifIFD0Directory.TAG_GPS_INFO_OFFSET && directory instanceof ExifIFD0Directory) { if (byteCount != 4) { directory.addError("Exif GPS Info Offset tag should have a component count of four (bytes) for the offset."); } else { final int subDirOffset = tiffHeaderOffset + reader.getInt32(tagValueOffset); processIFD(metadata.getOrCreateDirectory(GpsDirectory.class), processedIfdOffsets, subDirOffset, tiffHeaderOffset, metadata, reader); } } else if (tagType == ExifSubIFDDirectory.TAG_MAKERNOTE && directory instanceof ExifSubIFDDirectory) { // The makernote tag contains the encoded makernote data directly. // Pass the offset to this tag's value. Manufacturer/Model-specific logic will be used to // determine the correct offset for further processing. 
processMakernote(tagValueOffset, processedIfdOffsets, tiffHeaderOffset, metadata, reader); } else { processTag(directory, tagType, tagValueOffset, componentCount, formatCode, reader); } } // at the end of each IFD is an optional link to the next IFD final int finalTagOffset = calculateTagOffset(ifdOffset, dirTagCount); int nextDirectoryOffset = reader.getInt32(finalTagOffset); if (nextDirectoryOffset != 0) { nextDirectoryOffset += tiffHeaderOffset; if (nextDirectoryOffset >= reader.getLength()) { // Last 4 bytes of IFD reference another IFD with an address that is out of bounds // Note this could have been caused by jhead 1.3 cropping too much return; } else if (nextDirectoryOffset < ifdOffset) { // Last 4 bytes of IFD reference another IFD with an address that is before the start of this directory return; } // TODO in Exif, the only known 'follower' IFD is the thumbnail one, however this may not be the case final ExifThumbnailDirectory nextDirectory = metadata.getOrCreateDirectory(ExifThumbnailDirectory.class); processIFD(nextDirectory, processedIfdOffsets, nextDirectoryOffset, tiffHeaderOffset, metadata, reader); } } @Deprecated private static void processMakernote(final int makernoteOffset, final @NotNull Set<Integer> processedIfdOffsets, final int tiffHeaderOffset, final @NotNull Metadata metadata, final @NotNull RandomAccessReader reader) throws IOException { // Determine the camera model and makernote format Directory ifd0Directory = metadata.getDirectory(ExifIFD0Directory.class); if (ifd0Directory == null) return; String cameraMake = ifd0Directory.getString(ExifIFD0Directory.TAG_MAKE); final String firstThreeChars = reader.getString(makernoteOffset, 3); final String firstFourChars = reader.getString(makernoteOffset, 4); final String firstFiveChars = reader.getString(makernoteOffset, 5); final String firstSixChars = reader.getString(makernoteOffset, 6); final String firstSevenChars = reader.getString(makernoteOffset, 7); final String firstEightChars = reader.getString(makernoteOffset, 8); final String firstTwelveChars = reader.getString(makernoteOffset, 12); boolean byteOrderBefore = reader.isMotorolaByteOrder(); if ("OLYMP".equals(firstFiveChars) || "EPSON".equals(firstFiveChars) || "AGFA".equals(firstFourChars)) { // Olympus Makernote // Epson and Agfa use Olympus makernote standard: http://www.ozhiker.com/electronics/pjmt/jpeg_info/ processIFD(metadata.getOrCreateDirectory(OlympusMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, tiffHeaderOffset, metadata, reader); } else if (cameraMake != null && cameraMake.trim().toUpperCase().startsWith("NIKON")) { if ("Nikon".equals(firstFiveChars)) { /* There are two scenarios here: * Type 1: ** * :0000: 4E 69 6B 6F 6E 00 01 00-05 00 02 00 02 00 06 00 Nikon........... * :0010: 00 00 EC 02 00 00 03 00-03 00 01 00 00 00 06 00 ................ * Type 3: ** * :0000: 4E 69 6B 6F 6E 00 02 00-00 00 4D 4D 00 2A 00 00 Nikon....MM.*... 
* :0010: 00 08 00 1E 00 01 00 07-00 00 00 04 30 32 30 30 ............0200 */ switch (reader.getUInt8(makernoteOffset + 6)) { case 1: processIFD(metadata.getOrCreateDirectory(NikonType1MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, tiffHeaderOffset, metadata, reader); break; case 2: processIFD(metadata.getOrCreateDirectory(NikonType2MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 18, makernoteOffset + 10, metadata, reader); break; default: ifd0Directory.addError("Unsupported Nikon makernote data ignored."); break; } } else { // The IFD begins with the first Makernote byte (no ASCII name). This occurs with CoolPix 775, E990 and D1 models. processIFD(metadata.getOrCreateDirectory(NikonType2MakernoteDirectory.class), processedIfdOffsets, makernoteOffset, tiffHeaderOffset, metadata, reader); } } else if ("SONY CAM".equals(firstEightChars) || "SONY DSC".equals(firstEightChars)) { processIFD(metadata.getOrCreateDirectory(SonyType1MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 12, tiffHeaderOffset, metadata, reader); } else if ("SEMC MS\u0000\u0000\u0000\u0000\u0000".equals(firstTwelveChars)) { // force MM for this directory reader.setMotorolaByteOrder(true); // skip 12 byte header + 2 for "MM" + 6 processIFD(metadata.getOrCreateDirectory(SonyType6MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 20, tiffHeaderOffset, metadata, reader); } else if ("SIGMA\u0000\u0000\u0000".equals(firstEightChars) || "FOVEON\u0000\u0000".equals(firstEightChars)) { processIFD(metadata.getOrCreateDirectory(SigmaMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 10, tiffHeaderOffset, metadata, reader); } else if ("KDK".equals(firstThreeChars)) { reader.setMotorolaByteOrder(firstSevenChars.equals("KDK INFO")); processKodakMakernote(metadata.getOrCreateDirectory(KodakMakernoteDirectory.class), makernoteOffset, reader); } else if ("Canon".equalsIgnoreCase(cameraMake)) { processIFD(metadata.getOrCreateDirectory(CanonMakernoteDirectory.class), processedIfdOffsets, makernoteOffset, tiffHeaderOffset, metadata, reader); } else if (cameraMake != null && cameraMake.toUpperCase().startsWith("CASIO")) { if ("QVC\u0000\u0000\u0000".equals(firstSixChars)) { processIFD(metadata.getOrCreateDirectory(CasioType2MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 6, tiffHeaderOffset, metadata, reader); } else { processIFD(metadata.getOrCreateDirectory(CasioType1MakernoteDirectory.class), processedIfdOffsets, makernoteOffset, tiffHeaderOffset, metadata, reader); } } else if ("FUJIFILM".equals(firstEightChars) || "Fujifilm".equalsIgnoreCase(cameraMake)) { // Note that this also applies to certain Leica cameras, such as the Digilux-4.3 reader.setMotorolaByteOrder(false); // the 4 bytes after "FUJIFILM" in the makernote point to the start of the makernote // IFD, though the offset is relative to the start of the makernote, not the TIFF // header (like everywhere else) int ifdStart = makernoteOffset + reader.getInt32(makernoteOffset + 8); processIFD(metadata.getOrCreateDirectory(FujifilmMakernoteDirectory.class), processedIfdOffsets, ifdStart, makernoteOffset, metadata, reader); } else if (cameraMake != null && cameraMake.toUpperCase().startsWith("MINOLTA")) { // Cases seen with the model starting with MINOLTA in capitals seem to have a valid Olympus makernote // area that commences immediately. 
processIFD(metadata.getOrCreateDirectory(OlympusMakernoteDirectory.class), processedIfdOffsets, makernoteOffset, tiffHeaderOffset, metadata, reader); } else if ("KYOCERA".equals(firstSevenChars)) { // http://www.ozhiker.com/electronics/pjmt/jpeg_info/kyocera_mn.html processIFD(metadata.getOrCreateDirectory(KyoceraMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 22, tiffHeaderOffset, metadata, reader); } else if ("LEICA".equals(firstFiveChars)) { reader.setMotorolaByteOrder(false); if ("Leica Camera AG".equals(cameraMake)) { processIFD(metadata.getOrCreateDirectory(LeicaMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, tiffHeaderOffset, metadata, reader); } else if ("LEICA".equals(cameraMake)) { // Some Leica cameras use Panasonic makernote tags processIFD(metadata.getOrCreateDirectory(PanasonicMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, tiffHeaderOffset, metadata, reader); } } else if ("Panasonic\u0000\u0000\u0000".equals(reader.getString(makernoteOffset, 12))) { // NON-Standard TIFF IFD Data using Panasonic Tags. There is no Next-IFD pointer after the IFD // Offsets are relative to the start of the TIFF header at the beginning of the EXIF segment // more information here: http://www.ozhiker.com/electronics/pjmt/jpeg_info/panasonic_mn.html processIFD(metadata.getOrCreateDirectory(PanasonicMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 12, tiffHeaderOffset, metadata, reader); } else if ("AOC\u0000".equals(firstFourChars)) { // NON-Standard TIFF IFD Data using Casio Type 2 Tags // IFD has no Next-IFD pointer at end of IFD, and // Offsets are relative to the start of the current IFD tag, not the TIFF header // Observed for: // - Pentax ist D processIFD(metadata.getOrCreateDirectory(CasioType2MakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 6, makernoteOffset, metadata, reader); } else if (cameraMake != null && (cameraMake.toUpperCase().startsWith("PENTAX") || cameraMake.toUpperCase().startsWith("ASAHI"))) { // NON-Standard TIFF IFD Data using Pentax Tags // IFD has no Next-IFD pointer at end of IFD, and // Offsets are relative to the start of the current IFD tag, not the TIFF header // Observed for: // - PENTAX Optio 330 // - PENTAX Optio 430 processIFD(metadata.getOrCreateDirectory(PentaxMakernoteDirectory.class), processedIfdOffsets, makernoteOffset, makernoteOffset, metadata, reader); // } else if ("KC".equals(firstTwoChars) || "MINOL".equals(firstFiveChars) || "MLY".equals(firstThreeChars) || "+M+M+M+M".equals(firstEightChars)) { // // This Konica data is not understood. Header identified in accordance with information at this site: // // http://www.ozhiker.com/electronics/pjmt/jpeg_info/minolta_mn.html // // TODO add support for minolta/konica cameras // exifDirectory.addError("Unsupported Konica/Minolta data ignored."); } else if ("SANYO\0\1\0".equals(firstEightChars)) { processIFD(metadata.getOrCreateDirectory(SanyoMakernoteDirectory.class), processedIfdOffsets, makernoteOffset + 8, makernoteOffset, metadata, reader); } else { // The makernote is not comprehended by this library. // If you are reading this and believe a particular camera's image should be processed, get in touch. } reader.setMotorolaByteOrder(byteOrderBefore); } @Deprecated private static void processKodakMakernote(@NotNull final KodakMakernoteDirectory directory, final int tagValueOffset, @NotNull final RandomAccessReader reader) { // Kodak's makernote is not in IFD format. It has values at fixed offsets. 
int dataOffset = tagValueOffset + 8; try { directory.setString(KodakMakernoteDirectory.TAG_KODAK_MODEL, reader.getString(dataOffset, 8)); directory.setInt(KodakMakernoteDirectory.TAG_QUALITY, reader.getUInt8(dataOffset + 9)); directory.setInt(KodakMakernoteDirectory.TAG_BURST_MODE, reader.getUInt8(dataOffset + 10)); directory.setInt(KodakMakernoteDirectory.TAG_IMAGE_WIDTH, reader.getUInt16(dataOffset + 12)); directory.setInt(KodakMakernoteDirectory.TAG_IMAGE_HEIGHT, reader.getUInt16(dataOffset + 14)); directory.setInt(KodakMakernoteDirectory.TAG_YEAR_CREATED, reader.getUInt16(dataOffset + 16)); directory.setByteArray(KodakMakernoteDirectory.TAG_MONTH_DAY_CREATED, reader.getBytes(dataOffset + 18, 2)); directory.setByteArray(KodakMakernoteDirectory.TAG_TIME_CREATED, reader.getBytes(dataOffset + 20, 4)); directory.setInt(KodakMakernoteDirectory.TAG_BURST_MODE_2, reader.getUInt16(dataOffset + 24)); directory.setInt(KodakMakernoteDirectory.TAG_SHUTTER_MODE, reader.getUInt8(dataOffset + 27)); directory.setInt(KodakMakernoteDirectory.TAG_METERING_MODE, reader.getUInt8(dataOffset + 28)); directory.setInt(KodakMakernoteDirectory.TAG_SEQUENCE_NUMBER, reader.getUInt8(dataOffset + 29)); directory.setInt(KodakMakernoteDirectory.TAG_F_NUMBER, reader.getUInt16(dataOffset + 30)); directory.setLong(KodakMakernoteDirectory.TAG_EXPOSURE_TIME, reader.getUInt32(dataOffset + 32)); directory.setInt(KodakMakernoteDirectory.TAG_EXPOSURE_COMPENSATION, reader.getInt16(dataOffset + 36)); directory.setInt(KodakMakernoteDirectory.TAG_FOCUS_MODE, reader.getUInt8(dataOffset + 56)); directory.setInt(KodakMakernoteDirectory.TAG_WHITE_BALANCE, reader.getUInt8(dataOffset + 64)); directory.setInt(KodakMakernoteDirectory.TAG_FLASH_MODE, reader.getUInt8(dataOffset + 92)); directory.setInt(KodakMakernoteDirectory.TAG_FLASH_FIRED, reader.getUInt8(dataOffset + 93)); directory.setInt(KodakMakernoteDirectory.TAG_ISO_SETTING, reader.getUInt16(dataOffset + 94)); directory.setInt(KodakMakernoteDirectory.TAG_ISO, reader.getUInt16(dataOffset + 96)); directory.setInt(KodakMakernoteDirectory.TAG_TOTAL_ZOOM, reader.getUInt16(dataOffset + 98)); directory.setInt(KodakMakernoteDirectory.TAG_DATE_TIME_STAMP, reader.getUInt16(dataOffset + 100)); directory.setInt(KodakMakernoteDirectory.TAG_COLOR_MODE, reader.getUInt16(dataOffset + 102)); directory.setInt(KodakMakernoteDirectory.TAG_DIGITAL_ZOOM, reader.getUInt16(dataOffset + 104)); directory.setInt(KodakMakernoteDirectory.TAG_SHARPNESS, reader.getInt8(dataOffset + 107)); } catch (IOException ex) { directory.addError("Error processing Kodak makernote data: " + ex.getMessage()); } } @Deprecated private static void processTag(@NotNull final Directory directory, final int tagType, final int tagValueOffset, final int componentCount, final int formatCode, @NotNull final RandomAccessReader reader) throws IOException { // Directory simply stores raw values // The display side uses a Descriptor class per directory to turn the raw values into 'pretty' descriptions switch (formatCode) { case FMT_UNDEFINED: // this includes exif user comments directory.setByteArray(tagType, reader.getBytes(tagValueOffset, componentCount)); break; case FMT_STRING: String string = reader.getNullTerminatedString(tagValueOffset, componentCount); directory.setString(tagType, string); break; case FMT_SRATIONAL: if (componentCount == 1) { directory.setRational(tagType, new Rational(reader.getInt32(tagValueOffset), reader.getInt32(tagValueOffset + 4))); } else if (componentCount > 1) { Rational[] rationals = new 
Rational[componentCount]; for (int i = 0; i < componentCount; i++) rationals[i] = new Rational(reader.getInt32(tagValueOffset + (8 * i)), reader.getInt32(tagValueOffset + 4 + (8 * i))); directory.setRationalArray(tagType, rationals); } break; case FMT_URATIONAL: if (componentCount == 1) { directory.setRational(tagType, new Rational(reader.getUInt32(tagValueOffset), reader.getUInt32(tagValueOffset + 4))); } else if (componentCount > 1) { Rational[] rationals = new Rational[componentCount]; for (int i = 0; i < componentCount; i++) rationals[i] = new Rational(reader.getUInt32(tagValueOffset + (8 * i)), reader.getUInt32(tagValueOffset + 4 + (8 * i))); directory.setRationalArray(tagType, rationals); } break; case FMT_SINGLE: if (componentCount == 1) { directory.setFloat(tagType, reader.getFloat32(tagValueOffset)); } else { float[] floats = new float[componentCount]; for (int i = 0; i < componentCount; i++) floats[i] = reader.getFloat32(tagValueOffset + (i * 4)); directory.setFloatArray(tagType, floats); } break; case FMT_DOUBLE: if (componentCount == 1) { directory.setDouble(tagType, reader.getDouble64(tagValueOffset)); } else { double[] doubles = new double[componentCount]; for (int i = 0; i < componentCount; i++) doubles[i] = reader.getDouble64(tagValueOffset + (i * 4)); directory.setDoubleArray(tagType, doubles); } break; // // Note that all integral types are stored as int32 internally (the largest supported by TIFF) // case FMT_SBYTE: if (componentCount == 1) { directory.setInt(tagType, reader.getInt8(tagValueOffset)); } else { int[] bytes = new int[componentCount]; for (int i = 0; i < componentCount; i++) bytes[i] = reader.getInt8(tagValueOffset + i); directory.setIntArray(tagType, bytes); } break; case FMT_BYTE: if (componentCount == 1) { directory.setInt(tagType, reader.getUInt8(tagValueOffset)); } else { int[] bytes = new int[componentCount]; for (int i = 0; i < componentCount; i++) bytes[i] = reader.getUInt8(tagValueOffset + i); directory.setIntArray(tagType, bytes); } break; case FMT_USHORT: if (componentCount == 1) { int i = reader.getUInt16(tagValueOffset); directory.setInt(tagType, i); } else { int[] ints = new int[componentCount]; for (int i = 0; i < componentCount; i++) ints[i] = reader.getUInt16(tagValueOffset + (i * 2)); directory.setIntArray(tagType, ints); } break; case FMT_SSHORT: if (componentCount == 1) { int i = reader.getInt16(tagValueOffset); directory.setInt(tagType, i); } else { int[] ints = new int[componentCount]; for (int i = 0; i < componentCount; i++) ints[i] = reader.getInt16(tagValueOffset + (i * 2)); directory.setIntArray(tagType, ints); } break; case FMT_SLONG: case FMT_ULONG: // NOTE 'long' in this case means 32 bit, not 64 if (componentCount == 1) { int i = reader.getInt32(tagValueOffset); directory.setInt(tagType, i); } else { int[] ints = new int[componentCount]; for (int i = 0; i < componentCount; i++) ints[i] = reader.getInt32(tagValueOffset + (i * 4)); directory.setIntArray(tagType, ints); } break; default: directory.addError("Unknown format code " + formatCode + " for tag " + tagType); } } /** * Determine the offset at which a given InteropArray entry begins within the specified IFD. * * @param ifdStartOffset the offset at which the IFD starts * @param entryNumber the zero-based entry number */ @Deprecated private static int calculateTagOffset(int ifdStartOffset, int entryNumber) { // add 2 bytes for the tag count // each entry is 12 bytes, so we skip 12 * the number seen so far return ifdStartOffset + 2 + (12 * entryNumber); } }
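The extractTiff path above decides endianness from the two-character byte-order identifier ("MM" is Motorola/big-endian, "II" is Intel/little-endian) and then validates the 16-bit marker that follows: 0x002A for standard TIFF, 0x4F52 for Olympus ORF, 0x0055 for Panasonic RW2. Below is a hedged standalone sketch of that header check, reading from a plain byte[] rather than the library's RandomAccessReader; class and method names are made up for illustration.

import java.nio.charset.StandardCharsets;

// Standalone sketch of the TIFF header check performed in extractTiff; not the library implementation.
public final class TiffHeaderSketch {

    static boolean isMotorolaByteOrder(byte[] tiff, int headerOffset) {
        String id = new String(tiff, headerOffset, 2, StandardCharsets.US_ASCII);
        if ("MM".equals(id)) return true;    // Motorola / big-endian
        if ("II".equals(id)) return false;   // Intel / little-endian
        throw new IllegalArgumentException("Unclear byte order identifier: " + id);
    }

    static int readUInt16(byte[] b, int off, boolean motorola) {
        int first = b[off] & 0xFF, second = b[off + 1] & 0xFF;
        return motorola ? (first << 8) | second : (second << 8) | first;
    }

    public static void main(String[] args) {
        byte[] header = { 'I', 'I', 0x2A, 0x00 };   // little-endian header with the standard marker
        boolean motorola = isMotorolaByteOrder(header, 0);
        int marker = readUInt16(header, 2, motorola);
        // Markers accepted by extractTiff: 0x002A (TIFF), 0x4F52 (ORF), 0x0055 (RW2)
        boolean known = marker == 0x002A || marker == 0x4F52 || marker == 0x0055;
        System.out.println("motorola=" + motorola + ", marker=0x" + Integer.toHexString(marker) + ", known=" + known);
    }
}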
Extract inline IPTC data found in Exif chunks. Patch originally contributed by Samuele Manfrin.
Source/com/drew/metadata/exif/ExifReader.java
Extract inline IPTC data found in Exif chunks.
Java
apache-2.0
a06f269b1269a12e5ba2c44e877284b19a3a203f
0
drakeet/MultiType
/* * Copyright 2016 drakeet. https://github.com/drakeet * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package me.drakeet.multitype.sample.one2many; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v7.widget.RecyclerView; import java.util.ArrayList; import java.util.List; import me.drakeet.multitype.FlatTypeClassAdapter; import me.drakeet.multitype.Items; import me.drakeet.multitype.MultiTypeAdapter; import me.drakeet.multitype.MultiTypeAsserts; import me.drakeet.multitype.sample.MenuBaseActivity; import me.drakeet.multitype.sample.R; import static me.drakeet.multitype.MultiTypeAsserts.assertAllRegistered; import static me.drakeet.multitype.MultiTypeAsserts.assertHasTheSameAdapter; public class OneDataToManyActivity extends MenuBaseActivity { RecyclerView recyclerView; MultiTypeAdapter adapter; Items items; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_list); recyclerView = (RecyclerView) findViewById(R.id.list); items = new Items(); adapter = new MultiTypeAdapter(items); adapter.setFlatTypeAdapter(new FlatTypeClassAdapter() { @NonNull @Override public Class onFlattenClass(@NonNull Object item) { return ((Data) item).typeClass; } }); adapter.register(Data.Type1.class, new DataType1ViewProvider()); adapter.register(Data.Type2.class, new DataType2ViewProvider()); List<Data> dataList = getDataFromService(); // ๅฆ‚ๆžœ Data ไปฌไธไผš่‡ชๅŠจๆŠŠ int type -> Class typeClass ็š„่ฏ๏ผŒ // ้œ€่ฆ่‡ช่กŒๅค„็†๏ผŒ็คบไพ‹ๅฆ‚ไธ‹๏ผš // for (Data data : dataList) { // data.typeClass = Data.getTypeClass(data.type); // } items.addAll(dataList); assertAllRegistered(adapter, items); recyclerView.setAdapter(adapter); assertHasTheSameAdapter(recyclerView, adapter); } private List<Data> getDataFromService() { List<Data> list = new ArrayList<>(); final int TYPE1 = 1, TYPE2 = 2; for (int i = 0; i < 30; i = i + 2) { list.add(new Data("title: " + i, TYPE1)); list.add(new Data("title: " + i + 1, TYPE2)); } return list; } }
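This activity maps a single Data model onto two item view providers by letting onFlattenClass return data.typeClass; the commented-out loop (its Chinese comment says: if the Data objects do not map int type to Class typeClass automatically, do it yourself, as in the example) covers the case where that mapping has to be filled in by hand. The sample's real Data class is not included in this entry, so what follows is only a guess at its shape; the field and constant names are assumptions.

// Illustrative sketch only -- the actual sample Data class in drakeet/MultiType may differ.
public class Data {

    public static final int TYPE_1 = 1, TYPE_2 = 2;

    public final String title;
    public final int type;
    public Class<?> typeClass;               // what FlatTypeClassAdapter.onFlattenClass(...) returns

    public Data(String title, int type) {
        this.title = title;
        this.type = type;
        this.typeClass = getTypeClass(type); // the "automatic" int type -> Class mapping
    }

    public static Class<?> getTypeClass(int type) {
        switch (type) {
            case TYPE_1: return Type1.class;
            case TYPE_2: return Type2.class;
            default: throw new IllegalArgumentException("Unknown type: " + type);
        }
    }

    // Marker types registered with MultiTypeAdapter, one view provider each.
    public static class Type1 {}
    public static class Type2 {}
}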
sample/src/main/java/me/drakeet/multitype/sample/one2many/OneDataToManyActivity.java
/* * Copyright 2016 drakeet. https://github.com/drakeet * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package me.drakeet.multitype.sample.one2many; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v7.widget.RecyclerView; import java.util.ArrayList; import java.util.List; import me.drakeet.multitype.FlatTypeClassAdapter; import me.drakeet.multitype.Items; import me.drakeet.multitype.MultiTypeAdapter; import me.drakeet.multitype.sample.MenuBaseActivity; import me.drakeet.multitype.sample.R; public class OneDataToManyActivity extends MenuBaseActivity { RecyclerView recyclerView; MultiTypeAdapter adapter; Items items; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_list); recyclerView = (RecyclerView) findViewById(R.id.list); items = new Items(); adapter = new MultiTypeAdapter(items); adapter.setFlatTypeAdapter(new FlatTypeClassAdapter() { @NonNull @Override public Class onFlattenClass(@NonNull Object item) { return ((Data) item).typeClass; } }); adapter.register(Data.Type1.class, new DataType1ViewProvider()); adapter.register(Data.Type2.class, new DataType2ViewProvider()); List<Data> dataList = getDataFromService(); // ๅฆ‚ๆžœ Data ไปฌไธไผš่‡ชๅŠจๆŠŠ int type -> Class typeClass ็š„่ฏ๏ผŒ // ้œ€่ฆ่‡ช่กŒๅค„็†๏ผŒ็คบไพ‹ๅฆ‚ไธ‹๏ผš // for (Data data : dataList) { // data.typeClass = Data.getTypeClass(data.type); // } items.addAll(dataList); recyclerView.setAdapter(adapter); } private List<Data> getDataFromService() { List<Data> list = new ArrayList<>(); final int TYPE1 = 1, TYPE2 = 2; for (int i = 0; i < 30; i = i + 2) { list.add(new Data("title: " + i, TYPE1)); list.add(new Data("title: " + i + 1, TYPE2)); } return list; } }
Supplied MultiTypeAsserts sample to OneDataToManyActivity
sample/src/main/java/me/drakeet/multitype/sample/one2many/OneDataToManyActivity.java
Supplied MultiTypeAsserts sample to OneDataToManyActivity
Java
apache-2.0
83b6c285d0a40e8645c7422e7250e8fa2b7da86d
0
mashengchen/incubator-trafodion,rlugojr/incubator-trafodion,mashengchen/incubator-trafodion,robertamarton/incubator-trafodion,rlugojr/incubator-trafodion,rlugojr/incubator-trafodion,robertamarton/incubator-trafodion,mashengchen/incubator-trafodion,robertamarton/incubator-trafodion,robertamarton/incubator-trafodion,mashengchen/incubator-trafodion,mashengchen/incubator-trafodion,rlugojr/incubator-trafodion,robertamarton/incubator-trafodion,rlugojr/incubator-trafodion,rlugojr/incubator-trafodion,rlugojr/incubator-trafodion,mashengchen/incubator-trafodion,rlugojr/incubator-trafodion,robertamarton/incubator-trafodion,mashengchen/incubator-trafodion,robertamarton/incubator-trafodion,robertamarton/incubator-trafodion,robertamarton/incubator-trafodion,mashengchen/incubator-trafodion,mashengchen/incubator-trafodion,robertamarton/incubator-trafodion,rlugojr/incubator-trafodion,mashengchen/incubator-trafodion,rlugojr/incubator-trafodion
/** * @@@ START COPYRIGHT @@@ * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * * @@@ END COPYRIGHT @@@ */ package org.trafodion.libmgmt; import java.io.File; import java.io.FileFilter; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.nio.file.Files; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class FileMgmt { private static final Logger LOG = LoggerFactory.getLogger(FileMgmt.class); private static final String url = "jdbc:default:connection"; // 100Mb private static final long MAX_JAR_FILE_SIZE = 104857600; private static final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private static final int MaxDataSize = 12800; private static final String CHARTSET = "ISO-8859-1"; private static final String DEL_POSTFIX = ".DELETE"; /** * Print help info * * @param helps: * INOUT parameter like PUT/LS/... */ public static void help(String[] helps) { String[] help = new String[] { "PUT - Upload a JAR. SHOWDDL PROCEDURE [SCHEMA NAME.]PUT for more info.", "LS - List JARs. SHOWDDL PROCEDURE [SCHEMA NAME.]LS for more info.", "LSALL - List all JARs. SHOWDDL PROCEDURE [SCHEMA NAME.]LSALL for more info.", "RM - Remove a JAR. SHOWDDL PROCEDURE [SCHEMA NAME.]RM for more info.", "RMREX - Remove JARs by a perticular pattern. SHOWDDL PROCEDURE [SCHEMA NAME.]RMREX for more info.", "GETFILE - Download a JAR. SHOWDDL PROCEDURE [SCHEMA NAME.]GETFILE for more info.", "ADDLIB - Create a library. SHOWDDL PROCEDURE [SCHEMA NAME.]ADDLIB for more info.", "ALTERLIB - Update a library. SHOWDDL PROCEDURE [SCHEMA NAME.]ALTERLIB for more info.", "DROPLIB - Drop a library. SHOWDDL PROCEDURE [SCHEMA NAME.]DROPLIB for more info." 
}; List<String> index = new ArrayList<String>(help.length); index.add("PUT"); index.add("LS"); index.add("LSALL"); index.add("RM"); index.add("RMREX"); index.add("GETFILE"); String tmp = helps[0].trim().toUpperCase(); helps[0] = "HELP:\r\n"; switch (index.indexOf(tmp)) { case 0: helps[0] = help[0]; break; case 1: helps[0] = help[1]; break; case 2: helps[0] = help[2]; break; case 3: helps[0] = help[3]; break; case 4: helps[0] = help[4]; break; case 5: helps[0] = help[5]; break; default: for (String h : help) { helps[0] += h + "\r\n"; } } } /** create a library * @param libName library name * @param fileName related file name * @param hostName host name * @param localFile local file * @throws SQLException */ public static void addLib(String libName, String fileName, String hostName, String localFile) throws SQLException { checkFileName(fileName); Connection conn = getConn(); Statement st = null; String sql = ""; try { st = conn.createStatement(); String userPath = getCodeFilePath(conn); sql = "create library " + libName + " file '" + userPath + fileName + "'"; if (hostName != null && !"".equals(hostName.trim())) { sql += " HOST NAME '" + hostName + "'"; } if (localFile != null && !"".equals(localFile.trim())) { sql += " LOCAL FILE '" + localFile + "'"; } st.execute(sql); } catch(SQLException e){ LOG.error(sql,e); throw e; }finally { if (st != null) { try { st.close(); } catch (Exception e) { } } if (conn != null){ try { conn.close(); } catch (Exception e) { } } } } /** * change the library related attribute * * @param libName * library name * @param fileName * uploaded file's name * @param hostName * @param localFile * @throws SQLException */ public static void alterLib(String libName, String fileName, String hostName, String localFile) throws SQLException { checkFileName(fileName); Connection conn = getConn(); Statement st = null; String userPath = getCodeFilePath(conn); String sql = "alter library " + libName + " FILE '" + userPath + fileName + "'"; if (hostName != null && !"".equals(hostName.trim())) { sql += " HOST NAME '" + hostName + "'"; } if (localFile != null && !"".equals(localFile.trim())) { sql += " LOCAL FILE '" + localFile + "'"; } try { st = conn.createStatement(); st.execute(sql); } catch(SQLException e){ LOG.error(sql,e); throw e; } finally { if (st != null) st.close(); if (conn != null){ try { conn.close(); } catch (Exception e) { } } } } /** * drop the library * * @param libName * @param isdefault * true is RESTRICT false is CASCADE * @throws SQLException */ public static void dropLib(String libName, String mode) throws SQLException { String sql = null; Connection con = getConn(); Statement st = null; try { st = con.createStatement(); sql = "drop library " + libName; if (mode != null) if (mode.trim().equalsIgnoreCase("RESTRICT")) sql += " RESTRICT"; else if (mode.trim().equalsIgnoreCase("CASCADE")) sql += " CASCADE"; st.execute(sql); } catch(SQLException e){ LOG.error(sql,e); throw e; } finally { if (st != null) st.close(); if (con != null){ try { con.close(); } catch (Exception e) { } } } } public static void syncJar(String userPath, String fileName) throws SQLException, IOException { checkFileName(fileName); LOG.info("syncJars " + fileName); String nodes = System.getenv("MY_NODES"); if (nodes != null && !"".equals(nodes.trim())) { String pdcp = System.getenv("SQ_PDCP"); String pdsh = System.getenv("SQ_PDSH"); if (pdcp != null) { execShell(pdcp + " " + nodes + " " + userPath + fileName.trim() + " " + userPath + " "); } if (pdsh != null) { execShell(pdsh + " " + nodes + " 
chmod 755 " + userPath + fileName.trim()); } } } public static void rmJar(String userPath, String fileName) throws SQLException, IOException { checkFileName(fileName); LOG.info("syncJars " + fileName); String nodes = System.getenv("MY_NODES"); if (nodes != null && !"".equals(nodes.trim())) { String pdsh = System.getenv("SQ_PDSH"); if (pdsh != null) { execShell(pdsh + " " + nodes + " rm -rf " + userPath + fileName.trim()); } } } private static String execShell(String cmd) throws IOException { Process p = Runtime.getRuntime().exec(cmd); if (p != null) { StringBuilder sb = new StringBuilder(); InputStream in = null; try { in = p.getInputStream(); int c = -1; while ((c = in.read()) != -1) { sb.append((char) c); } } finally { if (in != null) in.close(); } try { in = p.getErrorStream(); int c = -1; boolean flag = true; while ((c = in.read()) != -1) { if (flag) { sb.append("\r\n"); } else { flag = false; } sb.append((char) c); } } finally { if (in != null) in.close(); } return sb.toString(); } return null; } /** * Download a JAR file * * @param fileName * @param offset * @param fileData * @throws SQLException * @throws IOException */ public static void get(String fileName, int offset, String[] fileData, long[] fileLength) throws SQLException, IOException { checkFileName(fileName); Connection conn = getConn(); LOG.info("Get " + fileName); String userPath = getCodeFilePath(conn); close(conn); File file = new File(userPath + fileName); if (!file.exists()) { throw new SQLException("No such file[" + fileName + "]"); } RandomAccessFile rAFile = null; try { rAFile = new RandomAccessFile(file, "r"); rAFile.seek(offset); byte bArray[] = new byte[MaxDataSize]; int bytesRead = rAFile.read(bArray, 0, MaxDataSize); if (bytesRead != -1) { fileData[0] = new String(Arrays.copyOf(bArray, bytesRead), CHARTSET); fileLength[0] = file.length(); LOG.info("Download: " + fileName + ", offset:" + offset + ",compressed length:" + fileData[0].length() + ",file length:" + fileLength[0]); } } catch(IOException e){ LOG.error(fileName,e); throw e; } finally { if (rAFile != null) { try { rAFile.close(); } catch (Exception e) { LOG.warn("Something wrong while close file[" + fileName + "] stream: " + e.getMessage()); } } } } /** * Remove exact file * * @param fileName * @throws SQLException * @throws IOException */ public static void rm(String fileName) throws SQLException, IOException { checkFileName(fileName); Connection conn = getConn(); LOG.info("Remove " + fileName); String userPath = getCodeFilePath(conn); close(conn); File file = new File(userPath + fileName); File delFile = new File(fileName + DEL_POSTFIX); boolean isSuccess = false; if (file.exists()) { try { boolean isRenamed = file.renameTo(delFile); if (isRenamed) rmJar(userPath, fileName); else { throw new IOException("Delete " + fileName + " failed. 
File metrics: CanRead is " + file.canRead() + ", canWrite is " + file.canWrite() + ", canExecute is " + file.canExecute()); } LOG.info("Remove " + fileName + " successfully!"); isSuccess = true; return; } finally { if (isSuccess) { delFile.delete(); } else { delFile.renameTo(file); } } } else { LOG.error("No such file[" + fileName + "]"); throw new SQLException("No such file[" + fileName + "]"); } } /** * Remove files via regular formulation * * @param pattern: * to be deleted * @param names * : file names to be deleted * @throws SQLException * @throws IOException */ public static void rmRex(String pattern, String[] names) throws SQLException, IOException { checkFileName(pattern); Connection conn = getConn(); LOG.info("Try to remove files[" + pattern + "]"); String userPath = getCodeFilePath(conn); close(conn); File[] files = getFiles(pattern, new File(userPath)); File[] delFiles = new File[files.length]; StringBuilder sb = new StringBuilder(); sb.append("<rmRex>"); sb.append(toXML(files, "rmList")); sb.append("<message>"); boolean hasError = false; boolean isSuccess = false; try { for (int i = 0; i < files.length; i++) { delFiles[i] = new File(files[i].getAbsolutePath() + DEL_POSTFIX); files[i].renameTo(delFiles[i]); } rmJar(userPath, pattern); isSuccess = true; } finally { if (isSuccess) { for (int i = 0; i < delFiles.length; i++) { delFiles[i].delete(); } } else { for (int i = 0; i < delFiles.length; i++) { delFiles[i].renameTo(files[i]); } } } if (!hasError) { sb.append("Remove the files successfully!"); } sb.append("</message>"); sb.append("</rmRex>"); names[0] = sb.toString(); LOG.info("Done for removing files[" + pattern + "]."); } public static void lsAll(String[] names) throws SQLException { ls("*", names); } /** * list the Jars matching PATTERN * * @param pattern: * @param names * @throws SQLException */ public static void ls(String pattern, String[] names) throws SQLException { checkFileName(pattern); Connection conn = getConn(); LOG.info("List files[" + pattern + "]"); String userPath = getCodeFilePath(conn); close(conn); File dir = new File(userPath); if (!dir.exists() || !dir.isDirectory()) { LOG.error("Directory [" + userPath + "] is not found!"); throw new SQLException("Directory [" + userPath + "] is not found!"); } if (pattern == null) { LOG.error("File pattern should not be empty!"); throw new SQLException("Pattern is empty!"); } File[] files = getFiles(pattern, dir); names[0] = toXML(files, "ls"); } /** * upload a JAR file * * @param fileData * @param fileName * @param appendFlag * 0: append; otherwise overwrite * @throws SQLException */ public static void put(String fileData, String fileName, int appendFlag) throws SQLException { checkFileName(fileName); try { byte[] data = fileData.getBytes(CHARTSET); Connection conn = getConn(); LOG.info("Put " + fileName + ", length: " + data.length + ", file string length:" + fileData.length()); String userPath = getCodeFilePath(conn); close(conn); String fname = userPath + fileName; checkFile(fname, data.length); FileOutputStream fos = null; FileChannel channel = null; FileLock lock = null; try { fos = new FileOutputStream(fname, (appendFlag == 0)); channel = fos.getChannel(); lock = channel.tryLock(); if (lock != null) { fos.write(data); fos.flush(); }else{ throw new SQLException("File "+fileName+" is locked, please try again later."); } } finally { if(lock != null){ lock.release(); } if(channel !=null){ channel.close(); } if (fos != null) fos.close(); } syncJar(userPath, fileName); LOG.info("PUT method out !!! 
" + fileName); } catch (Throwable t) { LOG.error(t.getMessage(), t); throw new SQLException(t.getMessage()); } } private static void checkFileName(String fileName) throws SQLException { if (fileName.contains("/") || fileName.contains("\\")) throw new SQLException("Illegal file name: " + fileName + ". File name must not contain \"/\"."); } private static void checkFile(String fname, int dataSize) throws SQLException { File jar = new File(fname); if (jar.length() + dataSize > MAX_JAR_FILE_SIZE) { LOG.error("Jar file size is over the threshold[100Mb]"); throw new SQLException("Jar file size is over the threshold[100Mb]"); } } private static String getCodeFilePath(Connection conn) throws SQLException { String user = getCurrentUser(conn); String root = System.getenv("MY_SQROOT"); if (root == null || "".equals(root.trim())) { LOG.error("Cant get your traf installation path!"); throw new SQLException("Cant get your traf installation path!"); } File file = new File(root + "/udr/lib/" + user); if (!file.exists()) { file.mkdirs(); } else if (!file.isDirectory()) { throw new SQLException("User Directory is not valide or you dont have permission!"); } LOG.info("SPJ JARs location: " + file.getAbsolutePath()); return file.getAbsolutePath() + "/"; } private static Connection getConn() throws SQLException { Connection conn = null; try { conn = DriverManager.getConnection(url); LOG.info("Create connection successfully. " + conn +", autocommit:"+conn.getAutoCommit()); } catch (Throwable t) { LOG.error("Error encountered while getting connection ", t); throw new SQLException(t.getMessage()); } return conn; } private static String getCurrentUser(Connection conn) throws SQLException { Statement st = null; ResultSet rs = null; String user = null; try { st = conn.createStatement(); rs = st.executeQuery("values(session_user)"); if (rs.next()) { user = rs.getString(1); } } catch (Exception e) { LOG.error(e.getMessage(), e); throw new SQLException(e); } finally { if (rs != null) { try { rs.close(); } catch (Exception e) { LOG.warn(e.getMessage(), e); } } if (st != null) { try { st.close(); } catch (Exception e) { LOG.warn(e.getMessage(), e); } } } return user.replaceAll("[\\\\/]", "_"); } private static File[] getFiles(String pattern, File dir) { final String p = pattern.replaceAll("\\*", ".*").trim().toUpperCase(); return dir.listFiles(new FileFilter() { @Override public boolean accept(File name) { if (name == null || !name.isFile() || name.getName().endsWith(DEL_POSTFIX)) { return false; } return name.getName().trim().toUpperCase().matches(p); } }); } private static String toXML(File[] files, String root) { StringBuilder sb = new StringBuilder(); sb.append("<" + root + ">"); for (File f : files) { sb.append("<file name='" + f.getName() + "' lastModifyTime='" + format.format(new Date(f.lastModified())) + "' size='" + f.length() + "'/>"); } sb.append("</" + root + ">"); return sb.toString(); } private static void close(Connection conn) { try { conn.close(); LOG.info("Closed connection"); } catch (Exception e) { LOG.warn(e.getMessage()); } } }
core/sql/lib_mgmt/src/main/java/org/trafodion/libmgmt/FileMgmt.java
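FileMgmt.put above guards concurrent uploads by opening the target with FileOutputStream (append mode when appendFlag == 0, overwrite otherwise), taking a non-blocking FileChannel.tryLock(), and reporting "file is locked" when another writer already holds it. Below is a reduced standalone sketch of that locking pattern, with a placeholder path and payload; it is not the library code and omits the pdcp/pdsh node sync that syncJar performs afterwards.

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;

// Reduced sketch of the tryLock-guarded write in FileMgmt.put(); path and payload below are placeholders.
public final class LockedWriteSketch {

    static void write(String path, byte[] data, boolean append) throws IOException {
        try (FileOutputStream fos = new FileOutputStream(path, append);
             FileChannel channel = fos.getChannel();
             FileLock lock = channel.tryLock()) {              // non-blocking; null if another process holds the lock
            if (lock == null) {
                // FileMgmt surfaces this case to the caller as an SQLException.
                throw new IOException("File " + path + " is locked, please try again later.");
            }
            fos.write(data);
            fos.flush();
        }                                                      // lock, channel and stream released here
    }

    public static void main(String[] args) throws IOException {
        write("example-upload.part", "payload".getBytes("ISO-8859-1"), true);
    }
}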
/** * @@@ START COPYRIGHT @@@ * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * * @@@ END COPYRIGHT @@@ */ package org.trafodion.libmgmt; import java.io.File; import java.io.FileFilter; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.nio.file.Files; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class FileMgmt { private static final Logger LOG = LoggerFactory.getLogger(FileMgmt.class); private static final String url = "jdbc:default:connection"; // 100Mb private static final long MAX_JAR_FILE_SIZE = 104857600; private static final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); private static final int MaxDataSize = 12800; private static final String CHARTSET = "ISO-8859-1"; private static final String DEL_POSTFIX = ".DELETE"; /** * Print help info * * @param helps: * INOUT parameter like PUT/LS/... */ public static void help(String[] helps) { String[] help = new String[] { "PUT - Upload a JAR. SHOWDDL PROCEDURE [SCHEMA NAME.]PUT for more info.", "LS - List JARs. SHOWDDL PROCEDURE [SCHEMA NAME.]LS for more info.", "LSALL - List all JARs. SHOWDDL PROCEDURE [SCHEMA NAME.]LSALL for more info.", "RM - Remove a JAR. SHOWDDL PROCEDURE [SCHEMA NAME.]RM for more info.", "RMREX - Remove JARs by a perticular pattern. SHOWDDL PROCEDURE [SCHEMA NAME.]RMREX for more info.", "GETFILE - Download a JAR. SHOWDDL PROCEDURE [SCHEMA NAME.]GETFILE for more info.", "ADDLIB - Create a library. SHOWDDL PROCEDURE [SCHEMA NAME.]ADDLIB for more info.", "ALTERLIB - Update a library. SHOWDDL PROCEDURE [SCHEMA NAME.]ALTERLIB for more info.", "DROPLIB - Drop a library. SHOWDDL PROCEDURE [SCHEMA NAME.]DROPLIB for more info." 
}; List<String> index = new ArrayList<String>(help.length); index.add("PUT"); index.add("LS"); index.add("LSALL"); index.add("RM"); index.add("RMREX"); index.add("GETFILE"); String tmp = helps[0].trim().toUpperCase(); helps[0] = "HELP:\r\n"; switch (index.indexOf(tmp)) { case 0: helps[0] = help[0]; break; case 1: helps[0] = help[1]; break; case 2: helps[0] = help[2]; break; case 3: helps[0] = help[3]; break; case 4: helps[0] = help[4]; break; case 5: helps[0] = help[5]; break; default: for (String h : help) { helps[0] += h + "\r\n"; } } } /** create a library * @param libName library name * @param fileName related file name * @param hostName host name * @param localFile local file * @throws SQLException */ public static void addLib(String libName, String fileName, String hostName, String localFile) throws SQLException { checkFileName(fileName); Connection conn = getConn(); Statement st = null; String sql = ""; try { st = conn.createStatement(); String userPath = getCodeFilePath(conn); sql = "create library " + libName + " file '" + userPath + fileName + "'"; if (hostName != null && !"".equals(hostName.trim())) { sql += " HOST NAME '" + hostName + "'"; } if (localFile != null && !"".equals(localFile.trim())) { sql += " LOCAL FILE '" + localFile + "'"; } st.execute(sql); } catch(SQLException e){ LOG.error(sql,e); throw e; }finally { if (st != null) { try { st.close(); } catch (Exception e) { } } if (conn != null){ try { conn.close(); } catch (Exception e) { } } } } /** * change the library related attribute * * @param libName * library name * @param fileName * uploaded file's name * @param hostName * @param localFile * @throws SQLException */ public static void alterLib(String libName, String fileName, String hostName, String localFile) throws SQLException { checkFileName(fileName); Connection conn = getConn(); Statement st = null; String userPath = getCodeFilePath(conn); String sql = "alter library " + libName + " FILE '" + userPath + fileName + "'"; if (hostName != null && !"".equals(hostName.trim())) { sql += " HOST NAME '" + hostName + "'"; } if (localFile != null && !"".equals(localFile.trim())) { sql += " LOCAL FILE '" + localFile + "'"; } try { st = conn.createStatement(); st.execute(sql); } catch(SQLException e){ LOG.error(sql,e); throw e; } finally { if (st != null) st.close(); if (conn != null){ try { conn.close(); } catch (Exception e) { } } } } /** * drop the library * * @param libName * @param isdefault * true is RESTRICT false is CASCADE * @throws SQLException */ public static void dropLib(String libName, String mode) throws SQLException { String sql = null; Connection con = getConn(); Statement st = null; try { st = con.createStatement(); sql = "drop library " + libName; if (mode != null) if (mode.trim().equalsIgnoreCase("RESTRICT")) sql += " RESTRICT"; else if (mode.trim().equalsIgnoreCase("CASCADE")) sql += " CASCADE"; st.execute(sql); } catch(SQLException e){ LOG.error(sql,e); throw e; } finally { if (st != null) st.close(); if (con != null){ try { con.close(); } catch (Exception e) { } } } } public static void syncJar(String userPath, String fileName) throws SQLException, IOException { checkFileName(fileName); LOG.info("syncJars " + fileName); String nodes = System.getenv("MY_NODES"); if (nodes != null && !"".equals(nodes.trim())) { String pdcp = System.getenv("SQ_PDCP"); String pdsh = System.getenv("SQ_PDSH"); if (pdcp != null) { execShell(pdcp + " " + nodes + " " + userPath + fileName.trim() + " " + userPath + " "); } if (pdsh != null) { execShell(pdsh + " " + nodes + " 
chmod 755 " + userPath + fileName.trim()); } } } public static void rmJar(String userPath, String fileName) throws SQLException, IOException { checkFileName(fileName); LOG.info("syncJars " + fileName); String nodes = System.getenv("MY_NODES"); if (nodes != null && !"".equals(nodes.trim())) { String pdsh = System.getenv("SQ_PDSH"); if (pdsh != null) { execShell(pdsh + " " + nodes + " rm -rf " + userPath + fileName.trim()); } } } private static String execShell(String cmd) throws IOException { Process p = Runtime.getRuntime().exec(cmd); if (p != null) { StringBuilder sb = new StringBuilder(); InputStream in = null; try { in = p.getInputStream(); int c = -1; while ((c = in.read()) != -1) { sb.append((char) c); } } finally { if (in != null) in.close(); } try { in = p.getErrorStream(); int c = -1; boolean flag = true; while ((c = in.read()) != -1) { if (flag) { sb.append("\r\n"); } else { flag = false; } sb.append((char) c); } } finally { if (in != null) in.close(); } return sb.toString(); } return null; } /** * Download a JAR file * * @param fileName * @param offset * @param fileData * @throws SQLException * @throws IOException */ public static void get(String fileName, int offset, String[] fileData, long[] fileLength) throws SQLException, IOException { checkFileName(fileName); Connection conn = getConn(); LOG.info("Get " + fileName); String userPath = getCodeFilePath(conn); close(conn); File file = new File(userPath + fileName); if (!file.exists()) { throw new SQLException("No such file[" + fileName + "]"); } RandomAccessFile rAFile = null; try { rAFile = new RandomAccessFile(file, "r"); rAFile.seek(offset); byte bArray[] = new byte[MaxDataSize]; int bytesRead = rAFile.read(bArray, 0, MaxDataSize); if (bytesRead != -1) { fileData[0] = new String(Arrays.copyOf(bArray, bytesRead), CHARTSET); fileLength[0] = file.length(); LOG.info("Download: " + fileName + ", offset:" + offset + ",compressed length:" + fileData[0].length() + ",file length:" + fileLength[0]); } } catch(IOException e){ LOG.error(fileName,e); throw e; } finally { if (rAFile != null) { try { rAFile.close(); } catch (Exception e) { LOG.warn("Something wrong while close file[" + fileName + "] stream: " + e.getMessage()); } } } } /** * Remove exact file * * @param fileName * @throws SQLException * @throws IOException */ public static void rm(String fileName) throws SQLException, IOException { checkFileName(fileName); Connection conn = getConn(); LOG.info("Remove " + fileName); String userPath = getCodeFilePath(conn); close(conn); File file = new File(userPath + fileName); File delFile = new File(fileName + DEL_POSTFIX); boolean isSuccess = false; if (file.exists()) { try { boolean isRenamed = file.renameTo(delFile); if (isRenamed) rmJar(userPath, fileName); else { throw new IOException("Delete " + fileName + " failed. 
File metrics: CanRead is " + file.canRead() + ", canWrite is " + file.canWrite() + ", canExecute is " + file.canExecute()); } LOG.info("Remove " + fileName + " successfully!"); isSuccess = true; return; } finally { if (isSuccess) { delFile.delete(); } else { delFile.renameTo(file); } } } else { LOG.error("No such file[" + fileName + "]"); throw new SQLException("No such file[" + fileName + "]"); } } /** * Remove files via regular formulation * * @param pattern: * to be deleted * @param names * : file names to be deleted * @throws SQLException * @throws IOException */ public static void rmRex(String pattern, String[] names) throws SQLException, IOException { checkFileName(pattern); Connection conn = getConn(); LOG.info("Try to remove files[" + pattern + "]"); String userPath = getCodeFilePath(conn); close(conn); File[] files = getFiles(pattern, new File(userPath)); File[] delFiles = new File[files.length]; StringBuilder sb = new StringBuilder(); sb.append("<rmRex>"); sb.append(toXML(files, "rmList")); sb.append("<message>"); boolean hasError = false; boolean isSuccess = false; try { for (int i = 0; i < files.length; i++) { delFiles[i] = new File(files[i].getAbsolutePath() + DEL_POSTFIX); files[i].renameTo(delFiles[i]); } rmJar(userPath, pattern); isSuccess = true; } finally { if (isSuccess) { for (int i = 0; i < delFiles.length; i++) { delFiles[i].delete(); } } else { for (int i = 0; i < delFiles.length; i++) { delFiles[i].renameTo(files[i]); } } } if (!hasError) { sb.append("Remove the files successfully!"); } sb.append("</message>"); sb.append("</rmRex>"); names[0] = sb.toString(); LOG.info("Done for removing files[" + pattern + "]."); } public static void lsAll(String[] names) throws SQLException { ls("*", names); } /** * list the Jars matching PATTERN * * @param pattern: * @param names * @throws SQLException */ public static void ls(String pattern, String[] names) throws SQLException { checkFileName(pattern); Connection conn = getConn(); LOG.info("List files[" + pattern + "]"); String userPath = getCodeFilePath(conn); close(conn); File dir = new File(userPath); if (!dir.exists() || !dir.isDirectory()) { LOG.error("Directory [" + userPath + "] is not found!"); throw new SQLException("Directory [" + userPath + "] is not found!"); } if (pattern == null) { LOG.error("File pattern should not be empty!"); throw new SQLException("Pattern is empty!"); } File[] files = getFiles(pattern, dir); names[0] = toXML(files, "ls"); } /** * upload a JAR file * * @param fileData * @param fileName * @param appendFlag * 0: append; otherwise overwrite * @throws SQLException */ public static void put(String fileData, String fileName, int appendFlag) throws SQLException { checkFileName(fileName); try { byte[] data = fileData.getBytes(CHARTSET); Connection conn = getConn(); LOG.info("Put " + fileName + ", length: " + data.length + ", file string length:" + fileData.length()); String userPath = getCodeFilePath(conn); close(conn); String fname = userPath + fileName; checkFile(fname, data.length); FileOutputStream fos = null; FileChannel channel = null; FileLock lock = null; try { fos = new FileOutputStream(fname, (appendFlag == 0)); channel = fos.getChannel(); lock = channel.tryLock(); if (lock != null) { fos.write(data); fos.flush(); }else{ throw new SQLException("File "+fileName+" is locked, please try again later."); } } finally { if(lock != null){ lock.release(); } if(channel !=null){ channel.close(); } if (fos != null) fos.close(); } syncJar(userPath, fileName); LOG.info("PUT method out !!! 
" + fileName); } catch (Throwable t) { LOG.error(t.getMessage(), t); throw new SQLException(t.getMessage()); } } private static void checkFileName(String fileName) throws SQLException { if (fileName.contains("/") || fileName.contains("\\")) throw new SQLException("Illegal file name: " + fileName + ". File name must not contain \"/\"."); } private static void checkFile(String fname, int dataSize) throws SQLException { File jar = new File(fname); if (jar.length() + dataSize > MAX_JAR_FILE_SIZE) { LOG.error("Jar file size is over the threshold[100Mb]"); throw new SQLException("Jar file size is over the threshold[100Mb]"); } } private static String getCodeFilePath(Connection conn) throws SQLException { String user = getCurrentUser(conn); String root = System.getenv("MY_SQROOT"); if (root == null || "".equals(root.trim())) { LOG.error("Cant get your traf installation path!"); throw new SQLException("Cant get your traf installation path!"); } File file = new File(root + "/udr/lib/" + user); if (!file.exists()) { file.mkdirs(); } else if (!file.isDirectory()) { throw new SQLException("User Directory is not valide or you dont have permission!"); } LOG.info("SPJ JARs location: " + file.getAbsolutePath()); return file.getAbsolutePath() + "/"; } private static Connection getConn() throws SQLException { Connection conn = null; try { conn = DriverManager.getConnection(url); LOG.info("Create connection successfully. " + conn +", autocommit:"+conn.getAutoCommit()); } catch (Throwable t) { LOG.error("Error encountered while getting connection ", t); throw new SQLException(t.getMessage()); } return conn; } private static String getCurrentUser(Connection conn) throws SQLException { Statement st = null; ResultSet rs = null; String user = null; try { st = conn.createStatement(); rs = st.executeQuery("values(session_user)"); if (rs.next()) { user = rs.getString(1); } } catch (Exception e) { LOG.error(e.getMessage(), e); throw new SQLException(e); } finally { if (rs != null) { try { rs.close(); } catch (Exception e) { LOG.warn(e.getMessage(), e); } } if (st != null) { try { st.close(); } catch (Exception e) { LOG.warn(e.getMessage(), e); } } } return user.replaceAll("[\\\\/]", "_"); } private static File[] getFiles(String pattern, File dir) { final String p = pattern.replaceAll("\\*", ".*").trim().toUpperCase(); return dir.listFiles(new FileFilter() { @Override public boolean accept(File name) { if (name == null || !name.isFile()) { return false; } return name.getName().trim().toUpperCase().matches(p); } }); } private static String toXML(File[] files, String root) { StringBuilder sb = new StringBuilder(); sb.append("<" + root + ">"); for (File f : files) { sb.append("<file name='" + f.getName() + "' lastModifyTime='" + format.format(new Date(f.lastModified())) + "' size='" + f.length() + "'/>"); } sb.append("</" + root + ">"); return sb.toString(); } private static void close(Connection conn) { try { conn.close(); LOG.info("Closed connection"); } catch (Exception e) { LOG.warn(e.getMessage()); } } }
Do not show files with the DELETE postfix
core/sql/lib_mgmt/src/main/java/org/trafodion/libmgmt/FileMgmt.java
Do not show files with the DELETE postfix
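One detail of the FileMgmt listing in this record worth isolating is how put() serializes concurrent uploads: it takes a non-blocking exclusive FileLock on the output channel and refuses the write when another process already holds the lock. The sketch below is an illustration of that pattern only, not the FileMgmt implementation; the class name, target path, and payload are placeholders, and the real method additionally enforces the jar-size limit, syncs the file across nodes, and surfaces failures as SQLException.

// Minimal sketch of the lock-guarded write used by FileMgmt.put(): take a
// non-blocking exclusive lock, fail fast if it is unavailable, and always
// release the lock before the channel is closed. Path and payload are placeholders.
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.charset.StandardCharsets;

public class LockedWriteSketch {

    public static void write(String path, byte[] data, boolean append) throws IOException {
        try (FileOutputStream fos = new FileOutputStream(path, append);
             FileChannel channel = fos.getChannel()) {
            FileLock lock = channel.tryLock(); // null if another process holds the lock
            if (lock == null) {
                throw new IOException("File " + path + " is locked, please try again later.");
            }
            try {
                fos.write(data);
                fos.flush();
            } finally {
                lock.release();
            }
        }
    }

    public static void main(String[] args) throws IOException {
        write("example.jar.part", "payload".getBytes(StandardCharsets.UTF_8), false);
    }
}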
Java
apache-2.0
46cfaa0ef8e68631da140e3f74fccb0ea6360835
0
klehmann/domino-jna
package com.mindoo.domino.jna.dxl; import java.io.IOException; import java.io.OutputStream; import java.io.Writer; import java.nio.charset.Charset; import java.util.Collection; import java.util.Collections; import java.util.List; import com.mindoo.domino.jna.NotesDatabase; import com.mindoo.domino.jna.NotesIDTable; import com.mindoo.domino.jna.NotesNote; import com.mindoo.domino.jna.errors.NotesError; import com.mindoo.domino.jna.errors.NotesErrorUtils; import com.mindoo.domino.jna.gc.IAllocatedMemory; import com.mindoo.domino.jna.gc.NotesGC; import com.mindoo.domino.jna.internal.DisposableMemory; import com.mindoo.domino.jna.internal.ItemDecoder; import com.mindoo.domino.jna.internal.LMBCSStringList; import com.mindoo.domino.jna.internal.Mem32; import com.mindoo.domino.jna.internal.Mem64; import com.mindoo.domino.jna.internal.NotesCallbacks; import com.mindoo.domino.jna.internal.NotesConstants; import com.mindoo.domino.jna.internal.NotesNativeAPI; import com.mindoo.domino.jna.internal.NotesNativeAPI32; import com.mindoo.domino.jna.internal.NotesNativeAPI64; import com.mindoo.domino.jna.internal.Win32NotesCallbacks; import com.mindoo.domino.jna.internal.WriterOutputStream; import com.mindoo.domino.jna.utils.NotesStringUtils; import com.mindoo.domino.jna.utils.PlatformUtils; import com.sun.jna.Memory; import com.sun.jna.Native; import com.sun.jna.Pointer; import com.sun.jna.ptr.IntByReference; /** * DXL Exporter<br> * <br> * <b>Please make sure that you include the file jvm/lib/ext/websvc.jar of the Notes Client / Domino server * directory to the Java classpath if your code is not running within Notes/Domino, * but in a standalone application.<br> * <br> * Otherwise you might experience crashes during DXL export (we had crashes when testing the DB export).<br> * </b> * * @author Karsten Lehmann */ public class DXLExporter implements IAllocatedMemory { private int m_hExporter; public DXLExporter() { IntByReference rethDXLExport = new IntByReference(); short result = NotesNativeAPI.get().DXLCreateExporter(rethDXLExport); NotesErrorUtils.checkResult(result); m_hExporter = rethDXLExport.getValue(); if (m_hExporter==0) { throw new NotesError(0, "Failed to allocate DXL exporter"); } NotesGC.__memoryAllocated(this); } @Override public void free() { if (isFreed()) { return; } if (m_hExporter!=0) { NotesNativeAPI.get().DXLDeleteExporter(m_hExporter); m_hExporter = 0; } } @Override public boolean isFreed() { return m_hExporter==0; } @Override public int getHandle32() { return m_hExporter; } @Override public long getHandle64() { return m_hExporter; } private void checkHandle() { if (m_hExporter==0) throw new NotesError(0, "DXL exporter already freed"); if (PlatformUtils.is64Bit()) { NotesGC.__b64_checkValidMemHandle(DXLExporter.class, m_hExporter); } else { NotesGC.__b32_checkValidMemHandle(DXLExporter.class, m_hExporter); } } public boolean exportErrorWasLogged() { checkHandle(); short logged = NotesNativeAPI.get().DXLExportWasErrorLogged(m_hExporter); return logged == 1; } /** * Export a single Note into XML format. * * @param note note to export * @param out result writer * @throws IOException in case of I/O errors */ public void exportNote(NotesNote note, Writer out) throws IOException { WriterOutputStream outStream = new WriterOutputStream(out, Charset.forName("UTF-8")); exportNote(note, outStream); outStream.flush(); } /** * Export a single Note into XML format. 
* * @param note note to export * @param out result stream * @throws IOException in case of I/O errors */ public void exportNote(NotesNote note, final OutputStream out) throws IOException { checkHandle(); if (note.isRecycled()) { throw new NotesError(0, "Note is recycled"); } NotesCallbacks.XML_WRITE_FUNCTION callback; final Exception[] ex = new Exception[1]; if (PlatformUtils.isWin32()) { callback = new Win32NotesCallbacks.XML_WRITE_FUNCTIONWin32() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } else { callback = new NotesCallbacks.XML_WRITE_FUNCTION() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } short result; if (PlatformUtils.is64Bit()) { result = NotesNativeAPI64.get().DXLExportNote(m_hExporter, callback, note.getHandle64(), (Pointer) null); } else { result = NotesNativeAPI32.get().DXLExportNote(m_hExporter, callback, note.getHandle32(), (Pointer) null); } NotesErrorUtils.checkResult(result); if (ex[0] instanceof IOException) { throw (IOException) ex[0]; } else if (ex[0]!=null) { throw new NotesError(0, "Error during DXL export of note "+note+" in database "+ note.getParent().getServer()+"!!"+note.getParent().getRelativeFilePath(), ex[0]); } } /** * Export a set of note ids into XML format. * * @param db database containing the export ids * @param ids ids to export * @param out result writer * @throws IOException in case of I/O errors */ public void exportIDs(NotesDatabase db, Collection<Integer> ids, Writer out) throws IOException { NotesIDTable idTable = new NotesIDTable(ids); try { exportIDTable(db, idTable, out); } finally { idTable.recycle(); } } /** * Export an IDTable of notes into XML format. * * @param db database containing the export ids * @param idTable IDTable to export * @param out result writer * @throws IOException in case of I/O errors */ public void exportIDTable(NotesDatabase db, NotesIDTable idTable, Writer out) throws IOException { WriterOutputStream outStream = new WriterOutputStream(out, Charset.forName("UTF-8")); exportIDTable(db, idTable, outStream); outStream.flush(); } /** * Export an IDTable of notes into XML format. 
* * @param db database containing the export ids * @param idTable IDTable to export * @param out result stream * @throws IOException in case of I/O errors */ public void exportIDTable(NotesDatabase db, NotesIDTable idTable, final OutputStream out) throws IOException { checkHandle(); if (db.isRecycled()) { throw new NotesError(0, "Database is recycled"); } if (idTable.isRecycled()) { throw new NotesError(0, "IDTable is recycled"); } NotesCallbacks.XML_WRITE_FUNCTION callback; final Exception[] ex = new Exception[1]; if (PlatformUtils.isWin32()) { callback = new Win32NotesCallbacks.XML_WRITE_FUNCTIONWin32() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } else { callback = new NotesCallbacks.XML_WRITE_FUNCTION() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } short result; if (PlatformUtils.is64Bit()) { result = NotesNativeAPI64.get().DXLExportIDTable(m_hExporter, callback, db.getHandle64(), idTable.getHandle64(), (Pointer) null); } else { result = NotesNativeAPI32.get().DXLExportIDTable(m_hExporter, callback, db.getHandle32(), idTable.getHandle32(), (Pointer) null); } NotesErrorUtils.checkResult(result); if (ex[0] instanceof IOException) { throw (IOException) ex[0]; } else if (ex[0]!=null) { throw new NotesError(0, "Error during DXL export of "+idTable+" in database "+ db.getServer()+"!!"+db.getRelativeFilePath(), ex[0]); } } /** * Export an entire database in XML format. * * @param db database to export * @param out result stream * @throws IOException in case of I/O errors */ public void exportDatabase(NotesDatabase db, final OutputStream out) throws IOException { checkHandle(); if (db.isRecycled()) { throw new NotesError(0, "Database is recycled"); } NotesCallbacks.XML_WRITE_FUNCTION callback; final Exception[] ex = new Exception[1]; if (PlatformUtils.isWin32()) { callback = new Win32NotesCallbacks.XML_WRITE_FUNCTIONWin32() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } else { callback = new NotesCallbacks.XML_WRITE_FUNCTION() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } short result; if (PlatformUtils.is64Bit()) { result = NotesNativeAPI64.get().DXLExportDatabase(m_hExporter, callback, db.getHandle64(), (Pointer) null); } else { result = NotesNativeAPI32.get().DXLExportDatabase(m_hExporter, callback, db.getHandle32(), (Pointer) null); } NotesErrorUtils.checkResult(result); if (ex[0] instanceof IOException) { throw (IOException) ex[0]; } else if (ex[0]!=null) { throw new NotesError(0, "Error during DXL export of database "+ db.getServer()+"!!"+db.getRelativeFilePath(), ex[0]); } } /** * Export the ACL of the specified database in XML format. 
* * @param db database to export * @param out result stream * @throws IOException in case of I/O errors */ public void exportACL(NotesDatabase db, final OutputStream out) throws IOException { checkHandle(); if (db.isRecycled()) { throw new NotesError(0, "Database is recycled"); } NotesCallbacks.XML_WRITE_FUNCTION callback; final Exception[] ex = new Exception[1]; if (PlatformUtils.isWin32()) { callback = new Win32NotesCallbacks.XML_WRITE_FUNCTIONWin32() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } else { callback = new NotesCallbacks.XML_WRITE_FUNCTION() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } short result; if (PlatformUtils.is64Bit()) { result = NotesNativeAPI64.get().DXLExportACL(m_hExporter, callback, db.getHandle64(), (Pointer) null); } else { result = NotesNativeAPI32.get().DXLExportACL(m_hExporter, callback, db.getHandle32(), (Pointer) null); } NotesErrorUtils.checkResult(result); if (ex[0] instanceof IOException) { throw (IOException) ex[0]; } else if (ex[0]!=null) { throw new NotesError(0, "Error during DXL export of database ACL for "+ db.getServer()+"!!"+db.getRelativeFilePath(), ex[0]); } } private void setBooleanProperty(short index, boolean value) { checkHandle(); DisposableMemory m = new DisposableMemory(Native.BOOL_SIZE); try { m.setByte(0, (byte) (value ? 1 : 0)); short result = NotesNativeAPI.get().DXLSetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); } finally { m.dispose(); } } private boolean getBooleanProperty(short index) { checkHandle(); DisposableMemory m = new DisposableMemory(2); try { short result = NotesNativeAPI.get().DXLGetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); short boolAsShort = m.getShort(0); return boolAsShort != 0; } finally { m.dispose(); } } private void setStringProperty(short index, String str) { checkHandle(); if (str==null) { str = ""; } Memory strAsLMBCs = NotesStringUtils.toLMBCS(str, true); short result = NotesNativeAPI.get().DXLSetExporterProperty(m_hExporter, index, strAsLMBCs); NotesErrorUtils.checkResult(result); } private String getStringFromMemhandle(short index) { checkHandle(); DisposableMemory m = new DisposableMemory(4); try { short result = NotesNativeAPI.get().DXLGetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); int memHandle = m.getInt(0); if (memHandle==0) { return ""; } if (PlatformUtils.is64Bit()) { Pointer ptr = Mem64.OSMemoryLock(memHandle); try { String str = NotesStringUtils.fromLMBCS(ptr, -1); return str; } finally { Mem64.OSMemoryUnlock(memHandle); Mem64.OSMemoryFree(memHandle); } } else { Pointer ptr = Mem32.OSMemoryLock(memHandle); try { String str = NotesStringUtils.fromLMBCS(ptr, -1); return str; } finally { Mem32.OSMemoryUnlock(memHandle); Mem32.OSMemoryFree(memHandle); } } } finally { m.dispose(); } } private void setStringList(short index, List<String> values) { LMBCSStringList lmbcsStrList = new LMBCSStringList(values, false); try { if (PlatformUtils.is64Bit()) { DisposableMemory m = new DisposableMemory(8); try { m.setLong(0, lmbcsStrList.getHandle64()); short result = NotesNativeAPI.get().DXLSetExporterProperty(m_hExporter, index, m); 
NotesErrorUtils.checkResult(result); } finally { m.dispose(); } } else { DisposableMemory m = new DisposableMemory(4); try { m.setInt(0, lmbcsStrList.getHandle32()); short result = NotesNativeAPI.get().DXLSetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); } finally { m.dispose(); } } } finally { lmbcsStrList.free(); } } public List<String> getStringList(short index) { checkHandle(); DisposableMemory m = new DisposableMemory(4); try { short result = NotesNativeAPI.get().DXLGetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); int handle = m.getInt(0); if (handle==0) { return Collections.emptyList(); } if (PlatformUtils.is64Bit()) { Pointer pList = Mem64.OSLockObject(handle); try { @SuppressWarnings("rawtypes") List list = ItemDecoder.decodeTextListValue(pList, false); return list; } finally { Mem64.OSUnlockObject(handle); result = Mem64.OSMemFree(handle); NotesErrorUtils.checkResult(result); } } else { Pointer pList = Mem32.OSLockObject(handle); try { @SuppressWarnings("rawtypes") List list = ItemDecoder.decodeTextListValue(pList, false); return list; } finally { Mem32.OSUnlockObject(handle); result = Mem32.OSMemFree(handle); NotesErrorUtils.checkResult(result); } } } finally { m.dispose(); } } public boolean isOutputXmlDecl() { return getBooleanProperty(NotesConstants.eOutputXmlDecl); } public void setOutputXmlDecl(boolean b) { setBooleanProperty(NotesConstants.eOutputXmlDecl, b); } public boolean isOutputDoctype() { return getBooleanProperty(NotesConstants.eOutputDOCTYPE); } public void setOutputDoctype(boolean b) { setBooleanProperty(NotesConstants.eOutputDOCTYPE, b); } public boolean isConvertNotesbitmapsToGIF() { return getBooleanProperty(NotesConstants.eConvertNotesbitmapsToGIF); } public void setConvertNotesbitmapsToGIF(boolean b) { setBooleanProperty(NotesConstants.eConvertNotesbitmapsToGIF, b); } public boolean isOmitRichtextAttachments() { return getBooleanProperty(NotesConstants.eOmitRichtextAttachments); } public void setOmitRichtextAttachments(boolean b) { setBooleanProperty(NotesConstants.eOmitRichtextAttachments, b); } public boolean isOmitOLEObjects() { return getBooleanProperty(NotesConstants.eOmitOLEObjects); } public void setOmitOLEObjects(boolean b) { setBooleanProperty(NotesConstants.eOmitOLEObjects, b); } public boolean isOmitMiscFileObjects() { return getBooleanProperty(NotesConstants.eOmitMiscFileObjects); } public void setOmitMiscFileObjects(boolean b) { setBooleanProperty(NotesConstants.eOmitMiscFileObjects, b); } public boolean isOmitPictures() { return getBooleanProperty(NotesConstants.eOmitPictures); } public void setOmitPictures(boolean b) { setBooleanProperty(NotesConstants.eOmitPictures, b); } public boolean isUncompressAttachments() { return getBooleanProperty(NotesConstants.eUncompressAttachments); } public void setUncompressAttachments(boolean b) { setBooleanProperty(NotesConstants.eUncompressAttachments, b); } public String getDxlExportResultLog() { return getStringFromMemhandle(NotesConstants.eDxlExportResultLog); } public String getDefaultDoctypeSYSTEM() { return getStringFromMemhandle(NotesConstants.eDefaultDoctypeSYSTEM); } public String getDoctypeSYSTEM() { return getStringFromMemhandle(NotesConstants.eDoctypeSYSTEM); } public void setDoctypeSYSTEM(String docType) { setStringProperty(NotesConstants.eDoctypeSYSTEM, docType); } public String getDXLBannerComments() { return getStringFromMemhandle(NotesConstants.eDXLBannerComments); } public void setDXLBannerComments(String comments) { 
setStringProperty(NotesConstants.eDXLBannerComments, comments); } public String getDxlExportResultLogComment() { return getStringFromMemhandle(NotesConstants.eDxlExportResultLogComment); } public void setDxlExportResultLogComment(String comment) { setStringProperty(NotesConstants.eDxlExportResultLogComment, comment); } public String getDxlDefaultSchemaLocation() { return getStringFromMemhandle(NotesConstants.eDxlDefaultSchemaLocation); } public String getDxlSchemaLocation() { return getStringFromMemhandle(NotesConstants.eDxlSchemaLocation); } public void setDxlSchemaLocation(String loc) { setStringProperty(NotesConstants.eDxlSchemaLocation, loc); } public String getAttachmentOmittedText() { return getStringFromMemhandle(NotesConstants.eAttachmentOmittedText); } public void setAttachmentOmittedText(String txt) { setStringProperty(NotesConstants.eAttachmentOmittedText, txt); } public String getOLEObjectOmittedText() { return getStringFromMemhandle(NotesConstants.eOLEObjectOmittedText); } public void setOLEObjectOmittedText(String txt) { setStringProperty(NotesConstants.eOLEObjectOmittedText, txt); } public String getPictureOmittedText() { return getStringFromMemhandle(NotesConstants.ePictureOmittedText); } public void setPictureOmittedText(String txt) { setStringProperty(NotesConstants.ePictureOmittedText, txt); } public List<String> getOmitItemNames() { return getStringList(NotesConstants.eOmitItemNames); } public void setOmitItemNames(List<String> itemNames) { setStringList(NotesConstants.eOmitItemNames, itemNames); } public List<String> getRestrictToItemNames() { return getStringList(NotesConstants.eRestrictToItemNames); } public void setRestrictToItemNames(List<String> itemNames) { setStringList(NotesConstants.eRestrictToItemNames, itemNames); } }
domino-jna/src/main/java/com/mindoo/domino/jna/dxl/DXLExporter.java
package com.mindoo.domino.jna.dxl; import java.io.IOException; import java.io.OutputStream; import java.io.Writer; import java.nio.charset.Charset; import java.util.Collection; import java.util.Collections; import java.util.List; import com.mindoo.domino.jna.NotesDatabase; import com.mindoo.domino.jna.NotesIDTable; import com.mindoo.domino.jna.NotesNote; import com.mindoo.domino.jna.errors.NotesError; import com.mindoo.domino.jna.errors.NotesErrorUtils; import com.mindoo.domino.jna.gc.IAllocatedMemory; import com.mindoo.domino.jna.gc.NotesGC; import com.mindoo.domino.jna.internal.DisposableMemory; import com.mindoo.domino.jna.internal.ItemDecoder; import com.mindoo.domino.jna.internal.LMBCSStringList; import com.mindoo.domino.jna.internal.Mem32; import com.mindoo.domino.jna.internal.Mem64; import com.mindoo.domino.jna.internal.NotesCallbacks; import com.mindoo.domino.jna.internal.NotesConstants; import com.mindoo.domino.jna.internal.NotesNativeAPI; import com.mindoo.domino.jna.internal.NotesNativeAPI32; import com.mindoo.domino.jna.internal.NotesNativeAPI64; import com.mindoo.domino.jna.internal.Win32NotesCallbacks; import com.mindoo.domino.jna.internal.WriterOutputStream; import com.mindoo.domino.jna.utils.NotesStringUtils; import com.mindoo.domino.jna.utils.PlatformUtils; import com.sun.jna.Memory; import com.sun.jna.Native; import com.sun.jna.Pointer; import com.sun.jna.ptr.IntByReference; import lotus.domino.NotesException; import lotus.domino.Session; /** * DXL Exporter<br> * <br> * <b>Please make sure that you include the file jvm/lib/ext/websvc.jar of the Notes Client / Domino server * directory to the Java classpath if your code is not running within Notes/Domino, * but in a standalone application.<br> * <br> * Otherwise you might experience crashes during DXL export (we had crashes when testing the DB export).<br> * </b> * * @author Karsten Lehmann */ public class DXLExporter implements IAllocatedMemory { private int m_hExporter; public DXLExporter() { IntByReference rethDXLExport = new IntByReference(); short result = NotesNativeAPI.get().DXLCreateExporter(rethDXLExport); NotesErrorUtils.checkResult(result); m_hExporter = rethDXLExport.getValue(); if (m_hExporter==0) { throw new NotesError(0, "Failed to allocate DXL exporter"); } NotesGC.__memoryAllocated(this); } @Override public void free() { if (isFreed()) { return; } if (m_hExporter!=0) { NotesNativeAPI.get().DXLDeleteExporter(m_hExporter); m_hExporter = 0; } } @Override public boolean isFreed() { return m_hExporter==0; } @Override public int getHandle32() { return m_hExporter; } @Override public long getHandle64() { return m_hExporter; } private void checkHandle() { if (m_hExporter==0) throw new NotesError(0, "DXL exporter already freed"); if (PlatformUtils.is64Bit()) { NotesGC.__b64_checkValidMemHandle(DXLExporter.class, m_hExporter); } else { NotesGC.__b32_checkValidMemHandle(DXLExporter.class, m_hExporter); } } public boolean exportErrorWasLogged() { checkHandle(); short logged = NotesNativeAPI.get().DXLExportWasErrorLogged(m_hExporter); return logged == 1; } /** * Export a single Note into XML format. * * @param note note to export * @param out result writer * @throws IOException in case of I/O errors */ public void exportNote(NotesNote note, Writer out) throws IOException { WriterOutputStream outStream = new WriterOutputStream(out, Charset.forName("UTF-8")); exportNote(note, outStream); outStream.flush(); } /** * Export a single Note into XML format. 
* * @param note note to export * @param out result stream * @throws IOException in case of I/O errors */ public void exportNote(NotesNote note, final OutputStream out) throws IOException { checkHandle(); if (note.isRecycled()) { throw new NotesError(0, "Note is recycled"); } NotesCallbacks.XML_WRITE_FUNCTION callback; final Exception[] ex = new Exception[1]; if (PlatformUtils.isWin32()) { callback = new Win32NotesCallbacks.XML_WRITE_FUNCTIONWin32() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } else { callback = new NotesCallbacks.XML_WRITE_FUNCTION() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } short result; if (PlatformUtils.is64Bit()) { result = NotesNativeAPI64.get().DXLExportNote(m_hExporter, callback, note.getHandle64(), (Pointer) null); } else { result = NotesNativeAPI32.get().DXLExportNote(m_hExporter, callback, note.getHandle32(), (Pointer) null); } NotesErrorUtils.checkResult(result); if (ex[0] instanceof IOException) { throw (IOException) ex[0]; } else if (ex[0]!=null) { throw new NotesError(0, "Error during DXL export of note "+note+" in database "+ note.getParent().getServer()+"!!"+note.getParent().getRelativeFilePath(), ex[0]); } } /** * Export an IDTable of notes into XML format. * * @param db database containing the export ids * @param ids ids to export * @param out result writer * @throws IOException in case of I/O errors */ public void exportIDs(NotesDatabase db, Collection<Integer> ids, Writer out) throws IOException { NotesIDTable idTable = new NotesIDTable(ids); try { exportIDTable(db, idTable, out); } finally { idTable.recycle(); } } /** * Export an IDTable of notes into XML format. * * @param db database containing the export ids * @param idTable IDTable to export * @param out result writer * @throws IOException in case of I/O errors */ public void exportIDTable(NotesDatabase db, NotesIDTable idTable, Writer out) throws IOException { WriterOutputStream outStream = new WriterOutputStream(out, Charset.forName("UTF-8")); exportIDTable(db, idTable, outStream); outStream.flush(); } /** * Export an IDTable of notes into XML format. 
* * @param db database containing the export ids * @param idTable IDTable to export * @param out result stream * @throws IOException in case of I/O errors */ public void exportIDTable(NotesDatabase db, NotesIDTable idTable, final OutputStream out) throws IOException { checkHandle(); if (db.isRecycled()) { throw new NotesError(0, "Database is recycled"); } if (idTable.isRecycled()) { throw new NotesError(0, "IDTable is recycled"); } NotesCallbacks.XML_WRITE_FUNCTION callback; final Exception[] ex = new Exception[1]; if (PlatformUtils.isWin32()) { callback = new Win32NotesCallbacks.XML_WRITE_FUNCTIONWin32() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } else { callback = new NotesCallbacks.XML_WRITE_FUNCTION() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } short result; if (PlatformUtils.is64Bit()) { result = NotesNativeAPI64.get().DXLExportIDTable(m_hExporter, callback, db.getHandle64(), idTable.getHandle64(), (Pointer) null); } else { result = NotesNativeAPI32.get().DXLExportIDTable(m_hExporter, callback, db.getHandle32(), idTable.getHandle32(), (Pointer) null); } NotesErrorUtils.checkResult(result); if (ex[0] instanceof IOException) { throw (IOException) ex[0]; } else if (ex[0]!=null) { throw new NotesError(0, "Error during DXL export of "+idTable+" in database "+ db.getServer()+"!!"+db.getRelativeFilePath(), ex[0]); } } /** * Export an entire database in XML format. * * @param db database to export * @param out result stream * @throws IOException in case of I/O errors */ public void exportDatabase(NotesDatabase db, final OutputStream out) throws IOException { checkHandle(); if (db.isRecycled()) { throw new NotesError(0, "Database is recycled"); } NotesCallbacks.XML_WRITE_FUNCTION callback; final Exception[] ex = new Exception[1]; if (PlatformUtils.isWin32()) { callback = new Win32NotesCallbacks.XML_WRITE_FUNCTIONWin32() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } else { callback = new NotesCallbacks.XML_WRITE_FUNCTION() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } short result; if (PlatformUtils.is64Bit()) { result = NotesNativeAPI64.get().DXLExportDatabase(m_hExporter, callback, db.getHandle64(), (Pointer) null); } else { result = NotesNativeAPI32.get().DXLExportDatabase(m_hExporter, callback, db.getHandle32(), (Pointer) null); } NotesErrorUtils.checkResult(result); if (ex[0] instanceof IOException) { throw (IOException) ex[0]; } else if (ex[0]!=null) { throw new NotesError(0, "Error during DXL export of database "+ db.getServer()+"!!"+db.getRelativeFilePath(), ex[0]); } } /** * Export the ACL of the specified database in XML format. 
* * @param db database to export * @param out result stream * @throws IOException in case of I/O errors */ public void exportACL(NotesDatabase db, final OutputStream out) throws IOException { checkHandle(); if (db.isRecycled()) { throw new NotesError(0, "Database is recycled"); } NotesCallbacks.XML_WRITE_FUNCTION callback; final Exception[] ex = new Exception[1]; if (PlatformUtils.isWin32()) { callback = new Win32NotesCallbacks.XML_WRITE_FUNCTIONWin32() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } else { callback = new NotesCallbacks.XML_WRITE_FUNCTION() { @Override public void invoke(Pointer bBuffer, int length, Pointer pAction) { if (ex[0] == null && length>0) { try { byte[] data = bBuffer.getByteArray(0, length); out.write(data); } catch (Exception t) { ex[0] = t; } } } }; } short result; if (PlatformUtils.is64Bit()) { result = NotesNativeAPI64.get().DXLExportACL(m_hExporter, callback, db.getHandle64(), (Pointer) null); } else { result = NotesNativeAPI32.get().DXLExportACL(m_hExporter, callback, db.getHandle32(), (Pointer) null); } NotesErrorUtils.checkResult(result); if (ex[0] instanceof IOException) { throw (IOException) ex[0]; } else if (ex[0]!=null) { throw new NotesError(0, "Error during DXL export of database ACL for "+ db.getServer()+"!!"+db.getRelativeFilePath(), ex[0]); } } private void tbd(Session session) throws NotesException { //TODO session.createDxlExporter().getAttachmentOmittedText(); session.createDxlExporter().getConvertNotesBitmapsToGIF(); session.createDxlExporter().getDoctypeSYSTEM(); session.createDxlExporter().getExitOnFirstFatalError(); session.createDxlExporter().getForceNoteFormat(); session.createDxlExporter().getLog(); session.createDxlExporter().getLogComment(); session.createDxlExporter().getMIMEOption(); session.createDxlExporter().getOLEObjectOmittedText(); session.createDxlExporter().getOmitItemNames(); session.createDxlExporter().getOmitMiscFileObjects(); session.createDxlExporter().getOmitOLEObjects(); session.createDxlExporter().getOmitRichtextAttachments(); session.createDxlExporter().getOmitRichtextPictures(); session.createDxlExporter().getOutputDOCTYPE(); session.createDxlExporter().getPictureOmittedText(); session.createDxlExporter().getRestrictToItemNames(); session.createDxlExporter().getRichTextOption(); session.createDxlExporter().getUncompressAttachments(); } private void setBooleanProperty(short index, boolean value) { checkHandle(); DisposableMemory m = new DisposableMemory(Native.BOOL_SIZE); try { m.setByte(0, (byte) (value ? 
1 : 0)); short result = NotesNativeAPI.get().DXLSetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); } finally { m.dispose(); } } private boolean getBooleanProperty(short index) { checkHandle(); DisposableMemory m = new DisposableMemory(2); try { short result = NotesNativeAPI.get().DXLGetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); short boolAsShort = m.getShort(0); return boolAsShort != 0; } finally { m.dispose(); } } private void setStringProperty(short index, String str) { checkHandle(); if (str==null) { str = ""; } Memory strAsLMBCs = NotesStringUtils.toLMBCS(str, true); short result = NotesNativeAPI.get().DXLSetExporterProperty(m_hExporter, index, strAsLMBCs); NotesErrorUtils.checkResult(result); } private String getStringFromMemhandle(short index) { checkHandle(); DisposableMemory m = new DisposableMemory(4); try { short result = NotesNativeAPI.get().DXLGetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); int memHandle = m.getInt(0); if (memHandle==0) { return ""; } if (PlatformUtils.is64Bit()) { Pointer ptr = Mem64.OSMemoryLock(memHandle); try { String str = NotesStringUtils.fromLMBCS(ptr, -1); return str; } finally { Mem64.OSMemoryUnlock(memHandle); Mem64.OSMemoryFree(memHandle); } } else { Pointer ptr = Mem32.OSMemoryLock(memHandle); try { String str = NotesStringUtils.fromLMBCS(ptr, -1); return str; } finally { Mem32.OSMemoryUnlock(memHandle); Mem32.OSMemoryFree(memHandle); } } } finally { m.dispose(); } } private void setStringList(short index, List<String> values) { LMBCSStringList lmbcsStrList = new LMBCSStringList(values, false); try { if (PlatformUtils.is64Bit()) { DisposableMemory m = new DisposableMemory(8); try { m.setLong(0, lmbcsStrList.getHandle64()); short result = NotesNativeAPI.get().DXLSetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); } finally { m.dispose(); } } else { DisposableMemory m = new DisposableMemory(4); try { m.setInt(0, lmbcsStrList.getHandle32()); short result = NotesNativeAPI.get().DXLSetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); } finally { m.dispose(); } } } finally { lmbcsStrList.free(); } } public List<String> getStringList(short index) { checkHandle(); DisposableMemory m = new DisposableMemory(4); try { short result = NotesNativeAPI.get().DXLGetExporterProperty(m_hExporter, index, m); NotesErrorUtils.checkResult(result); int handle = m.getInt(0); if (handle==0) { return Collections.emptyList(); } if (PlatformUtils.is64Bit()) { Pointer pList = Mem64.OSLockObject(handle); try { @SuppressWarnings("rawtypes") List list = ItemDecoder.decodeTextListValue(pList, false); return list; } finally { Mem64.OSUnlockObject(handle); result = Mem64.OSMemFree(handle); NotesErrorUtils.checkResult(result); } } else { Pointer pList = Mem32.OSLockObject(handle); try { @SuppressWarnings("rawtypes") List list = ItemDecoder.decodeTextListValue(pList, false); return list; } finally { Mem32.OSUnlockObject(handle); result = Mem32.OSMemFree(handle); NotesErrorUtils.checkResult(result); } } } finally { m.dispose(); } } public boolean isOutputXmlDecl() { return getBooleanProperty(NotesConstants.eOutputXmlDecl); } public void setOutputXmlDecl(boolean b) { setBooleanProperty(NotesConstants.eOutputXmlDecl, b); } public boolean isOutputDoctype() { return getBooleanProperty(NotesConstants.eOutputDOCTYPE); } public void setOutputDoctype(boolean b) { setBooleanProperty(NotesConstants.eOutputDOCTYPE, b); } public boolean 
isConvertNotesbitmapsToGIF() { return getBooleanProperty(NotesConstants.eConvertNotesbitmapsToGIF); } public void setConvertNotesbitmapsToGIF(boolean b) { setBooleanProperty(NotesConstants.eConvertNotesbitmapsToGIF, b); } public boolean isOmitRichtextAttachments() { return getBooleanProperty(NotesConstants.eOmitRichtextAttachments); } public void setOmitRichtextAttachments(boolean b) { setBooleanProperty(NotesConstants.eOmitRichtextAttachments, b); } public boolean isOmitOLEObjects() { return getBooleanProperty(NotesConstants.eOmitOLEObjects); } public void setOmitOLEObjects(boolean b) { setBooleanProperty(NotesConstants.eOmitOLEObjects, b); } public boolean isOmitMiscFileObjects() { return getBooleanProperty(NotesConstants.eOmitMiscFileObjects); } public void setOmitMiscFileObjects(boolean b) { setBooleanProperty(NotesConstants.eOmitMiscFileObjects, b); } public boolean isOmitPictures() { return getBooleanProperty(NotesConstants.eOmitPictures); } public void setOmitPictures(boolean b) { setBooleanProperty(NotesConstants.eOmitPictures, b); } public boolean isUncompressAttachments() { return getBooleanProperty(NotesConstants.eUncompressAttachments); } public void setUncompressAttachments(boolean b) { setBooleanProperty(NotesConstants.eUncompressAttachments, b); } public String getDxlExportResultLog() { return getStringFromMemhandle(NotesConstants.eDxlExportResultLog); } public String getDefaultDoctypeSYSTEM() { return getStringFromMemhandle(NotesConstants.eDefaultDoctypeSYSTEM); } public String getDoctypeSYSTEM() { return getStringFromMemhandle(NotesConstants.eDoctypeSYSTEM); } public void setDoctypeSYSTEM(String docType) { setStringProperty(NotesConstants.eDoctypeSYSTEM, docType); } public String getDXLBannerComments() { return getStringFromMemhandle(NotesConstants.eDXLBannerComments); } public void setDXLBannerComments(String comments) { setStringProperty(NotesConstants.eDXLBannerComments, comments); } public String getDxlExportResultLogComment() { return getStringFromMemhandle(NotesConstants.eDxlExportResultLogComment); } public void setDxlExportResultLogComment(String comment) { setStringProperty(NotesConstants.eDxlExportResultLogComment, comment); } public String getDxlDefaultSchemaLocation() { return getStringFromMemhandle(NotesConstants.eDxlDefaultSchemaLocation); } public String getDxlSchemaLocation() { return getStringFromMemhandle(NotesConstants.eDxlSchemaLocation); } public void setDxlSchemaLocation(String loc) { setStringProperty(NotesConstants.eDxlSchemaLocation, loc); } public String getAttachmentOmittedText() { return getStringFromMemhandle(NotesConstants.eAttachmentOmittedText); } public void setAttachmentOmittedText(String txt) { setStringProperty(NotesConstants.eAttachmentOmittedText, txt); } public String getOLEObjectOmittedText() { return getStringFromMemhandle(NotesConstants.eOLEObjectOmittedText); } public void setOLEObjectOmittedText(String txt) { setStringProperty(NotesConstants.eOLEObjectOmittedText, txt); } public String getPictureOmittedText() { return getStringFromMemhandle(NotesConstants.ePictureOmittedText); } public void setPictureOmittedText(String txt) { setStringProperty(NotesConstants.ePictureOmittedText, txt); } public List<String> getOmitItemNames() { return getStringList(NotesConstants.eOmitItemNames); } public void setOmitItemNames(List<String> itemNames) { setStringList(NotesConstants.eOmitItemNames, itemNames); } public List<String> getRestrictToItemNames() { return getStringList(NotesConstants.eRestrictToItemNames); } public void 
setRestrictToItemNames(List<String> itemNames) { setStringList(NotesConstants.eRestrictToItemNames, itemNames); } }
Code cleanup
domino-jna/src/main/java/com/mindoo/domino/jna/dxl/DXLExporter.java
Code cleanup
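The "Code cleanup" commit in this record removes an unused helper that depended on lotus.domino.Session; the exporter's own API is unchanged between the two listings. As a usage illustration only (not code from the repository), the sketch below shows the call order the class supports: construct, set properties, export, inspect the result log, and free the native handle. It assumes a NotesNote has already been opened elsewhere and that the caller performs whatever domino-jna initialization (for example a NotesGC context and the websvc.jar classpath note from the class comment) the library requires.

// Hypothetical usage sketch for DXLExporter. "note" stands for a NotesNote opened
// elsewhere; domino-jna runtime initialization is assumed to be handled by the caller.
import java.io.IOException;
import java.io.StringWriter;

import com.mindoo.domino.jna.NotesNote;
import com.mindoo.domino.jna.dxl.DXLExporter;

public class DXLExportSketch {

    public static String exportNoteAsDxl(NotesNote note) throws IOException {
        DXLExporter exporter = new DXLExporter();
        try {
            exporter.setOutputXmlDecl(true);
            exporter.setOmitRichtextAttachments(true); // leave attachment data out of the DXL

            StringWriter writer = new StringWriter();
            exporter.exportNote(note, writer);

            if (exporter.exportErrorWasLogged()) {
                System.err.println(exporter.getDxlExportResultLog());
            }
            return writer.toString();
        } finally {
            exporter.free(); // release the native exporter handle
        }
    }
}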
Java
apache-2.0
850c57ae0be0d1dcdc39e7f280eb8e3e4a790cec
0
phax/peppol-directory,phax/peppol-yellow-pages,phax/peppol-directory,phax/peppol-yellow-pages,phax/peppol-yellow-pages,phax/peppol-yellow-pages,phax/peppol-directory
/** * Copyright (C) 2015-2016 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.pd.client; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.helger.commons.collection.ext.CommonsArrayList; import com.helger.commons.collection.ext.ICommonsList; import com.helger.commons.string.StringHelper; import com.helger.commons.system.SystemProperties; import com.helger.peppol.utils.ConfigFile; /** * This class manages the configuration properties of the PEPPOL Directory * client. The order of the properties file resolving is as follows: * <ol> * <li>Check for the value of the system property * <code>peppol.pd.client.properties.path</code></li> * <li>Check for the value of the system property * <code>pd.client.properties.path</code></li> * <li>The filename <code>private-pd-client.properties</code> in the root of the * classpath</li> * <li>The filename <code>pd-client.properties</code> in the root of the * classpath</li> * </ol> * * @author Philip Helger */ @Immutable public final class PDClientConfiguration { private static final Logger s_aLogger = LoggerFactory.getLogger (PDClientConfiguration.class); private static final ConfigFile s_aConfigFile; static { final ICommonsList <String> aFilePaths = new CommonsArrayList <> (); // Check if the system property is present String sPropertyPath = SystemProperties.getPropertyValue ("peppol.pd.client.properties.path"); if (StringHelper.hasText (sPropertyPath)) aFilePaths.add (sPropertyPath); sPropertyPath = SystemProperties.getPropertyValue ("pd.client.properties.path"); if (StringHelper.hasText (sPropertyPath)) aFilePaths.add (sPropertyPath); // Use the default paths aFilePaths.add ("private-pd-client.properties"); aFilePaths.add ("pd-client.properties"); s_aConfigFile = ConfigFile.create (aFilePaths); if (s_aConfigFile.isRead ()) s_aLogger.info ("Read PEPPOL Directory client properties from " + s_aConfigFile.getReadResource ().getPath ()); else s_aLogger.warn ("Failed to read PEPPOL Directory client properties from any of the paths: " + aFilePaths); } private PDClientConfiguration () {} /** * @return The global config file for the SMP client. */ @Nonnull public static ConfigFile getConfigFile () { return s_aConfigFile; } /** * @return The key store location as specified in the configuration file by * the property <code>keystore.path</code>. */ @Nullable public static String getKeyStorePath () { return s_aConfigFile.getAsString ("keystore.path"); } /** * @return The keystore password as specified in the configuration file by the * property <code>keystore.password</code>. */ @Nullable public static String getKeyStorePassword () { return s_aConfigFile.getAsString ("keystore.password"); } /** * @return The private key alias as specified in the configuration file by the * property <code>keystore.key.alias</code>. 
*/ @Nullable public static String getKeyStoreKeyAlias () { return s_aConfigFile.getAsString ("keystore.key.alias"); } /** * @return The private key password as specified in the configuration file by * the property <code>keystore.key.password</code>. */ @Nullable public static char [] getKeyStoreKeyPassword () { return s_aConfigFile.getAsCharArray ("keystore.key.password"); } /** * @return The proxy host to be used for "http" calls. May be * <code>null</code>. * @see #getHttpsProxyHost() */ @Nullable public static String getHttpProxyHost () { return s_aConfigFile.getAsString ("http.proxyHost"); } /** * @return The proxy port to be used for "http" calls. Defaults to 0. * @see #getHttpsProxyPort() */ public static int getHttpProxyPort () { return s_aConfigFile.getAsInt ("http.proxyPort", 0); } /** * @return The proxy host to be used for "https" calls. May be * <code>null</code>. * @see #getHttpProxyHost() */ @Nullable public static String getHttpsProxyHost () { return s_aConfigFile.getAsString ("https.proxyHost"); } /** * @return The proxy port to be used for "https" calls. Defaults to 0. * @see #getHttpProxyPort() */ public static int getHttpsProxyPort () { return s_aConfigFile.getAsInt ("https.proxyPort", 0); } }
peppol-directory-client/src/main/java/com/helger/pd/client/PDClientConfiguration.java
/** * Copyright (C) 2015-2016 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.pd.client; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.helger.commons.collection.ext.CommonsArrayList; import com.helger.commons.collection.ext.ICommonsList; import com.helger.commons.string.StringHelper; import com.helger.commons.system.SystemProperties; import com.helger.peppol.utils.ConfigFile; /** * This class manages the configuration properties of the PEPPOL Directory * client. The order of the properties file resolving is as follows: * <ol> * <li>Check for the value of the system property * <code>peppol.pd.client.properties.path</code></li> * <li>Check for the value of the system property * <code>pd.client.properties.path</code></li> * <li>The filename <code>private-pd-client.properties</code> in the root of the * classpath</li> * <li>The filename <code>pd-client.properties</code> in the root of the * classpath</li> * </ol> * * @author Philip Helger */ @Immutable public final class PDClientConfiguration { private static final Logger s_aLogger = LoggerFactory.getLogger (PDClientConfiguration.class); private static final ConfigFile s_aConfigFile; static { final ICommonsList <String> aFilePaths = new CommonsArrayList<> (); // Check if the system property is present String sPropertyPath = SystemProperties.getPropertyValue ("peppol.pd.client.properties.path"); if (StringHelper.hasText (sPropertyPath)) aFilePaths.add (sPropertyPath); sPropertyPath = SystemProperties.getPropertyValue ("pd.client.properties.path"); if (StringHelper.hasText (sPropertyPath)) aFilePaths.add (sPropertyPath); // Use the default paths aFilePaths.add ("private-pd-client.properties"); aFilePaths.add ("pd-client.properties"); s_aConfigFile = new ConfigFile (aFilePaths); if (s_aConfigFile.isRead ()) s_aLogger.info ("Read PEPPOL Directory client properties from " + s_aConfigFile.getReadResource ().getPath ()); else s_aLogger.warn ("Failed to read PEPPOL Directory client properties from any of the paths: " + aFilePaths); } private PDClientConfiguration () {} /** * @return The global config file for the SMP client. */ @Nonnull public static ConfigFile getConfigFile () { return s_aConfigFile; } /** * @return The key store location as specified in the configuration file by * the property <code>keystore.path</code>. */ @Nullable public static String getKeyStorePath () { return s_aConfigFile.getString ("keystore.path"); } /** * @return The keystore password as specified in the configuration file by the * property <code>keystore.password</code>. */ @Nullable public static String getKeyStorePassword () { return s_aConfigFile.getString ("keystore.password"); } /** * @return The private key alias as specified in the configuration file by the * property <code>keystore.key.alias</code>. 
*/ @Nullable public static String getKeyStoreKeyAlias () { return s_aConfigFile.getString ("keystore.key.alias"); } /** * @return The private key password as specified in the configuration file by * the property <code>keystore.key.password</code>. */ @Nullable public static char [] getKeyStoreKeyPassword () { return s_aConfigFile.getCharArray ("keystore.key.password"); } /** * @return The proxy host to be used for "http" calls. May be * <code>null</code>. * @see #getHttpsProxyHost() */ @Nullable public static String getHttpProxyHost () { return s_aConfigFile.getString ("http.proxyHost"); } /** * @return The proxy port to be used for "http" calls. Defaults to 0. * @see #getHttpsProxyPort() */ public static int getHttpProxyPort () { return s_aConfigFile.getInt ("http.proxyPort", 0); } /** * @return The proxy host to be used for "https" calls. May be * <code>null</code>. * @see #getHttpProxyHost() */ @Nullable public static String getHttpsProxyHost () { return s_aConfigFile.getString ("https.proxyHost"); } /** * @return The proxy port to be used for "https" calls. Defaults to 0. * @see #getHttpProxyPort() */ public static int getHttpsProxyPort () { return s_aConfigFile.getInt ("https.proxyPort", 0); } }
Updated ConfigFile API
peppol-directory-client/src/main/java/com/helger/pd/client/PDClientConfiguration.java
Updated ConfigFile API
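The commit in this record adapts PDClientConfiguration to the renamed ConfigFile accessors (ConfigFile.create, getAsString, getAsInt, getAsCharArray); the public getters keep their signatures. The sketch below is a hedged consumer example, not code from the repository: the getters are the ones defined in the class, the https.proxyHost/https.proxyPort system properties are standard Java networking settings rather than part of this client, and treating a port of 0 as "no proxy configured" is an assumption based on the documented default.

// Hypothetical consumer of PDClientConfiguration. Values come from whichever
// pd-client.properties file the class resolved at startup.
import com.helger.pd.client.PDClientConfiguration;

public class PDClientConfigSketch {

    public static void main(String[] args) {
        // Key store settings (null when the property is missing)
        System.out.println("Keystore path: " + PDClientConfiguration.getKeyStorePath());
        System.out.println("Key alias    : " + PDClientConfiguration.getKeyStoreKeyAlias());

        // Optional HTTPS proxy; a port of 0 is treated here as "not configured" (assumption)
        final String proxyHost = PDClientConfiguration.getHttpsProxyHost();
        final int proxyPort = PDClientConfiguration.getHttpsProxyPort();
        if (proxyHost != null && proxyPort > 0) {
            System.setProperty("https.proxyHost", proxyHost);
            System.setProperty("https.proxyPort", Integer.toString(proxyPort));
        }
    }
}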
Java
apache-2.0
fc9d4d3741558db47a39b670c24558681256bc08
0
Adobe-Marketing-Cloud/experiencemanager-java-emailprovider,Adobe-Marketing-Cloud/experiencemanager-java-emailprovider
/************************************************************************* * * ADOBE CONFIDENTIAL * __________________ * * Copyright 2011 Adobe Systems Incorporated * All Rights Reserved. * * NOTICE: All information contained herein is, and remains * the property of Adobe Systems Incorporated and its suppliers, * if any. The intellectual and technical concepts contained * herein are proprietary to Adobe Systems Incorporated and its * suppliers and may be covered by U.S. and Foreign Patents, * patents in process, and are protected by trade secret or copyright law. * Dissemination of this information or reproduction of this material * is strictly forbidden unless prior written permission is obtained * from Adobe Systems Incorporated. **************************************************************************/ package com.day.cq.xemailservice.impl; import com.day.cq.xemailservice.XEmailServiceClient; import com.day.cq.xemailservice.XEmailServiceException; import com.day.cq.mcm.emailprovider.ESConstants; import com.day.cq.mcm.emailprovider.EmailService; import com.day.cq.mcm.emailprovider.impl.types.ListAttributeImpl; import com.day.cq.mcm.emailprovider.types.*; import com.day.cq.wcm.webservicesupport.Configuration; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Properties; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Service; import java.util.ArrayList; import java.util.Map; @Component(metatype = false, label = "%cq.mcm.xemailservice.service.name", description = "%cq.mcm.xemailservice.service.description") @Service @Properties({ @Property(name = "service.description", value = "XEmailService Service Implementation") }) public class XEmailServiceImpl implements EmailService { private XEmailServiceClient client; private Configuration config; private static final String SERVICE_NAME = "XEmailService"; //update the name with the corresponding email service provider name @Override public String getName() { return SERVICE_NAME; } /** * This method is an entry point for all the actions to email service but the specific API calls, for specific calls refer {@link com.day.cq.xemailservice.servlets.XEmailServiceSpecificActionsServlet}. 
* @param op Operation which is initiated on the email service * @param params map of * @param config * */ public Object execute(EmailServiceActions op,Map<String,Object> params, Configuration config) throws XEmailServiceException { this.client = new XEmailServiceClientImpl(config); this.config = config; switch(op){ //the below operations are needed for configuring the account and hence configuration is null case CONNECT: checkCredentials(getConnectionParams(params)); return null; case GET_ACCOUNTS: return handleGetAccounts(params); default: break; } if (config == null) { throw new XEmailServiceException("No Configuration specified while invoking " + op.name()); } switch(op) { case ADD_SUBSCRIBER: handleAddSubscriber(params); return null; case DELETE_SUBSCRIBER: handleDeleteSubscriber(params); return null; case PUBLISH_EMAIL: handlePublishEmail(params); return null; case UPDATE_EMAIL: handleUpdateEmail(params); return null; case SEND_AUTO_RESPONDER: handleAutoResponder(params); return null; case SEND_EMAIL_TO_SUBSCRIBER: handleSendMailToSubscriber(params); return null; case SEND_EMAIL_TO_SUBSCRIBER_LIST: handleSendMailToSubscriberList(params); return null; case GET_LISTS: return handleGetLists(params); case GET_PERSONALIZATION_INFO: return handleGetPersonalizationInfo(params); case GET_EMAILS: return handleGetEmails(params); case GET_EMAIL_CLASSIFICATIONS: return handleGetEmailClassifications(params); case CREATE_SUBSCRIPTION_LIST: return handleCreateSubscriptionList(params); case DELETE_SUBSCRIPTION_LIST: handleDeleteSubscriptionList(params); return null; case GET_SUBSCRIBERS: return handleGetSubscribers(params); case GET_FORM_FIELDS: return handleGetFormFields(params,config); default: throw new XEmailServiceException("Unsupported Operation: " + op.name()); } } private void checkCredentials(ConnectionParams connectionParams) throws XEmailServiceException { this.client.checkCredentials(connectionParams); } private ConnectionParams getConnectionParams(Map<String, Object> params) { return null;//To change body of created methods use File | Settings | File Templates. } private Object handleGetAccounts(Map<String, Object> params) throws XEmailServiceException { Object filterObject = null; return this.client.getAccounts(this.getConnectionParams(params), filterObject); } private void handleAutoResponder(Map<String, Object> params) { //To change body of created methods use File | Settings | File Templates. } private void handleSendMailToSubscriber(Map<String, Object> params) throws XEmailServiceException { Email email = null; Subscriber subscriber = null; this.client.sendEmail(email, subscriber); } private void handleSendMailToSubscriberList(Map<String, Object> params) throws XEmailServiceException { //foreach subscriber list this.handleSendMailToSubscriber(params); } /** * This handler method is to fetch the lists. Output from this method should be a of type java.util.List<Map<String,Object> >. * * Later this object will be converted to JSON object. * @param params * @return * @throws XEmailServiceException */ private Object handleGetLists(Map<String, Object> params) throws XEmailServiceException { Object filterObject = null; // construct filter object if any with the help of params map return this.client.getSubscriberList(filterObject); } /** * This handler method is to fetch personalized information. Output from this method should be a of type java.util.List<Map<String,Object> >. * Later this object will be converted to JSON object. 
* @param params * @return */ private Object handleGetPersonalizationInfo(Map<String, Object> params) { return null; //To change body of created methods use File | Settings | File Templates. } private Object handleGetEmails(Map<String, Object> params) throws XEmailServiceException { return this.client.getEmails(new Object()); } private Object handleGetEmailClassifications(Map<String, Object> params) { return null; //To change body of created methods use File | Settings | File Templates. } private Object handleCreateSubscriptionList(Map<String, Object> params) throws XEmailServiceException { return this.client.createList(params); } private void handleDeleteSubscriptionList(Map<String, Object> params) throws XEmailServiceException { //fetch list name from params String listName = (String) params.get("<List>"); //Replace <List> with the actual key this.client.deleteList(listName); } private Object handleGetSubscribers(Map<String, Object> params) throws XEmailServiceException { return this.client.getSubscribers(""); //pull the actual name from the params } private void handleUpdateEmail(Map<String, Object> params) { //this.client.updateEmail(email); //construct desired email object from params } private void handlePublishEmail(Map<String, Object> params) { //this.client.publishEmail(email); //construct desired email object from params } private void handleDeleteSubscriber(Map<String, Object> params) { // this.client.deleteSubscriberFromList(subscriber);//construct subscriber from the params object } private void handleAddSubscriber(Map<String, Object> params) { //this.client.addSubscriberToList(subscriber); //construct subscriber from the params object } /** * Sample method to handle forms fields * @param params * @param config * @return * @throws XEmailServiceException */ private Object handleGetFormFields(Map<String, Object> params, Configuration config) throws XEmailServiceException { java.util.List<ListAttribute> attributes = new ArrayList<ListAttribute>(); String actionType = (String)params.get("actionType"); EmailServiceActions formAction = null; if(actionType.indexOf("addSubscriber")!=-1) formAction = EmailServiceActions.ADD_SUBSCRIBER; else if(actionType.indexOf("deleteSubscriber")!=-1) formAction = EmailServiceActions.DELETE_SUBSCRIBER; else if(actionType.indexOf("autoResponder")!=-1) formAction = EmailServiceActions.SEND_AUTO_RESPONDER; else throw new XEmailServiceException("Form fields not associated with given form action: " + actionType); //ADD default email field ListAttribute attribute = new ListAttributeImpl(ESConstants.FORM_EMAIL_FIELD,ESConstants.FORM_EMAIL_FIELD,true); attributes.add(attribute); java.util.List<Map<String,Object>> mapList = new ArrayList<Map<String,Object>>(); for(ListAttribute attr: attributes) { if(formAction == EmailServiceActions.DELETE_SUBSCRIBER && !attr.isRequired()) continue; mapList.add(attr.getPropertiesMap()); } return mapList; } }
sampleemailservice/src/main/java/com/day/cq/xemailservice/impl/XEmailServiceImpl.java
/************************************************************************* * * ADOBE CONFIDENTIAL * __________________ * * Copyright 2011 Adobe Systems Incorporated * All Rights Reserved. * * NOTICE: All information contained herein is, and remains * the property of Adobe Systems Incorporated and its suppliers, * if any. The intellectual and technical concepts contained * herein are proprietary to Adobe Systems Incorporated and its * suppliers and may be covered by U.S. and Foreign Patents, * patents in process, and are protected by trade secret or copyright law. * Dissemination of this information or reproduction of this material * is strictly forbidden unless prior written permission is obtained * from Adobe Systems Incorporated. **************************************************************************/ package com.day.cq.xemailservice.impl; import com.day.cq.xemailservice.XEmailServiceClient; import com.day.cq.xemailservice.XEmailServiceException; import com.day.cq.mcm.emailprovider.ESConstants; import com.day.cq.mcm.emailprovider.EmailService; import com.day.cq.mcm.emailprovider.impl.types.ListAttributeImpl; import com.day.cq.mcm.emailprovider.types.*; import com.day.cq.wcm.webservicesupport.Configuration; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Properties; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Service; import java.util.ArrayList; import java.util.Map; /** * */ @Component(metatype = false, label = "%cq.mcm.xemailservice.service.name", description = "%cq.mcm.xemailservice.service.description") @Service @Properties({ @Property(name = "service.description", value = "XEmailService Service Implementation") }) public class XEmailServiceImpl implements EmailService { private XEmailServiceClient client; private Configuration config; private static final String SERVICE_NAME = "XEmailService"; //update the name with the corresponding email service provider name @Override public String getName() { return SERVICE_NAME; } /** * This method is an entry point for all the actions to email service but the specific API calls, for specific calls refer {@link com.day.cq.xemailservice.servlets.XEmailServiceSpecificActionsServlet}. 
* @param op Operation which is initiated on the email service * @param params map of * @param config * */ public Object execute(EmailServiceActions op,Map<String,Object> params, Configuration config) throws XEmailServiceException { this.client = new XEmailServiceClientImpl(config); this.config = config; switch(op){ //the below operations are needed for configuring the account and hence configuration is null case CONNECT: checkCredentials(getConnectionParams(params)); return null; case GET_ACCOUNTS: return handleGetAccounts(params); default: break; } if (config == null) { throw new XEmailServiceException("No Configuration specified while invoking " + op.name()); } switch(op) { case ADD_SUBSCRIBER: handleAddSubscriber(params); return null; case DELETE_SUBSCRIBER: handleDeleteSubscriber(params); return null; case PUBLISH_EMAIL: handlePublishEmail(params); return null; case UPDATE_EMAIL: handleUpdateEmail(params); return null; case SEND_AUTO_RESPONDER: handleAutoResponder(params); return null; case SEND_EMAIL_TO_SUBSCRIBER: handleSendMailToSubscriber(params); return null; case SEND_EMAIL_TO_SUBSCRIBER_LIST: handleSendMailToSubscriberList(params); return null; case GET_LISTS: return handleGetLists(params); case GET_PERSONALIZATION_INFO: return handleGetPersonalizationInfo(params); case GET_EMAILS: return handleGetEmails(params); case GET_EMAIL_CLASSIFICATIONS: return handleGetEmailClassifications(params); case CREATE_SUBSCRIPTION_LIST: return handleCreateSubscriptionList(params); case DELETE_SUBSCRIPTION_LIST: handleDeleteSubscriptionList(params); return null; case GET_SUBSCRIBERS: return handleGetSubscribers(params); case GET_FORM_FIELDS: return handleGetFormFields(params,config); default: throw new XEmailServiceException("Unsupported Operation: " + op.name()); } } private void checkCredentials(ConnectionParams connectionParams) throws XEmailServiceException { this.client.checkCredentials(connectionParams); } private ConnectionParams getConnectionParams(Map<String, Object> params) { return null;//To change body of created methods use File | Settings | File Templates. } private Object handleGetAccounts(Map<String, Object> params) throws XEmailServiceException { Object filterObject = null; return this.client.getAccounts(this.getConnectionParams(params), filterObject); } private void handleAutoResponder(Map<String, Object> params) { //To change body of created methods use File | Settings | File Templates. } private void handleSendMailToSubscriber(Map<String, Object> params) throws XEmailServiceException { Email email = null; Subscriber subscriber = null; this.client.sendEmail(email, subscriber); } private void handleSendMailToSubscriberList(Map<String, Object> params) throws XEmailServiceException { //foreach subscriber list this.handleSendMailToSubscriber(params); } /** * This handler method is to fetch the lists. Output from this method should be a of type java.util.List<Map<String,Object> >. * * Later this object will be converted to JSON object. * @param params * @return * @throws XEmailServiceException */ private Object handleGetLists(Map<String, Object> params) throws XEmailServiceException { Object filterObject = null; // construct filter object if any with the help of params map return this.client.getSubscriberList(filterObject); } /** * This handler method is to fetch personalized information. Output from this method should be a of type java.util.List<Map<String,Object> >. * Later this object will be converted to JSON object. 
* @param params * @return */ private Object handleGetPersonalizationInfo(Map<String, Object> params) { return null; //To change body of created methods use File | Settings | File Templates. } private Object handleGetEmails(Map<String, Object> params) throws XEmailServiceException { return this.client.getEmails(new Object()); } private Object handleGetEmailClassifications(Map<String, Object> params) { return null; //To change body of created methods use File | Settings | File Templates. } private Object handleCreateSubscriptionList(Map<String, Object> params) throws XEmailServiceException { return this.client.createList(params); } private void handleDeleteSubscriptionList(Map<String, Object> params) throws XEmailServiceException { //fetch list name from params String listName = (String) params.get("<List>"); //Replace <List> with the actual key this.client.deleteList(listName); } private Object handleGetSubscribers(Map<String, Object> params) throws XEmailServiceException { return this.client.getSubscribers(""); //pull the actual name from the params } private void handleUpdateEmail(Map<String, Object> params) { //this.client.updateEmail(email); //construct desired email object from params } private void handlePublishEmail(Map<String, Object> params) { //this.client.publishEmail(email); //construct desired email object from params } private void handleDeleteSubscriber(Map<String, Object> params) { // this.client.deleteSubscriberFromList(subscriber);//construct subscriber from the params object } private void handleAddSubscriber(Map<String, Object> params) { //this.client.addSubscriberToList(subscriber); //construct subscriber from the params object } /** * Sample method to handle forms fields * @param params * @param config * @return * @throws XEmailServiceException */ private Object handleGetFormFields(Map<String, Object> params, Configuration config) throws XEmailServiceException { java.util.List<ListAttribute> attributes = new ArrayList<ListAttribute>(); String actionType = (String)params.get("actionType"); EmailServiceActions formAction = null; if(actionType.indexOf("addSubscriber")!=-1) formAction = EmailServiceActions.ADD_SUBSCRIBER; else if(actionType.indexOf("deleteSubscriber")!=-1) formAction = EmailServiceActions.DELETE_SUBSCRIBER; else if(actionType.indexOf("autoResponder")!=-1) formAction = EmailServiceActions.SEND_AUTO_RESPONDER; else throw new XEmailServiceException("Form fields not associated with given form action: " + actionType); //ADD default email field ListAttribute attribute = new ListAttributeImpl(ESConstants.FORM_EMAIL_FIELD,ESConstants.FORM_EMAIL_FIELD,true); attributes.add(attribute); java.util.List<Map<String,Object>> mapList = new ArrayList<Map<String,Object>>(); for(ListAttribute attr: attributes) { if(formAction == EmailServiceActions.DELETE_SUBSCRIBER && !attr.isRequired()) continue; mapList.add(attr.getPropertiesMap()); } return mapList; } }
Update XEmailServiceImpl.java
sampleemailservice/src/main/java/com/day/cq/xemailservice/impl/XEmailServiceImpl.java
Update XEmailServiceImpl.java
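In the record above, execute(...) dispatches on EmailServiceActions and handleGetFormFields maps a raw actionType string onto one of three supported form actions before building the field list. A self-contained sketch of that string-to-action mapping follows; the class and enum here are hypothetical stand-ins (the Adobe types from the record are not reproduced), and contains(...) is used in place of the record's indexOf(...) != -1 check:

// Hypothetical stand-ins for the record's action-type handling; JDK only.
public final class FormActionSketch {
    enum FormAction { ADD_SUBSCRIBER, DELETE_SUBSCRIBER, SEND_AUTO_RESPONDER }

    /** Resolves the form action the way the record's handleGetFormFields does. */
    static FormAction resolve(final String actionType) {
        if (actionType == null) {
            throw new IllegalArgumentException("actionType is required");
        }
        if (actionType.contains("addSubscriber")) {
            return FormAction.ADD_SUBSCRIBER;
        }
        if (actionType.contains("deleteSubscriber")) {
            return FormAction.DELETE_SUBSCRIBER;
        }
        if (actionType.contains("autoResponder")) {
            return FormAction.SEND_AUTO_RESPONDER;
        }
        throw new IllegalArgumentException("Form fields not associated with given form action: " + actionType);
    }

    public static void main(final String[] args) {
        System.out.println(resolve("myform/addSubscriber")); // prints ADD_SUBSCRIBER
    }
}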
Java
apache-2.0
d902cdbdd853cb490db1507d79c587acede01ae3
0
apache/wicket,mosoft521/wicket,apache/wicket,mosoft521/wicket,apache/wicket,apache/wicket,mosoft521/wicket,apache/wicket,mosoft521/wicket,mosoft521/wicket
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.request.mapper.info; import org.apache.wicket.util.lang.Args; import org.apache.wicket.util.string.Strings; /** * Encodes listener and component path in form of * {@code <listener>-<componentPath>}, * {@code <listener>.<behaviorIndex>-<componentPath>} or * {@code <render-count>.<listener>.<behaviorIndex>-<componentPath>} * <p> * Component path is escaped (':' characters are replaced by '~') * * @author Matej Knopp */ public class ComponentInfo { private static final char BEHAVIOR_INDEX_SEPARATOR = '.'; private static final char SEPARATOR = '-'; private static final char COMPONENT_SEPARATOR = ':'; private static final char SEPARATOR_ENCODED = '~'; /** * Replaces ':' with '-', and '-' with '~'. * * @param path * the path to the component in its page * @return the encoded path */ private static String encodeComponentPath(CharSequence path) { if (path != null) { int length = path.length(); if (length == 0) { return path.toString(); } StringBuilder result = new StringBuilder(length); for (int i = 0; i < length; i++) { char c = path.charAt(i); switch (c) { case COMPONENT_SEPARATOR : result.append(SEPARATOR); break; case SEPARATOR : result.append(SEPARATOR_ENCODED); break; default : result.append(c); } } return result.toString(); } else { return null; } } /** * Replaces '~' with '-' and '-' with ':' * * @param path * the encoded path of the component in its page * @return the (non-encoded) path of the component in its page */ private static String decodeComponentPath(CharSequence path) { if (path != null) { int length = path.length(); if (length == 0) { return path.toString(); } StringBuilder result = new StringBuilder(length); for (int i = 0; i < length; i++) { char c = path.charAt(i); switch (c) { case SEPARATOR_ENCODED : result.append(SEPARATOR); break; case SEPARATOR : result.append(COMPONENT_SEPARATOR); break; default : result.append(c); } } return result.toString(); } else { return null; } } private final String componentPath; private final Integer behaviorId; private final Integer renderCount; /** * Construct. 
* * @param renderCount * @param componentPath * @param behaviorId */ public ComponentInfo(final Integer renderCount, final String componentPath, final Integer behaviorId) { Args.notNull(componentPath, "componentPath"); this.componentPath = componentPath; this.behaviorId = behaviorId; this.renderCount = renderCount; } /** * @return component path */ public String getComponentPath() { return componentPath; } /** * @return behavior index */ public Integer getBehaviorId() { return behaviorId; } /** * * @return render count */ public Integer getRenderCount() { return renderCount; } /** * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder result = new StringBuilder(); if (renderCount != null) { result.append(renderCount); } if (renderCount != null || behaviorId != null) { result.append(BEHAVIOR_INDEX_SEPARATOR); } if (behaviorId != null) { result.append(behaviorId); } result.append(SEPARATOR); result.append(encodeComponentPath(componentPath)); return result.toString(); } /** * Method that rigidly checks if the string consists of digits only. * * @param string * @return whether the string consists of digits only */ private static boolean isNumber(final String string) { if (string == null || string.isEmpty()) { return false; } for (int i = 0; i < string.length(); ++i) { if (!Character.isDigit(string.charAt(i))) { return false; } } return true; } /** * Parses the given string. * * @param string * @return component info or <code>null</code> if the string is not in correct format. */ public static ComponentInfo parse(final String string) { if (Strings.isEmpty(string)) { return null; } int i = string.indexOf(SEPARATOR); if (i == -1) { return null; } else { String listener = string.substring(0, i); String componentPath = decodeComponentPath(string.substring(i + 1)); Integer behaviorIndex = null; Integer renderCount = null; String listenerParts[] = Strings.split(listener, BEHAVIOR_INDEX_SEPARATOR); if (listenerParts.length == 0) { return new ComponentInfo(renderCount, componentPath, behaviorIndex); } else if (listenerParts.length == 2) { if (isNumber(listenerParts[0])) { renderCount = Integer.valueOf(listenerParts[0]); } if (isNumber(listenerParts[1])) { behaviorIndex = Integer.valueOf(listenerParts[1]); } return new ComponentInfo(renderCount, componentPath, behaviorIndex); } else { return null; } } } }
wicket-request/src/main/java/org/apache/wicket/request/mapper/info/ComponentInfo.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.request.mapper.info; import org.apache.wicket.util.lang.Args; import org.apache.wicket.util.string.Strings; /** * Encodes listener and component path in form of * {@code <listener>-<componentPath>}, * {@code <listener>.<behaviorIndex>-<componentPath>} or * {@code <render-count>.<listener>.<behaviorIndex>-<componentPath>} * <p> * Component path is escaped (':' characters are replaced by '~') * * @author Matej Knopp */ public class ComponentInfo { private static final char BEHAVIOR_INDEX_SEPARATOR = '.'; private static final char SEPARATOR = '-'; private static final char COMPONENT_SEPARATOR = ':'; private static final char SEPARATOR_ENCODED = '~'; /** * Replaces ':' with '-', and '-' with '~'. * * @param path * the path to the component in its page * @return the encoded path */ private static String encodeComponentPath(CharSequence path) { if (path != null) { StringBuilder result = new StringBuilder(); int length = path.length(); for (int i = 0; i < length; i++) { char c = path.charAt(i); switch (c) { case COMPONENT_SEPARATOR : result.append(SEPARATOR); break; case SEPARATOR : result.append(SEPARATOR_ENCODED); break; default : result.append(c); } } return result.toString(); } else { return null; } } /** * Replaces '~' with '-' and '-' with ':' * * @param path * the encoded path of the component in its page * @return the (non-encoded) path of the component in its page */ private static String decodeComponentPath(CharSequence path) { if (path != null) { StringBuilder result = new StringBuilder(); int length = path.length(); for (int i = 0; i < length; i++) { char c = path.charAt(i); switch (c) { case SEPARATOR_ENCODED : result.append(SEPARATOR); break; case SEPARATOR : result.append(COMPONENT_SEPARATOR); break; default : result.append(c); } } return result.toString(); } else { return null; } } private final String componentPath; private final Integer behaviorId; private final Integer renderCount; /** * Construct. 
* * @param renderCount * @param componentPath * @param behaviorId */ public ComponentInfo(final Integer renderCount, final String componentPath, final Integer behaviorId) { Args.notNull(componentPath, "componentPath"); this.componentPath = componentPath; this.behaviorId = behaviorId; this.renderCount = renderCount; } /** * @return component path */ public String getComponentPath() { return componentPath; } /** * @return behavior index */ public Integer getBehaviorId() { return behaviorId; } /** * * @return render count */ public Integer getRenderCount() { return renderCount; } /** * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder result = new StringBuilder(); if (renderCount != null) { result.append(renderCount); } if (renderCount != null || behaviorId != null) { result.append(BEHAVIOR_INDEX_SEPARATOR); } if (behaviorId != null) { result.append(behaviorId); } result.append(SEPARATOR); result.append(encodeComponentPath(componentPath)); return result.toString(); } /** * Method that rigidly checks if the string consists of digits only. * * @param string * @return whether the string consists of digits only */ private static boolean isNumber(final String string) { if ((string == null) || (string.length() == 0)) { return false; } for (int i = 0; i < string.length(); ++i) { if (Character.isDigit(string.charAt(i)) == false) { return false; } } return true; } /** * Parses the given string. * * @param string * @return component info or <code>null</code> if the string is not in correct format. */ public static ComponentInfo parse(final String string) { if (Strings.isEmpty(string)) { return null; } int i = string.indexOf(SEPARATOR); if (i == -1) { return null; } else { String listener = string.substring(0, i); String componentPath = decodeComponentPath(string.substring(i + 1)); Integer behaviorIndex = null; Integer renderCount = null; String listenerParts[] = Strings.split(listener, BEHAVIOR_INDEX_SEPARATOR); if (listenerParts.length == 0) { return new ComponentInfo(renderCount, componentPath, behaviorIndex); } else if (listenerParts.length == 2) { if (isNumber(listenerParts[0])) { renderCount = Integer.valueOf(listenerParts[0]); } if (isNumber(listenerParts[1])) { behaviorIndex = Integer.valueOf(listenerParts[1]); } return new ComponentInfo(renderCount, componentPath, behaviorIndex); } else { return null; } } } }
WICKET-6960 Avoid unnecessary allocations when decoding and encoding `ComponentInfo` (#501)
wicket-request/src/main/java/org/apache/wicket/request/mapper/info/ComponentInfo.java
WICKET-6960 Avoid unnecessary allocations when decoding and encoding `ComponentInfo` (#501)
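The WICKET-6960 record above avoids allocations in the escape loops by returning early for empty paths and pre-sizing the StringBuilder to the input length instead of growing an unsized one. A stand-alone sketch of the same escape/unescape idea (':' -> '-', '-' -> '~', and back), independent of the Wicket classes and omitting their null handling:

// JDK-only sketch of the separator escaping shown in the record; not the Wicket class itself.
public final class PathEscapeSketch {
    public static String escape(final CharSequence path) {
        final int length = path.length();
        if (length == 0) {
            return path.toString(); // nothing to do, avoid allocating a builder
        }
        final StringBuilder result = new StringBuilder(length); // pre-sized, as in the commit
        for (int i = 0; i < length; i++) {
            final char c = path.charAt(i);
            switch (c) {
                case ':': result.append('-'); break;
                case '-': result.append('~'); break;
                default:  result.append(c);
            }
        }
        return result.toString();
    }

    public static String unescape(final CharSequence path) {
        final int length = path.length();
        if (length == 0) {
            return path.toString();
        }
        final StringBuilder result = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            final char c = path.charAt(i);
            switch (c) {
                case '~': result.append('-'); break;
                case '-': result.append(':'); break;
                default:  result.append(c);
            }
        }
        return result.toString();
    }

    public static void main(final String[] args) {
        final String encoded = escape("panel:form:submit-button");
        System.out.println(encoded);           // panel-form-submit~button
        System.out.println(unescape(encoded)); // panel:form:submit-button
    }
}

Pre-sizing works here because each input character maps to exactly one output character, so the builder never has to grow.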
Java
apache-2.0
542720553f1bf604ba1133c8823373635b73000a
0
leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere
/* * Copyright 2016-2018 shardingsphere.io. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </p> */ package io.shardingsphere.core.routing; import io.shardingsphere.core.constant.DatabaseType; import io.shardingsphere.core.metadata.datasource.ShardingDataSourceMetaData; import io.shardingsphere.core.metadata.table.ShardingTableMetaData; import io.shardingsphere.core.parsing.parser.sql.SQLStatement; import io.shardingsphere.core.routing.router.masterslave.ShardingMasterSlaveRouter; import io.shardingsphere.core.routing.router.sharding.ShardingRouter; import io.shardingsphere.core.routing.router.sharding.ShardingRouterFactory; import io.shardingsphere.core.rule.ShardingRule; import java.util.Collections; /** * Statement routing engine. * * @author zhangliang * @author panjuan */ public final class StatementRoutingEngine { private final ShardingRouter shardingRouter; private final ShardingMasterSlaveRouter masterSlaveRouter; public StatementRoutingEngine(final ShardingRule shardingRule, final ShardingTableMetaData shardingTableMetaData, final DatabaseType databaseType, final boolean showSQL, final ShardingDataSourceMetaData shardingDataSourceMetaData) { shardingRouter = ShardingRouterFactory.createSQLRouter(shardingRule, shardingTableMetaData, databaseType, showSQL, shardingDataSourceMetaData); masterSlaveRouter = new ShardingMasterSlaveRouter(shardingRule.getMasterSlaveRules()); } /** * SQL route. * * @param logicSQL logic SQL * @return route result */ public SQLRouteResult route(final String logicSQL) { SQLStatement sqlStatement = shardingRouter.parse(logicSQL, true); return masterSlaveRouter.route(shardingRouter.route(logicSQL, Collections.emptyList(), sqlStatement)); } }
sharding-core/src/main/java/io/shardingsphere/core/routing/StatementRoutingEngine.java
/* * Copyright 2016-2018 shardingsphere.io. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </p> */ package io.shardingsphere.core.routing; import io.shardingsphere.core.constant.DatabaseType; import io.shardingsphere.core.metadata.datasource.ShardingDataSourceMetaData; import io.shardingsphere.core.metadata.table.ShardingTableMetaData; import io.shardingsphere.core.parsing.parser.sql.SQLStatement; import io.shardingsphere.core.routing.router.masterslave.ShardingMasterSlaveRouter; import io.shardingsphere.core.routing.router.sharding.ShardingRouter; import io.shardingsphere.core.routing.router.sharding.ShardingRouterFactory; import io.shardingsphere.core.rule.ShardingRule; import java.util.Collections; /** * Statement routing engine. * * @author zhangliang * @author panjuan */ public final class StatementRoutingEngine { private final ShardingRouter shardingRouter; private final ShardingMasterSlaveRouter masterSlaveRouter; public StatementRoutingEngine(final ShardingRule shardingRule, final ShardingTableMetaData shardingTableMetaData, final DatabaseType databaseType, final boolean showSQL, final ShardingDataSourceMetaData shardingDataSourceMetaData) { shardingRouter = ShardingRouterFactory.createSQLRouter(shardingRule, shardingTableMetaData, databaseType, showSQL, shardingDataSourceMetaData); masterSlaveRouter = new ShardingMasterSlaveRouter(shardingRule.getMasterSlaveRules()); } /** * SQL route. * * @param logicSQL logic SQL * @return route result */ public SQLRouteResult route(final String logicSQL) { SQLStatement sqlStatement = shardingRouter.parse(logicSQL, false); return masterSlaveRouter.route(shardingRouter.route(logicSQL, Collections.emptyList(), sqlStatement)); } }
switch on cache of parse statement
sharding-core/src/main/java/io/shardingsphere/core/routing/StatementRoutingEngine.java
switch on cache of parse statement
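The record's one-line change flips the useCache argument of shardingRouter.parse(logicSQL, ...) from false to true for statement routing. The actual cache lives inside ShardingSphere's parsing engine and is not shown in the record; as a generic illustration of what such a flag usually gates, a memoizing wrapper could look like the following (all names hypothetical, not ShardingSphere's implementation):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Generic memoization sketch keyed by the SQL text; illustrative only.
public final class ParseCacheSketch<S> {
    private final Map<String, S> cache = new ConcurrentHashMap<>();
    private final Function<String, S> parser;

    public ParseCacheSketch(final Function<String, S> parser) {
        this.parser = parser;
    }

    /** Parses logicSQL, reusing a previous result when useCache is true. */
    public S parse(final String logicSQL, final boolean useCache) {
        if (!useCache) {
            return parser.apply(logicSQL); // always re-parse
        }
        return cache.computeIfAbsent(logicSQL, parser); // parse once per distinct SQL text
    }

    public static void main(final String[] args) {
        final ParseCacheSketch<Integer> lengths = new ParseCacheSketch<>(String::length);
        System.out.println(lengths.parse("SELECT 1", true)); // computed
        System.out.println(lengths.parse("SELECT 1", true)); // served from cache
    }
}

Caching by SQL text only pays off when the same logic SQL is routed repeatedly, which is the situation the Statement routing engine in the record targets.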
Java
apache-2.0
b6fd07732265eff9e315293a74438105db5bb3f3
0
JEBailey/sling,JEBailey/sling,vladbailescu/sling,trekawek/sling,awadheshv/sling,labertasch/sling,ist-dresden/sling,tmaret/sling,mcdan/sling,anchela/sling,mcdan/sling,tmaret/sling,mcdan/sling,anchela/sling,tmaret/sling,ieb/sling,anchela/sling,awadheshv/sling,roele/sling,roele/sling,headwirecom/sling,vladbailescu/sling,headwirecom/sling,tmaret/sling,mcdan/sling,trekawek/sling,mcdan/sling,anchela/sling,mcdan/sling,vladbailescu/sling,trekawek/sling,awadheshv/sling,roele/sling,trekawek/sling,ist-dresden/sling,vladbailescu/sling,headwirecom/sling,ieb/sling,roele/sling,ist-dresden/sling,anchela/sling,labertasch/sling,tmaret/sling,ieb/sling,headwirecom/sling,JEBailey/sling,ieb/sling,ieb/sling,ieb/sling,ist-dresden/sling,roele/sling,JEBailey/sling,trekawek/sling,headwirecom/sling,labertasch/sling,JEBailey/sling,awadheshv/sling,ist-dresden/sling,awadheshv/sling,labertasch/sling,trekawek/sling,awadheshv/sling,vladbailescu/sling,labertasch/sling
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sling.scripting.core.it; import org.apache.sling.testing.paxexam.TestSupport; import org.ops4j.pax.exam.Configuration; import org.ops4j.pax.exam.Option; import static org.apache.sling.testing.paxexam.SlingOptions.sling; import static org.apache.sling.testing.paxexam.SlingOptions.versionResolver; import static org.apache.sling.testing.paxexam.SlingOptions.webconsole; import static org.ops4j.pax.exam.CoreOptions.composite; import static org.ops4j.pax.exam.CoreOptions.junitBundles; import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.cm.ConfigurationAdminOptions.factoryConfiguration; import static org.ops4j.pax.exam.cm.ConfigurationAdminOptions.newConfiguration; public class ScriptingCoreTestSupport extends TestSupport { @Configuration public Option[] configuration() { return new Option[]{ baseConfiguration(), launchpad(), // Sling Scripting Core testBundle("bundle.filename"), mavenBundle().groupId("org.apache.sling").artifactId("org.apache.sling.scripting.api").versionAsInProject(), factoryConfiguration("org.apache.sling.serviceusermapping.impl.ServiceUserMapperImpl.amended") .put("user.mapping", new String[]{ "org.apache.sling.scripting.core=sling-scripting" }) .asOption(), // debugging mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.inventory").version(versionResolver), mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.webconsole.plugins.ds").version(versionResolver), // testing junitBundles() }; } protected Option launchpad() { versionResolver.setVersion("org.apache.felix", "org.apache.felix.http.jetty", "3.1.6"); // Java 7 versionResolver.setVersion("org.apache.felix", "org.apache.felix.http.whiteboard", "2.3.2"); // Java 7 final int httpPort = findFreePort(); System.out.println("http port " + httpPort); return composite( sling(), webconsole(), newConfiguration("org.apache.felix.http") .put("org.osgi.service.http.port", httpPort) .asOption() ); } }
bundles/scripting/core/src/test/java/org/apache/sling/scripting/core/it/ScriptingCoreTestSupport.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sling.scripting.core.it; import org.apache.sling.testing.paxexam.TestSupport; import org.ops4j.pax.exam.Configuration; import org.ops4j.pax.exam.Option; import static org.apache.sling.testing.paxexam.SlingOptions.sling; import static org.apache.sling.testing.paxexam.SlingOptions.versionResolver; import static org.apache.sling.testing.paxexam.SlingOptions.webconsole; import static org.ops4j.pax.exam.CoreOptions.composite; import static org.ops4j.pax.exam.CoreOptions.junitBundles; import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.cm.ConfigurationAdminOptions.factoryConfiguration; import static org.ops4j.pax.exam.cm.ConfigurationAdminOptions.newConfiguration; public class ScriptingCoreTestSupport extends TestSupport { @Configuration public Option[] configuration() { return new Option[]{ baseConfiguration(), launchpad(), // Sling Scripting Core testBundle("bundle.filename"), mavenBundle().groupId("org.apache.sling").artifactId("org.apache.sling.scripting.api").versionAsInProject(), factoryConfiguration("org.apache.sling.serviceusermapping.impl.ServiceUserMapperImpl.amended") .put("user.mapping", new String[]{ "org.apache.sling.scripting.core=sling-scripting" }) .asOption(), // debugging mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.inventory").version(versionResolver), mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.webconsole.plugins.ds").version(versionResolver), // testing junitBundles() }; } protected Option launchpad() { final int httpPort = findFreePort(); System.out.println("http port " + httpPort); return composite( sling(), webconsole(), newConfiguration("org.apache.felix.http") .put("org.osgi.service.http.port", httpPort) .asOption() ); } }
support build with Java 7 git-svn-id: 6eed74fe9a15c8da84b9a8d7f2960c0406113ece@1789111 13f79535-47bb-0310-9956-ffa450edef68
bundles/scripting/core/src/test/java/org/apache/sling/scripting/core/it/ScriptingCoreTestSupport.java
support build with Java 7
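The record pins the Felix Jetty and whiteboard bundle versions so the pax-exam integration test still runs on Java 7, and binds the HTTP service to a port obtained from the inherited TestSupport.findFreePort(). That helper's implementation is not shown in the record; one common, JDK-only way such a lookup is implemented is to bind an ephemeral server socket and read back the port the OS assigned (illustrative only, not necessarily what TestSupport does):

import java.io.IOException;
import java.net.ServerSocket;

// Typical free-port helper; the socket is closed immediately, so the port is merely "likely free".
public final class FreePortSketch {
    public static int findFreePort() {
        try (ServerSocket socket = new ServerSocket(0)) { // port 0 = let the OS pick
            return socket.getLocalPort();
        } catch (final IOException ex) {
            throw new IllegalStateException("No free port available", ex);
        }
    }

    public static void main(final String[] args) {
        System.out.println("picked port " + findFreePort());
    }
}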
Java
apache-2.0
4349ade4fd5076f2fa94ad88509abd24699870b2
0
gstevey/gradle,lsmaira/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,gstevey/gradle,lsmaira/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,gstevey/gradle,lsmaira/gradle,gstevey/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,gstevey/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,lsmaira/gradle,lsmaira/gradle,lsmaira/gradle,lsmaira/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,gstevey/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,lsmaira/gradle
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.plugin.devel.tasks; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import org.gradle.api.DefaultTask; import org.gradle.api.GradleException; import org.gradle.api.Incubating; import org.gradle.api.InvalidUserDataException; import org.gradle.api.Task; import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileVisitDetails; import org.gradle.api.file.FileVisitor; import org.gradle.api.internal.file.collections.DirectoryFileTree; import org.gradle.api.internal.project.taskfactory.TaskPropertyValidationAccess; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputDirectory; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Optional; import org.gradle.api.tasks.OutputFile; import org.gradle.api.tasks.ParallelizableTask; import org.gradle.api.tasks.SkipWhenEmpty; import org.gradle.api.tasks.TaskAction; import org.gradle.api.tasks.TaskValidationException; import org.gradle.api.tasks.VerificationTask; import org.gradle.internal.Cast; import org.gradle.internal.classloader.ClassLoaderFactory; import org.gradle.internal.classloader.ClassLoaderUtils; import org.gradle.internal.classpath.ClassPath; import org.gradle.internal.classpath.DefaultClassPath; import org.gradle.util.internal.Java9ClassReader; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Opcodes; import javax.inject.Inject; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; /** * Validates task property annotations. * * <p> * Task properties must be annotated with one of: * </p> * * <ul> * <li> * <b>Properties taken into account during up-to-date checks:</b> * <ul> * <li> * {@literal @}{@link org.gradle.api.tasks.Input}, * {@literal @}{@link org.gradle.api.tasks.Nested}, * {@literal @}{@link org.gradle.api.tasks.InputFile}, * {@literal @}{@link org.gradle.api.tasks.InputDirectory}, * {@literal @}{@link org.gradle.api.tasks.InputFiles} * to mark it as an input to the task. * </li> * <li> * {@literal @}{@link org.gradle.api.tasks.OutputFile}, * {@literal @}{@link org.gradle.api.tasks.OutputDirectory} * to mark it as an output of the task. 
* </li> * </ul> * </li> * <li> * <b>Properties ignored during up-to-date checks:</b> * <ul> * <li>{@literal @}{@link javax.inject.Inject} marks a Gradle service used by the task.</li> * <li>{@literal @}{@link org.gradle.api.tasks.Console Console} marks a property that only influences the console output of the task.</li> * <li>{@literal @}{@link org.gradle.api.tasks.Internal Internal} mark an internal property of the task.</li> * </ul> * </li> * </ul> */ @Incubating @ParallelizableTask @SuppressWarnings("WeakerAccess") public class ValidateTaskProperties extends DefaultTask implements VerificationTask { private File classesDir; private FileCollection classpath; private Object outputFile; private boolean ignoreFailures; private boolean failOnWarning; @TaskAction public void validateTaskClasses() throws IOException { ClassPath classPath = new DefaultClassPath(Iterables.concat(Collections.singleton(getClassesDir()), getClasspath())); ClassLoader classLoader = getClassLoaderFactory().createIsolatedClassLoader(classPath); try { validateTaskClasses(classLoader); } finally { ClassLoaderUtils.tryClose(classLoader); } } private void validateTaskClasses(final ClassLoader classLoader) throws IOException { final Map<String, Boolean> taskValidationProblems = Maps.newTreeMap(); final Class<?> taskInterface; final Method validatorMethod; try { taskInterface = classLoader.loadClass(Task.class.getName()); Class<?> validatorClass = classLoader.loadClass(TaskPropertyValidationAccess.class.getName()); validatorMethod = validatorClass.getMethod("collectTaskValidationProblems", Class.class, Map.class); } catch (ClassNotFoundException e) { throw Throwables.propagate(e); } catch (NoSuchMethodException e) { throw Throwables.propagate(e); } new DirectoryFileTree(getClassesDir()).visit(new FileVisitor() { @Override public void visitDir(FileVisitDetails dirDetails) { } @Override public void visitFile(FileVisitDetails fileDetails) { if (!fileDetails.getPath().endsWith(".class")) { return; } ClassReader reader; try { reader = new Java9ClassReader(Files.asByteSource(fileDetails.getFile()).read()); } catch (IOException e) { throw Throwables.propagate(e); } List<String> classNames = Lists.newArrayList(); reader.accept(new TaskNameCollectorVisitor(classNames), ClassReader.SKIP_CODE); for (String className : classNames) { Class<?> clazz; try { clazz = classLoader.loadClass(className); } catch (IllegalAccessError e) { throw new GradleException("Could not load class: " + className, e); } catch (ClassNotFoundException e) { throw new GradleException("Could not load class: " + className, e); } catch (NoClassDefFoundError e) { throw new GradleException("Could not load class: " + className, e); } if (!Modifier.isPublic(clazz.getModifiers())) { continue; } if (Modifier.isAbstract(clazz.getModifiers())) { continue; } if (!taskInterface.isAssignableFrom(clazz)) { continue; } Class<? 
extends Task> taskClass = Cast.uncheckedCast(clazz); try { validatorMethod.invoke(null, taskClass, taskValidationProblems); } catch (IllegalAccessException e) { throw Throwables.propagate(e); } catch (InvocationTargetException e) { throw Throwables.propagate(e); } } } }); List<String> problemMessages = toProblemMessages(taskValidationProblems); storeResults(problemMessages, getOutputFile()); communicateResult(problemMessages, taskValidationProblems.values().contains(Boolean.TRUE)); } private void storeResults(List<String> problemMessages, File outputFile) throws IOException { if (outputFile != null) { //noinspection ResultOfMethodCallIgnored outputFile.createNewFile(); Files.asCharSink(outputFile, Charsets.UTF_8).write(Joiner.on('\n').join(problemMessages)); } } private void communicateResult(List<String> problemMessages, boolean hasErrors) { if (problemMessages.isEmpty()) { getLogger().info("Task property validation finished without warnings."); } else { if (hasErrors || getFailOnWarning()) { if (getIgnoreFailures()) { getLogger().warn("Task property validation finished with errors:{}", toMessageList(problemMessages)); } else { throw new TaskValidationException("Task property validation failed", toExceptionList(problemMessages)); } } else { getLogger().warn("Task property validation finished with warnings:{}", toMessageList(problemMessages)); } } } private static List<String> toProblemMessages(Map<String, Boolean> problems) { ImmutableList.Builder<String> builder = ImmutableList.builder(); for (Map.Entry<String, Boolean> entry : problems.entrySet()) { String problem = entry.getKey(); Boolean error = entry.getValue(); builder.add(String.format("%s: %s", Boolean.TRUE.equals(error) ? "Error" : "Warning", problem )); } return builder.build(); } private static CharSequence toMessageList(List<String> problemMessages) { StringBuilder builder = new StringBuilder(); for (String problemMessage : problemMessages) { builder.append(String.format("%n - %s", problemMessage)); } return builder; } private static List<InvalidUserDataException> toExceptionList(List<String> problemMessages) { return Lists.transform(problemMessages, new Function<String, InvalidUserDataException>() { @Override public InvalidUserDataException apply(String problemMessage) { return new InvalidUserDataException(problemMessage); } }); } /** * {@inheritDoc} */ @Input @Override public boolean getIgnoreFailures() { return ignoreFailures; } /** * {@inheritDoc} */ @Override public void setIgnoreFailures(boolean ignoreFailures) { this.ignoreFailures = ignoreFailures; } /** * The directory containing the classes to validate. */ @InputDirectory @SkipWhenEmpty public File getClassesDir() { return classesDir; } /** * Sets the directory containing the classes to validate. */ public void setClassesDir(File classesDir) { this.classesDir = classesDir; } /** * The classpath used to load the classes under validation. */ @InputFiles public FileCollection getClasspath() { return classpath; } /** * Sets the classpath used to load the classes under validation. */ public void setClasspath(FileCollection classpath) { this.classpath = classpath; } /** * Returns whether the build should break when the verifications performed by this task detects a warning. */ @Input public boolean getFailOnWarning() { return failOnWarning; } /** * Returns the output file to store the report in. */ @Optional @OutputFile public File getOutputFile() { return outputFile == null ? null : getProject().file(outputFile); } /** * Sets the output file to store the report in. 
*/ public void setOutputFile(Object outputFile) { this.outputFile = outputFile; } /** * Specifies whether the build should break when the verifications performed by this task detects a warning. * * @param failOnWarning {@code true} to break the build on warning, {@code false} to ignore warnings. The default is {@code false}. */ @SuppressWarnings("unused") public void setFailOnWarning(boolean failOnWarning) { this.failOnWarning = failOnWarning; } @Inject protected ClassLoaderFactory getClassLoaderFactory() { throw new UnsupportedOperationException(); } private static class TaskNameCollectorVisitor extends ClassVisitor { private final Collection<String> classNames; public TaskNameCollectorVisitor(Collection<String> classNames) { super(Opcodes.ASM5); this.classNames = classNames; } @Override public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { if ((access & Opcodes.ACC_PUBLIC) != 0) { classNames.add(name.replace('/', '.')); } } } }
subprojects/plugin-development/src/main/java/org/gradle/plugin/devel/tasks/ValidateTaskProperties.java
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.plugin.devel.tasks; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import org.gradle.api.DefaultTask; import org.gradle.api.GradleException; import org.gradle.api.Incubating; import org.gradle.api.InvalidUserDataException; import org.gradle.api.Task; import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileVisitDetails; import org.gradle.api.file.FileVisitor; import org.gradle.api.internal.file.collections.DirectoryFileTree; import org.gradle.api.internal.project.taskfactory.TaskPropertyValidationAccess; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputDirectory; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Optional; import org.gradle.api.tasks.OutputFile; import org.gradle.api.tasks.ParallelizableTask; import org.gradle.api.tasks.SkipWhenEmpty; import org.gradle.api.tasks.TaskAction; import org.gradle.api.tasks.TaskValidationException; import org.gradle.api.tasks.VerificationTask; import org.gradle.internal.Cast; import org.gradle.internal.classloader.ClassLoaderFactory; import org.gradle.internal.classpath.ClassPath; import org.gradle.internal.classpath.DefaultClassPath; import org.gradle.util.internal.Java9ClassReader; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Opcodes; import javax.inject.Inject; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; /** * Validates task property annotations. * * <p> * Task properties must be annotated with one of: * </p> * * <ul> * <li> * <b>Properties taken into account during up-to-date checks:</b> * <ul> * <li> * {@literal @}{@link org.gradle.api.tasks.Input}, * {@literal @}{@link org.gradle.api.tasks.Nested}, * {@literal @}{@link org.gradle.api.tasks.InputFile}, * {@literal @}{@link org.gradle.api.tasks.InputDirectory}, * {@literal @}{@link org.gradle.api.tasks.InputFiles} * to mark it as an input to the task. * </li> * <li> * {@literal @}{@link org.gradle.api.tasks.OutputFile}, * {@literal @}{@link org.gradle.api.tasks.OutputDirectory} * to mark it as an output of the task. 
* </li> * </ul> * </li> * <li> * <b>Properties ignored during up-to-date checks:</b> * <ul> * <li>{@literal @}{@link javax.inject.Inject} marks a Gradle service used by the task.</li> * <li>{@literal @}{@link org.gradle.api.tasks.Console Console} marks a property that only influences the console output of the task.</li> * <li>{@literal @}{@link org.gradle.api.tasks.Internal Internal} mark an internal property of the task.</li> * </ul> * </li> * </ul> */ @Incubating @ParallelizableTask @SuppressWarnings("WeakerAccess") public class ValidateTaskProperties extends DefaultTask implements VerificationTask { private File classesDir; private FileCollection classpath; private Object outputFile; private boolean ignoreFailures; private boolean failOnWarning; @TaskAction public void validateTaskClasses() throws IOException { final Map<String, Boolean> taskValidationProblems = Maps.newTreeMap(); ClassPath classPath = new DefaultClassPath(Iterables.concat(Collections.singleton(getClassesDir()), getClasspath())); final ClassLoader classLoader = getClassLoaderFactory().createIsolatedClassLoader(classPath); final Class<?> taskInterface; final Method validatorMethod; try { taskInterface = classLoader.loadClass(Task.class.getName()); Class<?> validatorClass = classLoader.loadClass(TaskPropertyValidationAccess.class.getName()); validatorMethod = validatorClass.getMethod("collectTaskValidationProblems", Class.class, Map.class); } catch (ClassNotFoundException e) { throw Throwables.propagate(e); } catch (NoSuchMethodException e) { throw Throwables.propagate(e); } new DirectoryFileTree(getClassesDir()).visit(new FileVisitor() { @Override public void visitDir(FileVisitDetails dirDetails) { } @Override public void visitFile(FileVisitDetails fileDetails) { if (!fileDetails.getPath().endsWith(".class")) { return; } ClassReader reader; try { reader = new Java9ClassReader(Files.asByteSource(fileDetails.getFile()).read()); } catch (IOException e) { throw Throwables.propagate(e); } List<String> classNames = Lists.newArrayList(); reader.accept(new TaskNameCollectorVisitor(classNames), ClassReader.SKIP_CODE); for (String className : classNames) { Class<?> clazz; try { clazz = classLoader.loadClass(className); } catch (IllegalAccessError e) { throw new GradleException("Could not load class: " + className, e); } catch (ClassNotFoundException e) { throw new GradleException("Could not load class: " + className, e); } catch (NoClassDefFoundError e) { throw new GradleException("Could not load class: " + className, e); } if (!Modifier.isPublic(clazz.getModifiers())) { continue; } if (Modifier.isAbstract(clazz.getModifiers())) { continue; } if (!taskInterface.isAssignableFrom(clazz)) { continue; } Class<? 
extends Task> taskClass = Cast.uncheckedCast(clazz); try { validatorMethod.invoke(null, taskClass, taskValidationProblems); } catch (IllegalAccessException e) { throw Throwables.propagate(e); } catch (InvocationTargetException e) { throw Throwables.propagate(e); } } } }); List<String> problemMessages = toProblemMessages(taskValidationProblems); storeResults(problemMessages, getOutputFile()); communicateResult(problemMessages, taskValidationProblems.values().contains(Boolean.TRUE)); } private void storeResults(List<String> problemMessages, File outputFile) throws IOException { if (outputFile != null) { //noinspection ResultOfMethodCallIgnored outputFile.createNewFile(); Files.asCharSink(outputFile, Charsets.UTF_8).write(Joiner.on('\n').join(problemMessages)); } } private void communicateResult(List<String> problemMessages, boolean hasErrors) { if (problemMessages.isEmpty()) { getLogger().info("Task property validation finished without warnings."); } else { if (hasErrors || getFailOnWarning()) { if (getIgnoreFailures()) { getLogger().warn("Task property validation finished with errors:{}", toMessageList(problemMessages)); } else { throw new TaskValidationException("Task property validation failed", toExceptionList(problemMessages)); } } else { getLogger().warn("Task property validation finished with warnings:{}", toMessageList(problemMessages)); } } } private static List<String> toProblemMessages(Map<String, Boolean> problems) { ImmutableList.Builder<String> builder = ImmutableList.builder(); for (Map.Entry<String, Boolean> entry : problems.entrySet()) { String problem = entry.getKey(); Boolean error = entry.getValue(); builder.add(String.format("%s: %s", Boolean.TRUE.equals(error) ? "Error" : "Warning", problem )); } return builder.build(); } private static CharSequence toMessageList(List<String> problemMessages) { StringBuilder builder = new StringBuilder(); for (String problemMessage : problemMessages) { builder.append(String.format("%n - %s", problemMessage)); } return builder; } private static List<InvalidUserDataException> toExceptionList(List<String> problemMessages) { return Lists.transform(problemMessages, new Function<String, InvalidUserDataException>() { @Override public InvalidUserDataException apply(String problemMessage) { return new InvalidUserDataException(problemMessage); } }); } /** * {@inheritDoc} */ @Input @Override public boolean getIgnoreFailures() { return ignoreFailures; } /** * {@inheritDoc} */ @Override public void setIgnoreFailures(boolean ignoreFailures) { this.ignoreFailures = ignoreFailures; } /** * The directory containing the classes to validate. */ @InputDirectory @SkipWhenEmpty public File getClassesDir() { return classesDir; } /** * Sets the directory containing the classes to validate. */ public void setClassesDir(File classesDir) { this.classesDir = classesDir; } /** * The classpath used to load the classes under validation. */ @InputFiles public FileCollection getClasspath() { return classpath; } /** * Sets the classpath used to load the classes under validation. */ public void setClasspath(FileCollection classpath) { this.classpath = classpath; } /** * Returns whether the build should break when the verifications performed by this task detects a warning. */ @Input public boolean getFailOnWarning() { return failOnWarning; } /** * Returns the output file to store the report in. */ @Optional @OutputFile public File getOutputFile() { return outputFile == null ? null : getProject().file(outputFile); } /** * Sets the output file to store the report in. 
*/ public void setOutputFile(Object outputFile) { this.outputFile = outputFile; } /** * Specifies whether the build should break when the verifications performed by this task detects a warning. * * @param failOnWarning {@code true} to break the build on warning, {@code false} to ignore warnings. The default is {@code false}. */ @SuppressWarnings("unused") public void setFailOnWarning(boolean failOnWarning) { this.failOnWarning = failOnWarning; } @Inject protected ClassLoaderFactory getClassLoaderFactory() { throw new UnsupportedOperationException(); } private static class TaskNameCollectorVisitor extends ClassVisitor { private final Collection<String> classNames; public TaskNameCollectorVisitor(Collection<String> classNames) { super(Opcodes.ASM5); this.classNames = classNames; } @Override public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { if ((access & Opcodes.ACC_PUBLIC) != 0) { classNames.add(name.replace('/', '.')); } } } }
Close the ClassLoader in ValidateTaskProperties.
subprojects/plugin-development/src/main/java/org/gradle/plugin/devel/tasks/ValidateTaskProperties.java
Close the ClassLoader in ValidateTaskProperties.
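The record above carries the message "Close the ClassLoader in ValidateTaskProperties." As a hedged, generic illustration of the Java pattern that message names (not the actual Gradle change, whose loader comes from an internal ClassLoaderFactory and whose close mechanism is not visible in the stored contents), URLClassLoader has implemented Closeable since Java 7, so a loader created for a one-off class scan can be released deterministically with try-with-resources. The directory path and the class lookup below are invented for the sketch.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

public class IsolatedLoaderSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical classes directory; point this at a real output dir when trying it.
        File classesDir = new File(args.length > 0 ? args[0] : "build/classes/java/main");
        URL[] roots = { classesDir.toURI().toURL() };

        // try-with-resources guarantees close() even if the scan below throws.
        try (URLClassLoader isolated = new URLClassLoader(roots, null)) {
            // A null parent keeps the loader isolated from the application classpath;
            // bootstrap classes (java.*) remain visible through delegation.
            Class<?> listClass = Class.forName("java.util.ArrayList", false, isolated);
            System.out.println("Resolved " + listClass.getName() + " through the isolated loader");
        } // file handles held by the loader are released here
    }
}

Closing matters because a URLClassLoader keeps handles to the jars and directories it reads; leaking the loader keeps those handles, and the classes it defined, alive for the rest of the process.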
Java
apache-2.0
a0292f2e07d878ea084881a313894fb70d214d56
0
Bedework/bw-classic,Bedework/bw-classic,Bedework/bw-classic,Bedework/bw-classic
/* Copyright (c) 2000-2005 University of Washington. All rights reserved. Redistribution and use of this distribution in source and binary forms, with or without modification, are permitted provided that: The above copyright notice and this permission notice appear in all copies and supporting documentation; The name, identifiers, and trademarks of the University of Washington are not used in advertising or publicity without the express prior written permission of the University of Washington; Recipients acknowledge that this distribution is made available as a research courtesy, "as is", potentially with defects, without any obligation on the part of the University of Washington to provide support, services, or repair; THE UNIVERSITY OF WASHINGTON DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, WITH REGARD TO THIS SOFTWARE, INCLUDING WITHOUT LIMITATION ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, AND IN NO EVENT SHALL THE UNIVERSITY OF WASHINGTON BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, TORT (INCLUDING NEGLIGENCE) OR STRICT LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /* ********************************************************************** Copyright 2005 Rensselaer Polytechnic Institute. All worldwide rights reserved. Redistribution and use of this distribution in source and binary forms, with or without modification, are permitted provided that: The above copyright notice and this permission notice appear in all copies and supporting documentation; The name, identifiers, and trademarks of Rensselaer Polytechnic Institute are not used in advertising or publicity without the express prior written permission of Rensselaer Polytechnic Institute; DISCLAIMER: The software is distributed" AS IS" without any express or implied warranty, including but not limited to, any implied warranties of merchantability or fitness for a particular purpose or any warrant)' of non-infringement of any current or pending patent rights. The authors of the software make no representations about the suitability of this software for any particular purpose. The entire risk as to the quality and performance of the software is with the user. Should the software prove defective, the user assumes the cost of all necessary servicing, repair or correction. In particular, neither Rensselaer Polytechnic Institute, nor the authors of the software are liable for any indirect, special, consequential, or incidental damages related to the software, to the maximum extent the law permits. 
*/ package org.bedework.dumprestore.restore.rules; import org.bedework.calfacade.BwUser; import org.bedework.calfacade.svc.BwPreferences; import org.bedework.calfacade.svc.BwSubscription; import org.bedework.calfacade.svc.BwView; import org.bedework.dumprestore.restore.RestoreGlobals; /** * @author Mike Douglass [email protected] * @version 1.0 */ public class UserPrefsFieldRule extends EntityFieldRule { UserPrefsFieldRule(RestoreGlobals globals) { super(globals); } public void field(String name) throws java.lang.Exception{ BwPreferences p = (BwPreferences)top(); if (ownedEntityTags(p, name)) { return; } if (name.equals("email")) { p.setEmail(stringFld()); } else if (name.equals("default-calendar")) { p.setDefaultCalendar(calendarFld()); } else if (name.equals("skinName")) { p.setSkinName(stringFld()); } else if (name.equals("skinStyle")) { p.setSkinStyle(stringFld()); } else if (name.equals("preferredView")) { p.setPreferredView(stringFld()); } else if (name.equals("subscriptions")) { // Nothing to do now } else if (name.equals("workDays")) { p.setWorkDays(stringFld()); } else if (name.equals("workdayStart")) { p.setWorkdayStart(intFld()); } else if (name.equals("workdayEnd")) { p.setWorkdayEnd(intFld()); // subscription fields } else if (name.equals("subscription")) { globals.subscriptionsTbl.put(p.getOwner(), globals.curSub); p.addSubscription(globals.curSub); globals.curSub = null; } else if (name.equals("sub-id")) { globals.curSub = new BwSubscription(); globals.curSub.setId(intFld()); } else if (name.equals("sub-seq")) { globals.curSub.setSeq(intFld()); } else if (name.equals("sub-name")) { globals.curSub.setName(stringFld()); } else if (name.equals("sub-owner")) { BwUser sowner = userFld(); if (!p.getOwner().equals(sowner)) { error("Subscription owners don't match for " + globals.curSub); error(" Found owner " + sowner + " expected " + p.getOwner()); } globals.curSub.setOwner(p.getOwner()); } else if (name.equals("sub-uri")) { globals.curSub.setUri(stringFld()); } else if (name.equals("sub-affectsFreeBusy")) { globals.curSub.setAffectsFreeBusy(booleanFld()); } else if (name.equals("sub-display")) { globals.curSub.setDisplay(booleanFld()); } else if (name.equals("sub-style")) { globals.curSub.setStyle(stringFld()); } else if (name.equals("sub-internalSubscription")) { globals.curSub.setInternalSubscription(booleanFld()); } else if (name.equals("sub-emailNotifications")) { globals.curSub.setEmailNotifications(booleanFld()); } else if (name.equals("sub-calendarDeleted")) { globals.curSub.setCalendarDeleted(booleanFld()); } else if (name.equals("sub-unremoveable")) { globals.curSub.setUnremoveable(booleanFld()); // view fields } else if (name.equals("view")) { p.addView(globals.curView); globals.curView = null; } else if (name.equals("view-id")) { globals.curView = new BwView(); globals.curView.setId(intFld()); } else if (name.equals("view-seq")) { globals.curView.setSeq(intFld()); } else if (name.equals("view-name")) { globals.curView.setName(stringFld()); } else if (name.equals("view-owner")) { BwUser vowner = userFld(); if (!p.getOwner().equals(vowner)) { error("View owners don't match for " + globals.curView); error(" Found owner " + vowner + " expected " + p.getOwner()); } globals.curView.setOwner(p.getOwner()); } else if (name.equals("view-subscriptions")) { } else if (name.equals("view-sub-id")) { BwSubscription sub = globals.subscriptionsTbl.getSub(p.getOwner(), intFld()); if (sub == null) { error(" Missing subscription " + intFld() + " for view " + globals.curView); } else { 
globals.curView.addSubscription(sub); } } } }
calendar3/dumprestore/src/org/bedework/dumprestore/restore/rules/UserPrefsFieldRule.java
/* Copyright (c) 2000-2005 University of Washington. All rights reserved. Redistribution and use of this distribution in source and binary forms, with or without modification, are permitted provided that: The above copyright notice and this permission notice appear in all copies and supporting documentation; The name, identifiers, and trademarks of the University of Washington are not used in advertising or publicity without the express prior written permission of the University of Washington; Recipients acknowledge that this distribution is made available as a research courtesy, "as is", potentially with defects, without any obligation on the part of the University of Washington to provide support, services, or repair; THE UNIVERSITY OF WASHINGTON DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, WITH REGARD TO THIS SOFTWARE, INCLUDING WITHOUT LIMITATION ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, AND IN NO EVENT SHALL THE UNIVERSITY OF WASHINGTON BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, TORT (INCLUDING NEGLIGENCE) OR STRICT LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /* ********************************************************************** Copyright 2005 Rensselaer Polytechnic Institute. All worldwide rights reserved. Redistribution and use of this distribution in source and binary forms, with or without modification, are permitted provided that: The above copyright notice and this permission notice appear in all copies and supporting documentation; The name, identifiers, and trademarks of Rensselaer Polytechnic Institute are not used in advertising or publicity without the express prior written permission of Rensselaer Polytechnic Institute; DISCLAIMER: The software is distributed" AS IS" without any express or implied warranty, including but not limited to, any implied warranties of merchantability or fitness for a particular purpose or any warrant)' of non-infringement of any current or pending patent rights. The authors of the software make no representations about the suitability of this software for any particular purpose. The entire risk as to the quality and performance of the software is with the user. Should the software prove defective, the user assumes the cost of all necessary servicing, repair or correction. In particular, neither Rensselaer Polytechnic Institute, nor the authors of the software are liable for any indirect, special, consequential, or incidental damages related to the software, to the maximum extent the law permits. 
*/ package org.bedework.dumprestore.restore.rules; import org.bedework.calfacade.svc.BwPreferences; import org.bedework.calfacade.svc.BwSubscription; import org.bedework.calfacade.svc.BwView; import org.bedework.dumprestore.restore.RestoreGlobals; /** * @author Mike Douglass [email protected] * @version 1.0 */ public class UserPrefsFieldRule extends EntityFieldRule { UserPrefsFieldRule(RestoreGlobals globals) { super(globals); } public void field(String name) throws java.lang.Exception{ BwPreferences p = (BwPreferences)top(); if (ownedEntityTags(p, name)) { return; } if (name.equals("email")) { p.setEmail(stringFld()); } else if (name.equals("default-calendar")) { p.setDefaultCalendar(calendarFld()); } else if (name.equals("skinName")) { p.setSkinName(stringFld()); } else if (name.equals("skinStyle")) { p.setSkinStyle(stringFld()); } else if (name.equals("preferredView")) { p.setPreferredView(stringFld()); } else if (name.equals("subscriptions")) { // Nothing to do now } else if (name.equals("workDays")) { p.setWorkDays(stringFld()); } else if (name.equals("workdayStart")) { p.setWorkdayStart(intFld()); } else if (name.equals("workdayEnd")) { p.setWorkdayEnd(intFld()); // subscription fields } else if (name.equals("subscription")) { globals.subscriptionsTbl.put(p.getOwner(), globals.curSub); p.addSubscription(globals.curSub); globals.curSub = null; } else if (name.equals("sub-id")) { globals.curSub = new BwSubscription(); globals.curSub.setId(intFld()); } else if (name.equals("sub-seq")) { globals.curSub.setSeq(intFld()); } else if (name.equals("sub-name")) { globals.curSub.setName(stringFld()); } else if (name.equals("sub-owner")) { if (p.getOwner().getId() != intFld()) { error("Subscription owners don't match for " + globals.curSub); error(" Found owner id " + intFld() + " expected " + p); } globals.curSub.setOwner(p.getOwner()); } else if (name.equals("sub-uri")) { globals.curSub.setUri(stringFld()); } else if (name.equals("sub-affectsFreeBusy")) { globals.curSub.setAffectsFreeBusy(booleanFld()); } else if (name.equals("sub-display")) { globals.curSub.setDisplay(booleanFld()); } else if (name.equals("sub-style")) { globals.curSub.setStyle(stringFld()); } else if (name.equals("sub-internalSubscription")) { globals.curSub.setInternalSubscription(booleanFld()); } else if (name.equals("sub-emailNotifications")) { globals.curSub.setEmailNotifications(booleanFld()); } else if (name.equals("sub-calendarDeleted")) { globals.curSub.setCalendarDeleted(booleanFld()); } else if (name.equals("sub-unremoveable")) { globals.curSub.setUnremoveable(booleanFld()); // view fields } else if (name.equals("view")) { p.addView(globals.curView); globals.curView = null; } else if (name.equals("view-id")) { globals.curView = new BwView(); globals.curView.setId(intFld()); } else if (name.equals("view-seq")) { globals.curView.setSeq(intFld()); } else if (name.equals("view-name")) { globals.curView.setName(stringFld()); } else if (name.equals("view-owner")) { if (p.getOwner().getId() != intFld()) { error("Viewowners don't match for " + globals.curView); error(" Found owner id " + intFld() + " expected " + p.getOwner()); } globals.curView.setOwner(p.getOwner()); } else if (name.equals("view-subscriptions")) { } else if (name.equals("view-sub-id")) { BwSubscription sub = globals.subscriptionsTbl.getSub(p.getOwner(), intFld()); if (sub == null) { error(" Missing subscription " + intFld() + " for view " + globals.curView); } else { globals.curView.addSubscription(sub); } } } }
Small fix to remove incorrect error message caused by remapping userid 0
calendar3/dumprestore/src/org/bedework/dumprestore/restore/rules/UserPrefsFieldRule.java
Small fix to remove incorrect error message caused by remapping userid 0
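The diff in this record replaces raw owner-id comparisons (p.getOwner().getId() != intFld()) with comparisons against the resolved user (userFld()), and the message attributes the spurious error to remapping userid 0 during restore. Below is a small, self-contained sketch of that failure mode; the User type, the remap table, and the ids are all invented for illustration and assume a recent JDK (records).

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

public class OwnerCheckSketch {
    record User(int id, String account) {}

    public static void main(String[] args) {
        // The restore assigned a new id to the account that was id 0 in the dump.
        Map<Integer, User> remapped = new HashMap<>();
        remapped.put(0, new User(42, "admin"));

        User prefsOwner = remapped.get(0);   // owner already resolved through the remap
        int dumpedOwnerId = 0;               // raw id exactly as it appears in the dump

        // Old-style check: raw dumped id vs. remapped id -> false alarm.
        if (prefsOwner.id() != dumpedOwnerId) {
            System.out.println("Incorrect error: owner ids differ ("
                + prefsOwner.id() + " vs " + dumpedOwnerId + ")");
        }

        // New-style check: resolve the dumped id first, then compare users.
        User dumpedOwner = remapped.get(dumpedOwnerId);
        if (!Objects.equals(prefsOwner, dumpedOwner)) {
            System.out.println("Real mismatch");
        } else {
            System.out.println("Owners match once the dumped id is resolved through the remap");
        }
    }
}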
Java
apache-2.0
a9bad7b1a06758f5dad72e436aed8dc6d21537e5
0
RohanHart/camel,jkorab/camel,CandleCandle/camel,hqstevenson/camel,acartapanis/camel,apache/camel,YMartsynkevych/camel,apache/camel,jmandawg/camel,snadakuduru/camel,nikvaessen/camel,nicolaferraro/camel,akhettar/camel,haku/camel,satishgummadelli/camel,borcsokj/camel,dmvolod/camel,YoshikiHigo/camel,CodeSmell/camel,edigrid/camel,neoramon/camel,lasombra/camel,manuelh9r/camel,Fabryprog/camel,lburgazzoli/apache-camel,yuruki/camel,dkhanolkar/camel,pmoerenhout/camel,jamesnetherton/camel,CodeSmell/camel,nikvaessen/camel,bgaudaen/camel,prashant2402/camel,gautric/camel,askannon/camel,anton-k11/camel,brreitme/camel,NickCis/camel,dkhanolkar/camel,christophd/camel,manuelh9r/camel,sabre1041/camel,jonmcewen/camel,yuruki/camel,driseley/camel,manuelh9r/camel,bhaveshdt/camel,kevinearls/camel,driseley/camel,atoulme/camel,yury-vashchyla/camel,royopa/camel,pplatek/camel,davidkarlsen/camel,iweiss/camel,YMartsynkevych/camel,alvinkwekel/camel,Fabryprog/camel,ge0ffrey/camel,chirino/camel,DariusX/camel,bgaudaen/camel,RohanHart/camel,nicolaferraro/camel,objectiser/camel,haku/camel,zregvart/camel,satishgummadelli/camel,haku/camel,Fabryprog/camel,anoordover/camel,noelo/camel,askannon/camel,atoulme/camel,ge0ffrey/camel,anton-k11/camel,nikhilvibhav/camel,pkletsko/camel,YoshikiHigo/camel,borcsokj/camel,sabre1041/camel,YMartsynkevych/camel,christophd/camel,acartapanis/camel,salikjan/camel,NickCis/camel,mgyongyosi/camel,arnaud-deprez/camel,trohovsky/camel,gyc567/camel,mzapletal/camel,jmandawg/camel,chirino/camel,snurmine/camel,kevinearls/camel,eformat/camel,jlpedrosa/camel,veithen/camel,eformat/camel,drsquidop/camel,jkorab/camel,yury-vashchyla/camel,ssharma/camel,drsquidop/camel,YoshikiHigo/camel,iweiss/camel,ekprayas/camel,tlehoux/camel,lburgazzoli/camel,scranton/camel,mzapletal/camel,jarst/camel,noelo/camel,RohanHart/camel,apache/camel,gnodet/camel,nboukhed/camel,jarst/camel,NickCis/camel,chirino/camel,rmarting/camel,oalles/camel,allancth/camel,gilfernandes/camel,brreitme/camel,jamesnetherton/camel,jlpedrosa/camel,sverkera/camel,snadakuduru/camel,johnpoth/camel,onders86/camel,borcsokj/camel,tkopczynski/camel,CandleCandle/camel,duro1/camel,erwelch/camel,cunningt/camel,tkopczynski/camel,sabre1041/camel,sirlatrom/camel,punkhorn/camel-upstream,trohovsky/camel,punkhorn/camel-upstream,mgyongyosi/camel,MrCoder/camel,sverkera/camel,chanakaudaya/camel,dmvolod/camel,neoramon/camel,neoramon/camel,borcsokj/camel,davidwilliams1978/camel,yury-vashchyla/camel,nikvaessen/camel,pmoerenhout/camel,davidkarlsen/camel,gilfernandes/camel,manuelh9r/camel,sebi-hgdata/camel,jarst/camel,davidkarlsen/camel,eformat/camel,cunningt/camel,chirino/camel,acartapanis/camel,cunningt/camel,CodeSmell/camel,Thopap/camel,alvinkwekel/camel,YoshikiHigo/camel,MrCoder/camel,gyc567/camel,gyc567/camel,bhaveshdt/camel,jlpedrosa/camel,isavin/camel,tlehoux/camel,erwelch/camel,woj-i/camel,snurmine/camel,bhaveshdt/camel,lburgazzoli/camel,yuruki/camel,brreitme/camel,chanakaudaya/camel,tadayosi/camel,nicolaferraro/camel,nboukhed/camel,nikvaessen/camel,atoulme/camel,cunningt/camel,chanakaudaya/camel,cunningt/camel,erwelch/camel,hqstevenson/camel,punkhorn/camel-upstream,pkletsko/camel,w4tson/camel,w4tson/camel,tadayosi/camel,jonmcewen/camel,joakibj/camel,duro1/camel,DariusX/camel,yury-vashchyla/camel,curso007/camel,adessaigne/camel,joakibj/camel,tdiesler/camel,apache/camel,rmarting/camel,manuelh9r/camel,ge0ffrey/camel,jkorab/camel,tadayosi/camel,ssharma/camel,driseley/camel,alvinkwekel/camel,iweiss/camel,sverkera/camel,jlpedrosa/camel,cunningt/camel,snurmine/camel,nboukhed/cam
el,mcollovati/camel,royopa/camel,nikhilvibhav/camel,MrCoder/camel,w4tson/camel,tdiesler/camel,arnaud-deprez/camel,pmoerenhout/camel,yury-vashchyla/camel,trohovsky/camel,erwelch/camel,pkletsko/camel,akhettar/camel,bhaveshdt/camel,jmandawg/camel,satishgummadelli/camel,adessaigne/camel,anton-k11/camel,yogamaha/camel,YoshikiHigo/camel,johnpoth/camel,mzapletal/camel,pax95/camel,nikvaessen/camel,drsquidop/camel,sirlatrom/camel,edigrid/camel,brreitme/camel,pkletsko/camel,jlpedrosa/camel,objectiser/camel,pax95/camel,adessaigne/camel,ssharma/camel,johnpoth/camel,jollygeorge/camel,royopa/camel,curso007/camel,tkopczynski/camel,neoramon/camel,ekprayas/camel,NickCis/camel,davidwilliams1978/camel,rmarting/camel,yogamaha/camel,gilfernandes/camel,askannon/camel,satishgummadelli/camel,dkhanolkar/camel,gyc567/camel,satishgummadelli/camel,tadayosi/camel,akhettar/camel,driseley/camel,scranton/camel,FingolfinTEK/camel,adessaigne/camel,royopa/camel,nicolaferraro/camel,sabre1041/camel,jonmcewen/camel,onders86/camel,ge0ffrey/camel,lburgazzoli/apache-camel,drsquidop/camel,jkorab/camel,scranton/camel,anoordover/camel,kevinearls/camel,w4tson/camel,ullgren/camel,duro1/camel,RohanHart/camel,chanakaudaya/camel,objectiser/camel,bfitzpat/camel,driseley/camel,mgyongyosi/camel,noelo/camel,sebi-hgdata/camel,christophd/camel,johnpoth/camel,jkorab/camel,YMartsynkevych/camel,acartapanis/camel,mzapletal/camel,woj-i/camel,askannon/camel,joakibj/camel,JYBESSON/camel,sebi-hgdata/camel,lowwool/camel,nikvaessen/camel,zregvart/camel,ullgren/camel,dmvolod/camel,prashant2402/camel,noelo/camel,yogamaha/camel,gnodet/camel,CodeSmell/camel,joakibj/camel,tlehoux/camel,mcollovati/camel,borcsokj/camel,gilfernandes/camel,veithen/camel,lburgazzoli/apache-camel,woj-i/camel,woj-i/camel,w4tson/camel,gyc567/camel,iweiss/camel,CandleCandle/camel,mgyongyosi/camel,pplatek/camel,jkorab/camel,FingolfinTEK/camel,lowwool/camel,ekprayas/camel,chanakaudaya/camel,royopa/camel,snurmine/camel,JYBESSON/camel,RohanHart/camel,yuruki/camel,bfitzpat/camel,atoulme/camel,sirlatrom/camel,tkopczynski/camel,FingolfinTEK/camel,anton-k11/camel,DariusX/camel,salikjan/camel,MrCoder/camel,tlehoux/camel,isavin/camel,trohovsky/camel,joakibj/camel,duro1/camel,lburgazzoli/camel,yogamaha/camel,gyc567/camel,johnpoth/camel,veithen/camel,nboukhed/camel,lasombra/camel,ekprayas/camel,royopa/camel,oalles/camel,RohanHart/camel,anoordover/camel,christophd/camel,yury-vashchyla/camel,lburgazzoli/camel,w4tson/camel,hqstevenson/camel,jamesnetherton/camel,zregvart/camel,tadayosi/camel,mgyongyosi/camel,woj-i/camel,lburgazzoli/camel,anton-k11/camel,snadakuduru/camel,ge0ffrey/camel,gilfernandes/camel,DariusX/camel,curso007/camel,arnaud-deprez/camel,JYBESSON/camel,ekprayas/camel,eformat/camel,davidkarlsen/camel,onders86/camel,acartapanis/camel,sirlatrom/camel,dkhanolkar/camel,snadakuduru/camel,davidwilliams1978/camel,tdiesler/camel,duro1/camel,NickCis/camel,onders86/camel,lburgazzoli/apache-camel,chirino/camel,akhettar/camel,atoulme/camel,YMartsynkevych/camel,snurmine/camel,YoshikiHigo/camel,bfitzpat/camel,jamesnetherton/camel,Thopap/camel,scranton/camel,trohovsky/camel,anton-k11/camel,pmoerenhout/camel,oalles/camel,prashant2402/camel,gautric/camel,pplatek/camel,prashant2402/camel,lowwool/camel,isavin/camel,nboukhed/camel,anoordover/camel,gautric/camel,tdiesler/camel,gautric/camel,mcollovati/camel,CandleCandle/camel,jollygeorge/camel,driseley/camel,bfitzpat/camel,zregvart/camel,ssharma/camel,rmarting/camel,sebi-hgdata/camel,lowwool/camel,sirlatrom/camel,jollygeorge/camel,apache/camel,joakibj/camel
,christophd/camel,mgyongyosi/camel,isavin/camel,adessaigne/camel,tlehoux/camel,jmandawg/camel,satishgummadelli/camel,CandleCandle/camel,yogamaha/camel,veithen/camel,oalles/camel,haku/camel,allancth/camel,rmarting/camel,jollygeorge/camel,pax95/camel,dmvolod/camel,sverkera/camel,sverkera/camel,brreitme/camel,tlehoux/camel,ekprayas/camel,pkletsko/camel,kevinearls/camel,nboukhed/camel,oalles/camel,NickCis/camel,akhettar/camel,edigrid/camel,borcsokj/camel,FingolfinTEK/camel,ssharma/camel,edigrid/camel,sebi-hgdata/camel,Thopap/camel,gautric/camel,jonmcewen/camel,tadayosi/camel,anoordover/camel,gnodet/camel,edigrid/camel,alvinkwekel/camel,YMartsynkevych/camel,onders86/camel,noelo/camel,Thopap/camel,eformat/camel,bgaudaen/camel,scranton/camel,JYBESSON/camel,jarst/camel,bhaveshdt/camel,bfitzpat/camel,jamesnetherton/camel,woj-i/camel,prashant2402/camel,manuelh9r/camel,drsquidop/camel,iweiss/camel,pax95/camel,bgaudaen/camel,jonmcewen/camel,MrCoder/camel,Thopap/camel,tkopczynski/camel,trohovsky/camel,snadakuduru/camel,pplatek/camel,lburgazzoli/camel,allancth/camel,punkhorn/camel-upstream,Thopap/camel,curso007/camel,duro1/camel,lburgazzoli/apache-camel,arnaud-deprez/camel,veithen/camel,MrCoder/camel,ssharma/camel,bfitzpat/camel,ge0ffrey/camel,kevinearls/camel,acartapanis/camel,snadakuduru/camel,tdiesler/camel,adessaigne/camel,askannon/camel,jollygeorge/camel,akhettar/camel,jmandawg/camel,pmoerenhout/camel,onders86/camel,sabre1041/camel,JYBESSON/camel,allancth/camel,chanakaudaya/camel,sirlatrom/camel,curso007/camel,brreitme/camel,prashant2402/camel,pplatek/camel,bgaudaen/camel,sabre1041/camel,yuruki/camel,lasombra/camel,lowwool/camel,lowwool/camel,sverkera/camel,gilfernandes/camel,pax95/camel,noelo/camel,ullgren/camel,rmarting/camel,dkhanolkar/camel,johnpoth/camel,davidwilliams1978/camel,pplatek/camel,hqstevenson/camel,eformat/camel,lasombra/camel,yuruki/camel,davidwilliams1978/camel,curso007/camel,jarst/camel,tkopczynski/camel,Fabryprog/camel,pmoerenhout/camel,dmvolod/camel,sebi-hgdata/camel,bhaveshdt/camel,pplatek/camel,nikhilvibhav/camel,erwelch/camel,mzapletal/camel,ullgren/camel,FingolfinTEK/camel,jlpedrosa/camel,isavin/camel,hqstevenson/camel,oalles/camel,isavin/camel,atoulme/camel,dmvolod/camel,anoordover/camel,haku/camel,nikhilvibhav/camel,neoramon/camel,snurmine/camel,edigrid/camel,lburgazzoli/apache-camel,allancth/camel,gnodet/camel,apache/camel,yogamaha/camel,davidwilliams1978/camel,gautric/camel,haku/camel,pkletsko/camel,drsquidop/camel,mzapletal/camel,dkhanolkar/camel,hqstevenson/camel,christophd/camel,lasombra/camel,kevinearls/camel,FingolfinTEK/camel,erwelch/camel,arnaud-deprez/camel,chirino/camel,neoramon/camel,askannon/camel,bgaudaen/camel,jarst/camel,jmandawg/camel,gnodet/camel,scranton/camel,mcollovati/camel,CandleCandle/camel,jonmcewen/camel,arnaud-deprez/camel,veithen/camel,jamesnetherton/camel,objectiser/camel,lasombra/camel,allancth/camel,jollygeorge/camel,JYBESSON/camel,tdiesler/camel,iweiss/camel,pax95/camel
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.kafka; import java.util.Properties; import kafka.javaapi.producer.Producer; import kafka.producer.KeyedMessage; import kafka.producer.ProducerConfig; import org.apache.camel.CamelException; import org.apache.camel.CamelExchangeException; import org.apache.camel.Exchange; import org.apache.camel.impl.DefaultProducer; /** * */ public class KafkaProducer<K, V> extends DefaultProducer { protected Producer<K, V> producer; private final KafkaEndpoint endpoint; public KafkaProducer(KafkaEndpoint endpoint) { super(endpoint); this.endpoint = endpoint; } @Override protected void doStop() throws Exception { if (producer != null) { producer.close(); } } Properties getProps() { Properties props = endpoint.getConfiguration().createProducerProperties(); if (endpoint.getBrokers() != null) { props.put("metadata.broker.list", endpoint.getBrokers()); } return props; } @Override protected void doStart() throws Exception { Properties props = getProps(); ProducerConfig config = new ProducerConfig(props); producer = new Producer<K, V>(config); } @Override @SuppressWarnings("unchecked") public void process(Exchange exchange) throws CamelException { String topic = endpoint.getTopic(); if (!endpoint.isBridgeEndpoint()) { topic = exchange.getIn().getHeader(KafkaConstants.TOPIC, topic, String.class); } if (topic == null) { throw new CamelExchangeException("No topic key set", exchange); } K partitionKey = (K) exchange.getIn().getHeader(KafkaConstants.PARTITION_KEY); boolean hasPartitionKey = partitionKey != null; K messageKey = (K) exchange.getIn().getHeader(KafkaConstants.KEY); boolean hasMessageKey = messageKey != null; V msg = (V) exchange.getIn().getBody(); KeyedMessage<K, V> data; if (hasPartitionKey && hasMessageKey) { data = new KeyedMessage<K, V>(topic, messageKey, partitionKey, msg); } else if (hasPartitionKey) { data = new KeyedMessage<K, V>(topic, partitionKey, msg); } else if (hasMessageKey) { data = new KeyedMessage<K, V>(topic, messageKey, msg); } else { log.warn("No message key or partition key set"); data = new KeyedMessage<K, V>(topic, messageKey, partitionKey, msg); } producer.send(data); } }
components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.kafka; import java.util.Properties; import kafka.javaapi.producer.Producer; import kafka.producer.KeyedMessage; import kafka.producer.ProducerConfig; import org.apache.camel.CamelException; import org.apache.camel.CamelExchangeException; import org.apache.camel.Exchange; import org.apache.camel.impl.DefaultProducer; /** * */ public class KafkaProducer<K, V> extends DefaultProducer { protected Producer<K, V> producer; private final KafkaEndpoint endpoint; public KafkaProducer(KafkaEndpoint endpoint) { super(endpoint); this.endpoint = endpoint; } @Override protected void doStop() throws Exception { if (producer != null) { producer.close(); } } Properties getProps() { Properties props = endpoint.getConfiguration().createProducerProperties(); if (endpoint.getBrokers() != null) { props.put("metadata.broker.list", endpoint.getBrokers()); } return props; } @Override protected void doStart() throws Exception { Properties props = getProps(); ProducerConfig config = new ProducerConfig(props); producer = new Producer<K, V>(config); } @Override @SuppressWarnings("unchecked") public void process(Exchange exchange) throws CamelException { String topic = endpoint.getTopic(); if (!endpoint.isBridgeEndpoint()) { topic = exchange.getIn().getHeader(KafkaConstants.TOPIC, endpoint.getTopic(), String.class); } if (topic == null) { throw new CamelExchangeException("No topic key set", exchange); } K partitionKey = (K) exchange.getIn().getHeader(KafkaConstants.PARTITION_KEY); boolean hasPartitionKey = partitionKey != null; K messageKey = (K) exchange.getIn().getHeader(KafkaConstants.KEY); boolean hasMessageKey = messageKey != null; V msg = (V) exchange.getIn().getBody(); KeyedMessage<K, V> data; if (hasPartitionKey && hasMessageKey) { data = new KeyedMessage<K, V>(topic, messageKey, partitionKey, msg); } else if (hasPartitionKey) { data = new KeyedMessage<K, V>(topic, partitionKey, msg); } else if (hasMessageKey) { data = new KeyedMessage<K, V>(topic, messageKey, msg); } else { log.warn("No message key or partition key set"); data = new KeyedMessage<K, V>(topic, messageKey, partitionKey, msg); } producer.send(data); } }
CAMEL-8923 Use local var instead of getter again.
components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
CAMEL-8923 Use local var instead of getter again.
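The change recorded above swaps the default argument of the header lookup from a second endpoint.getTopic() call to the topic local that was just assigned from the same getter. The sketch below shows that general pattern in isolation; Endpoint, the header map, and the "kafka.TOPIC" key are invented stand-ins rather than Camel's real types.

import java.util.Map;

public class LocalVarDefaultSketch {
    static class Endpoint {
        private int calls;
        String getTopic() { calls++; return "orders"; }
        int getterCalls() { return calls; }
    }

    public static void main(String[] args) {
        Endpoint endpoint = new Endpoint();
        Map<String, String> headers = Map.of(); // no per-message override present

        String topic = endpoint.getTopic();
        // Reuse the local as the fallback; the getter is not consulted a second time.
        topic = headers.getOrDefault("kafka.TOPIC", topic);

        System.out.println("topic=" + topic + ", getter calls=" + endpoint.getterCalls()); // 1 call
    }
}

Reusing the local keeps a single source of truth for the value already read and avoids invoking the getter twice when the header is absent.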
Java
apache-2.0
7c49c4fc9bc3f10419efb18f1bb8d6f10ecceb47
0
caskdata/cdap,chtyim/cdap,caskdata/cdap,mpouttuclarke/cdap,caskdata/cdap,chtyim/cdap,anthcp/cdap,chtyim/cdap,hsaputra/cdap,anthcp/cdap,caskdata/cdap,anthcp/cdap,chtyim/cdap,mpouttuclarke/cdap,caskdata/cdap,mpouttuclarke/cdap,chtyim/cdap,hsaputra/cdap,hsaputra/cdap,caskdata/cdap,anthcp/cdap,hsaputra/cdap,anthcp/cdap,mpouttuclarke/cdap,chtyim/cdap,mpouttuclarke/cdap,hsaputra/cdap
package com.continuuity.data2.transaction; import com.continuuity.data.runtime.DataFabricModules; import com.continuuity.data2.transaction.inmemory.InMemoryTransactionManager; import com.continuuity.data2.transaction.inmemory.InMemoryTxSystemClient; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Singleton; import com.google.inject.util.Modules; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import javax.annotation.Nullable; import java.util.Collection; import java.util.List; /** * Tests the transaction executor. */ public class TransactionExecutorTest { static final Injector INJECTOR = Guice.createInjector(Modules.override( new DataFabricModules().getInMemoryModules()).with(new AbstractModule() { @Override protected void configure() { bind(InMemoryTransactionManager.class).toInstance(new InMemoryTransactionManager()); bind(TransactionSystemClient.class).to(DummyTxClient.class).in(Singleton.class); } })); static final DummyTxClient TX_CLIENT = (DummyTxClient) INJECTOR.getInstance(TransactionSystemClient.class); static final TransactionExecutorFactory factory = INJECTOR.getInstance(TransactionExecutorFactory.class); final DummyTxAware ds1 = new DummyTxAware(), ds2 = new DummyTxAware(); final Collection<TransactionAware> txAwares = ImmutableList.<TransactionAware>of(ds1, ds2); private DefaultTransactionExecutor getExecutor() { return factory.createExecutor(txAwares); } static final byte[] A = { 'a' }; static final byte[] B = { 'b' }; final Function<Integer, Integer> testFunction = new Function<Integer, Integer>() { @Nullable @Override public Integer apply(@Nullable Integer input) { ds1.addChange(A); ds2.addChange(B); if (input == null) { throw new RuntimeException("function failed"); } return input * input; } }; @Before public void resetTxAwares() { ds1.reset(); ds2.reset(); } @Test public void testSuccessful() throws TransactionFailureException { // execute: add a change to ds1 and ds2 Integer result = getExecutor().execute(testFunction, 10); // verify both are committed and post-committed Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertTrue(ds2.committed); Assert.assertTrue(ds1.postCommitted); Assert.assertTrue(ds2.postCommitted); Assert.assertFalse(ds1.rolledBack); Assert.assertFalse(ds2.rolledBack); Assert.assertTrue(100 == result); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Committed); } @Test public void testPostCommitFailure() throws TransactionFailureException { ds1.failPostCommitTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("post commit failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("post failure", e.getCause().getMessage()); } // verify both are committed and post-committed Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertTrue(ds2.committed); Assert.assertTrue(ds1.postCommitted); Assert.assertTrue(ds2.postCommitted); Assert.assertFalse(ds1.rolledBack); Assert.assertFalse(ds2.rolledBack); 
Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Committed); } @Test public void testPersistFailure() throws TransactionFailureException { ds1.failCommitTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("persist failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("persist failure", e.getCause().getMessage()); } // verify both are rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } @Test public void testPersistFalse() throws TransactionFailureException { ds1.failCommitTxOnce = InduceFailure.ReturnFalse; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("persist failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertNull(e.getCause()); // in this case, the ds simply returned false } // verify both are rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } @Test public void testPersistAndRollbackFailure() throws TransactionFailureException { ds1.failCommitTxOnce = InduceFailure.ThrowException; ds1.failRollbackTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("persist failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("persist failure", e.getCause().getMessage()); } // verify both are rolled back and tx is invalidated Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Invalidated); } @Test public void testPersistAndRollbackFalse() throws TransactionFailureException { ds1.failCommitTxOnce = InduceFailure.ReturnFalse; ds1.failRollbackTxOnce = InduceFailure.ReturnFalse; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("persist failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertNull(e.getCause()); // in this case, the ds simply returned false } // verify both are rolled back and tx is invalidated Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); 
Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Invalidated); } @Test public void testCommitFalse() throws TransactionFailureException { TX_CLIENT.failCommitOnce = true; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("commit failed - exception should be thrown"); } catch (TransactionConflictException e) { Assert.assertNull(e.getCause()); } // verify both are rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertTrue(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } @Test public void testCanCommitFalse() throws TransactionFailureException { TX_CLIENT.failCanCommitOnce = true; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("commit failed - exception should be thrown"); } catch (TransactionConflictException e) { Assert.assertNull(e.getCause()); } // verify both are rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertFalse(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } @Test public void testChangesAndRollbackFailure() throws TransactionFailureException { ds1.failChangesTxOnce = InduceFailure.ThrowException; ds1.failRollbackTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("get changes failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("changes failure", e.getCause().getMessage()); } // verify both are rolled back and tx is invalidated Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertFalse(ds2.checked); Assert.assertFalse(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Invalidated); } @Test public void testFunctionAndRollbackFailure() throws TransactionFailureException { ds1.failRollbackTxOnce = InduceFailure.ReturnFalse; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, null); Assert.fail("function failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("function failed", e.getCause().getMessage()); } // verify both are rolled back and tx is invalidated Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertFalse(ds1.checked); Assert.assertFalse(ds2.checked); Assert.assertFalse(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Invalidated); } @Test public void 
testStartAndRollbackFailure() throws TransactionFailureException { ds1.failStartTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("start failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("start failure", e.getCause().getMessage()); } // verify both are not rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertFalse(ds2.started); Assert.assertFalse(ds1.checked); Assert.assertFalse(ds2.checked); Assert.assertFalse(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertFalse(ds1.rolledBack); Assert.assertFalse(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } enum InduceFailure { NoFailure, ReturnFalse, ThrowException } static class DummyTxAware implements TransactionAware { Transaction tx; boolean started = false; boolean committed = false; boolean checked = false; boolean rolledBack = false; boolean postCommitted = false; List<byte[]> changes = Lists.newArrayList(); InduceFailure failStartTxOnce = InduceFailure.NoFailure; InduceFailure failChangesTxOnce = InduceFailure.NoFailure; InduceFailure failCommitTxOnce = InduceFailure.NoFailure; InduceFailure failPostCommitTxOnce = InduceFailure.NoFailure; InduceFailure failRollbackTxOnce = InduceFailure.NoFailure; void addChange(byte[] key) { changes.add(key); } void reset() { tx = null; started = false; checked = false; committed = false; rolledBack = false; postCommitted = false; changes.clear(); } @Override public void startTx(Transaction tx) { reset(); started = true; this.tx = tx; if (failStartTxOnce == InduceFailure.ThrowException) { failStartTxOnce = InduceFailure.NoFailure; throw new RuntimeException("start failure"); } } @Override public Collection<byte[]> getTxChanges() { checked = true; if (failChangesTxOnce == InduceFailure.ThrowException) { failChangesTxOnce = InduceFailure.NoFailure; throw new RuntimeException("changes failure"); } return ImmutableList.copyOf(changes); } @Override public boolean commitTx() throws Exception { committed = true; if (failCommitTxOnce == InduceFailure.ThrowException) { failCommitTxOnce = InduceFailure.NoFailure; throw new RuntimeException("persist failure"); } if (failCommitTxOnce == InduceFailure.ReturnFalse) { failCommitTxOnce = InduceFailure.NoFailure; return false; } return true; } @Override public void postTxCommit() { postCommitted = true; if (failPostCommitTxOnce == InduceFailure.ThrowException) { failPostCommitTxOnce = InduceFailure.NoFailure; throw new RuntimeException("post failure"); } } @Override public boolean rollbackTx() throws Exception { rolledBack = true; if (failRollbackTxOnce == InduceFailure.ThrowException) { failRollbackTxOnce = InduceFailure.NoFailure; throw new RuntimeException("rollback failure"); } if (failRollbackTxOnce == InduceFailure.ReturnFalse) { failRollbackTxOnce = InduceFailure.NoFailure; return false; } return true; } @Override public String getName() { return "dummy"; } } static class DummyTxClient extends InMemoryTxSystemClient { boolean failCanCommitOnce = false; boolean failCommitOnce = false; enum CommitState { Started, Committed, Aborted, Invalidated } CommitState state = CommitState.Started; @Inject DummyTxClient(InMemoryTransactionManager txmgr) { super(txmgr); } @Override public boolean canCommit(Transaction tx, Collection<byte[]> changeIds) { if (failCanCommitOnce) { 
failCanCommitOnce = false; return false; } else { return super.canCommit(tx, changeIds); } } @Override public boolean commit(Transaction tx) { if (failCommitOnce) { failCommitOnce = false; return false; } else { state = CommitState.Committed; return super.commit(tx); } } @Override public Transaction startLong() { state = CommitState.Started; return super.startLong(); } @Override public Transaction startShort() { state = CommitState.Started; return super.startShort(); } @Override public Transaction startShort(int timeout) { state = CommitState.Started; return super.startShort(timeout); } @Override public void abort(Transaction tx) { state = CommitState.Aborted; super.abort(tx); } @Override public void invalidate(Transaction tx) { state = CommitState.Invalidated; super.invalidate(tx); } } }
data-fabric/src/test/java/com/continuuity/data2/transaction/TransactionExecutorTest.java
package com.continuuity.data2.transaction; import com.continuuity.data.runtime.DataFabricModules; import com.continuuity.data2.transaction.inmemory.InMemoryTransactionManager; import com.continuuity.data2.transaction.inmemory.InMemoryTxSystemClient; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Singleton; import com.google.inject.util.Modules; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import javax.annotation.Nullable; import java.util.Collection; import java.util.List; /** * Tests the transaction executor. */ public class TransactionExecutorTest { static final Injector INJECTOR = Guice.createInjector(Modules.override( new DataFabricModules().getInMemoryModules()).with(new AbstractModule() { @Override protected void configure() { bind(InMemoryTransactionManager.class).toInstance(new InMemoryTransactionManager()); bind(TransactionSystemClient.class).to(DummyTxClient.class).in(Singleton.class); } })); static final DummyTxClient TX_CLIENT = (DummyTxClient) INJECTOR.getInstance(TransactionSystemClient.class); final DummyTxAware ds1 = new DummyTxAware(), ds2 = new DummyTxAware(); private DefaultTransactionExecutor getExecutor() { return new DefaultTransactionExecutor(TX_CLIENT, ds1, ds2); } static final byte[] A = { 'a' }; static final byte[] B = { 'b' }; final Function<Integer, Integer> testFunction = new Function<Integer, Integer>() { @Nullable @Override public Integer apply(@Nullable Integer input) { ds1.addChange(A); ds2.addChange(B); if (input == null) { throw new RuntimeException("function failed"); } return input * input; } }; @Before public void resetTxAwares() { ds1.reset(); ds2.reset(); } @Test public void testSuccessful() throws TransactionFailureException { // execute: add a change to ds1 and ds2 Integer result = getExecutor().execute(testFunction, 10); // verify both are committed and post-committed Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertTrue(ds2.committed); Assert.assertTrue(ds1.postCommitted); Assert.assertTrue(ds2.postCommitted); Assert.assertFalse(ds1.rolledBack); Assert.assertFalse(ds2.rolledBack); Assert.assertTrue(100 == result); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Committed); } @Test public void testPostCommitFailure() throws TransactionFailureException { ds1.failPostCommitTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("post commit failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("post failure", e.getCause().getMessage()); } // verify both are committed and post-committed Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertTrue(ds2.committed); Assert.assertTrue(ds1.postCommitted); Assert.assertTrue(ds2.postCommitted); Assert.assertFalse(ds1.rolledBack); Assert.assertFalse(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Committed); } @Test public void testPersistFailure() throws TransactionFailureException { ds1.failCommitTxOnce = 
InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("persist failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("persist failure", e.getCause().getMessage()); } // verify both are rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } @Test public void testPersistFalse() throws TransactionFailureException { ds1.failCommitTxOnce = InduceFailure.ReturnFalse; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("persist failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertNull(e.getCause()); // in this case, the ds simply returned false } // verify both are rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } @Test public void testPersistAndRollbackFailure() throws TransactionFailureException { ds1.failCommitTxOnce = InduceFailure.ThrowException; ds1.failRollbackTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("persist failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("persist failure", e.getCause().getMessage()); } // verify both are rolled back and tx is invalidated Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Invalidated); } @Test public void testPersistAndRollbackFalse() throws TransactionFailureException { ds1.failCommitTxOnce = InduceFailure.ReturnFalse; ds1.failRollbackTxOnce = InduceFailure.ReturnFalse; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("persist failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertNull(e.getCause()); // in this case, the ds simply returned false } // verify both are rolled back and tx is invalidated Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Invalidated); } @Test public void testCommitFalse() throws TransactionFailureException 
{ TX_CLIENT.failCommitOnce = true; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("commit failed - exception should be thrown"); } catch (TransactionConflictException e) { Assert.assertNull(e.getCause()); } // verify both are rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertTrue(ds1.committed); Assert.assertTrue(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } @Test public void testCanCommitFalse() throws TransactionFailureException { TX_CLIENT.failCanCommitOnce = true; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("commit failed - exception should be thrown"); } catch (TransactionConflictException e) { Assert.assertNull(e.getCause()); } // verify both are rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertTrue(ds2.checked); Assert.assertFalse(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } @Test public void testChangesAndRollbackFailure() throws TransactionFailureException { ds1.failChangesTxOnce = InduceFailure.ThrowException; ds1.failRollbackTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, 10); Assert.fail("get changes failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("changes failure", e.getCause().getMessage()); } // verify both are rolled back and tx is invalidated Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertTrue(ds1.checked); Assert.assertFalse(ds2.checked); Assert.assertFalse(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Invalidated); } @Test public void testFunctionAndRollbackFailure() throws TransactionFailureException { ds1.failRollbackTxOnce = InduceFailure.ReturnFalse; // execute: add a change to ds1 and ds2 try { getExecutor().execute(testFunction, null); Assert.fail("function failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("function failed", e.getCause().getMessage()); } // verify both are rolled back and tx is invalidated Assert.assertTrue(ds1.started); Assert.assertTrue(ds2.started); Assert.assertFalse(ds1.checked); Assert.assertFalse(ds2.checked); Assert.assertFalse(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertTrue(ds1.rolledBack); Assert.assertTrue(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Invalidated); } @Test public void testStartAndRollbackFailure() throws TransactionFailureException { ds1.failStartTxOnce = InduceFailure.ThrowException; // execute: add a change to ds1 and ds2 try { 
getExecutor().execute(testFunction, 10); Assert.fail("start failed - exception should be thrown"); } catch (TransactionFailureException e) { Assert.assertEquals("start failure", e.getCause().getMessage()); } // verify both are not rolled back and tx is aborted Assert.assertTrue(ds1.started); Assert.assertFalse(ds2.started); Assert.assertFalse(ds1.checked); Assert.assertFalse(ds2.checked); Assert.assertFalse(ds1.committed); Assert.assertFalse(ds2.committed); Assert.assertFalse(ds1.postCommitted); Assert.assertFalse(ds2.postCommitted); Assert.assertFalse(ds1.rolledBack); Assert.assertFalse(ds2.rolledBack); Assert.assertEquals(TX_CLIENT.state, DummyTxClient.CommitState.Aborted); } enum InduceFailure { NoFailure, ReturnFalse, ThrowException } static class DummyTxAware implements TransactionAware { Transaction tx; boolean started = false; boolean committed = false; boolean checked = false; boolean rolledBack = false; boolean postCommitted = false; List<byte[]> changes = Lists.newArrayList(); InduceFailure failStartTxOnce = InduceFailure.NoFailure; InduceFailure failChangesTxOnce = InduceFailure.NoFailure; InduceFailure failCommitTxOnce = InduceFailure.NoFailure; InduceFailure failPostCommitTxOnce = InduceFailure.NoFailure; InduceFailure failRollbackTxOnce = InduceFailure.NoFailure; void addChange(byte[] key) { changes.add(key); } void reset() { tx = null; started = false; checked = false; committed = false; rolledBack = false; postCommitted = false; changes.clear(); } @Override public void startTx(Transaction tx) { reset(); started = true; this.tx = tx; if (failStartTxOnce == InduceFailure.ThrowException) { failStartTxOnce = InduceFailure.NoFailure; throw new RuntimeException("start failure"); } } @Override public Collection<byte[]> getTxChanges() { checked = true; if (failChangesTxOnce == InduceFailure.ThrowException) { failChangesTxOnce = InduceFailure.NoFailure; throw new RuntimeException("changes failure"); } return ImmutableList.copyOf(changes); } @Override public boolean commitTx() throws Exception { committed = true; if (failCommitTxOnce == InduceFailure.ThrowException) { failCommitTxOnce = InduceFailure.NoFailure; throw new RuntimeException("persist failure"); } if (failCommitTxOnce == InduceFailure.ReturnFalse) { failCommitTxOnce = InduceFailure.NoFailure; return false; } return true; } @Override public void postTxCommit() { postCommitted = true; if (failPostCommitTxOnce == InduceFailure.ThrowException) { failPostCommitTxOnce = InduceFailure.NoFailure; throw new RuntimeException("post failure"); } } @Override public boolean rollbackTx() throws Exception { rolledBack = true; if (failRollbackTxOnce == InduceFailure.ThrowException) { failRollbackTxOnce = InduceFailure.NoFailure; throw new RuntimeException("rollback failure"); } if (failRollbackTxOnce == InduceFailure.ReturnFalse) { failRollbackTxOnce = InduceFailure.NoFailure; return false; } return true; } @Override public String getName() { return "dummy"; } } static class DummyTxClient extends InMemoryTxSystemClient { boolean failCanCommitOnce = false; boolean failCommitOnce = false; enum CommitState { Started, Committed, Aborted, Invalidated } CommitState state = CommitState.Started; @Inject DummyTxClient(InMemoryTransactionManager txmgr) { super(txmgr); } @Override public boolean canCommit(Transaction tx, Collection<byte[]> changeIds) { if (failCanCommitOnce) { failCanCommitOnce = false; return false; } else { return super.canCommit(tx, changeIds); } } @Override public boolean commit(Transaction tx) { if (failCommitOnce) { 
failCommitOnce = false; return false; } else { state = CommitState.Committed; return super.commit(tx); } } @Override public Transaction startLong() { state = CommitState.Started; return super.startLong(); } @Override public Transaction startShort() { state = CommitState.Started; return super.startShort(); } @Override public Transaction startShort(int timeout) { state = CommitState.Started; return super.startShort(timeout); } @Override public void abort(Transaction tx) { state = CommitState.Aborted; super.abort(tx); } @Override public void invalidate(Transaction tx) { state = CommitState.Invalidated; super.invalidate(tx); } } }
actually use injection in tx executor tests
data-fabric/src/test/java/com/continuuity/data2/transaction/TransactionExecutorTest.java
actually use injection in tx executor tests
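The record above ("actually use injection in tx executor tests") swaps hand-constructed test doubles for Guice-provided ones: DummyTxClient declares an @Inject constructor taking an InMemoryTransactionManager, so a test injector can build and share it instead of the test calling new directly. Below is a minimal, self-contained sketch of that wiring pattern; every name in it (InjectionSketch, TxManager, FakeTxClient, TestModule) is a hypothetical stand-in used for illustration, not one of the project's actual types.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Singleton;

public class InjectionSketch {

  // Stand-in for the real transaction manager (hypothetical).
  public static class TxManager { }

  // Stand-in for a dummy client the test wants Guice to build; mirrors the
  // constructor-injection style of DummyTxClient above.
  @Singleton
  public static class FakeTxClient {
    final TxManager txmgr;

    @Inject
    public FakeTxClient(TxManager txmgr) {
      this.txmgr = txmgr; // injected rather than constructed by hand in the test
    }
  }

  // Test module: one shared TxManager per injector; FakeTxClient needs no explicit
  // binding because Guice can build it just-in-time via its @Inject constructor.
  public static class TestModule extends AbstractModule {
    @Override
    protected void configure() {
      bind(TxManager.class).in(Singleton.class);
    }
  }

  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new TestModule());
    FakeTxClient client = injector.getInstance(FakeTxClient.class);
    System.out.println("injected manager: " + client.txmgr);
  }
}

With this wiring a test obtains its collaborators from injector.getInstance(...) rather than new, so swapping a fake for the real implementation is a one-line binding change.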
Java
apache-2.0
8cb967d2d9a19de5f59e8fe594bcb359ad8a46f3
0
InMobi/pintail,rajubairishetti/pintail,InMobi/pintail,sreedishps/pintail,rajubairishetti/pintail,sreedishps/pintail
package com.inmobi.databus.partition; import java.io.IOException; import java.util.Date; import java.util.Set; import java.util.concurrent.BlockingQueue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import com.inmobi.databus.files.StreamFile; import com.inmobi.messaging.EOFMessage; import com.inmobi.messaging.Message; import com.inmobi.messaging.MessageBase; import com.inmobi.messaging.consumer.databus.MessageCheckpoint; import com.inmobi.messaging.consumer.databus.MessagingConsumerConfig; import com.inmobi.messaging.consumer.databus.QueueEntry; import com.inmobi.messaging.metrics.CollectorReaderStatsExposer; import com.inmobi.messaging.metrics.PartitionReaderStatsExposer; public class PartitionReader { private static final Log LOG = LogFactory.getLog(PartitionReader.class); private final PartitionId partitionId; private final BlockingQueue<QueueEntry> buffer; private PartitionStreamReader reader; private Thread thread; private volatile boolean stopped; private boolean inited = false; private final PartitionReaderStatsExposer prMetrics; public PartitionReader(PartitionId partitionId, PartitionCheckpoint partitionCheckpoint, Configuration conf, FileSystem fs, Path collectorDataDir, Path streamsLocalDir, BlockingQueue<QueueEntry> buffer, String streamName, Date startTime, long waitTimeForFlush, long waitTimeForFileCreate, PartitionReaderStatsExposer prMetrics, Date stopDate) throws IOException { this(partitionId, partitionCheckpoint, conf, fs, collectorDataDir, streamsLocalDir, buffer, streamName, startTime, waitTimeForFlush, waitTimeForFileCreate, prMetrics, false, stopDate); } public PartitionReader(PartitionId partitionId, PartitionCheckpointList partitionCheckpointList, FileSystem fs, BlockingQueue<QueueEntry> buffer, Path streamDir, Configuration conf, String inputFormatClass, Date startTime, long waitTimeForFileCreate, boolean isDatabusData, PartitionReaderStatsExposer prMetrics, Set<Integer> partitionMinList, Date stopDate) throws IOException { this(partitionId, partitionCheckpointList, fs, buffer, streamDir, conf, inputFormatClass, startTime, waitTimeForFileCreate, isDatabusData, prMetrics, false, partitionMinList, stopDate); } PartitionReader(PartitionId partitionId, PartitionCheckpoint partitionCheckpoint, Configuration conf, FileSystem fs, Path collectorDataDir, Path streamLocalDir, BlockingQueue<QueueEntry> buffer, String streamName, Date startTime, long waitTimeForFlush, long waitTimeForFileCreate, PartitionReaderStatsExposer prMetrics, boolean noNewFiles, Date stopDate) throws IOException { this(partitionId, partitionCheckpoint, buffer, startTime, prMetrics); reader = new CollectorReader(partitionId, partitionCheckpoint, fs, streamName, collectorDataDir, streamLocalDir, conf, startTime, waitTimeForFlush, waitTimeForFileCreate, ((CollectorReaderStatsExposer)prMetrics), noNewFiles, stopDate); // initialize cluster and its directories LOG.info("Partition reader initialized with partitionId:" + partitionId + " checkPoint:" + partitionCheckpoint + " startTime:" + startTime + " currentReader:" + reader); } PartitionReader(PartitionId partitionId, PartitionCheckpointList partitionCheckpointList, FileSystem fs, BlockingQueue<QueueEntry> buffer, Path streamDir, Configuration conf, String inputFormatClass, Date startTime, long waitTimeForFileCreate, boolean isDatabusData, PartitionReaderStatsExposer prMetrics, boolean noNewFiles, 
Set<Integer> partitionMinList, Date stopDate) throws IOException { this(partitionId, partitionCheckpointList, buffer, startTime, prMetrics); reader = new ClusterReader(partitionId, partitionCheckpointList, fs, streamDir, conf, inputFormatClass, startTime, waitTimeForFileCreate, isDatabusData, prMetrics, noNewFiles, partitionMinList, stopDate); // initialize cluster and its directories LOG.info("Partition reader initialized with partitionId:" + partitionId + " checkPoint:" + partitionCheckpointList + " startTime:" + startTime + " currentReader:" + reader); } private PartitionReader(PartitionId partitionId, MessageCheckpoint msgCheckpoint, BlockingQueue<QueueEntry> buffer, Date startTime, PartitionReaderStatsExposer prMetrics) throws IOException { if (startTime == null && msgCheckpoint == null) { String msg = "StartTime and checkpoint both" + " cannot be null in PartitionReader"; LOG.warn(msg); throw new IllegalArgumentException(msg); } this.partitionId = partitionId; this.buffer = buffer; this.prMetrics = prMetrics; } public synchronized void start() { Runnable runnable = new Runnable() { @Override public void run() { while (!stopped && !thread.isInterrupted()) { long startTime = System.currentTimeMillis(); try { while (!stopped && !inited) { init(); } LOG.info("Started streaming the data from reader:" + reader); execute(); if (stopped || thread.isInterrupted()) return; } catch (Throwable e) { LOG.warn("Error in run", e); prMetrics.incrementHandledExceptions(); } long finishTime = System.currentTimeMillis(); LOG.debug("Execution took ms : " + (finishTime - startTime)); try { long sleep = 1000; if (sleep > 0) { LOG.debug("Sleeping for " + sleep); Thread.sleep(sleep); } } catch (InterruptedException e) { LOG.warn("thread interrupted " + thread.getName(), e); return; } } } }; thread = new Thread(runnable, this.partitionId.toString()); LOG.info("Starting thread " + thread.getName()); thread.start(); } void init() throws IOException, InterruptedException { if (!inited) { reader.initializeCurrentFile(); inited = true; } } public void close() { stopped = true; LOG.info(Thread.currentThread().getName() + " stopped [" + stopped + "]"); if (reader != null) { try { reader.close(); } catch (IOException e) { LOG.warn("Error closing current stream", e); } } if (thread != null) { thread.interrupt(); try { thread.join(); } catch (InterruptedException ie) { LOG.warn("thread join interrupted " + thread.getName(), ie); return; } } } StreamFile getCurrentFile() { return reader.getCurrentFile(); } PartitionStreamReader getReader() { return reader; } void execute() { assert (reader != null); try { reader.openStream(); LOG.info("Reading file " + reader.getCurrentFile() + " and lineNum:" + reader.getCurrentLineNum()); while (!stopped) { Message msg = reader.readLine(); if (reader.isStopped()) { EOFMessage eofMessage = new EOFMessage(); buffer.put(new QueueEntry(eofMessage, partitionId, null)); // close the reader if reader's status is "closing" close(); } if (msg != null) { // add the data to queue MessageCheckpoint checkpoint = reader.getMessageCheckpoint(); buffer.put(new QueueEntry(msg, partitionId, checkpoint)); prMetrics.incrementMessagesAddedToBuffer(); } else { LOG.info("No stream to read"); return; } } } catch (InterruptedException ie) { LOG.info("Interrupted while reading stream", ie); } catch (Throwable e) { LOG.warn("Error while reading stream", e); prMetrics.incrementHandledExceptions(); } finally { try { reader.closeStream(); } catch (Exception e) { LOG.warn("Error while closing stream", e); 
prMetrics.incrementHandledExceptions(); } } } public PartitionReaderStatsExposer getStatsExposer() { return prMetrics; } }
messaging-client-databus/src/main/java/com/inmobi/databus/partition/PartitionReader.java
package com.inmobi.databus.partition; import java.io.IOException; import java.util.Date; import java.util.Set; import java.util.concurrent.BlockingQueue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import com.inmobi.databus.files.StreamFile; import com.inmobi.messaging.EOFMessage; import com.inmobi.messaging.Message; import com.inmobi.messaging.MessageBase; import com.inmobi.messaging.consumer.databus.MessageCheckpoint; import com.inmobi.messaging.consumer.databus.MessagingConsumerConfig; import com.inmobi.messaging.consumer.databus.QueueEntry; import com.inmobi.messaging.metrics.CollectorReaderStatsExposer; import com.inmobi.messaging.metrics.PartitionReaderStatsExposer; public class PartitionReader { private static final Log LOG = LogFactory.getLog(PartitionReader.class); private final PartitionId partitionId; private final BlockingQueue<QueueEntry> buffer; private PartitionStreamReader reader; private Thread thread; private volatile boolean stopped; private boolean inited = false; private final PartitionReaderStatsExposer prMetrics; public PartitionReader(PartitionId partitionId, PartitionCheckpoint partitionCheckpoint, Configuration conf, FileSystem fs, Path collectorDataDir, Path streamsLocalDir, BlockingQueue<QueueEntry> buffer, String streamName, Date startTime, long waitTimeForFlush, long waitTimeForFileCreate, PartitionReaderStatsExposer prMetrics, Date stopDate) throws IOException { this(partitionId, partitionCheckpoint, conf, fs, collectorDataDir, streamsLocalDir, buffer, streamName, startTime, waitTimeForFlush, waitTimeForFileCreate, prMetrics, false, stopDate); } public PartitionReader(PartitionId partitionId, PartitionCheckpointList partitionCheckpointList, FileSystem fs, BlockingQueue<QueueEntry> buffer, Path streamDir, Configuration conf, String inputFormatClass, Date startTime, long waitTimeForFileCreate, boolean isDatabusData, PartitionReaderStatsExposer prMetrics, Set<Integer> partitionMinList, Date stopDate) throws IOException { this(partitionId, partitionCheckpointList, fs, buffer, streamDir, conf, inputFormatClass, startTime, waitTimeForFileCreate, isDatabusData, prMetrics, false, partitionMinList, stopDate); } PartitionReader(PartitionId partitionId, PartitionCheckpoint partitionCheckpoint, Configuration conf, FileSystem fs, Path collectorDataDir, Path streamLocalDir, BlockingQueue<QueueEntry> buffer, String streamName, Date startTime, long waitTimeForFlush, long waitTimeForFileCreate, PartitionReaderStatsExposer prMetrics, boolean noNewFiles, Date stopDate) throws IOException { this(partitionId, partitionCheckpoint, buffer, startTime, prMetrics); reader = new CollectorReader(partitionId, partitionCheckpoint, fs, streamName, collectorDataDir, streamLocalDir, conf, startTime, waitTimeForFlush, waitTimeForFileCreate, ((CollectorReaderStatsExposer)prMetrics), noNewFiles, stopDate); // initialize cluster and its directories LOG.info("Partition reader initialized with partitionId:" + partitionId + " checkPoint:" + partitionCheckpoint + " startTime:" + startTime + " currentReader:" + reader); } PartitionReader(PartitionId partitionId, PartitionCheckpointList partitionCheckpointList, FileSystem fs, BlockingQueue<QueueEntry> buffer, Path streamDir, Configuration conf, String inputFormatClass, Date startTime, long waitTimeForFileCreate, boolean isDatabusData, PartitionReaderStatsExposer prMetrics, boolean noNewFiles, 
Set<Integer> partitionMinList, Date stopDate) throws IOException { this(partitionId, partitionCheckpointList, buffer, startTime, prMetrics); reader = new ClusterReader(partitionId, partitionCheckpointList, fs, streamDir, conf, inputFormatClass, startTime, waitTimeForFileCreate, isDatabusData, prMetrics, noNewFiles, partitionMinList, stopDate); // initialize cluster and its directories LOG.info("Partition reader initialized with partitionId:" + partitionId + " checkPoint:" + partitionCheckpointList + " startTime:" + startTime + " currentReader:" + reader); } private PartitionReader(PartitionId partitionId, MessageCheckpoint msgCheckpoint, BlockingQueue<QueueEntry> buffer, Date startTime, PartitionReaderStatsExposer prMetrics) throws IOException { if (startTime == null && msgCheckpoint == null) { String msg = "StartTime and checkpoint both" + " cannot be null in PartitionReader"; LOG.warn(msg); throw new IllegalArgumentException(msg); } this.partitionId = partitionId; this.buffer = buffer; this.prMetrics = prMetrics; } public synchronized void start() { Runnable runnable = new Runnable() { @Override public void run() { while (!stopped && !thread.isInterrupted()) { long startTime = System.currentTimeMillis(); try { while (!stopped && !inited) { init(); } LOG.info("Started streaming the data from reader:" + reader); execute(); if (stopped || thread.isInterrupted()) return; } catch (Throwable e) { LOG.warn("Error in run", e); prMetrics.incrementHandledExceptions(); } long finishTime = System.currentTimeMillis(); LOG.debug("Execution took ms : " + (finishTime - startTime)); try { long sleep = 1000; if (sleep > 0) { LOG.debug("Sleeping for " + sleep); Thread.sleep(sleep); } } catch (InterruptedException e) { LOG.warn("thread interrupted " + thread.getName(), e); return; } } } }; thread = new Thread(runnable, this.partitionId.toString()); LOG.info("Starting thread " + thread.getName()); thread.start(); } void init() throws IOException, InterruptedException { if (!inited) { reader.initializeCurrentFile(); inited = true; } } public void close() { stopped = true; LOG.info(Thread.currentThread().getName() + " stopped [" + stopped + "]"); if (reader != null) { try { reader.close(); } catch (IOException e) { LOG.warn("Error closing current stream", e); } } if (thread != null) { thread.interrupt(); try { thread.join(); } catch (InterruptedException ie) { LOG.warn("thread join interrupted " + thread.getName(), ie); return; } } } StreamFile getCurrentFile() { return reader.getCurrentFile(); } PartitionStreamReader getReader() { return reader; } void execute() { assert (reader != null); try { reader.openStream(); LOG.info("Reading file " + reader.getCurrentFile() + " and lineNum:" + reader.getCurrentLineNum()); while (!stopped) { Message msg = reader.readLine(); if (reader.isStopped()) { EOFMessage eofMessage = new EOFMessage(); buffer.put(new QueueEntry(eofMessage, partitionId, null)); } if (msg != null) { // add the data to queue MessageCheckpoint checkpoint = reader.getMessageCheckpoint(); buffer.put(new QueueEntry(msg, partitionId, checkpoint)); prMetrics.incrementMessagesAddedToBuffer(); } else { LOG.info("No stream to read"); return; } } } catch (InterruptedException ie) { LOG.info("Interrupted while reading stream", ie); } catch (Throwable e) { LOG.warn("Error while reading stream", e); prMetrics.incrementHandledExceptions(); } finally { try { reader.closeStream(); } catch (Exception e) { LOG.warn("Error while closing stream", e); prMetrics.incrementHandledExceptions(); } } } public 
PartitionReaderStatsExposer getStatsExposer() { return prMetrics; } }
close the reader if reader's status is closing
messaging-client-databus/src/main/java/com/inmobi/databus/partition/PartitionReader.java
close the reader if reader's status is closing
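The change this record captures is the added close() call in PartitionReader.execute(): once the wrapped stream reader reports it is stopped, an EOF sentinel is put on the buffer for consumers and the partition reader closes itself rather than continuing to poll. The sketch below shows that shutdown pattern in isolation; ReaderCloseSketch, SimpleReader and the EOF marker are illustrative stand-ins, not the project's classes.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class ReaderCloseSketch {

  private static final String EOF = "<EOF>"; // sentinel understood by consumers
  private volatile boolean stopped;

  // Minimal stand-in for the underlying stream reader.
  interface SimpleReader {
    String readLine();   // null when nothing is available
    boolean isStopped(); // true once the reader has been asked to stop
    void close();
  }

  void pump(SimpleReader reader, BlockingQueue<String> buffer) throws InterruptedException {
    while (!stopped) {
      String msg = reader.readLine();
      if (reader.isStopped()) {
        buffer.put(EOF);  // tell consumers no more data is coming
        close(reader);    // close the reader if its status is "closing"
      }
      if (msg == null) {
        return;           // no stream to read
      }
      buffer.put(msg);
    }
  }

  void close(SimpleReader reader) {
    stopped = true;
    reader.close();
  }

  public static void main(String[] args) throws InterruptedException {
    // Tiny usage example with an in-memory reader that stops after its first message.
    BlockingQueue<String> buffer = new LinkedBlockingQueue<>();
    SimpleReader oneShot = new SimpleReader() {
      private boolean delivered;
      public String readLine() { if (delivered) { return null; } delivered = true; return "msg-1"; }
      public boolean isStopped() { return delivered; }
      public void close() { }
    };
    new ReaderCloseSketch().pump(oneShot, buffer);
    // As in the original loop, the sentinel can land before the message read in the
    // same iteration, so this prints [<EOF>, msg-1].
    System.out.println(buffer);
  }
}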
Java
apache-2.0
3121051c6d13f6247455e898031684b0c2309bce
0
facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.litho.fresco.common; import android.graphics.ColorFilter; import android.graphics.PointF; import android.graphics.drawable.Drawable; import android.support.annotation.Nullable; import com.facebook.litho.ComponentContext; import com.facebook.litho.reference.Reference; import com.facebook.drawee.drawable.AutoRotateDrawable; import com.facebook.drawee.drawable.DrawableUtils; import com.facebook.drawee.drawable.ScalingUtils; import com.facebook.drawee.generic.GenericDraweeHierarchy; import com.facebook.drawee.generic.RoundingParams; import com.facebook.drawee.interfaces.SettableDraweeHierarchy; import static com.facebook.drawee.generic.GenericDraweeHierarchyBuilder.DEFAULT_SCALE_TYPE; /** * A wrapper around {@link com.facebook.drawee.generic.GenericDraweeHierarchy} which correctly * manages references. */ public class GenericReferenceDraweeHierarchy implements SettableDraweeHierarchy { private final GenericDraweeHierarchy mGenericDraweeHierarchy; private ComponentContext mContext; private Reference<Drawable> mPlaceholderReference; private Reference<Drawable> mRetryReference; private Reference<Drawable> mFailureReference; private Reference<Drawable> mProgressBarReference; private Reference<Drawable> mOverlayImageReference; private Drawable mPlaceholder; private Drawable mRetry; private Drawable mFailure; private Drawable mProgressBar; private Drawable mOverlayImage; public GenericReferenceDraweeHierarchy(GenericDraweeHierarchy genericDraweeHierarchy) { mGenericDraweeHierarchy = genericDraweeHierarchy; } /** * @return * The {@link GenericDraweeHierarchy} to which this reference points to. */ public GenericDraweeHierarchy getHierarchy() { return mGenericDraweeHierarchy; } /** * Set the context to be used to acquire and release references. * * @param c * The context used to acquire and release references. This context is set to null on * releaseReferences() */ public void setContext(ComponentContext c) { mContext = c; } /** * Set the placeholder image of the wrapped GenericDraweeHierarchy through the use of a reference. * * @param placeholderReference * The reference which references the placeholder drawable to use. Accepts null which will * remove the current placeholder image. Make sure to call release() to release this reference. * * @param scaleType * The scale type of the placeholder drawable. Accepts null which will use the default * scale type defined by GenericDraweeHierarchy. */ public void setPlaceholderReference( @Nullable Reference<Drawable> placeholderReference, @Nullable ScalingUtils.ScaleType scaleType) { if (mPlaceholderReference != null) { if (!Reference.shouldUpdate(mPlaceholderReference, placeholderReference)) { return; } else { Reference.release(mContext, mPlaceholder, mPlaceholderReference); mPlaceholderReference = null; mPlaceholder = null; } } if (placeholderReference == null) { mGenericDraweeHierarchy.setPlaceholderImage(null); return; } mPlaceholderReference = placeholderReference; mPlaceholder = Reference.acquire(mContext, placeholderReference); mGenericDraweeHierarchy.setPlaceholderImage( DrawableUtils.cloneDrawable(mPlaceholder), scaleType != null ? 
scaleType : DEFAULT_SCALE_TYPE); } /** * Set the retry image of the wrapped GenericDraweeHierarchy through the use of a reference. * * @param retryReference * The reference which references the retry drawable to use. Accepts null which will * remove the current retry image. Make sure to call release() to release this reference. * * @param scaleType * The scale type of the retry drawable. Accepts null which will use the default * scale type defined by GenericDraweeHierarchy. */ public void setRetryReference( @Nullable Reference<Drawable> retryReference, @Nullable ScalingUtils.ScaleType scaleType) { if (mRetryReference != null) { if (!Reference.shouldUpdate(mRetryReference, retryReference)) { return; } else { Reference.release(mContext, mRetry, mRetryReference); mRetryReference = null; mRetry = null; } } if (retryReference == null) { mGenericDraweeHierarchy.setRetryImage(null); return; } mRetryReference = retryReference; mRetry = Reference.acquire(mContext, retryReference); mGenericDraweeHierarchy.setRetryImage( DrawableUtils.cloneDrawable(mRetry), scaleType != null ? scaleType : DEFAULT_SCALE_TYPE); } /** * Set the failure of the wrapped GenericDraweeHierarchy through the use of a reference. * * @param failureReference * The reference which references the failure drawable to use. Accepts null which will * remove the current failure image. Make sure to call release() to release this reference. * * @param scaleType * The scale type of the failure drawable. Accepts null which will use the default * scale type defined by GenericDraweeHierarchy. */ public void setFailureReference( @Nullable Reference<Drawable> failureReference, @Nullable ScalingUtils.ScaleType scaleType) { if (mFailureReference != null) { if (!Reference.shouldUpdate(mFailureReference, failureReference)) { return; } else { Reference.release(mContext, mFailure, mFailureReference); mFailureReference = null; mFailure = null; } } if (failureReference == null) { mGenericDraweeHierarchy.setFailureImage(null); return; } mFailureReference = failureReference; mFailure = Reference.acquire(mContext, failureReference); mGenericDraweeHierarchy.setFailureImage( DrawableUtils.cloneDrawable(mFailure), scaleType != null ? scaleType : DEFAULT_SCALE_TYPE); } /** * Set the progress bar image of the wrapped GenericDraweeHierarchythrough the use of a * reference. * * @param progressBarReference * The reference which references the progress bar drawable to use. Accepts null which will * remove the current progress bar image. Make sure to call release() to release this * reference. * * @param scaleType * The scale type of the progress bar drawable. Accepts null which will use the default * scale type defined by GenericDraweeHierarchy. * * @param autoRotateInterval * The period at which the progressBarReference's drawable will rotate about itself, * measured in milliseconds. 
*/ public void setProgressBarReference( @Nullable Reference<Drawable> progressBarReference, @Nullable ScalingUtils.ScaleType scaleType, int autoRotateInterval) { if (mProgressBarReference != null) { if (!Reference.shouldUpdate(mProgressBarReference, progressBarReference)) { return; } else { Reference.release(mContext, mProgressBar, mProgressBarReference); mProgressBarReference = null; mProgressBar = null; } } if (progressBarReference == null) { mGenericDraweeHierarchy.setProgressBarImage(null); return; } mProgressBarReference = progressBarReference; mProgressBar = Reference.acquire(mContext, progressBarReference); Drawable progressBarDrawable = DrawableUtils.cloneDrawable(mProgressBar); if (autoRotateInterval > 0) { progressBarDrawable = new AutoRotateDrawable(progressBarDrawable, autoRotateInterval); } mGenericDraweeHierarchy.setProgressBarImage( progressBarDrawable, scaleType != null ? scaleType : DEFAULT_SCALE_TYPE); } /** * Set the overlay image of the wrapped GenericDraweeHierarchy through the use of a * reference. * * @param overlayImageReference * The reference which references the overlay drawable to use. Accepts null which will * remove the current overlay image. Make sure to call release() to release this * reference. */ public void setOverlayImageReference(@Nullable Reference<Drawable> overlayImageReference) {
src/main/java/com/facebook/components/fresco/common/GenericReferenceDraweeHierarchy.java
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.litho.fresco.common; import android.graphics.ColorFilter; import android.graphics.PointF; import android.graphics.drawable.Drawable; import android.support.annotation.Nullable; import com.facebook.litho.ComponentContext; import com.facebook.litho.reference.Reference; import com.facebook.drawee.drawable.AutoRotateDrawable; import com.facebook.drawee.drawable.DrawableUtils; import com.facebook.drawee.drawable.ScalingUtils; import com.facebook.drawee.generic.GenericDraweeHierarchy; import com.facebook.drawee.generic.RoundingParams; import com.facebook.drawee.interfaces.SettableDraweeHierarchy; import static com.facebook.drawee.generic.GenericDraweeHierarchyBuilder.DEFAULT_SCALE_TYPE; /** * A wrapper around {@link com.facebook.drawee.generic.GenericDraweeHierarchy} which correctly * manages references. */ public class GenericReferenceDraweeHierarchy implements SettableDraweeHierarchy { private final GenericDraweeHierarchy mGenericDraweeHierarchy; private ComponentContext mContext; private Reference<Drawable> mPlaceholderReference; private Reference<Drawable> mRetryReference; private Reference<Drawable> mFailureReference; private Reference<Drawable> mProgressBarReference; private Reference<Drawable> mOverlayImageReference; private Drawable mPlaceholder; private Drawable mRetry; private Drawable mFailure; private Drawable mProgressBar; private Drawable mOverlayImage; public GenericReferenceDraweeHierarchy(GenericDraweeHierarchy genericDraweeHierarchy) { mGenericDraweeHierarchy = genericDraweeHierarchy; } /** * @return * The {@link GenericDraweeHierarchy} to which this reference points to. */ public GenericDraweeHierarchy getHierarchy() { return mGenericDraweeHierarchy; } /** * Set the context to be used to acquire and release references. * * @param c * The context used to acquire and release references. This context is set to null on * releaseReferences() */ public void setContext(ComponentContext c) { mContext = c; } /** * Set the placeholder image of the wrapped GenericDraweeHierarchy through the use of a reference. * * @param placeholderReference * The reference which references the placeholder drawable to use. Accepts null which will * remove the current placeholder image. Make sure to call release() to release this reference. * * @param scaleType * The scale type of the placeholder drawable. Accepts null which will use the default * scale type defined by GenericDraweeHierarchy. */ public void setPlaceholderReference( @Nullable Reference<Drawable> placeholderReference, @Nullable ScalingUtils.ScaleType scaleType) { if (mPlaceholderReference != null) { if (!Reference.shouldUpdate(mPlaceholderReference, placeholderReference)) { return; } else { Reference.release(mContext, mPlaceholder, mPlaceholderReference); mPlaceholderReference = null; mPlaceholder = null; } } if (placeholderReference == null) { mGenericDraweeHierarchy.setPlaceholderImage(null); return; } mPlaceholderReference = placeholderReference; mPlaceholder = Reference.acquire(mContext, placeholderReference); mGenericDraweeHierarchy.setPlaceholderImage( DrawableUtils.cloneDrawable(mPlaceholder), scaleType != null ? 
scaleType : DEFAULT_SCALE_TYPE); } /** * Set the retry image of the wrapped GenericDraweeHierarchy through the use of a reference. * * @param retryReference * The reference which references the retry drawable to use. Accepts null which will * remove the current retry image. Make sure to call release() to release this reference. * * @param scaleType * The scale type of the retry drawable. Accepts null which will use the default * scale type defined by GenericDraweeHierarchy. */ public void setRetryReference( @Nullable Reference<Drawable> retryReference, @Nullable ScalingUtils.ScaleType scaleType) { if (mRetryReference != null) { if (!Reference.shouldUpdate(mRetryReference, retryReference)) { return; } else { Reference.release(mContext, mRetry, mRetryReference); mRetryReference = null; mRetry = null; } } if (retryReference == null) { mGenericDraweeHierarchy.setRetryImage(null); return; } mRetryReference = retryReference; mRetry = Reference.acquire(mContext, retryReference); mGenericDraweeHierarchy.setRetryImage( DrawableUtils.cloneDrawable(mRetry), scaleType != null ? scaleType : DEFAULT_SCALE_TYPE); } /** * Set the failure of the wrapped GenericDraweeHierarchy through the use of a reference. * * @param failureReference * The reference which references the failure drawable to use. Accepts null which will * remove the current failure image. Make sure to call release() to release this reference. * * @param scaleType * The scale type of the failure drawable. Accepts null which will use the default * scale type defined by GenericDraweeHierarchy. */ public void setFailureReference( @Nullable Reference<Drawable> failureReference, @Nullable ScalingUtils.ScaleType scaleType) { if (mFailureReference != null) { if (!Reference.shouldUpdate(mFailureReference, failureReference)) { return; } else { Reference.release(mContext, mFailure, mFailureReference); mFailureReference = null; mFailure = null; } } if (failureReference == null) { mGenericDraweeHierarchy.setFailureImage(null); return; } mFailureReference = failureReference; mFailure = Reference.acquire(mContext, failureReference); mGenericDraweeHierarchy.setFailureImage( DrawableUtils.cloneDrawable(mFailure), scaleType != null ? scaleType : DEFAULT_SCALE_TYPE); } /** * Set the progress bar image of the wrapped GenericDraweeHierarchythrough the use of a * reference. * * @param progressBarReference * The reference which references the progress bar drawable to use. Accepts null which will * remove the current progress bar image. Make sure to call release() to release this * reference. * * @param scaleType * The scale type of the progress bar drawable. Accepts null which will use the default * scale type defined by GenericDraweeHierarchy. * * @param autoRotateInterval * The period at which the progressBarReference's drawable will rotate about itself, * measured in milliseconds. 
*/ public void setProgressBarReference( @Nullable Reference<Drawable> progressBarReference, @Nullable ScalingUtils.ScaleType scaleType, int autoRotateInterval) { if (mProgressBarReference != null) { if (!Reference.shouldUpdate(mProgressBarReference, progressBarReference)) { return; } else { Reference.release(mContext, mProgressBar, mProgressBarReference); mProgressBarReference = null; mProgressBar = null; } } if (progressBarReference == null) { mGenericDraweeHierarchy.setProgressBarImage(null); return; } mProgressBarReference = progressBarReference; mProgressBar = Reference.acquire(mContext, progressBarReference); Drawable progressBarDrawable = DrawableUtils.cloneDrawable(mProgressBar); if (autoRotateInterval > 0) { progressBarDrawable = new AutoRotateDrawable(progressBarDrawable, autoRotateInterval); } mGenericDraweeHierarchy.setProgressBarImage( progressBarDrawable, scaleType != null ? scaleType : DEFAULT_SCALE_TYPE); }
Lines authored by achernukha. This commit forms part of the blame-preserving initial commit suite.
src/main/java/com/facebook/components/fresco/common/GenericReferenceDraweeHierarchy.java
Lines authored by achernukha
Java
apache-2.0
5532d54d7798ff51b96b3ad49e8f3463fdb5806a
0
Erudika/para,Erudika/para
/* * Copyright 2013-2018 Erudika. https://erudika.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For issues and patches go to: https://github.com/erudika */ package com.erudika.para.metrics; import com.codahale.metrics.JmxReporter; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.SharedMetricRegistries; import com.codahale.metrics.Slf4jReporter; import com.codahale.metrics.Timer; import com.codahale.metrics.graphite.Graphite; import com.codahale.metrics.graphite.GraphiteReporter; import com.erudika.para.AppCreatedListener; import com.erudika.para.AppDeletedListener; import com.erudika.para.AppSettingAddedListener; import com.erudika.para.AppSettingRemovedListener; import com.erudika.para.InitializeListener; import com.erudika.para.Para; import com.erudika.para.core.App; import com.erudika.para.rest.CustomResourceHandler; import com.erudika.para.rest.RestUtils; import com.erudika.para.utils.Config; import com.erudika.para.utils.HealthUtils; import com.erudika.para.utils.RegistryUtils; import com.erudika.para.utils.Pager; import com.erudika.para.utils.Utils; import com.google.common.collect.Lists; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.Objects; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import static com.erudika.para.Para.getCustomResourceHandlers; /** * A centralized utility for managing and retrieving all Para performance metrics. * @author Jeremy Wiesner [[email protected]] */ public enum MetricsUtils implements InitializeListener, Runnable { /** * Singleton. */ INSTANCE { private ScheduledFuture<?> scheduledRegistryCheck; @Override public void onInitialize() { if (!Config.getConfigBoolean("metrics_enabled", true)) { return; } // setup metrics log file reporting MetricRegistry systemRegistry = SharedMetricRegistries.tryGetDefault(); if (systemRegistry == null) { systemRegistry = SharedMetricRegistries.setDefault(SYSTEM_METRICS_NAME); } Logger metricsLogger = LoggerFactory.getLogger("paraMetrics"); int loggingRate = Config.getConfigInt("metrics.logging_rate", 60); if (loggingRate > 0) { Slf4jReporter.forRegistry(systemRegistry).outputTo(metricsLogger).build(). 
start(loggingRate, TimeUnit.SECONDS); } // initialize metrics for the system and all existing applications MetricsUtils.initializeMetrics(SYSTEM_METRICS_NAME); // setup graphite reporting for the system String host = Config.getConfigParam("metrics.graphite.host", null); if (GRAPHITE_PERIOD > 0 && !StringUtils.isBlank(host)) { int port = Config.getConfigInt("metrics.graphite.port", 2003); String prefixSystem = Config.getConfigParam("metrics.graphite.prefix_system", null); if (INSTANCE_ID != null) { HashMap<String, Object> prefixContext = new HashMap<>(); prefixContext.put("INSTANCE_ID", INSTANCE_ID); prefixSystem = Utils.compileMustache(prefixContext, prefixSystem); } GraphiteSettings settings = new GraphiteSettings(host, port); MetricsUtils.createGraphiteReporter(SYSTEM_METRICS_NAME, settings, prefixSystem); } if (HealthUtils.getInstance().isHealthy()) { // find all app objects even if there are more than 10000 apps in the system // apps will be added in chronological order, root app first, followed by child apps Pager pager = new Pager(1, "_docid", false, Config.DEFAULT_LIMIT); List<App> apps = new LinkedList<>(); List<App> appsPage; do { appsPage = Para.getSearch().findQuery(Utils.type(App.class), "*", pager); apps.addAll(appsPage); logger.debug("Found a page of {} apps.", appsPage.size()); } while (!appsPage.isEmpty()); logger.info("Found root app '{}' and {} existing child app(s){}", Config.getRootAppIdentifier(), apps.size() - 1, apps.isEmpty() || !logger.isDebugEnabled() ? "." : ":"); for (App app : apps) { logger.debug(" {}{}", app.getAppIdentifier(), app.isRootApp() ? " (root app)" : ""); MetricsUtils.initializeMetrics(app.getAppIdentifier()); } } // schedule the regular check on metrics settings registries to establish app-specific reporting if (scheduledRegistryCheck == null) { scheduledRegistryCheck = Para.getScheduledExecutorService(). scheduleAtFixedRate(this, 0, 1, TimeUnit.MINUTES); } // setup initialization/cleanup for all new apps App.addAppCreatedListener(new AppCreatedListener() { public void onAppCreated(App app) { if (app != null) { MetricsUtils.initializeMetrics(app.getAppIdentifier()); } } }); App.addAppDeletedListener(new AppDeletedListener() { public void onAppDeleted(App app) { if (app != null) { app.clearSettings(); // removes the app from all registries, metrics stop on next interval } } }); // setup listeners for push metrics settings App.addAppSettingAddedListener(new AppSettingAddedListener() { public void onSettingAdded(App app, String settingKey, Object settingValue) { if (app != null) { MetricsUtils.addAppSetting(app, settingKey, settingValue); } } }); App.addAppSettingRemovedListener(new AppSettingRemovedListener() { public void onSettingRemoved(App app, String settingKey) { if (app != null) { MetricsUtils.removeAppSetting(app, settingKey); } } }); } @Override public void run() { MetricsUtils.syncAppMetricsReporters(); } }; private static final Logger logger = LoggerFactory.getLogger(MetricsUtils.class); private static final String INSTANCE_ID = Config.getConfigParam("instance_id", null); private static final Map<String, GraphiteReporter> GRAPHITE_REPORTERS = new HashMap<>(); private static final Map<String, GraphiteSettings> GRAPHITE_SETTINGS = new HashMap<>(); private static final String GRAPHITE_APP_PREFIX_TEMPLATE = Config.getConfigParam("metrics.graphite.prefix_apps", null); private static final int GRAPHITE_PERIOD = Config.getConfigInt("metrics.graphite.period", 0); /** * The name of the default system @{link MetricRegistry}. 
*/ public static final String SYSTEM_METRICS_NAME = "_system"; /** * The name of the registry holding app-specific settings for reporting metrics to Graphite. */ public static final String GRAPHITE_REGISTRY_NAME = "GraphiteReporter"; /** * The name of the app settings object that contains the info to push an app's metrics to Graphite. */ public static final String GRAPHITE_APP_SETTINGS_NAME = "metricsGraphiteReporter"; /** * An auto-closeable class that manages timers for both the overall system as well as specific application. */ public static final class Context implements Closeable { private final Timer.Context systemContext; private final Timer.Context appContext; private Context(Timer systemTimer, Timer appTimer) { this.systemContext = systemTimer.time(); this.appContext = appTimer == null ? null : appTimer.time(); } @Override public void close() { systemContext.stop(); if (appContext != null) { appContext.stop(); } } } /** * A utility class for holding the settings for connecting to a Graphite server. */ private static final class GraphiteSettings extends HashMap<String, Object> { GraphiteSettings(String host, int port) { this.put("host", host); this.put("port", port); } public String getHost() { return (String) this.get("host"); } public int getPort() { return (int) this.get("port"); } public static GraphiteSettings parse(Object object) { Map map = (Map) object; if (map == null || !map.containsKey("host") || !map.containsKey("port")) { return null; } return new GraphiteSettings((String) map.get("host"), (int) map.get("port")); } @Override public boolean equals(Object obj) { if (obj == null || this.getClass() != obj.getClass()) { return false; } return Objects.equals(this.getHost(), ((GraphiteSettings) obj).getHost()) && Objects.equals(this.getPort(), ((GraphiteSettings) obj).getPort()); } @Override public int hashCode() { return 67 * Objects.hashCode(this.getPort()) + Objects.hashCode(this.getHost()); } } /** * Provides access to the singleton instance methods. * @return an instance of this class */ public static MetricsUtils getInstance() { return INSTANCE; } /** * Instantiate timing of a particular class and method for a specific application. * @param appid the application that invoked the request * @param clazz the Class to be timed * @param names one or more unique names to identify the timer - usually a method name * @return a closeable context that encapsulates the timed method */ public static MetricsUtils.Context time(String appid, Class clazz, String... names) { String className = getClassName(clazz); Timer systemTimer = getTimer(SYSTEM_METRICS_NAME, className, names); Timer appTimer = appid == null || appid.isEmpty() ? null : getTimer(appid, className, names); return new MetricsUtils.Context(systemTimer, appTimer); } private static String getClassName(Class clazz) { if (clazz.getSimpleName().contains("EnhancerByGuice")) { clazz = clazz.getSuperclass(); } if (CustomResourceHandler.class.isAssignableFrom(clazz)) { return clazz.getCanonicalName(); } else { return clazz.getSimpleName(); } } private static Timer getTimer(String registryName, String className, String... names) { return SharedMetricRegistries.getOrCreate(registryName).timer(MetricRegistry.name(className, names)); } /** * Initialize all the possible metrics for a specific registry (either the system registry or an application registry). * This ensures that all metrics report with zero values from system startup or application creation. * @param registryName the name of the registry to initialize. 
Either the system default name or an appid. */ private static void initializeMetrics(String registryName) { MetricRegistry registry = SharedMetricRegistries.getOrCreate(registryName); // register the DAO timers if (Para.getDAO() != null) { String daoClassName = getClassName(Para.getDAO().getClass()); registry.timer(MetricRegistry.name(daoClassName, "create")); registry.timer(MetricRegistry.name(daoClassName, "read")); registry.timer(MetricRegistry.name(daoClassName, "update")); registry.timer(MetricRegistry.name(daoClassName, "delete")); registry.timer(MetricRegistry.name(daoClassName, "createAll")); registry.timer(MetricRegistry.name(daoClassName, "readAll")); registry.timer(MetricRegistry.name(daoClassName, "readPage")); registry.timer(MetricRegistry.name(daoClassName, "updateAll")); registry.timer(MetricRegistry.name(daoClassName, "deleteAll")); } // register the search timers if (Config.isSearchEnabled()) { String searchClassName = getClassName(Para.getSearch().getClass()); registry.timer(MetricRegistry.name(searchClassName, "index")); registry.timer(MetricRegistry.name(searchClassName, "unindex")); registry.timer(MetricRegistry.name(searchClassName, "indexAll")); registry.timer(MetricRegistry.name(searchClassName, "unindexAll")); registry.timer(MetricRegistry.name(searchClassName, "findById")); registry.timer(MetricRegistry.name(searchClassName, "findByIds")); registry.timer(MetricRegistry.name(searchClassName, "findNearby")); registry.timer(MetricRegistry.name(searchClassName, "findPrefix")); registry.timer(MetricRegistry.name(searchClassName, "findQuery")); registry.timer(MetricRegistry.name(searchClassName, "findNestedQuery")); registry.timer(MetricRegistry.name(searchClassName, "findSimilar")); registry.timer(MetricRegistry.name(searchClassName, "findTagged")); registry.timer(MetricRegistry.name(searchClassName, "findTags")); registry.timer(MetricRegistry.name(searchClassName, "findTermInList")); registry.timer(MetricRegistry.name(searchClassName, "findTerms")); registry.timer(MetricRegistry.name(searchClassName, "findWildcard")); registry.timer(MetricRegistry.name(searchClassName, "getCount")); } // register the cache timers if (Config.isCacheEnabled()) { String cacheClassName = getClassName(Para.getCache().getClass()); registry.timer(MetricRegistry.name(cacheClassName, "contains")); registry.timer(MetricRegistry.name(cacheClassName, "put")); registry.timer(MetricRegistry.name(cacheClassName, "get")); registry.timer(MetricRegistry.name(cacheClassName, "remove")); registry.timer(MetricRegistry.name(cacheClassName, "putAll")); registry.timer(MetricRegistry.name(cacheClassName, "getAll")); registry.timer(MetricRegistry.name(cacheClassName, "removeAll")); } // register timers on the REST endpoints if (Config.API_ENABLED) { String restUtilsClassName = getClassName(RestUtils.class); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "read")); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "create")); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "overwrite")); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "update")); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "delete")); registry.timer(MetricRegistry.name(restUtilsClassName, "batch", "read")); registry.timer(MetricRegistry.name(restUtilsClassName, "batch", "create")); registry.timer(MetricRegistry.name(restUtilsClassName, "batch", "update")); registry.timer(MetricRegistry.name(restUtilsClassName, "batch", "delete")); 
registry.timer(MetricRegistry.name(restUtilsClassName, "links", "read")); registry.timer(MetricRegistry.name(restUtilsClassName, "links", "delete")); registry.timer(MetricRegistry.name(restUtilsClassName, "links", "create")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "id")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "ids")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "nested")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "nearby")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "prefix")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "similar")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "tagged")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "in")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "terms")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "wildcard")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "count")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "default")); } // register timers on custom resource handlers for (final CustomResourceHandler handler : getCustomResourceHandlers()) { String resourceHandlerClassName = getClassName(handler.getClass()); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handleGet")); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handlePost")); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handlePatch")); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handlePut")); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handleDelete")); } if (Config.getConfigBoolean("metrics.jmx_enabled", false)) { JmxReporter.forRegistry(registry).inDomain(registryName).build().start(); } } /** * Publish an app's @{link MetricRegistry} to Graphite. * @param appid the name of the app. * @param settings settings specifying the host URL and port of the Graphite server. */ private static void createAppGraphiteReporter(String appid, GraphiteSettings settings) { HashMap<String, Object> prefixContext = new HashMap<>(); prefixContext.put("APP_ID", appid); if (INSTANCE_ID != null) { prefixContext.put("INSTANCE_ID", INSTANCE_ID); } String appPrefix = Utils.compileMustache(prefixContext, GRAPHITE_APP_PREFIX_TEMPLATE); createGraphiteReporter(appid, settings, appPrefix); } /** * Publish a specific @{link MetricRegistry} to Graphite. * @param registryName the name of the registry. Either the system default name or an appid. * @param settings settings specifying the host URL and port of the Graphite server. * @param prefix an optional prefix to apply to the reported metrics. */ private static void createGraphiteReporter(String registryName, GraphiteSettings settings, String prefix) { Graphite graphite = new Graphite(settings.getHost(), settings.getPort()); GraphiteReporter reporter = GraphiteReporter.forRegistry(SharedMetricRegistries.getOrCreate(registryName)) .prefixedWith(prefix) .build(graphite); reporter.start(GRAPHITE_PERIOD, TimeUnit.SECONDS); GRAPHITE_REPORTERS.put(registryName, reporter); GRAPHITE_SETTINGS.put(registryName, settings); logger.info("Created Graphite reporter for registry \"{}\", pushing to {{}:{}}", registryName, settings.getHost(), settings.getPort()); } /** * A listener method to process new settings registered on applications (including the root app). * @param app the application the setting was added to. 
* @param key the name of the setting * @param value the value of the setting */ public static void addAppSetting(App app, String key, Object value) { if (GRAPHITE_APP_SETTINGS_NAME.equals(key)) { // validate the graphite reporter settings and, if valid, save them to the registry if (Map.class.isAssignableFrom(value.getClass())) { Map graphiteSettings = (Map) value; if (graphiteSettings.containsKey("host") && graphiteSettings.containsKey("port")) { String host = (String) graphiteSettings.get("host"); Integer port = (Integer) graphiteSettings.get("port"); if (!StringUtils.isBlank(host) && port != null && port.intValue() > 0) { GraphiteSettings settings = new GraphiteSettings(host, port); RegistryUtils.putValue(GRAPHITE_REGISTRY_NAME, app.getAppIdentifier(), settings); } } } } } /** * A listener method to process removed settings for an application (including the root app). * @param app the application the setting was removed from. * @param key the name of the setting */ public static void removeAppSetting(App app, String key) { if (GRAPHITE_APP_SETTINGS_NAME.equals(key)) { RegistryUtils.removeValue(GRAPHITE_REGISTRY_NAME, app.getAppIdentifier()); } } /** * A scheduled check of metrics setting registries to detect changes and apply them. * * Note: this method keeps the local registry of reporters in sync with the central one in the database. * This ensures that all nodes in a cluster push metrics to the corresponding Graphite servers for each app. */ private static void syncAppMetricsReporters() { // check for app-specific graphite push settings Map<String, Object> graphiteRegistry = RegistryUtils.getRegistry(GRAPHITE_REGISTRY_NAME); if (graphiteRegistry != null && GRAPHITE_PERIOD > 0) { // iterate the registry values for (Map.Entry<String, Object> iter : graphiteRegistry.entrySet()) { String appid = iter.getKey(); GraphiteSettings settings = GraphiteSettings.parse(iter.getValue()); if (settings == null) { continue; } // close an existing reporter if (GRAPHITE_REPORTERS.containsKey(appid)) { if (!settings.equals(GRAPHITE_SETTINGS.get(appid))) { // the new settings aren't the same, stop the existing reporter and replace it with a new one GRAPHITE_REPORTERS.get(appid).stop(); GRAPHITE_REPORTERS.remove(appid); GRAPHITE_SETTINGS.remove(appid); createAppGraphiteReporter(appid, settings); } } else { // no existing reporter for this app, create it createAppGraphiteReporter(appid, settings); } } // check if any of the graphite reporters was disabled by an app, if so, remove it List<Map.Entry<String, GraphiteReporter>> appsToRemove = Lists.newArrayList(); for (Map.Entry<String, GraphiteReporter> iter : GRAPHITE_REPORTERS.entrySet()) { if (!iter.getKey().equals(SYSTEM_METRICS_NAME) && !graphiteRegistry.containsKey(iter.getKey())) { appsToRemove.add(iter); } } for (Map.Entry<String, GraphiteReporter> iter : appsToRemove) { iter.getValue().stop(); GRAPHITE_REPORTERS.remove(iter.getKey()); GRAPHITE_SETTINGS.remove(iter.getKey()); } } } }
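The MetricsUtils class above is, at its core, a management layer over Dropwizard Metrics: one shared MetricRegistry per scope ("_system" or an appid), timers pre-registered so they report zero values from startup, and an optional GraphiteReporter attached to each registry. The self-contained sketch below shows that underlying plumbing on its own; the host, port, prefix and metric names are placeholder values, not Para's actual configuration.

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.SharedMetricRegistries;
import com.codahale.metrics.Timer;
import com.codahale.metrics.graphite.Graphite;
import com.codahale.metrics.graphite.GraphiteReporter;
import java.util.concurrent.TimeUnit;

public class GraphiteReportingSketch {
  public static void main(String[] args) throws Exception {
    // One registry per scope; MetricsUtils uses "_system" plus one registry per app id.
    MetricRegistry registry = SharedMetricRegistries.getOrCreate("_system");
    Timer readTimer = registry.timer(MetricRegistry.name("DAO", "read"));

    // Push the registry to a Graphite/Carbon endpoint every 30 seconds (placeholder host/port).
    Graphite graphite = new Graphite("graphite.example.com", 2003);
    GraphiteReporter reporter = GraphiteReporter.forRegistry(registry)
        .prefixedWith("para.instance1")
        .convertRatesTo(TimeUnit.SECONDS)
        .convertDurationsTo(TimeUnit.MILLISECONDS)
        .build(graphite);
    reporter.start(30, TimeUnit.SECONDS);

    // Time a unit of work; Timer.Context is Closeable, so try-with-resources stops it.
    try (Timer.Context ctx = readTimer.time()) {
      Thread.sleep(50); // stand-in for the timed operation
    }

    reporter.stop(); // flush and shut down the reporter when finished
  }
}

In MetricsUtils the reporting period, prefix and host come from Para's configuration and per-app settings registries; they are hard-coded here only to keep the sketch runnable.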
para-server/src/main/java/com/erudika/para/metrics/MetricsUtils.java
/* * Copyright 2013-2018 Erudika. https://erudika.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For issues and patches go to: https://github.com/erudika */ package com.erudika.para.metrics; import com.codahale.metrics.JmxReporter; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.SharedMetricRegistries; import com.codahale.metrics.Slf4jReporter; import com.codahale.metrics.Timer; import com.codahale.metrics.graphite.Graphite; import com.codahale.metrics.graphite.GraphiteReporter; import com.erudika.para.AppCreatedListener; import com.erudika.para.AppSettingAddedListener; import com.erudika.para.AppSettingRemovedListener; import com.erudika.para.InitializeListener; import com.erudika.para.Para; import com.erudika.para.core.App; import com.erudika.para.rest.CustomResourceHandler; import com.erudika.para.rest.RestUtils; import com.erudika.para.utils.Config; import com.erudika.para.utils.HealthUtils; import com.erudika.para.utils.RegistryUtils; import com.erudika.para.utils.Pager; import com.erudika.para.utils.Utils; import com.google.common.collect.Lists; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Closeable; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.Objects; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import static com.erudika.para.Para.getCustomResourceHandlers; /** * A centralized utility for managing and retrieving all Para performance metrics. * @author Jeremy Wiesner [[email protected]] */ public enum MetricsUtils implements InitializeListener, Runnable { /** * Singleton. */ INSTANCE { private ScheduledFuture<?> scheduledRegistryCheck; @Override public void onInitialize() { if (!Config.getConfigBoolean("metrics_enabled", true)) { return; } // setup metrics log file reporting MetricRegistry systemRegistry = SharedMetricRegistries.tryGetDefault(); if (systemRegistry == null) { systemRegistry = SharedMetricRegistries.setDefault(SYSTEM_METRICS_NAME); } Logger metricsLogger = LoggerFactory.getLogger("paraMetrics"); int loggingRate = Config.getConfigInt("metrics.logging_rate", 60); if (loggingRate > 0) { Slf4jReporter.forRegistry(systemRegistry).outputTo(metricsLogger).build(). 
start(loggingRate, TimeUnit.SECONDS); } // initialize metrics for the system and all existing applications MetricsUtils.initializeMetrics(SYSTEM_METRICS_NAME); // setup graphite reporting for the system String host = Config.getConfigParam("metrics.graphite.host", null); if (GRAPHITE_PERIOD > 0 && !StringUtils.isBlank(host)) { int port = Config.getConfigInt("metrics.graphite.port", 2003); String prefixSystem = Config.getConfigParam("metrics.graphite.prefix_system", null); if (INSTANCE_ID != null) { HashMap<String, Object> prefixContext = new HashMap<>(); prefixContext.put("INSTANCE_ID", INSTANCE_ID); prefixSystem = Utils.compileMustache(prefixContext, prefixSystem); } GraphiteSettings settings = new GraphiteSettings(host, port); MetricsUtils.createGraphiteReporter(SYSTEM_METRICS_NAME, settings, prefixSystem); } if (HealthUtils.getInstance().isHealthy()) { // find all app objects even if there are more than 10000 apps in the system // apps will be added in chronological order, root app first, followed by child apps Pager pager = new Pager(1, "_docid", false, Config.DEFAULT_LIMIT); List<App> apps = new LinkedList<>(); List<App> appsPage; do { appsPage = Para.getSearch().findQuery(Utils.type(App.class), "*", pager); apps.addAll(appsPage); logger.debug("Found a page of {} apps.", appsPage.size()); } while (!appsPage.isEmpty()); logger.info("Found root app '{}' and {} existing child app(s){}", Config.getRootAppIdentifier(), apps.size() - 1, apps.isEmpty() || !logger.isDebugEnabled() ? "." : ":"); for (App app : apps) { logger.debug(" {}{}", app.getAppIdentifier(), app.isRootApp() ? " (root app)" : ""); MetricsUtils.initializeMetrics(app.getAppIdentifier()); } } // schedule the regular check on metrics settings registries to establish app-specific reporting if (scheduledRegistryCheck == null) { scheduledRegistryCheck = Para.getScheduledExecutorService(). scheduleAtFixedRate(this, 0, 1, TimeUnit.MINUTES); } // setup initialization for all new apps App.addAppCreatedListener(new AppCreatedListener() { public void onAppCreated(App app) { if (app != null) { MetricsUtils.initializeMetrics(app.getAppIdentifier()); } } }); // setup listeners for push metrics settings App.addAppSettingAddedListener(new AppSettingAddedListener() { @Override public void onSettingAdded(App app, String settingKey, Object settingValue) { if (app != null) { MetricsUtils.addAppSetting(app, settingKey, settingValue); } } }); App.addAppSettingRemovedListener(new AppSettingRemovedListener() { @Override public void onSettingRemoved(App app, String settingKey) { if (app != null) { MetricsUtils.removeAppSetting(app, settingKey); } } }); } @Override public void run() { MetricsUtils.syncAppMetricsReporters(); } }; private static final Logger logger = LoggerFactory.getLogger(MetricsUtils.class); private static final String INSTANCE_ID = Config.getConfigParam("instance_id", null); private static final Map<String, GraphiteReporter> GRAPHITE_REPORTERS = new HashMap<>(); private static final Map<String, GraphiteSettings> GRAPHITE_SETTINGS = new HashMap<>(); private static final String GRAPHITE_APP_PREFIX_TEMPLATE = Config.getConfigParam("metrics.graphite.prefix_apps", null); private static final int GRAPHITE_PERIOD = Config.getConfigInt("metrics.graphite.period", 0); /** * The name of the default system @{link MetricRegistry}. */ public static final String SYSTEM_METRICS_NAME = "_system"; /** * The name of the registry holding app-specific settings for reporting metrics to Graphite. 
*/ public static final String GRAPHITE_REGISTRY_NAME = "GraphiteReporter"; /** * The name of the app settings object that contains the info to push an app's metrics to Graphite. */ public static final String GRAPHITE_APP_SETTINGS_NAME = "metricsGraphiteReporter"; /** * An auto-closeable class that manages timers for both the overall system as well as specific application. */ public static final class Context implements Closeable { private final Timer.Context systemContext; private final Timer.Context appContext; private Context(Timer systemTimer, Timer appTimer) { this.systemContext = systemTimer.time(); this.appContext = appTimer == null ? null : appTimer.time(); } @Override public void close() { systemContext.stop(); if (appContext != null) { appContext.stop(); } } } /** * A utility class for holding the settings for connecting to a Graphite server. */ private static final class GraphiteSettings extends HashMap<String, Object> { GraphiteSettings(String host, int port) { this.put("host", host); this.put("port", port); } public String getHost() { return (String) this.get("host"); } public int getPort() { return (int) this.get("port"); } public static GraphiteSettings parse(Object object) { Map map = (Map) object; if (map == null || !map.containsKey("host") || !map.containsKey("port")) { return null; } return new GraphiteSettings((String) map.get("host"), (int) map.get("port")); } @Override public boolean equals(Object obj) { if (obj == null || this.getClass() != obj.getClass()) { return false; } return Objects.equals(this.getHost(), ((GraphiteSettings) obj).getHost()) && Objects.equals(this.getPort(), ((GraphiteSettings) obj).getPort()); } @Override public int hashCode() { return 67 * Objects.hashCode(this.getPort()) + Objects.hashCode(this.getHost()); } } /** * Provides access to the singleton instance methods. * @return an instance of this class */ public static MetricsUtils getInstance() { return INSTANCE; } /** * Instantiate timing of a particular class and method for a specific application. * @param appid the application that invoked the request * @param clazz the Class to be timed * @param names one or more unique names to identify the timer - usually a method name * @return a closeable context that encapsulates the timed method */ public static MetricsUtils.Context time(String appid, Class clazz, String... names) { String className = getClassName(clazz); Timer systemTimer = getTimer(SYSTEM_METRICS_NAME, className, names); Timer appTimer = appid == null || appid.isEmpty() ? null : getTimer(appid, className, names); return new MetricsUtils.Context(systemTimer, appTimer); } private static String getClassName(Class clazz) { if (clazz.getSimpleName().contains("EnhancerByGuice")) { clazz = clazz.getSuperclass(); } if (CustomResourceHandler.class.isAssignableFrom(clazz)) { return clazz.getCanonicalName(); } else { return clazz.getSimpleName(); } } private static Timer getTimer(String registryName, String className, String... names) { return SharedMetricRegistries.getOrCreate(registryName).timer(MetricRegistry.name(className, names)); } /** * Initialize all the possible metrics for a specific registry (either the system registry or an application registry). * This ensures that all metrics report with zero values from system startup or application creation. * @param registryName the name of the registry to initialize. Either the system default name or an appid. 
*/ private static void initializeMetrics(String registryName) { MetricRegistry registry = SharedMetricRegistries.getOrCreate(registryName); // register the DAO timers if (Para.getDAO() != null) { String daoClassName = getClassName(Para.getDAO().getClass()); registry.timer(MetricRegistry.name(daoClassName, "create")); registry.timer(MetricRegistry.name(daoClassName, "read")); registry.timer(MetricRegistry.name(daoClassName, "update")); registry.timer(MetricRegistry.name(daoClassName, "delete")); registry.timer(MetricRegistry.name(daoClassName, "createAll")); registry.timer(MetricRegistry.name(daoClassName, "readAll")); registry.timer(MetricRegistry.name(daoClassName, "readPage")); registry.timer(MetricRegistry.name(daoClassName, "updateAll")); registry.timer(MetricRegistry.name(daoClassName, "deleteAll")); } // register the search timers if (Config.isSearchEnabled()) { String searchClassName = getClassName(Para.getSearch().getClass()); registry.timer(MetricRegistry.name(searchClassName, "index")); registry.timer(MetricRegistry.name(searchClassName, "unindex")); registry.timer(MetricRegistry.name(searchClassName, "indexAll")); registry.timer(MetricRegistry.name(searchClassName, "unindexAll")); registry.timer(MetricRegistry.name(searchClassName, "findById")); registry.timer(MetricRegistry.name(searchClassName, "findByIds")); registry.timer(MetricRegistry.name(searchClassName, "findNearby")); registry.timer(MetricRegistry.name(searchClassName, "findPrefix")); registry.timer(MetricRegistry.name(searchClassName, "findQuery")); registry.timer(MetricRegistry.name(searchClassName, "findNestedQuery")); registry.timer(MetricRegistry.name(searchClassName, "findSimilar")); registry.timer(MetricRegistry.name(searchClassName, "findTagged")); registry.timer(MetricRegistry.name(searchClassName, "findTags")); registry.timer(MetricRegistry.name(searchClassName, "findTermInList")); registry.timer(MetricRegistry.name(searchClassName, "findTerms")); registry.timer(MetricRegistry.name(searchClassName, "findWildcard")); registry.timer(MetricRegistry.name(searchClassName, "getCount")); } // register the cache timers if (Config.isCacheEnabled()) { String cacheClassName = getClassName(Para.getCache().getClass()); registry.timer(MetricRegistry.name(cacheClassName, "contains")); registry.timer(MetricRegistry.name(cacheClassName, "put")); registry.timer(MetricRegistry.name(cacheClassName, "get")); registry.timer(MetricRegistry.name(cacheClassName, "remove")); registry.timer(MetricRegistry.name(cacheClassName, "putAll")); registry.timer(MetricRegistry.name(cacheClassName, "getAll")); registry.timer(MetricRegistry.name(cacheClassName, "removeAll")); } // register timers on the REST endpoints if (Config.API_ENABLED) { String restUtilsClassName = getClassName(RestUtils.class); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "read")); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "create")); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "overwrite")); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "update")); registry.timer(MetricRegistry.name(restUtilsClassName, "crud", "delete")); registry.timer(MetricRegistry.name(restUtilsClassName, "batch", "read")); registry.timer(MetricRegistry.name(restUtilsClassName, "batch", "create")); registry.timer(MetricRegistry.name(restUtilsClassName, "batch", "update")); registry.timer(MetricRegistry.name(restUtilsClassName, "batch", "delete")); registry.timer(MetricRegistry.name(restUtilsClassName, "links", "read")); 
registry.timer(MetricRegistry.name(restUtilsClassName, "links", "delete")); registry.timer(MetricRegistry.name(restUtilsClassName, "links", "create")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "id")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "ids")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "nested")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "nearby")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "prefix")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "similar")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "tagged")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "in")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "terms")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "wildcard")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "count")); registry.timer(MetricRegistry.name(restUtilsClassName, "search", "default")); } // register timers on custom resource handlers for (final CustomResourceHandler handler : getCustomResourceHandlers()) { String resourceHandlerClassName = getClassName(handler.getClass()); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handleGet")); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handlePost")); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handlePatch")); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handlePut")); registry.timer(MetricRegistry.name(resourceHandlerClassName, "handleDelete")); } if (Config.getConfigBoolean("metrics.jmx_enabled", false)) { JmxReporter.forRegistry(registry).inDomain(registryName).build().start(); } } /** * Publish an app's @{link MetricRegistry} to Graphite. * @param appid the name of the app. * @param settings settings specifying the host URL and port of the Graphite server. */ private static void createAppGraphiteReporter(String appid, GraphiteSettings settings) { HashMap<String, Object> prefixContext = new HashMap<>(); prefixContext.put("APP_ID", appid); if (INSTANCE_ID != null) { prefixContext.put("INSTANCE_ID", INSTANCE_ID); } String appPrefix = Utils.compileMustache(prefixContext, GRAPHITE_APP_PREFIX_TEMPLATE); createGraphiteReporter(appid, settings, appPrefix); } /** * Publish a specific @{link MetricRegistry} to Graphite. * @param registryName the name of the registry. Either the system default name or an appid. * @param settings settings specifying the host URL and port of the Graphite server. * @param prefix an optional prefix to apply to the reported metrics. */ private static void createGraphiteReporter(String registryName, GraphiteSettings settings, String prefix) { Graphite graphite = new Graphite(settings.getHost(), settings.getPort()); GraphiteReporter reporter = GraphiteReporter.forRegistry(SharedMetricRegistries.getOrCreate(registryName)) .prefixedWith(prefix) .build(graphite); reporter.start(GRAPHITE_PERIOD, TimeUnit.SECONDS); GRAPHITE_REPORTERS.put(registryName, reporter); GRAPHITE_SETTINGS.put(registryName, settings); logger.info("Created Graphite reporter for registry \"{}\", pushing to {{}:{}}", registryName, settings.getHost(), settings.getPort()); } /** * A listener method to process new settings registered on applications (including the root app). * @param app the application the setting was added to. 
* @param key the name of the setting * @param value the value of the setting */ public static void addAppSetting(App app, String key, Object value) { if (GRAPHITE_APP_SETTINGS_NAME.equals(key)) { // validate the graphite reporter settings and, if valid, save them to the registry if (Map.class.isAssignableFrom(value.getClass())) { Map graphiteSettings = (Map) value; if (graphiteSettings.containsKey("host") && graphiteSettings.containsKey("port")) { String host = (String) graphiteSettings.get("host"); Integer port = (Integer) graphiteSettings.get("port"); if (!StringUtils.isBlank(host) && port != null && port.intValue() > 0) { GraphiteSettings settings = new GraphiteSettings(host, port); RegistryUtils.putValue(GRAPHITE_REGISTRY_NAME, app.getAppIdentifier(), settings); } } } } } /** * A listener method to process removed settings for an application (including the root app). * @param app the application the setting was removed from. * @param key the name of the setting */ public static void removeAppSetting(App app, String key) { if (GRAPHITE_APP_SETTINGS_NAME.equals(key)) { RegistryUtils.removeValue(GRAPHITE_REGISTRY_NAME, app.getAppIdentifier()); } } /** * A scheduled check of metrics setting registries to detect changes and apply them. */ private static void syncAppMetricsReporters() { // check for app-specific graphite push settings Map<String, Object> graphiteRegistry = RegistryUtils.getRegistry(GRAPHITE_REGISTRY_NAME); if (graphiteRegistry != null && GRAPHITE_PERIOD > 0) { // iterate the registry values int numNewReporters = 0; for (Map.Entry<String, Object> iter : graphiteRegistry.entrySet()) { String appid = iter.getKey(); GraphiteSettings settings = GraphiteSettings.parse(iter.getValue()); if (settings == null) { continue; } // close an existing reporter if (GRAPHITE_REPORTERS.containsKey(appid)) { if (!settings.equals(GRAPHITE_SETTINGS.get(appid))) { // the new settings aren't the same, stop the existing reporter and replace it with a new one GRAPHITE_REPORTERS.get(appid).stop(); GRAPHITE_REPORTERS.remove(appid); GRAPHITE_SETTINGS.remove(appid); createAppGraphiteReporter(appid, settings); } } else { // no existing reporter for this app, create it numNewReporters++; createAppGraphiteReporter(appid, settings); } } if (graphiteRegistry.size() < (GRAPHITE_REPORTERS.size() - numNewReporters)) { // at least one of the graphite reporters was disabled by an app, so we need to remove it List<Map.Entry<String, GraphiteReporter>> appsToRemove = Lists.newArrayList(); for (Map.Entry<String, GraphiteReporter> iter : GRAPHITE_REPORTERS.entrySet()) { if (!graphiteRegistry.containsKey(iter.getKey())) { appsToRemove.add(iter); } } for (Map.Entry<String, GraphiteReporter> iter : appsToRemove) { iter.getValue().stop(); GRAPHITE_REPORTERS.remove(iter.getKey()); GRAPHITE_SETTINGS.remove(iter.getKey()); } } } } }
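Both the new and the old version of MetricsUtils above expose the same timing entry point, MetricsUtils.time(), whose Context return value is Closeable and stops the system-wide timer plus the optional per-app timer when closed. A hedged sketch of the calling pattern follows; the class name and the appid value are hypothetical.

package com.erudika.para.metrics;

/**
 * Illustrative sketch only: how a caller might time a block of work with
 * MetricsUtils.time(). The class name is hypothetical.
 */
public final class TimingExample {

    private TimingExample() {
    }

    public static void doTimedWork(String appid) {
        // Context implements Closeable, so try-with-resources stops both the
        // system-wide timer and the per-app timer (when appid is not blank).
        try (MetricsUtils.Context ignored = MetricsUtils.time(appid, TimingExample.class, "doTimedWork")) {
            // ... the work being measured ...
        }
    }
}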
added method to stop metrics reporting for deleted apps, fixed system metrics removed on sync
para-server/src/main/java/com/erudika/para/metrics/MetricsUtils.java
added method to stop metrics reporting for deleted apps, fixed system metrics removed on sync
Java
apache-2.0
254fd6b932ef8a0ce74ea0853390fb477a953707
0
ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE
package uk.ac.ebi.quickgo.index.annotation.coterms; import uk.ac.ebi.quickgo.annotation.common.document.AnnotationDocument; import com.google.common.base.Preconditions; import java.util.*; import java.util.function.Predicate; import org.springframework.batch.item.ItemWriter; import static uk.ac.ebi.quickgo.index.annotation.coterms.GeneProductBatch.buildBatch; /** * Aggregates all the data need to calculate all co-occurrence stat data points. * * @author Tony Wardell * Date: 26/11/2015 * Time: 11:59 * Created with IntelliJ IDEA. */ public class AnnotationCoOccurringTermsAggregator implements ItemWriter<AnnotationDocument> { //A list of all unique geneProducts encountered - it exists so we can get a count of the total unique gene products. private final Set<String> geneProductList; //Determines which annotations get processed. private final Predicate<AnnotationDocument> toBeProcessed; private GeneProductBatch geneProductBatch; private final CoTermMatrix coTerms; private final TermGPCount termGPCount; public AnnotationCoOccurringTermsAggregator(Predicate<AnnotationDocument> toBeProcessed) { Preconditions .checkArgument(toBeProcessed != null, "Null predicate passed AnnotationCoOccurringTermsAggregator" + " constructor"); this.toBeProcessed = toBeProcessed; this.coTerms = new CoTermMatrix(); geneProductList = new HashSet<>(); termGPCount = new TermGPCount(); geneProductBatch = new GeneProductBatch(); } /** * Number of unique gene products processed from Annotations * * @return unique gene product count */ public long getTotalOfAnnotatedGeneProducts() { return geneProductList.size(); } /** * This is the count of all unique gene products for terms encountered during processing. We hold this figure * separately as it is used many times. * * @return map of GO terms to count of unique gene products for the term. */ public Map<String, HitCount> getGeneProductCounts() { return termGPCount.termGPCount; } /** * Holds a termN by termN matrix, each cell of which holds the count of gp this intersection of terms hold * * @return map of processed terms to all co-occurring terms, together with count of how many times they have * co-occurred. */ public Map<String, Map<String, HitCount>> getCoTerms() { return coTerms.coTermMatrix; } /** * For each AnnotationDocument item passed to this method, check whether it passed the criteria for aggregating, * and if so add its data to the aggregated data. * * @param items a list of AnnotationDocuments. * @throws java.lang.Exception - if there are errors. The framework will catch the exception and convert or rethrow it as appropriate. */ @Override public void write(List<? extends AnnotationDocument> items) throws Exception { Preconditions.checkArgument(items != null, "Null annotation passed to process"); items.stream() .filter(this.toBeProcessed::test) .forEach(this::addGOTermToAggregationForGeneProduct); } /** * Add the data in an AnnotationDocument instance to the aggregation. * The documents are processed by this class in the gene product order. * So the first thing to do is check if this doc has a previously unseen gene product id. * If it doesn't we use the existing aggregation object (newOrExistingBatch instance) to aggregate too. * If it is a new gene product the we have a new newOrExistingBatch instance created. * Add the data in this document to the target newOrExistingBatch. 
* Add the gene product id to the list of geneproduct ids that have been processed (we need a list of all gene * products processed for the statistics calculations at the end of the calculation. * * @param doc */ private void addGOTermToAggregationForGeneProduct(AnnotationDocument doc) { if (geneProductBatch.geneProduct == null) { geneProductBatch.geneProduct = doc.geneProductId; } if (!doc.geneProductId.equals(geneProductBatch.geneProduct)) { increaseCountsForTermsInBatch(); geneProductBatch = buildBatch(doc); }else{ geneProductBatch.addTerm(doc.goId); } geneProductList.add(doc.geneProductId); //set so each gp is only added once. } /** * The client must call finish() when all annotation documents have been processed by the write method to wrap up * processing. */ public void finish() { increaseCountsForTermsInBatch(); } /** * Got to the end of the list of annotations for this gene product * Record which terms annotate the same gene products. */ private void increaseCountsForTermsInBatch() { for (String termId : geneProductBatch.terms) { coTerms.incrementCountForCoTerms(termId, geneProductBatch.terms); termGPCount.incrementGeneProductCountForTerm(termId); } } } /** * A data bucket for aggregating annotation document data. Each batch is created for all data with the same gene * product id. */ class GeneProductBatch { //A set of all terms encountered for a Gene Product. Therefore all these terms are co-occurring with each other. final Set<String> terms; //The input file has annotations in gene product order, so we use this value to note changes in gene product. public String geneProduct; public GeneProductBatch() { terms = new HashSet<>(); } void addTerm(String termId) { terms.add(termId); } /** * Create a new GeneProductBatch to aggregate terms for the 'new' gene product id. * @param doc * @return */ static GeneProductBatch buildBatch(AnnotationDocument doc){ GeneProductBatch geneProductBatch = new GeneProductBatch(); geneProductBatch.geneProduct = doc.geneProductId; geneProductBatch.addTerm(doc.goId); return geneProductBatch; } } /** * This class represents a matrix of term to compared term, and its used to hold the number of permutation occurrences. */ class CoTermMatrix { // Key is the target term, the value is a map of all the GO terms that are used in annotations for the same gene // product. i.e. Key =>target term, value=> map (key=>co-occurring term, value => HitCountForCo-occurrence) // For example key=>'GO:0003824', value=> map(entry 1 :: key=>'GO:0008152' value=>1346183 hits, entry 2 key=>'GO:0016740' value=>1043613 hits) final Map<String, Map<String, HitCount>> coTermMatrix; public CoTermMatrix() { coTermMatrix = new TreeMap<>(); } /** * For all terms encountered for gene product batch, increment its hit count. If this is a new {@code termId}, then * its hit count is initialised as 1. * @param termId single term from batch * @param termsInBatch a list of all terms encountered in annotations for a particular gene product. */ void incrementCountForCoTerms(String termId, Set<String> termsInBatch) { Map<String, HitCount> co_occurringTerms = getCo_occurringTerms(termId); //Loop through all the terms we have encountered in this batch and update the quantities for (String co_occurringTerm : termsInBatch) { //Get 'permanent' record for this termId/termId permutation HitCount permutationHitCount = co_occurringTerms.get(co_occurringTerm); //Create if it doesn't exist. 
            if (permutationHitCount == null) {
                permutationHitCount = new HitCount();
                co_occurringTerms.put(co_occurringTerm, permutationHitCount);
            }

            //Update it with a count of 'one' as this batch is for one gene product and so the count must be one
            permutationHitCount.hits++;
        }
    }

    /**
     * Get the co-occurrence stats for this termId
     *
     * @param termId
     * @return all terms that co-occur with the argument term
     */
    private Map<String, HitCount> getCo_occurringTerms(String termId) {

        //look in the store
        Map<String, HitCount> termCoTerms = coTermMatrix.get(termId);

        //Create if it doesn't exist.
        if (termCoTerms == null) {
            termCoTerms = new HashMap<>();
            coTermMatrix.put(termId, termCoTerms);
        }
        return termCoTerms;
    }
}

class TermGPCount {

    final Map<String, HitCount> termGPCount;

    public TermGPCount() {
        this.termGPCount = new HashMap<>();
    }

    /**
     * For every term, increment by one the count of gene products for this term
     */
    void incrementGeneProductCountForTerm(String term) {
        HitCount hitCount = termGPCount.get(term);

        if (hitCount == null) {
            hitCount = new HitCount();
            termGPCount.put(term, hitCount);
        }
        hitCount.hits++;
    }
}
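A hedged driving sketch for the aggregator above (not part of the QuickGO sources): construct it with a filter predicate, feed it AnnotationDocuments ordered by gene product id via write(), call finish() to flush the last batch, then read the co-occurrence matrix. It assumes AnnotationDocument has a no-argument constructor and writable geneProductId/goId fields, which is how the aggregator reads them; the gene product and GO term ids below are made up.

package uk.ac.ebi.quickgo.index.annotation.coterms;

import java.util.Arrays;

import uk.ac.ebi.quickgo.annotation.common.document.AnnotationDocument;

/**
 * Illustrative sketch only: the call sequence the aggregator expects. Assumes
 * AnnotationDocument has a no-arg constructor and writable geneProductId/goId fields;
 * adjust to the real class if that differs.
 */
public final class CoTermsAggregationExample {

    private CoTermsAggregationExample() {
    }

    public static void main(String[] args) throws Exception {
        // Process every annotation; a real predicate might filter by evidence code etc.
        AnnotationCoOccurringTermsAggregator aggregator =
                new AnnotationCoOccurringTermsAggregator(doc -> true);

        // Input must be ordered by gene product id, as the aggregator batches on changes of that id.
        aggregator.write(Arrays.asList(
                doc("P12345", "GO:0003824"),
                doc("P12345", "GO:0008152"),
                doc("Q67890", "GO:0003824")));

        // finish() flushes the batch for the last gene product.
        aggregator.finish();

        System.out.println(aggregator.getCoTerms());                  // term -> co-term -> hit count
        System.out.println(aggregator.getGeneProductCounts());        // term -> distinct gene product count
        System.out.println(aggregator.getTotalOfAnnotatedGeneProducts());
    }

    private static AnnotationDocument doc(String geneProductId, String goId) {
        AnnotationDocument doc = new AnnotationDocument();
        doc.geneProductId = geneProductId;
        doc.goId = goId;
        return doc;
    }
}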
indexing/src/main/java/uk/ac/ebi/quickgo/index/annotation/coterms/AnnotationCoOccurringTermsAggregator.java
package uk.ac.ebi.quickgo.index.annotation.coterms; import uk.ac.ebi.quickgo.annotation.common.document.AnnotationDocument; import com.google.common.base.Preconditions; import java.util.*; import java.util.function.Predicate; import org.springframework.batch.item.ItemWriter; import static uk.ac.ebi.quickgo.index.annotation.coterms.GeneProductBatch.buildBatch; /** * Aggregates all the data need to calculate all co-occurrence stat data points. * * @author Tony Wardell * Date: 26/11/2015 * Time: 11:59 * Created with IntelliJ IDEA. */ public class AnnotationCoOccurringTermsAggregator implements ItemWriter<AnnotationDocument> { //A list of all unique geneProducts encountered - it exists so we can get a count of the total unique gene products. private final Set<String> geneProductList; //Determines which annotations get processed. private final Predicate<AnnotationDocument> toBeProcessed; private GeneProductBatch geneProductBatch; private final CoTermMatrix coTerms; private final TermGPCount termGPCount; public AnnotationCoOccurringTermsAggregator(Predicate<AnnotationDocument> toBeProcessed) { Preconditions .checkArgument(toBeProcessed != null, "Null predicate passed AnnotationCoOccurringTermsAggregator" + " constructor"); this.toBeProcessed = toBeProcessed; this.coTerms = new CoTermMatrix(); geneProductList = new HashSet<>(); termGPCount = new TermGPCount(); geneProductBatch = new GeneProductBatch(); } /** * Number of unique gene products processed from Annotations * * @return unique gene product count */ public long getTotalOfAnnotatedGeneProducts() { return geneProductList.size(); } /** * This is the count of all unique gene products for terms encountered during processing. We hold this figure * separately as it is used many times. * * @return map of GO terms to count of unique gene products for the term. */ public Map<String, HitCount> getGeneProductCounts() { return termGPCount.termGPCount; } /** * Holds a termN by termN matrix, each cell of which holds the count of gp this intersection of terms hold * * @return map of processed terms to all co-occurring terms, together with count of how many times they have * co-occurred. */ public Map<String, Map<String, HitCount>> getCoTerms() { return coTerms.coTermMatrix; } /** * For each AnnotationDocument item passed to this method, check whether it passed the criteria for aggregating, * and if so add its data to the aggregated data. * * @param items a list of AnnotationDocuments. * @throws java.lang.Exception - if there are errors. The framework will catch the exception and convert or rethrow it as appropriate. */ @Override public void write(List<? extends AnnotationDocument> items) throws Exception { Preconditions.checkArgument(items != null, "Null annotation passed to process"); items.stream() .filter(this.toBeProcessed::test) .forEach(this::addGOTermToAggregationForGeneProduct); } /** * Add the data in an AnnotationDocument instance to the aggregation. * The documents are processed by this class in the gene product order. * So the first thing to do is check if this doc has a previously unseen gene product id. * If it doesn't we use the existing aggregation object (newOrExistingBatch instance) to aggregate too. * If it is a new gene product the we have a new newOrExistingBatch instance created. * Add the data in this document to the target newOrExistingBatch. 
* Add the gene product id to the list of geneproduct ids that have been processed (we need a list of all gene * products processed for the statistics calculations at the end of the calculation. * * @param doc */ private void addGOTermToAggregationForGeneProduct(AnnotationDocument doc) { if (geneProductBatch.geneProduct == null) { geneProductBatch.geneProduct = doc.geneProductId; } if (!doc.geneProductId.equals(geneProductBatch.geneProduct)) { increaseCountsForTermsInBatch(); geneProductBatch = buildBatch(doc); }else{ geneProductBatch.addTerm(doc.goId); } geneProductList.add(doc.geneProductId); //set so each gp is only added once. } /** * The client must call finish() when all annotation documents have been processed by the write method to wrap up * processing. */ public void finish() { increaseCountsForTermsInBatch(); } /** * Got to the end of the list of annotations for this gene product * Record which terms annotate the same gene products. */ private void increaseCountsForTermsInBatch() { for (String termId : geneProductBatch.terms) { coTerms.incrementCountForCo_occurringTerms(termId, geneProductBatch.terms); termGPCount.incrementGeneProductCountForTerm(termId); } } } /** * A data bucket for aggregating annotation document data. Each batch is created for all data with the same gene * product id. */ class GeneProductBatch { //A set of all terms encountered for a Gene Product. Therefore all these terms are co-occurring with each other. final Set<String> terms; //The input file has annotations in gene product order, so we use this value to note changes in gene product. public String geneProduct; public GeneProductBatch() { terms = new HashSet<>(); } void addTerm(String termId) { terms.add(termId); } /** * Create a new GeneProductBatch to aggregate terms for the 'new' gene product id. * @param doc * @return */ static GeneProductBatch buildBatch(AnnotationDocument doc){ GeneProductBatch geneProductBatch = new GeneProductBatch(); geneProductBatch.geneProduct = doc.geneProductId; geneProductBatch.addTerm(doc.goId); return geneProductBatch; } } /** * This class represents a matrix of term to compared term, and its used to hold the number of permutation occurrences. */ class CoTermMatrix { // Key is the target term, the value is a map of all the GO terms that are used in annotations for the same gene // product. i.e. Key =>target term, value=> map (key=>co-occurring term, value => HitCountForCo-occurrence) // For example key=>'GO:0003824', value=> map(entry 1 :: key=>'GO:0008152' value=>1346183 hits, entry 2 key=>'GO:0016740' value=>1043613 hits) final Map<String, Map<String, HitCount>> coTermMatrix; public CoTermMatrix() { coTermMatrix = new TreeMap<>(); } /** * For all terms encountered for gene product batch, increment its hit count. If this is a new {@code termId}, then * its hit count is initialised as 1. * @param termId single term from batch * @param termsInBatch a list of all terms encountered in annotations for a particular gene product. */ void incrementCountForCo_occurringTerms(String termId, Set<String> termsInBatch) { Map<String, HitCount> co_occurringTerms = getCo_occurringTerms(termId); //Loop through all the terms we have encountered in this batch and update the quantities for (String co_occurringTerm : termsInBatch) { //Get 'permanent' record for this termId/termId permutation HitCount permutationHitCount = co_occurringTerms.get(co_occurringTerm); //Create if it doesn't exist. 
if (permutationHitCount == null) { permutationHitCount = new HitCount(); co_occurringTerms.put(co_occurringTerm, permutationHitCount); } //Update it with a count of 'one' as this batch is for one gene protein and so the count must be one permutationHitCount.hits++; } } /** * Get the co-stats for this termId * * @param termId * @return All terms that are co-occurring term to argument */ private Map<String, HitCount> getCo_occurringTerms(String termId) { //look in the store Map<String, HitCount> termCoTerms = coTermMatrix.get(termId); //Create if it doesn't exist. if (termCoTerms == null) { termCoTerms = new HashMap<>(); coTermMatrix.put(termId, termCoTerms); } return termCoTerms; } } class TermGPCount { final Map<String, HitCount> termGPCount; public TermGPCount() { this.termGPCount = new HashMap<>(); } /** * For every term, increment by one the count of gene products for this term */ void incrementGeneProductCountForTerm(String term) { HitCount hitCount = termGPCount.get(term); if (hitCount == null) { hitCount = new HitCount(); termGPCount.put(term, hitCount); } hitCount.hits++; } }
Rename method incrementCountForCo_occurringTerms to incrementCountForCoTerms;
indexing/src/main/java/uk/ac/ebi/quickgo/index/annotation/coterms/AnnotationCoOccurringTermsAggregator.java
Rename method incrementCountForCo_occurringTerms to incrementCountForCoTerms;
Java
bsd-2-clause
f627742f27cc04fb1f27b05d184f26f2b86ea67c
0
Grantoo/fuel-sdk-unity-android,Grantoo/fuel-sdk-unity-android
package com.fuelpowered.lib.fuelsdk.unity; /** * Created by alexisbarra on 8/30/15. */ import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.support.v4.content.LocalBroadcastManager; import android.util.Log; import com.fuelpowered.lib.fuelsdk.fuel; import com.fuelpowered.lib.fuelsdk.fuelbroadcastreceiver; import com.fuelpowered.lib.fuelsdk.fuelbroadcasttype; import com.fuelpowered.lib.fuelsdk.fuelcompeteui; import com.fuelpowered.lib.fuelsdk.fuelimpl.fueljsonhelper; import com.unity3d.player.UnityPlayer; import org.json.JSONObject; import java.util.Map; public class FuelSDKUnitySharedActivity { private static final String LOG_TAG = "FuelSDKUnitySharedActiv"; private static Activity sActivity; private static IntentFilter mIntentFilter; public static void onCreate(Bundle savedInstanceState) { Log.d(LOG_TAG, "onCreate()"); sActivity = UnityPlayer.currentActivity; Log.d(LOG_TAG, sActivity.getLocalClassName()); fuel.onCreate(sActivity); mIntentFilter = new IntentFilter(); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_VG_LIST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_VG_ROLLBACK.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_NOTIFICATION_ENABLED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_NOTIFICATION_DISABLED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_SOCIAL_LOGIN_REQUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_SOCIAL_INVITE_REQUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_SOCIAL_SHARE_REQUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IMPLICIT_LAUNCH_REQUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_USER_VALUES.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_CHALLENGE_COUNT.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_TOURNAMENT_INFO.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_EXIT.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_MATCH.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_FAIL.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_EVENTS.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_LEADERBOARD.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_MISSION.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_QUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_OFFER.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_ACCEPT_OFFER.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_JOIN_EVENT.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_LOCALIZATION_LOADED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_USER_INFO_UPDATED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_USER_AVATARS.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_RECEIVE_DATA.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_DYNAMICS_ENGINE_LOADED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_ENGINE_LOADED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_SERVER_API_STATUS_UPDATED.toString()); 
} public static void onActivityResult(int requestCode, int resultCode, Intent data) { Log.d(LOG_TAG, "onActivityResult()"); fuel.onActivityResult(sActivity, requestCode, resultCode, data); } public static void onResume() { Log.d(LOG_TAG, "onResume()"); LocalBroadcastManager.getInstance(sActivity).registerReceiver( sBroadcastReceiver, mIntentFilter); fuel.onResume(sActivity); } public static void onPause() { Log.d(LOG_TAG, "onPause()"); LocalBroadcastManager.getInstance(sActivity).unregisterReceiver(sBroadcastReceiver); fuel.onPause(sActivity); } private static fuelbroadcastreceiver sBroadcastReceiver = new fuelbroadcastreceiver() { @SuppressWarnings("unchecked") @Override public void onReceive(Context context, String action, Map<String, Object> data) { String message = null; JSONObject jsonObject = new JSONObject(); if (data == null) { message = "{\"action\": \""+action+"\" , \"data\" : {}}"; } else { try { jsonObject.put( "action" , action ); jsonObject.put("data", fueljsonhelper.sharedInstance().toJSONObject(data) ); message = jsonObject.toString(); } catch (Exception exception) { Log.w(LOG_TAG, "Exception in processing broadcast message: " + exception.getMessage()); } } UnityPlayer.UnitySendMessage("FuelSDK", "DataReceiver", message); } }; }
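FuelSDKUnitySharedActivity only exposes static lifecycle hooks, so the Unity host activity is expected to forward its own lifecycle into them. A hedged sketch of such a host activity follows, assuming the standard com.unity3d.player.UnityPlayerActivity base class shipped with a Unity Android export; the class name is hypothetical.

package com.fuelpowered.lib.fuelsdk.unity;

import android.content.Intent;
import android.os.Bundle;

import com.unity3d.player.UnityPlayerActivity;

/**
 * Illustrative sketch only: a host activity forwarding its lifecycle into
 * FuelSDKUnitySharedActivity. The class name is hypothetical.
 */
public class MainActivityExample extends UnityPlayerActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        FuelSDKUnitySharedActivity.onCreate(savedInstanceState);
    }

    @Override
    protected void onResume() {
        super.onResume();
        FuelSDKUnitySharedActivity.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
        FuelSDKUnitySharedActivity.onPause();
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        FuelSDKUnitySharedActivity.onActivityResult(requestCode, resultCode, data);
    }
}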
fuelsdkunity/src/main/java/com/fuelpowered/lib/fuelsdk/unity/FuelSDKUnitySharedActivity.java
package com.fuelpowered.lib.fuelsdk.unity; /** * Created by alexisbarra on 8/30/15. */ import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.support.v4.content.LocalBroadcastManager; import android.util.Log; import com.fuelpowered.lib.fuelsdk.fuel; import com.fuelpowered.lib.fuelsdk.fuelbroadcastreceiver; import com.fuelpowered.lib.fuelsdk.fuelbroadcasttype; import com.fuelpowered.lib.fuelsdk.fuelcompeteui; import com.fuelpowered.lib.fuelsdk.fuelimpl.fueljsonhelper; import com.unity3d.player.UnityPlayer; import org.json.JSONObject; import java.util.Map; public class FuelSDKUnitySharedActivity { private static final String LOG_TAG = "FuelSDKUnitySharedActiv"; private static Activity sActivity; private static IntentFilter mIntentFilter; public static void onCreate(Bundle savedInstanceState) { Log.d(LOG_TAG, "onCreate()"); sActivity = UnityPlayer.currentActivity; Log.d(LOG_TAG, sActivity.getLocalClassName()); fuel.onCreate(sActivity); mIntentFilter = new IntentFilter(); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_VG_LIST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_VG_ROLLBACK.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_NOTIFICATION_ENABLED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_NOTIFICATION_DISABLED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_SOCIAL_LOGIN_REQUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_SOCIAL_INVITE_REQUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_SOCIAL_SHARE_REQUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IMPLICIT_LAUNCH_REQUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_USER_VALUES.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_CHALLENGE_COUNT.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_TOURNAMENT_INFO.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_EXIT.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_MATCH.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_COMPETE_FAIL.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_EVENTS.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_LEADERBOARD.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_MISSION.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_QUEST.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_OFFER.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_ACCEPT_OFFER.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_JOIN_EVENT.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_LOCALIZATION_LOADED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_USER_INFO_UPDATED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_USER_AVATARS.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_RECEIVE_DATA.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_DYNAMICS_ENGINE_LOADED.toString()); mIntentFilter.addAction(fuelbroadcasttype.FSDK_BROADCAST_IGNITE_ENGINE_LOADED.toString()); } public static void onActivityResult(int requestCode, int resultCode, Intent data) { 
Log.d(LOG_TAG, "onActivityResult()"); fuel.onActivityResult(sActivity, requestCode, resultCode, data); } public static void onResume() { Log.d(LOG_TAG, "onResume()"); LocalBroadcastManager.getInstance(sActivity).registerReceiver( sBroadcastReceiver, mIntentFilter); fuel.onResume(sActivity); } public static void onPause() { Log.d(LOG_TAG, "onPause()"); LocalBroadcastManager.getInstance(sActivity).unregisterReceiver(sBroadcastReceiver); fuel.onPause(sActivity); } private static fuelbroadcastreceiver sBroadcastReceiver = new fuelbroadcastreceiver() { @SuppressWarnings("unchecked") @Override public void onReceive(Context context, String action, Map<String, Object> data) { String message = null; JSONObject jsonObject = new JSONObject(); if (data == null) { message = "{\"action\": \""+action+"\" , \"data\" : {}}"; } else { try { jsonObject.put( "action" , action ); jsonObject.put("data", fueljsonhelper.sharedInstance().toJSONObject(data) ); message = jsonObject.toString(); } catch (Exception exception) { Log.w(LOG_TAG, "Exception in processing broadcast message: " + exception.getMessage()); } } UnityPlayer.UnitySendMessage("FuelSDK", "DataReceiver", message); } }; }
[UPDATE] Adding support for monitoring response results from server requests in order to deduce the health of the server
fuelsdkunity/src/main/java/com/fuelpowered/lib/fuelsdk/unity/FuelSDKUnitySharedActivity.java
[UPDATE] Adding support for monitoring response results from server requests in order to deduce the health of the server
Java
mit
553e1800e2098a7d7128406c16b11cf55a09d8d9
0
Haehnchen/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin
package fr.adrienbrault.idea.symfony2plugin.templating.path; import com.google.gson.Gson; import com.google.gson.JsonIOException; import com.google.gson.JsonSyntaxException; import com.intellij.openapi.util.Key; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiFile; import com.intellij.psi.search.FilenameIndex; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.CachedValue; import com.intellij.psi.util.CachedValueProvider; import com.intellij.psi.util.CachedValuesManager; import com.intellij.psi.util.PsiModificationTracker; import fr.adrienbrault.idea.symfony2plugin.extension.TwigNamespaceExtension; import fr.adrienbrault.idea.symfony2plugin.extension.TwigNamespaceExtensionParameter; import fr.adrienbrault.idea.symfony2plugin.templating.dict.TwigConfigJson; import fr.adrienbrault.idea.symfony2plugin.templating.path.dict.TwigPathJson; import fr.adrienbrault.idea.symfony2plugin.templating.util.TwigUtil; import fr.adrienbrault.idea.symfony2plugin.util.VfsExUtil; import org.apache.commons.lang.StringUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collection; /** * @author Daniel Espendiller <[email protected]> */ public class JsonFileIndexTwigNamespaces implements TwigNamespaceExtension { private static final Key<CachedValue<Collection<TwigPath>>> CACHE = new Key<>("TWIG_JSON_INDEX_CACHE"); @NotNull @Override public Collection<TwigPath> getNamespaces(final @NotNull TwigNamespaceExtensionParameter parameter) { CachedValue<Collection<TwigPath>> cache = parameter.getProject().getUserData(CACHE); if (cache == null) { cache = CachedValuesManager.getManager(parameter.getProject()).createCachedValue(() -> CachedValueProvider.Result.create(getNamespacesInner(parameter), PsiModificationTracker.MODIFICATION_COUNT), false ); parameter.getProject().putUserData(CACHE, cache); } return cache.getValue(); } @NotNull private Collection<TwigPath> getNamespacesInner(@NotNull TwigNamespaceExtensionParameter parameter) { Collection<TwigPath> twigPaths = new ArrayList<>(); for (final PsiFile psiFile : FilenameIndex.getFilesByName(parameter.getProject(), "ide-twig.json", GlobalSearchScope.allScope(parameter.getProject()))) { Collection<TwigPath> cachedValue = CachedValuesManager.getCachedValue(psiFile, new MyJsonCachedValueProvider(psiFile)); if(cachedValue != null) { twigPaths.addAll(cachedValue); } } return twigPaths; } private static class MyJsonCachedValueProvider implements CachedValueProvider<Collection<TwigPath>> { private final PsiFile psiFile; public MyJsonCachedValueProvider(PsiFile psiFile) { this.psiFile = psiFile; } @Nullable @Override public Result<Collection<TwigPath>> compute() { Collection<TwigPath> twigPaths = new ArrayList<>(); String text = psiFile.getText(); TwigConfigJson configJson = null; try { configJson = new Gson().fromJson(text, TwigConfigJson.class); } catch (JsonSyntaxException | JsonIOException | IllegalStateException ignored) { } if(configJson == null) { return Result.create(twigPaths, psiFile, psiFile.getVirtualFile()); } for(TwigPathJson twigPath : configJson.getNamespaces()) { String path = twigPath.getPath(); if(path == null || path.equals(".")) { path = ""; } path = StringUtils.stripStart(path.replace("\\", "/"), "/"); PsiDirectory parent = psiFile.getParent(); if(parent == null) { continue; } // current directory check and subfolder VirtualFile twigRoot; 
if(path.length() > 0) { twigRoot = VfsUtil.findRelativeFile(parent.getVirtualFile(), path.split("/")); } else { twigRoot = psiFile.getParent().getVirtualFile(); } if(twigRoot == null) { continue; } String relativePath = VfsExUtil.getRelativeProjectPath(psiFile.getProject(), twigRoot); if(relativePath == null) { continue; } String namespace = twigPath.getNamespace(); TwigUtil.NamespaceType pathType = TwigUtil.NamespaceType.ADD_PATH; String type = twigPath.getType(); if("bundle".equalsIgnoreCase(type)) { pathType = TwigUtil.NamespaceType.BUNDLE; } String namespacePath = StringUtils.stripStart(relativePath, "/"); if(StringUtils.isNotBlank(namespace)) { twigPaths.add(new TwigPath(namespacePath, namespace, pathType, true)); } else { twigPaths.add(new TwigPath(namespacePath, TwigUtil.MAIN, pathType, true)); } } return Result.create(twigPaths, psiFile, psiFile.getVirtualFile()); } } }
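For the Twig namespace loader above, the expected on-disk configuration is an ide-twig.json file placed next to the templates. The exact schema lives in TwigConfigJson/TwigPathJson, which are not shown here, so the sketch below only infers the field names from the getters the loader calls (namespaces, path, namespace, type); a path of "." now resolves to the directory containing the json file, which is the same-directory support this commit adds, and an empty or missing path already behaved that way.

{
    "namespaces": [
        { "namespace": "foo", "path": "templates/foo" },
        { "namespace": "AcmeBundle", "path": "views", "type": "bundle" },
        { "namespace": "", "path": "." }
    ]
}

An empty or missing namespace falls back to TwigUtil.MAIN, and a type of "bundle" switches the entry to NamespaceType.BUNDLE.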
src/main/java/fr/adrienbrault/idea/symfony2plugin/templating/path/JsonFileIndexTwigNamespaces.java
package fr.adrienbrault.idea.symfony2plugin.templating.path; import com.google.gson.Gson; import com.google.gson.JsonIOException; import com.google.gson.JsonSyntaxException; import com.intellij.openapi.util.Key; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiFile; import com.intellij.psi.search.FilenameIndex; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.CachedValue; import com.intellij.psi.util.CachedValueProvider; import com.intellij.psi.util.CachedValuesManager; import com.intellij.psi.util.PsiModificationTracker; import fr.adrienbrault.idea.symfony2plugin.extension.TwigNamespaceExtension; import fr.adrienbrault.idea.symfony2plugin.extension.TwigNamespaceExtensionParameter; import fr.adrienbrault.idea.symfony2plugin.templating.dict.TwigConfigJson; import fr.adrienbrault.idea.symfony2plugin.templating.path.dict.TwigPathJson; import fr.adrienbrault.idea.symfony2plugin.templating.util.TwigUtil; import fr.adrienbrault.idea.symfony2plugin.util.VfsExUtil; import org.apache.commons.lang.StringUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.Collection; /** * @author Daniel Espendiller <[email protected]> */ public class JsonFileIndexTwigNamespaces implements TwigNamespaceExtension { private static final Key<CachedValue<Collection<TwigPath>>> CACHE = new Key<>("TWIG_JSON_INDEX_CACHE"); @NotNull @Override public Collection<TwigPath> getNamespaces(final @NotNull TwigNamespaceExtensionParameter parameter) { CachedValue<Collection<TwigPath>> cache = parameter.getProject().getUserData(CACHE); if (cache == null) { cache = CachedValuesManager.getManager(parameter.getProject()).createCachedValue(() -> CachedValueProvider.Result.create(getNamespacesInner(parameter), PsiModificationTracker.MODIFICATION_COUNT), false ); parameter.getProject().putUserData(CACHE, cache); } return cache.getValue(); } @NotNull private Collection<TwigPath> getNamespacesInner(@NotNull TwigNamespaceExtensionParameter parameter) { Collection<TwigPath> twigPaths = new ArrayList<>(); for (final PsiFile psiFile : FilenameIndex.getFilesByName(parameter.getProject(), "ide-twig.json", GlobalSearchScope.allScope(parameter.getProject()))) { Collection<TwigPath> cachedValue = CachedValuesManager.getCachedValue(psiFile, new MyJsonCachedValueProvider(psiFile)); if(cachedValue != null) { twigPaths.addAll(cachedValue); } } return twigPaths; } private static class MyJsonCachedValueProvider implements CachedValueProvider<Collection<TwigPath>> { private final PsiFile psiFile; public MyJsonCachedValueProvider(PsiFile psiFile) { this.psiFile = psiFile; } @Nullable @Override public Result<Collection<TwigPath>> compute() { Collection<TwigPath> twigPaths = new ArrayList<>(); String text = psiFile.getText(); TwigConfigJson configJson = null; try { configJson = new Gson().fromJson(text, TwigConfigJson.class); } catch (JsonSyntaxException | JsonIOException | IllegalStateException ignored) { } if(configJson == null) { return Result.create(twigPaths, psiFile, psiFile.getVirtualFile()); } for(TwigPathJson twigPath : configJson.getNamespaces()) { String path = twigPath.getPath(); if(path == null) { path = ""; } path = StringUtils.stripStart(path.replace("\\", "/"), "/"); PsiDirectory parent = psiFile.getParent(); if(parent == null) { continue; } // current directory check and subfolder VirtualFile twigRoot; if(path.length() > 0) { 
twigRoot = VfsUtil.findRelativeFile(parent.getVirtualFile(), path.split("/")); } else { twigRoot = psiFile.getParent().getVirtualFile(); } if(twigRoot == null) { continue; } String relativePath = VfsExUtil.getRelativeProjectPath(psiFile.getProject(), twigRoot); if(relativePath == null) { continue; } String namespace = twigPath.getNamespace(); TwigUtil.NamespaceType pathType = TwigUtil.NamespaceType.ADD_PATH; String type = twigPath.getType(); if("bundle".equalsIgnoreCase(type)) { pathType = TwigUtil.NamespaceType.BUNDLE; } String namespacePath = StringUtils.stripStart(relativePath, "/"); if(StringUtils.isNotBlank(namespace)) { twigPaths.add(new TwigPath(namespacePath, namespace, pathType, true)); } else { twigPaths.add(new TwigPath(namespacePath, TwigUtil.MAIN, pathType, true)); } } return Result.create(twigPaths, psiFile, psiFile.getVirtualFile()); } } }
support same directory in Twig namespace json
src/main/java/fr/adrienbrault/idea/symfony2plugin/templating/path/JsonFileIndexTwigNamespaces.java
support same directory in Twig namespace json
Java
mit
106d0bea41902fda351d829b04058bd8e72cfb6e
0
TinusTinus/game-engine
package nl.mvdr.tinustris.gui;

import java.util.Arrays;
import java.util.List;

import javafx.scene.Group;
import javafx.scene.Node;
import nl.mvdr.tinustris.model.OnePlayerGameState;
import nl.mvdr.tinustris.model.Orientation;
import nl.mvdr.tinustris.model.Point;
import nl.mvdr.tinustris.model.Tetromino;

/**
 * Shows the upcoming block.
 * 
 * @author Martijn van de Rijdt
 */
class NextBlockRenderer extends BlockGroupRenderer {
    /** Previous value for the next tetromino field, currently being displayed. Initially null. */
    private Tetromino previousValue = null;

    /**
     * Constructor.
     * 
     * @param blockCreator creator
     */
    NextBlockRenderer(BlockCreator blockCreator) {
        super(blockCreator);
    }

    /** {@inheritDoc} */
    @Override
    public void render(OnePlayerGameState gameState) {
        super.render(gameState);
        previousValue = gameState.getNext();
    }

    /** {@inheritDoc} */
    @Override
    List<Group> createGroups(OnePlayerGameState gameState) {
        Group group;
        Tetromino nextBlock = gameState.getNext();
        if (previousValue == nextBlock) {
            // Next tetromino is unchanged; no need to update.
            group = null;
        } else {
            // This is the first frame, or the next tetromino has changed.
            // Render the group.
            group = new Group();
            for (Point point : nextBlock.getPoints(Orientation.getDefault())) {
                // for aesthetics, center the tetromino
                if (nextBlock != Tetromino.O) {
                    point = point.translate(0, -1);
                }
                Node node = createBlock(point.getX(), point.getY(), 4, nextBlock.getBlock(), BlockStyle.NEXT,
                        gameState.getNumFramesUntilLinesDisappear(), gameState.getNumFramesSinceLastLock());
                group.getChildren().add(node);
            }
        }
        return Arrays.asList(group);
    }
}
tinustris/src/main/java/nl/mvdr/tinustris/gui/NextBlockRenderer.java
package nl.mvdr.tinustris.gui; import java.util.Arrays; import java.util.List; import javafx.scene.Group; import javafx.scene.Node; import nl.mvdr.tinustris.model.OnePlayerGameState; import nl.mvdr.tinustris.model.Orientation; import nl.mvdr.tinustris.model.Point; import nl.mvdr.tinustris.model.Tetromino; /** * Shows the upcoming block. * * @author Martijn van de Rijdt */ class NextBlockRenderer extends BlockGroupRenderer { /** Previous value for the next tetromino field, currently being displayed. Initially null. */ private Tetromino previousValue = null; /** * Constructor. * * @param blockCreator creator */ NextBlockRenderer(BlockCreator blockCreator) { super(blockCreator); } /** {@inheritDoc} */ @Override public void render(OnePlayerGameState gameState) { super.render(gameState); previousValue = gameState.getNext(); } /** {@inheritDoc} */ @Override List<Group> createGroups(OnePlayerGameState gameState) { Group group; Tetromino nextBlock = gameState.getNext(); if (previousValue != null && previousValue == nextBlock) { // Active block location is unchanged; no need to update. group = null; } else { // This is the first frame, or the active block's location has changed. // Render the group. group = new Group(); for (Point point : nextBlock.getPoints(Orientation.getDefault())) { // for aesthetics, center the tetromino if (nextBlock != Tetromino.O) { point = point.translate(0, -1); } Node node = createBlock(point.getX(), point.getY(), 4, nextBlock.getBlock(), BlockStyle.NEXT, gameState.getNumFramesUntilLinesDisappear(), gameState.getNumFramesSinceLastLock()); group.getChildren().add(node); } } return Arrays.asList(group); } }
Removed unnecessary null check.
tinustris/src/main/java/nl/mvdr/tinustris/gui/NextBlockRenderer.java
Removed unnecessary null check.
Java
mit
038b0940a168d55ad5f3931285ab0aca107519f8
0
Nunnery/MythicDrops
package net.nunnerycode.bukkit.mythicdrops.sockets; import com.conventnunnery.libraries.config.CommentedConventYamlConfiguration; import com.conventnunnery.libraries.config.ConventYamlConfiguration; import net.nunnerycode.bukkit.mythicdrops.utils.ItemUtil; import net.nunnerycode.java.libraries.cannonball.DebugPrinter; import org.apache.commons.lang.math.RandomUtils; import org.apache.commons.lang3.math.NumberUtils; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.Effect; import org.bukkit.command.CommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.configuration.file.YamlConfiguration; import org.bukkit.enchantments.Enchantment; import org.bukkit.entity.Entity; import org.bukkit.entity.LivingEntity; import org.bukkit.entity.Player; import org.bukkit.entity.Projectile; import org.bukkit.event.Event; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.Action; import org.bukkit.event.entity.EntityDamageByEntityEvent; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import org.bukkit.material.MaterialData; import org.bukkit.plugin.java.JavaPlugin; import org.bukkit.potion.PotionEffectType; import se.ranzdo.bukkit.methodcommand.Arg; import se.ranzdo.bukkit.methodcommand.Command; import se.ranzdo.bukkit.methodcommand.CommandHandler; import se.ranzdo.bukkit.methodcommand.FlagArg; import se.ranzdo.bukkit.methodcommand.Flags; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; public class MythicDropsSockets extends JavaPlugin implements Listener { private static MythicDropsSockets _INSTANCE; private final Map<String, HeldItem> heldSocket = new HashMap<>(); private DebugPrinter debugPrinter; private Map<String, String> language; private String socketGemName; private List<String> socketGemLore; private String sockettedItemSocket; private List<String> sockettedItemLore; private ConventYamlConfiguration configYAML; private ConventYamlConfiguration socketGemsYAML; private boolean useAttackerItemInHand; private boolean useAttackerArmorEquipped; private boolean useDefenderItemInHand; private boolean useDefenderArmorEquipped; private double socketGemChanceToSpawn; private List<MaterialData> socketGemMaterialIds; private Map<String, SocketGem> socketGemMap; private List<String> socketGemPrefixes; private boolean preventMultipleChangesFromSockets; private List<String> socketGemSuffixes; public static MythicDropsSockets getInstance() { return _INSTANCE; } public Map<String, SocketGem> getSocketGemMap() { return Collections.unmodifiableMap(socketGemMap); } public List<String> getSocketGemPrefixes() { return Collections.unmodifiableList(socketGemPrefixes); } public List<String> getSocketGemSuffixes() { return Collections.unmodifiableList(socketGemSuffixes); } public String getSockettedItemSocket() { return sockettedItemSocket; } public List<String> getSockettedItemLore() { return sockettedItemLore; } public ConventYamlConfiguration getConfigYAML() { return configYAML; } public ConventYamlConfiguration getSocketGemsYAML() { return socketGemsYAML; } @Override public void onDisable() { debugPrinter.debug(Level.INFO, "v" + getDescription().getVersion() + " disabled"); } @Override 
public void onEnable() { _INSTANCE = this; debugPrinter = new DebugPrinter(getDataFolder().getPath(), "debug.log"); language = new HashMap<>(); socketGemMaterialIds = new ArrayList<>(); socketGemMap = new HashMap<>(); socketGemPrefixes = new ArrayList<>(); socketGemSuffixes = new ArrayList<>(); unpackConfigurationFiles(new String[]{"config.yml", "socketGems.yml"}, false); configYAML = new ConventYamlConfiguration(new File(getDataFolder(), "config.yml"), YamlConfiguration.loadConfiguration(getResource("config.yml")).getString("version")); configYAML.options().backupOnUpdate(true); configYAML.options().updateOnLoad(true); configYAML.load(); socketGemsYAML = new ConventYamlConfiguration(new File(getDataFolder(), "socketGems.yml"), YamlConfiguration.loadConfiguration(getResource("socketGems.yml")).getString("version")); socketGemsYAML.options().backupOnUpdate(true); socketGemsYAML.options().updateOnLoad(true); socketGemsYAML.load(); loadSettings(); loadGems(); getServer().getPluginManager().registerEvents(this, this); CommandHandler commandHandler = new CommandHandler(this); commandHandler.registerCommands(this); debugPrinter.debug(Level.INFO, "v" + getDescription().getVersion() + " enabled"); } private void loadGems() { socketGemMap.clear(); List<String> loadedSocketGems = new ArrayList<>(); if (!socketGemsYAML.isConfigurationSection("socket-gems")) { return; } ConfigurationSection cs = socketGemsYAML.getConfigurationSection("socket-gems"); for (String key : cs.getKeys(false)) { if (!cs.isConfigurationSection(key)) { continue; } ConfigurationSection gemCS = cs.getConfigurationSection(key); GemType gemType = GemType.getFromName(gemCS.getString("type")); if (gemType == null) { gemType = GemType.ANY; } List<SocketPotionEffect> socketPotionEffects = buildSocketPotionEffects(gemCS); List<SocketParticleEffect> socketParticleEffects = buildSocketParticleEffects(gemCS); double chance = gemCS.getDouble("chance"); String prefix = gemCS.getString("prefix"); if (prefix != null && !prefix.equalsIgnoreCase("")) { socketGemPrefixes.add(prefix); } String suffix = gemCS.getString("suffix"); if (suffix != null && !suffix.equalsIgnoreCase("")) { socketGemSuffixes.add(suffix); } List<String> lore = gemCS.getStringList("lore"); Map<Enchantment, Integer> enchantments = new HashMap<>(); if (gemCS.isConfigurationSection("enchantments")) { ConfigurationSection enchCS = gemCS.getConfigurationSection("enchantments"); for (String key1 : enchCS.getKeys(false)) { Enchantment ench = null; for (Enchantment ec : Enchantment.values()) { if (ec.getName().equalsIgnoreCase(key1)) { ench = ec; break; } } if (ench == null) { continue; } int level = enchCS.getInt(key1); enchantments.put(ench, level); } } List<String> commands = gemCS.getStringList("commands"); List<SocketCommand> socketCommands = new ArrayList<>(); for (String s : commands) { SocketCommand sc = new SocketCommand(s); socketCommands.add(sc); } SocketGem sg = new SocketGem(key, gemType, socketPotionEffects, socketParticleEffects, chance, prefix, suffix, lore, enchantments, socketCommands); socketGemMap.put(key, sg); loadedSocketGems.add(key); } debugPrinter.debug(Level.INFO, "Loaded socket gems: " + loadedSocketGems.toString()); } private List<SocketPotionEffect> buildSocketPotionEffects(ConfigurationSection cs) { List<SocketPotionEffect> socketPotionEffectList = new ArrayList<>(); if (!cs.isConfigurationSection("potion-effects")) { return socketPotionEffectList; } ConfigurationSection cs1 = cs.getConfigurationSection("potion-effects"); for (String key : 
cs1.getKeys(false)) { PotionEffectType pet = PotionEffectType.getByName(key); if (pet == null) { continue; } int duration = cs1.getInt(key + ".duration"); int intensity = cs1.getInt(key + ".intensity"); int radius = cs1.getInt(key + ".radius"); String target = cs1.getString(key + ".target"); EffectTarget et = EffectTarget.getFromName(target); if (et == null) { et = EffectTarget.NONE; } boolean affectsWielder = cs1.getBoolean(key + ".affectsWielder"); boolean affectsTarget = cs1.getBoolean(key + ".affectsTarget"); socketPotionEffectList.add(new SocketPotionEffect(pet, intensity, duration, radius, et, affectsWielder, affectsTarget)); } return socketPotionEffectList; } private List<SocketParticleEffect> buildSocketParticleEffects(ConfigurationSection cs) { List<SocketParticleEffect> socketParticleEffectList = new ArrayList<>(); if (!cs.isConfigurationSection("particle-effects")) { return socketParticleEffectList; } ConfigurationSection cs1 = cs.getConfigurationSection("particle-effects"); for (String key : cs1.getKeys(false)) { Effect pet; try { pet = Effect.valueOf(key); } catch (Exception e) { continue; } if (pet == null) { continue; } int duration = cs1.getInt(key + ".duration"); int intensity = cs1.getInt(key + ".intensity"); int radius = cs1.getInt(key + ".radius"); String target = cs1.getString(key + ".target"); EffectTarget et = EffectTarget.getFromName(target); if (et == null) { et = EffectTarget.NONE; } boolean affectsWielder = cs1.getBoolean(key + ".affectsWielder"); boolean affectsTarget = cs1.getBoolean(key + ".affectsTarget"); socketParticleEffectList.add(new SocketParticleEffect(pet, intensity, duration, radius, et, affectsWielder, affectsTarget)); } return socketParticleEffectList; } private void loadSettings() { useAttackerItemInHand = configYAML.getBoolean("options.use-attacker-item-in-hand", true); useAttackerArmorEquipped = configYAML.getBoolean("options.use-attacker-armor-equipped", false); useDefenderItemInHand = configYAML.getBoolean("options.use-defender-item-in-hand", false); useDefenderArmorEquipped = configYAML.getBoolean("options.use-defender-armor-equipped", true); socketGemChanceToSpawn = configYAML.getDouble("options.socket-gem-chance-to-spawn", 0.25); preventMultipleChangesFromSockets = configYAML.getBoolean("options.prevent-multiple-changes-from-sockets", true); List<String> socketGemMats = configYAML.getStringList("options.socket-gem-material-ids"); for (String s : socketGemMats) { int id; byte data; if (s.contains(";")) { String[] split = s.split(";"); id = NumberUtils.toInt(split[0], 0); data = (byte) NumberUtils.toInt(split[1], 0); } else { id = NumberUtils.toInt(s, 0); data = 0; } if (id == 0) { continue; } socketGemMaterialIds.add(new MaterialData(id, data)); } socketGemName = configYAML.getString("items.socket-name", "&6Socket Gem - %socketgem%"); socketGemLore = configYAML.getStringList("items.socket-lore"); sockettedItemSocket = configYAML.getString("items.socketted-item-socket", "&6(Socket)"); sockettedItemLore = configYAML.getStringList("items.socketted-item-lore"); language.clear(); for (String key : configYAML.getConfigurationSection("language").getKeys(true)) { if (configYAML.getConfigurationSection("language").isConfigurationSection(key)) { continue; } language.put(key, configYAML.getConfigurationSection("language").getString(key, key)); } } private void unpackConfigurationFiles(String[] configurationFiles, boolean overwrite) { for (String s : configurationFiles) { YamlConfiguration yc = 
CommentedConventYamlConfiguration.loadConfiguration(getResource(s)); try { File f = new File(getDataFolder(), s); if (!f.exists()) { yc.save(f); continue; } if (overwrite) { yc.save(f); } } catch (IOException e) { getLogger().warning("Could not unpack " + s); } } } @EventHandler(priority = EventPriority.NORMAL) public void onRightClick(PlayerInteractEvent event) { if (event.getAction() != Action.RIGHT_CLICK_AIR && event.getAction() != Action.RIGHT_CLICK_BLOCK) { return; } if (event.getItem() == null) { return; } Player player = event.getPlayer(); ItemStack itemInHand = event.getItem(); String itemType = ItemUtil.getItemTypeFromMaterialData(itemInHand.getData()); if (getSocketGemMaterialIds().contains(itemInHand.getData())) { event.setUseItemInHand(Event.Result.DENY); player.updateInventory(); } if (itemType != null && ItemUtil.isArmor(itemType) && itemInHand.hasItemMeta()) { event.setUseItemInHand(Event.Result.DENY); player.updateInventory(); } if (heldSocket.containsKey(player.getName())) { socketItem(event, player, itemInHand, itemType); heldSocket.remove(player.getName()); } else { addHeldSocket(event, player, itemInHand); } } public String getLanguageString(String key, String[][] args) { String s = getLanguageString(key); for (String[] arg : args) { s = s.replace(arg[0], arg[1]); } return s; } public String getLanguageString(String key) { return language.containsKey(key) ? language.get(key) : key; } public List<String> replaceArgs(List<String> strings, String[][] args) { List<String> list = new ArrayList<>(); for (String s : strings) { list.add(replaceArgs(s, args)); } return list; } public String replaceArgs(String string, String[][] args) { String s = string; for (String[] arg : args) { s = s.replace(arg[0], arg[1]); } return s; } public String getSocketGemName() { return socketGemName; } public List<String> getSocketGemLore() { return socketGemLore; } public double getSocketGemChanceToSpawn() { return socketGemChanceToSpawn; } public boolean socketGemTypeMatchesItemStack(SocketGem socketGem, ItemStack itemStack) { String itemType = ItemUtil.getItemTypeFromMaterialData(itemStack.getData()); if (itemType == null) { return false; } switch (socketGem.getGemType()) { case TOOL: return ItemUtil.isTool(itemType); case ARMOR: return ItemUtil.isArmor(itemType); case ANY: return true; default: return false; } } private void addHeldSocket(PlayerInteractEvent event, final Player player, ItemStack itemInHand) { if (!getSocketGemMaterialIds().contains(itemInHand.getData())) { return; } if (!itemInHand.hasItemMeta()) { return; } ItemMeta im = itemInHand.getItemMeta(); if (!im.hasDisplayName()) { return; } String replacedArgs = ChatColor.stripColor(replaceArgs(socketGemName, new String[][]{{"%socketgem%", ""}}).replace('&', '\u00A7').replace("\u00A7\u00A7", "&")); String type = ChatColor.stripColor(im.getDisplayName().replace(replacedArgs, "")); if (type == null) { return; } SocketGem socketGem = socketGemMap.get(type); if (socketGem == null) { socketGem = getSocketGemFromName(type); if (socketGem == null) { return; } } sendMessage(player, "messages.instructions", new String[][]{}); HeldItem hg = new HeldItem(socketGem.getName(), itemInHand); heldSocket.put(player.getName(), hg); Bukkit.getScheduler().runTaskLaterAsynchronously(this, new Runnable() { @Override public void run() { heldSocket.remove(player.getName()); } }, 30 * 20L); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); player.updateInventory(); } public boolean 
isPreventMultipleChangesFromSockets() { return preventMultipleChangesFromSockets; } private void socketItem(PlayerInteractEvent event, Player player, ItemStack itemInHand, String itemType) { if (ItemUtil.isArmor(itemType) || ItemUtil.isTool(itemType)) { if (!itemInHand.hasItemMeta()) { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } ItemMeta im = itemInHand.getItemMeta(); if (!im.hasLore()) { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } List<String> lore = new ArrayList<String>(im.getLore()); String socketString = getFormattedLanguageString("items.socketted-item-socket"); int index = indexOfStripColor(lore, socketString); if (index < 0) { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } HeldItem heldSocket1 = heldSocket.get(player.getName()); String socketGemType = ChatColor.stripColor(heldSocket1 .getName()); SocketGem socketGem = getSocketGemFromName(socketGemType); if (socketGem == null || !socketGemTypeMatchesItemStack(socketGem, itemInHand)) { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } lore.set(index, ChatColor.GOLD + socketGem.getName()); lore.removeAll(sockettedItemLore); im.setLore(lore); itemInHand.setItemMeta(im); prefixItemStack(itemInHand, socketGem); suffixItemStack(itemInHand, socketGem); loreItemStack(itemInHand, socketGem); enchantmentItemStack(itemInHand, socketGem); if (player.getInventory().contains(heldSocket1.getItemStack())) { int indexOfItem = player.getInventory().first(heldSocket1.getItemStack()); ItemStack inInventory = player.getInventory().getItem(indexOfItem); inInventory.setAmount(inInventory.getAmount() - 1); player.getInventory().setItem(indexOfItem, inInventory); player.updateInventory(); } else { sendMessage(player, "messages.do-not-have", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } player.setItemInHand(itemInHand); sendMessage(player, "messages.success", new String[][]{}); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); } else { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); } } public int indexOfStripColor(List<String> list, String string) { String[] array = list.toArray(new String[list.size()]); for (int i = 0; i < array.length; i++) { if (ChatColor.stripColor(array[i]).equalsIgnoreCase(ChatColor.stripColor(string))) { return i; } } return -1; } public int indexOfStripColor(String[] array, 
String string) { for (int i = 0; i < array.length; i++) { if (ChatColor.stripColor(array[i]).equalsIgnoreCase(ChatColor.stripColor(string))) { return i; } } return -1; } public ItemStack loreItemStack(ItemStack itemStack, SocketGem socketGem) { ItemMeta im; if (itemStack.hasItemMeta()) { im = itemStack.getItemMeta(); } else { im = Bukkit.getItemFactory().getItemMeta(itemStack.getType()); } if (!im.hasLore()) { im.setLore(new ArrayList<String>()); } List<String> lore = new ArrayList<String>(im.getLore()); if (lore.containsAll(socketGem.getLore())) { return itemStack; } for (String s : socketGem.getLore()) { lore.add(s.replace('&', '\u00A7').replace("\u00A7\u00A7", "&")); } im.setLore(lore); itemStack.setItemMeta(im); return itemStack; } public ItemStack enchantmentItemStack(ItemStack itemStack, SocketGem socketGem) { if (itemStack == null || socketGem == null) { return itemStack; } Map<Enchantment, Integer> itemStackEnchantments = new HashMap<Enchantment, Integer>(itemStack.getEnchantments()); for (Map.Entry<Enchantment, Integer> entry : socketGem.getEnchantments().entrySet()) { if (itemStackEnchantments.containsKey(entry.getKey())) { itemStack.removeEnchantment(entry.getKey()); int level = Math.abs(itemStackEnchantments.get(entry.getKey()) + entry.getValue()); if (level <= 0) { continue; } itemStack.addUnsafeEnchantment(entry.getKey(), level); } else { itemStack.addUnsafeEnchantment(entry.getKey(), entry.getValue() <= 0 ? Math.abs(entry.getValue()) == 0 ? 1 : Math.abs(entry.getValue()) : entry.getValue()); } } return itemStack; } public ItemStack suffixItemStack(ItemStack itemStack, SocketGem socketGem) { ItemMeta im; if (!itemStack.hasItemMeta()) { im = Bukkit.getItemFactory().getItemMeta(itemStack.getType()); } else { im = itemStack.getItemMeta(); } String name = im.getDisplayName(); if (name == null) { return itemStack; } ChatColor beginColor = findColor(name); String lastColors = ChatColor.getLastColors(name); if (beginColor == null) { beginColor = ChatColor.WHITE; } String suffix = socketGem.getSuffix(); if (suffix == null || suffix.equalsIgnoreCase("")) { return itemStack; } if (isPreventMultipleChangesFromSockets() && ChatColor.stripColor(name).contains(suffix) || containsAnyFromList(ChatColor.stripColor(name), socketGemSuffixes)) { return itemStack; } im.setDisplayName(name + " " + beginColor + suffix + lastColors); itemStack.setItemMeta(im); return itemStack; } public ItemStack prefixItemStack(ItemStack itemStack, SocketGem socketGem) { ItemMeta im; if (itemStack.hasItemMeta()) { im = itemStack.getItemMeta(); } else { im = Bukkit.getItemFactory().getItemMeta(itemStack.getType()); } String name = im.getDisplayName(); if (name == null) { return itemStack; } ChatColor beginColor = findColor(name); if (beginColor == null) { beginColor = ChatColor.WHITE; } String prefix = socketGem.getPrefix(); if (prefix == null || prefix.equalsIgnoreCase("")) { return itemStack; } if (isPreventMultipleChangesFromSockets() && ChatColor.stripColor(name).contains(prefix) || containsAnyFromList(ChatColor.stripColor(name), socketGemPrefixes)) { return itemStack; } im.setDisplayName(beginColor + prefix + " " + name); itemStack.setItemMeta(im); return itemStack; } public ChatColor findColor(final String s) { char[] c = s.toCharArray(); for (int i = 0; i < c.length; i++) { if (c[i] == (char) 167 && i + 1 < c.length) { return ChatColor.getByChar(c[i + 1]); } } return null; } public boolean containsAnyFromList(String string, List<String> list) { for (String s : list) { if 
(string.toUpperCase().contains(s.toUpperCase())) { return true; } } return false; } public void applyEffects(LivingEntity attacker, LivingEntity defender) { if (attacker == null || defender == null) { return; } // handle attacker if (isUseAttackerArmorEquipped()) { for (ItemStack attackersItem : attacker.getEquipment().getArmorContents()) { if (attackersItem == null) { continue; } List<SocketGem> attackerSocketGems = getSocketGems(attackersItem); if (attackerSocketGems != null && !attackerSocketGems.isEmpty()) { for (SocketGem sg : attackerSocketGems) { if (sg == null) { continue; } if (sg.getGemType() != GemType.TOOL && sg.getGemType() != GemType.ANY) { continue; } for (SocketPotionEffect se : sg.getSocketPotionEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(attacker); break; case OTHER: se.apply(defender); break; case AREA: for (Entity e : attacker.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(defender)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(attacker); } break; default: break; } } for (SocketParticleEffect se : sg.getSocketParticleEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(attacker); break; case OTHER: se.apply(defender); break; case AREA: for (Entity e : attacker.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(defender)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(attacker); } break; default: break; } } } } } } if (isUseAttackerItemInHand() && attacker.getEquipment().getItemInHand() != null) { List<SocketGem> attackerSocketGems = getSocketGems(attacker.getEquipment().getItemInHand()); if (attackerSocketGems != null && !attackerSocketGems.isEmpty()) { for (SocketGem sg : attackerSocketGems) { if (sg == null) { continue; } if (sg.getGemType() != GemType.TOOL && sg.getGemType() != GemType.ANY) { continue; } for (SocketPotionEffect se : sg.getSocketPotionEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(attacker); break; case OTHER: se.apply(defender); break; case AREA: for (Entity e : attacker.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(defender)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(attacker); } break; default: break; } } for (SocketParticleEffect se : sg.getSocketParticleEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(attacker); break; case OTHER: se.apply(defender); break; case AREA: for (Entity e : attacker.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(defender)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(attacker); } break; default: break; } } } } } // handle defender if (isUseDefenderArmorEquipped()) { for (ItemStack defenderItem : defender.getEquipment().getArmorContents()) { if (defenderItem == null) { continue; } List<SocketGem> defenderSocketGems = getSocketGems(defenderItem); for (SocketGem sg : defenderSocketGems) { if (sg.getGemType() != GemType.ARMOR && sg.getGemType() != GemType.ANY) { continue; } 
for (SocketPotionEffect se : sg.getSocketPotionEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(defender); break; case OTHER: se.apply(attacker); break; case AREA: for (Entity e : defender.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(attacker)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(defender); } break; default: break; } } for (SocketParticleEffect se : sg.getSocketParticleEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(defender); break; case OTHER: se.apply(attacker); break; case AREA: for (Entity e : defender.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(attacker)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(defender); } break; default: break; } } } } } if (isUseDefenderItemInHand() && defender.getEquipment().getItemInHand() != null) { List<SocketGem> defenderSocketGems = getSocketGems(defender.getEquipment().getItemInHand()); if (defenderSocketGems != null && !defenderSocketGems.isEmpty()) { for (SocketGem sg : defenderSocketGems) { if (sg.getGemType() != GemType.ARMOR && sg.getGemType() != GemType.ANY) { continue; } for (SocketPotionEffect se : sg.getSocketPotionEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(defender); break; case OTHER: se.apply(attacker); break; case AREA: for (Entity e : defender.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(attacker)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(defender); } break; default: break; } } for (SocketParticleEffect se : sg.getSocketParticleEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(defender); break; case OTHER: se.apply(attacker); break; case AREA: for (Entity e : defender.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(attacker)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(defender); } break; default: break; } } } } } } public boolean isUseAttackerItemInHand() { return useAttackerItemInHand; } public boolean isUseAttackerArmorEquipped() { return useAttackerArmorEquipped; } public boolean isUseDefenderItemInHand() { return useDefenderItemInHand; } public boolean isUseDefenderArmorEquipped() { return useDefenderArmorEquipped; } public List<SocketGem> getSocketGems(ItemStack itemStack) { List<SocketGem> socketGemList = new ArrayList<SocketGem>(); ItemMeta im; if (itemStack.hasItemMeta()) { im = itemStack.getItemMeta(); } else { return socketGemList; } List<String> lore = im.getLore(); if (lore == null) { return socketGemList; } for (String s : lore) { SocketGem sg = getSocketGemFromName(ChatColor.stripColor(s)); if (sg == null) { continue; } socketGemList.add(sg); } return socketGemList; } public void runCommands(LivingEntity attacker, LivingEntity defender) { if (attacker == null || defender == null) { return; } if (attacker instanceof Player) { if (isUseAttackerArmorEquipped()) { for (ItemStack attackersItem : attacker.getEquipment().getArmorContents()) { if 
(attackersItem == null) { continue; } List<SocketGem> attackerSocketGems = getSocketGems(attackersItem); if (attackerSocketGems != null && !attackerSocketGems.isEmpty()) { for (SocketGem sg : attackerSocketGems) { if (sg == null) { continue; } for (SocketCommand sc : sg.getCommands()) { if (sc.getRunner() == SocketCommandRunner.CONSOLE) { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) attacker).getName()); } if (command.contains("%target%")) { if (defender instanceof Player) { command = command.replace("%target%", ((Player) defender).getName()); } else { continue; } } } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } else { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) attacker).getName()); } if (command.contains("%target%")) { if (defender instanceof Player) { command = command.replace("%target%", ((Player) defender).getName()); } else { continue; } } } ((Player) attacker).chat("/" + command); } } } } } } if (isUseAttackerItemInHand() && attacker.getEquipment().getItemInHand() != null) { List<SocketGem> attackerSocketGems = getSocketGems(attacker.getEquipment().getItemInHand()); if (attackerSocketGems != null && !attackerSocketGems.isEmpty()) { for (SocketGem sg : attackerSocketGems) { if (sg == null) { continue; } for (SocketCommand sc : sg.getCommands()) { if (sc.getRunner() == SocketCommandRunner.CONSOLE) { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) attacker).getName()); } if (command.contains("%target%")) { if (defender instanceof Player) { command = command.replace("%target%", ((Player) defender).getName()); } else { continue; } } } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } else { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) attacker).getName()); } if (command.contains("%target%")) { if (defender instanceof Player) { command = command.replace("%target%", ((Player) defender).getName()); } else { continue; } } } ((Player) attacker).chat("/" + command); } } } } } } if (defender instanceof Player) { if (isUseDefenderArmorEquipped()) { for (ItemStack defendersItem : defender.getEquipment().getArmorContents()) { if (defendersItem == null) { continue; } List<SocketGem> defenderSocketGems = getSocketGems(defendersItem); if (defenderSocketGems != null && !defenderSocketGems.isEmpty()) { for (SocketGem sg : defenderSocketGems) { if (sg == null) { continue; } for (SocketCommand sc : sg.getCommands()) { if (sc.getRunner() == SocketCommandRunner.CONSOLE) { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) defender).getName()); } if (command.contains("%target%")) { if (attacker instanceof Player) { command = command.replace("%target%", ((Player) attacker).getName()); } else { continue; } } } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } else { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if 
(command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) defender).getName()); } if (command.contains("%target%")) { if (attacker instanceof Player) { command = command.replace("%target%", ((Player) attacker).getName()); } else { continue; } } } ((Player) defender).chat("/" + command); } } } } } } if (isUseDefenderItemInHand() && defender.getEquipment().getItemInHand() != null) { List<SocketGem> defenderSocketGems = getSocketGems(defender.getEquipment().getItemInHand()); if (defenderSocketGems != null && !defenderSocketGems.isEmpty()) { for (SocketGem sg : defenderSocketGems) { if (sg == null) { continue; } for (SocketCommand sc : sg.getCommands()) { if (sc.getRunner() == SocketCommandRunner.CONSOLE) { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) defender).getName()); } if (command.contains("%target%")) { if (attacker instanceof Player) { command = command.replace("%target%", ((Player) attacker).getName()); } else { continue; } } } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } else { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) defender).getName()); } if (command.contains("%target%")) { if (attacker instanceof Player) { command = command.replace("%target%", ((Player) attacker).getName()); } else { continue; } } } ((Player) defender).chat("/" + command); } } } } } } } @EventHandler(priority = EventPriority.MONITOR) public void onEntityDamageByEntityEvent(EntityDamageByEntityEvent event) { if (event.isCancelled()) { return; } Entity e = event.getEntity(); Entity d = event.getDamager(); if (!(e instanceof LivingEntity)) { return; } LivingEntity lee = (LivingEntity) e; LivingEntity led; if (d instanceof LivingEntity) { led = (LivingEntity) d; } else if (d instanceof Projectile) { led = ((Projectile) d).getShooter(); } else { return; } applyEffects(led, lee); runCommands(led, lee); } @Command(identifier = "mythicdropssockets gem", description = "Gives MythicDrops gems", permissions = "mythicdrops.command.gem") @Flags(identifier = {"a", "g"}, description = {"Amount to spawn", "Socket Gem to spawn"}) public void customSubcommand(CommandSender sender, @Arg(name = "player", def = "self") String playerName, @Arg(name = "amount", def = "1") @FlagArg("a") int amount, @Arg(name = "item", def = "*") @FlagArg("g") String itemName) { Player player; if (playerName.equalsIgnoreCase("self")) { if (sender instanceof Player) { player = (Player) sender; } else { sendMessage(sender, "command.no-access", new String[][]{}); return; } } else { player = Bukkit.getPlayer(playerName); } if (player == null) { sendMessage(sender, "command.player-does-not-exist", new String[][]{}); return; } SocketGem socketGem = null; if (!itemName.equalsIgnoreCase("*")) { try { socketGem = getSocketGemFromName(itemName); } catch (NullPointerException e) { e.printStackTrace(); sendMessage(sender, "command.socket-gem-does-not-exist", new String[][]{}); return; } } int amountGiven = 0; for (int i = 0; i < amount; i++) { try { ItemStack itemStack; if (socketGem == null) { itemStack = new SocketItem(getRandomSocketGemMaterial(), getRandomSocketGemWithChance()); } else { itemStack = new SocketItem(getRandomSocketGemMaterial(), socketGem); } itemStack.setDurability((short) 0); player.getInventory().addItem(itemStack); 
amountGiven++; } catch (Exception ignored) { ignored.printStackTrace(); } } sendMessage(player, "command.give-gem-receiver", new String[][]{{"%amount%", String.valueOf(amountGiven)}}); sendMessage(sender, "command.give-gem-sender", new String[][]{{"%amount%", String.valueOf(amountGiven)}, {"%receiver%", player.getName()}}); } public void sendMessage(CommandSender reciever, String path, String[][] arguments) { String message = getFormattedLanguageString(path, arguments); if (message == null) { return; } reciever.sendMessage(message); } public String getFormattedLanguageString(String key, String[][] args) { String s = getFormattedLanguageString(key); for (String[] arg : args) { s = s.replace(arg[0], arg[1]); } return s; } public String getFormattedLanguageString(String key) { return getLanguageString(key).replace('&', '\u00A7').replace("\u00A7\u00A7", "&"); } public SocketGem getRandomSocketGemWithChance() { if (socketGemMap == null || socketGemMap.isEmpty()) { return null; } Set<SocketGem> zeroChanceSocketGems = new HashSet<>(); while (zeroChanceSocketGems.size() != socketGemMap.size()) { for (SocketGem socket : socketGemMap.values()) { if (socket.getChance() <= 0.0D) { zeroChanceSocketGems.add(socket); continue; } if (RandomUtils.nextDouble() < socket.getChance()) { return socket; } } } return null; } public MaterialData getRandomSocketGemMaterial() { if (getSocketGemMaterialIds() == null || getSocketGemMaterialIds().isEmpty()) { return null; } return getSocketGemMaterialIds().get(RandomUtils.nextInt(getSocketGemMaterialIds().size())); } public List<MaterialData> getSocketGemMaterialIds() { return socketGemMaterialIds; } public SocketGem getSocketGemFromName(String name) { for (SocketGem sg : socketGemMap.values()) { if (sg.getName().equalsIgnoreCase(name)) { return sg; } } return null; } private class HeldItem { private final String name; private final ItemStack itemStack; public HeldItem(String name, ItemStack itemStack) { this.name = name; this.itemStack = itemStack; } public String getName() { return name; } public ItemStack getItemStack() { return itemStack; } } }
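As an aside for readers of the flattened source above: the following is a minimal, standalone sketch (not part of MythicDrops) of the chance-based roll that getRandomSocketGemWithChance() performs. Gems whose chance is 0 or less are set aside, and the loop keeps rolling a random double against each remaining gem's chance until one wins or every gem has been ruled out. The Gem and GemRollSketch names are hypothetical stand-ins, not plugin classes.

// Standalone illustration of the selection loop in getRandomSocketGemWithChance().
// Gem is a hypothetical stand-in for SocketGem; it only carries a name and a chance.
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

public class GemRollSketch {

    static class Gem {
        final String name;
        final double chance;

        Gem(String name, double chance) {
            this.name = name;
            this.chance = chance;
        }
    }

    private static final Random RANDOM = new Random();

    static Gem roll(List<Gem> gems) {
        if (gems == null || gems.isEmpty()) {
            return null;
        }
        Set<Gem> zeroChance = new HashSet<Gem>();
        // Exits either by returning a winning gem or once every gem is known to be unselectable.
        while (zeroChance.size() != gems.size()) {
            for (Gem gem : gems) {
                if (gem.chance <= 0.0D) {
                    zeroChance.add(gem); // never selectable, but counts toward the exit condition
                    continue;
                }
                if (RANDOM.nextDouble() < gem.chance) {
                    return gem; // this gem's roll succeeded
                }
            }
        }
        return null; // only reached when every gem has a chance of 0 or less
    }

    public static void main(String[] args) {
        List<Gem> gems = Arrays.asList(new Gem("Harden I", 0.8), new Gem("Harden II", 0.2));
        System.out.println(roll(gems).name);
    }
}

Gems with higher chance values win more often simply because their roll succeeds on more passes; the original method iterates socketGemMap.values() instead of a list, but the control flow is the same.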
MythicDrops-Sockets/src/main/java/net/nunnerycode/bukkit/mythicdrops/sockets/MythicDropsSockets.java
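Similarly, here is a minimal, standalone sketch of how loadSettings() above interprets each "options.socket-gem-material-ids" entry: either a bare type id such as "388" or an "id;data" pair such as "351;4" (illustrative values), with non-numeric parts falling back to 0 and id 0 entries skipped. MaterialPair and MaterialIdParseSketch are hypothetical stand-ins for Bukkit's MaterialData, not plugin classes.

// Standalone illustration of the "id" / "id;data" parsing done in loadSettings().
public class MaterialIdParseSketch {

    static class MaterialPair {
        final int id;
        final byte data;

        MaterialPair(int id, byte data) {
            this.id = id;
            this.data = data;
        }
    }

    static MaterialPair parse(String s) {
        int id;
        byte data;
        if (s.contains(";")) {
            String[] split = s.split(";");
            id = toInt(split[0]);
            data = (byte) toInt(split[1]);
        } else {
            id = toInt(s);
            data = 0;
        }
        return id == 0 ? null : new MaterialPair(id, data); // id 0 entries are skipped, as in loadSettings()
    }

    // Mirrors NumberUtils.toInt(str, 0): non-numeric input falls back to 0.
    static int toInt(String s) {
        try {
            return Integer.parseInt(s);
        } catch (NumberFormatException e) {
            return 0;
        }
    }

    public static void main(String[] args) {
        MaterialPair plain = parse("388");
        MaterialPair withData = parse("351;4");
        System.out.println(plain.id + " / " + withData.id + ":" + withData.data);
    }
}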
package net.nunnerycode.bukkit.mythicdrops.sockets; import com.conventnunnery.libraries.config.CommentedConventYamlConfiguration; import com.conventnunnery.libraries.config.ConventYamlConfiguration; import net.nunnerycode.bukkit.mythicdrops.utils.ItemUtil; import net.nunnerycode.java.libraries.cannonball.DebugPrinter; import org.apache.commons.lang.math.RandomUtils; import org.apache.commons.lang3.math.NumberUtils; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.Effect; import org.bukkit.command.CommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.configuration.file.YamlConfiguration; import org.bukkit.enchantments.Enchantment; import org.bukkit.entity.Entity; import org.bukkit.entity.LivingEntity; import org.bukkit.entity.Player; import org.bukkit.entity.Projectile; import org.bukkit.event.Event; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.Action; import org.bukkit.event.entity.EntityDamageByEntityEvent; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.meta.ItemMeta; import org.bukkit.material.MaterialData; import org.bukkit.plugin.java.JavaPlugin; import org.bukkit.potion.PotionEffectType; import se.ranzdo.bukkit.methodcommand.Arg; import se.ranzdo.bukkit.methodcommand.Command; import se.ranzdo.bukkit.methodcommand.CommandHandler; import se.ranzdo.bukkit.methodcommand.FlagArg; import se.ranzdo.bukkit.methodcommand.Flags; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; public class MythicDropsSockets extends JavaPlugin implements Listener { private static MythicDropsSockets _INSTANCE; private final Map<String, HeldItem> heldSocket = new HashMap<>(); private DebugPrinter debugPrinter; private Map<String, String> language; private String socketGemName; private List<String> socketGemLore; private String sockettedItemSocket; private List<String> sockettedItemLore; private ConventYamlConfiguration configYAML; private ConventYamlConfiguration socketGemsYAML; private boolean useAttackerItemInHand; private boolean useAttackerArmorEquipped; private boolean useDefenderItemInHand; private boolean useDefenderArmorEquipped; private double socketGemChanceToSpawn; private List<MaterialData> socketGemMaterialIds; private Map<String, SocketGem> socketGemMap; private List<String> socketGemPrefixes; private boolean preventMultipleChangesFromSockets; private List<String> socketGemSuffixes; public static MythicDropsSockets getInstance() { return _INSTANCE; } public Map<String, SocketGem> getSocketGemMap() { return Collections.unmodifiableMap(socketGemMap); } public List<String> getSocketGemPrefixes() { return Collections.unmodifiableList(socketGemPrefixes); } public List<String> getSocketGemSuffixes() { return Collections.unmodifiableList(socketGemSuffixes); } public String getSockettedItemSocket() { return sockettedItemSocket; } public List<String> getSockettedItemLore() { return sockettedItemLore; } public ConventYamlConfiguration getConfigYAML() { return configYAML; } public ConventYamlConfiguration getSocketGemsYAML() { return socketGemsYAML; } @Override public void onDisable() { debugPrinter.debug(Level.INFO, "v" + getDescription().getVersion() + " disabled"); } @Override 
public void onEnable() { _INSTANCE = this; debugPrinter = new DebugPrinter(getDataFolder().getPath(), "debug.log"); language = new HashMap<>(); socketGemMaterialIds = new ArrayList<>(); socketGemMap = new HashMap<>(); socketGemPrefixes = new ArrayList<>(); socketGemSuffixes = new ArrayList<>(); unpackConfigurationFiles(new String[]{"config.yml", "socketGems.yml"}, false); configYAML = new ConventYamlConfiguration(new File(getDataFolder(), "config.yml"), YamlConfiguration.loadConfiguration(getResource("config.yml")).getString("version")); configYAML.options().backupOnUpdate(true); configYAML.options().updateOnLoad(true); configYAML.load(); socketGemsYAML = new ConventYamlConfiguration(new File(getDataFolder(), "socketGems.yml"), YamlConfiguration.loadConfiguration(getResource("socketGems.yml")).getString("version")); socketGemsYAML.options().backupOnUpdate(true); socketGemsYAML.options().updateOnLoad(true); socketGemsYAML.load(); loadSettings(); loadGems(); getServer().getPluginManager().registerEvents(this, this); CommandHandler commandHandler = new CommandHandler(this); commandHandler.registerCommands(this); debugPrinter.debug(Level.INFO, "v" + getDescription().getVersion() + " enabled"); } @EventHandler(priority = EventPriority.NORMAL) public void onRightClick(PlayerInteractEvent event) { if (event.getAction() != Action.RIGHT_CLICK_AIR && event.getAction() != Action.RIGHT_CLICK_BLOCK) { return; } if (event.getItem() == null) { return; } Player player = event.getPlayer(); ItemStack itemInHand = event.getItem(); String itemType = ItemUtil.getItemTypeFromMaterialData(itemInHand.getData()); if (getSocketGemMaterialIds().contains(itemInHand.getData())) { event.setUseItemInHand(Event.Result.DENY); player.updateInventory(); } if (itemType != null && ItemUtil.isArmor(itemType) && itemInHand.hasItemMeta()) { event.setUseItemInHand(Event.Result.DENY); player.updateInventory(); } if (heldSocket.containsKey(player.getName())) { socketItem(event, player, itemInHand, itemType); heldSocket.remove(player.getName()); } else { addHeldSocket(event, player, itemInHand); } } private void loadGems() { socketGemMap.clear(); List<String> loadedSocketGems = new ArrayList<>(); if (!socketGemsYAML.isConfigurationSection("socket-gems")) { return; } ConfigurationSection cs = socketGemsYAML.getConfigurationSection("socket-gems"); for (String key : cs.getKeys(false)) { if (!cs.isConfigurationSection(key)) { continue; } ConfigurationSection gemCS = cs.getConfigurationSection(key); GemType gemType = GemType.getFromName(gemCS.getString("type")); if (gemType == null) { gemType = GemType.ANY; } List<SocketPotionEffect> socketPotionEffects = buildSocketPotionEffects(gemCS); List<SocketParticleEffect> socketParticleEffects = buildSocketParticleEffects(gemCS); double chance = gemCS.getDouble("chance"); String prefix = gemCS.getString("prefix"); if (prefix != null && !prefix.equalsIgnoreCase("")) { socketGemPrefixes.add(prefix); } String suffix = gemCS.getString("suffix"); if (suffix != null && !suffix.equalsIgnoreCase("")) { socketGemSuffixes.add(suffix); } List<String> lore = gemCS.getStringList("lore"); Map<Enchantment, Integer> enchantments = new HashMap<>(); if (gemCS.isConfigurationSection("enchantments")) { ConfigurationSection enchCS = gemCS.getConfigurationSection("enchantments"); for (String key1 : enchCS.getKeys(false)) { Enchantment ench = null; for (Enchantment ec : Enchantment.values()) { if (ec.getName().equalsIgnoreCase(key1)) { ench = ec; break; } } if (ench == null) { continue; } int level = 
enchCS.getInt(key1); enchantments.put(ench, level); } } List<String> commands = gemCS.getStringList("commands"); List<SocketCommand> socketCommands = new ArrayList<>(); for (String s : commands) { SocketCommand sc = new SocketCommand(s); socketCommands.add(sc); } SocketGem sg = new SocketGem(key, gemType, socketPotionEffects, socketParticleEffects, chance, prefix, suffix, lore, enchantments, socketCommands); socketGemMap.put(key, sg); loadedSocketGems.add(key); } debugPrinter.debug(Level.INFO, "Loaded socket gems: " + loadedSocketGems.toString()); } private List<SocketPotionEffect> buildSocketPotionEffects(ConfigurationSection cs) { List<SocketPotionEffect> socketPotionEffectList = new ArrayList<>(); if (!cs.isConfigurationSection("potion-effects")) { return socketPotionEffectList; } ConfigurationSection cs1 = cs.getConfigurationSection("potion-effects"); for (String key : cs1.getKeys(false)) { PotionEffectType pet = PotionEffectType.getByName(key); if (pet == null) { continue; } int duration = cs1.getInt(key + ".duration"); int intensity = cs1.getInt(key + ".intensity"); int radius = cs1.getInt(key + ".radius"); String target = cs1.getString(key + ".target"); EffectTarget et = EffectTarget.getFromName(target); if (et == null) { et = EffectTarget.NONE; } boolean affectsWielder = cs1.getBoolean(key + ".affectsWielder"); boolean affectsTarget = cs1.getBoolean(key + ".affectsTarget"); socketPotionEffectList.add(new SocketPotionEffect(pet, intensity, duration, radius, et, affectsWielder, affectsTarget)); } return socketPotionEffectList; } private List<SocketParticleEffect> buildSocketParticleEffects(ConfigurationSection cs) { List<SocketParticleEffect> socketParticleEffectList = new ArrayList<>(); if (!cs.isConfigurationSection("particle-effects")) { return socketParticleEffectList; } ConfigurationSection cs1 = cs.getConfigurationSection("particle-effects"); for (String key : cs1.getKeys(false)) { Effect pet; try { pet = Effect.valueOf(key); } catch (Exception e) { continue; } if (pet == null) { continue; } int duration = cs1.getInt(key + ".duration"); int intensity = cs1.getInt(key + ".intensity"); int radius = cs1.getInt(key + ".radius"); String target = cs1.getString(key + ".target"); EffectTarget et = EffectTarget.getFromName(target); if (et == null) { et = EffectTarget.NONE; } boolean affectsWielder = cs1.getBoolean(key + ".affectsWielder"); boolean affectsTarget = cs1.getBoolean(key + ".affectsTarget"); socketParticleEffectList.add(new SocketParticleEffect(pet, intensity, duration, radius, et, affectsWielder, affectsTarget)); } return socketParticleEffectList; } private void loadSettings() { useAttackerItemInHand = configYAML.getBoolean("options.use-attacker-item-in-hand", true); useAttackerArmorEquipped = configYAML.getBoolean("options.use-attacker-armor-equipped", false); useDefenderItemInHand = configYAML.getBoolean("options.use-defender-item-in-hand", false); useDefenderArmorEquipped = configYAML.getBoolean("options.use-defender-armor-equipped", true); socketGemChanceToSpawn = configYAML.getDouble("options.socket-gem-chance-to-spawn", 0.25); preventMultipleChangesFromSockets = configYAML.getBoolean("options.prevent-multiple-changes-from-sockets", true); List<String> socketGemMats = configYAML.getStringList("options.socket-gem-material-ids"); for (String s : socketGemMats) { int id; byte data; if (s.contains(";")) { String[] split = s.split(";"); id = NumberUtils.toInt(split[0], 0); data = (byte) NumberUtils.toInt(split[1], 0); } else { id = NumberUtils.toInt(s, 0); data = 0; } if (id 
== 0) { continue; } socketGemMaterialIds.add(new MaterialData(id, data)); } socketGemName = configYAML.getString("items.socket-name", "&6Socket Gem - %socketgem%"); socketGemLore = configYAML.getStringList("items.socket-lore"); sockettedItemSocket = configYAML.getString("items.socketted-item-socket", "&6(Socket)"); sockettedItemLore = configYAML.getStringList("items.socketted-item-lore"); language.clear(); for (String key : configYAML.getConfigurationSection("language").getKeys(true)) { if (configYAML.getConfigurationSection("language").isConfigurationSection(key)) { continue; } language.put(key, configYAML.getConfigurationSection("language").getString(key, key)); } } private void unpackConfigurationFiles(String[] configurationFiles, boolean overwrite) { for (String s : configurationFiles) { YamlConfiguration yc = CommentedConventYamlConfiguration.loadConfiguration(getResource(s)); try { File f = new File(getDataFolder(), s); if (!f.exists()) { yc.save(f); continue; } if (overwrite) { yc.save(f); } } catch (IOException e) { getLogger().warning("Could not unpack " + s); } } } public String getLanguageString(String key, String[][] args) { String s = getLanguageString(key); for (String[] arg : args) { s = s.replace(arg[0], arg[1]); } return s; } public String getLanguageString(String key) { return language.containsKey(key) ? language.get(key) : key; } public List<String> replaceArgs(List<String> strings, String[][] args) { List<String> list = new ArrayList<>(); for (String s : strings) { list.add(replaceArgs(s, args)); } return list; } public String replaceArgs(String string, String[][] args) { String s = string; for (String[] arg : args) { s = s.replace(arg[0], arg[1]); } return s; } public String getSocketGemName() { return socketGemName; } public List<String> getSocketGemLore() { return socketGemLore; } public double getSocketGemChanceToSpawn() { return socketGemChanceToSpawn; } public boolean socketGemTypeMatchesItemStack(SocketGem socketGem, ItemStack itemStack) { String itemType = ItemUtil.getItemTypeFromMaterialData(itemStack.getData()); if (itemType == null) { return false; } switch (socketGem.getGemType()) { case TOOL: return ItemUtil.isTool(itemType); case ARMOR: return ItemUtil.isArmor(itemType); case ANY: return true; default: return false; } } private void addHeldSocket(PlayerInteractEvent event, final Player player, ItemStack itemInHand) { if (!getSocketGemMaterialIds().contains(itemInHand.getData())) { return; } if (!itemInHand.hasItemMeta()) { return; } ItemMeta im = itemInHand.getItemMeta(); if (!im.hasDisplayName()) { return; } String type = ChatColor.stripColor(im.getDisplayName().replace(replaceArgs(socketGemName, new String[][]{{"%socketgem%", ""}}), "")); if (type == null) { return; } SocketGem socketGem = socketGemMap.get(type); if (socketGem == null) { return; } sendMessage(player, "messages.instructions", new String[][]{}); HeldItem hg = new HeldItem(socketGem.getName(), itemInHand); heldSocket.put(player.getName(), hg); Bukkit.getScheduler().runTaskLaterAsynchronously(this, new Runnable() { @Override public void run() { heldSocket.remove(player.getName()); } }, 30 * 20L); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); player.updateInventory(); } public List<MaterialData> getSocketGemMaterialIds() { return socketGemMaterialIds; } public void sendMessage(CommandSender reciever, String path, String[][] arguments) { String message = getFormattedLanguageString(path, arguments); if (message == null) { 
return; } reciever.sendMessage(message); } public String getFormattedLanguageString(String key, String[][] args) { String s = getFormattedLanguageString(key); for (String[] arg : args) { s = s.replace(arg[0], arg[1]); } return s; } public String getFormattedLanguageString(String key) { return getLanguageString(key).replace('&', '\u00A7').replace("\u00A7\u00A7", "&"); } public boolean isPreventMultipleChangesFromSockets() { return preventMultipleChangesFromSockets; } private void socketItem(PlayerInteractEvent event, Player player, ItemStack itemInHand, String itemType) { if (ItemUtil.isArmor(itemType) || ItemUtil.isTool(itemType)) { if (!itemInHand.hasItemMeta()) { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } ItemMeta im = itemInHand.getItemMeta(); if (!im.hasLore()) { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } List<String> lore = new ArrayList<String>(im.getLore()); String socketString = getFormattedLanguageString("items.socketted-item-socket"); int index = indexOfStripColor(lore, socketString); if (index < 0) { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } HeldItem heldSocket1 = heldSocket.get(player.getName()); String socketGemType = ChatColor.stripColor(heldSocket1 .getName()); SocketGem socketGem = getSocketGemFromName(socketGemType); if (socketGem == null || !socketGemTypeMatchesItemStack(socketGem, itemInHand)) { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } lore.set(index, ChatColor.GOLD + socketGem.getName()); lore.removeAll(sockettedItemLore); im.setLore(lore); itemInHand.setItemMeta(im); prefixItemStack(itemInHand, socketGem); suffixItemStack(itemInHand, socketGem); loreItemStack(itemInHand, socketGem); enchantmentItemStack(itemInHand, socketGem); if (player.getInventory().contains(heldSocket1.getItemStack())) { int indexOfItem = player.getInventory().first(heldSocket1.getItemStack()); ItemStack inInventory = player.getInventory().getItem(indexOfItem); inInventory.setAmount(inInventory.getAmount() - 1); player.getInventory().setItem(indexOfItem, inInventory); player.updateInventory(); } else { sendMessage(player, "messages.do-not-have", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); return; } player.setItemInHand(itemInHand); sendMessage(player, "messages.success", new String[][]{}); event.setUseInteractedBlock(Event.Result.DENY); event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); } else { sendMessage(player, "messages.cannot-use", new String[][]{}); event.setCancelled(true); event.setUseInteractedBlock(Event.Result.DENY); 
event.setUseItemInHand(Event.Result.DENY); heldSocket.remove(player.getName()); player.updateInventory(); } } public int indexOfStripColor(List<String> list, String string) { String[] array = list.toArray(new String[list.size()]); for (int i = 0; i < array.length; i++) { if (ChatColor.stripColor(array[i]).equalsIgnoreCase(ChatColor.stripColor(string))) { return i; } } return -1; } public int indexOfStripColor(String[] array, String string) { for (int i = 0; i < array.length; i++) { if (ChatColor.stripColor(array[i]).equalsIgnoreCase(ChatColor.stripColor(string))) { return i; } } return -1; } public ItemStack loreItemStack(ItemStack itemStack, SocketGem socketGem) { ItemMeta im; if (itemStack.hasItemMeta()) { im = itemStack.getItemMeta(); } else { im = Bukkit.getItemFactory().getItemMeta(itemStack.getType()); } if (!im.hasLore()) { im.setLore(new ArrayList<String>()); } List<String> lore = new ArrayList<String>(im.getLore()); if (lore.containsAll(socketGem.getLore())) { return itemStack; } for (String s : socketGem.getLore()) { lore.add(s.replace('&', '\u00A7').replace("\u00A7\u00A7", "&")); } im.setLore(lore); itemStack.setItemMeta(im); return itemStack; } public ItemStack enchantmentItemStack(ItemStack itemStack, SocketGem socketGem) { if (itemStack == null || socketGem == null) { return itemStack; } Map<Enchantment, Integer> itemStackEnchantments = new HashMap<Enchantment, Integer>(itemStack.getEnchantments()); for (Map.Entry<Enchantment, Integer> entry : socketGem.getEnchantments().entrySet()) { if (itemStackEnchantments.containsKey(entry.getKey())) { itemStack.removeEnchantment(entry.getKey()); int level = Math.abs(itemStackEnchantments.get(entry.getKey()) + entry.getValue()); if (level <= 0) { continue; } itemStack.addUnsafeEnchantment(entry.getKey(), level); } else { itemStack.addUnsafeEnchantment(entry.getKey(), entry.getValue() <= 0 ? Math.abs(entry.getValue()) == 0 ? 
1 : Math.abs(entry.getValue()) : entry.getValue()); } } return itemStack; } public ItemStack suffixItemStack(ItemStack itemStack, SocketGem socketGem) { ItemMeta im; if (!itemStack.hasItemMeta()) { im = Bukkit.getItemFactory().getItemMeta(itemStack.getType()); } else { im = itemStack.getItemMeta(); } String name = im.getDisplayName(); if (name == null) { return itemStack; } ChatColor beginColor = findColor(name); String lastColors = ChatColor.getLastColors(name); if (beginColor == null) { beginColor = ChatColor.WHITE; } String suffix = socketGem.getSuffix(); if (suffix == null || suffix.equalsIgnoreCase("")) { return itemStack; } if (isPreventMultipleChangesFromSockets() && ChatColor.stripColor(name).contains(suffix) || containsAnyFromList(ChatColor.stripColor(name), socketGemSuffixes)) { return itemStack; } im.setDisplayName(name + " " + beginColor + suffix + lastColors); itemStack.setItemMeta(im); return itemStack; } public ItemStack prefixItemStack(ItemStack itemStack, SocketGem socketGem) { ItemMeta im; if (itemStack.hasItemMeta()) { im = itemStack.getItemMeta(); } else { im = Bukkit.getItemFactory().getItemMeta(itemStack.getType()); } String name = im.getDisplayName(); if (name == null) { return itemStack; } ChatColor beginColor = findColor(name); if (beginColor == null) { beginColor = ChatColor.WHITE; } String prefix = socketGem.getPrefix(); if (prefix == null || prefix.equalsIgnoreCase("")) { return itemStack; } if (isPreventMultipleChangesFromSockets() && ChatColor.stripColor(name).contains(prefix) || containsAnyFromList(ChatColor.stripColor(name), socketGemPrefixes)) { return itemStack; } im.setDisplayName(beginColor + prefix + " " + name); itemStack.setItemMeta(im); return itemStack; } public ChatColor findColor(final String s) { char[] c = s.toCharArray(); for (int i = 0; i < c.length; i++) { if (c[i] == (char) 167 && i + 1 < c.length) { return ChatColor.getByChar(c[i + 1]); } } return null; } public boolean containsAnyFromList(String string, List<String> list) { for (String s : list) { if (string.toUpperCase().contains(s.toUpperCase())) { return true; } } return false; } public SocketGem getRandomSocketGemWithChance() { if (socketGemMap == null || socketGemMap.isEmpty()) { return null; } Set<SocketGem> zeroChanceSocketGems = new HashSet<>(); while (zeroChanceSocketGems.size() != socketGemMap.size()) { for (SocketGem socket : socketGemMap.values()) { if (socket.getChance() <= 0.0D) { zeroChanceSocketGems.add(socket); continue; } if (RandomUtils.nextDouble() < socket.getChance()) { return socket; } } } return null; } public MaterialData getRandomSocketGemMaterial() { if (getSocketGemMaterialIds() == null || getSocketGemMaterialIds().isEmpty()) { return null; } return getSocketGemMaterialIds().get(RandomUtils.nextInt(getSocketGemMaterialIds().size())); } public void applyEffects(LivingEntity attacker, LivingEntity defender) { if (attacker == null || defender == null) { return; } // handle attacker if (isUseAttackerArmorEquipped()) { for (ItemStack attackersItem : attacker.getEquipment().getArmorContents()) { if (attackersItem == null) { continue; } List<SocketGem> attackerSocketGems = getSocketGems(attackersItem); if (attackerSocketGems != null && !attackerSocketGems.isEmpty()) { for (SocketGem sg : attackerSocketGems) { if (sg == null) { continue; } if (sg.getGemType() != GemType.TOOL && sg.getGemType() != GemType.ANY) { continue; } for (SocketPotionEffect se : sg.getSocketPotionEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: 
se.apply(attacker); break; case OTHER: se.apply(defender); break; case AREA: for (Entity e : attacker.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(defender)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(attacker); } break; default: break; } } for (SocketParticleEffect se : sg.getSocketParticleEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(attacker); break; case OTHER: se.apply(defender); break; case AREA: for (Entity e : attacker.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(defender)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(attacker); } break; default: break; } } } } } } if (isUseAttackerItemInHand() && attacker.getEquipment().getItemInHand() != null) { List<SocketGem> attackerSocketGems = getSocketGems(attacker.getEquipment().getItemInHand()); if (attackerSocketGems != null && !attackerSocketGems.isEmpty()) { for (SocketGem sg : attackerSocketGems) { if (sg == null) { continue; } if (sg.getGemType() != GemType.TOOL && sg.getGemType() != GemType.ANY) { continue; } for (SocketPotionEffect se : sg.getSocketPotionEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(attacker); break; case OTHER: se.apply(defender); break; case AREA: for (Entity e : attacker.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(defender)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(attacker); } break; default: break; } } for (SocketParticleEffect se : sg.getSocketParticleEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(attacker); break; case OTHER: se.apply(defender); break; case AREA: for (Entity e : attacker.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(defender)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(attacker); } break; default: break; } } } } } // handle defender if (isUseDefenderArmorEquipped()) { for (ItemStack defenderItem : defender.getEquipment().getArmorContents()) { if (defenderItem == null) { continue; } List<SocketGem> defenderSocketGems = getSocketGems(defenderItem); for (SocketGem sg : defenderSocketGems) { if (sg.getGemType() != GemType.ARMOR && sg.getGemType() != GemType.ANY) { continue; } for (SocketPotionEffect se : sg.getSocketPotionEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(defender); break; case OTHER: se.apply(attacker); break; case AREA: for (Entity e : defender.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(attacker)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(defender); } break; default: break; } } for (SocketParticleEffect se : sg.getSocketParticleEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(defender); break; case OTHER: se.apply(attacker); break; case AREA: for (Entity e : defender.getNearbyEntities(se.getRadius(), 
se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(attacker)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(defender); } break; default: break; } } } } } if (isUseDefenderItemInHand() && defender.getEquipment().getItemInHand() != null) { List<SocketGem> defenderSocketGems = getSocketGems(defender.getEquipment().getItemInHand()); if (defenderSocketGems != null && !defenderSocketGems.isEmpty()) { for (SocketGem sg : defenderSocketGems) { if (sg.getGemType() != GemType.ARMOR && sg.getGemType() != GemType.ANY) { continue; } for (SocketPotionEffect se : sg.getSocketPotionEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(defender); break; case OTHER: se.apply(attacker); break; case AREA: for (Entity e : defender.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(attacker)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(defender); } break; default: break; } } for (SocketParticleEffect se : sg.getSocketParticleEffects()) { if (se == null) { continue; } switch (se.getEffectTarget()) { case SELF: se.apply(defender); break; case OTHER: se.apply(attacker); break; case AREA: for (Entity e : defender.getNearbyEntities(se.getRadius(), se.getRadius(), se.getRadius())) { if (!(e instanceof LivingEntity)) { continue; } if (!se.isAffectsTarget() && e.equals(attacker)) { continue; } se.apply((LivingEntity) e); } if (se.isAffectsWielder()) { se.apply(defender); } break; default: break; } } } } } } public boolean isUseAttackerItemInHand() { return useAttackerItemInHand; } public boolean isUseAttackerArmorEquipped() { return useAttackerArmorEquipped; } public boolean isUseDefenderItemInHand() { return useDefenderItemInHand; } public boolean isUseDefenderArmorEquipped() { return useDefenderArmorEquipped; } public List<SocketGem> getSocketGems(ItemStack itemStack) { List<SocketGem> socketGemList = new ArrayList<SocketGem>(); ItemMeta im; if (itemStack.hasItemMeta()) { im = itemStack.getItemMeta(); } else { return socketGemList; } List<String> lore = im.getLore(); if (lore == null) { return socketGemList; } for (String s : lore) { SocketGem sg = getSocketGemFromName(ChatColor.stripColor(s)); if (sg == null) { continue; } socketGemList.add(sg); } return socketGemList; } public SocketGem getSocketGemFromName(String name) { for (SocketGem sg : socketGemMap.values()) { if (sg.getName().equalsIgnoreCase(name)) { return sg; } } return null; } public void runCommands(LivingEntity attacker, LivingEntity defender) { if (attacker == null || defender == null) { return; } if (attacker instanceof Player) { if (isUseAttackerArmorEquipped()) { for (ItemStack attackersItem : attacker.getEquipment().getArmorContents()) { if (attackersItem == null) { continue; } List<SocketGem> attackerSocketGems = getSocketGems(attackersItem); if (attackerSocketGems != null && !attackerSocketGems.isEmpty()) { for (SocketGem sg : attackerSocketGems) { if (sg == null) { continue; } for (SocketCommand sc : sg.getCommands()) { if (sc.getRunner() == SocketCommandRunner.CONSOLE) { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) attacker).getName()); } if (command.contains("%target%")) { if (defender instanceof Player) { command = 
command.replace("%target%", ((Player) defender).getName()); } else { continue; } } } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } else { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) attacker).getName()); } if (command.contains("%target%")) { if (defender instanceof Player) { command = command.replace("%target%", ((Player) defender).getName()); } else { continue; } } } ((Player) attacker).chat("/" + command); } } } } } } if (isUseAttackerItemInHand() && attacker.getEquipment().getItemInHand() != null) { List<SocketGem> attackerSocketGems = getSocketGems(attacker.getEquipment().getItemInHand()); if (attackerSocketGems != null && !attackerSocketGems.isEmpty()) { for (SocketGem sg : attackerSocketGems) { if (sg == null) { continue; } for (SocketCommand sc : sg.getCommands()) { if (sc.getRunner() == SocketCommandRunner.CONSOLE) { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) attacker).getName()); } if (command.contains("%target%")) { if (defender instanceof Player) { command = command.replace("%target%", ((Player) defender).getName()); } else { continue; } } } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } else { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) attacker).getName()); } if (command.contains("%target%")) { if (defender instanceof Player) { command = command.replace("%target%", ((Player) defender).getName()); } else { continue; } } } ((Player) attacker).chat("/" + command); } } } } } } if (defender instanceof Player) { if (isUseDefenderArmorEquipped()) { for (ItemStack defendersItem : defender.getEquipment().getArmorContents()) { if (defendersItem == null) { continue; } List<SocketGem> defenderSocketGems = getSocketGems(defendersItem); if (defenderSocketGems != null && !defenderSocketGems.isEmpty()) { for (SocketGem sg : defenderSocketGems) { if (sg == null) { continue; } for (SocketCommand sc : sg.getCommands()) { if (sc.getRunner() == SocketCommandRunner.CONSOLE) { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) defender).getName()); } if (command.contains("%target%")) { if (attacker instanceof Player) { command = command.replace("%target%", ((Player) attacker).getName()); } else { continue; } } } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } else { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) defender).getName()); } if (command.contains("%target%")) { if (attacker instanceof Player) { command = command.replace("%target%", ((Player) attacker).getName()); } else { continue; } } } ((Player) defender).chat("/" + command); } } } } } } if (isUseDefenderItemInHand() && defender.getEquipment().getItemInHand() != null) { List<SocketGem> defenderSocketGems = getSocketGems(defender.getEquipment().getItemInHand()); if (defenderSocketGems != null && !defenderSocketGems.isEmpty()) { for (SocketGem sg : defenderSocketGems) { if (sg == null) { 
continue; } for (SocketCommand sc : sg.getCommands()) { if (sc.getRunner() == SocketCommandRunner.CONSOLE) { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) defender).getName()); } if (command.contains("%target%")) { if (attacker instanceof Player) { command = command.replace("%target%", ((Player) attacker).getName()); } else { continue; } } } Bukkit.dispatchCommand(Bukkit.getConsoleSender(), command); } else { String command = sc.getCommand(); if (command.contains("%wielder%") || command.contains("%target%")) { if (command.contains("%wielder%")) { command = command.replace("%wielder%", ((Player) defender).getName()); } if (command.contains("%target%")) { if (attacker instanceof Player) { command = command.replace("%target%", ((Player) attacker).getName()); } else { continue; } } } ((Player) defender).chat("/" + command); } } } } } } } private class HeldItem { private final String name; private final ItemStack itemStack; public HeldItem(String name, ItemStack itemStack) { this.name = name; this.itemStack = itemStack; } public String getName() { return name; } public ItemStack getItemStack() { return itemStack; } } @EventHandler(priority = EventPriority.MONITOR) public void onEntityDamageByEntityEvent(EntityDamageByEntityEvent event) { if (event.isCancelled()) { return; } Entity e = event.getEntity(); Entity d = event.getDamager(); if (!(e instanceof LivingEntity)) { return; } LivingEntity lee = (LivingEntity) e; LivingEntity led; if (d instanceof LivingEntity) { led = (LivingEntity) d; } else if (d instanceof Projectile) { led = ((Projectile) d).getShooter(); } else { return; } applyEffects(led, lee); runCommands(led, lee); } @Command(identifier = "mythicdropssockets gem", description = "Gives MythicDrops gems", permissions = "mythicdrops.command.gem") @Flags(identifier = {"a", "g"}, description = {"Amount to spawn", "Socket Gem to spawn"}) public void customSubcommand(CommandSender sender, @Arg(name = "player", def = "self") String playerName, @Arg(name = "amount", def = "1") @FlagArg("a") int amount, @Arg(name = "item", def = "*") @FlagArg("g") String itemName) { Player player; if (playerName.equalsIgnoreCase("self")) { if (sender instanceof Player) { player = (Player) sender; } else { sendMessage(sender, "command.no-access", new String[][]{}); return; } } else { player = Bukkit.getPlayer(playerName); } if (player == null) { sendMessage(sender, "command.player-does-not-exist", new String[][]{}); return; } SocketGem socketGem = null; if (!itemName.equalsIgnoreCase("*")) { try { socketGem = getSocketGemFromName(itemName); } catch (NullPointerException e) { e.printStackTrace(); sendMessage(sender, "command.socket-gem-does-not-exist", new String[][]{}); return; } } int amountGiven = 0; for (int i = 0; i < amount; i++) { try { ItemStack itemStack; if (socketGem == null) { itemStack = new SocketItem(getRandomSocketGemMaterial(), getRandomSocketGemWithChance()); } else { itemStack = new SocketItem(getRandomSocketGemMaterial(), socketGem); } itemStack.setDurability((short) 0); player.getInventory().addItem(itemStack); amountGiven++; } catch (Exception ignored) { ignored.printStackTrace(); } } sendMessage(player, "command.give-gem-receiver", new String[][]{{"%amount%", String.valueOf(amountGiven)}}); sendMessage(sender, "command.give-gem-sender", new String[][]{{"%amount%", String.valueOf(amountGiven)}, {"%receiver%", player.getName()}}); } }
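The runCommands logic in the plugin source above repeats the %wielder%/%target% substitution for every attacker/defender and armor/held-item combination. The following is only an illustrative refactoring sketch and is not part of the commit; the helper class, its name, and the null-return convention are assumptions, while the Bukkit Player/LivingEntity types and the placeholder strings come from that code.

import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;

final class SocketCommandPlaceholders {

    private SocketCommandPlaceholders() {
    }

    /**
     * Fills the %wielder% and %target% placeholders used by socket gem commands.
     * Returns null when a placeholder requires a player but the given entity is
     * not one, mirroring the "continue" branches in the plugin code above.
     */
    static String fill(String command, LivingEntity wielder, LivingEntity target) {
        if (command.contains("%wielder%")) {
            if (!(wielder instanceof Player)) {
                return null;
            }
            command = command.replace("%wielder%", ((Player) wielder).getName());
        }
        if (command.contains("%target%")) {
            if (!(target instanceof Player)) {
                return null;
            }
            command = command.replace("%target%", ((Player) target).getName());
        }
        return command;
    }
}

Each duplicated block could then call fill(sc.getCommand(), wielder, target) with the roles swapped as needed, skip the command when the helper returns null, and keep the existing choice between console dispatch and player chat unchanged.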
socketting begins
MythicDrops-Sockets/src/main/java/net/nunnerycode/bukkit/mythicdrops/sockets/MythicDropsSockets.java
socketting begins
Java
mit
2fa9976cbb549ac1a7f3f2c4a889f15731e2203e
0
utnas/reactive_x_example
import org.junit.Test;
import rx.Observable;
import rx.observers.TestSubscriber;

import static java.util.Arrays.asList;
import static java.util.Collections.EMPTY_LIST;
import static java.util.Collections.singletonList;

public class ObservableTest {

    @Test
    public void emptyObservableTest() {
        //Given
        final Observable<String> emptyObservable = Observable.empty();
        final TestSubscriber<String> testSubscriber = new TestSubscriber<>();

        //When
        emptyObservable.subscribe(testSubscriber);

        //Then
        testSubscriber.assertNoErrors();
        testSubscriber.assertReceivedOnNext(EMPTY_LIST);
        testSubscriber.assertCompleted();
    }

    @Test
    public void justObservableWithValueTestT() {
        // Given
        final Observable<String> observable = Observable.just("value");
        final TestSubscriber<String> testSubscriber = new TestSubscriber<>();

        //When
        observable.subscribe(testSubscriber);

        //Then
        testSubscriber.assertNoErrors();
        testSubscriber.assertReceivedOnNext(singletonList("value"));
        testSubscriber.assertCompleted();
    }

    @Test
    public void fromObservableTest() {
        //Given
        final Observable<String> observable = Observable.from(asList("one", "two", "three"));
        final TestSubscriber<String> testSubscriber = new TestSubscriber<>();

        //When
        observable.subscribe(testSubscriber);

        //Then
        testSubscriber.assertNoErrors();
        testSubscriber.assertReceivedOnNext(asList("one", "two", "three"));
        testSubscriber.assertCompleted();
    }
}
java-example/src/test/java/ObservableTest.java
import org.junit.Test;
import rx.Observable;
import rx.observers.TestSubscriber;

import static java.util.Arrays.asList;
import static java.util.Collections.EMPTY_LIST;
import static java.util.Collections.singletonList;

public class ObservableTest {

    @Test
    public void emptyObservableTest() {
        //Given
        final Observable<String> empty = Observable.empty();
        final TestSubscriber<String> testSubscriber = new TestSubscriber<>();

        //When
        empty.subscribe(testSubscriber);

        //Then
        testSubscriber.assertNoErrors();
        testSubscriber.assertReceivedOnNext(EMPTY_LIST);
        testSubscriber.assertCompleted();
    }

    @Test
    public void justObservableWithValueTestT() {
        // Given
        final Observable<String> stringValue = Observable.just("value");
        final TestSubscriber<String> testSubscriber = new TestSubscriber<>();

        //When
        stringValue.subscribe(testSubscriber);

        //Then
        testSubscriber.assertNoErrors();
        testSubscriber.assertReceivedOnNext(singletonList("value"));
        testSubscriber.assertCompleted();
    }

    @Test
    public void fromObservableTest() {
        //Given
        final Observable<String> values = Observable.from(asList("one", "two", "three"));
        final TestSubscriber<String> testSubscriber = new TestSubscriber<>();

        //When
        values.subscribe(testSubscriber);

        //Then
        testSubscriber.assertNoErrors();
        testSubscriber.assertReceivedOnNext(asList("one", "two", "three"));
        testSubscriber.assertCompleted();
    }
}
WIP renaming observable tests
java-example/src/test/java/ObservableTest.java
WIP renaming observable tests
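The renamed tests above assert results with TestSubscriber.assertReceivedOnNext(List). As a hedged aside, reasonably recent RxJava 1.x releases also ship varargs assertions (assertValue/assertValues) on the same TestSubscriber; whether they are available depends on the RxJava version on the classpath, so treat the sketch below as something to verify against the project's dependency, not as part of the commit. The test class name is made up for illustration.

import org.junit.Test;
import rx.Observable;
import rx.observers.TestSubscriber;

import static java.util.Arrays.asList;

public class ObservableAssertStyleTest {

    @Test
    public void fromObservableWithVarargsAssertions() {
        //Given
        final Observable<String> observable = Observable.from(asList("one", "two", "three"));
        final TestSubscriber<String> testSubscriber = new TestSubscriber<>();

        //When
        observable.subscribe(testSubscriber);

        //Then - varargs variant of assertReceivedOnNext(asList(...))
        testSubscriber.assertNoErrors();
        testSubscriber.assertValues("one", "two", "three");
        testSubscriber.assertCompleted();
    }
}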
Java
mit
634ce7e566207ab031d7d2fb70a1133919659bd7
0
FAForever/faf-java-api,micheljung/faf-java-api,FAForever/faf-java-api,micheljung/faf-java-api,FAForever/faf-java-api
package com.faforever.api.data; import com.faforever.api.clan.ClanMembershipRepository; import com.faforever.api.clan.ClanRepository; import com.faforever.api.client.ClientType; import com.faforever.api.client.OAuthClient; import com.faforever.api.client.OAuthClientRepository; import com.faforever.api.data.domain.Clan; import com.faforever.api.data.domain.ClanMembership; import com.faforever.api.data.domain.Player; import com.faforever.api.data.domain.User; import com.faforever.api.player.PlayerRepository; import com.faforever.api.user.UserRepository; import lombok.SneakyThrows; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.node.ObjectNode; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.security.authentication.encoding.ShaPasswordEncoder; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.ResultActions; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.util.Base64Utils; import org.springframework.web.context.WebApplicationContext; import javax.inject.Inject; import javax.servlet.Filter; import java.util.Arrays; import java.util.Collections; import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @RunWith(SpringRunner.class) @SpringBootTest public class JsonApiClanTest { private MockMvc mvc; private WebApplicationContext context; private Filter springSecurityFilterChain; private ClanRepository clanRepository; private UserRepository userRepository; private ClanMembershipRepository clanMembershipRepository; private PlayerRepository playerRepository; private OAuthClientRepository oAuthClientRepository; private ObjectMapper objectMapper; private ShaPasswordEncoder shaPasswordEncoder; private Player me; private static final String OAUTH_CLIENT_ID = "1234"; private static final String OAUTH_SECRET = "secret"; public JsonApiClanTest() { objectMapper = new ObjectMapper(); shaPasswordEncoder = new ShaPasswordEncoder(256); } @Inject public void init(WebApplicationContext context, ClanRepository clanRepository, UserRepository userRepository, PlayerRepository playerRepository, OAuthClientRepository oAuthClientRepository, Filter springSecurityFilterChain, ClanMembershipRepository clanMembershipRepository) { this.context = context; this.clanRepository = clanRepository; this.userRepository = userRepository; this.playerRepository = playerRepository; this.oAuthClientRepository = oAuthClientRepository; this.springSecurityFilterChain = springSecurityFilterChain; this.clanMembershipRepository = clanMembershipRepository; } @Before public void setUp() { mvc = MockMvcBuilders .webAppContextSetup(context) .addFilter(springSecurityFilterChain) .build(); me = null; } @After public void tearDown() { clanMembershipRepository.deleteAll(); 
clanRepository.deleteAll(); userRepository.deleteAll(); oAuthClientRepository.deleteAll(); assertEquals(0, clanMembershipRepository.count()); assertEquals(0, clanRepository.count()); assertEquals(0, userRepository.count()); assertEquals(0, oAuthClientRepository.count()); } @SneakyThrows public String createUserAndGetAccessToken(String login, String password) { OAuthClient client = new OAuthClient() .setId(OAUTH_CLIENT_ID) .setName("test") .setClientSecret(OAUTH_SECRET) .setRedirectUris("test") .setDefaultRedirectUri("test") .setDefaultScope("test") .setClientType(ClientType.PUBLIC); oAuthClientRepository.save(client); User user = (User) new User() .setPassword(shaPasswordEncoder.encodePassword(password, null)) .setLogin(login) .setEMail(login + "@faforever.com"); userRepository.save(user); me = playerRepository.findOne(user.getId()); String authorization = "Basic " + new String(Base64Utils.encode((OAUTH_CLIENT_ID + ":" + OAUTH_SECRET).getBytes())); ResultActions auth = mvc .perform( post("/oauth/token") .header("Authorization", authorization) .param("username", login) .param("password", password) .param("grant_type", "password")); auth.andExpect(status().isOk()); JsonNode node = objectMapper.readTree(auth.andReturn().getResponse().getContentAsString()); return "Bearer " + node.get("access_token").asText(); } private Player createPlayer(String login) { User user = (User) new User() .setPassword("foo") .setLogin(login) .setEMail(login + "@faforever.com"); userRepository.save(user); return playerRepository.findOne(user.getId()); } @Test @SneakyThrows public void cannotKickLeaderFromClan() { String accessToken = createUserAndGetAccessToken("Dragonfire", "foo"); Clan clan = new Clan().setLeader(me).setTag("123").setName("abcClanName"); ClanMembership membership = new ClanMembership().setPlayer(me).setClan(clan); clan.setMemberships(Collections.singletonList(membership)); clanRepository.save(clan); assertEquals(1, clanMembershipRepository.count()); this.mvc.perform(delete("/data/clan_membership/" + membership.getId()) .header("Authorization", accessToken)) .andExpect(content().string("{\"errors\":[\"ForbiddenAccessException\"]}")) .andExpect(status().is(403)); assertEquals(1, clanMembershipRepository.count()); } @Test @SneakyThrows public void cannotKickAsMember() { String accessToken = createUserAndGetAccessToken("Dragonfire", "foo"); Player bob = createPlayer("Bob"); Clan clan = new Clan().setLeader(bob).setTag("123").setName("abcClanName"); ClanMembership myMembership = new ClanMembership().setPlayer(me).setClan(clan); ClanMembership bobsMembership = new ClanMembership().setPlayer(bob).setClan(clan); clan.setMemberships(Arrays.asList(myMembership, bobsMembership)); clanRepository.save(clan); assertEquals(2, clanMembershipRepository.count()); this.mvc.perform(delete("/data/clan_membership/" + bobsMembership.getId()) .header("Authorization", accessToken)) .andExpect(content().string("{\"errors\":[\"ForbiddenAccessException\"]}")) .andExpect(status().is(403)); assertEquals(2, clanMembershipRepository.count()); } @Test @SneakyThrows public void canKickMember() { String accessToken = createUserAndGetAccessToken("Dragonfire", "foo"); Player bob = createPlayer("Bob"); Clan clan = new Clan().setLeader(me).setTag("123").setName("abcClanName"); ClanMembership myMembership = new ClanMembership().setPlayer(me).setClan(clan); ClanMembership bobsMembership = new ClanMembership().setPlayer(bob).setClan(clan); clan.setMemberships(Arrays.asList(myMembership, bobsMembership)); clanRepository.save(clan); 
assertEquals(2, clanMembershipRepository.count()); this.mvc.perform(delete("/data/clan_membership/" + bobsMembership.getId()) .header("Authorization", accessToken)) .andExpect(status().is(204)); assertEquals(1, clanMembershipRepository.count()); } @Test @SneakyThrows public void getFilteredPlayerForClanInvite() { String[] players = new String[]{"Dragonfire", "DRAGON", "Fire of Dragon", "d r a g o n", "firedragon"}; Arrays.stream(players).forEach(name -> createPlayer(name)); assertEquals(players.length, playerRepository.count()); ResultActions action = this.mvc.perform(get("/data/player?filter=lowerCaseLogin==dragon*&sort=lowerCaseLogin")); JsonNode node = objectMapper.readTree(action.andReturn().getResponse().getContentAsString()); assertEquals(2, node.get("data").size()); assertEquals(players[1], node.get("data").get(0).get("attributes").get("login").asText()); assertEquals(players[0], node.get("data").get(1).get("attributes").get("login").asText()); action.andExpect(status().isOk()); } @Test @SneakyThrows public void transferLeadership() { String accessToken = createUserAndGetAccessToken("Leader", "foo"); Player bob = createPlayer("Bob"); Clan clan = new Clan().setLeader(me).setTag("123").setName("abcClanName"); ClanMembership myMembership = new ClanMembership().setPlayer(me).setClan(clan); ClanMembership bobsMembership = new ClanMembership().setPlayer(bob).setClan(clan); clan.setMemberships(Arrays.asList(myMembership, bobsMembership)); clanRepository.save(clan); ObjectNode node = this.objectMapper.createObjectNode(); ObjectNode data = this.objectMapper.createObjectNode(); ObjectNode relationships = this.objectMapper.createObjectNode(); ObjectNode leaderData = this.objectMapper.createObjectNode(); ObjectNode leader = this.objectMapper.createObjectNode(); node.put("data", data); data.put("id", clan.getId()); data.put("type", "clan"); data.put("relationships", relationships); relationships.put("leader", leaderData); leaderData.put("data", leader); leader.put("id", bob.getId()); leader.put("type", "player"); String dataString = node.toString(); clan = clanRepository.findOne(clan.getId()); assertEquals(me.getId(), clan.getLeader().getId()); ResultActions action = this.mvc.perform(patch("/data/clan/" + clan.getId()) .content(dataString) .header("Authorization", accessToken)); action.andExpect(content().string("")) .andExpect(status().is(204)); clan = clanRepository.findOne(clan.getId()); assertEquals(bob.getId(), clan.getLeader().getId()); } }
src/test/java/com/faforever/api/data/JsonApiClanTest.java
package com.faforever.api.data; import com.faforever.api.clan.ClanMembershipRepository; import com.faforever.api.clan.ClanRepository; import com.faforever.api.client.ClientType; import com.faforever.api.client.OAuthClient; import com.faforever.api.client.OAuthClientRepository; import com.faforever.api.data.domain.Clan; import com.faforever.api.data.domain.ClanMembership; import com.faforever.api.data.domain.Player; import com.faforever.api.data.domain.User; import com.faforever.api.player.PlayerRepository; import com.faforever.api.user.UserRepository; import lombok.SneakyThrows; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.security.authentication.encoding.ShaPasswordEncoder; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.ResultActions; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.util.Base64Utils; import org.springframework.web.context.WebApplicationContext; import javax.inject.Inject; import javax.servlet.Filter; import java.util.Arrays; import java.util.Collections; import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @RunWith(SpringRunner.class) @SpringBootTest public class JsonApiClanTest { private MockMvc mvc; private WebApplicationContext context; private Filter springSecurityFilterChain; private ClanRepository clanRepository; private UserRepository userRepository; private ClanMembershipRepository clanMembershipRepository; private PlayerRepository playerRepository; private OAuthClientRepository oAuthClientRepository; private ObjectMapper objectMapper; private ShaPasswordEncoder shaPasswordEncoder; private Player me; private static final String OAUTH_CLIENT_ID = "1234"; private static final String OAUTH_SECRET = "secret"; public JsonApiClanTest() { objectMapper = new ObjectMapper(); shaPasswordEncoder = new ShaPasswordEncoder(256); } @Inject public void init(WebApplicationContext context, ClanRepository clanRepository, UserRepository userRepository, PlayerRepository playerRepository, OAuthClientRepository oAuthClientRepository, Filter springSecurityFilterChain, ClanMembershipRepository clanMembershipRepository) { this.context = context; this.clanRepository = clanRepository; this.userRepository = userRepository; this.playerRepository = playerRepository; this.oAuthClientRepository = oAuthClientRepository; this.springSecurityFilterChain = springSecurityFilterChain; this.clanMembershipRepository = clanMembershipRepository; } @Before public void setUp() { mvc = MockMvcBuilders .webAppContextSetup(context) .addFilter(springSecurityFilterChain) .build(); me = null; } @After public void tearDown() { clanMembershipRepository.deleteAll(); clanRepository.deleteAll(); userRepository.deleteAll(); oAuthClientRepository.deleteAll(); assertEquals(0, clanMembershipRepository.count()); 
assertEquals(0, clanRepository.count()); assertEquals(0, userRepository.count()); assertEquals(0, oAuthClientRepository.count()); } @SneakyThrows public String createUserAndGetAccessToken(String login, String password) { OAuthClient client = new OAuthClient() .setId(OAUTH_CLIENT_ID) .setName("test") .setClientSecret(OAUTH_SECRET) .setRedirectUris("test") .setDefaultRedirectUri("test") .setDefaultScope("test") .setClientType(ClientType.PUBLIC); oAuthClientRepository.save(client); User user = (User) new User() .setPassword(shaPasswordEncoder.encodePassword(password, null)) .setLogin(login) .setEMail(login + "@faforever.com"); userRepository.save(user); me = playerRepository.findOne(user.getId()); String authorization = "Basic " + new String(Base64Utils.encode((OAUTH_CLIENT_ID + ":" + OAUTH_SECRET).getBytes())); ResultActions auth = mvc .perform( post("/oauth/token") .header("Authorization", authorization) .param("username", login) .param("password", password) .param("grant_type", "password")); auth.andExpect(status().isOk()); JsonNode node = objectMapper.readTree(auth.andReturn().getResponse().getContentAsString()); return "Bearer " + node.get("access_token").asText(); } private Player createPlayer(String login) { User user = (User) new User() .setPassword("foo") .setLogin(login) .setEMail(login + "@faforever.com"); userRepository.save(user); return playerRepository.findOne(user.getId()); } @Test @SneakyThrows public void cannotKickLeaderFromClan() { String accessToken = createUserAndGetAccessToken("Dragonfire", "foo"); Clan clan = new Clan().setLeader(me).setTag("123").setName("abcClanName"); ClanMembership membership = new ClanMembership().setPlayer(me).setClan(clan); clan.setMemberships(Collections.singletonList(membership)); clanRepository.save(clan); assertEquals(1, clanMembershipRepository.count()); this.mvc.perform(delete("/data/clan_membership/" + membership.getId()) .header("Authorization", accessToken)) .andExpect(content().string("{\"errors\":[\"ForbiddenAccessException\"]}")) .andExpect(status().is(403)); assertEquals(1, clanMembershipRepository.count()); } @Test @SneakyThrows public void cannotKickAsMember() { String accessToken = createUserAndGetAccessToken("Dragonfire", "foo"); Player bob = createPlayer("Bob"); Clan clan = new Clan().setLeader(bob).setTag("123").setName("abcClanName"); ClanMembership myMembership = new ClanMembership().setPlayer(me).setClan(clan); ClanMembership bobsMembership = new ClanMembership().setPlayer(bob).setClan(clan); clan.setMemberships(Arrays.asList(myMembership, bobsMembership)); clanRepository.save(clan); assertEquals(2, clanMembershipRepository.count()); this.mvc.perform(delete("/data/clan_membership/" + bobsMembership.getId()) .header("Authorization", accessToken)) .andExpect(content().string("{\"errors\":[\"ForbiddenAccessException\"]}")) .andExpect(status().is(403)); assertEquals(2, clanMembershipRepository.count()); } @Test @SneakyThrows public void canKickMember() { String accessToken = createUserAndGetAccessToken("Dragonfire", "foo"); Player bob = createPlayer("Bob"); Clan clan = new Clan().setLeader(me).setTag("123").setName("abcClanName"); ClanMembership myMembership = new ClanMembership().setPlayer(me).setClan(clan); ClanMembership bobsMembership = new ClanMembership().setPlayer(bob).setClan(clan); clan.setMemberships(Arrays.asList(myMembership, bobsMembership)); clanRepository.save(clan); assertEquals(2, clanMembershipRepository.count()); this.mvc.perform(delete("/data/clan_membership/" + bobsMembership.getId()) 
.header("Authorization", accessToken)) .andExpect(status().is(204)); assertEquals(1, clanMembershipRepository.count()); } @Test @SneakyThrows public void getFilteredPlayerForClanInvite() { String[] players = new String[]{"Dragonfire", "DRAGON", "Fire of Dragon", "d r a g o n", "firedragon"}; Arrays.stream(players).forEach(name -> createPlayer(name)); assertEquals(players.length, playerRepository.count()); ResultActions action = this.mvc.perform(get("/data/player?filter=lowerCaseLogin==dragon*&sort=lowerCaseLogin")); JsonNode node = objectMapper.readTree(action.andReturn().getResponse().getContentAsString()); assertEquals(2, node.get("data").size()); assertEquals(players[1], node.get("data").get(0).get("attributes").get("login").asText()); assertEquals(players[0], node.get("data").get(1).get("attributes").get("login").asText()); action.andExpect(status().isOk()); } }
Implement transferLeadership Test
src/test/java/com/faforever/api/data/JsonApiClanTest.java
Implement transferLeadership Test
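The transferLeadership test in the commit above assembles the JSON:API patch body node by node with Jackson's ObjectNode. A small helper along the lines of the sketch below is hypothetical and not part of the commit; it reuses only the createObjectNode/put calls the test itself makes, produces the same payload, and documents its shape in one place.

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ObjectNode;

final class ClanLeaderPatchPayload {

    private ClanLeaderPatchPayload() {
    }

    /**
     * Builds the same body the test assembles field by field:
     * {"data": {"id": clanId, "type": "clan",
     *   "relationships": {"leader": {"data": {"id": newLeaderId, "type": "player"}}}}}
     */
    static String payload(ObjectMapper objectMapper, int clanId, int newLeaderId) {
        ObjectNode leader = objectMapper.createObjectNode();
        leader.put("id", newLeaderId);
        leader.put("type", "player");

        ObjectNode leaderData = objectMapper.createObjectNode();
        leaderData.put("data", leader);

        ObjectNode relationships = objectMapper.createObjectNode();
        relationships.put("leader", leaderData);

        ObjectNode data = objectMapper.createObjectNode();
        data.put("id", clanId);
        data.put("type", "clan");
        data.put("relationships", relationships);

        ObjectNode root = objectMapper.createObjectNode();
        root.put("data", data);
        return root.toString();
    }
}

The test could then send it with mvc.perform(patch("/data/clan/" + clan.getId()).content(ClanLeaderPatchPayload.payload(objectMapper, clan.getId(), bob.getId())).header("Authorization", accessToken)).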
Java
mit
cd6a94c41f8babfb684fe792b869f2ec2179b859
0
BrassGoggledCoders/SteamAgeRevolution
package xyz.brassgoggledcoders.steamagerevolution; import me.modmuss50.jsonDestroyer.JsonDestroyer; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.item.Item; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.Mod.Instance; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.event.FMLPostInitializationEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import xyz.brassgoggledcoders.boilerplate.lib.BoilerplateLib; import xyz.brassgoggledcoders.boilerplate.lib.common.BaseCreativeTab; import xyz.brassgoggledcoders.boilerplate.lib.common.IBoilerplateMod; import xyz.brassgoggledcoders.boilerplate.lib.common.utils.ModLogger; import xyz.brassgoggledcoders.steamagerevolution.init.InitItems; @Mod(modid = SteamAgeRevolution.MODID, name = SteamAgeRevolution.MODNAME, version = SteamAgeRevolution.MODVERSION) public class SteamAgeRevolution implements IBoilerplateMod { @Instance("steamagerevolution") public static SteamAgeRevolution instance; public static final String MODID = "steamagerevolution"; public static final String MODNAME = "Steam Age Revolution"; public static final String MODVERSION = "@VERSION@"; public static JsonDestroyer jsonDestroyer = new JsonDestroyer(); public CreativeTabs tab = new SARTab(); public static ModLogger logger; @Mod.EventHandler public void preInit(FMLPreInitializationEvent event) { logger = new ModLogger(MODID); BoilerplateLib.getInstance().preInitStart(event); logger = BoilerplateLib.getLogger(); InitItems.init(); BoilerplateLib.getInstance().preInitEnd(event); } @Mod.EventHandler public void init(FMLInitializationEvent event) { BoilerplateLib.getInstance().init(event); jsonDestroyer.load(); } @Mod.EventHandler public void postInit(FMLPostInitializationEvent event) { BoilerplateLib.getInstance().postInit(event); } @Override public Object getInstance() { return instance; } @Override public CreativeTabs getCreativeTab() { return tab; } @Override public String getID() { return MODID; } @Override public String getName() { return MODNAME; } @Override public String getVersion() { return MODVERSION; } @Override public String getPrefix() { return MODID + ":"; } public class SARTab extends BaseCreativeTab { public SARTab() { super(SteamAgeRevolution.MODID + "_tab"); } @Override public boolean hasSearchBar() { return true; } @Override public Item getTabIconItem() { return InitItems.ITEM_PLATES; } } }
src/main/java/xyz/brassgoggledcoders/steamagerevolution/SteamAgeRevolution.java
package xyz.brassgoggledcoders.steamagerevolution; import me.modmuss50.jsonDestroyer.JsonDestroyer; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.item.Item; import net.minecraftforge.common.config.Configuration; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.Mod.Instance; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.event.FMLPostInitializationEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import xyz.brassgoggledcoders.boilerplate.lib.BoilerplateLib; import xyz.brassgoggledcoders.boilerplate.lib.common.BaseCreativeTab; import xyz.brassgoggledcoders.boilerplate.lib.common.IBoilerplateMod; import xyz.brassgoggledcoders.boilerplate.lib.common.utils.ModLogger; import xyz.brassgoggledcoders.steamagerevolution.init.InitItems; @Mod(modid = SteamAgeRevolution.MODID, name = SteamAgeRevolution.MODNAME, version = SteamAgeRevolution.MODVERSION) public class SteamAgeRevolution implements IBoilerplateMod { @Instance("steamagerevolution") public static SteamAgeRevolution instance; public static final String MODID = "steamagerevolution"; public static final String MODNAME = "Steam Age Revolution"; public static final String MODVERSION = "@VERSION@"; public static JsonDestroyer jsonDestroyer = new JsonDestroyer(); public CreativeTabs tab = new SARTab(); public static ModLogger logger; @Mod.EventHandler public void preInit(FMLPreInitializationEvent event) { logger = new ModLogger(MODID); BoilerplateLib.getInstance().preInitStart(event); logger = BoilerplateLib.getLogger(); InitItems.init(); BoilerplateLib.getInstance().preInitEnd(event); } @Mod.EventHandler public void init(FMLInitializationEvent event) { BoilerplateLib.getInstance().init(event); jsonDestroyer.load(); } @Mod.EventHandler public void postInit(FMLPostInitializationEvent event) { BoilerplateLib.getInstance().postInit(event); } @Override public Object getInstance() { return instance; } @Override public CreativeTabs getCreativeTab() { return tab; } @Override public String getID() { return MODID; } @Override public String getName() { return MODNAME; } @Override public String getVersion() { return MODVERSION; } @Override public String getPrefix() { return MODID + ":"; } @Override public ModLogger getLogger() { return logger; } @Override public Configuration getConfig() { return null; } @Override public String getClientProxyPath() { return null; } @Override public String getCommonProxyPath() { return null; } public class SARTab extends BaseCreativeTab { public SARTab() { super(SteamAgeRevolution.MODID + "_tab"); } @Override public boolean hasSearchBar() { return true; } @Override public Item getTabIconItem() { return InitItems.ITEM_PLATES; } } }
Fix master
src/main/java/xyz/brassgoggledcoders/steamagerevolution/SteamAgeRevolution.java
Fix master
Java
mit
c24f0f638b7917a9a1c904d14fa70db469022faf
0
ria-ee/X-Road,vrk-kpa/X-Road,vrk-kpa/X-Road,ria-ee/X-Road,vrk-kpa/X-Road,ria-ee/X-Road,ria-ee/X-Road,vrk-kpa/X-Road,vrk-kpa/X-Road,ria-ee/X-Road,vrk-kpa/X-Road,vrk-kpa/X-Road,vrk-kpa/X-Road,ria-ee/X-Road,ria-ee/X-Road,ria-ee/X-Road
/** * The MIT License * Copyright (c) 2015 Estonian Information System Authority (RIA), Population Register Centre (VRK) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package ee.ria.xroad.proxy.messagelog; import akka.actor.UntypedActor; import ee.ria.xroad.common.CodedException; import ee.ria.xroad.common.ErrorCodes; import ee.ria.xroad.common.messagelog.LogRecord; import ee.ria.xroad.common.messagelog.MessageLogProperties; import ee.ria.xroad.common.messagelog.MessageRecord; import ee.ria.xroad.common.messagelog.TimestampRecord; import ee.ria.xroad.common.messagelog.archive.DigestEntry; import ee.ria.xroad.common.messagelog.archive.LogArchiveBase; import ee.ria.xroad.common.messagelog.archive.LogArchiveWriter; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.apache.commons.io.output.NullOutputStream; import org.hibernate.Criteria; import org.hibernate.Session; import org.hibernate.criterion.Order; import org.hibernate.criterion.Projections; import org.hibernate.criterion.Restrictions; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import static ee.ria.xroad.common.messagelog.MessageLogProperties.getArchiveTransferCommand; import static ee.ria.xroad.proxy.messagelog.MessageLogDatabaseCtx.doInTransaction; import static org.apache.commons.lang3.StringUtils.isBlank; /** * Reads all non-archived time-stamped records from the database, writes them * to archive file and marks the records as archived. 
*/ @Slf4j @RequiredArgsConstructor public class LogArchiver extends UntypedActor { private static final int MAX_RECORDS_IN_ARCHIVE = 10; private static final int MAX_RECORDS_IN_PATCHS = 360; public static final String START_ARCHIVING = "doArchive"; private final Path archivePath; private final Path workingPath; private boolean safeTransactionBatch; @Override public void onReceive(Object message) throws Exception { log.trace("onReceive({})", message); if (START_ARCHIVING.equals(message)) { try { long maxTimestampId = doInTransaction(session -> getMaxTimestampId(session)); log.info("Here is max Timestamp id: {} - what is it?", maxTimestampId); while (handleArchive(maxTimestampId)) { } } catch (Exception ex) { log.error("Failed to archive log records", ex); } } else { unhandled(message); } } private boolean handleArchive(long maxTimestampId) throws Exception { return doInTransaction(session -> { List<LogRecord> records = getRecordsToBeArchived(session, maxTimestampId); if (records == null || records.isEmpty()) { log.info("No records to be archived at this time"); return false; } log.info("Archiving log records..."); long start = System.currentTimeMillis(); int recordsArchived = 0; try (LogArchiveWriter archiveWriter = createLogArchiveWriter(session)) { while (!records.isEmpty()) { archive(archiveWriter, records); runTransferCommand(getArchiveTransferCommand()); recordsArchived += records.size(); //flush changes (records marked as archived) and free memory //used up by cached records retrieved previously in the session session.flush(); session.clear(); if (safeTransactionBatch && recordsArchived >= MessageLogProperties.getArchiveTransactionBatchSize()) { log.info("Archived {} log records in {} ms", recordsArchived, System.currentTimeMillis() - start); return true; } records = getRecordsToBeArchived(session, maxTimestampId); } } catch (Exception e) { throw new CodedException(ErrorCodes.X_INTERNAL_ERROR, e); } log.info("Archived {} log records in {} ms", recordsArchived, System.currentTimeMillis() - start); return false; }); } private void archive(LogArchiveWriter archiveWriter, List<LogRecord> records) throws Exception { for (LogRecord record : records) { archiveWriter.write(record); } } private LogArchiveWriter createLogArchiveWriter(Session session) { return new LogArchiveWriter( getArchivePath(), getWorkingPath(), this.new HibernateLogArchiveBase(session) ); } private Path getArchivePath() { if (!Files.isDirectory(archivePath)) { throw new RuntimeException( "Log output path (" + archivePath + ") must be directory"); } if (!Files.isWritable(archivePath)) { throw new RuntimeException( "Log output path (" + archivePath + ") must be writable"); } return archivePath; } private Path getWorkingPath() { if (!Files.isDirectory(workingPath)) { throw new RuntimeException( "Log working path (" + workingPath + ") must be directory"); } if (!Files.isWritable(workingPath)) { throw new RuntimeException( "Log working path (" + workingPath + ") must be writable"); } return workingPath; } protected List<LogRecord> getRecordsToBeArchived(Session session, long maxTimestampId) { List<LogRecord> recordsToArchive = new ArrayList<>(); safeTransactionBatch = false; int allowedInArchiveCount = MAX_RECORDS_IN_ARCHIVE; for (TimestampRecord ts : getNonArchivedTimestampRecords(session, MAX_RECORDS_IN_PATCHS, maxTimestampId)) { List<MessageRecord> messages = getNonArchivedMessageRecords(session, ts.getId(), allowedInArchiveCount); if (allTimestampMessagesArchived(session, ts.getId())) { log.trace("Timestamp record #{} will 
be archived", ts.getId()); recordsToArchive.add(ts); safeTransactionBatch = true; } else { log.trace("Timestamp record #{} still related to" + " non-archived message records", ts.getId()); } recordsToArchive.addAll(messages); allowedInArchiveCount -= messages.size(); if (safeTransactionBatch || allowedInArchiveCount <= 0) { break; } } return recordsToArchive; } @SuppressWarnings("unchecked") protected List<TimestampRecord> getNonArchivedTimestampRecords( Session session, int maxRecordsToGet, long maxTimestampId) { Criteria criteria = session.createCriteria(TimestampRecord.class); criteria.add(Restrictions.eq("archived", false)); criteria.add(Restrictions.le("id", maxTimestampId)); criteria.setMaxResults(maxRecordsToGet); criteria.addOrder(Order.asc("id")); return criteria.list(); } @SuppressWarnings("unchecked") protected long getMaxTimestampId(Session session) { return (long) session .createCriteria(TimestampRecord.class) .add(Restrictions.eq("archived", false)) .setProjection(Projections.max("id")) .uniqueResult(); } @SuppressWarnings("unchecked") protected List<MessageRecord> getNonArchivedMessageRecords(Session session, Long timestampRecordNumber, int maxRecordsToGet) { return session .createCriteria(MessageRecord.class) .add(Restrictions.eq("archived", false)) .add(Restrictions.eq("timestampRecord.id", timestampRecordNumber)) .setMaxResults(maxRecordsToGet) .list(); } protected boolean allTimestampMessagesArchived(Session session, Long timestampRecordNumber) { Long result = (Long) session .createCriteria(MessageRecord.class) .add(Restrictions.eq("archived", false)) .add(Restrictions.eq("timestampRecord.id", timestampRecordNumber)) .setProjection(Projections.rowCount()) .uniqueResult(); return result == 0; } protected void markArchiveCreated(final DigestEntry lastArchive, final Session session) throws Exception { if (lastArchive != null) { log.debug("Digest entry will be saved here..."); session.createQuery( "delete from " + DigestEntry.class.getName() ) .executeUpdate(); session.save(lastArchive); } } private static void runTransferCommand(String transferCommand) { if (isBlank(transferCommand)) { return; } log.info("Transferring archives with shell command: \t{}", transferCommand); try { String[] command = new String[] {"/bin/bash", "-c", transferCommand}; Process process = new ProcessBuilder(command).start(); StandardErrorCollector standardErrorCollector = new StandardErrorCollector(process); new StandardOutputReader(process).start(); standardErrorCollector.start(); standardErrorCollector.join(); process.waitFor(); int exitCode = process.exitValue(); if (exitCode != 0) { String errorMsg = String.format( "Running archive transfer command '%s' " + "exited with status '%d'", transferCommand, exitCode); log.error( "{}\n -- STANDARD ERROR START\n{}\n" + " -- STANDARD ERROR END", errorMsg, standardErrorCollector.getStandardError()); } } catch (Exception e) { log.error( "Failed to execute archive transfer command '{}'", transferCommand, e); } } @Value private class HibernateLogArchiveBase implements LogArchiveBase { private Session session; @Override public void markArchiveCreated(DigestEntry lastArchive) throws Exception { LogArchiver.this.markArchiveCreated(lastArchive, session); } @Override public void markRecordArchived(LogRecord logRecord) throws Exception { log.trace("Setting {} #{} archived", logRecord.getClass().getName(), logRecord.getId()); logRecord.setArchived(true); session.saveOrUpdate(logRecord); } @Override @SuppressWarnings("unchecked") public DigestEntry loadLastArchive() 
throws Exception { List<DigestEntry> lastArchiveEntries = session .createQuery( "select new " + DigestEntry.class.getName() + "(d.digest, d.fileName) from DigestEntry d" ) .setMaxResults(1) .list(); return lastArchiveEntries.isEmpty() ? DigestEntry.empty() : lastArchiveEntries.get(0); } } @RequiredArgsConstructor private static class StandardOutputReader extends Thread { private final Process process; @Override public void run() { try (InputStream input = process.getInputStream()) { IOUtils.copy(input, new NullOutputStream()); } catch (IOException e) { // We can ignore it. log.error("Could not read standard output", e); } } } @RequiredArgsConstructor private static class StandardErrorCollector extends Thread { private final Process process; @Getter private String standardError; @Override public void run() { try (InputStream error = process.getErrorStream()) { standardError = IOUtils.toString(error, StandardCharsets.UTF_8); } catch (IOException e) { // We can ignore it. log.error("Could not read standard error", e); } } } }
src/addons/messagelog/src/main/java/ee/ria/xroad/proxy/messagelog/LogArchiver.java
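One detail of the batching code introduced above: getMaxTimestampId unboxes the result of a max() projection, which Hibernate returns as null when every TimestampRecord is already archived, so such a run would throw a NullPointerException instead of simply finding nothing to archive. The variant below is only an illustrative null-safe sketch, not part of the commit, and relies on the same Session/Restrictions/Projections imports already present in that class.

    // Illustrative null-safe variant of getMaxTimestampId; not part of the commit above.
    // Projections.max("id") comes back as null from uniqueResult() when no
    // non-archived TimestampRecord exists, and unboxing that null throws a
    // NullPointerException in the version shown above.
    protected long getMaxTimestampId(Session session) {
        Long maxId = (Long) session
                .createCriteria(TimestampRecord.class)
                .add(Restrictions.eq("archived", false))
                .setProjection(Projections.max("id"))
                .uniqueResult();
        // With 0 as the fallback, getNonArchivedTimestampRecords(le("id", 0)) simply finds nothing.
        return maxId == null ? 0L : maxId;
    }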
/** * The MIT License * Copyright (c) 2015 Estonian Information System Authority (RIA), Population Register Centre (VRK) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package ee.ria.xroad.proxy.messagelog; import akka.actor.UntypedActor; import ee.ria.xroad.common.CodedException; import ee.ria.xroad.common.ErrorCodes; import ee.ria.xroad.common.messagelog.LogRecord; import ee.ria.xroad.common.messagelog.MessageLogProperties; import ee.ria.xroad.common.messagelog.MessageRecord; import ee.ria.xroad.common.messagelog.TimestampRecord; import ee.ria.xroad.common.messagelog.archive.DigestEntry; import ee.ria.xroad.common.messagelog.archive.LogArchiveBase; import ee.ria.xroad.common.messagelog.archive.LogArchiveWriter; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.apache.commons.io.output.NullOutputStream; import org.hibernate.Criteria; import org.hibernate.Session; import org.hibernate.criterion.Order; import org.hibernate.criterion.Projections; import org.hibernate.criterion.Restrictions; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import static ee.ria.xroad.common.messagelog.MessageLogProperties.getArchiveTransferCommand; import static ee.ria.xroad.proxy.messagelog.MessageLogDatabaseCtx.doInTransaction; import static org.apache.commons.lang3.StringUtils.isBlank; /** * Reads all non-archived time-stamped records from the database, writes them * to archive file and marks the records as archived. 
*/ @Slf4j @RequiredArgsConstructor public class LogArchiver extends UntypedActor { private static final int MAX_RECORDS_IN_ARCHIVE = 10; private static final int MAX_RECORDS_IN_PATCHS = 360; public static final String START_ARCHIVING = "doArchive"; private final Path archivePath; private final Path workingPath; private boolean safeTransactionBatch; @Override public void onReceive(Object message) throws Exception { log.trace("onReceive({})", message); if (START_ARCHIVING.equals(message)) { try { long maxTimestampId = doInTransaction(session -> getMaxTimestampId(session)); while (handleArchive(maxTimestampId)) { } } catch (Exception ex) { log.error("Failed to archive log records", ex); } } else { unhandled(message); } } private boolean handleArchive(long maxTimestampId) throws Exception { return doInTransaction(session -> { List<LogRecord> records = getRecordsToBeArchived(session, maxTimestampId); if (records == null || records.isEmpty()) { log.info("No records to be archived at this time"); return false; } log.info("Archiving log records..."); long start = System.currentTimeMillis(); int recordsArchived = 0; try (LogArchiveWriter archiveWriter = createLogArchiveWriter(session)) { while (!records.isEmpty()) { archive(archiveWriter, records); runTransferCommand(getArchiveTransferCommand()); recordsArchived += records.size(); //flush changes (records marked as archived) and free memory //used up by cached records retrieved previously in the session session.flush(); session.clear(); if (safeTransactionBatch && recordsArchived >= MessageLogProperties.getArchiveTransactionBatchSize()) { log.info("Archived {} log records in {} ms", recordsArchived, System.currentTimeMillis() - start); return true; } records = getRecordsToBeArchived(session, maxTimestampId); } } catch (Exception e) { throw new CodedException(ErrorCodes.X_INTERNAL_ERROR, e); } log.info("Archived {} log records in {} ms", recordsArchived, System.currentTimeMillis() - start); return false; }); } private void archive(LogArchiveWriter archiveWriter, List<LogRecord> records) throws Exception { for (LogRecord record : records) { archiveWriter.write(record); } } private LogArchiveWriter createLogArchiveWriter(Session session) { return new LogArchiveWriter( getArchivePath(), getWorkingPath(), this.new HibernateLogArchiveBase(session) ); } private Path getArchivePath() { if (!Files.isDirectory(archivePath)) { throw new RuntimeException( "Log output path (" + archivePath + ") must be directory"); } if (!Files.isWritable(archivePath)) { throw new RuntimeException( "Log output path (" + archivePath + ") must be writable"); } return archivePath; } private Path getWorkingPath() { if (!Files.isDirectory(workingPath)) { throw new RuntimeException( "Log working path (" + workingPath + ") must be directory"); } if (!Files.isWritable(workingPath)) { throw new RuntimeException( "Log working path (" + workingPath + ") must be writable"); } return workingPath; } protected List<LogRecord> getRecordsToBeArchived(Session session, long maxTimestampId) { List<LogRecord> recordsToArchive = new ArrayList<>(); safeTransactionBatch = false; int allowedInArchiveCount = MAX_RECORDS_IN_ARCHIVE; for (TimestampRecord ts : getNonArchivedTimestampRecords(session, MAX_RECORDS_IN_PATCHS, maxTimestampId)) { List<MessageRecord> messages = getNonArchivedMessageRecords(session, ts.getId(), allowedInArchiveCount); if (allTimestampMessagesArchived(session, ts.getId())) { log.trace("Timestamp record #{} will be archived", ts.getId()); recordsToArchive.add(ts); 
                safeTransactionBatch = true;
            } else {
                log.trace("Timestamp record #{} still related to non-archived message records", ts.getId());
            }

            recordsToArchive.addAll(messages);
            allowedInArchiveCount -= messages.size();

            if (safeTransactionBatch || allowedInArchiveCount <= 0) {
                break;
            }
        }

        return recordsToArchive;
    }

    @SuppressWarnings("unchecked")
    protected List<TimestampRecord> getNonArchivedTimestampRecords(
            Session session, int maxRecordsToGet, long maxTimestampId) {
        Criteria criteria = session.createCriteria(TimestampRecord.class);
        criteria.add(Restrictions.eq("archived", false));
        criteria.add(Restrictions.le("id", maxTimestampId));
        criteria.setMaxResults(maxRecordsToGet);
        criteria.addOrder(Order.asc("id"));

        return criteria.list();
    }

    protected long getMaxTimestampId(Session session) {
        Long maxId = (Long) session
                .createCriteria(TimestampRecord.class)
                .add(Restrictions.eq("archived", false))
                .setProjection(Projections.max("id"))
                .uniqueResult();

        // uniqueResult() returns null when no non-archived timestamp records exist;
        // return 0 so the caller simply finds nothing to archive instead of failing.
        return maxId != null ? maxId : 0L;
    }

    @SuppressWarnings("unchecked")
    protected List<MessageRecord> getNonArchivedMessageRecords(Session session,
            Long timestampRecordNumber, int maxRecordsToGet) {
        return session
                .createCriteria(MessageRecord.class)
                .add(Restrictions.eq("archived", false))
                .add(Restrictions.eq("timestampRecord.id", timestampRecordNumber))
                .setMaxResults(maxRecordsToGet)
                .list();
    }

    protected boolean allTimestampMessagesArchived(Session session,
            Long timestampRecordNumber) {
        Long result = (Long) session
                .createCriteria(MessageRecord.class)
                .add(Restrictions.eq("archived", false))
                .add(Restrictions.eq("timestampRecord.id", timestampRecordNumber))
                .setProjection(Projections.rowCount())
                .uniqueResult();

        return result == 0;
    }

    protected void markArchiveCreated(final DigestEntry lastArchive,
            final Session session) throws Exception {
        if (lastArchive != null) {
            log.debug("Digest entry will be saved here...");

            session.createQuery("delete from " + DigestEntry.class.getName())
                    .executeUpdate();
            session.save(lastArchive);
        }
    }

    private static void runTransferCommand(String transferCommand) {
        if (isBlank(transferCommand)) {
            return;
        }

        log.info("Transferring archives with shell command: \t{}", transferCommand);

        try {
            String[] command = new String[] {"/bin/bash", "-c", transferCommand};

            Process process = new ProcessBuilder(command).start();

            StandardErrorCollector standardErrorCollector = new StandardErrorCollector(process);

            new StandardOutputReader(process).start();
            standardErrorCollector.start();

            standardErrorCollector.join();
            process.waitFor();

            int exitCode = process.exitValue();

            if (exitCode != 0) {
                String errorMsg = String.format(
                        "Running archive transfer command '%s' exited with status '%d'",
                        transferCommand, exitCode);

                log.error("{}\n -- STANDARD ERROR START\n{}\n -- STANDARD ERROR END",
                        errorMsg, standardErrorCollector.getStandardError());
            }
        } catch (Exception e) {
            log.error("Failed to execute archive transfer command '{}'", transferCommand, e);
        }
    }

    @Value
    private class HibernateLogArchiveBase implements LogArchiveBase {

        private Session session;

        @Override
        public void markArchiveCreated(DigestEntry lastArchive) throws Exception {
            LogArchiver.this.markArchiveCreated(lastArchive, session);
        }

        @Override
        public void markRecordArchived(LogRecord logRecord) throws Exception {
            log.trace("Setting {} #{} archived",
                    logRecord.getClass().getName(), logRecord.getId());

            logRecord.setArchived(true);
            session.saveOrUpdate(logRecord);
        }

        @Override
        @SuppressWarnings("unchecked")
        public DigestEntry loadLastArchive() throws Exception {
            List<DigestEntry> lastArchiveEntries = session
                    .createQuery("select new " + DigestEntry.class.getName()
                            + "(d.digest, d.fileName) from DigestEntry d")
                    .setMaxResults(1)
                    .list();

            return lastArchiveEntries.isEmpty()
                    ? DigestEntry.empty() : lastArchiveEntries.get(0);
        }
    }

    @RequiredArgsConstructor
    private static class StandardOutputReader extends Thread {
        private final Process process;

        @Override
        public void run() {
            try (InputStream input = process.getInputStream()) {
                // Drain standard output so the child process cannot block on a full pipe.
                IOUtils.copy(input, new NullOutputStream());
            } catch (IOException e) {
                // Not fatal for the transfer command; just record the failure.
                log.error("Could not read standard output", e);
            }
        }
    }

    @RequiredArgsConstructor
    private static class StandardErrorCollector extends Thread {
        private final Process process;

        @Getter
        private String standardError;

        @Override
        public void run() {
            try (InputStream error = process.getErrorStream()) {
                standardError = IOUtils.toString(error, StandardCharsets.UTF_8);
            } catch (IOException e) {
                // Not fatal for the transfer command; just record the failure.
                log.error("Could not read standard error", e);
            }
        }
    }
}
testing logging
src/addons/messagelog/src/main/java/ee/ria/xroad/proxy/messagelog/LogArchiver.java
testing logging