lang
stringclasses 2
values | license
stringclasses 13
values | stderr
stringlengths 0
343
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 6
87.7k
| new_contents
stringlengths 0
6.23M
| new_file
stringlengths 3
311
| old_contents
stringlengths 0
6.23M
| message
stringlengths 6
9.1k
| old_file
stringlengths 3
311
| subject
stringlengths 0
4k
| git_diff
stringlengths 0
6.31M
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
epl-1.0
|
46c70c875507b623bc5d3dec1058b98e4209ae0f
| 0 |
jcryptool/core,jcryptool/core,jcryptool/core,jcryptool/core,jcryptool/core
|
package org.jcryptool.core.help;
import java.util.Optional;
import org.eclipse.core.runtime.Platform;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.ResourceService;
import org.eclipse.jetty.server.handler.AbstractHandler;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.eclipse.core.runtime.PlatformObject;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.util.resource.PathResource;
import org.eclipse.jetty.util.resource.Resource;
public class JCTJS_Server {
private static Optional<JCTJS_Server> instance = Optional.empty();
public static JCTJS_Server getInstance() {
if(instance.isEmpty()) {
instance = Optional.of(createAndTryStartServer());
return getInstance();
}
return instance.get();
}
/**
* creates a server instance and tries to start it. Returns the server instance.
* users should check if isServing is true TODO: implement (not implemented yet)
*
* @return
*/
private static JCTJS_Server createAndTryStartServer() {
int open_port = get_open_port();
JCTJS_Server server = new JCTJS_Server(open_port);
try {
server.start();
} catch (Exception e) {
e.printStackTrace();
}
return server;
}
private static int get_open_port() {
try (ServerSocket socket = new ServerSocket(0);) {
int openport = socket.getLocalPort();
socket.close();
return openport;
} catch (IOException e) {
e.printStackTrace();
// if something fails, try port 31339. Guaranteed to work (R).
return 31339;
}
}
private int port;
private Server server;
public final int helpsystemPort;
public int getPort() {
return this.port;
}
public static URL getRootURL() {
return HelpPlugin.getInstance().getBundle().getEntry("/");
}
public String makeUrlStringFor(String projectRelativePath) {
return String.format("http://127.0.0.1:%s/%s", getPort(), projectRelativePath);
}
public String makeHelpsystemUrlStringFor(String relpath) {
return String.format("http://127.0.0.1:%s/%s", this.helpsystemPort, relpath);
}
public JCTJS_Server(int open_port) {
this.port = open_port;
// try {
// org.eclipse.help.internal.server.WebappManager.start(webappName);
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
this.helpsystemPort = org.eclipse.help.internal.server.WebappManager.getPort();
if(this.helpsystemPort == -1) {
throw new RuntimeException("helpsystem not started yet!");
}
URL rootResource = getRootURL();
// System.out.println("Server root resource: " + rootResource);
// System.out.println("Server root resource: classloader: " + HelpPlugin.getInstance().getClass().getClassLoader());
// System.out.println("Server root resource: classloader type: " + HelpPlugin.getInstance().getClass().getClassLoader().getClass().toString());
Resource baseResource = Resource.newResource(rootResource);
Server server = new Server(port);
ResourceHandler resourceHandler = new ResourceHandler(new ResourceService());
resourceHandler.setDirAllowed(true);
resourceHandler.setDirectoriesListed(true);
resourceHandler.setWelcomeFiles(new String[]{"index.html"});
resourceHandler.setBaseResource(baseResource);
// URL res = Platform.getBundle("org.jcryptool.core.help").getEntry("./javascript/test.txt");
// System.out.println();
HandlerList handlerList = new HandlerList();
handlerList.setHandlers(new Handler[]{resourceHandler, new DefaultHandler()});
server.setHandler(handlerList);
this.server = server;
}
private String slurpUrl(URL res) {
InputStream stream = null;
try {
stream = Resource.newResource(res).getInputStream();
} catch (IOException e1) {
throw new RuntimeException(e1);
}
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
String line;
String result = "";
try {
while((line = reader.readLine()) != null) {
result = result + line + "\n";
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return result;
}
public void start() throws Exception {
this.server.start();
}
}
|
org.jcryptool.core.help/src/org/jcryptool/core/help/JCTJS_Server.java
|
package org.jcryptool.core.help;
import java.util.Optional;
import org.eclipse.core.runtime.Platform;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.ResourceService;
import org.eclipse.jetty.server.handler.AbstractHandler;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.eclipse.core.runtime.PlatformObject;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.util.resource.PathResource;
import org.eclipse.jetty.util.resource.Resource;
public class JCTJS_Server {
private static Optional<JCTJS_Server> instance = Optional.empty();
public static JCTJS_Server getInstance() {
if(instance.isEmpty()) {
instance = Optional.of(createAndTryStartServer());
return getInstance();
}
return instance.get();
}
/**
* creates a server instance and tries to start it. Returns the server instance.
* users should check if isServing is true TODO: implement (not implemented yet)
*
* @return
*/
private static JCTJS_Server createAndTryStartServer() {
int open_port = get_open_port();
JCTJS_Server server = new JCTJS_Server(open_port);
try {
server.start();
} catch (Exception e) {
e.printStackTrace();
}
return server;
}
private static int get_open_port() {
try (ServerSocket socket = new ServerSocket(0);) {
int openport = socket.getLocalPort();
socket.close();
return openport;
} catch (IOException e) {
e.printStackTrace();
// if something fails, try port 31339. Guaranteed to work (R).
return 31339;
}
}
private int port;
private Server server;
public final int helpsystemPort;
public int getPort() {
return this.port;
}
public static URL getRootURL() {
return HelpPlugin.getInstance().getBundle().getEntry("/");
}
public String makeUrlStringFor(String projectRelativePath) {
return String.format("http://127.0.0.1:%s/%s", getPort(), projectRelativePath);
}
public String makeHelpsystemUrlStringFor(String relpath) {
return String.format("http://127.0.0.1:%s/%s", this.helpsystemPort, relpath);
}
public JCTJS_Server(int open_port) {
this.port = open_port;
// try {
// org.eclipse.help.internal.server.WebappManager.start(webappName);
// } catch (Exception e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
this.helpsystemPort = org.eclipse.help.internal.server.WebappManager.getPort();
if(this.helpsystemPort == -1) {
throw new RuntimeException("helpsystem not started yet!");
}
URL rootResource = getRootURL();
System.out.println("Server root resource: " + rootResource);
System.out.println("Server root resource: classloader: " + HelpPlugin.getInstance().getClass().getClassLoader());
System.out.println("Server root resource: classloader type: " + HelpPlugin.getInstance().getClass().getClassLoader().getClass().toString());
Resource baseResource = Resource.newResource(rootResource);
Server server = new Server(port);
ResourceHandler resourceHandler = new ResourceHandler(new ResourceService());
resourceHandler.setDirAllowed(true);
resourceHandler.setDirectoriesListed(true);
resourceHandler.setWelcomeFiles(new String[]{"index.html"});
resourceHandler.setBaseResource(baseResource);
// URL res = Platform.getBundle("org.jcryptool.core.help").getEntry("./javascript/test.txt");
// System.out.println();
HandlerList handlerList = new HandlerList();
handlerList.setHandlers(new Handler[]{resourceHandler, new DefaultHandler()});
server.setHandler(handlerList);
this.server = server;
}
private String slurpUrl(URL res) {
InputStream stream = null;
try {
stream = Resource.newResource(res).getInputStream();
} catch (IOException e1) {
throw new RuntimeException(e1);
}
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
String line;
String result = "";
try {
while((line = reader.readLine()) != null) {
result = result + line + "\n";
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return result;
}
public void start() throws Exception {
this.server.start();
}
}
|
remove stdout debug output
|
org.jcryptool.core.help/src/org/jcryptool/core/help/JCTJS_Server.java
|
remove stdout debug output
|
<ide><path>rg.jcryptool.core.help/src/org/jcryptool/core/help/JCTJS_Server.java
<ide>
<ide> URL rootResource = getRootURL();
<ide>
<del> System.out.println("Server root resource: " + rootResource);
<del> System.out.println("Server root resource: classloader: " + HelpPlugin.getInstance().getClass().getClassLoader());
<del> System.out.println("Server root resource: classloader type: " + HelpPlugin.getInstance().getClass().getClassLoader().getClass().toString());
<add>// System.out.println("Server root resource: " + rootResource);
<add>// System.out.println("Server root resource: classloader: " + HelpPlugin.getInstance().getClass().getClassLoader());
<add>// System.out.println("Server root resource: classloader type: " + HelpPlugin.getInstance().getClass().getClassLoader().getClass().toString());
<ide> Resource baseResource = Resource.newResource(rootResource);
<ide> Server server = new Server(port);
<ide>
|
|
Java
|
apache-2.0
|
09ea7cf7c1154299a52f858b144a1198f3d65d47
| 0 |
leelance/spring-boot-all,leelance/spring-boot-all,leelance/spring-boot-all
|
package com.lance.quartz.web;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import com.alibaba.fastjson.JSON;
import com.lance.quartz.common.exception.ServiceException;
import com.lance.quartz.common.json.ResultInfo;
import com.lance.quartz.domain.TaskInfo;
import com.lance.quartz.service.TaskServiceImpl;
/**
* 任务管理
* @author lance
*/
@Controller
public class TaskManageController {
@Autowired
private TaskServiceImpl taskServiceImpl;
/**
* Index.jsp
* 2016年10月8日下午6:39:15
*/
@RequestMapping(value={"", "/", "index"})
public String info(){
return "index.jsp";
}
/**
* 任务列表
* @return
* 2016年10月9日上午11:36:03
*/
@ResponseBody
@RequestMapping(value="list", method=RequestMethod.POST)
public String list(){
Map<String, Object> map = new HashMap<>();
List<TaskInfo> infos = taskServiceImpl.list();
map.put("rows", infos);
map.put("total", infos.size());
return JSON.toJSONString(map);
}
/**
* 保存定时任务
* @param info
* 2016年10月9日下午1:36:59
*/
@ResponseBody
@RequestMapping(value="save", method=RequestMethod.POST, produces = "application/json; charset=UTF-8")
public String save(TaskInfo info){
try {
if(info.getId() == 0) {
taskServiceImpl.addJob(info);
}else{
taskServiceImpl.edit(info);
}
} catch (ServiceException e) {
return ResultInfo.error(-1, e.getMessage());
}
return ResultInfo.success();
}
/**
* 删除定时任务
* @param jobName
* @param jobGroup
* 2016年10月9日下午1:52:20
*/
@ResponseBody
@RequestMapping(value="delete/{jobName}/{jobGroup}", produces = "application/json; charset=UTF-8")
public String delete(@PathVariable String jobName, @PathVariable String jobGroup){
try {
taskServiceImpl.delete(jobName, jobGroup);
} catch (ServiceException e) {
return ResultInfo.error(-1, e.getMessage());
}
return ResultInfo.success();
}
}
|
spring-boot-quartz/src/main/java/com/lance/quartz/web/TaskManageController.java
|
package com.lance.web.system.setting;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import com.lance.common.exception.ServiceException;
import com.lance.common.json.ResultInfo;
import com.lance.domain.setting.TaskInfo;
import com.lance.service.setting.TaskServiceImpl;
import com.lance.utils.FastJsonUtils;
/**
* 任务管理
* @author lance
*/
@Controller
@RequestMapping("admin/task/")
public class TaskManageController {
@Autowired
private TaskServiceImpl taskServiceImpl;
/**
* 任务页面
* 2016年10月8日下午6:39:15
*/
@RequestMapping(value="info")
public String info(){
return "admin/setting/task-info.jsp";
}
/**
* 任务列表
* @return
* 2016年10月9日上午11:36:03
*/
@ResponseBody
@RequestMapping(value="list", method=RequestMethod.POST)
public String list(){
Map<String, Object> map = new HashMap<>();
List<TaskInfo> infos = taskServiceImpl.list();
map.put("rows", infos);
map.put("total", infos.size());
return FastJsonUtils.toJson(map);
}
/**
* 保存定时任务
* @param info
* 2016年10月9日下午1:36:59
*/
@ResponseBody
@RequestMapping(value="save", method=RequestMethod.POST, produces = "application/json; charset=UTF-8")
public String save(TaskInfo info){
try {
if(info.getId() == 0) {
taskServiceImpl.addJob(info);
}else{
taskServiceImpl.edit(info);
}
} catch (ServiceException e) {
return ResultInfo.error(-1, e.getMessage());
}
return ResultInfo.success();
}
/**
* 删除定时任务
* @param jobName
* @param jobGroup
* 2016年10月9日下午1:52:20
*/
@ResponseBody
@RequestMapping(value="delete/{jobName}/{jobGroup}", produces = "application/json; charset=UTF-8")
public String delete(@PathVariable String jobName, @PathVariable String jobGroup){
try {
taskServiceImpl.delete(jobName, jobGroup);
} catch (ServiceException e) {
return ResultInfo.error(-1, e.getMessage());
}
return ResultInfo.success();
}
}
|
add requestmap
|
spring-boot-quartz/src/main/java/com/lance/quartz/web/TaskManageController.java
|
add requestmap
|
<ide><path>pring-boot-quartz/src/main/java/com/lance/quartz/web/TaskManageController.java
<del>package com.lance.web.system.setting;
<add>package com.lance.quartz.web;
<ide>
<ide> import java.util.HashMap;
<ide> import java.util.List;
<ide> import org.springframework.web.bind.annotation.RequestMethod;
<ide> import org.springframework.web.bind.annotation.ResponseBody;
<ide>
<del>import com.lance.common.exception.ServiceException;
<del>import com.lance.common.json.ResultInfo;
<del>import com.lance.domain.setting.TaskInfo;
<del>import com.lance.service.setting.TaskServiceImpl;
<del>import com.lance.utils.FastJsonUtils;
<add>import com.alibaba.fastjson.JSON;
<add>import com.lance.quartz.common.exception.ServiceException;
<add>import com.lance.quartz.common.json.ResultInfo;
<add>import com.lance.quartz.domain.TaskInfo;
<add>import com.lance.quartz.service.TaskServiceImpl;
<ide>
<ide> /**
<ide> * 任务管理
<ide> * @author lance
<ide> */
<ide> @Controller
<del>@RequestMapping("admin/task/")
<ide> public class TaskManageController {
<ide> @Autowired
<ide> private TaskServiceImpl taskServiceImpl;
<ide>
<ide> /**
<del> * 任务页面
<add> * Index.jsp
<ide> * 2016年10月8日下午6:39:15
<ide> */
<del> @RequestMapping(value="info")
<add> @RequestMapping(value={"", "/", "index"})
<ide> public String info(){
<del> return "admin/setting/task-info.jsp";
<add> return "index.jsp";
<ide> }
<ide>
<ide> /**
<ide> List<TaskInfo> infos = taskServiceImpl.list();
<ide> map.put("rows", infos);
<ide> map.put("total", infos.size());
<del> return FastJsonUtils.toJson(map);
<add> return JSON.toJSONString(map);
<ide> }
<ide>
<ide> /**
|
|
JavaScript
|
mit
|
f62b46378de213339fe8fc8e7dae37d836e978d9
| 0 |
perdona/pickadate.js,cmaddalozzo/pickadate.js,perdona/pickadate.js,amsul/pickadate.js,rollbrettler/pickadate.js,iwanttotellyou/pickadate.js,arkmancetz/pickadate.js,nsmith7989/pickadate.js,b-cuts/pickadate.js,mdehoog/pickadate.js,mskrajnowski/pickadate.js,Betterez/pickadate.js,mskrajnowski/pickadate.js,amsul/pickadate.js,mohamnag/pickadate.js,spinlister/pickadate.js,bespoormsed/pickadate.js,bluespore/pickadate.js,spinlister/pickadate.js,mohamnag/pickadate.js,bianjp/pickadate.js,AleksandrChukhray/pickadate.js,nikoz84/pickadate.js,arkmancetz/pickadate.js,burakkp/pickadate.js,ben-nsng/pickadate.js,blacklane/pickadate.js,okusawa/pickadate.js,burakkp/pickadate.js,FOOLHOLI/pickadate.js,wghust/pickadate.js,nikoz84/pickadate.js,fecori/pickadate.js,Betterez/pickadate.js,ryaneof/pickadate.js,Drooids/pickadate.js,iwanttotellyou/pickadate.js,b-cuts/pickadate.js,ivandoric/pickadate.js,mdehoog/pickadate.js,nayosx/pickadate.js,dribehance/pickadate.js,loki315zx/pickadate.js,blacklane/pickadate.js,bluespore/pickadate.js,nsmith7989/pickadate.js,okusawa/pickadate.js,AleksandrChukhray/pickadate.js,bespoormsed/pickadate.js,prashen/pickadate.js,rollbrettler/pickadate.js,elton0895/pickadate.js,Drooids/pickadate.js,prashen/pickadate.js,ben-nsng/pickadate.js,grgcnnr/pickadate.js-SASS,baminteractive/pickadatebam,ivandoric/pickadate.js,bianjp/pickadate.js,elton0895/pickadate.js,wghust/pickadate.js,FOOLHOLI/pickadate.js,cmaddalozzo/pickadate.js,ryaneof/pickadate.js,FronterAS/pickadate.js,loki315zx/pickadate.js,dribehance/pickadate.js,atis--/pickadate.js,baminteractive/pickadatebam,atis--/pickadate.js,nayosx/pickadate.js,fecori/pickadate.js
|
// Polish
$.extend( $.fn.pickadate.defaults, {
monthsFull: [ 'styczeń', 'luty', 'marzec', 'kwiecień', 'maj', 'czerwiec', 'lipiec', 'sierpień', 'wrzesień', 'październik', 'listopad', 'grudzień' ],
monthsShort: [ 'sty', 'lut', 'mar', 'kwi', 'maj', 'cze', 'lip', 'sie', 'wrz', 'paź', 'lis', 'gru' ],
weekdaysFull: [ 'niedziela', 'poniedziałek', 'wtorek', 'środa', 'czwartek', 'piątek', 'sobota' ],
weekdaysShort: [ 'N', 'Pn', 'Wt', 'Śr', 'Cz', 'Pt', 'So' ],
today: 'dzisiaj',
clear: 'usunąć',
firstDay: 1,
format: 'd mmmm yyyy',
formatSubmit: 'yyyy/mm/dd'
});
|
lib/translations/pl_PL.js
|
// Polish
$.extend( $.fn.pickadate.defaults, {
monthsFull: [ 'styczeń', 'luty', 'marzec', 'kwiecień', 'maj', 'czerwiec', 'lipiec', 'sierpień', 'wrzesień', 'październik', 'listopad', 'grudzień' ],
monthsShort: [ 'sty', 'lut', 'mar', 'kwi', 'maj', 'cze', 'lip', 'sie', 'wrz', 'paź', 'lis', 'gru' ],
weekdaysFull: [ 'niedziela', 'poniedziałek', 'wtorek', 'środa', 'czwartek', 'piąąek', 'sobota' ],
weekdaysShort: [ 'N', 'Pn', 'Wt', 'Śr', 'Cz', 'Pt', 'So' ],
today: 'dzisiaj',
clear: 'usunąć',
firstDay: 1,
format: 'd mmmm yyyy',
formatSubmit: 'yyyy/mm/dd'
});
|
Update pl_PL.js
Fix Friday name
|
lib/translations/pl_PL.js
|
Update pl_PL.js
|
<ide><path>ib/translations/pl_PL.js
<ide> $.extend( $.fn.pickadate.defaults, {
<ide> monthsFull: [ 'styczeń', 'luty', 'marzec', 'kwiecień', 'maj', 'czerwiec', 'lipiec', 'sierpień', 'wrzesień', 'październik', 'listopad', 'grudzień' ],
<ide> monthsShort: [ 'sty', 'lut', 'mar', 'kwi', 'maj', 'cze', 'lip', 'sie', 'wrz', 'paź', 'lis', 'gru' ],
<del> weekdaysFull: [ 'niedziela', 'poniedziałek', 'wtorek', 'środa', 'czwartek', 'piąąek', 'sobota' ],
<add> weekdaysFull: [ 'niedziela', 'poniedziałek', 'wtorek', 'środa', 'czwartek', 'piątek', 'sobota' ],
<ide> weekdaysShort: [ 'N', 'Pn', 'Wt', 'Śr', 'Cz', 'Pt', 'So' ],
<ide> today: 'dzisiaj',
<ide> clear: 'usunąć',
|
|
Java
|
apache-2.0
|
77633e1a589570bd7ee75ea8d4fc30d0e9b44942
| 0 |
alphafoobar/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,amith01994/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,ibinti/intellij-community,da1z/intellij-community,caot/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,asedunov/intellij-community,holmes/intellij-community,semonte/intellij-community,izonder/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,asedunov/intellij-community,izonder/intellij-community,kdwink/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,semonte/intellij-community,ibinti/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,dslomov/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,caot/intellij-community,signed/intellij-community,asedunov/intellij-communit
y,MichaelNedzelsky/intellij-community,slisson/intellij-community,semonte/intellij-community,clumsy/intellij-community,holmes/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,tmpgit/intellij-community,caot/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,izonder/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,caot/intellij-community,allotria/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,allotria/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,caot/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,ibinti/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,supersven/intellij-community,da1z
/intellij-community,asedunov/intellij-community,xfournet/intellij-community,slisson/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,da1z/intellij-community,samthor/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,kool79/intellij-community,FHannes/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,adedayo/intellij-community,adedayo/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,slisson/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,izonder/intellij-community,fitermay/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,robovm/robovm-studio,FHannes/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,ryano144/intellij-community,fitermay/intellij-community,ibinti/intellij-community,adedayo/intellij-community,ibinti/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,jagguli/intellij-community,ol-loginov/intellij-comm
unity,FHannes/intellij-community,orekyuu/intellij-community,samthor/intellij-community,xfournet/intellij-community,signed/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,adedayo/intellij-community,kdwink/intellij-community,vladmm/intellij-community,asedunov/intellij-community,clumsy/intellij-community,pwoodworth/intellij-community,caot/intellij-community,clumsy/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,samthor/intellij-community,supersven/intellij-community,gnuhub/intellij-community,slisson/intellij-community,da1z/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,clumsy/intellij-community,diorcety/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,supersven/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,xfournet/intellij-community,petteyg/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,blademainer/intellij-community,fengbaicanh
e/intellij-community,apixandru/intellij-community,vladmm/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,robovm/robovm-studio,retomerz/intellij-community,da1z/intellij-community,holmes/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,petteyg/intellij-community,clumsy/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,caot/intellij-community,slisson/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,signed/intellij-community,dslomov/intellij-community,retomerz/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,caot/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,signed/intellij-community,da1z/intellij-community,signed/intellij-community,holmes/intellij-community,fnouama/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,samthor/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,izonder/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,M
ichaelNedzelsky/intellij-community,Lekanich/intellij-community,akosyakov/intellij-community,allotria/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,da1z/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,diorcety/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,slisson/intellij-community,Distrotech/intellij-community,da1z/intellij-community,ryano144/intellij-community,kool79/intellij-community,muntasirsyed/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,kool79/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,signed/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,ibinti/intellij-community,blademainer/intellij-community,kool79/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,vladmm/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,da1z/intellij-community,FHannes/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,asedunov/intellij-communit
y,kool79/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,samthor/intellij-community,dslomov/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,signed/intellij-community,caot/intellij-community,akosyakov/intellij-community,izonder/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,robovm/robovm-studio,semonte/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,fnouama/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,da1z/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,asedunov/intelli
j-community,da1z/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,slisson/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,kdwink/intellij-community,ryano144/intellij-community,izonder/intellij-community,fitermay/intellij-community,izonder/intellij-community,petteyg/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,signed/intellij-community,nicolargo/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,vladmm/intellij-community,FHannes/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,retomerz/intellij-community,hurricup/intellij-community,fnouama/intellij-community,semonte/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,clumsy/intellij-community,holmes/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,holmes/intellij-community,fitermay/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,semonte/intellij-community,fnouama/intellij-community,xfour
net/intellij-community,ryano144/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,signed/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,hurricup/intellij-community,samthor/intellij-community,youdonghai/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,signed/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,ryano144/intellij-community,retomerz/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,slisson/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,adedayo/intellij-community,FHannes/intellij-community,kool79/intellij-community,dslomov/intellij-community,da1z/intellij-community,slisson/intellij-community,suncycheng/intellij-community,caot/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,FHannes/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,supersven/int
ellij-community,youdonghai/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,samthor/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,dslomov/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,caot/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,kdwink/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,supersven/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,robovm/robovm-studio,hurricup/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,apixandru/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,ThiagoGar
ciaAlves/intellij-community,gnuhub/intellij-community,allotria/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,semonte/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,caot/intellij-community,jagguli/intellij-community,vladmm/intellij-community,supersven/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,slisson/intellij-community,allotria/intellij-community,kdwink/intellij-community,petteyg/intellij-community,kdwink/intellij-community,fnouama/intellij-community,jagguli/intellij-community,semonte/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,clumsy/intellij-community,asedunov/intellij-community,diorcety/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,allotria/intellij-community,holmes/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,kool79/intellij-community
|
package org.jetbrains.postfixCompletion.settings;
import com.intellij.application.options.editor.EditorOptionsProvider;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.postfixCompletion.templates.PostfixTemplate;
import javax.swing.*;
import java.util.Arrays;
import java.util.Map;
public class PostfixCompletionConfigurable implements SearchableConfigurable, EditorOptionsProvider, Configurable.NoScroll {
private static final Logger LOG = Logger.getInstance(PostfixCompletionConfigurable.class);
@Nullable
private PostfixTemplatesListPanel myTemplatesListPanel;
@NotNull
@Override
public String getId() {
return "reference.settingsdialog.IDE.editor.postfix.completion";
}
@Nullable
@Override
public String getHelpTopic() {
return null;
}
@Nls
@Override
public String getDisplayName() {
return "Postfix Completion";
}
@Nullable
public PostfixTemplatesListPanel getTemplatesListPanel() {
if (myTemplatesListPanel == null) {
createComponent();
}
return myTemplatesListPanel;
}
@Nullable
@Override
public JComponent createComponent() {
if (myTemplatesListPanel == null) {
final PostfixTemplate[] templates = PostfixTemplate.EP_NAME.getExtensions();
PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
if (templatesSettings == null) {
LOG.error("Can't retrieve postfix template settings");
return null;
}
myTemplatesListPanel = new PostfixTemplatesListPanel(Arrays.asList(templates));
}
return myTemplatesListPanel.getComponent();
}
@Override
public void apply() throws ConfigurationException {
if (myTemplatesListPanel != null) {
PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
if (templatesSettings != null) {
Map<String, Boolean> newTemplatesState = ContainerUtil.newHashMap();
for (Map.Entry<String, Boolean> entry : myTemplatesListPanel.getState().entrySet()) {
Boolean value = entry.getValue();
if (value != null && !value) {
newTemplatesState.put(entry.getKey(), entry.getValue());
}
}
templatesSettings.setTemplatesState(newTemplatesState);
}
}
}
@Override
public void reset() {
if (myTemplatesListPanel != null) {
PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
if (templatesSettings != null) {
myTemplatesListPanel.setState(templatesSettings.getTemplatesState());
}
}
}
@Override
public void disposeUIResources() {
myTemplatesListPanel = null;
}
@Override
public boolean isModified() {
PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
if (templatesSettings == null) return false;
return myTemplatesListPanel != null && !myTemplatesListPanel.getState().equals(templatesSettings.getTemplatesState());
}
@Nullable
@Override
public Runnable enableSearch(String s) {
return null;
}
}
|
src/org/jetbrains/postfixCompletion/settings/PostfixCompletionConfigurable.java
|
package org.jetbrains.postfixCompletion.settings;
import com.intellij.application.options.editor.EditorOptionsProvider;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.postfixCompletion.templates.PostfixTemplate;
import javax.swing.*;
import java.util.Arrays;
import java.util.Map;
public class PostfixCompletionConfigurable implements SearchableConfigurable, EditorOptionsProvider, Configurable.NoScroll {
private static final Logger LOG = Logger.getInstance(PostfixCompletionConfigurable.class);
@Nullable
private PostfixTemplatesListPanel myPanel;
@NotNull
@Override
public String getId() {
return "reference.settingsdialog.IDE.editor.postfix.completion";
}
@Nullable
@Override
public String getHelpTopic() {
return null;
}
@Nls
@Override
public String getDisplayName() {
return "Postfix Completion";
}
@Nullable
public PostfixTemplatesListPanel getTemplatesListPanel() {
if (myPanel == null) {
createComponent();
}
return myPanel;
}
@Nullable
@Override
public JComponent createComponent() {
if (myPanel == null) {
final PostfixTemplate[] templates = PostfixTemplate.EP_NAME.getExtensions();
PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
if (templatesSettings == null) {
LOG.error("Can't retrieve postfix template settings");
return null;
}
myPanel = new PostfixTemplatesListPanel(Arrays.asList(templates));
}
return myPanel.getComponent();
}
@Override
public void apply() throws ConfigurationException {
if (myPanel != null) {
PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
if (templatesSettings != null) {
Map<String, Boolean> newTemplatesState = ContainerUtil.newHashMap();
for (Map.Entry<String, Boolean> entry : myPanel.getState().entrySet()) {
Boolean value = entry.getValue();
if (value != null && !value) {
newTemplatesState.put(entry.getKey(), entry.getValue());
}
}
templatesSettings.setTemplatesState(newTemplatesState);
}
}
}
@Override
public void reset() {
if (myPanel != null) {
PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
if (templatesSettings != null) {
myPanel.setState(templatesSettings.getTemplatesState());
}
}
}
@Override
public void disposeUIResources() {
myPanel = null;
}
@Override
public boolean isModified() {
PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
if (templatesSettings == null) return false;
return myPanel != null && !myPanel.getState().equals(templatesSettings.getTemplatesState());
}
@Nullable
@Override
public Runnable enableSearch(String s) {
return null;
}
}
|
Rename panel
|
src/org/jetbrains/postfixCompletion/settings/PostfixCompletionConfigurable.java
|
Rename panel
|
<ide><path>rc/org/jetbrains/postfixCompletion/settings/PostfixCompletionConfigurable.java
<ide> private static final Logger LOG = Logger.getInstance(PostfixCompletionConfigurable.class);
<ide>
<ide> @Nullable
<del> private PostfixTemplatesListPanel myPanel;
<add> private PostfixTemplatesListPanel myTemplatesListPanel;
<ide>
<ide> @NotNull
<ide> @Override
<ide>
<ide> @Nullable
<ide> public PostfixTemplatesListPanel getTemplatesListPanel() {
<del> if (myPanel == null) {
<add> if (myTemplatesListPanel == null) {
<ide> createComponent();
<ide> }
<del> return myPanel;
<add> return myTemplatesListPanel;
<ide> }
<ide>
<ide> @Nullable
<ide> @Override
<ide> public JComponent createComponent() {
<del> if (myPanel == null) {
<add> if (myTemplatesListPanel == null) {
<ide> final PostfixTemplate[] templates = PostfixTemplate.EP_NAME.getExtensions();
<ide>
<ide> PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
<ide> return null;
<ide> }
<ide>
<del> myPanel = new PostfixTemplatesListPanel(Arrays.asList(templates));
<add> myTemplatesListPanel = new PostfixTemplatesListPanel(Arrays.asList(templates));
<ide> }
<ide>
<del> return myPanel.getComponent();
<add> return myTemplatesListPanel.getComponent();
<ide> }
<ide>
<ide> @Override
<ide> public void apply() throws ConfigurationException {
<del> if (myPanel != null) {
<add> if (myTemplatesListPanel != null) {
<ide> PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
<ide> if (templatesSettings != null) {
<ide> Map<String, Boolean> newTemplatesState = ContainerUtil.newHashMap();
<del> for (Map.Entry<String, Boolean> entry : myPanel.getState().entrySet()) {
<add> for (Map.Entry<String, Boolean> entry : myTemplatesListPanel.getState().entrySet()) {
<ide> Boolean value = entry.getValue();
<ide> if (value != null && !value) {
<ide> newTemplatesState.put(entry.getKey(), entry.getValue());
<ide>
<ide> @Override
<ide> public void reset() {
<del> if (myPanel != null) {
<add> if (myTemplatesListPanel != null) {
<ide> PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
<ide> if (templatesSettings != null) {
<del> myPanel.setState(templatesSettings.getTemplatesState());
<add> myTemplatesListPanel.setState(templatesSettings.getTemplatesState());
<ide> }
<ide> }
<ide> }
<ide>
<ide> @Override
<ide> public void disposeUIResources() {
<del> myPanel = null;
<add> myTemplatesListPanel = null;
<ide> }
<ide>
<ide> @Override
<ide> PostfixCompletionSettings templatesSettings = PostfixCompletionSettings.getInstance();
<ide> if (templatesSettings == null) return false;
<ide>
<del> return myPanel != null && !myPanel.getState().equals(templatesSettings.getTemplatesState());
<add> return myTemplatesListPanel != null && !myTemplatesListPanel.getState().equals(templatesSettings.getTemplatesState());
<ide> }
<ide>
<ide> @Nullable
|
|
Java
|
apache-2.0
|
096a4968b44ea043aa8d036365a7e60a5c933bac
| 0 |
ferstl/depgraph-maven-plugin
|
package com.github.ferstl.depgraph.graph.text;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import com.github.ferstl.depgraph.graph.Edge;
import com.github.ferstl.depgraph.graph.Node;
public class TextGraphWriter {
private final Map<String, Node<?>> nodesById;
private final Map<String, List<Edge>> relations;
private final Collection<String> roots;
public TextGraphWriter(Collection<Node<?>> nodes, Collection<Edge> edges) {
this.nodesById = new HashMap<>();
this.relations = new LinkedHashMap<>();
this.roots = new LinkedHashSet<>();
for (Node<?> node : nodes) {
String nodeId = node.getNodeId();
this.nodesById.put(nodeId, node);
this.relations.put(nodeId, new ArrayList<Edge>());
}
this.roots.addAll(this.nodesById.keySet());
for (Edge edge : edges) {
this.relations.get(edge.getFromNodeId()).add(edge);
if (!edge.getFromNodeId().equals(edge.getToNodeId())) {
this.roots.remove(edge.getToNodeId());
}
}
}
public void write(StringBuilder stringBuilder) {
for (String root : this.roots) {
Node<?> fromNode = this.nodesById.get(root);
stringBuilder.append(fromNode.getNodeName()).append("\n");
writeChildren(stringBuilder, root, 0, false);
}
}
private void writeChildren(StringBuilder stringBuilder, String parent, int level, boolean lastParent) {
List<Edge> edges = this.relations.get(parent);
for (int i = 0; i < edges.size(); i++) {
Edge edge = edges.get(i);
indent(stringBuilder, level, lastParent, i == edges.size() - 1);
Node<?> childNode = this.nodesById.get(edge.getToNodeId());
stringBuilder.append(childNode.getNodeName());
if (edge.getName() != null && !edge.getName().isEmpty()) {
stringBuilder.append(" (").append(edge.getName()).append(")");
}
stringBuilder.append("\n");
writeChildren(stringBuilder, childNode.getNodeId(), level + 1, i == edges.size() - 1);
}
edges.clear();
}
private void indent(StringBuilder stringBuilder, int level, boolean lastParent, boolean lastElement) {
for (int i = 0; i < level - 1; i++) {
stringBuilder.append("| ");
}
if (level > 0) {
stringBuilder.append(lastParent ? " " : "| ");
}
if (lastElement) {
stringBuilder.append("\\- ");
} else {
stringBuilder.append("+- ");
}
}
}
|
src/main/java/com/github/ferstl/depgraph/graph/text/TextGraphWriter.java
|
package com.github.ferstl.depgraph.graph.text;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import com.github.ferstl.depgraph.graph.Edge;
import com.github.ferstl.depgraph.graph.Node;
public class TextGraphWriter {
private final Map<String, Node<?>> nodesById;
private final Map<String, List<Edge>> relations;
private final Collection<String> roots;
public TextGraphWriter(Collection<Node<?>> nodes, Collection<Edge> edges) {
this.nodesById = new HashMap<>();
this.relations = new LinkedHashMap<>();
this.roots = new LinkedHashSet<>();
for (Node<?> node : nodes) {
String nodeId = node.getNodeId();
this.nodesById.put(nodeId, node);
this.relations.put(nodeId, new ArrayList<Edge>());
}
this.roots.addAll(this.nodesById.keySet());
for (Edge edge : edges) {
this.relations.get(edge.getFromNodeId()).add(edge);
if (!edge.getFromNodeId().equals(edge.getToNodeId())) {
this.roots.remove(edge.getToNodeId());
}
}
}
public void write(StringBuilder stringBuilder) {
for (String root : this.roots) {
Node<?> fromNode = this.nodesById.get(root);
stringBuilder.append(fromNode.getNodeName()).append("\n");
writeChildren(stringBuilder, root, 0);
}
}
private void writeChildren(StringBuilder stringBuilder, String parent, int level) {
List<Edge> edges = this.relations.get(parent);
for (int i = 0; i < edges.size(); i++) {
Edge edge = edges.get(i);
if (i != edges.size() - 1) {
indent(stringBuilder, level);
} else {
indentEnd(stringBuilder, level);
}
Node<?> childNode = this.nodesById.get(edge.getToNodeId());
stringBuilder.append(childNode.getNodeName());
if (edge.getName() != null && !edge.getName().isEmpty()) {
stringBuilder.append(" (").append(edge.getName()).append(")");
}
stringBuilder.append("\n");
writeChildren(stringBuilder, childNode.getNodeId(), level + 1);
}
edges.clear();
}
private void indent(StringBuilder stringBuilder, int level) {
for (int i = 0; i < level; i++) {
stringBuilder.append("| ");
}
stringBuilder.append("+- ");
}
private void indentEnd(StringBuilder stringBuilder, int level) {
for (int i = 0; i < level; i++) {
stringBuilder.append("| ");
}
stringBuilder.append("\\- ");
}
}
|
#50 Improve appearance of last elements
|
src/main/java/com/github/ferstl/depgraph/graph/text/TextGraphWriter.java
|
#50 Improve appearance of last elements
|
<ide><path>rc/main/java/com/github/ferstl/depgraph/graph/text/TextGraphWriter.java
<ide> for (String root : this.roots) {
<ide> Node<?> fromNode = this.nodesById.get(root);
<ide> stringBuilder.append(fromNode.getNodeName()).append("\n");
<del> writeChildren(stringBuilder, root, 0);
<add> writeChildren(stringBuilder, root, 0, false);
<ide> }
<ide> }
<ide>
<del> private void writeChildren(StringBuilder stringBuilder, String parent, int level) {
<add> private void writeChildren(StringBuilder stringBuilder, String parent, int level, boolean lastParent) {
<ide> List<Edge> edges = this.relations.get(parent);
<ide> for (int i = 0; i < edges.size(); i++) {
<ide> Edge edge = edges.get(i);
<del> if (i != edges.size() - 1) {
<del> indent(stringBuilder, level);
<del> } else {
<del> indentEnd(stringBuilder, level);
<del> }
<add> indent(stringBuilder, level, lastParent, i == edges.size() - 1);
<ide>
<ide> Node<?> childNode = this.nodesById.get(edge.getToNodeId());
<ide> stringBuilder.append(childNode.getNodeName());
<ide> }
<ide>
<ide> stringBuilder.append("\n");
<del> writeChildren(stringBuilder, childNode.getNodeId(), level + 1);
<add> writeChildren(stringBuilder, childNode.getNodeId(), level + 1, i == edges.size() - 1);
<ide> }
<ide>
<ide> edges.clear();
<ide> }
<ide>
<del> private void indent(StringBuilder stringBuilder, int level) {
<del> for (int i = 0; i < level; i++) {
<add> private void indent(StringBuilder stringBuilder, int level, boolean lastParent, boolean lastElement) {
<add> for (int i = 0; i < level - 1; i++) {
<ide> stringBuilder.append("| ");
<ide> }
<del> stringBuilder.append("+- ");
<del> }
<ide>
<del> private void indentEnd(StringBuilder stringBuilder, int level) {
<del> for (int i = 0; i < level; i++) {
<del> stringBuilder.append("| ");
<add> if (level > 0) {
<add> stringBuilder.append(lastParent ? " " : "| ");
<ide> }
<del> stringBuilder.append("\\- ");
<add>
<add> if (lastElement) {
<add> stringBuilder.append("\\- ");
<add> } else {
<add> stringBuilder.append("+- ");
<add> }
<ide> }
<ide> }
|
|
JavaScript
|
mit
|
5e7e28baff029651b86d2cc5575c266a9b4f588c
| 0 |
bhavyaab/MasterBlaster,bhavyaab/MasterBlaster
|
var myData = [
{
userName: 'Jesus',
passWord: 'pwd',
name: 'Jesus',
image: './img/headshots/jesus.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 100,
losses: 0,
},
{
userName: 'Michael',
passWord: 'pwd',
name: 'Michael Molinari',
image: './img/headshots/michael2.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 14,
losses: 2,
},
{
userName: 'Brian',
passWord: 'pwd',
name: 'Brian Nations',
image: './img/headshots/brian.png',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 8,
losses: 4,
},
{
userName: 'Craig',
passWord: 'pwd',
name: 'Craig Campbell',
image: './img/headshots/craig.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 2,
losses: 6,
},
{
userName: 'Brook',
passWord: 'pwd',
name: 'Brook Riggio',
image: './img/headshots/brook.png',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 26,
losses: 12,
},
{
userName: 'Munir',
passWord: 'pwd',
name: 'Munir Ibrahim',
image: './img/headshots/munir.png',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 0,
losses: 60,
},
{
userName: 'Will',
passWord: 'pwd',
name: 'Will Weatherford',
image: './img/headshots/will.png',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 32,
losses: 8,
},
{
userName: 'Maggie',
passWord: 'pwd',
name: 'Maggie Q',
image: './img/headshots/maggie.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 36,
losses: 20,
},
{
userName: 'Randy',
passWord: 'pwd',
name: 'Randy Daytona',
image: './img/headshots/randy.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 26,
losses: 14,
},
{
userName: 'Karl',
passWord: 'pwd',
name: 'Karl Wolfschtagg',
image: './img/headshots/karl.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 26,
losses: 40,
}
];
|
js/data.js
|
var myData = [
{
userName: 'Jesus',
passWord: 'pwd',
name: 'Jesus',
image: './img/headshots/jesus.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 100,
losses: 0,
},
{
userName: 'Michael',
passWord: 'pwd',
name: 'Michael Molinari',
image: './img/headshots/michael2.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 14,
losses: 2,
},
{
userName: 'Brian',
passWord: 'pwd',
name: 'Brian Nations',
image: './img/headshots/brian.png',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 8,
losses: 4,
},
{
userName: 'Craig',
passWord: 'pwd',
name: 'Craig Campbell',
image: './img/headshots/craig.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 2,
losses: 6,
},
{
userName: 'Brook',
passWord: 'pwd',
name: 'Brook Riggio',
image: './img/headshots/brook.png',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 26,
losses: 12,
},
{
userName: 'Munir',
passWord: 'pwd',
name: 'Munir Ibrahim',
image: './img/headshots/munir.png',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 0,
losses: 60,
},
{
userName: 'Will',
passWord: 'pwd',
name: 'Will Weatherford',
image: './img/headshots/will.png',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 26,
losses: 18,
},
{
userName: 'Maggie',
passWord: 'pwd',
name: 'Maggie Q',
image: './img/headshots/maggie.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 36,
losses: 20,
},
{
userName: 'Randy',
passWord: 'pwd',
name: 'Randy Daytona',
image: './img/headshots/randy.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 26,
losses: 14,
},
{
userName: 'Karl',
passWord: 'pwd',
name: 'Karl Wolfschtagg',
image: './img/headshots/karl.jpg',
score: function() {
return Math.floor(this.wins * 100 / (this.wins + this.losses));
},
wins: 26,
losses: 40,
}
];
|
fixed data
|
js/data.js
|
fixed data
|
<ide><path>s/data.js
<ide> score: function() {
<ide> return Math.floor(this.wins * 100 / (this.wins + this.losses));
<ide> },
<del> wins: 26,
<del> losses: 18,
<add> wins: 32,
<add> losses: 8,
<ide> },
<ide> {
<ide> userName: 'Maggie',
|
|
JavaScript
|
mit
|
89bfcab18b59e2320dfb2e4d873454b39a58707a
| 0 |
DrummerHead/JS-Inject,DrummerHead/JS-Inject
|
/* - This is a javascript snippet to be used with
http://mcdlr.com/js-inject/ - */
/* - Multisite - *\
|* - Clicks on correct download torrent link - *|
\* - v1.0 - */
(function(document){
var flash = function(message, isNice){
document.querySelector('body').insertAdjacentHTML('beforeEnd', '<div style="position: fixed; top: 0; left: 0; z-index: 7777; width: 99%; padding: 1%; text-align: center; background-color: ' + (isNice ? 'green' : 'red') + '; color: #fff; font-weight: bold; font-family: sans-serif;">' + message + '</div>');
};
var c = function(target) {
var click = new MouseEvent('click', {
'view': window,
'bubbles': true,
'cancelable': true
});
var targetElement = document.querySelector(target);
if(document.contains(targetElement)){
targetElement.dispatchEvent(click);
flash('Downloaded successfully', true);
}
else{
flash('The element does not exist', false);
}
};
var host = window.location.host;
switch(host){
case 'thepiratebay.se':
case 'baymirror.com':
case 'fastpiratebay.eu':
case 'thepiratebay.mn':
c('.download a:first-child');
break;
case 'yourbittorrent.com':
c('#main .row a[href$=torrent]');
break;
case 'www.monova.org':
c('#downloadbox h2 a');
break;
case 'www.seedpeer.me':
case 'www.seedpeer.eu':
c('.leftSideHolder .downloadMenu .downloadTorrent > a:first-child');
break;
case 'www.torrentdownloads.me':
c('.inner_container .download li:nth-child(2) > a');
break;
case 'www.torrents.net':
c('.holder .download-holder a.btn2-download');
break;
case 'www.torrentfunk.com':
c('.content table[cellspacing="4"] tr:first-child td:nth-child(2) > a');
break;
case 'www.limetorrents.com':
case 'www.limetorrents.cc':
c('.dltorrent a.csprite_dltorrent');
break;
case 'torrentcrazy.com':
c('#dl-links > a:first-child');
break;
case 'kickass.to':
case 'kickass.so':
case 'kickmirror.com':
case 'katproxy.com':
case 'kat.cr':
c('.downloadButtonGroup a.verifTorrentButton');
break;
case 'torcache.net':
c('.container-fluid > .row-fluid > center:nth-of-type(2) a');
break;
case 'bitsnoop.com':
c('#dload a.dlbtn.dl_tor2');
break;
case 'www.torrentreactor.net':
c('a#download-magnet');
break;
case 'publichd.se':
c('#torrmain tr:nth-child(5) td:nth-child(2) a');
break;
case '1337x.org':
case '1337x.to':
c('a.torrent');
break;
case 'extratorrent.cc':
c('.tabledata0 a[title="Download"]');
break;
case 'www.torrenthound.com':
case 'www.houndmirror.com':
c('#torrent a');
break;
case 'www.torlock.com':
c('a[href^="/tor/"]');
break;
case 'www.torrentzap.com':
c('#rightside .downbuts a.downloadLink');
break;
case 'rarbg.com':
case 'rarbg.to':
c('table.lista tr:first-child td.lista a');
break;
case 'www.vertor.com':
case 'vertor.eu':
c('.down_but li:nth-child(2) a.downloadLink');
break;
case 'www.fulldls.com':
c('.downl-buttons .btn-grp a:first-child');
break;
case 'www.newtorrents.info':
c('#tablediv .sm tr:first-child td:nth-child(2) a');
break;
case 'h33t.to':
c('table.lista tr:nth-child(2) td:nth-child(2) table td:nth-child(2) a');
break;
case 'www.torrentbit.net':
c('.tor_item a[title="Download torrent"]');
break;
case 'torrentproject.se':
c('#download .usite:nth-child(2) a');
break;
case 'www.bt-chat.com':
window.open(document.querySelector('a[href^="download"]').getAttribute('href').replace('download', 'download1') + '&type=torrent');
flash('Downloaded in new tab', true);
break;
case 'www.demonoid.pw':
c('[src="/images/arrows/blue.png"]');
break;
case 'yts.re':
c('.std-btn.torrentDwl');
break;
case 'isohunt.to':
c('.btn-download');
break;
case 'yts.to':
if(document.querySelector('.modal-download .modal-content .modal-torrent:nth-child(2)') === null){
c('.modal-download a.download-torrent');
}
else{
c('.modal-download .modal-content .modal-torrent:nth-child(2) a.download-torrent');
}
break;
/*
case '':
c('');
break;
*/
default:
flash('Unknown site', false);
}
})(document);
/*
console.log(window.location.host);
* */
/* - /Multisite - */
|
examples/download-torrent.js
|
/* - This is a javascript snippet to be used with
http://mcdlr.com/js-inject/ - */
/* - Multisite - *\
|* - Clicks on correct download torrent link - *|
\* - v1.0 - */
(function(document){
var flash = function(message, isNice){
document.querySelector('body').insertAdjacentHTML('beforeEnd', '<div style="position: fixed; top: 0; left: 0; z-index: 7777; width: 99%; padding: 1%; text-align: center; background-color: ' + (isNice ? 'green' : 'red') + '; color: #fff; font-weight: bold; font-family: sans-serif;">' + message + '</div>');
};
var c = function(target) {
var click = new MouseEvent('click', {
'view': window,
'bubbles': true,
'cancelable': true
});
var targetElement = document.querySelector(target);
if(document.contains(targetElement)){
targetElement.dispatchEvent(click);
flash('Downloaded successfully', true);
}
else{
flash('The element does not exist', false);
}
};
var host = window.location.host;
switch(host){
case 'thepiratebay.se':
case 'baymirror.com':
case 'fastpiratebay.eu':
c('.download a:first-child');
break;
case 'yourbittorrent.com':
c('#content table:nth-of-type(2) td[height="100"] a');
break;
case 'www.monova.org':
c('#downloadbox h2 a');
break;
case 'www.seedpeer.me':
case 'www.seedpeer.eu':
c('.leftSideHolder .downloadMenu .downloadTorrent > a:first-child');
break;
case 'www.torrentdownloads.me':
c('.inner_container .download li:nth-child(2) > a');
break;
case 'www.torrents.net':
c('.holder .download-holder a.btn2-download');
break;
case 'www.torrentfunk.com':
c('.content table[cellspacing="4"] tr:first-child td:nth-child(2) > a');
break;
case 'www.limetorrents.com':
case 'www.limetorrents.cc':
c('.dltorrent a.csprite_dltorrent');
break;
case 'torrentcrazy.com':
c('#dl-links > a:first-child');
break;
case 'kickass.to':
case 'kickass.so':
case 'kickmirror.com':
case 'katproxy.com':
c('.downloadButtonGroup a.verifTorrentButton');
break;
case 'torcache.net':
c('.container-fluid > .row-fluid > center:nth-of-type(2) a');
break;
case 'bitsnoop.com':
c('#dload a.dlbtn.dl_tor2');
break;
case 'www.torrentreactor.net':
c('a#download-magnet');
break;
case 'publichd.se':
c('#torrmain tr:nth-child(5) td:nth-child(2) a');
break;
case '1337x.org':
case '1337x.to':
c('a.torrent');
break;
case 'extratorrent.cc':
c('.tabledata0 a[title="Download"]');
break;
case 'www.torrenthound.com':
case 'www.houndmirror.com':
c('#torrent a');
break;
case 'www.torlock.com':
c('a[href^="/tor/"]');
break;
case 'www.torrentzap.com':
c('#rightside .downbuts a.downloadLink');
break;
case 'rarbg.com':
c('table.lista tr:first-child td.lista a');
break;
case 'www.vertor.com':
case 'vertor.eu':
c('.down_but li:nth-child(2) a.downloadLink');
break;
case 'www.fulldls.com':
c('.downl-buttons .btn-grp a:first-child');
break;
case 'www.newtorrents.info':
c('#tablediv .sm tr:first-child td:nth-child(2) a');
break;
case 'h33t.to':
c('table.lista tr:nth-child(2) td:nth-child(2) table td:nth-child(2) a');
break;
case 'www.torrentbit.net':
c('.tor_item a[title="Download torrent"]');
break;
case 'torrentproject.se':
c('#download .usite:nth-child(2) a');
break;
case 'www.bt-chat.com':
window.open(document.querySelector('a[href^="download"]').getAttribute('href').replace('download', 'download1') + '&type=torrent');
flash('Downloaded in new tab', true);
break;
case 'www.demonoid.pw':
c('[src="/images/arrows/blue.png"]');
break;
case 'yts.re':
c('.std-btn.torrentDwl');
break;
case 'isohunt.to':
c('.btn-download');
break;
/*
case '':
c('');
break;
*/
default:
flash('Unknown site', false);
}
})(document);
/* - /Multisite - */
|
Add yts.to, update selectors
|
examples/download-torrent.js
|
Add yts.to, update selectors
|
<ide><path>xamples/download-torrent.js
<ide> case 'thepiratebay.se':
<ide> case 'baymirror.com':
<ide> case 'fastpiratebay.eu':
<add> case 'thepiratebay.mn':
<ide> c('.download a:first-child');
<ide> break;
<ide> case 'yourbittorrent.com':
<del> c('#content table:nth-of-type(2) td[height="100"] a');
<add> c('#main .row a[href$=torrent]');
<ide> break;
<ide> case 'www.monova.org':
<ide> c('#downloadbox h2 a');
<ide> case 'kickass.so':
<ide> case 'kickmirror.com':
<ide> case 'katproxy.com':
<add> case 'kat.cr':
<ide> c('.downloadButtonGroup a.verifTorrentButton');
<ide> break;
<ide> case 'torcache.net':
<ide> c('#rightside .downbuts a.downloadLink');
<ide> break;
<ide> case 'rarbg.com':
<add> case 'rarbg.to':
<ide> c('table.lista tr:first-child td.lista a');
<ide> break;
<ide> case 'www.vertor.com':
<ide> case 'isohunt.to':
<ide> c('.btn-download');
<ide> break;
<add> case 'yts.to':
<add> if(document.querySelector('.modal-download .modal-content .modal-torrent:nth-child(2)') === null){
<add> c('.modal-download a.download-torrent');
<add> }
<add> else{
<add> c('.modal-download .modal-content .modal-torrent:nth-child(2) a.download-torrent');
<add> }
<add> break;
<ide>
<ide> /*
<ide> case '':
<ide> })(document);
<ide>
<ide>
<add>/*
<add> console.log(window.location.host);
<add> * */
<ide>
<ide>
<ide> /* - /Multisite - */
|
|
JavaScript
|
mit
|
1131898f3bb90a66185df014cc8f601216fd8967
| 0 |
angular-starter-kit/generator-ngx-app,angular-starter-kit/generator-ngx-app,ngx-rocket/generator-ngx-rocket,angular-starter-kit/generator-ngx-app,ngx-rocket/generator-ngx-rocket,ngx-rocket/generator-ngx-rocket,ngx-rocket/generator-ngx-rocket,angular-starter-kit/generator-ngx-app
|
const path = require('path');
const process = require('process');
const fs = require('fs-extra');
const chalk = require('chalk');
const Insight = require('insight');
const semver = require('semver');
const replace = require('replace-in-file');
const Generator = require('@ngx-rocket/core');
const asciiLogo = require('@ngx-rocket/ascii-logo');
const pkg = require('../../package.json');
const prompts = require('./prompts.js');
const options = require('./options.js');
const getLanguages = require('./languages.js');
const {deployers} = require('./deployers.js');
const packageJsonFile = 'package.json';
const appPath = 'src/app';
class NgxGenerator extends Generator {
initializing() {
this.version = pkg.version;
this.props = {};
// Disable automatic env install based on package.json
this.features.customInstallTask = true;
// Try to initialize analytics. Insight is broken for some users, so if it fails, proceed as if the --no-analytics flag was present.
try {
this.insight =
!this.options.analytics || process.env.DISABLE_NGX_ANALYTICS
? {track: () => {}}
: new Insight({trackingCode: 'UA-93069862-1', pkg});
} catch {
this.insight = {track: () => {}};
this.log(
chalk.yellow(
'There was a problem collecting analytics data. Proceeding without anonymous usage tracking. To suppress this warning in the future, use the --no-analytics flag.'
)
);
}
const minNodeVersion = pkg.engines.node.slice(2);
if (semver.lt(process.version, minNodeVersion)) {
this.log(chalk.yellow(`Angular CLI v13 needs NodeJS ${minNodeVersion} or greater.`));
this.log(chalk.yellow(`You are using ${process.version} which is unsupported, please upgrade.\n`));
// eslint-disable-next-line unicorn/no-process-exit
process.exit(-1);
}
this.argument('appName', {
description: 'Name of the app to generate',
type: String,
required: false
});
this.insight.optOut = !this.options.analytics || process.env.DISABLE_NGX_ANALYTICS;
if (this.options.raw) {
this.props.ui = 'raw';
}
if (this.options['location-strategy']) {
this.props.location = this.options['location-strategy'];
}
this.props.strict = this.options.strict;
this.props.skipInstall = this.options['skip-install'];
this.props.skipQuickstart = this.options['skip-quickstart'];
this.props.initGit = this.options.git;
this.props.usePrefix = this.options.prefix;
if (this.options.deploy) {
this.props.deploy = this.options.deploy;
}
// Updating
let fromVersion = null;
if (this.options.update) {
this.props = this.config.get('props') || this.props;
fromVersion = this.config.get('version');
}
if (fromVersion) {
if (semver.gte(fromVersion, this.version)) {
this.log(chalk.green('\nNothing to update, it’s all good!\n'));
// eslint-disable-next-line unicorn/no-process-exit
process.exit(0);
}
this.updating = true;
this.log(
`\nUpdating ${chalk.green(this.props.appName)} project (${chalk.yellow(fromVersion)} -> ${chalk.yellow(
this.version
)})\n`
);
this.log(`${chalk.yellow('Make sure you don’t have uncommitted changes before overwriting files!')}`);
this.insight.track('update', fromVersion, 'to', this.version);
} else if (!this.options['skip-welcome']) {
this.log(asciiLogo(pkg.version));
}
// Composition
const addonsOption = this.options.addons;
this.addons = addonsOption ? addonsOption.split(' ') : [];
this.addons.forEach((addon) => {
try {
if (/[:/]/.test(addon)) {
// Fetch addon name from URL/GitHub/Local package format
let splitIndex = addon.lastIndexOf('/');
splitIndex = splitIndex === -1 ? addon.lastIndexOf(':') : splitIndex;
addon = addon.slice(splitIndex + 1);
}
if (addon.startsWith('generator-')) {
// This prefix must be removed for Yeoman to work properly
addon = addon.slice(10);
}
if (addon.endsWith('.git')) {
// When working with git repos, this suffix must be removed
addon = addon.slice(0, -4);
}
this.composeWith(addon, this.options);
} catch {
this.log(chalk.red(`Error: add-on "${addon}" not found.`));
// eslint-disable-next-line unicorn/no-process-exit
process.exit(-1);
}
});
this.insight.track('version', this.version);
this.insight.track('node', process.version);
this.insight.track('platform', process.platform);
this.insight.track('addons', addonsOption);
}
async prompting() {
// Allow to pre-set any props in an add-on generator
Object.assign(this.props, this.sharedProps);
await super.prompting();
this.props.mobile = this.props.mobile || [];
this.props.desktop = this.props.desktop || [];
this.props.utility = this.props.utility || [];
this.props.tools = this.props.tools || [];
this.props.languages = this.props.languages || ['en-US', 'fr-FR'];
this.props.usePrefix = typeof this.props.usePrefix === 'boolean' ? this.props.usePrefix : true;
this.props.deploy = this.props.deploy || 'none';
this.props.features = this.props.features || [];
this.props.pwa = this.props.features.includes('pwa');
this.props.auth = this.props.features.includes('auth');
this.props.lazy = this.props.features.includes('lazy');
this.props.e2e = this.props.tools.includes('protractor'); // Legacy e2e
this.props.cypress = this.props.features.includes('cypress') && !this.props.e2e;
this.props.angulartics = this.props.features.includes('angulartics');
this.shareProps(this.props);
}
configuring() {
// Add prefix rules for languages
getLanguages().forEach((language) => {
this._prefixRules[language] = (props) => props.languages.includes(language);
});
this.insight.track(
'generator',
this.props.target,
this.props.target.includes('web') && this.props.pwa ? 'pwa' : '',
this.props.target.includes('cordova') ? this.props.mobile : '',
this.props.ui,
this.props.auth ? 'auth' : 'no-auth'
);
this.insight.track('package-manager', this.packageManager);
if (this.props.target.includes('cordova') && this.packageManager === 'yarn') {
this.log(chalk.yellow('\nWarning: Using Yarn with Cordova is NOT recommended!'));
this.log(chalk.yellow('Cordova still uses NPM to fetch packages, causing issues with Yarn.\n'));
}
}
install() {
if (!this.props.usePrefix) {
this.log(`\nConfiguring prefix, please wait…`);
const clientPath = this.isFullstack ? process.env.NGX_CLIENT_PATH : '';
const basePath = this.destinationPath(path.join(clientPath, appPath));
try {
// Rename folders
fs.removeSync(path.join(basePath, 'core'));
fs.removeSync(path.join(basePath, 'shared'));
fs.renameSync(path.join(basePath, '@shared'), path.join(basePath, 'shared'));
// Replace imports in files
const options = {files: 'src/**/*.ts'};
replace.sync({...options, from: /@shared/g, to: '@app/shared'});
} catch (error) {
this.log(`${chalk.red('An error occured during prefix config:')}\n${error && error.message}`);
}
}
if (this.props.initGit) {
this.spawnCommandSync('git', ['init', '--quiet']);
}
if (!this.props.skipInstall) {
this.log(`\nRunning ${chalk.yellow(`${this.packageManager} install`)}, please wait…`);
const install = this.packageManager === 'yarn' ? this.yarnInstall.bind(this) : this.npmInstall.bind(this);
// When using NPM, force install as peer dependencies with ionic-native packages
// cause install errors with NPM >= 7.
const options = this.packageManager === 'yarn' ? null : {force: true};
if (fs.existsSync(this.destinationPath(packageJsonFile))) {
install(null, options);
}
if (this.isFullstack) {
if (fs.existsSync(this.destinationPath(path.join(process.env.NGX_CLIENT_PATH, packageJsonFile)))) {
install(null, options, {cwd: this.destinationPath(process.env.NGX_CLIENT_PATH)});
}
if (fs.existsSync(this.destinationPath(path.join(process.env.NGX_SERVER_PATH, packageJsonFile)))) {
install(null, options, {cwd: this.destinationPath(process.env.NGX_SERVER_PATH)});
}
}
}
}
end() {
const deployer = deployers.find((d) => d.value === this.props.deploy);
if (this.props.deploy !== 'none') {
this.log(`\nConfiguring deployment with ${chalk.cyan(deployer.name)}, please wait…\n`);
const result = this.spawnCommandSync('ng', ['add', deployer.package]);
if (result.error) {
this.log(`${chalk.red('Something went wrong during deployment configuration :(')}`);
this.log(`You can retry manually using ${chalk.yellow(`npx ng add ${deployer.package}`)}`);
}
}
if (this.updating) {
this.log(`\nUpdated ${chalk.green(this.props.appName)} to ${chalk.yellow(this.version)} successfully!`);
return;
}
if (this.props.skipQuickstart) {
return;
}
this.log('\nAll done! Get started with these tasks:');
this.log(
`- $ ${chalk.green(`${this.packageManager} start`)}: start dev server with live reload on http://localhost:4200`
);
if (this.props.target.includes('web')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run build`)}: build web app for production`);
if (this.props.deploy !== 'none') {
this.log(`- $ ${chalk.green(`${this.packageManager} run deploy`)}: deploy app to ${deployer.name}`);
}
}
if (this.props.target.includes('cordova')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run cordova:prepare`)}: prepare for building mobile app`);
this.log(`- $ ${chalk.green(`${this.packageManager} run cordova:run`)}: run app on device or simulator`);
this.log(`- $ ${chalk.green(`${this.packageManager} run cordova:build`)}: build mobile app for production`);
}
if (this.props.target.includes('electron')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run electron:build`)}: build app for electron`);
this.log(`- $ ${chalk.green(`${this.packageManager} run electron:run`)}: run app in electron`);
this.log(
`- $ ${chalk.green(
`${this.packageManager} run electron:package`
)}: package executables for all selected platforms`
);
}
this.log(`- $ ${chalk.green(`${this.packageManager} test`)}: run unit tests in watch mode for TDD`);
this.log(`- $ ${chalk.green(`${this.packageManager} run test:ci`)}: lint code and run units tests with coverage`);
if (this.props.e2e || this.props.cypress) {
this.log(`- $ ${chalk.green(`${this.packageManager} run e2e`)}: launch e2e tests`);
}
if (this.props.tools.includes('hads')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run docs`)}: show docs and coding guides`);
}
if (this.props.tools.includes('compodoc')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run compodoc`)}: generates docs from code`);
}
if (this.props.tools.includes('prettier')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run prettier`)}: format your code automatically`);
}
}
}
module.exports = Generator.make({
baseDir: __dirname,
generator: NgxGenerator,
options,
prompts,
prefixRules: Object.assign(Generator.defaultPrefixRules, {
'ionic-tabs': (props) => props.ui === 'ionic' && props.layout === 'tabs',
'ionic-side-menu': (props) => props.ui === 'ionic' && props.layout === 'side-menu',
'material-simple': (props) => props.ui === 'material' && props.layout === 'simple',
'material-side-menu': (props) => props.ui === 'material' && props.layout === 'side-menu',
raw: (props) => props.ui === 'raw',
'electron-windows': (props) => props.desktop && props.desktop.includes('windows'),
'electron-mac': (props) => props.desktop && props.desktop.includes('mac'),
'electron-linux': (props) => props.desktop && props.desktop.includes('linux'),
'tools-hads': (props) => props.tools && props.tools.includes('hads'),
'tools-jest': (props) => props.tools && props.tools.includes('jest'),
'tools-karma': (props) => props.tools && !props.tools.includes('jest'),
e2e: (props) => props.e2e,
cypress: (props) => !props.e2e && props.features && props.features.includes('cypress'),
husky: (props) => props.initGit && props.tools.includes('prettier')
})
});
|
generators/app/index.js
|
const path = require('path');
const process = require('process');
const fs = require('fs-extra');
const chalk = require('chalk');
const Insight = require('insight');
const semver = require('semver');
const replace = require('replace-in-file');
const Generator = require('@ngx-rocket/core');
const asciiLogo = require('@ngx-rocket/ascii-logo');
const pkg = require('../../package.json');
const prompts = require('./prompts.js');
const options = require('./options.js');
const getLanguages = require('./languages.js');
const {deployers} = require('./deployers.js');
const packageJsonFile = 'package.json';
const appPath = 'src/app';
class NgxGenerator extends Generator {
initializing() {
this.version = pkg.version;
this.props = {};
// Disable automatic env install based on package.json
this.features.customInstallTask = true;
// Try to initialize analytics. Insight is broken for some users, so if it fails, proceed as if the --no-analytics flag was present.
try {
this.insight =
!this.options.analytics || process.env.DISABLE_NGX_ANALYTICS
? {track: () => {}}
: new Insight({trackingCode: 'UA-93069862-1', pkg});
} catch {
this.insight = {track: () => {}};
this.log(
chalk.yellow(
'There was a problem collecting analytics data. Proceeding without anonymous usage tracking. To suppress this warning in the future, use the --no-analytics flag.'
)
);
}
const minNodeVersion = pkg.engines.node.slice(2);
if (semver.lt(process.version, minNodeVersion)) {
this.log(chalk.yellow(`Angular CLI v13 needs NodeJS ${minNodeVersion} or greater.`));
this.log(chalk.yellow(`You are using ${process.version} which is unsupported, please upgrade.\n`));
// eslint-disable-next-line unicorn/no-process-exit
process.exit(-1);
}
this.argument('appName', {
description: 'Name of the app to generate',
type: String,
required: false
});
this.insight.optOut = !this.options.analytics || process.env.DISABLE_NGX_ANALYTICS;
if (this.options.raw) {
this.props.ui = 'raw';
}
if (this.options['location-strategy']) {
this.props.location = this.options['location-strategy'];
}
this.props.strict = this.options.strict;
this.props.skipInstall = this.options['skip-install'];
this.props.skipQuickstart = this.options['skip-quickstart'];
this.props.initGit = this.options.git;
this.props.usePrefix = this.options.prefix;
if (this.options.deploy) {
this.props.deploy = this.options.deploy;
}
// Updating
let fromVersion = null;
if (this.options.update) {
this.props = this.config.get('props') || this.props;
fromVersion = this.config.get('version');
}
if (fromVersion) {
if (semver.gte(fromVersion, this.version)) {
this.log(chalk.green('\nNothing to update, it’s all good!\n'));
// eslint-disable-next-line unicorn/no-process-exit
process.exit(0);
}
this.updating = true;
this.log(
`\nUpdating ${chalk.green(this.props.appName)} project (${chalk.yellow(fromVersion)} -> ${chalk.yellow(
this.version
)})\n`
);
this.log(`${chalk.yellow('Make sure you don’t have uncommitted changes before overwriting files!')}`);
this.insight.track('update', fromVersion, 'to', this.version);
} else if (!this.options['skip-welcome']) {
this.log(asciiLogo(pkg.version));
}
// Composition
const addonsOption = this.options.addons;
this.addons = addonsOption ? addonsOption.split(' ') : [];
this.addons.forEach((addon) => {
try {
if (/[:/]/.test(addon)) {
// Fetch addon name from URL/GitHub/Local package format
let splitIndex = addon.lastIndexOf('/');
splitIndex = splitIndex === -1 ? addon.lastIndexOf(':') : splitIndex;
addon = addon.slice(splitIndex + 1);
}
if (addon.startsWith('generator-')) {
// This prefix must be removed for Yeoman to work properly
addon = addon.slice(10);
}
if (addon.endsWith('.git')) {
// When working with git repos, this suffix must be removed
addon = addon.slice(0, -4);
}
this.composeWith(addon, this.options);
} catch {
this.log(chalk.red(`Error: add-on "${addon}" not found.`));
// eslint-disable-next-line unicorn/no-process-exit
process.exit(-1);
}
});
this.insight.track('version', this.version);
this.insight.track('node', process.version);
this.insight.track('platform', process.platform);
this.insight.track('addons', addonsOption);
}
async prompting() {
// Allow to pre-set any props in an add-on generator
Object.assign(this.props, this.sharedProps);
await super.prompting();
this.props.mobile = this.props.mobile || [];
this.props.desktop = this.props.desktop || [];
this.props.utility = this.props.utility || [];
this.props.tools = this.props.tools || [];
this.props.languages = this.props.languages || ['en-US', 'fr-FR'];
this.props.usePrefix = typeof this.props.usePrefix === 'boolean' ? this.props.usePrefix : true;
this.props.deploy = this.props.deploy || 'none';
this.props.features = this.props.features || [];
this.props.pwa = this.props.features.includes('pwa');
this.props.auth = this.props.features.includes('auth');
this.props.lazy = this.props.features.includes('lazy');
this.props.e2e = this.props.tools.includes('protractor'); // Legacy e2e
this.props.cypress = this.props.features.includes('cypress') && !this.props.e2e;
this.props.angulartics = this.props.features.includes('angulartics');
this.shareProps(this.props);
}
configuring() {
// Add prefix rules for languages
getLanguages().forEach((language) => {
this._prefixRules[language] = (props) => props.languages.includes(language);
});
this.insight.track(
'generator',
this.props.target,
this.props.target.includes('web') && this.props.pwa ? 'pwa' : '',
this.props.target.includes('cordova') ? this.props.mobile : '',
this.props.ui,
this.props.auth ? 'auth' : 'no-auth'
);
this.insight.track('package-manager', this.packageManager);
if (this.props.target.includes('cordova') && this.packageManager === 'yarn') {
this.log(chalk.yellow('\nWarning: Using Yarn with Cordova is NOT recommended!'));
this.log(chalk.yellow('Cordova still uses NPM to fetch packages, causing issues with Yarn.\n'));
}
}
install() {
if (!this.props.usePrefix) {
this.log(`\nConfiguring prefix, please wait…`);
const clientPath = this.isFullstack ? process.env.NGX_CLIENT_PATH : '';
const basePath = this.destinationPath(path.join(clientPath, appPath));
try {
// Rename folders
fs.removeSync(path.join(basePath, 'core'));
fs.removeSync(path.join(basePath, 'shared'));
fs.renameSync(path.join(basePath, '@shared'), path.join(basePath, 'shared'));
// Replace imports in files
const options = {files: 'src/**/*.ts'};
replace.sync({...options, from: /@shared/g, to: '@app/shared'});
} catch (error) {
this.log(`${chalk.red('An error occured during prefix config:')}\n${error && error.message}`);
}
}
if (this.props.initGit) {
this.spawnCommandSync('git', ['init', '--quiet']);
}
if (!this.props.skipInstall) {
this.log(`\nRunning ${chalk.yellow(`${this.packageManager} install`)}, please wait…`);
const install = this.packageManager === 'yarn' ? this.yarnInstall.bind(this) : this.npmInstall.bind(this);
// When using NPM, force install as peer dependencies with ionic-native packages
// cause install errors with NPM >= 7.
const options = this.packageManager === 'yarn' ? null : {force: true};
if (fs.existsSync(this.destinationPath(packageJsonFile))) {
install(null, options);
}
if (this.isFullstack) {
if (fs.existsSync(this.destinationPath(path.join(process.env.NGX_CLIENT_PATH, packageJsonFile)))) {
install(null, options, {cwd: this.destinationPath(process.env.NGX_CLIENT_PATH)});
}
if (fs.existsSync(this.destinationPath(path.join(process.env.NGX_SERVER_PATH, packageJsonFile)))) {
install(null, options, {cwd: this.destinationPath(process.env.NGX_SERVER_PATH)});
}
}
}
}
end() {
const deployer = deployers.find((d) => d.value === this.props.deploy);
if (this.props.deploy !== 'none') {
this.log(`\nConfiguring deployment with ${chalk.cyan(deployer.name)}, please wait…\n`);
const result = this.spawnCommandSync('ng', ['add', deployer.package]);
if (result.error) {
this.log(`${chalk.red('Something went wrong during deployment configuration :(')}`);
this.log(`You can retry manually using ${chalk.yellow(`npx ng add ${deployer.package}`)}`);
}
}
if (this.updating) {
this.log(`\nUpdated ${chalk.green(this.props.appName)} to ${chalk.yellow(this.version)} successfully!`);
return;
}
if (this.props.skipQuickstart) {
return;
}
this.log('\nAll done! Get started with these tasks:');
this.log(
`- $ ${chalk.green(`${this.packageManager} start`)}: start dev server with live reload on http://localhost:4200`
);
if (this.props.target.includes('web')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run build`)}: build web app for production`);
if (this.props.deploy !== 'none') {
this.log(`- $ ${chalk.green(`${this.packageManager} run deploy`)}: deploy app to ${deployer.name}`);
}
}
if (this.props.target.includes('cordova')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run cordova:prepare`)}: prepare for building mobile app`);
this.log(`- $ ${chalk.green(`${this.packageManager} run cordova:run`)}: run app on device or simulator`);
this.log(`- $ ${chalk.green(`${this.packageManager} run cordova:build`)}: build mobile app for production`);
}
if (this.props.target.includes('electron')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run electron:build`)}: build app for electron`);
this.log(`- $ ${chalk.green(`${this.packageManager} run electron:run`)}: run app in electron`);
this.log(
`- $ ${chalk.green(
`${this.packageManager} run electron:package`
)}: package executables for all selected platforms`
);
}
this.log(`- $ ${chalk.green(`${this.packageManager} test`)}: run unit tests in watch mode for TDD`);
this.log(`- $ ${chalk.green(`${this.packageManager} run test:ci`)}: lint code and run units tests with coverage`);
if (this.props.e2e || this.props.cypress) {
this.log(`- $ ${chalk.green(`${this.packageManager} run e2e`)}: launch e2e tests`);
}
if (this.props.tools.includes('hads')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run docs`)}: show docs and coding guides`);
}
if (this.props.tools.includes('compodoc')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run compodoc`)}: generates docs from code`);
}
if (this.props.tools.includes('prettier')) {
this.log(`- $ ${chalk.green(`${this.packageManager} run prettier`)}: format your code automatically`);
}
}
}
module.exports = Generator.make({
baseDir: __dirname,
generator: NgxGenerator,
options,
prompts,
prefixRules: Object.assign(Generator.defaultPrefixRules, {
'ionic-tabs': (props) => props.ui === 'ionic' && props.layout === 'tabs',
'ionic-side-menu': (props) => props.ui === 'ionic' && props.layout === 'side-menu',
'material-simple': (props) => props.ui === 'material' && props.layout === 'simple',
'material-side-menu': (props) => props.ui === 'material' && props.layout === 'side-menu',
raw: (props) => props.ui === 'raw',
'electron-windows': (props) => props.desktop && props.desktop.includes('windows'),
'electron-mac': (props) => props.desktop && props.desktop.includes('mac'),
'electron-linux': (props) => props.desktop && props.desktop.includes('linux'),
'tools-hads': (props) => props.tools && props.tools.includes('hads'),
'tools-jest': (props) => props.tools && props.tools.includes('jest'),
'tools-karma': (props) => props.tools && !props.tools.includes('jest'),
e2e: (props) => !props.features || props.features.includes('e2e'),
cypress: (props) => !props.features || props.features.includes('cypress'),
husky: (props) => props.initGit && props.tools.includes('prettier')
})
});
|
fix: e2e/cypress templates
|
generators/app/index.js
|
fix: e2e/cypress templates
|
<ide><path>enerators/app/index.js
<ide> 'tools-hads': (props) => props.tools && props.tools.includes('hads'),
<ide> 'tools-jest': (props) => props.tools && props.tools.includes('jest'),
<ide> 'tools-karma': (props) => props.tools && !props.tools.includes('jest'),
<del> e2e: (props) => !props.features || props.features.includes('e2e'),
<del> cypress: (props) => !props.features || props.features.includes('cypress'),
<add> e2e: (props) => props.e2e,
<add> cypress: (props) => !props.e2e && props.features && props.features.includes('cypress'),
<ide> husky: (props) => props.initGit && props.tools.includes('prettier')
<ide> })
<ide> });
|
|
Java
|
apache-2.0
|
cea17fe514740d4734fd36b681f7a52f607e57cb
| 0 |
JoshSharpe/java-sdk,supunucsc/java-sdk,watson-developer-cloud/java-sdk,supunucsc/java-sdk,supunucsc/java-sdk,watson-developer-cloud/java-sdk,watson-developer-cloud/java-sdk,watson-developer-cloud/java-sdk,JoshSharpe/java-sdk,JoshSharpe/java-sdk
|
/*
* Copyright 2015 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.ibm.watson.developer_cloud.alchemy.v1.model;
import java.util.List;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.ibm.watson.developer_cloud.alchemy.v1.AlchemyLanguage;
/**
* Typed relation between {@link TypedEntity}.
* @see AlchemyLanguage#getTypedRelations(java.util.Map)
*/
public class TypedRelation {
private String text;
private String type;
private Double score;
@JsonAdapter(TypedEntitiesAdapter.class)
@SerializedName("arguments")
private List<TypedEntity> entities;
/**
* Gets the text.
*
* @return The text
*/
public String getText() {
return text;
}
/**
* Sets the text.
*
* @param text The text
*/
public void setText(String text) {
this.text = text;
}
/**
* Gets the type.
*
* @return The type
*/
public String getType() {
return type;
}
/**
* Sets the type.
*
* @param type The type
*/
public void setType(String type) {
this.type = type;
}
/**
* Gets the score.
*
* @return The score
*/
public Double getScore() {
return score;
}
/**
* Sets the score.
*
* @param score The score
*/
public void setScore(Double score) {
this.score = score;
}
/**
* @return the entities
*/
public List<TypedEntity> getEntities() {
return entities;
}
/**
* @param entities the entities to set
*/
public void setEntities(List<TypedEntity> entities) {
this.entities = entities;
}
}
|
src/main/java/com/ibm/watson/developer_cloud/alchemy/v1/model/TypedRelation.java
|
/*
* Copyright 2015 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.ibm.watson.developer_cloud.alchemy.v1.model;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.ibm.watson.developer_cloud.alchemy.v1.AlchemyLanguage;
import java.util.List;
/**
* Typed relation between {@link TypedEntity}.
* @see AlchemyLanguage#getTypedRelations(java.util.Map)
*/
public class TypedRelation {
private String text;
private String type;
private Double score;
@JsonAdapter(TypedEntitiesAdapter.class)
@SerializedName("arguments")
private List<TypedEntity> entities;
/**
* Gets the text.
*
* @return The text
*/
public String getText() {
return text;
}
/**
* Sets the text.
*
* @param text The text
*/
public void setText(String text) {
this.text = text;
}
/**
* Gets the type.
*
* @return The type
*/
public String getType() {
return type;
}
/**
* Sets the type.
*
* @param type The type
*/
public void setType(String type) {
this.type = type;
}
/**
* Gets the score.
*
* @return The score
*/
public Double getScore() {
return score;
}
/**
* Sets the score.
*
* @param score The score
*/
public void setScore(Double score) {
this.score = score;
}
/**
* @return the entities
*/
public List<TypedEntity> getEntities() {
return entities;
}
/**
* @param entities the entities to set
*/
public void setEntities(List<TypedEntity> entities) {
this.entities = entities;
}
}
|
Fixing imports
|
src/main/java/com/ibm/watson/developer_cloud/alchemy/v1/model/TypedRelation.java
|
Fixing imports
|
<ide><path>rc/main/java/com/ibm/watson/developer_cloud/alchemy/v1/model/TypedRelation.java
<ide>
<ide> package com.ibm.watson.developer_cloud.alchemy.v1.model;
<ide>
<add>import java.util.List;
<add>
<ide> import com.google.gson.annotations.JsonAdapter;
<ide> import com.google.gson.annotations.SerializedName;
<ide> import com.ibm.watson.developer_cloud.alchemy.v1.AlchemyLanguage;
<del>
<del>import java.util.List;
<ide>
<ide> /**
<ide> * Typed relation between {@link TypedEntity}.
|
|
JavaScript
|
mit
|
784e308a5e80dba824a4f297ea1edaea4b86e81b
| 0 |
pownjs/pown,pownjs/pown
|
exports.yargs = {
command: 'install <modules...>',
describe: 'Install modules',
aliases: ['i'],
builder: (yargs) => {
yargs.option('development', {
type: 'boolean',
describe: 'Install development.',
alias: ['o'],
default: false
})
},
handler: async(yargs) => {
const { modules = [], development } = yargs
const util = require('util')
const { writeFile } = require('fs')
const { spawn } = require('child_process')
const { ensurePreferencesFilename, getPreferencesFilename, getPreferencesDirectory } = require('@pown/preferences')
const spawnAsync = util.promisify(spawn)
const writeFileAsync = util.promisify(writeFile)
await ensurePreferencesFilename('modules', 'package.json')
await writeFileAsync(getPreferencesFilename('modules', '.npmrc'), 'package-lock=false\n')
const dirname = getPreferencesDirectory('modules')
await spawnAsync('npm', ['install', ...modules, ...(development ? [] : ['--production'])], { stdio: 'inherit', cwd: dirname })
}
}
|
commands/modules/sub/install.js
|
exports.yargs = {
command: 'install <modules...>',
describe: 'Install modules',
aliases: ['i'],
handler: async(yargs) => {
const { modules = [] } = yargs
const util = require('util')
const { writeFile } = require('fs')
const { spawn } = require('child_process')
const { ensurePreferencesFilename, getPreferencesFilename, getPreferencesDirectory } = require('@pown/preferences')
const spawnAsync = util.promisify(spawn)
const writeFileAsync = util.promisify(writeFile)
await ensurePreferencesFilename('modules', 'package.json')
await writeFileAsync(getPreferencesFilename('modules', '.npmrc'), 'package-lock=false\n')
const dirname = getPreferencesDirectory('modules')
await spawnAsync('npm', ['install', ...modules], { stdio: 'inherit', cwd: dirname })
}
}
|
Ensure all modules are installed for production use.
|
commands/modules/sub/install.js
|
Ensure all modules are installed for production use.
|
<ide><path>ommands/modules/sub/install.js
<ide> describe: 'Install modules',
<ide> aliases: ['i'],
<ide>
<add> builder: (yargs) => {
<add> yargs.option('development', {
<add> type: 'boolean',
<add> describe: 'Install development.',
<add> alias: ['o'],
<add> default: false
<add> })
<add> },
<add>
<ide> handler: async(yargs) => {
<del> const { modules = [] } = yargs
<add> const { modules = [], development } = yargs
<ide>
<ide> const util = require('util')
<ide> const { writeFile } = require('fs')
<ide>
<ide> const dirname = getPreferencesDirectory('modules')
<ide>
<del> await spawnAsync('npm', ['install', ...modules], { stdio: 'inherit', cwd: dirname })
<add> await spawnAsync('npm', ['install', ...modules, ...(development ? [] : ['--production'])], { stdio: 'inherit', cwd: dirname })
<ide> }
<ide> }
|
|
Java
|
apache-2.0
|
3a7a9be1f5e13133867a66dcae58560e130af63c
| 0 |
googleinterns/step227-2020,googleinterns/step227-2020,googleinterns/step227-2020
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.sps.servlets;
import com.google.appengine.api.datastore.*;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.gson.Gson;
import com.google.sps.data.User;
import java.io.IOException;
import java.util.*;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/** Responsible for storing user info. */
@WebServlet("/user-info")
public class ProfileInfoServlet extends HttpServlet {
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
UserService userService = UserServiceFactory.getUserService();
// Check if user is logged in.
if (!userService.isUserLoggedIn()) {
System.out.println("ERROR:You are not logged in!");
response.sendRedirect("/index.html");
return;
}
// Get the input from the form.
String firstName = getParameter(request, "first-name", "Not set");
String lastName = getParameter(request, "last-name", "Not set");
String nickname = getParameter(request, "nickname", "Anonym");
boolean notifications;
if ((getParameter(request, "radio", "mute")) == "mute") {
notifications = false;
} else {
notifications = true;
}
// Get user's email.
String email = userService.getCurrentUser().getEmail();
// Store the comments as entities.
Entity userEntity = new Entity("User", email);
userEntity.setProperty("firstName", firstName);
userEntity.setProperty("lastName", lastName);
userEntity.setProperty("nickname", nickname);
userEntity.setProperty("notifications", notifications);
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
datastore.put(userEntity);
// Redirect back to the HTML page - comments section.
response.sendRedirect("/profile.html");
}
/**
* Return the request parameter, or the default value if the parameter was not specified by the
* client.
*/
private String getParameter(HttpServletRequest request, String name, String defaultValue) {
String value = request.getParameter(name);
if (value == null || value.isEmpty()) {
return defaultValue;
}
return value;
}
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
UserService userService = UserServiceFactory.getUserService();
Key userKey = KeyFactory.createKey("User", userService.getCurrentUser().getEmail());
User currentUser;
try {
Entity userEntity = datastore.get(userKey);
currentUser =
new User(
(String) userEntity.getProperty("firstName"),
(String) userEntity.getProperty("lastName"),
(String) userEntity.getProperty("nickname"),
(boolean) userEntity.getProperty("notifications"));
} catch (Exception e) {
currentUser = new User("Set first name...", "Set last name...", "Set nickname...", false);
}
Gson gson = new Gson();
// Respond with the user details.
response.setContentType("application/json;");
response.getWriter().println(gson.toJson(currentUser));
}
}
|
src/main/java/com/google/sps/servlets/ProfileInfoServlet.java
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.sps.servlets;
import com.google.appengine.api.datastore.*;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.gson.Gson;
import com.google.sps.data.User;
import java.io.IOException;
import java.util.*;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/** Responsible for storing user info. */
@WebServlet("/user-info")
public class ProfileInfoServlet extends HttpServlet {
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
UserService userService = UserServiceFactory.getUserService();
// Check if user is logged in.
if (!userService.isUserLoggedIn()) {
System.out.println("ERROR:You are not logged in!");
response.sendRedirect("/index.html");
return;
}
// Get the input from the form.
String fname = getParameter(request, "first -name", "Not set");
String lname = getParameter(request, "last-name", "Not set");
String nickname = getParameter(request, "nickname", "Anonym");
boolean notifications;
if ((getParameter(request, "radio", "mute")) == "mute") {
notifications = false;
} else {
notifications = true;
}
// Get user's email.
String email = userService.getCurrentUser().getEmail();
// Store the comments as entities.
Entity userEntity = new Entity("User", email);
userEntity.setProperty("firstName", fname);
userEntity.setProperty("lastName", lname);
userEntity.setProperty("nickname", nickname);
userEntity.setProperty("notifications", notifications);
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
datastore.put(userEntity);
// Redirect back to the HTML page - comments section.
response.sendRedirect("/profile.html");
}
/**
* Return the request parameter, or the default value if the parameter was not specified by the
* client.
*/
private String getParameter(HttpServletRequest request, String name, String defaultValue) {
String value = request.getParameter(name);
if (value == null || value.isEmpty()) {
return defaultValue;
}
return value;
}
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
UserService userService = UserServiceFactory.getUserService();
Key userKey = KeyFactory.createKey("User", userService.getCurrentUser().getEmail());
User currentUser;
try {
Entity userEntity = datastore.get(userKey);
currentUser =
new User(
(String) userEntity.getProperty("firstName"),
(String) userEntity.getProperty("lastName"),
(String) userEntity.getProperty("nickname"),
(boolean) userEntity.getProperty("notifications"));
} catch (Exception e) {
currentUser = new User("Set first name...", "Set last name...", "Set nickname...", false);
}
Gson gson = new Gson();
// Respond with the user details.
response.setContentType("application/json;");
response.getWriter().println(gson.toJson(currentUser));
}
}
|
Ignore annotation processor warnings
|
src/main/java/com/google/sps/servlets/ProfileInfoServlet.java
|
Ignore annotation processor warnings
|
<ide><path>rc/main/java/com/google/sps/servlets/ProfileInfoServlet.java
<ide> return;
<ide> }
<ide> // Get the input from the form.
<del> String fname = getParameter(request, "first -name", "Not set");
<del> String lname = getParameter(request, "last-name", "Not set");
<add> String firstName = getParameter(request, "first-name", "Not set");
<add> String lastName = getParameter(request, "last-name", "Not set");
<ide> String nickname = getParameter(request, "nickname", "Anonym");
<ide> boolean notifications;
<ide> if ((getParameter(request, "radio", "mute")) == "mute") {
<ide>
<ide> // Store the comments as entities.
<ide> Entity userEntity = new Entity("User", email);
<del> userEntity.setProperty("firstName", fname);
<del> userEntity.setProperty("lastName", lname);
<add> userEntity.setProperty("firstName", firstName);
<add> userEntity.setProperty("lastName", lastName);
<ide> userEntity.setProperty("nickname", nickname);
<ide> userEntity.setProperty("notifications", notifications);
<ide>
|
|
JavaScript
|
mit
|
0565a6bab57cc0b997d21e6f04fa4f5871bcfdbf
| 0 |
wistityhq/strapi-generate-users
|
'use strict';
/**
* Module dependencies
*/
// Public node modules
const _ = require('lodash');
const bcrypt = require('bcryptjs');
const pluralize = require('pluralize');
/**
* Authorization service
*/
module.exports = {
/**
* Helper used to hash the password of a `user`.
*
* @param {Object} user
* @param {Function} next
*/
hashPassword: function (user, next) {
if (!user.hasOwnProperty('password') || !user.password || this.isHashed(user.password)) {
next(null, user);
} else {
bcrypt.hash(user.password, 10, function (err, hash) {
user.password = hash;
next(err, user);
});
}
},
/**
* Check if the password is already a hash.
*
* @param {String} password
* @returns {boolean}
*/
isHashed: function (password) {
if (typeof password !== 'string' || !password) {
return false;
}
return password.split('$').length === 4;
},
/**
* Check is the user has the roles needed for
* the current route.
*
* @param _ctx
*
* @return {boolean}
*/
isUserAuthorized: function * (_ctx) {
// Init variables;
let user;
let route;
// Get and verify JWT via service.
try {
// User is authenticated.
user = yield strapi.api.user.services.jwt.getToken(_ctx, true);
// Store user id to request object.
_ctx.user = yield User.findOne(user.id).populate('roles');
// We delete the token from query and body to not mess.
_ctx.request.query && delete _ctx.request.query.token;
_ctx.request.body && delete _ctx.request.body.token;
} catch (err) {
}
// User is admin.
if (_ctx.user && _ctx.user.roles && _.find(_ctx.user.roles, {name: 'admin'})) {
return true;
}
// Find the current route and its authorized roles.
route = yield strapi.orm.collections.route.findOne({
name: _.trim(_ctx.request.route.endpoint)
}).populate('roles');
// Route not found.
if (!route) {
throw Error('Route not found');
}
// Check if _ctx route is public.
if (route.isPublic === true) {
return true;
}
// The user is not connected.
if (!_ctx.user) {
return false;
}
// Registered.
if (user.id && route.registeredAuthorized === true && !route.contributorsAuthorized) {
return true;
}
// Map the list of roles.
const authorizedRoles = _.isArray(route.roles) ? _.map(route.roles, 'name') : [];
let entry;
// Owner policy.
if (_ctx.request.route.controller && route.contributorsAuthorized === true) {
const controller = _ctx.request.route.controller && _ctx.request.route.controller.toLowerCase();
if (_ctx.params.id) {
// Specific behavior if the model requested is `user`.
if (_ctx.request.route.controller.toLowerCase() === 'user') {
// Attempting to find a user.
const userFound = yield strapi.orm.collections.user.findOne(_ctx.params.id);
// Check if the user found has the same `id that the authenticated user.
return userFound && userFound.id === user.id;
} else {
entry = yield strapi.orm.collections[controller].findOne(_ctx.params.id).populate('contributors');
if (entry && entry.contributors && _ctx.user && _ctx.user.id) {
// The authenticated `user` is a contributor.
return _.find(entry.contributors, {id: _ctx.user.id});
} else {
// Default behavior.
return false;
}
}
} else if (_ctx.request.route.verb && _ctx.request.route.verb.toLowerCase() === 'get') {
// Specific behavior if the model requested is `user`.
if (_ctx.request.route.controller.toLowerCase() === 'user') {
// Set the default `where` object.
_ctx.request.query.where = _ctx.request.query.where || {};
_ctx.request.query.where.id = [user.id];
} else {
// Pluralize the controller name in order to have the relation name.
const relation = pluralize.plural(route.controller).toLowerCase();
// Format request for `GET` requests (eg. the user will receive only the items he is contributor to).
yield formatGetRequest(user, relation, _ctx);
}
return true;
} else {
// Default behavior.
return false;
}
}
// Check by roles.
// user.roles is an empty array, so switching to _ctx.user.roles
let userRole;
for (let i = 0; i < _ctx.user.roles.length; i++) {
userRole = _ctx.user.roles[i].name;
if (userRole && _.contains(authorizedRoles, userRole)) {
return true;
}
}
// Defaults to `false`.
return false;
}
};
/**
* Format the `_ctx.request` object in order
* to filter sent items.
*
* @param user
* @param relation
* @param _ctx
*/
function * formatGetRequest(user, relation, _ctx) {
// Find the user and populate with the relation.
const userFound = yield strapi.orm.collections.user.findOne({
id: user && user.id
}).populate(relation);
// User not found.
if (!userFound) {
throw Error('User not found');
}
// Set the default `where` object.
_ctx.request.query.where = _ctx.request.query.where || {};
// The blueprints will filter the items by IDs.
_ctx.request.query.where.id = _.map(userFound[relation], function (item) {
return item.id;
});
}
|
files/api/user/services/user.js
|
'use strict';
/**
* Module dependencies
*/
// Public node modules
const _ = require('lodash');
const bcrypt = require('bcryptjs');
const pluralize = require('pluralize');
/**
* Authorization service
*/
module.exports = {
/**
* Helper used to hash the password of a `user`.
*
* @param {Object} user
* @param {Function} next
*/
hashPassword: function (user, next) {
if (!user.hasOwnProperty('password') || !user.password || this.isHashed(user.password)) {
next(null, user);
} else {
bcrypt.hash(user.password, 10, function (err, hash) {
user.password = hash;
next(err, user);
});
}
},
/**
* Check if the password is already a hash.
*
* @param {String} password
* @returns {boolean}
*/
isHashed: function (password) {
if (typeof password !== 'string' || !password) {
return false;
}
return password.split('$').length === 4;
},
/**
* Check is the user has the roles needed for
* the current route.
*
* @param _ctx
*
* @return {boolean}
*/
isUserAuthorized: function * (_ctx) {
// Init variables;
let user;
let route;
// Get and verify JWT via service.
try {
// User is authenticated.
user = yield strapi.api.user.services.jwt.getToken(_ctx, true);
// Store user id to request object.
_ctx.user = yield User.findOne(user.id).populate('roles');
// We delete the token from query and body to not mess.
_ctx.request.query && delete _ctx.request.query.token;
_ctx.request.body && delete _ctx.request.body.token;
} catch (err) {
}
// User is admin.
if (_ctx.user && _ctx.user.roles && _.find(_ctx.user.roles, {name: 'admin'})) {
return true;
}
// Find the current route and its authorized roles.
route = yield strapi.orm.collections.route.findOne({
name: _.trim(_ctx.request.route.endpoint)
}).populate('roles');
// Route not found.
if (!route) {
throw Error('Route not found');
}
// Check if _ctx route is public.
if (route.isPublic === true) {
return true;
}
// The user is not connected.
if (!_ctx.user) {
return false;
}
// Registered.
if (user.id && route.registeredAuthorized === true && !route.contributorsAuthorized) {
return true;
}
// Map the list of roles.
const authorizedRoles = _.isArray(route.roles) ? _.map(route.roles, 'name') : [];
let entry;
// Owner policy.
if (_ctx.request.route.controller && route.contributorsAuthorized === true) {
const controller = _ctx.request.route.controller && _ctx.request.route.controller.toLowerCase();
if (_ctx.params.id) {
// Specific behavior if the model requested is `user`.
if (_ctx.request.route.controller.toLowerCase() === 'user') {
// Attempting to find a user.
const userFound = yield strapi.orm.collections.user.findOne(_ctx.params.id);
// Check if the user found has the same `id that the authenticated user.
return userFound && userFound.id === user.id;
} else {
entry = yield strapi.orm.collections[controller].findOne(_ctx.params.id).populate('contributors');
if (entry && entry.contributors && _ctx.user && _ctx.user.id) {
// The authenticated `user` is a contributor.
return _.find(entry.contributors, {id: _ctx.user.id});
} else {
// Default behavior.
return false;
}
}
} else if (_ctx.request.route.verb && _ctx.request.route.verb.toLowerCase() === 'get') {
// Specific behavior if the model requested is `user`.
if (_ctx.request.route.controller.toLowerCase() === 'user') {
// Set the default `where` object.
_ctx.request.query.where = _ctx.request.query.where || {};
_ctx.request.query.where.id = [user.id];
} else {
// Pluralize the controller name in order to have the relation name.
const relation = pluralize.plural(route.controller).toLowerCase();
// Format request for `GET` requests (eg. the user will receive only the items he is contributor to).
yield formatGetRequest(user, relation, _ctx);
}
return true;
} else {
// Default behavior.
return false;
}
}
// Check by roles.
let userRole;
for (let i = 0; i < user.roles.length; i++) {
userRole = user.roles[i].name;
if (userRole && _.contains(authorizedRoles, userRole)) {
return true;
}
}
// Defaults to `false`.
return false;
}
};
/**
* Format the `_ctx.request` object in order
* to filter sent items.
*
* @param user
* @param relation
* @param _ctx
*/
function * formatGetRequest(user, relation, _ctx) {
// Find the user and populate with the relation.
const userFound = yield strapi.orm.collections.user.findOne({
id: user && user.id
}).populate(relation);
// User not found.
if (!userFound) {
throw Error('User not found');
}
// Set the default `where` object.
_ctx.request.query.where = _ctx.request.query.where || {};
// The blueprints will filter the items by IDs.
_ctx.request.query.where.id = _.map(userFound[relation], function (item) {
return item.id;
});
}
|
Fixing Validation of Role Based Authorization.
|
files/api/user/services/user.js
|
Fixing Validation of Role Based Authorization.
|
<ide><path>iles/api/user/services/user.js
<ide> }
<ide>
<ide> // Check by roles.
<add> // user.roles is an empty array, so switching to _ctx.user.roles
<ide> let userRole;
<del> for (let i = 0; i < user.roles.length; i++) {
<del> userRole = user.roles[i].name;
<add> for (let i = 0; i < _ctx.user.roles.length; i++) {
<add> userRole = _ctx.user.roles[i].name;
<ide> if (userRole && _.contains(authorizedRoles, userRole)) {
<ide> return true;
<ide> }
|
|
Java
|
apache-2.0
|
c1ef7ce3b6cad33313b9193b6bb62e65ab9e847e
| 0 |
whumph/sakai,kwedoff1/sakai,kingmook/sakai,ouit0408/sakai,rodriguezdevera/sakai,lorenamgUMU/sakai,colczr/sakai,zqian/sakai,whumph/sakai,introp-software/sakai,pushyamig/sakai,udayg/sakai,kingmook/sakai,colczr/sakai,ouit0408/sakai,tl-its-umich-edu/sakai,pushyamig/sakai,clhedrick/sakai,buckett/sakai-gitflow,frasese/sakai,udayg/sakai,puramshetty/sakai,rodriguezdevera/sakai,rodriguezdevera/sakai,frasese/sakai,noondaysun/sakai,rodriguezdevera/sakai,tl-its-umich-edu/sakai,noondaysun/sakai,noondaysun/sakai,wfuedu/sakai,liubo404/sakai,bzhouduke123/sakai,ktakacs/sakai,conder/sakai,puramshetty/sakai,buckett/sakai-gitflow,frasese/sakai,willkara/sakai,willkara/sakai,Fudan-University/sakai,whumph/sakai,tl-its-umich-edu/sakai,Fudan-University/sakai,colczr/sakai,colczr/sakai,noondaysun/sakai,surya-janani/sakai,duke-compsci290-spring2016/sakai,whumph/sakai,introp-software/sakai,lorenamgUMU/sakai,buckett/sakai-gitflow,hackbuteer59/sakai,duke-compsci290-spring2016/sakai,Fudan-University/sakai,bzhouduke123/sakai,kwedoff1/sakai,hackbuteer59/sakai,noondaysun/sakai,zqian/sakai,kingmook/sakai,bkirschn/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,bkirschn/sakai,OpenCollabZA/sakai,clhedrick/sakai,conder/sakai,Fudan-University/sakai,tl-its-umich-edu/sakai,kwedoff1/sakai,pushyamig/sakai,ouit0408/sakai,duke-compsci290-spring2016/sakai,noondaysun/sakai,surya-janani/sakai,kwedoff1/sakai,puramshetty/sakai,ouit0408/sakai,duke-compsci290-spring2016/sakai,liubo404/sakai,joserabal/sakai,Fudan-University/sakai,puramshetty/sakai,kingmook/sakai,zqian/sakai,buckett/sakai-gitflow,hackbuteer59/sakai,joserabal/sakai,kwedoff1/sakai,clhedrick/sakai,zqian/sakai,conder/sakai,Fudan-University/sakai,noondaysun/sakai,introp-software/sakai,pushyamig/sakai,pushyamig/sakai,OpenCollabZA/sakai,udayg/sakai,ktakacs/sakai,willkara/sakai,willkara/sakai,udayg/sakai,willkara/sakai,surya-janani/sakai,conder/sakai,udayg/sakai,bzhouduke123/sakai,tl-its-umich-edu/sakai,duke-compsci290-spring2016/sakai,clhedrick/sa
kai,whumph/sakai,ktakacs/sakai,clhedrick/sakai,OpenCollabZA/sakai,kwedoff1/sakai,OpenCollabZA/sakai,introp-software/sakai,buckett/sakai-gitflow,ouit0408/sakai,bzhouduke123/sakai,bkirschn/sakai,pushyamig/sakai,bzhouduke123/sakai,wfuedu/sakai,Fudan-University/sakai,pushyamig/sakai,joserabal/sakai,kingmook/sakai,hackbuteer59/sakai,zqian/sakai,surya-janani/sakai,lorenamgUMU/sakai,puramshetty/sakai,joserabal/sakai,wfuedu/sakai,willkara/sakai,whumph/sakai,conder/sakai,colczr/sakai,hackbuteer59/sakai,introp-software/sakai,duke-compsci290-spring2016/sakai,kwedoff1/sakai,liubo404/sakai,frasese/sakai,willkara/sakai,bkirschn/sakai,zqian/sakai,duke-compsci290-spring2016/sakai,bzhouduke123/sakai,tl-its-umich-edu/sakai,liubo404/sakai,colczr/sakai,wfuedu/sakai,puramshetty/sakai,colczr/sakai,rodriguezdevera/sakai,rodriguezdevera/sakai,whumph/sakai,liubo404/sakai,surya-janani/sakai,ouit0408/sakai,ktakacs/sakai,conder/sakai,kwedoff1/sakai,ouit0408/sakai,buckett/sakai-gitflow,zqian/sakai,bkirschn/sakai,joserabal/sakai,bkirschn/sakai,OpenCollabZA/sakai,rodriguezdevera/sakai,puramshetty/sakai,wfuedu/sakai,ktakacs/sakai,Fudan-University/sakai,surya-janani/sakai,noondaysun/sakai,ktakacs/sakai,OpenCollabZA/sakai,willkara/sakai,wfuedu/sakai,udayg/sakai,buckett/sakai-gitflow,tl-its-umich-edu/sakai,frasese/sakai,lorenamgUMU/sakai,lorenamgUMU/sakai,introp-software/sakai,frasese/sakai,rodriguezdevera/sakai,surya-janani/sakai,kingmook/sakai,ktakacs/sakai,OpenCollabZA/sakai,frasese/sakai,lorenamgUMU/sakai,hackbuteer59/sakai,wfuedu/sakai,whumph/sakai,joserabal/sakai,clhedrick/sakai,lorenamgUMU/sakai,hackbuteer59/sakai,hackbuteer59/sakai,lorenamgUMU/sakai,clhedrick/sakai,buckett/sakai-gitflow,ktakacs/sakai,colczr/sakai,introp-software/sakai,udayg/sakai,bzhouduke123/sakai,joserabal/sakai,surya-janani/sakai,puramshetty/sakai,udayg/sakai,joserabal/sakai,ouit0408/sakai,bkirschn/sakai,bkirschn/sakai,conder/sakai,liubo404/sakai,kingmook/sakai,frasese/sakai,liubo404/sakai,liubo404/sakai,clhedrick/sakai,pu
shyamig/sakai,wfuedu/sakai,zqian/sakai,conder/sakai,tl-its-umich-edu/sakai,bzhouduke123/sakai,OpenCollabZA/sakai,kingmook/sakai
|
/*********************************************************************************a
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.util;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Enumeration;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.exception.IdInvalidException;
/**
* <p>
* Validator is utility class that helps to validate stuff.
* </p>
*/
public class Validator
{
	/** Our logger. */
	private static Log M_log = LogFactory.getLog(Validator.class);
	/** These characters are not allowed in a resource id (rejected by checkResourceId, mapped to '_' by escapeResourceName). */
	public static final String INVALID_CHARS_IN_RESOURCE_ID = "^/\\{}[]()%*?#&=\n\r\t\b\f";
	/** These characters are not allowed in a user id (rejected by checkUserId). */
	protected static final String INVALID_CHARS_IN_USER_ID = "^/\\%*?\n\r\t\b\f";
	// Diacritic-folding tables used by escapeResourceName(): any character found in MAP_TO_A
	// is replaced by 'a', any in MAP_TO_E by 'e', and so on.
	// NOTE(review): most of these tables are empty, and MAP_TO_Y / MAP_TO_X contain garbled
	// characters — the accented-character contents appear to have been lost to an encoding
	// error, which makes the folding a no-op. Restore the tables from the upstream Sakai source.
	protected static final String MAP_TO_A = "";
	protected static final String MAP_TO_B = "";
	protected static final String MAP_TO_C = "";
	protected static final String MAP_TO_E = "";
	protected static final String MAP_TO_I = "";
	protected static final String MAP_TO_L = "";
	protected static final String MAP_TO_N = "";
	protected static final String MAP_TO_O = "";
	protected static final String MAP_TO_U = "";
	protected static final String MAP_TO_Y = "ش??";
	protected static final String MAP_TO_X = "?????";
	/**
	 * These characters are allowed; but if escapeResourceName() is called, they are escaped (actually, removed) Certain characters cause problems with filenames in certain OSes - so get rid of these characters in filenames
	 */
	protected static final String ESCAPE_CHARS_IN_RESOURCE_ID = ";'\"";
	/** Characters replaced with '_' by escapeZipEntry(). */
	protected static final String INVALID_CHARS_IN_ZIP_ENTRY = "/\\%:*?'\"";
	/** These characters are escaped when making a URL */
	// protected static final String ESCAPE_URL = "#%?&='\"+ ";
	// not '/' as that is assumed to be part of the path
	protected static final String ESCAPE_URL = "$&+,:;=?@ '\"<>#%{}|\\^~[]`";
	/**
	 * These can't be encoded in URLs safely even using %nn notation, so encode them using our own custom URL encoding
	 */
	protected static final String ESCAPE_URL_SPECIAL = "^?;";
	/** Valid special email local id characters (- those that are invalid resource ids) */
	protected static final String VALID_EMAIL = "abcdefghijklmnopqrstuvwxyz1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!#$&'*+-=?^_`{|}~.";
	/**
	 * Escape a plaintext string so that it can be output as part of an HTML document. Amperstand, greater-than, less-than, newlines, etc, will be escaped so that they display (instead of being interpreted as formatting).
	 *
	 * Delegates to {@link FormattedText#escapeHtml(String, boolean)} with newline escaping enabled.
	 *
	 * @param value
	 *        The string to escape.
	 * @return value fully escaped for HTML.
	 */
	public static String escapeHtml(String value)
	{
		return FormattedText.escapeHtml(value, true);
	}
	/**
	 * Escape a plaintext string so that it can be output as part of an HTML document, except that newlines are NOT escaped and therefore are treated as whitespace instead of displaying as line-breaks. Amperstand, greater-than, less-than, etc, will be
	 * escaped so that they display (instead of being interpreted as formatting).
	 *
	 * @param value
	 *        The string to escape.
	 * @return value fully escaped for HTML.
	 */
	public static String escapeHtmlSupressNewlines(String value)
	{
		return FormattedText.escapeHtml(value, false);
	}
	/**
	 * Escape plaintext for display inside a plain textarea.
	 * Same behavior as {@link #escapeHtmlSupressNewlines(String)}: newlines are left alone.
	 */
	public static String escapeHtmlTextarea(String value)
	{
		return FormattedText.escapeHtml(value, false);
	}
	/**
	 * Escape HTML-formatted text in preparation to include it in an HTML document.
	 * Delegates to {@link FormattedText#escapeHtmlFormattedText(String)}.
	 */
	public static String escapeHtmlFormattedText(String value)
	{
		return FormattedText.escapeHtmlFormattedText(value);
	}
	/**
	 * Escape HTML-formatted text in preparation to include it in an HTML document, except that HTML line breaks ("&lt;br /&gt;") will be supressed (removed).
	 */
	public static String escapeHtmlFormattedTextSupressNewlines(String value)
	{
		return FormattedText.escapeHtmlFormattedTextSupressNewlines(value);
	}
	/**
	 * Escapes the given HTML-formatted text for editing within the WYSIWYG editor. All HTML meta-characters in the string (such as amperstand, less-than, etc), will be escaped.
	 *
	 * @param value
	 *        The formatted text to escape
	 * @return The string to use as the value of the formatted textarea widget
	 */
	public static String escapeHtmlFormattedTextarea(String value)
	{
		return FormattedText.escapeHtmlFormattedTextarea(value);
	}
	/**
	 * escapeHtml(), but also fix the case where we start with the copyright entity and treat it as
	 * copyright (c). Note: ResourcesAction used to (before 1.1.05) place this as the copyright symbol. -ggolden
	 *
	 * NOTE(review): substring(6) assumes the matched prefix is the six-character entity "&amp;copy;".
	 * The literal in this file currently renders as a single "©" character (length 1), which would
	 * make substring(6) drop five extra characters — confirm the source-file encoding against the
	 * upstream Sakai repository before changing anything here.
	 */
	public static String escapeHtmlFixCopyright(String value)
	{
		if (value.startsWith("©"))
		{
			value = "copyright (c)" + value.substring(6);
		}
		return escapeHtml(value);
	} // escapeHtmlFixCopyright
/**
* Return a string based on id that is fully escaped using URL rules, using a UTF-8 underlying encoding.
*
* Note: java.net.URLEncode.encode() provides a more standard option
* FormattedText.decodeNumericCharacterReferences() undoes this op
*
* @param id
* The string to escape.
* @return id fully escaped using URL rules.
*/
public static String escapeUrl(String id)
{
if (id == null) return "";
id = id.trim();
try
{
// convert the string to bytes in UTF-8
byte[] bytes = id.getBytes("UTF-8");
StringBuilder buf = new StringBuilder();
for (int i = 0; i < bytes.length; i++)
{
byte b = bytes[i];
// escape ascii control characters, ascii high bits, specials
if (ESCAPE_URL_SPECIAL.indexOf((char) b) != -1)
{
buf.append("^^x"); // special funky way to encode bad URL characters
buf.append(toHex(b));
buf.append('^');
}
else if ((ESCAPE_URL.indexOf((char) b) != -1) || (b <= 0x1F) || (b == 0x7F) || (b >= 0x80))
{
buf.append("%");
buf.append(toHex(b));
}
else
{
buf.append((char) b);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeUrl: ", e);
return id;
}
} // escapeUrl
	/**
	 * Return a string based on id that is valid according to Resource name validity rules:
	 * accented characters are folded to their base letter via the MAP_TO_* tables, ascii
	 * control characters and invalid/escape resource-id characters become '_', and all
	 * other characters pass through unchanged.
	 *
	 * NOTE(review): the MAP_TO_* tables are currently empty or garbled (see the field
	 * declarations), so the diacritic-folding branches below are effectively dead code
	 * until the tables are restored.
	 *
	 * @param id
	 *        The string to escape (null is treated as the empty string; the value is trimmed first).
	 * @return id fully escaped using Resource name validity rules.
	 */
	public static String escapeResourceName(String id)
	{
		if (id == null) return "";
		id = id.trim();
		try
		{
			StringBuilder buf = new StringBuilder();
			for (int i = 0; i < id.length(); i++)
			{
				char c = id.charAt(i);
				// fold accented characters onto their unaccented base letter
				if (MAP_TO_A.indexOf(c) >= 0)
				{
					buf.append('a');
				}
				else if (MAP_TO_E.indexOf(c) >= 0)
				{
					buf.append('e');
				}
				else if (MAP_TO_I.indexOf(c) >= 0)
				{
					buf.append('i');
				}
				else if (MAP_TO_O.indexOf(c) >= 0)
				{
					buf.append('o');
				}
				else if (MAP_TO_U.indexOf(c) >= 0)
				{
					buf.append('u');
				}
				else if (MAP_TO_Y.indexOf(c) >= 0)
				{
					buf.append('y');
				}
				else if (MAP_TO_N.indexOf(c) >= 0)
				{
					buf.append('n');
				}
				else if (MAP_TO_B.indexOf(c) >= 0)
				{
					buf.append('b');
				}
				else if (MAP_TO_C.indexOf(c) >= 0)
				{
					buf.append('c');
				}
				else if (MAP_TO_L.indexOf(c) >= 0)
				{
					buf.append('l');
				}
				else if (MAP_TO_X.indexOf(c) >= 0)
				{
					buf.append('x');
				}
				else if (c < '\040') // Remove any ascii control characters
				{
					buf.append('_');
				}
				else if (INVALID_CHARS_IN_RESOURCE_ID.indexOf(c) >= 0 || ESCAPE_CHARS_IN_RESOURCE_ID.indexOf(c) >= 0)
				{
					buf.append('_');
				}
				else
				{
					buf.append(c);
				}
			}
			String rv = buf.toString();
			return rv;
		}
		catch (Exception e)
		{
			M_log.warn("Validator.escapeResourceName: ", e);
			return id;
		}
	} // escapeResourceName
/**
* Return a string based on id that is fully escaped the question mark.
*
* @param id
* The string to escape.
* @return id fully escaped question mark.
*/
public static String escapeQuestionMark(String id)
{
if (id == null) return "";
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < id.length(); i++)
{
char c = id.charAt(i);
if (c == '?')
{
buf.append('_');
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeQuestionMark: ", e);
return id;
}
} // escapeQuestionMark
/**
* Return a string based on id that is fully escaped to create a zip entry
*
* @param id
* The string to escape.
* @return id fully escaped to create a zip entry
*/
public static String escapeZipEntry(String id)
{
if (id == null) return "";
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < id.length(); i++)
{
char c = id.charAt(i);
if (INVALID_CHARS_IN_ZIP_ENTRY.indexOf(c) != -1)
{
buf.append('_');
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeZipEntry: ", e);
return id;
}
} // escapeZipEntry
/**
* Return a string based on value that is safe to place into a javascript value that is in single quiotes.
*
* @param value
* The string to escape.
* @return value escaped.
*/
public static String escapeJsQuoted(String value)
{
if (value == null) return "";
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < value.length(); i++)
{
char c = value.charAt(i);
// a single quote must be escaped with a leading backslash
if (c == '\'')
{
buf.append("\\'");
}
// a backslash must be escaped with another backslash
else if (c == '\\')
{
buf.append("\\\\");
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeJsQuoted: ", e);
return value;
}
} // escapeJsQuoted
/**
* Return a string based on value that is safe to place into a sql statement: sql statements use the single quote, and this must be doubled as an escape.
*
* @param value
* The string to escape.
* @return value escaped.
*/
public static String escapeSql(String value)
{
if (value == null) return "";
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < value.length(); i++)
{
char c = value.charAt(i);
if (c == '\'')
{
buf.append("''");
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeSql: ", e);
return value;
}
} // escapeSql
/**
* Return a string based on value that is safe to place into a javascript / html identifier: anything not alphanumeric change to 'x'. If the first character is not alphabetic, a letter 'i' is prepended.
*
* @param value
* The string to escape.
* @return value fully escaped using javascript / html identifier rules.
*/
public static String escapeJavascript(String value)
{
if (value == null || "".equals(value)) return "";
try
{
StringBuilder buf = new StringBuilder();
// prepend 'i' if first character is not a letter
if (!java.lang.Character.isLetter(value.charAt(0)))
{
buf.append("i");
}
// change non-alphanumeric characters to 'x'
for (int i = 0; i < value.length(); i++)
{
char c = value.charAt(i);
if (!java.lang.Character.isLetterOrDigit(c))
{
buf.append("x");
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeJavascript: ", e);
return value;
}
} // escapeJavascript
/**
* Check for a valid user id.
*
* @exception IdInvalidException
* if the id is invalid.
*/
public static boolean checkUserId(String id)
{
// the rules:
// Null is rejected
// all blank is rejected
// INVALID_CHARS_IN_USER_ID characters are rejected
if (id == null) return false;
if (id.trim().length() == 0) return false;
// we must reject certain characters that we cannot even escape and get into Tomcat via a URL
for (int i = 0; i < id.length(); i++)
{
if (INVALID_CHARS_IN_USER_ID.indexOf(id.charAt(i)) != -1) return false;
}
return true;
} // checkUserId
/**
* Check for a valid resource id.
*
* @return true if valid, false if not
*/
public static boolean checkResourceId(String id)
{
// the rules:
// Null is rejected
// all blank is rejected
// INVALID_CHARS_IN_RESOURCE_ID characters are rejected
if (id == null) return false;
if (id.trim().length() == 0) return false;
// we must reject certain characters that we cannot even escape and get into Tomcat via a URL
for (int i = 0; i < id.length(); i++)
{
if (INVALID_CHARS_IN_RESOURCE_ID.indexOf(id.charAt(i)) != -1) return false;
}
return true;
} // checkResourceId
/**
* Is this a valid local part of an email id?
*/
public static boolean checkEmailLocal(String id)
{
// rules based on rfc2882, but a bit more conservative
for (int i = 0; i < id.length(); i++)
{
if (VALID_EMAIL.indexOf(id.charAt(i)) == -1) return false;
}
return true;
} // checkEmailLocal
/**
* Isolate and return just the file name part of a full drive and path file name.
*
* @param fullName
* The full file name from a local os file system (mac, unix, windoze)
* @return Just the name (and extension) of the file, without the drive or path.
*/
public static String getFileName(String fullName)
{
// examples: windows: c:\this\that\me.doc
// unix: /usr/local/dev/test.txt
// mac:? one:two:three:four
// so... just take the last characters back till we see a \ or / or :
StringBuilder buf = new StringBuilder();
int index = fullName.length() - 1;
while (index >= 0)
{
char c = fullName.charAt(index--);
if ((c == '\\') || (c == '/') || (c == ':')) break;
buf.insert(0, c);
}
return buf.toString();
} // getFileName
/**
* Put the dividor (comma) inside the size string, for example, 1,003 for 1003
*
* @param size
* The string of size number
* @return The size string with the dividor added
*/
public static String getFileSizeWithDividor(String size)
{
StringBuilder newSize = new StringBuilder(size);
int length = size.length();
int index = size.length();
while (index > 3)
{
index = index - 3;
newSize.insert(index, ",");
}
return newSize.toString();
}
/**
* Isolate and return just the file extension part of a full drive and path file name.
*
* @param fullName
* The full file name from a local os file system (mac, unix, windoze)
* @return Just the extension of the file, to the right of the dot, not including the dot, or blank if none.
*/
public static String getFileExtension(String fullName)
{
// just take from the last dot to the end, or return "" if there's no dot.
int index = fullName.lastIndexOf('.');
if (index == -1) return "";
return fullName.substring(index + 1);
} // getFileExtension
/**
* Determine whether a file resource should be opened in the current window or a new window.
*
* @param contentType
* The content type to check
* @return A string identifying the window in which to open the resource: "_self" to open the resource in the current window, "_blank" for a new window, or an empty string if the resource is not a file.
*/
public static String getResourceTarget(String contentType)
{
// we will open a new window unless...
String rv = "_blank";
// get the resource's type
if (contentType != null)
{
// if the browser will not inline, but mark as attachments, let's not open a new window
if (!letBrowserInline(contentType))
{
rv = "_self";
}
}
return rv;
} // getResourceTarget
/**
* Is this a mime type that the browser can handle inline, in a browser window? If so, links to this type should be to a _blank, and content-disposition should be inline. If not, links to this type should be to _self, and content-disposition should be
* attachment.
*
* @param type
* The mime type to check.
* @return true if this type of resource the browser can handle in line, false if not.
*/
public static boolean letBrowserInline(String type)
{
if (type == null) return false;
String lType = type.toLowerCase();
// text (plain/html) mime types
if (lType.startsWith("text/")) return true;
// image mime types
if (lType.startsWith("image/")) return true;
// PDF mime types
if (lType.equals("application/pdf")) return true;
if (lType.equals("application/x-pdf")) return true;
// internal OSP/Forms
if (lType.equals("application/x-osp")) return true;
// Shockwave Flash mime types
if (lType.equals("application/x-shockwave-flash")) return true;
if (lType.equals("application/futuresplash")) return true;
// checks for VRML file MIME types:x-world/x-vrml, model/vrml, application/x-blaxxunCC3D, application/x-blaxxunCC3Dpro, application/x-CC3D
// need to check for any other MIME types which can be opened by browser plug-ins? %%%zqian
if (lType.indexOf("vrml") != -1 || lType.indexOf("CC3D") != -1) return true;
// check additional inline types for this instance specified in sakai.properties
String moreInlineTypes[] = ServerConfigurationService.getStrings("content.mime.inline");
if (moreInlineTypes != null) {
for (int i = 0; i < moreInlineTypes.length; i++) {
if (lType.equals(moreInlineTypes[i]))
return true;
}
}
return false;
} // letBrowserInline
/**
* Limit the string to a certain number of characters, adding "..." if it was truncated.
*
* @param value
* The string to limit.
* @param the
* length to limit to (as an Integer).
* @return The limited string.
*/
public static String limit(String value, Integer length)
{
return limit(value, length.intValue());
} // limit
/**
* Limit the string to a certain number of characters, adding "..." if it was truncated
*
* @param value
* The string to limit.
* @param the
* length to limit to (as an int).
* @return The limited string.
*/
public static String limit(String value, int length)
{
StringBuilder buf = new StringBuilder(value);
if (buf.length() > length)
{
buf.setLength(length);
buf.append("...");
}
return buf.toString();
} // limit
	/**
	 * Limit the formatted text to a certain number of DISPLAYED characters, adding "..." if it was
	 * truncated. For example, trim("Hello \n&lt;b&gt;World&lt;/b&gt;!", 7) returns
	 * "Hello \n&lt;b&gt;W&lt;/b&gt;...".
	 *
	 * @param value
	 *        The formatted text to limit.
	 * @param length
	 *        the length to limit to (as an int), counted in displayed characters.
	 * @return The limited string.
	 */
	public static String limitFormattedText(String value, int length)
	{
		StringBuilder ret = new StringBuilder();
		// escape first so the display-length accounting below sees the escaped form
		value = FormattedText.escapeHtmlFormattedTextSupressNewlines(value);
		boolean didTrim = FormattedText.trimFormattedText(value, length, ret);
		if (didTrim) ret.append("...");
		return ret.toString();
	}
/**
* Clean the user input string of strange newlines, etc.
*
* @param value
* The user input string.
* @return value cleaned of string newlines, etc.
*/
public static String cleanInput(String value)
{
if (value == null) return null;
if (value.length() == 0) return value;
final int len = value.length();
StringBuilder buf = new StringBuilder();
for (int i = 0; i < len; i++)
{
char c = value.charAt(i);
char next = 0;
if (i + 1 < len) next = value.charAt(i + 1);
switch (c)
{
case '\r':
{
// detect CR LF, make it a \n
if (next == '\n')
{
buf.append('\n');
// eat the next character
i++;
}
else
{
buf.append(c);
}
}
break;
default:
{
buf.append(c);
}
}
}
if (buf.charAt(buf.length() - 1) == '\n')
{
buf.setLength(buf.length() - 1);
}
return buf.toString();
} // cleanInput
/**
* Clean the string parameter of all newlines (replace with space character) and trim leading and trailing spaces
*
* @param value
* The user input string.
* @return value cleaned of newlines, etc.
*/
public static String stripAllNewlines(String value)
{
if (value == null) return null;
value = value.trim();
if (value.length() == 0) return value;
final int len = value.length();
StringBuilder buf = new StringBuilder();
for (int i = 0; i < len; i++)
{
char c = value.charAt(i);
char next = 0;
if (i + 1 < len) next = value.charAt(i + 1);
switch (c)
{
case '\n':
case '\r':
{
buf.append(' ');
}
break;
default:
{
buf.append(c);
}
}
}
return buf.toString();
} // stripAllNewlines
/**
* Returns a hex representation of a byte.
*
* @param b
* The byte to convert to hex.
* @return The 2-digit hex value of the supplied byte.
*/
private static final String toHex(byte b)
{
char ret[] = new char[2];
ret[0] = hexDigit((b >>> 4) & (byte) 0x0F);
ret[1] = hexDigit((b >>> 0) & (byte) 0x0F);
return new String(ret);
}
/**
* Returns the hex digit cooresponding to a number between 0 and 15.
*
* @param i
* The number to get the hex digit for.
* @return The hex digit cooresponding to that number.
* @exception java.lang.IllegalArgumentException
* If supplied digit is not between 0 and 15 inclusive.
*/
private static final char hexDigit(int i)
{
switch (i)
{
case 0:
return '0';
case 1:
return '1';
case 2:
return '2';
case 3:
return '3';
case 4:
return '4';
case 5:
return '5';
case 6:
return '6';
case 7:
return '7';
case 8:
return '8';
case 9:
return '9';
case 10:
return 'A';
case 11:
return 'B';
case 12:
return 'C';
case 13:
return 'D';
case 14:
return 'E';
case 15:
return 'F';
}
throw new IllegalArgumentException("Invalid digit:" + i);
}
/**
* Validate whether the date input is valid
*/
public static boolean checkDate(int day, int month, int year)
{
// Is date valid for month?
if (month == 2)
{
// Check for leap year
if (((year % 4 == 0) && (year % 100 != 0)) || (year % 400 == 0))
{
// leap year
if (day > 29)
{
return false;
}
}
else
{
// normal year
if (day > 28)
{
return false;
}
}
}
else if ((month == 4) || (month == 6) || (month == 9) || (month == 11))
{
if (day > 30)
{
return false;
}
}
return true;
}
public static String generateQueryString(HttpServletRequest req) {
StringBuilder sb = new StringBuilder();
try {
for ( Enumeration<?> e = req.getParameterNames(); e.hasMoreElements(); ) {
String name = (String) e.nextElement();
for ( String value : req.getParameterValues(name) ) {
sb.append(URLEncoder.encode(name,"UTF-8")).append("=").append(URLEncoder.encode(value,"UTF-8")).append("&");
}
}
} catch ( UnsupportedEncodingException ex) {
M_log.error("No UTF-8 Encoding on this JVM, !!!!");
}
if ( sb.length() < 1 ) return null;
return sb.substring(0, sb.length()-1);
}
}
|
kernel/kernel-util/src/main/java/org/sakaiproject/util/Validator.java
|
/*********************************************************************************a
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.util;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Enumeration;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.exception.IdInvalidException;
/**
* <p>
* Validator is utility class that helps to validate stuff.
* </p>
*/
public class Validator
{
/** Our logger. */
private static Log M_log = LogFactory.getLog(Validator.class);
/** These characters are not allowed in a resource id */
public static final String INVALID_CHARS_IN_RESOURCE_ID = "^/\\{}[]()%*?#&=\n\r\t\b\f";
/** These characters are not allowed in a user id */
protected static final String INVALID_CHARS_IN_USER_ID = "^/\\%*?\n\r\t\b\f";
protected static final String MAP_TO_A = "";
protected static final String MAP_TO_B = "";
protected static final String MAP_TO_C = "";
protected static final String MAP_TO_E = "";
protected static final String MAP_TO_I = "";
protected static final String MAP_TO_L = "";
protected static final String MAP_TO_N = "";
protected static final String MAP_TO_O = "";
protected static final String MAP_TO_U = "";
protected static final String MAP_TO_Y = "ش??";
protected static final String MAP_TO_X = "?????";
/**
* These characters are allowed; but if escapeResourceName() is called, they are escaped (actually, removed) Certain characters cause problems with filenames in certain OSes - so get rid of these characters in filenames
*/
protected static final String ESCAPE_CHARS_IN_RESOURCE_ID = ";'\"";
protected static final String INVALID_CHARS_IN_ZIP_ENTRY = "/\\%:*?'\"";
/** These characters are escaped when making a URL */
// protected static final String ESCAPE_URL = "#%?&='\"+ ";
// not '/' as that is assumed to be part of the path
protected static final String ESCAPE_URL = "$&+,:;=?@ '\"<>#%{}|\\^~[]`";
/**
* These can't be encoded in URLs safely even using %nn notation, so encode them using our own custom URL encoding
*/
protected static final String ESCAPE_URL_SPECIAL = "^?;";
/** Valid special email local id characters (- those that are invalid resource ids) */
protected static final String VALID_EMAIL = "abcdefghijklmnopqrstuvwxyz1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!#$&'*+-=?^_`{|}~.";
/**
* Escape a plaintext string so that it can be output as part of an HTML document. Amperstand, greater-than, less-than, newlines, etc, will be escaped so that they display (instead of being interpreted as formatting).
*
* @param value
* The string to escape.
* @return value fully escaped for HTML.
*/
public static String escapeHtml(String value)
{
return FormattedText.escapeHtml(value, true);
}
/**
* Escape a plaintext string so that it can be output as part of an HTML document, except that newlines are NOT escaped and therefore are treated as whitespace instead of displaying as line-breaks. Amperstand, greater-than, less-than, etc, will be
* escaped so that they display (instead of being interpreted as formatting).
*
* @param value
* The string to escape.
* @return value fully escaped for HTML.
*/
public static String escapeHtmlSupressNewlines(String value)
{
return FormattedText.escapeHtml(value, false);
}
/**
* Escape plaintext for display inside a plain textarea.
*/
public static String escapeHtmlTextarea(String value)
{
return FormattedText.escapeHtml(value, false);
}
/**
* Escape HTML-formatted text in preparation to include it in an HTML document.
*/
public static String escapeHtmlFormattedText(String value)
{
return FormattedText.escapeHtmlFormattedText(value);
}
/**
* Escape HTML-formatted text in preparation to include it in an HTML document, except that HTML line breaks ("<br />") will be supressed (removed).
*/
public static String escapeHtmlFormattedTextSupressNewlines(String value)
{
return FormattedText.escapeHtmlFormattedTextSupressNewlines(value);
}
/**
* Escapes the given HTML-formatted text for editing within the WYSIWYG editor. All HTML meta-characters in the string (such as amperstand, less-than, etc), will be escaped.
*
* @param value
* The formatted text to escape
* @return The string to use as the value of the formatted textarea widget
*/
public static String escapeHtmlFormattedTextarea(String value)
{
return FormattedText.escapeHtmlFormattedTextarea(value);
}
/**
* escapeHtml(), but also fix the case where we start with © and treat it as copyright (c) Note: ResourcesAction used to (before 1.1.05) place this as the copyright symbol. -ggolden
*/
public static String escapeHtmlFixCopyright(String value)
{
if (value.startsWith("©"))
{
value = "copyright (c)" + value.substring(6);
}
return escapeHtml(value);
} // escapeHtmlFixCopyright
/**
* Return a string based on id that is fully escaped using URL rules, using a UTF-8 underlying encoding.
*
* Note: java.net.URLEncode.encode() provides a more standard option
* FormattedText.decodeNumericCharacterReferences() undoes this op
*
* @param id
* The string to escape.
* @return id fully escaped using URL rules.
*/
public static String escapeUrl(String id)
{
if (id == null) return "";
id = id.trim();
try
{
// convert the string to bytes in UTF-8
byte[] bytes = id.getBytes("UTF-8");
StringBuilder buf = new StringBuilder();
for (int i = 0; i < bytes.length; i++)
{
byte b = bytes[i];
// escape ascii control characters, ascii high bits, specials
if (ESCAPE_URL_SPECIAL.indexOf((char) b) != -1)
{
buf.append("^^x"); // special funky way to encode bad URL characters
buf.append(toHex(b));
buf.append('^');
}
else if ((ESCAPE_URL.indexOf((char) b) != -1) || (b <= 0x1F) || (b == 0x7F) || (b >= 0x80))
{
buf.append("%");
buf.append(toHex(b));
}
else
{
buf.append((char) b);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeUrl: ", e);
return id;
}
} // escapeUrl
/**
* Return a string based on id that is valid according to Resource name validity rules.
*
* @param id
* The string to escape.
* @return id fully escaped using Resource name validity rules.
*/
public static String escapeResourceName(String id)
{
if (id == null) return "";
id = id.trim();
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < id.length(); i++)
{
char c = id.charAt(i);
if (MAP_TO_A.indexOf(c) >= 0)
{
buf.append('a');
}
else if (MAP_TO_E.indexOf(c) >= 0)
{
buf.append('e');
}
else if (MAP_TO_I.indexOf(c) >= 0)
{
buf.append('i');
}
else if (MAP_TO_O.indexOf(c) >= 0)
{
buf.append('o');
}
else if (MAP_TO_U.indexOf(c) >= 0)
{
buf.append('u');
}
else if (MAP_TO_Y.indexOf(c) >= 0)
{
buf.append('y');
}
else if (MAP_TO_N.indexOf(c) >= 0)
{
buf.append('n');
}
else if (MAP_TO_B.indexOf(c) >= 0)
{
buf.append('b');
}
else if (MAP_TO_C.indexOf(c) >= 0)
{
buf.append('c');
}
else if (MAP_TO_L.indexOf(c) >= 0)
{
buf.append('l');
}
else if (MAP_TO_X.indexOf(c) >= 0)
{
buf.append('x');
}
else if (c < '\040') // Remove any ascii control characters
{
buf.append('_');
}
else if (INVALID_CHARS_IN_RESOURCE_ID.indexOf(c) >= 0 || ESCAPE_CHARS_IN_RESOURCE_ID.indexOf(c) >= 0)
{
buf.append('_');
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeResourceName: ", e);
return id;
}
} // escapeResourceName
/**
* Return a string based on id that is fully escaped the question mark.
*
* @param id
* The string to escape.
* @return id fully escaped question mark.
*/
public static String escapeQuestionMark(String id)
{
if (id == null) return "";
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < id.length(); i++)
{
char c = id.charAt(i);
if (c == '?')
{
buf.append('_');
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeQuestionMark: ", e);
return id;
}
} // escapeQuestionMark
/**
* Return a string based on id that is fully escaped to create a zip entry
*
* @param id
* The string to escape.
* @return id fully escaped to create a zip entry
*/
public static String escapeZipEntry(String id)
{
if (id == null) return "";
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < id.length(); i++)
{
char c = id.charAt(i);
if (INVALID_CHARS_IN_ZIP_ENTRY.indexOf(c) != -1)
{
buf.append('_');
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeZipEntry: ", e);
return id;
}
} // escapeZipEntry
/**
* Return a string based on value that is safe to place into a javascript value that is in single quiotes.
*
* @param value
* The string to escape.
* @return value escaped.
*/
public static String escapeJsQuoted(String value)
{
if (value == null) return "";
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < value.length(); i++)
{
char c = value.charAt(i);
// a single quote must be escaped with a leading backslash
if (c == '\'')
{
buf.append("\\'");
}
// a backslash must be escaped with another backslash
else if (c == '\\')
{
buf.append("\\\\");
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeJsQuoted: ", e);
return value;
}
} // escapeJsQuoted
/**
* Return a string based on value that is safe to place into a sql statement: sql statements use the single quote, and this must be doubled as an escape.
*
* @param value
* The string to escape.
* @return value escaped.
*/
public static String escapeSql(String value)
{
if (value == null) return "";
try
{
StringBuilder buf = new StringBuilder();
for (int i = 0; i < value.length(); i++)
{
char c = value.charAt(i);
if (c == '\'')
{
buf.append("''");
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeSql: ", e);
return value;
}
} // escapeSql
/**
* Return a string based on value that is safe to place into a javascript / html identifier: anything not alphanumeric change to 'x'. If the first character is not alphabetic, a letter 'i' is prepended.
*
* @param value
* The string to escape.
* @return value fully escaped using javascript / html identifier rules.
*/
public static String escapeJavascript(String value)
{
if (value == null || "".equals(value)) return "";
try
{
StringBuilder buf = new StringBuilder();
// prepend 'i' if first character is not a letter
if (!java.lang.Character.isLetter(value.charAt(0)))
{
buf.append("i");
}
// change non-alphanumeric characters to 'x'
for (int i = 0; i < value.length(); i++)
{
char c = value.charAt(i);
if (!java.lang.Character.isLetterOrDigit(c))
{
buf.append("x");
}
else
{
buf.append(c);
}
}
String rv = buf.toString();
return rv;
}
catch (Exception e)
{
M_log.warn("Validator.escapeJavascript: ", e);
return value;
}
} // escapeJavascript
/**
* Check for a valid user id.
*
* @exception IdInvalidException
* if the id is invalid.
*/
public static boolean checkUserId(String id)
{
// the rules:
// Null is rejected
// all blank is rejected
// INVALID_CHARS_IN_USER_ID characters are rejected
if (id == null) return false;
if (id.trim().length() == 0) return false;
// we must reject certain characters that we cannot even escape and get into Tomcat via a URL
for (int i = 0; i < id.length(); i++)
{
if (INVALID_CHARS_IN_USER_ID.indexOf(id.charAt(i)) != -1) return false;
}
return true;
} // checkUserId
/**
* Check for a valid resource id.
*
* @return true if valid, false if not
*/
public static boolean checkResourceId(String id)
{
// the rules:
// Null is rejected
// all blank is rejected
// INVALID_CHARS_IN_RESOURCE_ID characters are rejected
if (id == null) return false;
if (id.trim().length() == 0) return false;
// we must reject certain characters that we cannot even escape and get into Tomcat via a URL
for (int i = 0; i < id.length(); i++)
{
if (INVALID_CHARS_IN_RESOURCE_ID.indexOf(id.charAt(i)) != -1) return false;
}
return true;
} // checkResourceId
/**
* Is this a valid local part of an email id?
*/
public static boolean checkEmailLocal(String id)
{
// rules based on rfc2882, but a bit more conservative
for (int i = 0; i < id.length(); i++)
{
if (VALID_EMAIL.indexOf(id.charAt(i)) == -1) return false;
}
return true;
} // checkEmailLocal
/**
* Isolate and return just the file name part of a full drive and path file name.
*
* @param fullName
* The full file name from a local os file system (mac, unix, windoze)
* @return Just the name (and extension) of the file, without the drive or path.
*/
public static String getFileName(String fullName)
{
// examples: windows: c:\this\that\me.doc
// unix: /usr/local/dev/test.txt
// mac:? one:two:three:four
// so... just take the last characters back till we see a \ or / or :
StringBuilder buf = new StringBuilder();
int index = fullName.length() - 1;
while (index >= 0)
{
char c = fullName.charAt(index--);
if ((c == '\\') || (c == '/') || (c == ':')) break;
buf.insert(0, c);
}
return buf.toString();
} // getFileName
/**
* Put the dividor (comma) inside the size string, for example, 1,003 for 1003
*
* @param size
* The string of size number
* @return The size string with the dividor added
*/
public static String getFileSizeWithDividor(String size)
{
StringBuilder newSize = new StringBuilder(size);
int length = size.length();
int index = size.length();
while (index > 3)
{
index = index - 3;
newSize.insert(index, ",");
}
return newSize.toString();
}
/**
* Isolate and return just the file extension part of a full drive and path file name.
*
* @param fullName
* The full file name from a local os file system (mac, unix, windoze)
* @return Just the extension of the file, to the right of the dot, not including the dot, or blank if none.
*/
public static String getFileExtension(String fullName)
{
// just take from the last dot to the end, or return "" if there's no dot.
int index = fullName.lastIndexOf('.');
if (index == -1) return "";
return fullName.substring(index + 1);
} // getFileExtension
/**
* Determine whether a file resource should be opened in the current window or a new window.
*
* @param contentType
* The content type to check
* @return A string identifying the window in which to open the resource: "_self" to open the resource in the current window, "_blank" for a new window, or an empty string if the resource is not a file.
*/
public static String getResourceTarget(String contentType)
{
// we will open a new window unless...
String rv = "_blank";
// get the resource's type
if (contentType != null)
{
// if the browser will not inline, but mark as attachments, let's not open a new window
if (!letBrowserInline(contentType))
{
rv = "_self";
}
}
return rv;
} // getResourceTarget
/**
* Is this a mime type that the browser can handle inline, in a browser window? If so, links to this type should be to a _blank, and content-disposition should be inline. If not, links to this type should be to _self, and content-disposition should be
* attachment.
*
* @param type
* The mime type to check.
* @return true if this type of resource the browser can handle in line, false if not.
*/
public static boolean letBrowserInline(String type)
{
if (type == null) return false;
String lType = type.toLowerCase();
// text (plain/html) mime types
if (lType.startsWith("text/")) return true;
// image mime types
if (lType.startsWith("image/")) return true;
// PDF mime types
if (lType.equals("application/pdf")) return true;
if (lType.equals("application/x-pdf")) return true;
// internal OSP/Forms
if (lType.equals("application/x-osp")) return true;
// Shockwave Flash mime types
if (lType.equals("application/x-shockwave-flash")) return true;
if (lType.equals("application/futuresplash")) return true;
// checks for VRML file MIME types:x-world/x-vrml, model/vrml, application/x-blaxxunCC3D, application/x-blaxxunCC3Dpro, application/x-CC3D
// need to check for any other MIME types which can be opened by browser plug-ins? %%%zqian
if (lType.indexOf("vrml") != -1 || lType.indexOf("CC3D") != -1) return true;
return false;
} // letBrowserInline
/**
* Limit the string to a certain number of characters, adding "..." if it was truncated.
*
* @param value
* The string to limit.
* @param the
* length to limit to (as an Integer).
* @return The limited string.
*/
public static String limit(String value, Integer length)
{
return limit(value, length.intValue());
} // limit
/**
* Limit the string to a certain number of characters, adding "..." if it was truncated
*
* @param value
* The string to limit.
* @param the
* length to limit to (as an int).
* @return The limited string.
*/
public static String limit(String value, int length)
{
StringBuilder buf = new StringBuilder(value);
if (buf.length() > length)
{
buf.setLength(length);
buf.append("...");
}
return buf.toString();
} // limit
/**
* Limit the formatted to a certain number of DISPLAYED characters, adding "..." if it was truncated. For example, <xmp>trim("Hello \n<b>World</b>!", 7)</xmp> returns <xmp>"Hello \n<b>W</b>..."</xmp>
*
* @param value
* The formatted text to limit.
* @param the
* length to limit to (as an int).
* @return The limited string.
*/
public static String limitFormattedText(String value, int length)
{
StringBuilder ret = new StringBuilder();
value = FormattedText.escapeHtmlFormattedTextSupressNewlines(value);
boolean didTrim = FormattedText.trimFormattedText(value, length, ret);
if (didTrim) ret.append("...");
return ret.toString();
}
/**
* Clean the user input string of strange newlines, etc.
*
* @param value
* The user input string.
* @return value cleaned of string newlines, etc.
*/
public static String cleanInput(String value)
{
if (value == null) return null;
if (value.length() == 0) return value;
final int len = value.length();
StringBuilder buf = new StringBuilder();
for (int i = 0; i < len; i++)
{
char c = value.charAt(i);
char next = 0;
if (i + 1 < len) next = value.charAt(i + 1);
switch (c)
{
case '\r':
{
// detect CR LF, make it a \n
if (next == '\n')
{
buf.append('\n');
// eat the next character
i++;
}
else
{
buf.append(c);
}
}
break;
default:
{
buf.append(c);
}
}
}
if (buf.charAt(buf.length() - 1) == '\n')
{
buf.setLength(buf.length() - 1);
}
return buf.toString();
} // cleanInput
/**
* Clean the string parameter of all newlines (replace with space character) and trim leading and trailing spaces
*
* @param value
* The user input string.
* @return value cleaned of newlines, etc.
*/
public static String stripAllNewlines(String value)
{
if (value == null) return null;
value = value.trim();
if (value.length() == 0) return value;
final int len = value.length();
StringBuilder buf = new StringBuilder();
for (int i = 0; i < len; i++)
{
char c = value.charAt(i);
char next = 0;
if (i + 1 < len) next = value.charAt(i + 1);
switch (c)
{
case '\n':
case '\r':
{
buf.append(' ');
}
break;
default:
{
buf.append(c);
}
}
}
return buf.toString();
} // stripAllNewlines
/**
* Returns a hex representation of a byte.
*
* @param b
* The byte to convert to hex.
* @return The 2-digit hex value of the supplied byte.
*/
private static final String toHex(byte b)
{
char ret[] = new char[2];
ret[0] = hexDigit((b >>> 4) & (byte) 0x0F);
ret[1] = hexDigit((b >>> 0) & (byte) 0x0F);
return new String(ret);
}
/**
* Returns the hex digit cooresponding to a number between 0 and 15.
*
* @param i
* The number to get the hex digit for.
* @return The hex digit cooresponding to that number.
* @exception java.lang.IllegalArgumentException
* If supplied digit is not between 0 and 15 inclusive.
*/
private static final char hexDigit(int i)
{
switch (i)
{
case 0:
return '0';
case 1:
return '1';
case 2:
return '2';
case 3:
return '3';
case 4:
return '4';
case 5:
return '5';
case 6:
return '6';
case 7:
return '7';
case 8:
return '8';
case 9:
return '9';
case 10:
return 'A';
case 11:
return 'B';
case 12:
return 'C';
case 13:
return 'D';
case 14:
return 'E';
case 15:
return 'F';
}
throw new IllegalArgumentException("Invalid digit:" + i);
}
/**
* Validate whether the date input is valid
*/
public static boolean checkDate(int day, int month, int year)
{
// Is date valid for month?
if (month == 2)
{
// Check for leap year
if (((year % 4 == 0) && (year % 100 != 0)) || (year % 400 == 0))
{
// leap year
if (day > 29)
{
return false;
}
}
else
{
// normal year
if (day > 28)
{
return false;
}
}
}
else if ((month == 4) || (month == 6) || (month == 9) || (month == 11))
{
if (day > 30)
{
return false;
}
}
return true;
}
public static String generateQueryString(HttpServletRequest req) {
StringBuilder sb = new StringBuilder();
try {
for ( Enumeration<?> e = req.getParameterNames(); e.hasMoreElements(); ) {
String name = (String) e.nextElement();
for ( String value : req.getParameterValues(name) ) {
sb.append(URLEncoder.encode(name,"UTF-8")).append("=").append(URLEncoder.encode(value,"UTF-8")).append("&");
}
}
} catch ( UnsupportedEncodingException ex) {
M_log.error("No UTF-8 Encoding on this JVM, !!!!");
}
if ( sb.length() < 1 ) return null;
return sb.substring(0, sb.length()-1);
}
}
|
KNL-56
Patch from Stephen Marquard
Patch attached. Adds a new multi-value sakai.properties setting, content.mime.inline, e.g.
content.mime.inline.count=1
content.mime.inline.1=application/msword
Thanks
git-svn-id: 1bc6f63533c24e8bbbfb67ee5594815536d0113b@52746 66ffb92e-73f9-0310-93c1-f5514f145a0a
|
kernel/kernel-util/src/main/java/org/sakaiproject/util/Validator.java
|
KNL-56
|
<ide><path>ernel/kernel-util/src/main/java/org/sakaiproject/util/Validator.java
<ide>
<ide> import org.apache.commons.logging.Log;
<ide> import org.apache.commons.logging.LogFactory;
<add>import org.sakaiproject.component.cover.ServerConfigurationService;
<ide> import org.sakaiproject.exception.IdInvalidException;
<ide>
<ide> /**
<ide> // need to check for any other MIME types which can be opened by browser plug-ins? %%%zqian
<ide> if (lType.indexOf("vrml") != -1 || lType.indexOf("CC3D") != -1) return true;
<ide>
<add> // check additional inline types for this instance specified in sakai.properties
<add> String moreInlineTypes[] = ServerConfigurationService.getStrings("content.mime.inline");
<add>
<add> if (moreInlineTypes != null) {
<add> for (int i = 0; i < moreInlineTypes.length; i++) {
<add> if (lType.equals(moreInlineTypes[i]))
<add> return true;
<add> }
<add> }
<add>
<ide> return false;
<ide>
<ide> } // letBrowserInline
|
|
Java
|
apache-2.0
|
619a3b3074c4713dbc5e9daba407c11228642601
| 0 |
vtkhir/kaa,abohomol/kaa,Dubland/kaa,sashadidukh/kaa,kallelzied/kaa,abohomol/kaa,liuhu/Kaa,Oleh-Kravchenko/kaa,zofuthan/kaa,Deepnekroz/kaa,aglne/kaa,vtkhir/kaa,zofuthan/kaa,Oleh-Kravchenko/kaa,abohomol/kaa,vtkhir/kaa,rasendubi/kaa,Dubland/kaa,sashadidukh/kaa,sashadidukh/kaa,Deepnekroz/kaa,Oleh-Kravchenko/kaa,rasendubi/kaa,kallelzied/kaa,forGGe/kaa,forGGe/kaa,forGGe/kaa,abohomol/kaa,zofuthan/kaa,aglne/kaa,Dubland/kaa,sashadidukh/kaa,sashadidukh/kaa,aglne/kaa,aglne/kaa,liuhu/Kaa,Oleh-Kravchenko/kaa,kallelzied/kaa,vtkhir/kaa,rasendubi/kaa,sashadidukh/kaa,Dubland/kaa,zofuthan/kaa,zofuthan/kaa,rasendubi/kaa,aglne/kaa,forGGe/kaa,Deepnekroz/kaa,vtkhir/kaa,kallelzied/kaa,Dubland/kaa,sashadidukh/kaa,liuhu/Kaa,liuhu/Kaa,forGGe/kaa,aglne/kaa,liuhu/Kaa,rasendubi/kaa,abohomol/kaa,vtkhir/kaa,rasendubi/kaa,rasendubi/kaa,Dubland/kaa,sashadidukh/kaa,Oleh-Kravchenko/kaa,kallelzied/kaa,vtkhir/kaa,vtkhir/kaa,kallelzied/kaa,zofuthan/kaa,forGGe/kaa,liuhu/Kaa,Oleh-Kravchenko/kaa,aglne/kaa,liuhu/Kaa,Deepnekroz/kaa,Deepnekroz/kaa,Deepnekroz/kaa,Dubland/kaa,forGGe/kaa,Oleh-Kravchenko/kaa,abohomol/kaa,Deepnekroz/kaa,abohomol/kaa
|
/*
* Copyright 2014-2015 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.client.logging;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteStatement;
import android.text.TextUtils;
import android.util.Log;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * SQLite-backed persistent log storage for Android clients. All database access
 * is serialized on the {@code database} monitor; prepared statements are
 * compiled lazily and cached for reuse. Record and consumed-size counters are
 * kept in memory and re-read from the database on construction.
 */
public class AndroidSQLiteDBLogStorage implements LogStorage, LogStorageStatus {

    private static final String TAG = "AndroidSQLiteDBLogStorage";

    private static final String CHANGES_QUERY_RESULT = "affected_row_count";
    private static final String GET_CHANGES_QUERY = "SELECT changes() AS " + CHANGES_QUERY_RESULT;

    private final SQLiteOpenHelper dbHelper;
    private final SQLiteDatabase database;

    // in-memory mirrors of the persisted state
    private long recordCount;
    private long consumedSize;
    private int currentBucketId = 1;

    // size in bytes of each in-flight bucket, keyed by bucket id
    private Map<Integer, Long> consumedMemoryStorage = new HashMap<>();

    // lazily compiled, cached prepared statements
    private SQLiteStatement insertStatement;
    private SQLiteStatement deleteByRecordIdStatement;
    private SQLiteStatement deleteByBucketIdStatement;
    private SQLiteStatement resetBucketIdStatement;

    public AndroidSQLiteDBLogStorage(Context context) {
        this(context, PersistentLogStorageStorageInfo.DEFAULT_DB_NAME);
    }

    public AndroidSQLiteDBLogStorage(Context context, String dbName) {
        Log.i(TAG, "Connecting to db with name: " + dbName);
        dbHelper = new DataCollectionDBHelper(context, dbName);
        database = dbHelper.getWritableDatabase();
        retrieveConsumedSizeAndVolume();
        // leftover in-flight buckets from a previous run are reset to unmarked
        if (recordCount > 0) {
            resetBucketIDs();
        }
    }

    @Override
    public void addLogRecord(LogRecord record) {
        synchronized (database) {
            Log.d(TAG, "Adding a new log record...");
            if (insertStatement == null) {
                try {
                    insertStatement = database.compileStatement(PersistentLogStorageStorageInfo.KAA_INSERT_NEW_RECORD);
                } catch (SQLiteException e) {
                    Log.e(TAG, "Can't create row insert statement", e);
                    throw new RuntimeException(e);
                }
            }
            try {
                insertStatement.bindBlob(1, record.getData());
                long insertedId = insertStatement.executeInsert();
                if (insertedId >= 0) {
                    consumedSize += record.getSize();
                    recordCount++;
                    Log.i(TAG, "Added a new log record, records count: " + recordCount + ", data: " + Arrays.toString(record.getData()));
                } else {
                    Log.w(TAG, "No log record was added");
                }
            } catch (SQLiteException e) {
                Log.e(TAG, "Can't add a new record", e);
            }
        }
    }

    @Override
    public LogStorageStatus getStatus() {
        return this;
    }

    @Override
    public LogBlock getRecordBlock(long blockSize) {
        synchronized (database) {
            Log.d(TAG, "Creating a new record block, needed size: " + blockSize);

            LogBlock logBlock = null;
            Cursor cursor = null;
            List<String> unmarkedRecordIds = new LinkedList<>();
            List<LogRecord> logRecords = new LinkedList<>();
            long leftBlockSize = blockSize;
            try {
                // collect unmarked records until the requested byte budget runs out
                cursor = database.rawQuery(PersistentLogStorageStorageInfo.KAA_SELECT_UNMARKED_RECORDS, null);

                while (cursor.moveToNext()) {
                    int recordId = cursor.getInt(0);
                    byte[] recordData = cursor.getBlob(1);

                    if (recordData != null && recordData.length > 0) {
                        if (leftBlockSize < recordData.length) {
                            break;
                        }
                        logRecords.add(new LogRecord(recordData));
                        unmarkedRecordIds.add(String.valueOf(recordId));
                        leftBlockSize -= recordData.length;
                    } else {
                        Log.w(TAG, "Found unmarked record with no data. Deleting it...");
                        removeRecordById(recordId);
                    }
                }

                if (!logRecords.isEmpty()) {
                    // mark the chosen records as belonging to the new bucket
                    updateBucketIdForRecords(currentBucketId, unmarkedRecordIds);
                    logBlock = new LogBlock(currentBucketId++, logRecords);

                    long logBlockSize = blockSize - leftBlockSize;
                    consumedSize -= logBlockSize;
                    consumedMemoryStorage.put(logBlock.getBlockId(), logBlockSize);

                    Log.i(TAG, "Created log block: id [" + logBlock.getBlockId() + "], size: " + logBlockSize + ". Log block record count: " +
                            logBlock.getRecords().size() + ", Total record count: " + recordCount);
                } else {
                    Log.i(TAG, "No unmarked log records found");
                }
            } catch (SQLiteException e) {
                Log.e(TAG, "Can't retrieve unmarked records from storage", e);
            } finally {
                try {
                    tryCloseCursor(cursor);
                } catch (SQLiteException e) {
                    Log.e(TAG, "Unable to close cursor", e);
                }
            }
            return logBlock;
        }
    }

    /** Delete a single (corrupt/empty) record by its row id. */
    private void removeRecordById(int recordId) {
        synchronized (database) {
            Log.v(TAG, "Removing log record with id [" + recordId + "]");
            if (deleteByRecordIdStatement == null) {
                try {
                    deleteByRecordIdStatement = database.compileStatement(PersistentLogStorageStorageInfo.KAA_DELETE_BY_RECORD_ID);
                } catch (SQLiteException e) {
                    Log.e(TAG, "Can't create log remove statement", e);
                    throw new RuntimeException(e);
                }
            }

            try {
                deleteByRecordIdStatement.bindLong(1, recordId);
                deleteByRecordIdStatement.execute();
                long affectedRows = getAffectedRowCount();
                if (affectedRows > 0) {
                    recordCount--;
                    Log.i(TAG, "Removed log record with id [" + recordId + "]");
                } else {
                    Log.w(TAG, "No log record was removed");
                }
            } catch (SQLiteException e) {
                Log.e(TAG, "Failed to remove a log record by recordId [" + recordId + "]", e);
            }
        }
    }

    /** Assign the given bucket id to all records with the given row ids. */
    private void updateBucketIdForRecords(int bucketId, List<String> recordIds) {
        synchronized (database) {
            Log.v(TAG, "Updating bucket id [" + bucketId + "] for records with ids: " + recordIds);
            SQLiteStatement setBucketIdStatement = null;
            try {
                setBucketIdStatement =
                        database.compileStatement(getUpdateBucketIdStatement(recordIds));
                setBucketIdStatement.bindLong(1, bucketId);
                setBucketIdStatement.execute();

                long affectedRows = getAffectedRowCount();
                if (affectedRows > 0) {
                    Log.i(TAG, "Successfully updated id [" + bucketId + "] for log records: " + affectedRows);
                } else {
                    Log.w(TAG, "No log records were updated");
                }
            } catch (SQLiteException e) {
                Log.e(TAG, "Failed to update bucket id [" + bucketId + "] for records with ids: " + recordIds, e);
            } finally {
                tryCloseStatement(setBucketIdStatement);
            }
        }
    }

    /** Build the bucket-update SQL by splicing the id list into the template. */
    private String getUpdateBucketIdStatement(List<String> recordIds) {
        String queryString = TextUtils.join(",", recordIds.toArray());
        StringBuilder builder = new StringBuilder(PersistentLogStorageStorageInfo.KAA_UPDATE_BUCKET_ID);
        int indexOf = builder.lastIndexOf(PersistentLogStorageStorageInfo.SUBSTITUTE_SYMBOL);
        builder.replace(indexOf, indexOf + PersistentLogStorageStorageInfo.SUBSTITUTE_SYMBOL.length(), queryString);
        return builder.toString();
    }

    @Override
    public void removeRecordBlock(int recordBlockId) {
        synchronized (database) {
            Log.d(TAG, "Removing record block with id [" + recordBlockId + "] from storage");
            if (deleteByBucketIdStatement == null) {
                try {
                    deleteByBucketIdStatement = database.compileStatement(PersistentLogStorageStorageInfo.KAA_DELETE_BY_BUCKET_ID);
                } catch (SQLiteException e) {
                    Log.e(TAG, "Can't create record block deletion statement", e);
                    throw new RuntimeException(e);
                }
            }

            try {
                deleteByBucketIdStatement.bindLong(1, recordBlockId);
                deleteByBucketIdStatement.execute();

                long removedRecordsCount = getAffectedRowCount();
                if (removedRecordsCount > 0) {
                    recordCount -= removedRecordsCount;
                    Log.i(TAG, "Removed " + removedRecordsCount + " records from storage. Total log record count: " + recordCount);
                } else {
                    Log.i(TAG, "No records were removed from storage");
                }
            } catch (SQLiteException e) {
                Log.e(TAG, "Failed to remove record block with id [" + recordBlockId + "]", e);
            }
        }
    }

    @Override
    public void notifyUploadFailed(int bucketId) {
        synchronized (database) {
            Log.d(TAG, "Notifying upload fail for bucket id: " + bucketId);
            if (resetBucketIdStatement == null) {
                try {
                    resetBucketIdStatement = database.compileStatement(PersistentLogStorageStorageInfo.KAA_RESET_BY_BUCKET_ID);
                } catch (SQLiteException e) {
                    Log.e(TAG, "Can't create bucket id reset statement", e);
                    throw new RuntimeException(e);
                }
            }

            try {
                resetBucketIdStatement.bindLong(1, bucketId);
                resetBucketIdStatement.execute();

                long affectedRows = getAffectedRowCount();
                if (affectedRows > 0) {
                    Log.i(TAG, "Total " + affectedRows + " log records reset for bucket id: [" + bucketId + "]");
                } else {
                    Log.w(TAG, "No log records for bucket with id: [" + bucketId + "]");
                }

                // BUGFIX: Map.get() returns null for an unknown bucket id; the
                // previous unboxing straight into a long threw NullPointerException.
                Long previouslyConsumedSize = consumedMemoryStorage.remove(bucketId);
                if (previouslyConsumedSize != null) {
                    consumedSize += previouslyConsumedSize;
                } else {
                    Log.w(TAG, "Consumed size for bucket [" + bucketId + "] is unknown; skipping size restore");
                }
            } catch (SQLiteException e) {
                Log.e(TAG, "Failed to reset bucket with id [" + bucketId + "]", e);
            }
        }
    }

    @Override
    public void close() {
        // release cached statements first, then the database and helper
        tryCloseStatement(insertStatement);
        tryCloseStatement(deleteByBucketIdStatement);
        tryCloseStatement(deleteByRecordIdStatement);
        tryCloseStatement(resetBucketIdStatement);

        if (database != null) {
            database.close();
        }

        if (dbHelper != null) {
            dbHelper.close();
        }
    }

    @Override
    public long getConsumedVolume() {
        return consumedSize;
    }

    @Override
    public long getRecordCount() {
        return recordCount;
    }

    /** Populate recordCount/consumedSize from the database on startup. */
    private void retrieveConsumedSizeAndVolume() {
        synchronized (database) {
            Cursor cursor = null;
            try {
                cursor = database.rawQuery(PersistentLogStorageStorageInfo.KAA_HOW_MANY_LOGS_IN_DB, null);

                if (cursor.moveToFirst()) {
                    recordCount = cursor.getLong(0);
                    consumedSize = cursor.getLong(1);
                    Log.i(TAG, "Retrieved record count: " + recordCount + ", consumed size: " + consumedSize);
                } else {
                    Log.e(TAG, "Unable to retrieve consumed size and volume");
                    throw new RuntimeException("Unable to retrieve consumed size and volume");
                }
            } finally {
                tryCloseCursor(cursor);
            }
        }
    }

    /** Clear any bucket ids left over from a previous (interrupted) session. */
    private void resetBucketIDs() {
        synchronized (database) {
            Log.d(TAG, "Resetting bucket ids on application start");

            database.execSQL(PersistentLogStorageStorageInfo.KAA_RESET_BUCKET_ID_ON_START);
            long updatedRows = getAffectedRowCount();
            Log.v(TAG, "Number of rows affected: " + updatedRows);
        }
    }

    /** Return how many rows the last statement changed (SQLite changes()). */
    private long getAffectedRowCount() {
        synchronized (database) {
            Cursor cursor = null;
            try {
                cursor = database.rawQuery(GET_CHANGES_QUERY, null);
                if (cursor != null && cursor.getCount() > 0 && cursor.moveToFirst()) {
                    return cursor.getLong(cursor.getColumnIndex(CHANGES_QUERY_RESULT));
                } else {
                    return 0;
                }
            } finally {
                tryCloseCursor(cursor);
            }
        }
    }

    private void tryCloseCursor(Cursor cursor) {
        if (cursor != null) {
            cursor.close();
        }
    }

    private void tryCloseStatement(SQLiteStatement statement) {
        if (statement != null) {
            statement.close();
        }
    }
}
|
client/client-multi/client-java-android/src/main/java/org/kaaproject/kaa/client/logging/AndroidSQLiteDBLogStorage.java
|
/*
* Copyright 2014-2015 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.client.logging;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteStatement;
import android.util.Log;
import com.sun.deploy.util.StringUtils;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
public class AndroidSQLiteDBLogStorage implements LogStorage, LogStorageStatus {
private static final String TAG = "AndroidSQLiteDBLogStorage";
private static final String CHANGES_QUERY_RESULT = "affected_row_count";
private static final String GET_CHANGES_QUERY = "SELECT changes() AS " + CHANGES_QUERY_RESULT;
private final SQLiteOpenHelper dbHelper;
private final SQLiteDatabase database;
private long recordCount;
private long consumedSize;
private int currentBucketId = 1;
private Map<Integer, Long> consumedMemoryStorage = new HashMap<>();
private SQLiteStatement insertStatement;
private SQLiteStatement deleteByRecordIdStatement;
private SQLiteStatement deleteByBucketIdStatement;
private SQLiteStatement resetBucketIdStatement;
public AndroidSQLiteDBLogStorage(Context context) {
this(context, PersistentLogStorageStorageInfo.DEFAULT_DB_NAME);
}
public AndroidSQLiteDBLogStorage(Context context, String dbName) {
Log.i(TAG, "Connecting to db with name: " + dbName);
dbHelper = new DataCollectionDBHelper(context, dbName);
database = dbHelper.getWritableDatabase();
retrieveConsumedSizeAndVolume();
if (recordCount > 0) {
resetBucketIDs();
}
}
@Override
public void addLogRecord(LogRecord record) {
synchronized (database) {
Log.d(TAG, "Adding a new log record...");
if (insertStatement == null) {
try {
insertStatement = database.compileStatement(PersistentLogStorageStorageInfo.KAA_INSERT_NEW_RECORD);
} catch (SQLiteException e) {
Log.e(TAG, "Can't create row insert statement", e);
throw new RuntimeException(e);
}
}
try {
insertStatement.bindBlob(1, record.getData());
long insertedId = insertStatement.executeInsert();
if (insertedId >= 0) {
consumedSize += record.getSize();
recordCount++;
Log.i(TAG, "Added a new log record, records count: " + recordCount + ", data: " + Arrays.toString(record.getData()));
} else {
Log.w(TAG, "No log record was added");
}
} catch (SQLiteException e) {
Log.e(TAG, "Can't add a new record", e);
}
}
}
@Override
public LogStorageStatus getStatus() {
return this;
}
@Override
public LogBlock getRecordBlock(long blockSize) {
synchronized (database) {
Log.d(TAG, "Creating a new record block, needed size: " + blockSize);
LogBlock logBlock = null;
Cursor cursor = null;
List<String> unmarkedRecordIds = new LinkedList<>();
List<LogRecord> logRecords = new LinkedList<>();
long leftBlockSize = blockSize;
try {
cursor = database.rawQuery(PersistentLogStorageStorageInfo.KAA_SELECT_UNMARKED_RECORDS, null);
while (cursor.moveToNext()) {
int recordId = cursor.getInt(0);
byte[] recordData = cursor.getBlob(1);
if (recordData != null && recordData.length > 0) {
if (leftBlockSize < recordData.length) {
break;
}
logRecords.add(new LogRecord(recordData));
unmarkedRecordIds.add(String.valueOf(recordId));
leftBlockSize -= recordData.length;
} else {
Log.w(TAG, "Found unmarked record with no data. Deleting it...");
removeRecordById(recordId);
}
}
if (!logRecords.isEmpty()) {
updateBucketIdForRecords(currentBucketId, unmarkedRecordIds);
logBlock = new LogBlock(currentBucketId++, logRecords);
long logBlockSize = blockSize - leftBlockSize;
consumedSize -= logBlockSize;
consumedMemoryStorage.put(logBlock.getBlockId(), logBlockSize);
Log.i(TAG, "Created log block: id [" + logBlock.getBlockId() + "], size: " + logBlockSize + ". Log block record count: " +
logBlock.getRecords().size() + ", Total record count: " + recordCount);
} else {
Log.i(TAG, "No unmarked log records found");
}
} catch (SQLiteException e) {
Log.e(TAG, "Can't retrieve unmarked records from storage", e);
} finally {
try {
tryCloseCursor(cursor);
} catch (SQLiteException e) {
Log.e(TAG, "Unable to close cursor", e);
}
}
return logBlock;
}
}
private void removeRecordById(int recordId) {
synchronized (database) {
Log.v(TAG, "Removing log record with id [" + recordId + "]");
if (deleteByRecordIdStatement == null) {
try {
deleteByRecordIdStatement = database.compileStatement(PersistentLogStorageStorageInfo.KAA_DELETE_BY_RECORD_ID);
} catch (SQLiteException e) {
Log.e(TAG, "Can't create log remove statement", e);
throw new RuntimeException(e);
}
}
try {
deleteByRecordIdStatement.bindLong(1, recordId);
deleteByRecordIdStatement.execute();
long affectedRows = getAffectedRowCount();
if (affectedRows > 0) {
recordCount--;
Log.i(TAG, "Removed log record with id [" + recordId + "]");
} else {
Log.w(TAG, "No log record was removed");
}
} catch (SQLiteException e) {
Log.e(TAG, "Failed to remove a log record by recordId [" + recordId + "]", e);
}
}
}
private void updateBucketIdForRecords(int bucketId, List<String> recordIds) {
synchronized (database) {
Log.v(TAG, "Updating bucket id [" + bucketId + "] for records with ids: " + recordIds);
SQLiteStatement setBucketIdStatement = null;
try {
setBucketIdStatement =
database.compileStatement(getUpdateBucketIdStatement(recordIds));
setBucketIdStatement.bindLong(1, bucketId);
setBucketIdStatement.execute();
long affectedRows = getAffectedRowCount();
if (affectedRows > 0) {
Log.i(TAG, "Successfully updated id [" + bucketId + "] for log records: " + affectedRows);
} else {
Log.w(TAG, "No log records were updated");
}
} catch (SQLiteException e) {
Log.e(TAG, "Failed to update bucket id [" + bucketId + "] for records with ids: " + recordIds, e);
} finally {
tryCloseStatement(setBucketIdStatement);
}
}
}
private String getUpdateBucketIdStatement(List<String> recordIds) {
String queryString = StringUtils.join(recordIds, ",");
StringBuilder builder = new StringBuilder(PersistentLogStorageStorageInfo.KAA_UPDATE_BUCKET_ID);
int indexOf = builder.lastIndexOf(PersistentLogStorageStorageInfo.SUBSTITUTE_SYMBOL);
builder.replace(indexOf, indexOf + PersistentLogStorageStorageInfo.SUBSTITUTE_SYMBOL.length(), queryString);
return builder.toString();
}
@Override
public void removeRecordBlock(int recordBlockId) {
synchronized (database) {
Log.d(TAG, "Removing record block with id [" + recordBlockId + "] from storage");
if (deleteByBucketIdStatement == null) {
try {
deleteByBucketIdStatement = database.compileStatement(PersistentLogStorageStorageInfo.KAA_DELETE_BY_BUCKET_ID);
} catch (SQLiteException e) {
Log.e(TAG, "Can't create record block deletion statement", e);
throw new RuntimeException(e);
}
}
try {
deleteByBucketIdStatement.bindLong(1, recordBlockId);
deleteByBucketIdStatement.execute();
long removedRecordsCount = getAffectedRowCount();
if (removedRecordsCount > 0) {
recordCount -= removedRecordsCount;
Log.i(TAG, "Removed " + removedRecordsCount + " records from storage. Total log record count: " + recordCount);
} else {
Log.i(TAG, "No records were removed from storage");
}
} catch (SQLiteException e) {
Log.e(TAG, "Failed to remove record block with id [" + recordBlockId + "]", e);
}
}
}
@Override
public void notifyUploadFailed(int bucketId) {
synchronized (database) {
Log.d(TAG, "Notifying upload fail for bucket id: " + bucketId);
if (resetBucketIdStatement == null) {
try {
resetBucketIdStatement = database.compileStatement(PersistentLogStorageStorageInfo.KAA_RESET_BY_BUCKET_ID);
} catch (SQLiteException e) {
Log.e(TAG, "Can't create bucket id reset statement", e);
throw new RuntimeException(e);
}
}
try {
resetBucketIdStatement.bindLong(1, bucketId);
resetBucketIdStatement.execute();
long affectedRows = getAffectedRowCount();
if (affectedRows > 0) {
Log.i(TAG, "Total " + affectedRows + " log records reset for bucket id: [" + bucketId + "]");
} else {
Log.w(TAG, "No log records for bucket with id: [" + bucketId + "]");
}
long previouslyConsumedSize = consumedMemoryStorage.get(bucketId);
consumedMemoryStorage.remove(bucketId);
consumedSize += previouslyConsumedSize;
} catch (SQLiteException e) {
Log.e(TAG, "Failed to reset bucket with id [" + bucketId + "]", e);
}
}
}
@Override
public void close() {
    // Release every lazily-compiled statement; each may still be null and
    // tryCloseStatement tolerates that.
    final SQLiteStatement[] statements = {
            insertStatement,
            deleteByBucketIdStatement,
            deleteByRecordIdStatement,
            resetBucketIdStatement
    };
    for (SQLiteStatement statement : statements) {
        tryCloseStatement(statement);
    }
    if (database != null) {
        database.close();
    }
    if (dbHelper != null) {
        dbHelper.close();
    }
}
// Returns the cached total size of stored log records. The value is loaded
// from the database in retrieveConsumedSizeAndVolume() and adjusted in memory
// afterwards (e.g. notifyUploadFailed() adds a failed bucket's size back).
@Override
public long getConsumedVolume() {
    return consumedSize;
}
// Returns the cached number of log records currently in storage. Initialized
// from the database in retrieveConsumedSizeAndVolume() and decremented when
// removeRecordBlock() deletes rows.
@Override
public long getRecordCount() {
    return recordCount;
}
/**
 * Loads {@code recordCount} and {@code consumedSize} from the database.
 * The aggregate query is expected to always yield one row; an empty result
 * is treated as a fatal storage error.
 */
private void retrieveConsumedSizeAndVolume() {
    synchronized (database) {
        Cursor cursor = null;
        try {
            cursor = database.rawQuery(PersistentLogStorageStorageInfo.KAA_HOW_MANY_LOGS_IN_DB, null);
            // Guard clause: fail fast when the aggregate query returns nothing.
            if (!cursor.moveToFirst()) {
                Log.e(TAG, "Unable to retrieve consumed size and volume");
                throw new RuntimeException("Unable to retrieve consumed size and volume");
            }
            recordCount = cursor.getLong(0);
            consumedSize = cursor.getLong(1);
            Log.i(TAG, "Retrieved record count: " + recordCount + ", consumed size: " + consumedSize);
        } finally {
            tryCloseCursor(cursor);
        }
    }
}
/**
 * Clears any bucket-id assignments left over from a previous run so that
 * pending records can be re-bucketed after an application restart.
 */
private void resetBucketIDs() {
    synchronized (database) {
        Log.d(TAG, "Resetting bucket ids on application start");
        database.execSQL(PersistentLogStorageStorageInfo.KAA_RESET_BUCKET_ID_ON_START);
        final long affected = getAffectedRowCount();
        Log.v(TAG, "Number of rows affected: " + affected);
    }
}
/**
 * Returns the number of rows touched by the most recent statement, using
 * SQLite's changes() pseudo-query; 0 when the result set is empty.
 */
private long getAffectedRowCount() {
    synchronized (database) {
        Cursor cursor = null;
        try {
            cursor = database.rawQuery(GET_CHANGES_QUERY, null);
            final boolean hasRow = cursor != null && cursor.getCount() > 0 && cursor.moveToFirst();
            return hasRow ? cursor.getLong(cursor.getColumnIndex(CHANGES_QUERY_RESULT)) : 0;
        } finally {
            tryCloseCursor(cursor);
        }
    }
}
// Closes the cursor if present; safe to call with null (used from finally blocks).
private void tryCloseCursor(Cursor cursor) {
    if (cursor != null) {
        cursor.close();
    }
}
// Closes a compiled statement if it was ever created; statements are compiled
// lazily, so null is a normal input here.
private void tryCloseStatement(SQLiteStatement statement) {
    if (statement != null) {
        statement.close();
    }
}
}
|
KAA-515: fix dependency issue
|
client/client-multi/client-java-android/src/main/java/org/kaaproject/kaa/client/logging/AndroidSQLiteDBLogStorage.java
|
KAA-515: fix dependency issue
|
<ide><path>lient/client-multi/client-java-android/src/main/java/org/kaaproject/kaa/client/logging/AndroidSQLiteDBLogStorage.java
<ide> import android.database.sqlite.SQLiteException;
<ide> import android.database.sqlite.SQLiteOpenHelper;
<ide> import android.database.sqlite.SQLiteStatement;
<add>import android.text.TextUtils;
<ide> import android.util.Log;
<del>import com.sun.deploy.util.StringUtils;
<ide>
<ide> import java.util.Arrays;
<ide> import java.util.HashMap;
<ide> }
<ide>
<ide> private String getUpdateBucketIdStatement(List<String> recordIds) {
<del> String queryString = StringUtils.join(recordIds, ",");
<add> String queryString = TextUtils.join(",", recordIds.toArray());
<ide> StringBuilder builder = new StringBuilder(PersistentLogStorageStorageInfo.KAA_UPDATE_BUCKET_ID);
<ide> int indexOf = builder.lastIndexOf(PersistentLogStorageStorageInfo.SUBSTITUTE_SYMBOL);
<ide> builder.replace(indexOf, indexOf + PersistentLogStorageStorageInfo.SUBSTITUTE_SYMBOL.length(), queryString);
|
|
Java
|
apache-2.0
|
47d10009d8cb4c8af1d5fc7eeca03c963370ece8
| 0 |
Deepnekroz/kaa,vtkhir/kaa,zofuthan/kaa,sashadidukh/kaa,sashadidukh/kaa,forGGe/kaa,vtkhir/kaa,Deepnekroz/kaa,vtkhir/kaa,kallelzied/kaa,liuhu/Kaa,zofuthan/kaa,zofuthan/kaa,sashadidukh/kaa,Dubland/kaa,rasendubi/kaa,rasendubi/kaa,Dubland/kaa,rasendubi/kaa,vtkhir/kaa,abohomol/kaa,Deepnekroz/kaa,kallelzied/kaa,Oleh-Kravchenko/kaa,vtkhir/kaa,rasendubi/kaa,Dubland/kaa,aglne/kaa,rasendubi/kaa,rasendubi/kaa,aglne/kaa,sashadidukh/kaa,liuhu/Kaa,forGGe/kaa,liuhu/Kaa,liuhu/Kaa,abohomol/kaa,aglne/kaa,liuhu/Kaa,Dubland/kaa,Dubland/kaa,Oleh-Kravchenko/kaa,forGGe/kaa,Deepnekroz/kaa,zofuthan/kaa,vtkhir/kaa,abohomol/kaa,vtkhir/kaa,sashadidukh/kaa,sashadidukh/kaa,kallelzied/kaa,Deepnekroz/kaa,abohomol/kaa,liuhu/Kaa,aglne/kaa,abohomol/kaa,forGGe/kaa,abohomol/kaa,kallelzied/kaa,Dubland/kaa,kallelzied/kaa,Deepnekroz/kaa,Oleh-Kravchenko/kaa,aglne/kaa,Oleh-Kravchenko/kaa,zofuthan/kaa,Oleh-Kravchenko/kaa,Oleh-Kravchenko/kaa,forGGe/kaa,sashadidukh/kaa,aglne/kaa,vtkhir/kaa,Deepnekroz/kaa,abohomol/kaa,Dubland/kaa,kallelzied/kaa,forGGe/kaa,sashadidukh/kaa,Oleh-Kravchenko/kaa,liuhu/Kaa,zofuthan/kaa,aglne/kaa,rasendubi/kaa,forGGe/kaa
|
/*
* Copyright 2014 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.avro.avrogenc;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
/**
 * Generates C sources (a header/source pair) from an Avro schema using
 * Velocity templates. The schema graph is traversed once to collect every
 * record, union and enum type, then one template is rendered per type.
 */
public class Compiler {
    /** Avro custom property carrying a field's generation direction. */
    private static final String DIRECTION_PROP = "direction";

    /** Base name (no extension) of the generated .h/.c files. */
    private final String generatedSourceName;
    private Schema schema;
    private VelocityEngine engine;
    private PrintWriter headerWriter;
    private PrintWriter sourceWriter;
    private String namespacePrefix;
    /**
     * Schemas queued for generation, in discovery order. The value is the
     * generation context for unions; null for records and enums.
     */
    private final Map<Schema, GenerationContext> schemaQueue;

    /** Configures Velocity to resolve templates from the classpath and the file system. */
    private void initVelocityEngine() {
        engine = new VelocityEngine();
        engine.addProperty("resource.loader", "class, file");
        engine.addProperty("class.resource.loader.class",
                "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        engine.addProperty("file.resource.loader.class",
                "org.apache.velocity.runtime.resource.loader.FileResourceLoader");
        engine.addProperty("file.resource.loader.path", "/, .");
        engine.setProperty("runtime.references.strict", true);
        engine.setProperty("runtime.log.logsystem.class", "org.apache.velocity.runtime.log.NullLogSystem");
    }

    /** Shared initialization; the namespace prefix defaults to "kaa". */
    private Compiler(String sourceName) throws KaaCGeneratorException {
        this.namespacePrefix = "kaa";
        this.generatedSourceName = sourceName;
        this.schemaQueue = new LinkedHashMap<>();
        initVelocityEngine();
    }

    /**
     * Creates a compiler that writes the generated header and source to the
     * supplied output streams.
     *
     * @param schema     parsed Avro schema to compile
     * @param sourceName base name used inside the generated templates
     * @param hdrS       destination stream for the generated header
     * @param srcS       destination stream for the generated source
     * @throws KaaCGeneratorException if template preparation fails
     */
    public Compiler(Schema schema, String sourceName, OutputStream hdrS, OutputStream srcS)
            throws KaaCGeneratorException {
        this(sourceName);
        this.schema = schema;
        this.headerWriter = new PrintWriter(hdrS);
        this.sourceWriter = new PrintWriter(srcS);
        prepareTemplates(false);
    }

    /**
     * Creates a compiler that parses the schema from a file and writes
     * {@code <sourceName>.h} / {@code <sourceName>.c} under {@code outputPath}.
     *
     * @throws KaaCGeneratorException if the schema cannot be parsed or the
     *         output files cannot be created
     */
    public Compiler(String schemaPath, String outputPath, String sourceName) throws KaaCGeneratorException {
        this(sourceName);
        try {
            this.schema = new Schema.Parser().parse(new File(schemaPath));
            prepareTemplates(true);

            File outputDir = new File(outputPath);
            outputDir.mkdirs();
            String headerPath = outputPath + File.separator + generatedSourceName + ".h";
            String sourcePath = outputPath + File.separator + generatedSourceName + ".c";

            // Move the rendered template preambles into place, then append the
            // per-type generated code to them (FileWriter opened in append mode).
            Files.move(new File("src/main/resources/header.tmpl.gen").toPath(),
                    new File(headerPath).toPath(), StandardCopyOption.REPLACE_EXISTING);
            Files.move(new File("src/main/resources/source.tmpl.gen").toPath(),
                    new File(sourcePath).toPath(), StandardCopyOption.REPLACE_EXISTING);
            this.headerWriter = new PrintWriter(new BufferedWriter(new FileWriter(headerPath, true)));
            this.sourceWriter = new PrintWriter(new BufferedWriter(new FileWriter(sourcePath, true)));
        } catch (Exception e) {
            // FIX: corrected the "ouput" typo in the error message.
            throw new KaaCGeneratorException("Failed to create output path: " + e.toString());
        }
    }

    /** Renders the header/source preamble templates to streams or temp files. */
    private void prepareTemplates(boolean toFile) throws KaaCGeneratorException {
        try {
            VelocityContext context = new VelocityContext();
            context.put("headerName", generatedSourceName);

            StringWriter hdrWriter = new StringWriter();
            engine.getTemplate("header.tmpl").merge(context, hdrWriter);
            StringWriter srcWriter = new StringWriter();
            engine.getTemplate("source.tmpl").merge(context, srcWriter);

            if (toFile) {
                writeToFile(hdrWriter, srcWriter);
            } else {
                writeToStream(hdrWriter, srcWriter);
            }
        } catch (Exception e) {
            throw new KaaCGeneratorException("Failed to prepare source templates: " + e.toString());
        }
    }

    private void writeToStream(StringWriter hdrWriter, StringWriter srcWriter) {
        headerWriter.write(hdrWriter.toString());
        sourceWriter.write(srcWriter.toString());
    }

    /**
     * Persists the rendered preambles to temporary .gen files.
     * FIX: use try-with-resources so the streams are closed even when a write
     * fails — the original leaked both FileOutputStreams on exception.
     */
    private void writeToFile(StringWriter hdrWriter, StringWriter srcWriter) throws Exception {
        try (FileOutputStream hdrOs = new FileOutputStream("src/main/resources/header.tmpl.gen")) {
            hdrOs.write(hdrWriter.toString().getBytes());
        }
        try (FileOutputStream srcOs = new FileOutputStream("src/main/resources/source.tmpl.gen")) {
            srcOs.write(srcWriter.toString().getBytes());
        }
    }

    /**
     * Runs the full generation pipeline: collect types, render templates,
     * close the header guard. Both writers are always closed on exit.
     *
     * @throws KaaCGeneratorException if any stage of generation fails
     */
    public void generate() throws KaaCGeneratorException {
        try {
            System.out.println("Processing schema: " + schema);
            if (schema.getType() == Type.UNION) {
                for (Schema s : schema.getTypes()) {
                    filterSchemas(s, null);
                }
            } else {
                filterSchemas(schema, null);
            }
            doGenerate();
            completeGeneration();
            System.out.println("C sources were successfully generated");
        } catch (Exception e) {
            throw new KaaCGeneratorException("Failed to generate C sources: " + e.toString());
        } finally {
            headerWriter.close();
            sourceWriter.close();
        }
    }

    /**
     * Recursively walks the schema, queueing records, unions and enums for
     * generation. A schema seen before only gets its direction merged.
     */
    private void filterSchemas(Schema schema, GenerationContext context) {
        GenerationContext existingContext = schemaQueue.get(schema);
        if (existingContext != null) {
            existingContext.updateDirection(context);
            return;
        }
        switch (schema.getType()) {
        case RECORD:
            for (Field f : schema.getFields()) {
                filterSchemas(f.schema(), new GenerationContext(
                        schema.getName(), f.name(), schema.getProp(DIRECTION_PROP)));
            }
            schemaQueue.put(schema, null);
            break;
        case UNION:
            for (Schema branchSchema : schema.getTypes()) {
                filterSchemas(branchSchema, context);
            }
            schemaQueue.put(schema, context);
            break;
        case ARRAY:
            filterSchemas(schema.getElementType(), context);
            break;
        case ENUM:
            schemaQueue.put(schema, null);
            break;
        default:
            break;
        }
    }

    /** Renders every queued schema with the template matching its type. */
    private void doGenerate() {
        for (Map.Entry<Schema, GenerationContext> cursor : schemaQueue.entrySet()) {
            switch (cursor.getKey().getType()) {
            case RECORD:
                processRecord(cursor.getKey());
                break;
            case UNION:
                processUnion(cursor.getKey(), cursor.getValue());
                break;
            case ENUM:
                processEnum(cursor.getKey());
                break;
            default:
                break;
            }
        }
    }

    /** Renders a union type into both the header and the source file. */
    private void processUnion(Schema schema, GenerationContext genContext) {
        VelocityContext context = new VelocityContext();
        context.put("schema", schema);
        context.put("generationContext", genContext);
        context.put("StyleUtils", StyleUtils.class);
        context.put("TypeConverter", TypeConverter.class);
        context.put("namespacePrefix", namespacePrefix);

        StringWriter headerWriter = new StringWriter();
        engine.getTemplate("union.h.vm").merge(context, headerWriter);
        appendResult(headerWriter.toString(), true);

        StringWriter sourceWriter = new StringWriter();
        engine.getTemplate("union.c.vm").merge(context, sourceWriter);
        appendResult(sourceWriter.toString(), false);
    }

    /** Renders a record type into both the header and the source file. */
    private void processRecord(Schema schema) {
        VelocityContext context = new VelocityContext();
        context.put("schema", schema);
        context.put("StyleUtils", StyleUtils.class);
        context.put("TypeConverter", TypeConverter.class);
        context.put("namespacePrefix", namespacePrefix);

        StringWriter headerWriter = new StringWriter();
        engine.getTemplate("record.h.vm").merge(context, headerWriter);
        appendResult(headerWriter.toString(), true);

        StringWriter sourceWriter = new StringWriter();
        engine.getTemplate("record.c.vm").merge(context, sourceWriter);
        appendResult(sourceWriter.toString(), false);
    }

    /** Renders an enum type; enums only need a header, symbols stay in schema order. */
    private void processEnum(Schema schema) {
        VelocityContext context = new VelocityContext();
        List<String> symbols = schema.getEnumSymbols();
        context.put("schema", schema);
        context.put("symbols", symbols);
        context.put("StyleUtils", StyleUtils.class);
        context.put("namespacePrefix", namespacePrefix);

        StringWriter writer = new StringWriter();
        engine.getTemplate("enum.h.vm").merge(context, writer);
        appendResult(writer.toString(), true);
    }

    /** Routes generated text to the header or the source writer. */
    private void appendResult(String str, boolean toHeader) {
        if (toHeader) {
            headerWriter.write(str);
        } else {
            sourceWriter.write(str);
        }
    }

    /**
     * Closes the C++ extern "C" guard and the header include guard.
     * FIX: renamed from the misspelled "compeleteGeneration" (private, so no callers break).
     */
    private void completeGeneration() {
        headerWriter.write("#ifdef __cplusplus\n} /* extern \"C\" */\n#endif\n#endif");
    }

    /** Overrides the default "kaa" prefix used for generated C identifiers. */
    public void setNamespacePrefix(String namespacePrefix) {
        this.namespacePrefix = namespacePrefix;
    }
}
|
avrogenc/src/main/java/org/kaaproject/kaa/avro/avrogenc/Compiler.java
|
/*
* Copyright 2014 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.avro.avrogenc;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
public class Compiler {
private static final String DIRECTION_PROP = "direction";
private final String generatedSourceName;
private Schema schema;
private VelocityEngine engine;
private PrintWriter headerWriter;
private PrintWriter sourceWriter;
private String namespacePrefix;
private final Map<Schema, GenerationContext> schemaQueue;
private void initVelocityEngine() {
engine = new VelocityEngine();
engine.addProperty("resource.loader", "class, file");
engine.addProperty("class.resource.loader.class",
"org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
engine.addProperty("file.resource.loader.class",
"org.apache.velocity.runtime.resource.loader.FileResourceLoader");
engine.addProperty("file.resource.loader.path", "/, .");
engine.setProperty("runtime.references.strict", true);
engine.setProperty("runtime.log.logsystem.class", "org.apache.velocity.runtime.log.NullLogSystem");
}
private Compiler(String sourceName) throws KaaCGeneratorException {
this.namespacePrefix = "kaa";
this.generatedSourceName = sourceName;
this.schemaQueue = new LinkedHashMap<>();
initVelocityEngine();
}
public Compiler(Schema schema, String sourceName, OutputStream hdrS, OutputStream srcS)
throws KaaCGeneratorException
{
this(sourceName);
this.schema = schema;
this.headerWriter = new PrintWriter(hdrS);
this.sourceWriter = new PrintWriter(srcS);
prepareTemplates(false);
}
public Compiler(String schemaPath, String outputPath, String sourceName) throws KaaCGeneratorException {
this(sourceName);
try {
this.schema = new Schema.Parser().parse(new File(schemaPath));
prepareTemplates(true);
File outputDir = new File(outputPath);
outputDir.mkdirs();
String headerPath = outputPath + File.separator + generatedSourceName + ".h";
String sourcePath = outputPath + File.separator + generatedSourceName + ".c";
Files.move(new File("src/main/resources/header.tmpl.gen").toPath()
, new File(headerPath).toPath(), StandardCopyOption.REPLACE_EXISTING);
Files.move(new File("src/main/resources/source.tmpl.gen").toPath()
, new File(sourcePath).toPath(), StandardCopyOption.REPLACE_EXISTING);
this.headerWriter = new PrintWriter(new BufferedWriter(new FileWriter(headerPath, true)));
this.sourceWriter = new PrintWriter(new BufferedWriter(new FileWriter(sourcePath, true)));
} catch (Exception e) {
throw new KaaCGeneratorException("Failed to create ouput path: " + e.toString());
}
}
private void prepareTemplates(boolean toFile) throws KaaCGeneratorException {
try {
VelocityContext context = new VelocityContext();
context.put("headerName", generatedSourceName);
StringWriter hdrWriter = new StringWriter();
engine.getTemplate("header.tmpl").merge(context, hdrWriter);
StringWriter srcWriter = new StringWriter();
engine.getTemplate("source.tmpl").merge(context, srcWriter);
if (toFile) {
writeToFile(hdrWriter, srcWriter);
} else {
writeToStream(hdrWriter, srcWriter);
}
} catch (Exception e) {
throw new KaaCGeneratorException("Failed to prepare source templates: " + e.toString());
}
}
private void writeToStream(StringWriter hdrWriter, StringWriter srcWriter) {
headerWriter.write(hdrWriter.toString());
sourceWriter.write(srcWriter.toString());
}
private void writeToFile(StringWriter hdrWriter, StringWriter srcWriter) throws Exception {
FileOutputStream hdrOs = new FileOutputStream("src/main/resources/header.tmpl.gen");
hdrOs.write(hdrWriter.toString().getBytes());
hdrOs.close();
FileOutputStream srcOs = new FileOutputStream("src/main/resources/source.tmpl.gen");
srcOs.write(srcWriter.toString().getBytes());
srcOs.close();
}
public void generate() throws KaaCGeneratorException {
try {
System.out.println("Processing schema: " + schema);
if (schema.getType() == Type.UNION) {
for (Schema s : schema.getTypes()) {
filterSchemas(s, null);
}
} else {
filterSchemas(schema, null);
}
doGenerate();
compeleteGeneration();
System.out.println("C sources were successfully generated");
} catch (Exception e) {
throw new KaaCGeneratorException("Failed to generate C sources: " + e.toString());
} finally {
headerWriter.close();
sourceWriter.close();
}
}
private void filterSchemas(Schema schema, GenerationContext context) {
GenerationContext existingContext = schemaQueue.get(schema);
if (existingContext != null) {
existingContext.updateDirection(context);
return;
}
switch (schema.getType()) {
case RECORD:
for (Field f : schema.getFields()) {
filterSchemas(f.schema(), new GenerationContext(
schema.getName(), f.name(), schema.getProp(DIRECTION_PROP)));
}
schemaQueue.put(schema, null);
break;
case UNION:
for (Schema branchSchema : schema.getTypes()) {
filterSchemas(branchSchema, context);
}
schemaQueue.put(schema, context);
break;
case ARRAY:
filterSchemas(schema.getElementType(), context);
break;
case ENUM:
schemaQueue.put(schema, null);
break;
default:
break;
}
}
private void doGenerate() {
for (Map.Entry<Schema, GenerationContext> cursor : schemaQueue.entrySet()) {
switch (cursor.getKey().getType()) {
case RECORD:
processRecord(cursor.getKey());
break;
case UNION:
processUnion(cursor.getKey(), cursor.getValue());
break;
case ENUM:
processEnum(cursor.getKey());
break;
default:
break;
}
}
}
private void processUnion(Schema schema, GenerationContext genContext) {
VelocityContext context = new VelocityContext();
context.put("schema", schema);
context.put("generationContext", genContext);
context.put("StyleUtils", StyleUtils.class);
context.put("TypeConverter", TypeConverter.class);
context.put("namespacePrefix", namespacePrefix);
StringWriter headerWriter = new StringWriter();
engine.getTemplate("union.h.vm").merge(context, headerWriter);
appendResult(headerWriter.toString(), true);
StringWriter sourceWriter = new StringWriter();
engine.getTemplate("union.c.vm").merge(context, sourceWriter);
appendResult(sourceWriter.toString(), false);
}
private void processRecord(Schema schema) {
VelocityContext context = new VelocityContext();
context.put("schema", schema);
context.put("StyleUtils", StyleUtils.class);
context.put("TypeConverter", TypeConverter.class);
context.put("namespacePrefix", namespacePrefix);
StringWriter headerWriter = new StringWriter();
engine.getTemplate("record.h.vm").merge(context, headerWriter);
appendResult(headerWriter.toString(), true);
StringWriter sourceWriter = new StringWriter();
engine.getTemplate("record.c.vm").merge(context, sourceWriter);
appendResult(sourceWriter.toString(), false);
}
private void processEnum(Schema schema) {
VelocityContext context = new VelocityContext();
List<String> symbols = schema.getEnumSymbols();
Collections.sort(symbols);
context.put("schema", schema);
context.put("symbols", symbols);
context.put("StyleUtils", StyleUtils.class);
context.put("namespacePrefix", namespacePrefix);
StringWriter writer = new StringWriter();
engine.getTemplate("enum.h.vm").merge(context, writer);
appendResult(writer.toString(), true);
}
private void appendResult(String str, boolean toHeader) {
if (toHeader) {
headerWriter.write(str);
} else {
sourceWriter.write(str);
}
}
private void compeleteGeneration() {
headerWriter.write("#ifdef __cplusplus\n} /* extern \"C\" */\n#endif\n#endif");
}
public void setNamespacePrefix(String namespacePrefix) {
this.namespacePrefix = namespacePrefix;
}
}
|
KAA-670: Remove sorting of enumeration symbols.
|
avrogenc/src/main/java/org/kaaproject/kaa/avro/avrogenc/Compiler.java
|
KAA-670: Remove sorting of enumeration symbols.
|
<ide><path>vrogenc/src/main/java/org/kaaproject/kaa/avro/avrogenc/Compiler.java
<ide> VelocityContext context = new VelocityContext();
<ide>
<ide> List<String> symbols = schema.getEnumSymbols();
<del> Collections.sort(symbols);
<ide>
<ide> context.put("schema", schema);
<ide> context.put("symbols", symbols);
|
|
Java
|
mit
|
e20e4cfed32b8de1e4b7920603c26eaa48c5de0c
| 0 |
Simbacode/mobipayments,Simbacode/mobipayments
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.simbacode.payments;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import net.oauth.OAuth;
import net.oauth.OAuthAccessor;
import net.oauth.OAuthConsumer;
import net.oauth.OAuthException;
import net.oauth.OAuthMessage;
import net.oauth.OAuthServiceProvider;
import net.oauth.client.OAuthClient;
import net.oauth.client.httpclient4.HttpClient4;
import org.apache.commons.lang3.StringEscapeUtils;
/**
* This is main Pesapal Oauth 1.0 java class
*
* @author Acellam Guy
* @version 0.0.2
*/
public class Pesapal {
private Properties props;
private File propFile;
/**
* Constructor for the main pesapal lib java class
*
* @param fileName
* the path of the config file that contains pesapal details.
* @throws IOException
*/
public Pesapal(String fileName) throws IOException {
props = new Properties();
propFile = new File(fileName);
props.load(new FileInputStream(propFile));
}
/**
 * Builds an {@link OAuthAccessor} whose service provider uses {@code reqUrl}
 * for all three OAuth endpoints, with credentials read from the config file.
 *
 * @param reqUrl
 *            the URL for making requests
 * @return a freshly-built {@link OAuthAccessor}
 */
private OAuthAccessor createOAuthAccessor(String reqUrl) {
    final String consumerKey = props.getProperty("pesapal.consumerKey");
    final String consumerSecret = props.getProperty("pesapal.consumerSecret");
    // NOTE(review): the callback URL is not strictly required by pesapal's
    // servers here — confirm before removing it from the consumer.
    final String callbackUrl = props.getProperty("pesapal.callbackURL");
    final OAuthServiceProvider provider = new OAuthServiceProvider(reqUrl, reqUrl, reqUrl);
    return new OAuthAccessor(new OAuthConsumer(callbackUrl, consumerKey, consumerSecret, provider));
}
/**
 * Posts a transaction to PesaPal. PesaPal returns a response with a page
 * containing the available payment options and redirects to your site once
 * the user has completed the payment process. A tracking id is returned as a
 * query parameter and can be used subsequently to track the payment status.
 *
 * @see OAuthMessage
 * @param amount      the amount of money for the good/service
 * @param desc        description of the good or service being paid for
 * @param type        the type of pesapal account, e.g. MERCHANT
 * @param reference   the unique id of your request
 * @param email       buyer's email address
 * @param phonenumber buyer's phone number
 * @param first_name  buyer's first name
 * @param last_name   buyer's last name
 * @return {@link OAuthMessage}
 * @throws IOException
 * @throws OAuthException
 * @throws URISyntaxException
 */
@SuppressWarnings("rawtypes")
public OAuthMessage PostPesapalDirectOrderV4(String amount, String desc,
        String type, String reference, String email, String phonenumber,
        String first_name, String last_name) throws IOException,
        OAuthException, URISyntaxException {
    String reqUrl = props
            .getProperty("pesapal.serviceProvider.PostPesapalDirectOrderV4");
    OAuthAccessor accessor = createOAuthAccessor(reqUrl);
    OAuthClient client = new OAuthClient(new HttpClient4());

    String callbackUrl = props.getProperty("pesapal.callbackURL");
    callbackUrl = URLEncoder.encode(callbackUrl, "UTF-8");

    // Construct the pesapal order XML (escaped before being sent as a parameter).
    String post_xml = "<?xml version=\"1.0\" encoding=\"utf-8\"?><PesapalDirectOrderInfo xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" Amount=\""
            + amount
            + "\" Description=\""
            + desc
            + "\" Type=\""
            + type
            + "\" Reference=\""
            + reference
            + "\" FirstName=\""
            + first_name
            + "\" LastName=\""
            + last_name
            + "\" Email=\""
            + email
            + "\" PhoneNumber=\""
            + phonenumber
            + "\" xmlns=\"http://www.pesapal.com\" />";
    post_xml = StringEscapeUtils.escapeXml(post_xml);

    // FIX: removed the dead null-check — the original tested a collection for
    // null immediately after assigning it a fresh ArrayList. Build the
    // parameter list directly instead.
    List<Map.Entry> parameters = new ArrayList<Map.Entry>(2);
    parameters.add(new OAuth.Parameter("pesapal_request_data", post_xml));
    parameters.add(new OAuth.Parameter("oauth_callback", callbackUrl));

    return client.getRequestResponse(accessor, "GET", parameters);
}
/**
 * Posts a transaction to PesaPal using pre-built order XML. PesaPal returns a
 * response with a page containing the available payment options and redirects
 * to your site once the user has completed the payment process. A tracking id
 * is returned as a query parameter and can be used subsequently to track the
 * payment status.
 *
 * @see OAuthMessage
 * @param post_xml
 *            the XML-formatted order data (take note of whitespace)
 * @return {@link OAuthMessage}
 * @throws IOException
 * @throws OAuthException
 * @throws URISyntaxException
 */
@SuppressWarnings("rawtypes")
public OAuthMessage PostPesapalDirectOrderV4(String post_xml)
        throws IOException, OAuthException, URISyntaxException {
    String reqUrl = props
            .getProperty("pesapal.serviceProvider.PostPesapalDirectOrderV4");
    OAuthAccessor accessor = createOAuthAccessor(reqUrl);
    OAuthClient client = new OAuthClient(new HttpClient4());

    String callbackUrl = props.getProperty("pesapal.callbackURL");
    callbackUrl = URLEncoder.encode(callbackUrl, "UTF-8");
    post_xml = StringEscapeUtils.escapeXml(post_xml);

    // FIX: removed the dead null-check on a collection that had just been
    // assigned a fresh ArrayList; build the parameter list directly.
    List<Map.Entry> parameters = new ArrayList<Map.Entry>(2);
    parameters.add(new OAuth.Parameter("pesapal_request_data", post_xml));
    parameters.add(new OAuth.Parameter("oauth_callback", callbackUrl));

    return client.getRequestResponse(accessor, "GET", parameters);
}
/**
 * Queries the status of a transaction. When a transaction is posted to
 * PesaPal it may be in a PENDING, COMPLETED or FAILED state; a PENDING
 * payment may complete or fail at a later stage. Both the unique order id
 * generated by your system and the pesapal tracking id are required.
 *
 * @param reference
 *            the order id / reference id you created during
 *            {@link Pesapal#PostPesapalDirectOrderV4}
 * @param trackingId
 *            the reference returned by the pesapal server during post order
 * @return {@link OAuthMessage}
 * @throws IOException
 * @throws OAuthException
 * @throws URISyntaxException
 */
@SuppressWarnings("rawtypes")
public OAuthMessage QueryPaymentStatus(String reference, String trackingId)
        throws IOException, OAuthException, URISyntaxException {
    String reqUrl = props
            .getProperty("pesapal.serviceProvider.QueryPaymentStatus");
    OAuthAccessor accessor = createOAuthAccessor(reqUrl);
    OAuthClient client = new OAuthClient(new HttpClient4());

    reference = URLEncoder.encode(reference, "UTF-8");
    trackingId = URLEncoder.encode(trackingId, "UTF-8");

    // FIX: removed the dead null-check on a freshly-assigned collection;
    // build the parameter list directly.
    List<Map.Entry> parameters = new ArrayList<Map.Entry>(2);
    parameters.add(new OAuth.Parameter("pesapal_merchant_reference", reference));
    parameters.add(new OAuth.Parameter("pesapal_transaction_tracking_id", trackingId));

    return client.getRequestResponse(accessor, "GET", parameters);
}
/**
 * Same as {@link Pesapal#QueryPaymentStatus(String, String)}, but only the
 * unique order id generated by your system is required as input.
 *
 * @param reference
 *            the unique id generated by your app
 * @return {@link OAuthMessage}
 * @throws IOException
 * @throws OAuthException
 * @throws URISyntaxException
 */
@SuppressWarnings("rawtypes")
public OAuthMessage QueryPaymentStatusByMerchantRef(String reference)
        throws IOException, OAuthException, URISyntaxException {
    String reqUrl = props
            .getProperty("pesapal.serviceProvider.QueryPaymentStatusByMerchantRef");
    OAuthAccessor accessor = createOAuthAccessor(reqUrl);
    OAuthClient client = new OAuthClient(new HttpClient4());

    reference = URLEncoder.encode(reference, "UTF-8");

    // FIX: removed the dead null-check on a freshly-assigned collection;
    // build the parameter list directly.
    List<Map.Entry> parameters = new ArrayList<Map.Entry>(1);
    parameters.add(new OAuth.Parameter("pesapal_merchant_reference", reference));

    return client.getRequestResponse(accessor, "GET", parameters);
}
/**
 * Same as {@link Pesapal#QueryPaymentStatus(String, String)}, but additional
 * information about the transaction is returned.
 *
 * @param reference
 *            the order id / reference id you created during
 *            {@link Pesapal#PostPesapalDirectOrderV4}
 * @param trackingId
 *            the reference returned by the pesapal server during post order
 * @return {@link OAuthMessage}
 * @throws IOException
 * @throws OAuthException
 * @throws URISyntaxException
 */
@SuppressWarnings("rawtypes")
public OAuthMessage QueryPaymentDetails(String reference, String trackingId)
        throws IOException, OAuthException, URISyntaxException {
    String reqUrl = props
            .getProperty("pesapal.serviceProvider.QueryPaymentDetails");
    OAuthAccessor accessor = createOAuthAccessor(reqUrl);
    OAuthClient client = new OAuthClient(new HttpClient4());

    reference = URLEncoder.encode(reference, "UTF-8");
    trackingId = URLEncoder.encode(trackingId, "UTF-8");

    // FIX: removed the dead null-check on a freshly-assigned collection;
    // build the parameter list directly.
    List<Map.Entry> parameters = new ArrayList<Map.Entry>(2);
    parameters.add(new OAuth.Parameter("pesapal_merchant_reference", reference));
    parameters.add(new OAuth.Parameter("pesapal_transaction_tracking_id", trackingId));

    return client.getRequestResponse(accessor, "GET", parameters);
}
/**
* <p>
* This is to be used with a java services/servlet/web app that you have
* configured to be as your pesapal server IPN URL. When the web app
* receives the response from Pesapal, Get the parameters and pass the
* details to this function so that pesapal gives you the details about the
* transaction status in the format:
* pesapal_notification_type=CHANGE&pesapal_transaction_tracking_id =<the
* unique tracking id of the transaction>&pesapal_merchant_reference=<the
* merchant reference>. Also remember to parse the header of this response
* to get the payment status.
*
* </p>
* <p>
* After that remember to send back a response to pesapal in the same format
* using your web app.This is to acknowledge the receipt of the sent IPN.
* Send back the response after doing some things such as updating records
* in your data store.
* </p>
*
* @param notificationType
* this one of the notification types specified by pesapal
* @param reference
* the order id/ reference id you created during
* {@link Pesapal#PostPesapalDirectOrderV4}
* @param trackingId
* the reference that was returned by pesapal server during post
* order
* @return {@link OAuthMessage}
* @throws IOException
* @throws OAuthException
* @throws URISyntaxException
*/
@SuppressWarnings("rawtypes")
public OAuthMessage InstantPaymentNotification(String notificationType,
String reference, String trackingId) throws IOException,
OAuthException, URISyntaxException {
String reqUrl = props
.getProperty("pesapal.serviceProvider.querypaymentstatus");
if (notificationType == "CHANGE" && trackingId != "") {
OAuthAccessor accessor = createOAuthAccessor(reqUrl);
OAuthClient client = new OAuthClient(new HttpClient4());
reference = URLEncoder.encode(reference, "UTF-8");
trackingId = URLEncoder.encode(trackingId, "UTF-8");
// add other parameters
Collection<? extends Map.Entry> parameters = new ArrayList<Map.Entry>();
List<Map.Entry> p = (parameters == null) ? new ArrayList<Map.Entry>(
1) : new ArrayList<Map.Entry>(parameters);
p.add(new OAuth.Parameter("pesapal_merchant_reference", reference));
p.add(new OAuth.Parameter("pesapal_transaction_tracking_id",
trackingId));
parameters = p;
// make request
return client.getRequestResponse(accessor, "GET", parameters);
} else {
return null;
}
}
}
|
src/main/java/com/simbacode/payments/Pesapal.java
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.simbacode.payments;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import net.oauth.OAuth;
import net.oauth.OAuthAccessor;
import net.oauth.OAuthConsumer;
import net.oauth.OAuthException;
import net.oauth.OAuthMessage;
import net.oauth.OAuthServiceProvider;
import net.oauth.client.OAuthClient;
import net.oauth.client.httpclient4.HttpClient4;
import org.apache.commons.lang3.StringEscapeUtils;
/**
 * This is the main Pesapal OAuth 1.0 java class.
 *
 * @author Acellam Guy
 * @version 0.1
 */
public class Pesapal {
    /** Configuration read from the pesapal properties file (keys, URLs). */
    private Properties props;
    /** The configuration file the properties were loaded from. */
    private File propFile;

    /**
     * Constructor for the main pesapal lib java class.
     *
     * @param fileName
     *            the path of the config file that contains pesapal details.
     * @throws IOException
     *             if the configuration file cannot be read
     */
    public Pesapal(String fileName) throws IOException {
        props = new Properties();
        propFile = new File(fileName);
        // BUG FIX: the original never closed the FileInputStream, leaking a
        // file handle for the lifetime of the object.
        FileInputStream in = new FileInputStream(propFile);
        try {
            props.load(in);
        } finally {
            in.close();
        }
    }

    /**
     * This is used to create the consumer for an OAuth request.
     *
     * @param reqUrl
     *            the URL for making requests
     *
     * @see OAuthAccessor
     * @return OAuthAccessor
     */
    private OAuthAccessor createOAuthAccessor(String reqUrl) {
        String consumerKey = props.getProperty("pesapal.consumerKey");
        // This is really not needed for pesapal servers at this point.
        // TODO see how to remove this
        String callbackUrl = props.getProperty("pesapal.callbackURL");
        String consumerSecret = props.getProperty("pesapal.consumerSecret");
        OAuthServiceProvider provider = new OAuthServiceProvider(reqUrl,
                reqUrl, reqUrl);
        OAuthConsumer consumer = new OAuthConsumer(callbackUrl, consumerKey,
                consumerSecret, provider);
        return new OAuthAccessor(consumer);
    }

    /**
     * Signs and sends a GET request with the given parameters to the given
     * pesapal endpoint. Shared by every public request method so the
     * accessor/client wiring lives in one place.
     *
     * @param reqUrl
     *            the endpoint URL read from the configuration
     * @param parameters
     *            the request parameters to sign and send
     * @return {@link OAuthMessage} the signed server response
     * @throws IOException
     * @throws OAuthException
     * @throws URISyntaxException
     */
    private OAuthMessage sendSignedGetRequest(String reqUrl,
            List<OAuth.Parameter> parameters) throws IOException,
            OAuthException, URISyntaxException {
        OAuthAccessor accessor = createOAuthAccessor(reqUrl);
        OAuthClient client = new OAuthClient(new HttpClient4());
        return client.getRequestResponse(accessor, "GET", parameters);
    }

    /**
     * Makes a request to the pesapal server and returns the response that
     * contains all the OAuth details.
     *
     * Use this to post a transaction to PesaPal. PesaPal will return a
     * response with a page which contains the available payment options and
     * will redirect to your site once the user has completed the payment
     * process. A tracking id will be returned as a query parameter - this can
     * be used subsequently to track the payment status on pesapal for this
     * transaction.
     *
     * @see OAuthMessage
     * @param amount
     *            the amount of money for the good/service
     * @param desc
     *            description of what is being paid for (good or service)
     * @param type
     *            the type of pesapal account e.g. Merchant
     * @param reference
     *            the unique id to your request.
     * @param email
     * @param phonenumber
     * @param first_name
     * @param last_name
     * @return {@link OAuthMessage}
     * @throws IOException
     * @throws OAuthException
     * @throws URISyntaxException
     */
    public OAuthMessage PostPesapalDirectOrderV4(String amount, String desc,
            String type, String reference, String email, String phonenumber,
            String first_name, String last_name) throws IOException,
            OAuthException, URISyntaxException {
        String reqUrl = props
                .getProperty("pesapal.serviceProvider.PostPesapalDirectOrderV4");
        String callbackUrl = props.getProperty("pesapal.callbackURL");
        callbackUrl = URLEncoder.encode(callbackUrl, "UTF-8");
        // construct the pesapal order XML
        String post_xml = "<?xml version=\"1.0\" encoding=\"utf-8\"?><PesapalDirectOrderInfo xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" Amount=\""
                + amount
                + "\" Description=\""
                + desc
                + "\" Type=\""
                + type
                + "\" Reference=\""
                + reference
                + "\" FirstName=\""
                + first_name
                + "\" LastName=\""
                + last_name
                + "\" Email=\""
                + email
                + "\" PhoneNumber=\""
                + phonenumber
                + "\" xmlns=\"http://www.pesapal.com\" />";
        // pesapal expects the XML payload itself to be XML-escaped
        post_xml = StringEscapeUtils.escapeXml(post_xml);
        List<OAuth.Parameter> parameters = new ArrayList<OAuth.Parameter>(2);
        parameters.add(new OAuth.Parameter("pesapal_request_data", post_xml));
        parameters.add(new OAuth.Parameter("oauth_callback", callbackUrl));
        return sendSignedGetRequest(reqUrl, parameters);
    }

    /**
     * Makes a request to the pesapal server and returns the response that
     * contains all the OAuth details.
     *
     * Use this to post a transaction to PesaPal. PesaPal will return a
     * response with a page which contains the available payment options and
     * will redirect to your site once the user has completed the payment
     * process. A tracking id will be returned as a query parameter - this can
     * be used subsequently to track the payment status on pesapal for this
     * transaction.
     *
     * @see OAuthMessage
     * @param post_xml
     *            The XML formatted order data. Take note of spacing.
     * @return {@link OAuthMessage}
     * @throws IOException
     * @throws OAuthException
     * @throws URISyntaxException
     */
    public OAuthMessage PostPesapalDirectOrderV4(String post_xml)
            throws IOException, OAuthException, URISyntaxException {
        String reqUrl = props
                .getProperty("pesapal.serviceProvider.PostPesapalDirectOrderV4");
        String callbackUrl = props.getProperty("pesapal.callbackURL");
        callbackUrl = URLEncoder.encode(callbackUrl, "UTF-8");
        // pesapal expects the XML payload itself to be XML-escaped
        post_xml = StringEscapeUtils.escapeXml(post_xml);
        List<OAuth.Parameter> parameters = new ArrayList<OAuth.Parameter>(2);
        parameters.add(new OAuth.Parameter("pesapal_request_data", post_xml));
        parameters.add(new OAuth.Parameter("oauth_callback", callbackUrl));
        return sendSignedGetRequest(reqUrl, parameters);
    }

    /**
     * Use this to query the status of the transaction. When a transaction is
     * posted to PesaPal, it may be in a PENDING, COMPLETED or FAILED state.
     * If the transaction is PENDING, the payment may complete or fail at a
     * later stage. Both the unique order id generated by your system and the
     * pesapal tracking id are required as input parameters.
     *
     * @param reference
     *            the order id/ reference id you created during
     *            {@link Pesapal#PostPesapalDirectOrderV4}
     * @param trackingId
     *            the reference that was returned by pesapal server during
     *            post order
     * @return {@link OAuthMessage}
     * @throws IOException
     * @throws OAuthException
     * @throws URISyntaxException
     */
    public OAuthMessage QueryPaymentStatus(String reference, String trackingId)
            throws IOException, OAuthException, URISyntaxException {
        String reqUrl = props
                .getProperty("pesapal.serviceProvider.QueryPaymentStatus");
        reference = URLEncoder.encode(reference, "UTF-8");
        trackingId = URLEncoder.encode(trackingId, "UTF-8");
        List<OAuth.Parameter> parameters = new ArrayList<OAuth.Parameter>(2);
        parameters.add(new OAuth.Parameter("pesapal_merchant_reference", reference));
        parameters.add(new OAuth.Parameter("pesapal_transaction_tracking_id", trackingId));
        return sendSignedGetRequest(reqUrl, parameters);
    }

    /**
     * Same as {@link Pesapal#QueryPaymentStatus(String, String)}, but only
     * the unique order id generated by your system is required as the input
     * parameter.
     *
     * @param reference
     *            the unique id generated by your app
     * @return {@link OAuthMessage}
     * @throws IOException
     * @throws OAuthException
     * @throws URISyntaxException
     */
    public OAuthMessage QueryPaymentStatusByMerchantRef(String reference)
            throws IOException, OAuthException, URISyntaxException {
        String reqUrl = props
                .getProperty("pesapal.serviceProvider.QueryPaymentStatusByMerchantRef");
        reference = URLEncoder.encode(reference, "UTF-8");
        List<OAuth.Parameter> parameters = new ArrayList<OAuth.Parameter>(1);
        parameters.add(new OAuth.Parameter("pesapal_merchant_reference", reference));
        return sendSignedGetRequest(reqUrl, parameters);
    }

    /**
     * Same as {@link Pesapal#QueryPaymentStatus(String, String)}, but
     * additional information is returned.
     *
     * @param reference
     *            the order id/ reference id you created during
     *            {@link Pesapal#PostPesapalDirectOrderV4}
     * @param trackingId
     *            the reference that was returned by pesapal server during
     *            post order
     * @return {@link OAuthMessage}
     * @throws IOException
     * @throws OAuthException
     * @throws URISyntaxException
     */
    public OAuthMessage QueryPaymentDetails(String reference, String trackingId)
            throws IOException, OAuthException, URISyntaxException {
        String reqUrl = props
                .getProperty("pesapal.serviceProvider.QueryPaymentDetails");
        reference = URLEncoder.encode(reference, "UTF-8");
        trackingId = URLEncoder.encode(trackingId, "UTF-8");
        List<OAuth.Parameter> parameters = new ArrayList<OAuth.Parameter>(2);
        parameters.add(new OAuth.Parameter("pesapal_merchant_reference", reference));
        parameters.add(new OAuth.Parameter("pesapal_transaction_tracking_id", trackingId));
        return sendSignedGetRequest(reqUrl, parameters);
    }

    /**
     * <p>
     * This is to be used with a java service/servlet/web app that you have
     * configured to be your pesapal server IPN URL. When the web app receives
     * the response from Pesapal, get the parameters and pass the details to
     * this function so that pesapal gives you the details about the
     * transaction status in the format:
     * pesapal_notification_type=CHANGE&pesapal_transaction_tracking_id =<the
     * unique tracking id of the transaction>&pesapal_merchant_reference=<the
     * merchant reference>. Also remember to parse the header of this response
     * to get the payment status.
     * </p>
     * <p>
     * After that remember to send back a response to pesapal in the same
     * format using your web app. This is to acknowledge the receipt of the
     * sent IPN. Send back the response after doing some things such as
     * updating records in your data store.
     * </p>
     *
     * @param notificationType
     *            this is one of the notification types specified by pesapal;
     *            only "CHANGE" is handled here
     * @param reference
     *            the order id/ reference id you created during
     *            {@link Pesapal#PostPesapalDirectOrderV4}
     * @param trackingId
     *            the reference that was returned by pesapal server during
     *            post order
     * @return {@link OAuthMessage} the server response, or {@code null} when
     *         the notification is not a CHANGE notification or no tracking
     *         id was supplied
     * @throws IOException
     * @throws OAuthException
     * @throws URISyntaxException
     */
    public OAuthMessage InstantPaymentNotification(String notificationType,
            String reference, String trackingId) throws IOException,
            OAuthException, URISyntaxException {
        String reqUrl = props
                .getProperty("pesapal.serviceProvider.querypaymentstatus");
        // BUG FIX: the original compared strings with == / != which tests
        // object identity, not content; use equals()/length check instead.
        if ("CHANGE".equals(notificationType) && trackingId != null
                && trackingId.length() > 0) {
            reference = URLEncoder.encode(reference, "UTF-8");
            trackingId = URLEncoder.encode(trackingId, "UTF-8");
            List<OAuth.Parameter> parameters = new ArrayList<OAuth.Parameter>(2);
            parameters.add(new OAuth.Parameter("pesapal_merchant_reference", reference));
            parameters.add(new OAuth.Parameter("pesapal_transaction_tracking_id",
                    trackingId));
            return sendSignedGetRequest(reqUrl, parameters);
        } else {
            return null;
        }
    }
}
|
more edits to the documentation
|
src/main/java/com/simbacode/payments/Pesapal.java
|
more edits to the documentation
|
<ide><path>rc/main/java/com/simbacode/payments/Pesapal.java
<ide> * This is main Pesapal Oauth 1.0 java class
<ide> *
<ide> * @author Acellam Guy
<del> * @version 0.1
<add> * @version 0.0.2
<ide> */
<ide> public class Pesapal {
<ide>
<ide> /**
<ide> * <p>
<ide> * This is to be used with a java services/servlet/web app that you have
<del> * configured your pesapal server as the IPN URL. When the web app receives
<del> * the response from Pesapal, Get the parameters and pass the details to
<del> * this function to this function so that pesapal gives you the details
<del> * about the transaction in the format:
<add> * configured to be as your pesapal server IPN URL. When the web app
<add> * receives the response from Pesapal, Get the parameters and pass the
<add> * details to this function so that pesapal gives you the details about the
<add> * transaction status in the format:
<ide> * pesapal_notification_type=CHANGE&pesapal_transaction_tracking_id =<the
<ide> * unique tracking id of the transaction>&pesapal_merchant_reference=<the
<del> * merchant reference>.
<add> * merchant reference>. Also remember to parse the header of this response
<add> * to get the payment status.
<add> *
<ide> * </p>
<ide> * <p>
<ide> * After that remember to send back a response to pesapal in the same format
|
|
Java
|
bsd-3-clause
|
4b55383ba053af6460c218b3836285eee74e5947
| 0 |
Georgeto/jmonkeyengine,olafmaas/jmonkeyengine,atomixnmc/jmonkeyengine,d235j/jmonkeyengine,davidB/jmonkeyengine,danteinforno/jmonkeyengine,amit2103/jmonkeyengine,tr0k/jmonkeyengine,g-rocket/jmonkeyengine,bsmr-java/jmonkeyengine,mbenson/jmonkeyengine,skapi1992/jmonkeyengine,bsmr-java/jmonkeyengine,InShadow/jmonkeyengine,aaronang/jmonkeyengine,InShadow/jmonkeyengine,Georgeto/jmonkeyengine,wrvangeest/jmonkeyengine,tr0k/jmonkeyengine,phr00t/jmonkeyengine,davidB/jmonkeyengine,g-rocket/jmonkeyengine,g-rocket/jmonkeyengine,mbenson/jmonkeyengine,g-rocket/jmonkeyengine,zzuegg/jmonkeyengine,bsmr-java/jmonkeyengine,shurun19851206/jMonkeyEngine,bertleft/jmonkeyengine,amit2103/jmonkeyengine,GreenCubes/jmonkeyengine,OpenGrabeso/jmonkeyengine,shurun19851206/jMonkeyEngine,nickschot/jmonkeyengine,davidB/jmonkeyengine,sandervdo/jmonkeyengine,rbottema/jmonkeyengine,shurun19851206/jMonkeyEngine,davidB/jmonkeyengine,yetanotherindie/jMonkey-Engine,d235j/jmonkeyengine,yetanotherindie/jMonkey-Engine,d235j/jmonkeyengine,yetanotherindie/jMonkey-Engine,OpenGrabeso/jmonkeyengine,mbenson/jmonkeyengine,phr00t/jmonkeyengine,phr00t/jmonkeyengine,skapi1992/jmonkeyengine,amit2103/jmonkeyengine,nickschot/jmonkeyengine,InShadow/jmonkeyengine,Georgeto/jmonkeyengine,danteinforno/jmonkeyengine,d235j/jmonkeyengine,Georgeto/jmonkeyengine,olafmaas/jmonkeyengine,amit2103/jmonkeyengine,GreenCubes/jmonkeyengine,tr0k/jmonkeyengine,bertleft/jmonkeyengine,weilichuang/jmonkeyengine,delftsre/jmonkeyengine,mbenson/jmonkeyengine,sandervdo/jmonkeyengine,InShadow/jmonkeyengine,olafmaas/jmonkeyengine,wrvangeest/jmonkeyengine,atomixnmc/jmonkeyengine,d235j/jmonkeyengine,aaronang/jmonkeyengine,OpenGrabeso/jmonkeyengine,shurun19851206/jMonkeyEngine,danteinforno/jmonkeyengine,danteinforno/jmonkeyengine,Georgeto/jmonkeyengine,olafmaas/jmonkeyengine,delftsre/jmonkeyengine,shurun19851206/jMonkeyEngine,weilichuang/jmonkeyengine,weilichuang/jmonkeyengine,jMonkeyEngine/jmonkeyengine,amit2103/jmonkeyengine,aaronang/jmonkeyengine,wei
lichuang/jmonkeyengine,g-rocket/jmonkeyengine,sandervdo/jmonkeyengine,OpenGrabeso/jmonkeyengine,Georgeto/jmonkeyengine,atomixnmc/jmonkeyengine,davidB/jmonkeyengine,zzuegg/jmonkeyengine,rbottema/jmonkeyengine,GreenCubes/jmonkeyengine,atomixnmc/jmonkeyengine,atomixnmc/jmonkeyengine,mbenson/jmonkeyengine,aaronang/jmonkeyengine,bertleft/jmonkeyengine,zzuegg/jmonkeyengine,rbottema/jmonkeyengine,weilichuang/jmonkeyengine,jMonkeyEngine/jmonkeyengine,GreenCubes/jmonkeyengine,yetanotherindie/jMonkey-Engine,d235j/jmonkeyengine,nickschot/jmonkeyengine,nickschot/jmonkeyengine,yetanotherindie/jMonkey-Engine,zzuegg/jmonkeyengine,delftsre/jmonkeyengine,davidB/jmonkeyengine,mbenson/jmonkeyengine,sandervdo/jmonkeyengine,jMonkeyEngine/jmonkeyengine,delftsre/jmonkeyengine,wrvangeest/jmonkeyengine,yetanotherindie/jMonkey-Engine,rbottema/jmonkeyengine,tr0k/jmonkeyengine,weilichuang/jmonkeyengine,skapi1992/jmonkeyengine,wrvangeest/jmonkeyengine,atomixnmc/jmonkeyengine,OpenGrabeso/jmonkeyengine,phr00t/jmonkeyengine,amit2103/jmonkeyengine,g-rocket/jmonkeyengine,danteinforno/jmonkeyengine,bsmr-java/jmonkeyengine,shurun19851206/jMonkeyEngine,skapi1992/jmonkeyengine,bertleft/jmonkeyengine,OpenGrabeso/jmonkeyengine,jMonkeyEngine/jmonkeyengine,danteinforno/jmonkeyengine
|
package com.jme3.scene.plugins.blender.modifiers;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jme3.animation.AnimControl;
import com.jme3.animation.Animation;
import com.jme3.animation.Bone;
import com.jme3.animation.BoneTrack;
import com.jme3.animation.Skeleton;
import com.jme3.animation.SkeletonControl;
import com.jme3.math.Matrix4f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.scene.Node;
import com.jme3.scene.VertexBuffer;
import com.jme3.scene.VertexBuffer.Format;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.scene.VertexBuffer.Usage;
import com.jme3.scene.plugins.blender.BlenderContext;
import com.jme3.scene.plugins.blender.BlenderContext.LoadedFeatureDataType;
import com.jme3.scene.plugins.blender.animations.ArmatureHelper;
import com.jme3.scene.plugins.blender.constraints.Constraint;
import com.jme3.scene.plugins.blender.constraints.ConstraintHelper;
import com.jme3.scene.plugins.blender.exceptions.BlenderFileException;
import com.jme3.scene.plugins.blender.file.FileBlockHeader;
import com.jme3.scene.plugins.blender.file.Pointer;
import com.jme3.scene.plugins.blender.file.Structure;
import com.jme3.scene.plugins.blender.meshes.MeshContext;
import com.jme3.scene.plugins.blender.objects.ObjectHelper;
import com.jme3.scene.plugins.ogre.AnimData;
import com.jme3.util.BufferUtils;
/**
* This modifier allows to add bone animation to the object.
*
* @author Marcin Roguski (Kaelthas)
*/
/* package */class ArmatureModifier extends Modifier {
private static final Logger LOGGER = Logger.getLogger(ArmatureModifier.class.getName());
private static final int MAXIMUM_WEIGHTS_PER_VERTEX = 4;
// @Marcin it was an Ogre limitation, but as long as we use a MaxNumWeight
// variable in mesh,
// i guess this limitation has no sense for the blender loader...so i guess
// it's up to you. You'll have to deternine the max weight according to the
// provided blend file
// I added a check to avoid crash when loading a model that has more than 4
// weight per vertex on line 258
// If you decide to remove this limitation, remove this code.
// Rémy
/** Loaded animation data. */
private AnimData animData;
/** Old memory address of the mesh that will have the skeleton applied. */
private Long meshOMA;
/**
* The maxiumum amount of bone groups applied to a single vertex (max = MAXIMUM_WEIGHTS_PER_VERTEX).
*/
private int boneGroups;
/** The weights of vertices. */
private VertexBuffer verticesWeights;
/** The indexes of bones applied to vertices. */
private VertexBuffer verticesWeightsIndices;
/**
* This constructor reads animation data from the object structore. The
* stored data is the AnimData and additional data is armature's OMA.
*
* @param objectStructure
* the structure of the object
* @param modifierStructure
* the structure of the modifier
* @param blenderContext
* the blender context
* @throws BlenderFileException
* this exception is thrown when the blender file is somehow
* corrupted
*/
    public ArmatureModifier(Structure objectStructure, Structure modifierStructure, BlenderContext blenderContext) throws BlenderFileException {
        // fetch the mesh this modifier deforms from the object's "data" pointer
        Structure meshStructure = ((Pointer) objectStructure.getFieldValue("data")).fetchData(blenderContext.getInputStream()).get(0);
        Pointer pDvert = (Pointer) meshStructure.getFieldValue("dvert");// dvert = DeformVERTices
        //if pDvert==null then there are not vertex groups and no need to load skeleton (untill bone envelopes are supported)
        if (this.validate(modifierStructure, blenderContext) && pDvert.isNotNull()) {
            Pointer pArmatureObject = (Pointer) modifierStructure.getFieldValue("object");
            if (pArmatureObject.isNotNull()) {
                ArmatureHelper armatureHelper = blenderContext.getHelper(ArmatureHelper.class);
                Structure armatureObject = pArmatureObject.fetchData(blenderContext.getInputStream()).get(0);
                // load skeleton
                Structure armatureStructure = ((Pointer) armatureObject.getFieldValue("data")).fetchData(blenderContext.getInputStream()).get(0);
                Structure pose = ((Pointer) armatureObject.getFieldValue("pose")).fetchData(blenderContext.getInputStream()).get(0);
                List<Structure> chanbase = ((Structure) pose.getFieldValue("chanbase")).evaluateListBase(blenderContext);
                // map each bone's old memory address to its pose channel so
                // the bone builder below can look up pose data per bone
                Map<Long, Structure> bonesPoseChannels = new HashMap<Long, Structure>(chanbase.size());
                for (Structure poseChannel : chanbase) {
                    Pointer pBone = (Pointer) poseChannel.getFieldValue("bone");
                    bonesPoseChannels.put(pBone.getOldMemoryAddress(), poseChannel);
                }
                ObjectHelper objectHelper = blenderContext.getHelper(ObjectHelper.class);
                // transformation from mesh-object space into armature-object
                // space: armature obmat * inverse(mesh obmat)
                Matrix4f armatureObjectMatrix = objectHelper.getMatrix(armatureObject, "obmat", true);
                Matrix4f inverseMeshObjectMatrix = objectHelper.getMatrix(objectStructure, "obmat", true).invertLocal();
                Matrix4f objectToArmatureTransformation = armatureObjectMatrix.multLocal(inverseMeshObjectMatrix);
                List<Structure> bonebase = ((Structure) armatureStructure.getFieldValue("bonebase")).evaluateListBase(blenderContext);
                List<Bone> bonesList = new ArrayList<Bone>();
                for (int i = 0; i < bonebase.size(); ++i) {
                    armatureHelper.buildBones(bonebase.get(i), null, bonesList, objectToArmatureTransformation, bonesPoseChannels, blenderContext);
                }
                // prepend a nameless bone at index 0; vertices with no group
                // assignment are bound to bone 0 when weights are read
                bonesList.add(0, new Bone(""));
                Skeleton skeleton = new Skeleton(bonesList.toArray(new Bone[bonesList.size()]));
                // read mesh indexes
                this.meshOMA = meshStructure.getOldMemoryAddress();
                this.readVerticesWeightsData(objectStructure, meshStructure, skeleton, blenderContext);
                // read animations
                ArrayList<Animation> animations = new ArrayList<Animation>();
                List<FileBlockHeader> actionHeaders = blenderContext.getFileBlocks(Integer.valueOf(FileBlockHeader.BLOCK_AC00));
                if (actionHeaders != null) {// it may happen that the model has
                    // armature with no actions
                    for (FileBlockHeader header : actionHeaders) {
                        Structure actionStructure = header.getStructure(blenderContext);
                        String actionName = actionStructure.getName();
                        BoneTrack[] tracks = armatureHelper.getTracks(actionStructure, skeleton, blenderContext);
                        // determining the animation time: the animation is as
                        // long as its longest bone track
                        float maximumTrackLength = 0;
                        for (BoneTrack track : tracks) {
                            float length = track.getLength();
                            if (length > maximumTrackLength) {
                                maximumTrackLength = length;
                            }
                        }
                        Animation boneAnimation = new Animation(actionName, maximumTrackLength);
                        boneAnimation.setTracks(tracks);
                        animations.add(boneAnimation);
                    }
                }
                animData = new AnimData(skeleton, animations);
                // store the animation data for each bone
                for (Structure boneStructure : bonebase) {
                    blenderContext.setAnimData(boneStructure.getOldMemoryAddress(), animData);
                }
                // loading constraints connected with this object
                ConstraintHelper constraintHelper = blenderContext.getHelper(ConstraintHelper.class);
                constraintHelper.loadConstraints(armatureObject, blenderContext);
            }
        }
    }
    /**
     * Applies the loaded armature data to the node: attaches the bone
     * weight/index buffers to every geometry of the modified mesh, bakes the
     * bone constraints and adds the animation and skeleton controls.
     * Returns the node unchanged when no animation data was loaded.
     */
    @Override
    @SuppressWarnings("unchecked")
    public Node apply(Node node, BlenderContext blenderContext) {
        if (invalid) {
            LOGGER.log(Level.WARNING, "Armature modifier is invalid! Cannot be applied to: {0}", node.getName());
        }// if invalid, animData will be null
        if (animData == null) {
            return node;
        }
        // setting weights for bones
        List<Geometry> geomList = (List<Geometry>) blenderContext.getLoadedFeature(this.meshOMA, LoadedFeatureDataType.LOADED_FEATURE);
        for (Geometry geom : geomList) {
            Mesh mesh = geom.getMesh();
            if (this.verticesWeights != null) {
                // boneGroups is the max number of bone weights per vertex
                // computed while the buffers were read
                mesh.setMaxNumWeights(this.boneGroups);
                mesh.setBuffer(this.verticesWeights);
                mesh.setBuffer(this.verticesWeightsIndices);
            }
        }
        // applying bone transforms before constraints are baked
        ArmatureHelper armatureHelper = blenderContext.getHelper(ArmatureHelper.class);
        //TODO: should we apply static bone poses ??? (this breaks the animation)
        // for (int i = 0; i < animData.skeleton.getBoneCount(); ++i) {
        // Bone bone = animData.skeleton.getBone(i);
        // Transform transform = armatureHelper.getBoneBindTransform(bone);
        // Transform boneTransform = armatureHelper.getLocalTransform(bone);
        // if(transform!=null && boneTransform!=null) {
        // bone.setBindTransforms(boneTransform.getTranslation().addLocal(transform.getTranslation()),
        // boneTransform.getRotation().multLocal(transform.getRotation()),
        // boneTransform.getScale().multLocal(transform.getScale()));
        // }
        // }
        // applying constraints to Bones (and only to bones, object constraints
        // are applied in the ObjectHelper)
        for (int i = 0; i < animData.skeleton.getBoneCount(); ++i) {
            Long boneOMA = armatureHelper.getBoneOMA(animData.skeleton.getBone(i));
            List<Constraint> constraints = blenderContext.getConstraints(boneOMA);
            if (constraints != null && constraints.size() > 0) {
                for (Constraint constraint : constraints) {
                    constraint.bakeDynamic();
                    constraint.bakeStatic();
                }
            }
        }
        // applying animations: register every loaded animation by name on an
        // AnimControl attached to the node
        ArrayList<Animation> animList = animData.anims;
        if (animList != null && animList.size() > 0) {
            HashMap<String, Animation> anims = new HashMap<String, Animation>(animList.size());
            for (int i = 0; i < animList.size(); ++i) {
                Animation animation = animList.get(i);
                anims.put(animation.getName(), animation);
            }
            AnimControl control = new AnimControl(animData.skeleton);
            control.setAnimations(anims);
            node.addControl(control);
        }
        node.addControl(new SkeletonControl(animData.skeleton));
        return node;
    }
/**
* This method reads mesh indexes
*
* @param objectStructure
* structure of the object that has the armature modifier applied
* @param meshStructure
* the structure of the object's mesh
* @param blenderContext
* the blender context
* @throws BlenderFileException
* this exception is thrown when the blend file structure is
* somehow invalid or corrupted
*/
private void readVerticesWeightsData(Structure objectStructure, Structure meshStructure, Skeleton skeleton, BlenderContext blenderContext) throws BlenderFileException {
ArmatureHelper armatureHelper = blenderContext.getHelper(ArmatureHelper.class);
Structure defBase = (Structure) objectStructure.getFieldValue("defbase");
Map<Integer, Integer> groupToBoneIndexMap = armatureHelper.getGroupToBoneIndexMap(defBase, skeleton, blenderContext);
int[] bonesGroups = new int[] { 0 };
MeshContext meshContext = blenderContext.getMeshContext(meshStructure.getOldMemoryAddress());
VertexBuffer[] boneWeightsAndIndex = this.getBoneWeightAndIndexBuffer(meshStructure, meshContext.getVertexList().size(), bonesGroups, meshContext.getVertexReferenceMap(), groupToBoneIndexMap, blenderContext);
this.verticesWeights = boneWeightsAndIndex[0];
this.verticesWeightsIndices = boneWeightsAndIndex[1];
this.boneGroups = bonesGroups[0];
}
/**
* This method returns an array of size 2. The first element is a vertex
* buffer holding bone weights for every vertex in the model. The second
* element is a vertex buffer holding bone indices for vertices (the indices
* of bones the vertices are assigned to).
*
* @param meshStructure
* the mesh structure object
* @param vertexListSize
* a number of vertices in the model
* @param bonesGroups
* this is an output parameter, it should be a one-sized array;
* the maximum amount of weights per vertex (up to
* MAXIMUM_WEIGHTS_PER_VERTEX) is stored there
* @param vertexReferenceMap
* this reference map allows to map the original vertices read
* from blender to vertices that are really in the model; one
* vertex may appear several times in the result model
* @param groupToBoneIndexMap
* this object maps the group index (to which a vertices in
* blender belong) to bone index of the model
* @param blenderContext
* the blender context
* @return arrays of vertices weights and their bone indices and (as an
* output parameter) the maximum amount of weights for a vertex
* @throws BlenderFileException
* this exception is thrown when the blend file structure is
* somehow invalid or corrupted
*/
private VertexBuffer[] getBoneWeightAndIndexBuffer(Structure meshStructure, int vertexListSize, int[] bonesGroups, Map<Integer, List<Integer>> vertexReferenceMap, Map<Integer, Integer> groupToBoneIndexMap, BlenderContext blenderContext)
throws BlenderFileException {
Pointer pDvert = (Pointer) meshStructure.getFieldValue("dvert");// dvert = DeformVERTices
// MAXIMUM_WEIGHTS_PER_VERTEX weight/index slots are reserved per vertex
FloatBuffer weightsFloatData = BufferUtils.createFloatBuffer(vertexListSize * MAXIMUM_WEIGHTS_PER_VERTEX);
ByteBuffer indicesData = BufferUtils.createByteBuffer(vertexListSize * MAXIMUM_WEIGHTS_PER_VERTEX);
if (pDvert.isNotNull()) {// assigning weights and bone indices
List<Structure> dverts = pDvert.fetchData(blenderContext.getInputStream());// dverts.size() == verticesAmount (one dvert per
// vertex in blender)
int vertexIndex = 0;
for (Structure dvert : dverts) {
int totweight = ((Number) dvert.getFieldValue("totweight")).intValue();// total amount of weights assigned to the vertex
// (max. 4 in JME)
Pointer pDW = (Pointer) dvert.getFieldValue("dw");
List<Integer> vertexIndices = vertexReferenceMap.get(Integer.valueOf(vertexIndex));// we fetch the referenced vertices here
if (totweight > 0 && pDW.isNotNull() && groupToBoneIndexMap!=null) {// pDW should never be null here, but I check it just in case :)
int weightIndex = 0;
List<Structure> dw = pDW.fetchData(blenderContext.getInputStream());
for (Structure deformWeight : dw) {
Integer boneIndex = groupToBoneIndexMap.get(((Number) deformWeight.getFieldValue("def_nr")).intValue());
// Remove this code if 4 weights limitation is removed
if (weightIndex == 4) {
LOGGER.log(Level.WARNING, "{0} has more than 4 weight on bone index {1}", new Object[] { meshStructure.getName(), boneIndex });
break;
}
// null here means that we came across a group that has no bone attached
if (boneIndex != null) {
float weight = ((Number) deformWeight.getFieldValue("weight")).floatValue();
if (weight == 0.0f) {
// zero-weight entries are remapped to the default 0-indexed bone with full weight
weight = 1;
boneIndex = Integer.valueOf(0);
}
// we apply the weight to all referenced vertices
for (Integer index : vertexIndices) {
weightsFloatData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX + weightIndex, weight);
indicesData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX + weightIndex, boneIndex.byteValue());
}
}
++weightIndex;
}
} else {
// no deform weights for this vertex: bind it fully to the default 0-indexed bone
for (Integer index : vertexIndices) {
weightsFloatData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX, 1.0f);
indicesData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX, (byte) 0);
}
}
++vertexIndex;
}
} else {
// always bind all vertices to 0-indexed bone
// this bone makes the model look normally if vertices have no bone
// assigned
// and it is used in object animation, so if we come across object
// animation
// we can use the 0-indexed bone for this
for (List<Integer> vertexIndexList : vertexReferenceMap.values()) {
// we apply the weight to all referenced vertices
for (Integer index : vertexIndexList) {
weightsFloatData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX, 1.0f);
indicesData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX, (byte) 0);
}
}
}
// normalize the collected weights and learn how many weight slots are really used
bonesGroups[0] = this.endBoneAssigns(vertexListSize, weightsFloatData);
VertexBuffer verticesWeights = new VertexBuffer(Type.BoneWeight);
verticesWeights.setupData(Usage.CpuOnly, bonesGroups[0], Format.Float, weightsFloatData);
VertexBuffer verticesWeightsIndices = new VertexBuffer(Type.BoneIndex);
verticesWeightsIndices.setupData(Usage.CpuOnly, bonesGroups[0], Format.UnsignedByte, indicesData);
return new VertexBuffer[] { verticesWeights, verticesWeightsIndices };
}
/**
 * Normalizes the bone weights of every vertex so that they sum up to 1 and
 * determines the largest number of weight slots actually used by any vertex.
 *
 * @param vertCount
 *            amount of vertices stored in the buffer (4 floats per vertex)
 * @param weightsFloatData
 *            weights for vertices; rewound and updated in place
 * @return the maximum amount of weights assigned to a single vertex
 */
private int endBoneAssigns(int vertCount, FloatBuffer weightsFloatData) {
int maxUsedWeights = 0;
weightsFloatData.rewind();
for (int vertex = 0; vertex < vertCount; ++vertex) {
// read the four weight slots of the current vertex and accumulate their sum
float[] weights = new float[4];
float weightSum = 0;
for (int slot = 0; slot < 4; ++slot) {
weights[slot] = weightsFloatData.get();
weightSum += weights[slot];
}
// the highest non-empty slot tells how many weights this vertex uses
for (int slot = 3; slot >= 0; --slot) {
if (weights[slot] != 0) {
maxUsedWeights = Math.max(maxUsedWeights, slot + 1);
break;
}
}
// rescale the weights so that they add up to 1 (all-zero vertices are left alone)
if (weightSum != 1f && weightSum != 0.0f) {
weightsFloatData.position(weightsFloatData.position() - 4);
float factor = 1f / weightSum;
for (int slot = 0; slot < 4; ++slot) {
weightsFloatData.put(weights[slot] * factor);
}
}
}
weightsFloatData.rewind();
return maxUsedWeights;
}
/** Identifies this modifier as the armature modifier. */
@Override
public String getType() {
return Modifier.ARMATURE_MODIFIER_DATA;
}
}
|
engine/src/blender/com/jme3/scene/plugins/blender/modifiers/ArmatureModifier.java
|
package com.jme3.scene.plugins.blender.modifiers;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jme3.animation.AnimControl;
import com.jme3.animation.Animation;
import com.jme3.animation.Bone;
import com.jme3.animation.BoneTrack;
import com.jme3.animation.Skeleton;
import com.jme3.animation.SkeletonControl;
import com.jme3.math.Matrix4f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.scene.Node;
import com.jme3.scene.VertexBuffer;
import com.jme3.scene.VertexBuffer.Format;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.scene.VertexBuffer.Usage;
import com.jme3.scene.plugins.blender.BlenderContext;
import com.jme3.scene.plugins.blender.BlenderContext.LoadedFeatureDataType;
import com.jme3.scene.plugins.blender.animations.ArmatureHelper;
import com.jme3.scene.plugins.blender.constraints.Constraint;
import com.jme3.scene.plugins.blender.constraints.ConstraintHelper;
import com.jme3.scene.plugins.blender.exceptions.BlenderFileException;
import com.jme3.scene.plugins.blender.file.FileBlockHeader;
import com.jme3.scene.plugins.blender.file.Pointer;
import com.jme3.scene.plugins.blender.file.Structure;
import com.jme3.scene.plugins.blender.meshes.MeshContext;
import com.jme3.scene.plugins.blender.objects.ObjectHelper;
import com.jme3.scene.plugins.ogre.AnimData;
import com.jme3.util.BufferUtils;
/**
 * This modifier allows to add bone animation to the object.
 *
 * @author Marcin Roguski (Kaelthas)
 */
/* package */class ArmatureModifier extends Modifier {
private static final Logger LOGGER = Logger.getLogger(ArmatureModifier.class.getName());
private static final int MAXIMUM_WEIGHTS_PER_VERTEX = 4;
// @Marcin it was an Ogre limitation, but as long as we use a MaxNumWeight
// variable in mesh,
// i guess this limitation has no sense for the blender loader...so i guess
// it's up to you. You'll have to determine the max weight according to the
// provided blend file
// I added a check to avoid crash when loading a model that has more than 4
// weight per vertex on line 258
// If you decide to remove this limitation, remove this code.
// Rémy
/** Loaded animation data. */
private AnimData animData;
/** Old memory address of the mesh that will have the skeleton applied. */
private Long meshOMA;
/**
 * The maximum amount of bone groups applied to a single vertex (max = MAXIMUM_WEIGHTS_PER_VERTEX).
 */
private int boneGroups;
/** The weights of vertices. */
private VertexBuffer verticesWeights;
/** The indexes of bones applied to vertices. */
private VertexBuffer verticesWeightsIndices;
/**
 * This constructor reads animation data from the object structure. The
 * stored data is the AnimData and additional data is armature's OMA.
 *
 * @param objectStructure
 *            the structure of the object
 * @param modifierStructure
 *            the structure of the modifier
 * @param blenderContext
 *            the blender context
 * @throws BlenderFileException
 *             this exception is thrown when the blender file is somehow
 *             corrupted
 */
public ArmatureModifier(Structure objectStructure, Structure modifierStructure, BlenderContext blenderContext) throws BlenderFileException {
Structure meshStructure = ((Pointer) objectStructure.getFieldValue("data")).fetchData(blenderContext.getInputStream()).get(0);
Pointer pDvert = (Pointer) meshStructure.getFieldValue("dvert");// dvert = DeformVERTices
// if pDvert is null then there are no vertex groups and hence no need to
// load the skeleton (until bone envelopes are supported); loading it anyway
// would produce BindPose buffers without BoneIndex or BoneWeight data
if (this.validate(modifierStructure, blenderContext) && pDvert.isNotNull()) {
Pointer pArmatureObject = (Pointer) modifierStructure.getFieldValue("object");
if (pArmatureObject.isNotNull()) {
ArmatureHelper armatureHelper = blenderContext.getHelper(ArmatureHelper.class);
Structure armatureObject = pArmatureObject.fetchData(blenderContext.getInputStream()).get(0);
// load skeleton
Structure armatureStructure = ((Pointer) armatureObject.getFieldValue("data")).fetchData(blenderContext.getInputStream()).get(0);
Structure pose = ((Pointer) armatureObject.getFieldValue("pose")).fetchData(blenderContext.getInputStream()).get(0);
List<Structure> chanbase = ((Structure) pose.getFieldValue("chanbase")).evaluateListBase(blenderContext);
// map each bone's old memory address to its pose channel
Map<Long, Structure> bonesPoseChannels = new HashMap<Long, Structure>(chanbase.size());
for (Structure poseChannel : chanbase) {
Pointer pBone = (Pointer) poseChannel.getFieldValue("bone");
bonesPoseChannels.put(pBone.getOldMemoryAddress(), poseChannel);
}
ObjectHelper objectHelper = blenderContext.getHelper(ObjectHelper.class);
// transformation that brings the mesh object into the armature's space
Matrix4f armatureObjectMatrix = objectHelper.getMatrix(armatureObject, "obmat", true);
Matrix4f inverseMeshObjectMatrix = objectHelper.getMatrix(objectStructure, "obmat", true).invertLocal();
Matrix4f objectToArmatureTransformation = armatureObjectMatrix.multLocal(inverseMeshObjectMatrix);
List<Structure> bonebase = ((Structure) armatureStructure.getFieldValue("bonebase")).evaluateListBase(blenderContext);
List<Bone> bonesList = new ArrayList<Bone>();
for (int i = 0; i < bonebase.size(); ++i) {
armatureHelper.buildBones(bonebase.get(i), null, bonesList, objectToArmatureTransformation, bonesPoseChannels, blenderContext);
}
// an artificial root bone at index 0; vertices without groups are bound to it
bonesList.add(0, new Bone(""));
Skeleton skeleton = new Skeleton(bonesList.toArray(new Bone[bonesList.size()]));
// read mesh indexes (meshStructure was already fetched above)
this.meshOMA = meshStructure.getOldMemoryAddress();
this.readVerticesWeightsData(objectStructure, meshStructure, skeleton, blenderContext);
// read animations
ArrayList<Animation> animations = new ArrayList<Animation>();
List<FileBlockHeader> actionHeaders = blenderContext.getFileBlocks(Integer.valueOf(FileBlockHeader.BLOCK_AC00));
if (actionHeaders != null) {// it may happen that the model has
// armature with no actions
for (FileBlockHeader header : actionHeaders) {
Structure actionStructure = header.getStructure(blenderContext);
String actionName = actionStructure.getName();
BoneTrack[] tracks = armatureHelper.getTracks(actionStructure, skeleton, blenderContext);
// determining the animation time (the length of the longest track)
float maximumTrackLength = 0;
for (BoneTrack track : tracks) {
float length = track.getLength();
if (length > maximumTrackLength) {
maximumTrackLength = length;
}
}
Animation boneAnimation = new Animation(actionName, maximumTrackLength);
boneAnimation.setTracks(tracks);
animations.add(boneAnimation);
}
}
animData = new AnimData(skeleton, animations);
// store the animation data for each bone
for (Structure boneStructure : bonebase) {
blenderContext.setAnimData(boneStructure.getOldMemoryAddress(), animData);
}
// loading constraints connected with this object
ConstraintHelper constraintHelper = blenderContext.getHelper(ConstraintHelper.class);
constraintHelper.loadConstraints(armatureObject, blenderContext);
}
}
}
/**
 * Applies the armature modifier to the given node: binds the previously
 * computed bone weight/index buffers to the node's meshes, bakes bone
 * constraints and attaches the animation and skeleton controls.
 *
 * @param node
 *            the node the modifier is applied to
 * @param blenderContext
 *            the blender context
 * @return the same node (unchanged when no animation data was loaded)
 */
@Override
@SuppressWarnings("unchecked")
public Node apply(Node node, BlenderContext blenderContext) {
if (invalid) {
LOGGER.log(Level.WARNING, "Armature modifier is invalid! Cannot be applied to: {0}", node.getName());
}// if invalid, animData will be null
if (animData == null) {
return node;
}
// setting weights for bones
List<Geometry> geomList = (List<Geometry>) blenderContext.getLoadedFeature(this.meshOMA, LoadedFeatureDataType.LOADED_FEATURE);
for (Geometry geom : geomList) {
Mesh mesh = geom.getMesh();
if (this.verticesWeights != null) {
mesh.setMaxNumWeights(this.boneGroups);
mesh.setBuffer(this.verticesWeights);
mesh.setBuffer(this.verticesWeightsIndices);
}
}
// applying bone transforms before constraints are baked
ArmatureHelper armatureHelper = blenderContext.getHelper(ArmatureHelper.class);
//TODO: should we apply static bone poses ??? (this breaks the animation)
// for (int i = 0; i < animData.skeleton.getBoneCount(); ++i) {
// Bone bone = animData.skeleton.getBone(i);
// Transform transform = armatureHelper.getBoneBindTransform(bone);
// Transform boneTransform = armatureHelper.getLocalTransform(bone);
// if(transform!=null && boneTransform!=null) {
// bone.setBindTransforms(boneTransform.getTranslation().addLocal(transform.getTranslation()),
// boneTransform.getRotation().multLocal(transform.getRotation()),
// boneTransform.getScale().multLocal(transform.getScale()));
// }
// }
// applying constraints to Bones (and only to bones, object constraints
// are applied in the ObjectHelper)
for (int i = 0; i < animData.skeleton.getBoneCount(); ++i) {
Long boneOMA = armatureHelper.getBoneOMA(animData.skeleton.getBone(i));
List<Constraint> constraints = blenderContext.getConstraints(boneOMA);
if (constraints != null && constraints.size() > 0) {
for (Constraint constraint : constraints) {
constraint.bakeDynamic();
constraint.bakeStatic();
}
}
}
// applying animations: register every loaded animation by name in an AnimControl
ArrayList<Animation> animList = animData.anims;
if (animList != null && animList.size() > 0) {
HashMap<String, Animation> anims = new HashMap<String, Animation>(animList.size());
for (int i = 0; i < animList.size(); ++i) {
Animation animation = animList.get(i);
anims.put(animation.getName(), animation);
}
AnimControl control = new AnimControl(animData.skeleton);
control.setAnimations(anims);
node.addControl(control);
}
node.addControl(new SkeletonControl(animData.skeleton));
return node;
}
/**
 * This method reads the vertex weights data: it maps vertex groups to bone
 * indices and builds the bone weight and bone index vertex buffers.
 *
 * @param objectStructure
 * structure of the object that has the armature modifier applied
 * @param meshStructure
 * the structure of the object's mesh
 * @param skeleton
 * the skeleton whose bones the vertex groups are mapped to
 * @param blenderContext
 * the blender context
 * @throws BlenderFileException
 * this exception is thrown when the blend file structure is
 * somehow invalid or corrupted
 */
private void readVerticesWeightsData(Structure objectStructure, Structure meshStructure, Skeleton skeleton, BlenderContext blenderContext) throws BlenderFileException {
ArmatureHelper armatureHelper = blenderContext.getHelper(ArmatureHelper.class);
Structure defBase = (Structure) objectStructure.getFieldValue("defbase");
Map<Integer, Integer> groupToBoneIndexMap = armatureHelper.getGroupToBoneIndexMap(defBase, skeleton, blenderContext);
// one-sized array used as an output parameter by getBoneWeightAndIndexBuffer
int[] bonesGroups = new int[] { 0 };
MeshContext meshContext = blenderContext.getMeshContext(meshStructure.getOldMemoryAddress());
VertexBuffer[] boneWeightsAndIndex = this.getBoneWeightAndIndexBuffer(meshStructure, meshContext.getVertexList().size(), bonesGroups, meshContext.getVertexReferenceMap(), groupToBoneIndexMap, blenderContext);
this.verticesWeights = boneWeightsAndIndex[0];
this.verticesWeightsIndices = boneWeightsAndIndex[1];
this.boneGroups = bonesGroups[0];
}
/**
 * This method returns an array of size 2. The first element is a vertex
 * buffer holding bone weights for every vertex in the model. The second
 * element is a vertex buffer holding bone indices for vertices (the indices
 * of bones the vertices are assigned to).
 *
 * @param meshStructure
 * the mesh structure object
 * @param vertexListSize
 * a number of vertices in the model
 * @param bonesGroups
 * this is an output parameter, it should be a one-sized array;
 * the maximum amount of weights per vertex (up to
 * MAXIMUM_WEIGHTS_PER_VERTEX) is stored there
 * @param vertexReferenceMap
 * this reference map allows to map the original vertices read
 * from blender to vertices that are really in the model; one
 * vertex may appear several times in the result model
 * @param groupToBoneIndexMap
 * this object maps the group index (to which the vertices in
 * blender belong) to the bone index of the model
 * @param blenderContext
 * the blender context
 * @return arrays of vertices weights and their bone indices and (as an
 * output parameter) the maximum amount of weights for a vertex
 * @throws BlenderFileException
 * this exception is thrown when the blend file structure is
 * somehow invalid or corrupted
 */
private VertexBuffer[] getBoneWeightAndIndexBuffer(Structure meshStructure, int vertexListSize, int[] bonesGroups, Map<Integer, List<Integer>> vertexReferenceMap, Map<Integer, Integer> groupToBoneIndexMap, BlenderContext blenderContext)
throws BlenderFileException {
Pointer pDvert = (Pointer) meshStructure.getFieldValue("dvert");// dvert = DeformVERTices
// MAXIMUM_WEIGHTS_PER_VERTEX weight/index slots are reserved per vertex
FloatBuffer weightsFloatData = BufferUtils.createFloatBuffer(vertexListSize * MAXIMUM_WEIGHTS_PER_VERTEX);
ByteBuffer indicesData = BufferUtils.createByteBuffer(vertexListSize * MAXIMUM_WEIGHTS_PER_VERTEX);
if (pDvert.isNotNull()) {// assigning weights and bone indices
List<Structure> dverts = pDvert.fetchData(blenderContext.getInputStream());// dverts.size() == verticesAmount (one dvert per
// vertex in blender)
int vertexIndex = 0;
for (Structure dvert : dverts) {
int totweight = ((Number) dvert.getFieldValue("totweight")).intValue();// total amount of weights assigned to the vertex
// (max. 4 in JME)
Pointer pDW = (Pointer) dvert.getFieldValue("dw");
List<Integer> vertexIndices = vertexReferenceMap.get(Integer.valueOf(vertexIndex));// we fetch the referenced vertices here
if (totweight > 0 && pDW.isNotNull() && groupToBoneIndexMap!=null) {// pDW should never be null here, but I check it just in case :)
int weightIndex = 0;
List<Structure> dw = pDW.fetchData(blenderContext.getInputStream());
for (Structure deformWeight : dw) {
Integer boneIndex = groupToBoneIndexMap.get(((Number) deformWeight.getFieldValue("def_nr")).intValue());
// Remove this code if 4 weights limitation is removed
if (weightIndex == 4) {
LOGGER.log(Level.WARNING, "{0} has more than 4 weight on bone index {1}", new Object[] { meshStructure.getName(), boneIndex });
break;
}
// null here means that we came across a group that has no bone attached
if (boneIndex != null) {
float weight = ((Number) deformWeight.getFieldValue("weight")).floatValue();
if (weight == 0.0f) {
// zero-weight entries are remapped to the default 0-indexed bone with full weight
weight = 1;
boneIndex = Integer.valueOf(0);
}
// we apply the weight to all referenced vertices
for (Integer index : vertexIndices) {
weightsFloatData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX + weightIndex, weight);
indicesData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX + weightIndex, boneIndex.byteValue());
}
}
++weightIndex;
}
} else {
// no deform weights for this vertex: bind it fully to the default 0-indexed bone
for (Integer index : vertexIndices) {
weightsFloatData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX, 1.0f);
indicesData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX, (byte) 0);
}
}
++vertexIndex;
}
} else {
// always bind all vertices to 0-indexed bone
// this bone makes the model look normally if vertices have no bone
// assigned
// and it is used in object animation, so if we come across object
// animation
// we can use the 0-indexed bone for this
for (List<Integer> vertexIndexList : vertexReferenceMap.values()) {
// we apply the weight to all referenced vertices
for (Integer index : vertexIndexList) {
weightsFloatData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX, 1.0f);
indicesData.put(index * MAXIMUM_WEIGHTS_PER_VERTEX, (byte) 0);
}
}
}
// normalize the collected weights and learn how many weight slots are really used
bonesGroups[0] = this.endBoneAssigns(vertexListSize, weightsFloatData);
VertexBuffer verticesWeights = new VertexBuffer(Type.BoneWeight);
verticesWeights.setupData(Usage.CpuOnly, bonesGroups[0], Format.Float, weightsFloatData);
VertexBuffer verticesWeightsIndices = new VertexBuffer(Type.BoneIndex);
verticesWeightsIndices.setupData(Usage.CpuOnly, bonesGroups[0], Format.UnsignedByte, indicesData);
return new VertexBuffer[] { verticesWeights, verticesWeightsIndices };
}
/**
 * Normalizes weights if needed and finds largest amount of weights used for
 * all vertices in the buffer.
 *
 * @param vertCount
 * amount of vertices
 * @param weightsFloatData
 * weights for vertices (4 floats per vertex); updated in place
 * @return the maximum amount of weights assigned to a single vertex
 */
private int endBoneAssigns(int vertCount, FloatBuffer weightsFloatData) {
int maxWeightsPerVert = 0;
weightsFloatData.rewind();
for (int v = 0; v < vertCount; ++v) {
float w0 = weightsFloatData.get(), w1 = weightsFloatData.get(), w2 = weightsFloatData.get(), w3 = weightsFloatData.get();
// the highest non-zero slot tells how many weights this vertex uses
if (w3 != 0) {
maxWeightsPerVert = Math.max(maxWeightsPerVert, 4);
} else if (w2 != 0) {
maxWeightsPerVert = Math.max(maxWeightsPerVert, 3);
} else if (w1 != 0) {
maxWeightsPerVert = Math.max(maxWeightsPerVert, 2);
} else if (w0 != 0) {
maxWeightsPerVert = Math.max(maxWeightsPerVert, 1);
}
float sum = w0 + w1 + w2 + w3;
// rescale the weights to sum up to 1 (all-zero vertices are left alone)
if (sum != 1f && sum != 0.0f) {
weightsFloatData.position(weightsFloatData.position() - 4);
// compute new vals based on sum
float sumToB = 1f / sum;
weightsFloatData.put(w0 * sumToB);
weightsFloatData.put(w1 * sumToB);
weightsFloatData.put(w2 * sumToB);
weightsFloatData.put(w3 * sumToB);
}
}
weightsFloatData.rewind();
return maxWeightsPerVert;
}
/** Identifies this modifier as the armature modifier. */
@Override
public String getType() {
return Modifier.ARMATURE_MODIFIER_DATA;
}
}
|
FIXED: Issue 427: Blender loader writes BindPose buffers without BoneIndex or BoneWeight data
git-svn-id: f9411aee4f13664f2fc428a5b3e824fe43a079a3@8899 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
|
engine/src/blender/com/jme3/scene/plugins/blender/modifiers/ArmatureModifier.java
|
FIXED: Issue 427: Blender loader writes BindPose buffers without BoneIndex or BoneWeight data
|
<ide><path>ngine/src/blender/com/jme3/scene/plugins/blender/modifiers/ArmatureModifier.java
<ide> * corrupted
<ide> */
<ide> public ArmatureModifier(Structure objectStructure, Structure modifierStructure, BlenderContext blenderContext) throws BlenderFileException {
<del> if (this.validate(modifierStructure, blenderContext)) {
<add> Structure meshStructure = ((Pointer) objectStructure.getFieldValue("data")).fetchData(blenderContext.getInputStream()).get(0);
<add> Pointer pDvert = (Pointer) meshStructure.getFieldValue("dvert");// dvert = DeformVERTices
<add>
<add> //if pDvert==null then there are not vertex groups and no need to load skeleton (untill bone envelopes are supported)
<add> if (this.validate(modifierStructure, blenderContext) && pDvert.isNotNull()) {
<ide> Pointer pArmatureObject = (Pointer) modifierStructure.getFieldValue("object");
<ide> if (pArmatureObject.isNotNull()) {
<ide> ArmatureHelper armatureHelper = blenderContext.getHelper(ArmatureHelper.class);
<ide> Skeleton skeleton = new Skeleton(bonesList.toArray(new Bone[bonesList.size()]));
<ide>
<ide> // read mesh indexes
<del> Structure meshStructure = ((Pointer) objectStructure.getFieldValue("data")).fetchData(blenderContext.getInputStream()).get(0);
<ide> this.meshOMA = meshStructure.getOldMemoryAddress();
<ide> this.readVerticesWeightsData(objectStructure, meshStructure, skeleton, blenderContext);
<ide>
|
|
Java
|
apache-2.0
|
error: pathspec 'sharding-core/sharding-core-api/src/main/java/org/apache/shardingsphere/spi/DatabaseTypeBasedSPI.java' did not match any file(s) known to git
|
5480bfd78505e1c8b846887f5c81642e8f7c1280
| 1 |
leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.spi;
/**
 * Database type based SPI.
 *
 * @author zhangliang
 */
public interface DatabaseTypeBasedSPI {
/**
 * Get database type.
 *
 * <p>
 * The value of database type must be registered by SPI for {@code org.apache.shardingsphere.spi.database.DatabaseType}.
 * </p>
 *
 * @return database type
 */
String getDatabaseType();
}
|
sharding-core/sharding-core-api/src/main/java/org/apache/shardingsphere/spi/DatabaseTypeBasedSPI.java
|
for #2082, add DatabaseTypeBasedSPI
|
sharding-core/sharding-core-api/src/main/java/org/apache/shardingsphere/spi/DatabaseTypeBasedSPI.java
|
for #2082, add DatabaseTypeBasedSPI
|
<ide><path>harding-core/sharding-core-api/src/main/java/org/apache/shardingsphere/spi/DatabaseTypeBasedSPI.java
<add>/*
<add> * Licensed to the Apache Software Foundation (ASF) under one or more
<add> * contributor license agreements. See the NOTICE file distributed with
<add> * this work for additional information regarding copyright ownership.
<add> * The ASF licenses this file to You under the Apache License, Version 2.0
<add> * (the "License"); you may not use this file except in compliance with
<add> * the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.apache.shardingsphere.spi;
<add>
<add>/**
<add> * Database type based SPI.
<add> *
<add> * @author zhangliang
<add> */
<add>public interface DatabaseTypeBasedSPI {
<add>
<add> /**
<add> * Get database type.
<add> *
<add> * <p>
<add> * The value of database type must registered by SPI for {@code org.apache.shardingsphere.spi.database.DatabaseType}.
<add> * </p>
<add> *
<add> * @return database type
<add> */
<add> String getDatabaseType();
<add>}
|
|
Java
|
apache-2.0
|
0509e909d054286eed08902ce0e7cb941a95be10
| 0 |
andrenpaes/killbill,aglne/killbill,dconcha/killbill,Massiv-IO/killbill,dconcha/killbill,aeq/killbill,gongpu/killbill,gongpu/killbill,maguero/killbill,sbrossie/killbill,sbrossie/killbill,chengjunjian/killbill,joansmith/killbill,liqianggao/killbill,dconcha/killbill,killbill/killbill,Massiv-IO/killbill,aeq/killbill,gsanblas/Prueba,joansmith/killbill,24671335/killbill,marksimu/killbill,aglne/killbill,gongpu/killbill,24671335/killbill,dut3062796s/killbill,killbill/killbill,liqianggao/killbill,andrenpaes/killbill,sbrossie/killbill,joansmith/killbill,gongpu/killbill,dut3062796s/killbill,marksimu/killbill,dut3062796s/killbill,chengjunjian/killbill,marksimu/killbill,gsanblas/Prueba,24671335/killbill,dut3062796s/killbill,chengjunjian/killbill,sbrossie/killbill,liqianggao/killbill,Massiv-IO/killbill,aglne/killbill,Massiv-IO/killbill,liqianggao/killbill,gsanblas/Prueba,andrenpaes/killbill,kares/killbill,killbill/killbill,maguero/killbill,24671335/killbill,aeq/killbill,dconcha/killbill,kares/killbill,sbrossie/killbill,gsanblas/Prueba,maguero/killbill,killbill/killbill,aeq/killbill,Massiv-IO/killbill,dut3062796s/killbill,24671335/killbill,gongpu/killbill,maguero/killbill,dconcha/killbill,andrenpaes/killbill,marksimu/killbill,kares/killbill,maguero/killbill,killbill/killbill,aglne/killbill,joansmith/killbill,aeq/killbill,aglne/killbill,liqianggao/killbill,joansmith/killbill,andrenpaes/killbill,chengjunjian/killbill,kares/killbill,chengjunjian/killbill,marksimu/killbill
|
/*
* Copyright 2010-2011 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.ning.billing.analytics;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ning.billing.catalog.api.Catalog;
import com.ning.billing.catalog.api.CatalogApiException;
import com.ning.billing.catalog.api.Plan;
import com.ning.billing.catalog.api.Product;
import com.ning.billing.catalog.api.ProductCategory;
import static com.ning.billing.entitlement.api.user.Subscription.SubscriptionState;
/**
 * Describe an event associated with a transition between two BusinessSubscription
 */
public class BusinessSubscriptionEvent {
private static final Logger log = LoggerFactory.getLogger(BusinessSubscriptionEvent.class);
// category token used in the string form when no product category is known
private static final String MISC = "MISC";
// the kind of subscription transition; SYSTEM_* variants are produced by phase changes
public enum EventType {
ADD,
CANCEL,
RE_ADD,
CHANGE,
SYSTEM_CANCEL,
SYSTEM_CHANGE
}
// the transition kind of this event
private final EventType eventType;
// product category of the plan involved; null when unknown (serialized as MISC)
private final ProductCategory category;
/**
 * Parses the string form produced by {@link #toString()}, i.e.
 * {@code <EVENT_TYPE>_<CATEGORY>} where the category may be {@code MISC}.
 *
 * @param eventString the serialized event
 * @return the parsed event
 * @throws IllegalArgumentException when no event type prefix matches
 */
public static BusinessSubscriptionEvent valueOf(final String eventString) {
    for (final EventType candidate : EventType.values()) {
        final String prefix = candidate.toString().toUpperCase();
        if (eventString.startsWith(prefix)) {
            // everything after "<TYPE>_" names the product category (or MISC)
            final String categoryPart = eventString.substring(candidate.toString().length() + 1);
            final ProductCategory category = MISC.equals(categoryPart) ? null : ProductCategory.valueOf(categoryPart);
            return new BusinessSubscriptionEvent(candidate, category);
        }
    }
    throw new IllegalArgumentException("Unable to parse event string: " + eventString);
}
// Public for internal reasons
public BusinessSubscriptionEvent(final EventType eventType, final ProductCategory category) {
this.eventType = eventType;
this.category = category;
}
/** @return the product category of the plan, or null when unknown */
public ProductCategory getCategory() {
return category;
}
/** @return the kind of subscription transition this event represents */
public EventType getEventType() {
return eventType;
}
/** Creates an ADD event for the given plan. */
public static BusinessSubscriptionEvent subscriptionCreated(final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
return eventFromType(EventType.ADD, plan, catalog, eventTime, subscriptionCreationDate);
}
/** Creates a CANCEL event for the given plan. */
public static BusinessSubscriptionEvent subscriptionCancelled(final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
return eventFromType(EventType.CANCEL, plan, catalog, eventTime, subscriptionCreationDate);
}
/** Creates a CHANGE event for the given plan. */
public static BusinessSubscriptionEvent subscriptionChanged(final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
return eventFromType(EventType.CHANGE, plan, catalog, eventTime, subscriptionCreationDate);
}
/** Creates a RE_ADD event for the given plan. */
public static BusinessSubscriptionEvent subscriptionRecreated(final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
return eventFromType(EventType.RE_ADD, plan, catalog, eventTime, subscriptionCreationDate);
}
/**
 * Creates a system event for a phase change: SYSTEM_CANCEL when the
 * subscription is already cancelled, SYSTEM_CHANGE otherwise.
 */
public static BusinessSubscriptionEvent subscriptionPhaseChanged(final String plan, final SubscriptionState state, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
if (state != null && state.equals(SubscriptionState.CANCELLED)) {
return eventFromType(EventType.SYSTEM_CANCEL, plan, catalog, eventTime, subscriptionCreationDate);
} else {
return eventFromType(EventType.SYSTEM_CHANGE, plan, catalog, eventTime, subscriptionCreationDate);
}
}
/**
 * Builds an event of the given type, resolving the plan name against the
 * catalog to derive the product category.
 *
 * @param eventType the kind of transition
 * @param plan the plan name to look up in the catalog
 * @param catalog the catalog to resolve the plan against
 * @param eventTime time of the event, used for the catalog lookup
 * @param subscriptionCreationDate creation date of the subscription, used for the catalog lookup
 * @return the event; falls back to a null (MISC) category when the plan cannot be resolved
 */
private static BusinessSubscriptionEvent eventFromType(final EventType eventType, final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
Plan thePlan = null;
try {
thePlan = catalog.findPlan(plan, eventTime, subscriptionCreationDate);
} catch (CatalogApiException e) {
// keep going with a null plan; attach the exception so the stack trace is not lost
log.error(String.format("Failed to retrieve Plan from catalog for %s", plan), e);
}
final ProductCategory category = getTypeFromSubscription(thePlan);
return new BusinessSubscriptionEvent(eventType, category);
}
/**
 * Extracts the product category from a plan, or returns null when the plan,
 * its product, the product's catalog name or its category is missing.
 */
private static ProductCategory getTypeFromSubscription(final Plan plan) {
    if (plan == null) {
        return null;
    }
    final Product product = plan.getProduct();
    if (product == null || product.getCatalogName() == null || product.getCategory() == null) {
        return null;
    }
    return product.getCategory();
}
/** Serializes as {@code <EVENT_TYPE>_<CATEGORY>}, using MISC when the category is null. */
@Override
public String toString() {
return eventType.toString() + "_" + (category == null ? MISC : category.toString().toUpperCase());
}
/** Two events are equal when both their event type and category match. */
@Override
public boolean equals(final Object o) {
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    final BusinessSubscriptionEvent other = (BusinessSubscriptionEvent) o;
    // both fields are enums, so identity comparison is equivalent to equals()
    return category == other.category && eventType == other.eventType;
}
/** Hash consistent with {@link #equals(Object)}: combines event type and category. */
@Override
public int hashCode() {
    final int typeHash = eventType == null ? 0 : eventType.hashCode();
    final int categoryHash = category == null ? 0 : category.hashCode();
    return 31 * typeHash + categoryHash;
}
}
|
analytics/src/main/java/com/ning/billing/analytics/BusinessSubscriptionEvent.java
|
/*
* Copyright 2010-2011 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.ning.billing.analytics;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ning.billing.catalog.api.Catalog;
import com.ning.billing.catalog.api.CatalogApiException;
import com.ning.billing.catalog.api.Plan;
import com.ning.billing.catalog.api.Product;
import com.ning.billing.catalog.api.ProductCategory;
import static com.ning.billing.entitlement.api.user.Subscription.SubscriptionState;
/**
* Describe an event associated with a transition between two BusinessSubscription
*/
public class BusinessSubscriptionEvent {
private static final Logger log = LoggerFactory.getLogger(BusinessSubscriptionEvent.class);
private static final String MISC = "MISC";
public enum EventType {
ADD,
CANCEL,
RE_ADD,
CHANGE,
SYSTEM_CANCEL,
SYSTEM_CHANGE
}
private final EventType eventType;
private final ProductCategory category;
public static BusinessSubscriptionEvent valueOf(final String eventString) {
for (final EventType possibleEventType : EventType.values()) {
if (!eventString.startsWith(possibleEventType.toString().toUpperCase())) {
continue;
}
final String categoryString = eventString.substring(possibleEventType.toString().length() + 1, eventString.length());
if (categoryString.equals(MISC)) {
return new BusinessSubscriptionEvent(possibleEventType, null);
} else {
return new BusinessSubscriptionEvent(possibleEventType, ProductCategory.valueOf(categoryString));
}
}
throw new IllegalArgumentException("Unable to parse event string: " + eventString);
}
private BusinessSubscriptionEvent(final EventType eventType, final ProductCategory category) {
this.eventType = eventType;
this.category = category;
}
public ProductCategory getCategory() {
return category;
}
public EventType getEventType() {
return eventType;
}
public static BusinessSubscriptionEvent subscriptionCreated(final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
return eventFromType(EventType.ADD, plan, catalog, eventTime, subscriptionCreationDate);
}
public static BusinessSubscriptionEvent subscriptionCancelled(final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
return eventFromType(EventType.CANCEL, plan, catalog, eventTime, subscriptionCreationDate);
}
public static BusinessSubscriptionEvent subscriptionChanged(final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
return eventFromType(EventType.CHANGE, plan, catalog, eventTime, subscriptionCreationDate);
}
public static BusinessSubscriptionEvent subscriptionRecreated(final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
return eventFromType(EventType.RE_ADD, plan, catalog, eventTime, subscriptionCreationDate);
}
public static BusinessSubscriptionEvent subscriptionPhaseChanged(final String plan, final SubscriptionState state, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
if (state != null && state.equals(SubscriptionState.CANCELLED)) {
return eventFromType(EventType.SYSTEM_CANCEL, plan, catalog, eventTime, subscriptionCreationDate);
} else {
return eventFromType(EventType.SYSTEM_CHANGE, plan, catalog, eventTime, subscriptionCreationDate);
}
}
private static BusinessSubscriptionEvent eventFromType(final EventType eventType, final String plan, Catalog catalog, DateTime eventTime, DateTime subscriptionCreationDate) {
Plan thePlan = null;
try {
thePlan = catalog.findPlan(plan, eventTime, subscriptionCreationDate);
} catch (CatalogApiException e) {
log.error(String.format("Failed to retrieve PLan from catalog for %s", plan));
}
final ProductCategory category = getTypeFromSubscription(thePlan);
return new BusinessSubscriptionEvent(eventType, category);
}
private static ProductCategory getTypeFromSubscription(final Plan plan) {
if (plan != null && plan.getProduct() != null) {
final Product product = plan.getProduct();
if (product.getCatalogName() != null && product.getCategory() != null) {
return product.getCategory();
}
}
return null;
}
@Override
public String toString() {
return eventType.toString() + "_" + (category == null ? MISC : category.toString().toUpperCase());
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final BusinessSubscriptionEvent that = (BusinessSubscriptionEvent) o;
if (category != that.category) {
return false;
}
if (eventType != null ? !eventType.equals(that.eventType) : that.eventType != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = eventType != null ? eventType.hashCode() : 0;
result = 31 * result + (category != null ? category.hashCode() : 0);
return result;
}
}
|
analytics: make BusinessSubscriptionEvent constructor public for internal reasons
Signed-off-by: Pierre-Alexandre Meyer <[email protected]>
|
analytics/src/main/java/com/ning/billing/analytics/BusinessSubscriptionEvent.java
|
analytics: make BusinessSubscriptionEvent constructor public for internal reasons
|
<ide><path>nalytics/src/main/java/com/ning/billing/analytics/BusinessSubscriptionEvent.java
<ide> throw new IllegalArgumentException("Unable to parse event string: " + eventString);
<ide> }
<ide>
<del> private BusinessSubscriptionEvent(final EventType eventType, final ProductCategory category) {
<add> // Public for internal reasons
<add> public BusinessSubscriptionEvent(final EventType eventType, final ProductCategory category) {
<ide> this.eventType = eventType;
<ide> this.category = category;
<ide> }
|
|
Java
|
agpl-3.0
|
44da02dbcf37a430f67b0ba450d8a8b49fcbe01d
| 0 |
tdefilip/opennms,roskens/opennms-pre-github,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,tdefilip/opennms,roskens/opennms-pre-github,tdefilip/opennms,tdefilip/opennms,roskens/opennms-pre-github,aihua/opennms,rdkgit/opennms,roskens/opennms-pre-github,aihua/opennms,roskens/opennms-pre-github,rdkgit/opennms,rdkgit/opennms,aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,aihua/opennms,roskens/opennms-pre-github,aihua/opennms,rdkgit/opennms,aihua/opennms,aihua/opennms,tdefilip/opennms,tdefilip/opennms,tdefilip/opennms,rdkgit/opennms,rdkgit/opennms,aihua/opennms,aihua/opennms,rdkgit/opennms
|
//
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is Copyright (C) 2002-2003 The OpenNMS Group, Inc. All rights reserved.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// Modifications:
//
// 2007 May 21: Use Java 5 generics. - [email protected]
//
// Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
// OpenNMS Licensing <[email protected]>
// http://www.opennms.org/
// http://www.opennms.com/
//
//
// Tab Size = 8
//
//
package org.opennms.core.queue;
/**
* <p>
* This interface defines a FIFO queue that can be open and closed to control
* the addition of elements to the queue. When the queue is opened it is
* possible to add new elements to the queue. When the queue is closed, it is
* should not be possible to add elements to the queue. It should always be
* possible to read elements from the queue, so long as it is not empty.
* </p>
*
* @author <a href="mailto:[email protected]">Brian Weaver </a>
* @author <a href="http://www.opennms.org/">OpenNMS </a>
*
*/
public interface ClosableFifoQueue<T> extends FifoQueue<T> {
/**
* Returns true if the queue is currently open.
*
* @return True if the queue is open.
*/
public boolean isOpen();
/**
* Returns true if the queue is currently closed.
*
* @return True if the queue is closed.
*/
public boolean isClosed();
/**
* Closes a currently open queue. When a queue is closed is should still
* allow elements already in the queue to be removed, but new elements
* should not be added.
*
* @exception org.opennms.core.queue.FifoQueueException
* Thrown if an error occurs closing the queue.
*/
public void close() throws FifoQueueException;
/**
* Ensures that the queue is open and new elements can be added to the
* queue.
*
* @exception org.opennms.core.queue.FifoQueueException
* Thrown if an error occurs opening the queue.
*/
public void open() throws FifoQueueException;
}
|
opennms-util/src/main/java/org/opennms/core/queue/ClosableFifoQueue.java
|
//
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is Copyright (C) 2002-2003 The OpenNMS Group, Inc. All rights reserved.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
// OpenNMS Licensing <[email protected]>
// http://www.opennms.org/
// http://www.opennms.com/
//
//
// Tab Size = 8
//
//
package org.opennms.core.queue;
/**
* <p>
* This interface defines a FIFO queue that can be open and closed to control
* the addition of elements to the queue. When the queue is opened it is
* possible to add new elements to the queue. When the queue is closed, it is
* should not be possible to add elements to the queue. It should always be
* possible to read elements from the queue, so long as it is not empty.
* </p>
*
* @author <a href="mailto:[email protected]">Brian Weaver </a>
* @author <a href="http://www.opennms.org/">OpenNMS </a>
*
*/
public interface ClosableFifoQueue extends FifoQueue {
/**
* Returns true if the queue is currently open.
*
* @return True if the queue is open.
*/
public boolean isOpen();
/**
* Returns true if the queue is currently closed.
*
* @return True if the queue is closed.
*/
public boolean isClosed();
/**
* Closes a currently open queue. When a queue is closed is should still
* allow elements already in the queue to be removed, but new elements
* should not be added.
*
* @exception org.opennms.core.queue.FifoQueueException
* Thrown if an error occurs closing the queue.
*/
public void close() throws FifoQueueException;
/**
* Ensures that the queue is open and new elements can be added to the
* queue.
*
* @exception org.opennms.core.queue.FifoQueueException
* Thrown if an error occurs opening the queue.
*/
public void open() throws FifoQueueException;
}
|
Use java 5 generics.
|
opennms-util/src/main/java/org/opennms/core/queue/ClosableFifoQueue.java
|
Use java 5 generics.
|
<ide><path>pennms-util/src/main/java/org/opennms/core/queue/ClosableFifoQueue.java
<ide> //
<ide> // OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
<ide> //
<add>// Modifications:
<add>//
<add>// 2007 May 21: Use Java 5 generics. - [email protected]
<add>//
<ide> // Copyright (C) 1999-2001 Oculan Corp. All rights reserved.
<ide> //
<ide> // This program is free software; you can redistribute it and/or modify
<ide> * @author <a href="http://www.opennms.org/">OpenNMS </a>
<ide> *
<ide> */
<del>public interface ClosableFifoQueue extends FifoQueue {
<add>public interface ClosableFifoQueue<T> extends FifoQueue<T> {
<ide> /**
<ide> * Returns true if the queue is currently open.
<ide> *
|
|
JavaScript
|
apache-2.0
|
8cb943d6d94da44adaf16f0c2b59e0af08598e4b
| 0 |
cschuff/openui5,nzamani/openui5,SAP/openui5,SQCLabs/openui5,cschuff/openui5,SAP/openui5,SAP/openui5,SAP/openui5,nzamani/openui5,SQCLabs/openui5,SQCLabs/openui5,nzamani/openui5,cschuff/openui5
|
/*!
* ${copyright}
*/
// Provides control sap.uxap.AnchorBar.
sap.ui.define([
"sap/m/Button",
"sap/m/PlacementType",
"sap/m/Popover",
"sap/m/Toolbar",
"sap/ui/core/IconPool",
"sap/ui/core/Item",
"sap/ui/core/ResizeHandler",
"sap/ui/core/delegate/ScrollEnablement",
"sap/ui/layout/HorizontalLayout",
"sap/ui/Device",
"sap/ui/core/CustomData",
"./HierarchicalSelect",
"./library"
], function (Button, PlacementType, Popover, Toolbar, IconPool, Item, ResizeHandler,
ScrollEnablement, HorizontalLayout, Device, CustomData, HierarchicalSelect, library) {
"use strict";
/**
* Constructor for a new AnchorBar.
*
* @param {string} [sId] id for the new control, generated automatically if no id is given
* @param {object} [mSettings] initial settings for the new control
*
* @class
* Anchor bar is the navigation bar of an Object page. Its purpose is to provide links to all Sections and Subsections. Takes the form of a Select on phone.
* @extends sap.m.Toolbar
*
* @author SAP SE
*
* @constructor
* @public
* @since 1.26
* @alias sap.uxap.AnchorBar
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var AnchorBar = Toolbar.extend("sap.uxap.AnchorBar", /** @lends sap.uxap.AnchorBar.prototype */ {
metadata: {
library: "sap.uxap",
properties: {
/**
* Determines whether to show a Popover with Subsection links when clicking on Section links in the Anchor bar.
*/
showPopover: {type: "boolean", defaultValue: true},
/**
* Determines whether the Anchor bar items are displayed in upper case.
*/
upperCase: {type: "boolean", defaultValue: false}
},
associations: {
/**
* The button that represents the Section being scrolled by the user.
*/
selectedButton: {type: "sap.m.Button", multiple: false}
},
aggregations: {
_select: {type: "sap.uxap.HierarchicalSelect", multiple: false, visibility: "hidden"},
_popovers: {type: "sap.m.Popover", multiple: true, visibility: "hidden"},
_scrollArrowLeft: {type: "sap.ui.core.Control", multiple: false, visibility: "hidden"},
_scrollArrowRight: {type: "sap.ui.core.Control", multiple: false, visibility: "hidden"}
}
}
});
AnchorBar.prototype.init = function () {
if (Toolbar.prototype.init) {
Toolbar.prototype.init.call(this);
}
this.addStyleClass("sapUxAPAnchorBar");
this._oPressHandlers = {}; //keep references on the press handlers we set on first level items (in case of behavior change)
this._oSectionInfo = {}; //keep scrolling info on sections
this._oScroller = null;
this._sSelectedKey = null; // keep track of sap.uxap.HierarchicalSelect selected key
this._bRtl = sap.ui.getCore().getConfiguration().getRTL();
//are we on an rtl scenario?
//IE handles rtl in a transparent way (positions positives, scroll starts at the end)
//while firefox, safari and chrome have a special management (scroll at the beginning and negative positioning)
//therefore we will apply some specific actions only if are in rtl and not in IE.
this._bRtlScenario = this._bRtl && !Device.browser.msie;
//there are 2 different uses cases:
//case 1: on a real phone we don't need the scrolling anchorBar, just the hierarchicalSelect
//case 2: on a real tablet or a desktop we need both as the size may change
this._bHasButtonsBar = Device.system.tablet || Device.system.desktop;
this._oSelect = this._getHierarchicalSelect();
//case 2 requires the scrolling anchorBar
if (this._bHasButtonsBar) {
//horizontal scrolling
this._oScroller = new ScrollEnablement(this, this.getId() + "-scroll", {
horizontal: true,
vertical: false,
nonTouchScrolling: true
});
this._iREMSize = parseInt(jQuery("body").css("font-size"), 10);
this._iTolerance = this._iREMSize * 1; // 1 rem
this._iOffset = this._iREMSize * 3; // 3 rem
//listen to resize
this._sResizeListenerId = undefined; //defined in onAfterRendering
}
//composite controls
this.setDesign("Transparent"); //styling is coming from css
};
/*******************************************************************************
* UX design
******************************************************************************/
AnchorBar.SCROLL_STEP = 250;// how many pixels to scroll with every overflow arrow click
AnchorBar.SCROLL_DURATION = 500; // ms
AnchorBar.DOM_CALC_DELAY = 200; // ms
AnchorBar.prototype.setSelectedButton = function (oButton) {
var aSelectItems = this._oSelect.getItems(),
bHasSelectItems = aSelectItems.length > 0;
if (typeof oButton === "string") {
oButton = sap.ui.getCore().byId(oButton);
}
if (oButton) {
if (oButton.getId() === this.getSelectedButton()) {
return this;
}
var oSelectedSectionId = oButton.data("sectionId");
this._sSelectedKey = oSelectedSectionId;
if (oSelectedSectionId && bHasSelectItems) {
this._oSelect.setSelectedKey(oSelectedSectionId);
}
if (this._bHasButtonsBar) {
//remove selection class from the currently selected item
this.$().find(".sapUxAPAnchorBarButtonSelected").removeClass("sapUxAPAnchorBarButtonSelected");
oButton.$().addClass("sapUxAPAnchorBarButtonSelected");
if (oSelectedSectionId) {
this.scrollToSection(oSelectedSectionId, AnchorBar.SCROLL_DURATION);
}
this._setAnchorButtonsTabFocusValues(oButton);
}
}
return this.setAssociation("selectedButton", oButton, true /* don't rerender */);
};
/*******************************************************************************
* Responsive behavior
******************************************************************************/
AnchorBar.prototype.setShowPopover = function (bValue, bSuppressInvalidate) {
if (this.getShowPopover() === bValue) {
return this;
}
var sSelectedButton, bNeedInvalidate = !jQuery.isEmptyObject(this._oPressHandlers);
//changing the behavior after the firstRendering is removing all press handlers on first level items
if (bNeedInvalidate) {
var aContent = this.getContent() || [];
sSelectedButton = this.getSelectedButton();
aContent.forEach(this._detachPopoverHandler, this);
}
this.setProperty("showPopover", bValue, true /* always trigger re-rendering manually */);
if (bNeedInvalidate) {
this.rerender();
if (sSelectedButton) {
this.setSelectedButton(sSelectedButton);
}
}
return this;
};
AnchorBar.prototype.getSelectedSection = function () {
var oSelectedButton = this.getSelectedButton();
if (oSelectedButton && (typeof (oSelectedButton) === "string" )) {
oSelectedButton = sap.ui.getCore().byId(oSelectedButton);
}
if (oSelectedButton && (oSelectedButton instanceof Button)
&& oSelectedButton.data("sectionId")) {
return sap.ui.getCore().byId(oSelectedButton.data("sectionId"));
}
return null;
};
/**
* create phone equivalents for each of the provided content controls
*/
AnchorBar.prototype.onBeforeRendering = function () {
if (Toolbar.prototype.onBeforeRendering) {
Toolbar.prototype.onBeforeRendering.call(this);
}
var aContent = this.getContent() || [],
bUpperCase = this.getUpperCase(),
oPopoverState = {
oLastFirstLevelButton: null,
oCurrentPopover: null
};
//rebuild select items
this._oSelect.removeAllItems();
this._oSelect.setUpperCase(bUpperCase);
this.toggleStyleClass("sapUxAPAnchorBarUpperCase", bUpperCase);
//create responsive equivalents of the provided controls
aContent.forEach(function (oButton) {
this._createSelectItem(oButton);
// desktop scenario logic: builds the scrolling anchorBar
if (this._bHasButtonsBar) {
this._createPopoverSubMenu(oButton, oPopoverState);
}
}, this);
if (aContent.length > 0 && this._sSelectedKey) {
this._oSelect.setSelectedKey(this._sSelectedKey);
}
};
AnchorBar.prototype.addContent = function (oButton, bInvalidate) {
oButton.addStyleClass("sapUxAPAnchorBarButton");
oButton.removeAllAriaDescribedBy();
if (this._bHasButtonsBar && (oButton.data("secondLevel") === true || oButton.data("secondLevel") === "true")) {
//attach handler on the scrolling mechanism
oButton.attachPress(this._handleDirectScroll, this);
}
return this.addAggregation("content", oButton, bInvalidate);
};
AnchorBar.prototype._createSelectItem = function (oButton) {
var bIsSecondLevel = oButton.data("secondLevel") === true || oButton.data("secondLevel") === "true";
//create the phone equivalent item if the button has some visible text (UX rule)
if (oButton.getText().trim() != "" && (!bIsSecondLevel || oButton.data("bTitleVisible") === true)) {
var oPhoneItem = new Item({
key: oButton.data("sectionId"),
text: oButton.getText(),
customData: [
new CustomData({
key: "secondLevel",
value: oButton.data("secondLevel")
})
]
});
this._oSelect.addItem(oPhoneItem);
}
};
AnchorBar.prototype._createPopoverSubMenu = function (oButton, oPopoverState) {
var bIsSecondLevel = oButton.data("secondLevel") === true || oButton.data("secondLevel") === "true",
fnPressHandler = null;
//handles the tablet/desktop hierarchical behavior
//a second level is injected into the latest first level
//at this point we know that there are children to the last firstLevel therefore we can create the popover
if (bIsSecondLevel) {
if (oPopoverState.oLastFirstLevelButton && oPopoverState.oCurrentPopover) {
//don't attach the parent press handler for each child
if (!this._oPressHandlers[oPopoverState.oLastFirstLevelButton.getId()]) {
fnPressHandler = jQuery.proxy(this._handlePopover, /* closure with oLastFirstLevelButton and oCurrentPopover as context */
{
oCurrentPopover: oPopoverState.oCurrentPopover,
oLastFirstLevelButton: oPopoverState.oLastFirstLevelButton
}
);
oPopoverState.oLastFirstLevelButton.attachPress(fnPressHandler);
this._oPressHandlers[oPopoverState.oLastFirstLevelButton.getId()] = fnPressHandler;
}
oPopoverState.oCurrentPopover.addContent(oButton);
} else if (this.getShowPopover()) {
jQuery.sap.log.error("sapUxApAnchorBar :: missing parent first level for item " + oButton.getText());
} else {
this.removeContent(oButton);
oButton.destroy();
}
} else {
oPopoverState.oLastFirstLevelButton = oButton;
//default behavior: the first level show a popover containing second levels
if (this.getShowPopover()) {
oPopoverState.oCurrentPopover = new Popover({
placement: PlacementType.Bottom,
showHeader: false,
verticalScrolling: true,
horizontalScrolling: false,
contentWidth: "auto",
showArrow: false
});
oPopoverState.oCurrentPopover.addStyleClass("sapUxAPAnchorBarPopover");
this._addKeyboardHandling(oPopoverState.oCurrentPopover);
this.addAggregation('_popovers', oPopoverState.oCurrentPopover);
//alternative behavior: the first level triggers direct navigation
} else if (!this._oPressHandlers[oPopoverState.oLastFirstLevelButton.getId()]) {
fnPressHandler = jQuery.proxy(this._handleDirectScroll, this);
oPopoverState.oLastFirstLevelButton.attachPress(fnPressHandler);
this._oPressHandlers[oPopoverState.oLastFirstLevelButton.getId()] = fnPressHandler;
}
}
};
AnchorBar.prototype._addKeyboardHandling = function (oCurrentPopover) {
oCurrentPopover.onsapdown = function (oEvent) {
if (oEvent.target.nextSibling) {
oEvent.target.nextSibling.focus();
}
};
oCurrentPopover.onsapright = function (oEvent) {
oCurrentPopover.onsapdown(oEvent);
};
oCurrentPopover.onsapup = function (oEvent) {
if (oEvent.target.previousSibling) {
oEvent.target.previousSibling.focus();
}
};
oCurrentPopover.onsapleft = function (oEvent) {
oCurrentPopover.onsapup(oEvent);
};
oCurrentPopover.onsaphome = function (oEvent) {
if (oEvent.target.parentElement.firstChild) {
oEvent.target.parentElement.firstChild.focus();
}
};
oCurrentPopover.onsapend = function (oEvent) {
if (oEvent.target.parentElement.lastChild) {
oEvent.target.parentElement.lastChild.focus();
}
};
oCurrentPopover.onsappageup = this._handlePageUp.bind(oCurrentPopover);
oCurrentPopover.onsappagedown = this._handlePageDown.bind(oCurrentPopover);
};
AnchorBar.prototype._detachPopoverHandler = function (oButton) {
if (this._oPressHandlers[oButton.getId()]) {
oButton.detachPress(this._oPressHandlers[oButton.getId()]);
this._oPressHandlers[oButton.getId()] = null;
}
};
AnchorBar.prototype._handlePopover = function (oEvent) {
var aPopoverButtons = this.oCurrentPopover.getContent() || [];
//open the popover only if we are in Tablet/Desktop scenario = the button is visible in the anchorBar
if (this.oLastFirstLevelButton.$().is(":visible")) {
//specific use case management: if there are only 1 button in the popover, then we don't display it and navigate directly (= the subsection is "promoted" it to a section level)
//this is a specific behavior asked by UX as of Sep 25, 2014
if (aPopoverButtons.length == 1) {
aPopoverButtons[0].firePress({});
} else {
this.oCurrentPopover.openBy(this.oLastFirstLevelButton);
}
}
};
AnchorBar.prototype._handleDirectScroll = function (oEvent) {
if (oEvent.getSource().getParent() instanceof Popover) {
oEvent.getSource().getParent().close();
}
this._requestScrollToSection(oEvent.getSource().data("sectionId"));
};
AnchorBar.prototype._requestScrollToSection = function (sRequestedSectionId) {
var oRequestedSection = sap.ui.getCore().byId(sRequestedSectionId),
oRequestedSectionParent = oRequestedSection.getParent();
if (this.getParent() instanceof library.ObjectPageLayout) {
// determine the next section that will appear selected in the anchorBar after the scroll
var sNextSelectedSection = sRequestedSectionId;
// if the requestedSection is a subsection, the nextSelectedSection will be its parent (since anchorBar contains only first-level sections)
if (oRequestedSection instanceof library.ObjectPageSubSection &&
oRequestedSectionParent instanceof library.ObjectPageSection) {
sNextSelectedSection = oRequestedSectionParent.getId();
}
// we set *direct* scrolling by which we instruct the page to *skip* processing of intermediate sections (sections between current and requested)
this.getParent().setDirectScrollingToSection(sNextSelectedSection);
// finally request the page to scroll to the requested section
this.getParent().scrollToSection(oRequestedSection.getId(), null, 0, true);
}
if (oRequestedSection instanceof library.ObjectPageSubSection &&
oRequestedSectionParent instanceof library.ObjectPageSection) {
oRequestedSectionParent.setAssociation("selectedSubSection", oRequestedSection, true);
}
};
/**
* called on phone display only when a user selects a section to navigate to
* simulate the press on the corresponding button
* @param {*} oEvent event
* @private
*/
AnchorBar.prototype._onSelectChange = function (oEvent) {
var oSelectedItem = oEvent.getParameter("selectedItem"), oSelectedSection;
oSelectedSection = sap.ui.getCore().byId(oSelectedItem.getKey());
if (oSelectedSection) {
this._requestScrollToSection(oSelectedSection.getId());
} else {
jQuery.sap.log.error("AnchorBar :: cannot find corresponding section", oSelectedItem.getKey());
}
};
AnchorBar.prototype._getHierarchicalSelect = function () {
if (!this.getAggregation('_select')) {
this.setAggregation('_select', new HierarchicalSelect({
width: "100%",
icon: "sap-icon://overflow",
change: jQuery.proxy(this._onSelectChange, this)
}));
}
return this.getAggregation('_select');
};
/**
* Creates a new scroll arrow. The scroll arrow consists of two controls:
* 1. A HorizontalLayout which is used to display the gradient mask and to serve as a container for the arrow.
* 2. A Button which displays the arrow itself.
* In bluecrystal theme the button appears when hovering over the gradient mask and is not focusable.
* In HCB, the button is always visible and can receive focus.
*
* @param {boolean} bLeft indicates whether this is the left button
* @return {sap.ui.layout.HorizontalLayout} a new scroll arrow
* @private
*/
AnchorBar.prototype._createScrollArrow = function (bLeft) {
var sArrowId,
sIconName,
sArrowClass,
oScrollButton,
that = this;
if (bLeft) {
sArrowId = this.getId() + "-arrowScrollLeft";
sIconName = "slim-arrow-left";
sArrowClass = "anchorBarArrowLeft";
} else {
sArrowId = this.getId() + "-arrowScrollRight";
sIconName = "slim-arrow-right";
sArrowClass = "anchorBarArrowRight";
}
oScrollButton = new Button(sArrowId, {
icon: IconPool.getIconURI(sIconName),
type: "Transparent",
press: function (oEvent) {
oEvent.preventDefault();
that._handleScrollButtonTap(bLeft);
}
});
oScrollButton.addEventDelegate({
onAfterRendering: function () {
if (sap.ui.getCore().getConfiguration().getTheme() != "sap_hcb") {
this.$().attr("tabindex", -1);
}
},
onThemeChanged: function () {
if (sap.ui.getCore().getConfiguration().getTheme() == "sap_hcb") {
this.$().removeAttr("tabindex");
} else {
this.$().attr("tabindex", -1);
}
}
}, oScrollButton);
return new HorizontalLayout({
content: [oScrollButton]
}).addStyleClass("anchorBarArrow").addStyleClass(sArrowClass);
};
/**
* Overwritten getter for aggregation "_scrollArrowLeft".
* Implements lazy loading mechanism.
*
* @return {sap.ui.layout.HorizontalLayout} reference to the left scroll arrow instance
* @private
*/
AnchorBar.prototype._getScrollArrowLeft = function () {
var oScrollArrowLeft = this.getAggregation("_scrollArrowLeft");
if (oScrollArrowLeft) {
return oScrollArrowLeft;
} else {
oScrollArrowLeft = this._createScrollArrow(true);
this.setAggregation("_scrollArrowLeft", oScrollArrowLeft);
return oScrollArrowLeft;
}
};
/**
* Overwritten getter for aggregation "_scrollArrowRight".
* Implements lazy loading mechanism.
*
* @return {sap.ui.layout.HorizontalLayout} reference to the right scroll arrow instance
* @private
*/
AnchorBar.prototype._getScrollArrowRight = function () {
var oScrollArrowRight = this.getAggregation("_scrollArrowRight");
if (oScrollArrowRight) {
return oScrollArrowRight;
} else {
oScrollArrowRight = this._createScrollArrow(false);
this.setAggregation("_scrollArrowRight", oScrollArrowRight);
return oScrollArrowRight;
}
};
/*******************************************************************************
* Horizontal scrolling
******************************************************************************/
AnchorBar._hierarchicalSelectModes = {
"Icon": "icon", // Only icon - overview button mode
"Text": "text" // Text - phone mode
};
AnchorBar.prototype._applyHierarchicalSelectMode = function () {
if (this._sHierarchicalSelectMode === AnchorBar._hierarchicalSelectModes.Icon) {
this.$().find(".sapUxAPAnchorBarScrollContainer").show();
this._oSelect.setWidth("auto");
this._oSelect.setAutoAdjustWidth(true);
this._oSelect.setType(sap.m.SelectType.IconOnly);
this._computeBarSectionsInfo();
} else {
this.$().find(".sapUxAPAnchorBarScrollContainer").hide();
this._oSelect.setWidth("100%");
this._oSelect.setAutoAdjustWidth(false);
this._oSelect.setType(sap.m.SelectType.Default);
}
this.$().toggleClass("sapUxAPAnchorBarOverflow", this._sHierarchicalSelectMode === AnchorBar._hierarchicalSelectModes.Icon);
};
/**
 * Re-evaluates the display mode and the overflow state after a size change.
 * Switches between phone ("text") and desktop/tablet ("icon") presentation,
 * then toggles the left/right overflow gradient classes as needed.
 * @private
 */
AnchorBar.prototype._adjustSize = function () {
//size changed => check if switch in display-mode (phone-view vs. desktop-view) needed
var sNewMode = library.Utilities.isPhoneScenario(this._getCurrentMediaContainerRange()) ?
AnchorBar._hierarchicalSelectModes.Text :
AnchorBar._hierarchicalSelectModes.Icon;
if (sNewMode !== this._sHierarchicalSelectMode) {
this._sHierarchicalSelectMode = sNewMode;
this._applyHierarchicalSelectMode();
}
//size changed => check if overflow gradients needed
if (this._sHierarchicalSelectMode === AnchorBar._hierarchicalSelectModes.Icon) {
//don't go any further if the positions of the items are not calculated yet
//(_iMaxPosition is reset to -1 in onAfterRendering until _computeBarSectionsInfo runs)
if (this._iMaxPosition < 0) {
return;
}
var $dom = this.$(),
$scrollContainer = $dom.find(".sapUxAPAnchorBarScrollContainer"),
bNeedScrollingBegin,
bNeedScrollingEnd,
iContainerWidth;
iContainerWidth = $scrollContainer.width();
//do we need to scroll left or right
//in RTL, webkit/firefox report reversed or negative scrollLeft values (see init),
//hence the Math.abs and the browser-specific branches below
if (this._bRtlScenario) {
if (Device.browser.firefox) {
bNeedScrollingEnd = Math.abs($scrollContainer.scrollLeft()) + iContainerWidth < (this._iMaxPosition - this._iTolerance);
bNeedScrollingBegin = Math.abs($scrollContainer.scrollLeft()) >= this._iTolerance;
} else {
bNeedScrollingEnd = Math.abs($scrollContainer.scrollLeft()) >= this._iTolerance;
bNeedScrollingBegin = Math.abs($scrollContainer.scrollLeft()) + iContainerWidth < (this._iMaxPosition - this._iTolerance);
}
} else {
bNeedScrollingEnd = $scrollContainer.scrollLeft() + iContainerWidth < (this._iMaxPosition - this._iTolerance);
bNeedScrollingBegin = $scrollContainer.scrollLeft() >= this._iTolerance;
}
jQuery.sap.log.debug("AnchorBar :: scrolled at " + $scrollContainer.scrollLeft(), "scrollBegin [" + (bNeedScrollingBegin ? "true" : "false") + "] scrollEnd [" + (bNeedScrollingEnd ? "true" : "false") + "]");
//the CSS classes drive the visibility of the overflow gradients/arrows
$dom.toggleClass("sapUxAPAnchorBarScrollLeft", bNeedScrollingBegin);
$dom.toggleClass("sapUxAPAnchorBarScrollRight", bNeedScrollingEnd);
}
};
/**
 * Handles scrolling via the scroll buttons.
 *
 * @param {boolean} bScrollLeft indicates whether the left arrow button was pressed
 * @private
 */
AnchorBar.prototype._handleScrollButtonTap = function (bScrollLeft) {
// Scroll towards the start of the bar exactly when the pressed arrow and the
// RTL flag disagree (LTR + left arrow, or RTL + right arrow); otherwise
// scroll towards the end. This is the XOR of the two flags.
var iScrollDirection = (bScrollLeft !== this._bRtlScenario) ? -1 : 1;
//increase scroll duration when scrolling to the other end of the anchorBar (UX requirement)
this._oScroller.scrollTo(this._iMaxPosition * iScrollDirection, 0, AnchorBar.SCROLL_DURATION * 3);
};
/**
 * Scroll to a specific Section.
 *
 * @param {string} sId The Section ID to scroll to
 * @param {int} duration Scroll duration (in ms). Falsy values fall back to AnchorBar.SCROLL_DURATION (500ms)
 * @public
 * @ui5-metamodel This method also will be described in the UI5 (legacy) designtime metamodel
 */
AnchorBar.prototype.scrollToSection = function (sId, duration) {
// no-op on phone: there is no scrolling buttons bar, only the select
if (this._bHasButtonsBar) {
var iDuration = duration || AnchorBar.SCROLL_DURATION,
iScrollTo;
if ((this._sHierarchicalSelectMode === AnchorBar._hierarchicalSelectModes.Icon)
&& this._oSectionInfo[sId]) {
if (this._bRtlScenario && Device.browser.firefox) {
// in firefox RTL mode we are working with negative numbers and we have to add the offset in order not to hide the selected item
iScrollTo = this._oSectionInfo[sId].scrollLeft + this._iOffset;
} else {
//scroll to the positionRtl minus the offset (so the gradient never hide the selected item)
iScrollTo = this._oSectionInfo[sId].scrollLeft - this._iOffset;
if (iScrollTo < 0) { //do not allow hiding part of the content if negative value for scroll is calculated here
iScrollTo = 0;
}
}
jQuery.sap.log.debug("AnchorBar :: scrolling to section " + sId + " of " + iScrollTo);
//avoid triggering twice the scrolling onto the same target section
if (this._sCurrentScrollId != sId) {
this._sCurrentScrollId = sId;
if (this._iCurrentScrollTimeout) {
jQuery.sap.clearDelayedCall(this._iCurrentScrollTimeout);
jQuery.sap.byId(this.getId() + "-scroll").parent().stop(true, false);
}
// NOTE(review): the de-duplication window uses the raw `duration` argument,
// not iDuration - with an undefined duration it clears immediately; confirm intended
this._iCurrentScrollTimeout = jQuery.sap.delayedCall(duration, this, function () {
this._sCurrentScrollId = undefined;
this._iCurrentScrollTimeout = undefined;
});
this._oScroller.scrollTo(iScrollTo, 0, iDuration);
}
} else {
jQuery.sap.log.debug("AnchorBar :: no need to scroll to " + sId);
}
}
};
// use type 'object' because Metamodel doesn't know ScrollEnablement
/**
 * Returns an sap.ui.core.delegate.ScrollEnablement object used to handle scrolling.
 * May be null: the scroller is only created in init when a buttons bar exists
 * (tablet/desktop), not in the phone scenario.
 *
 * @type object
 * @public
 * @ui5-metamodel This method also will be described in the UI5 (legacy) designtime metamodel
 */
AnchorBar.prototype.getScrollDelegate = function () {
return this._oScroller;
};
/*******************************************************************************
 * Keyboard navigation
 ******************************************************************************/
AnchorBar.PAGEUP_AND_PAGEDOWN_JUMP_SIZE = 5;
/**
 * Handles DOWN key, triggered on anchor bar level: focuses the next sibling, if any.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype.onsapdown = function (oEvent) {
oEvent.preventDefault();
var oNext = oEvent.target.nextSibling;
if (oNext) {
oNext.focus();
}
};
/**
 * Handles RIGHT key, triggered on anchor bar level.
 * Mirrored to UP/DOWN depending on the text direction.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype.onsapright = function (oEvent) {
this[this._bRtl ? "onsapup" : "onsapdown"](oEvent);
};
/**
 * Handles UP key, triggered on anchor bar level: focuses the previous sibling, if any.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype.onsapup = function (oEvent) {
oEvent.preventDefault();
var oPrevious = oEvent.target.previousSibling;
if (oPrevious) {
oPrevious.focus();
}
};
/**
 * Handles LEFT key, triggered on anchor bar level.
 * Mirrored to DOWN/UP depending on the text direction.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype.onsapleft = function (oEvent) {
this[this._bRtl ? "onsapdown" : "onsapup"](oEvent);
};
/**
 * Handles HOME key, triggered on anchor bar level: focuses the first anchor.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype.onsaphome = function (oEvent) {
oEvent.preventDefault();
var oFirst = oEvent.target.parentElement.firstChild;
if (oFirst) {
oFirst.focus();
}
};
/**
 * Handles END key, triggered on anchor bar level: focuses the last anchor.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype.onsapend = function (oEvent) {
oEvent.preventDefault();
var oLast = oEvent.target.parentElement.lastChild;
if (oLast) {
oLast.focus();
}
};
/**
 * Handles PAGE UP key, triggered on anchor bar level.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype.onsappageup = function (oEvent) {
this._handlePageUp(oEvent);
};
/**
 * Handles PAGE DOWN key, triggered on anchor bar level.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype.onsappagedown = function (oEvent) {
this._handlePageDown(oEvent);
};
/**
 * Handler for sappageup event: moves focus PAGEUP_AND_PAGEDOWN_JUMP_SIZE + 1
 * anchors backwards, clamping to the first anchor.
 *
 * Fix: the original used `return` inside Array.prototype.forEach, which cannot
 * terminate the iteration early and misleadingly suggested it did; an explicit
 * loop with `break` expresses the intent. Behavior is unchanged, including the
 * falsy check on a computed index of 0 (which still ends up focusing aAnchors[0]).
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype._handlePageUp = function (oEvent) {
oEvent.preventDefault();
var iNextIndex;
var aAnchors = this.getContent();
// locate the anchor that raised the event
for (var i = 0; i < aAnchors.length; i++) {
if (aAnchors[i].getId() === oEvent.target.id) {
iNextIndex = i - (AnchorBar.PAGEUP_AND_PAGEDOWN_JUMP_SIZE + 1);
break;
}
}
// a negative, undefined or 0 (falsy) index falls back to the first anchor
if (iNextIndex && aAnchors[iNextIndex]) {
aAnchors[iNextIndex].focus();
} else if (aAnchors[0]) {
aAnchors[0].focus();
}
};
/**
 * Handler for sappagedown event: moves focus PAGEUP_AND_PAGEDOWN_JUMP_SIZE + 1
 * anchors forwards, clamping to the last anchor.
 *
 * Same `forEach`-early-exit fix as in _handlePageUp; behavior is unchanged.
 *
 * @param {jQuery.Event} oEvent
 * @private
 */
AnchorBar.prototype._handlePageDown = function (oEvent) {
oEvent.preventDefault();
var iNextIndex;
var aAnchors = this.getContent();
// locate the anchor that raised the event
for (var i = 0; i < aAnchors.length; i++) {
if (aAnchors[i].getId() === oEvent.target.id) {
iNextIndex = i + AnchorBar.PAGEUP_AND_PAGEDOWN_JUMP_SIZE + 1;
break;
}
}
// an out-of-range or undefined index falls back to the last anchor
if (iNextIndex && aAnchors[iNextIndex]) {
aAnchors[iNextIndex].focus();
} else if (aAnchors[aAnchors.length - 1]) {
aAnchors[aAnchors.length - 1].focus();
}
};
/**
 * Maintains roving tab focus: only the selected anchor button is reachable
 * via TAB (tabIndex 0), all others get tabIndex -1.
 *
 * @param {sap.m.Button} oSelectedButton the currently selected anchor button
 * @private
 */
AnchorBar.prototype._setAnchorButtonsTabFocusValues = function (oSelectedButton) {
var sTabIndex = "tabIndex";
(this.getContent() || []).forEach(function (oAnchorBarItem) {
var bIsSelected = oAnchorBarItem.sId === oSelectedButton.sId;
oAnchorBarItem.$().attr(sTabIndex, bIsSelected ? '0' : '-1');
});
};
/**
 * Handler for F6: delegates to the forward (bShiftKey = false) group navigation.
 *
 * @param oEvent - The event object
 */
AnchorBar.prototype.onsapskipforward = function (oEvent) {
this._handleGroupNavigation(oEvent, false);
};
/**
 * Handler for F6 and Shift + F6 group navigation.
 * Builds the list of F6 groups (the anchor bar itself plus the subsection DOM
 * elements of the parent ObjectPageLayout) and re-dispatches a synthetic F6
 * keydown scoped to that list.
 *
 * @param {jQuery.Event} oEvent
 * @param {boolean} bShiftKey serving as a reference if shift is used
 * @private
 */
AnchorBar.prototype._handleGroupNavigation = function (oEvent, bShiftKey) {
var oEventF6 = jQuery.Event("keydown"),
oSettings = {},
oObjectPageLayout = this.getParent(),
bUseIconTabBar = oObjectPageLayout.getUseIconTabBar(),
sCurrentSectionId = oObjectPageLayout.getSelectedSection(),
aSections = oObjectPageLayout.getSections(),
aSubSections = [this.getDomRef()],
aCurrentSubSections;
if (bUseIconTabBar) {
// icon tab bar mode: only the selected section's subsections are rendered
aCurrentSubSections = sap.ui.getCore().byId(sCurrentSectionId).getSubSections().map(function (oSubSection) {
return oSubSection.$().attr("tabindex", -1)[0];
});
} else {
//this is needed in order to be sure that next F6 group will be found in sub sections
// NOTE(review): aCurrentSubSections is overwritten on every iteration, so only the
// LAST section's subsections survive the loop - confirm whether that is intended
aSections.forEach(function (oSection) {
aCurrentSubSections = oSection.getSubSections().map(function (oSubSection) {
return oSubSection.$().attr("tabindex", -1)[0];
});
});
}
aSubSections = aSubSections.concat(aCurrentSubSections);
oSettings.scope = aSubSections;
oEvent.preventDefault();
// move focus to the bar itself before re-dispatching, so the group jump starts here
this.$().focus();
oEventF6.target = oEvent.target;
oEventF6.keyCode = jQuery.sap.KeyCodes.F6;
oEventF6.shiftKey = bShiftKey;
jQuery.sap.handleF6GroupNavigation(oEventF6, oSettings);
};
/**
 * called for figuring out responsive scenarios
 * Resets the measured layout state, (re)attaches the resize and scroll
 * listeners, and restores the selected-button state from before rendering.
 */
AnchorBar.prototype.onAfterRendering = function () {
var oSelectedButton;
if (Toolbar.prototype.onAfterRendering) {
Toolbar.prototype.onAfterRendering.call(this);
}
oSelectedButton = sap.ui.getCore().byId(this.getSelectedButton());
// start in "text" mode; _adjustSize switches to "icon" if the viewport allows it
this._sHierarchicalSelectMode = AnchorBar._hierarchicalSelectModes.Text;
//save max for arrow show/hide management, the max position is the required scroll for the item to be fully visible
// -1 marks "not yet computed"; _adjustSize bails out until _computeBarSectionsInfo runs
this._iMaxPosition = -1;
//show/hide scrolling arrows
this._sResizeListenerId = ResizeHandler.register(this, jQuery.proxy(this._adjustSize, this));
this.$().find(".sapUxAPAnchorBarScrollContainer").scroll(jQuery.proxy(this._onScroll, this));
//restore state from previous rendering
if (oSelectedButton) {
this.setSelectedButton(oSelectedButton);
this._setAnchorButtonsTabFocusValues(oSelectedButton);
}
//initial state
// DOM measurement is deferred so the browser has finished layouting first
if (this._bHasButtonsBar) {
jQuery.sap.delayedCall(AnchorBar.DOM_CALC_DELAY, this, function () {
this._adjustSize();
});
}
};
/**
 * Scroll listener of the button bar container: debounces _adjustSize so the
 * overflow state is re-checked at most once per SCROLL_DURATION window.
 * @private
 */
AnchorBar.prototype._onScroll = function () {
if (this._iCurrentSizeCheckTimeout) {
return; // a size check is already scheduled
}
this._iCurrentSizeCheckTimeout = jQuery.sap.delayedCall(AnchorBar.SCROLL_DURATION, this, function () {
this._iCurrentSizeCheckTimeout = undefined;
this._adjustSize();
});
};
/**
 * Measures all anchor buttons and fills _oSectionInfo with each section's
 * scroll position and width; _iMaxPosition ends up as the total content width.
 * @private
 */
AnchorBar.prototype._computeBarSectionsInfo = function () {
//reset the max position
this._iMaxPosition = 0;
var aContent = this.getContent() || [];
aContent.forEach(this._computeNextSectionInfo, this);
//post processing based on how browsers implement rtl
//chrome, safari && Device.browser.webkit && firefox
if (this._bRtlScenario && (Device.browser.webkit || Device.browser.firefox)) {
aContent.forEach(this._adjustNextSectionInfo, this); // adjust positions depending of the browser
// start scrolled to the far end, which is the visual beginning in RTL
this._oScroller.scrollTo(this._iMaxPosition, 0, 0);
}
};
/**
 * Measures a single anchor button, records its scroll position and width in
 * _oSectionInfo (keyed by section id) and advances _iMaxPosition, while also
 * setting the ARIA attributes of the button.
 *
 * @param {sap.ui.core.Control} oContent the anchor button to process
 * @private
 */
AnchorBar.prototype._computeNextSectionInfo = function (oContent) {
var $button = oContent.$(),
sSectionId = oContent.data("sectionId");
// set ARIA has-popup if button opens submenu
if (oContent.data("bHasSubMenu")) {
$button.attr("aria-haspopup", "true");
}
// set ARIA attributes of main buttons
$button.attr("aria-controls", sSectionId);
//store info on the various sections for horizontalScrolling
//scrollLeft is the amount of scroll required for reaching that item in normal mode
var iWidth = $button.outerWidth(true);
this._oSectionInfo[sSectionId] = {
scrollLeft: this._iMaxPosition,
width: iWidth
};
this._iMaxPosition += iWidth;
};
/**
 * Adjustment for webkit only
 *
 * Reverse the position as the scroll 0 is at the far end (first item = maxPosition, last item = 0)
 * Called only in the RTL scenario (see _computeBarSectionsInfo).
 */
AnchorBar.prototype._adjustNextSectionInfo = function (oContent) {
var oSectionInfo = this._oSectionInfo[oContent.data("sectionId")];
if (Device.browser.firefox) {
// 27.11.2015 fix made for the following issue
// firefox not working yet see internal incident 1570001701
// firefox RTL reports negative scrollLeft values, so only the sign is flipped
oSectionInfo.scrollLeft = -oSectionInfo.scrollLeft;
} else {
// Reverse all positions as the scroll 0 is at the far end (first item = maxPosition, last item = 0)
oSectionInfo.scrollLeft = this._iMaxPosition - oSectionInfo.scrollLeft - oSectionInfo.width;
}
};
/**
 * Destroys the content of all submenu popovers, if any were created.
 * @private
 */
AnchorBar.prototype._destroyPopoverContent = function () {
var aPopovers = this.getAggregation("_popovers");
if (!Array.isArray(aPopovers)) {
return;
}
for (var i = 0; i < aPopovers.length; i++) {
aPopovers[i].destroyContent();
}
};
/**
 * Clears all generated content and detaches the press handlers so the
 * control can be rebuilt from scratch.
 *
 * @returns {sap.uxap.AnchorBar} this, for chaining
 * @private
 */
AnchorBar.prototype._resetControl = function () {
this._destroyPopoverContent();
this.getContent().forEach(this._detachPopoverHandler, this);
this.destroyAggregation('content', true);
return this;
};
/**
 * clean created controls and deregister handlers
 */
AnchorBar.prototype.exit = function () {
var sListenerId = this._sResizeListenerId,
oScroller = this._oScroller;
if (sListenerId) {
ResizeHandler.deregister(sListenerId);
this._sResizeListenerId = null;
}
if (oScroller) {
oScroller.destroy();
this._oScroller = null;
}
};
return AnchorBar;
});
|
src/sap.uxap/src/sap/uxap/AnchorBar.js
|
/*!
* ${copyright}
*/
// Provides control sap.uxap.AnchorBar.
sap.ui.define([
"sap/m/Button",
"sap/m/PlacementType",
"sap/m/Popover",
"sap/m/Toolbar",
"sap/ui/core/IconPool",
"sap/ui/core/Item",
"sap/ui/core/ResizeHandler",
"sap/ui/core/delegate/ScrollEnablement",
"sap/ui/layout/HorizontalLayout",
"sap/ui/Device",
"sap/ui/core/CustomData",
"./HierarchicalSelect",
"./library"
], function (Button, PlacementType, Popover, Toolbar, IconPool, Item, ResizeHandler,
ScrollEnablement, HorizontalLayout, Device, CustomData, HierarchicalSelect, library) {
"use strict";
/**
* Constructor for a new AnchorBar.
*
* @param {string} [sId] id for the new control, generated automatically if no id is given
* @param {object} [mSettings] initial settings for the new control
*
* @class
* Anchor bar is the navigation bar of an Object page. Its purpose is to provide links to all Sections and Subsections. Takes the form of a Select on phone.
* @extends sap.m.Toolbar
*
* @author SAP SE
*
* @constructor
* @public
* @since 1.26
* @alias sap.uxap.AnchorBar
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var AnchorBar = Toolbar.extend("sap.uxap.AnchorBar", /** @lends sap.uxap.AnchorBar.prototype */ {
metadata: {
library: "sap.uxap",
properties: {
/**
 * Determines whether to show a Popover with Subsection links when clicking on Section links in the Anchor bar.
 */
showPopover: {type: "boolean", defaultValue: true},
/**
 * Determines whether the Anchor bar items are displayed in upper case.
 */
upperCase: {type: "boolean", defaultValue: false}
},
associations: {
/**
 * The button that represents the Section being scrolled by the user.
 */
selectedButton: {type: "sap.m.Button", multiple: false}
},
aggregations: {
// hidden aggregations: internal controls created lazily by the AnchorBar itself
// (the phone select, the submenu popovers and the two overflow scroll arrows)
_select: {type: "sap.uxap.HierarchicalSelect", multiple: false, visibility: "hidden"},
_popovers: {type: "sap.m.Popover", multiple: true, visibility: "hidden"},
_scrollArrowLeft: {type: "sap.ui.core.Control", multiple: false, visibility: "hidden"},
_scrollArrowRight: {type: "sap.ui.core.Control", multiple: false, visibility: "hidden"}
}
}
});
/**
 * Initializes internal state: press-handler registry, section scroll info,
 * RTL flags, the phone select and (on tablet/desktop) the horizontal scroller.
 */
AnchorBar.prototype.init = function () {
if (Toolbar.prototype.init) {
Toolbar.prototype.init.call(this);
}
this.addStyleClass("sapUxAPAnchorBar");
this._oPressHandlers = {}; //keep references on the press handlers we set on first level items (in case of behavior change)
this._oSectionInfo = {}; //keep scrolling info on sections
this._oScroller = null; // created below only when a buttons bar exists
this._sSelectedKey = null; // keep track of sap.uxap.HierarchicalSelect selected key
this._bRtl = sap.ui.getCore().getConfiguration().getRTL();
//are we on an rtl scenario?
//IE handles rtl in a transparent way (positions positives, scroll starts at the end)
//while firefox, safari and chrome have a special management (scroll at the beginning and negative positioning)
//therefore we will apply some specific actions only if are in rtl and not in IE.
this._bRtlScenario = this._bRtl && !Device.browser.msie;
//there are 2 different uses cases:
//case 1: on a real phone we don't need the scrolling anchorBar, just the hierarchicalSelect
//case 2: on a real tablet or a desktop we need both as the size may change
this._bHasButtonsBar = Device.system.tablet || Device.system.desktop;
this._oSelect = this._getHierarchicalSelect();
//case 2 requires the scrolling anchorBar
if (this._bHasButtonsBar) {
//horizontal scrolling
this._oScroller = new ScrollEnablement(this, this.getId() + "-scroll", {
horizontal: true,
vertical: false,
nonTouchScrolling: true
});
// pixel size of 1rem, used to derive the scroll tolerance and offset below
this._iREMSize = parseInt(jQuery("body").css("font-size"), 10);
this._iTolerance = this._iREMSize * 1; // 1 rem
this._iOffset = this._iREMSize * 3; // 3 rem
//listen to resize
this._sResizeListenerId = undefined; //defined in onAfterRendering
}
//composite controls
this.setDesign("Transparent"); //styling is coming from css
};
/*******************************************************************************
 * UX design
 ******************************************************************************/
AnchorBar.SCROLL_STEP = 250;// how many pixels to scroll with every overflow arrow click
AnchorBar.SCROLL_DURATION = 500; // ms
AnchorBar.DOM_CALC_DELAY = 200; // ms
/**
 * Setter for the "selectedButton" association. Accepts a Button instance or
 * its id; synchronizes the phone select, the selection CSS class, the roving
 * tab index and scrolls the bar so the selected button is visible.
 *
 * @param {string|sap.m.Button} oButton the button (or its id) to select
 * @returns {sap.uxap.AnchorBar} this, for chaining
 */
AnchorBar.prototype.setSelectedButton = function (oButton) {
var aSelectItems = this._oSelect.getItems(),
bHasSelectItems = aSelectItems.length > 0;
if (typeof oButton === "string") {
oButton = sap.ui.getCore().byId(oButton);
}
if (oButton) {
// short-circuit: nothing to do if this button is already selected
if (oButton.getId() === this.getSelectedButton()) {
return this;
}
var oSelectedSectionId = oButton.data("sectionId");
this._sSelectedKey = oSelectedSectionId;
if (oSelectedSectionId && bHasSelectItems) {
this._oSelect.setSelectedKey(oSelectedSectionId);
}
if (this._bHasButtonsBar) {
//remove selection class from the currently selected item
this.$().find(".sapUxAPAnchorBarButtonSelected").removeClass("sapUxAPAnchorBarButtonSelected");
oButton.$().addClass("sapUxAPAnchorBarButtonSelected");
if (oSelectedSectionId) {
this.scrollToSection(oSelectedSectionId, AnchorBar.SCROLL_DURATION);
}
this._setAnchorButtonsTabFocusValues(oButton);
}
}
return this.setAssociation("selectedButton", oButton, true /* don't rerender */);
};
/*******************************************************************************
 * Responsive behavior
 ******************************************************************************/
/**
 * Setter for the "showPopover" property. Switching the behavior after the
 * first rendering detaches all press handlers, re-renders and restores the
 * previously selected button.
 *
 * @param {boolean} bValue whether Section links open a Subsection popover
 * @param {boolean} bSuppressInvalidate unused; re-rendering is managed manually here
 * @returns {sap.uxap.AnchorBar} this, for chaining
 */
AnchorBar.prototype.setShowPopover = function (bValue, bSuppressInvalidate) {
if (this.getShowPopover() === bValue) {
return this;
}
// handlers exist only after the first rendering; their presence decides if a re-render is needed
var sSelectedButton, bNeedInvalidate = !jQuery.isEmptyObject(this._oPressHandlers);
//changing the behavior after the firstRendering is removing all press handlers on first level items
if (bNeedInvalidate) {
var aContent = this.getContent() || [];
sSelectedButton = this.getSelectedButton();
aContent.forEach(this._detachPopoverHandler, this);
}
this.setProperty("showPopover", bValue, true /* always trigger re-rendering manually */);
if (bNeedInvalidate) {
this.rerender();
if (sSelectedButton) {
this.setSelectedButton(sSelectedButton);
}
}
return this;
};
/**
 * Resolves the "selectedButton" association to its corresponding section
 * control, or null when no valid selection exists.
 *
 * @returns {sap.ui.core.Control|null} the selected section control, if any
 */
AnchorBar.prototype.getSelectedSection = function () {
var vSelected = this.getSelectedButton();
// the association getter may hand back an id instead of a control instance
if (vSelected && typeof vSelected === "string") {
vSelected = sap.ui.getCore().byId(vSelected);
}
if (vSelected instanceof Button && vSelected.data("sectionId")) {
return sap.ui.getCore().byId(vSelected.data("sectionId"));
}
return null;
};
/**
 * create phone equivalents for each of the provided content controls
 * Rebuilds the phone select items and, on tablet/desktop, the submenu
 * popovers for the scrolling anchor bar.
 */
AnchorBar.prototype.onBeforeRendering = function () {
if (Toolbar.prototype.onBeforeRendering) {
Toolbar.prototype.onBeforeRendering.call(this);
}
var aContent = this.getContent() || [],
bUpperCase = this.getUpperCase(),
// shared mutable state threaded through _createPopoverSubMenu:
// tracks the last seen first-level button and its popover
oPopoverState = {
oLastFirstLevelButton: null,
oCurrentPopover: null
};
//rebuild select items
this._oSelect.removeAllItems();
this._oSelect.setUpperCase(bUpperCase);
this.toggleStyleClass("sapUxAPAnchorBarUpperCase", bUpperCase);
//create responsive equivalents of the provided controls
aContent.forEach(function (oButton) {
this._createSelectItem(oButton);
// desktop scenario logic: builds the scrolling anchorBar
if (this._bHasButtonsBar) {
this._createPopoverSubMenu(oButton, oPopoverState);
}
}, this);
// restore the selection in the freshly rebuilt select
if (aContent.length > 0 && this._sSelectedKey) {
this._oSelect.setSelectedKey(this._sSelectedKey);
}
};
/**
 * Adds an anchor button to the "content" aggregation, applying the anchor
 * style class and wiring direct scrolling for second-level entries.
 *
 * @param {sap.m.Button} oButton the anchor button to add
 * @param {boolean} bInvalidate passed through to addAggregation
 * @returns {sap.uxap.AnchorBar} this, for chaining
 */
AnchorBar.prototype.addContent = function (oButton, bInvalidate) {
var vSecondLevel = oButton.data("secondLevel"),
bSecondLevel = vSecondLevel === true || vSecondLevel === "true";
oButton.addStyleClass("sapUxAPAnchorBarButton");
oButton.removeAllAriaDescribedBy();
//attach handler on the scrolling mechanism
if (this._bHasButtonsBar && bSecondLevel) {
oButton.attachPress(this._handleDirectScroll, this);
}
return this.addAggregation("content", oButton, bInvalidate);
};
/**
 * Creates the phone select item equivalent of an anchor button.
 * UX rule: only buttons with visible text get an item, and second-level
 * buttons additionally require a visible title.
 *
 * @param {sap.m.Button} oButton the anchor button to mirror
 * @private
 */
AnchorBar.prototype._createSelectItem = function (oButton) {
var vSecondLevel = oButton.data("secondLevel"),
bIsSecondLevel = vSecondLevel === true || vSecondLevel === "true";
// guard clause: skip buttons without visible text, and hidden-title second levels
if (oButton.getText().trim() === "" || (bIsSecondLevel && oButton.data("bTitleVisible") !== true)) {
return;
}
this._oSelect.addItem(new Item({
key: oButton.data("sectionId"),
text: oButton.getText(),
customData: [
new CustomData({
key: "secondLevel",
value: oButton.data("secondLevel")
})
]
}));
};
/**
 * Builds the tablet/desktop submenu structure for one anchor button.
 * First-level buttons either get a popover (showPopover=true) that later
 * receives their second-level children, or a direct-scroll press handler.
 * Second-level buttons are moved into the current first-level popover.
 *
 * @param {sap.m.Button} oButton the button being processed
 * @param {object} oPopoverState shared state ({oLastFirstLevelButton, oCurrentPopover}) threaded across calls from onBeforeRendering
 * @private
 */
AnchorBar.prototype._createPopoverSubMenu = function (oButton, oPopoverState) {
var bIsSecondLevel = oButton.data("secondLevel") === true || oButton.data("secondLevel") === "true",
fnPressHandler = null;
//handles the tablet/desktop hierarchical behavior
//a second level is injected into the latest first level
//at this point we know that there are children to the last firstLevel therefore we can create the popover
if (bIsSecondLevel) {
if (oPopoverState.oLastFirstLevelButton && oPopoverState.oCurrentPopover) {
//don't attach the parent press handler for each child
if (!this._oPressHandlers[oPopoverState.oLastFirstLevelButton.getId()]) {
// the handler's `this` is a plain object carrying the popover/button pair (see _handlePopover)
fnPressHandler = jQuery.proxy(this._handlePopover, /* closure with oLastFirstLevelButton and oCurrentPopover as context */
{
oCurrentPopover: oPopoverState.oCurrentPopover,
oLastFirstLevelButton: oPopoverState.oLastFirstLevelButton
}
);
oPopoverState.oLastFirstLevelButton.attachPress(fnPressHandler);
this._oPressHandlers[oPopoverState.oLastFirstLevelButton.getId()] = fnPressHandler;
}
oPopoverState.oCurrentPopover.addContent(oButton);
} else if (this.getShowPopover()) {
// a second level without a preceding first level is a modeling error
jQuery.sap.log.error("sapUxApAnchorBar :: missing parent first level for item " + oButton.getText());
} else {
// direct-scroll mode: orphaned second levels are simply dropped
this.removeContent(oButton);
oButton.destroy();
}
} else {
oPopoverState.oLastFirstLevelButton = oButton;
//default behavior: the first level show a popover containing second levels
if (this.getShowPopover()) {
oPopoverState.oCurrentPopover = new Popover({
placement: PlacementType.Bottom,
showHeader: false,
verticalScrolling: true,
horizontalScrolling: false,
contentWidth: "auto",
showArrow: false
});
oPopoverState.oCurrentPopover.addStyleClass("sapUxAPAnchorBarPopover");
this._addKeyboardHandling(oPopoverState.oCurrentPopover);
this.addAggregation('_popovers', oPopoverState.oCurrentPopover);
//alternative behavior: the first level triggers direct navigation
} else if (!this._oPressHandlers[oPopoverState.oLastFirstLevelButton.getId()]) {
fnPressHandler = jQuery.proxy(this._handleDirectScroll, this);
oPopoverState.oLastFirstLevelButton.attachPress(fnPressHandler);
this._oPressHandlers[oPopoverState.oLastFirstLevelButton.getId()] = fnPressHandler;
}
}
};
/**
 * Installs keyboard navigation handlers on a submenu popover:
 * arrow keys move between siblings, HOME/END jump to the first/last item,
 * PAGE UP/DOWN reuse the AnchorBar page handlers bound to the popover.
 *
 * @param {sap.m.Popover} oCurrentPopover the popover to equip
 * @private
 */
AnchorBar.prototype._addKeyboardHandling = function (oCurrentPopover) {
oCurrentPopover.onsapdown = function (oEvent) {
var oNext = oEvent.target.nextSibling;
if (oNext) {
oNext.focus();
}
};
oCurrentPopover.onsapright = function (oEvent) {
oCurrentPopover.onsapdown(oEvent);
};
oCurrentPopover.onsapup = function (oEvent) {
var oPrevious = oEvent.target.previousSibling;
if (oPrevious) {
oPrevious.focus();
}
};
oCurrentPopover.onsapleft = function (oEvent) {
oCurrentPopover.onsapup(oEvent);
};
oCurrentPopover.onsaphome = function (oEvent) {
var oFirst = oEvent.target.parentElement.firstChild;
if (oFirst) {
oFirst.focus();
}
};
oCurrentPopover.onsapend = function (oEvent) {
var oLast = oEvent.target.parentElement.lastChild;
if (oLast) {
oLast.focus();
}
};
oCurrentPopover.onsappageup = this._handlePageUp.bind(oCurrentPopover);
oCurrentPopover.onsappagedown = this._handlePageDown.bind(oCurrentPopover);
};
/**
 * Detaches and clears the press handler registered for the given button, if any.
 *
 * @param {sap.m.Button} oButton the anchor button to clean up
 * @private
 */
AnchorBar.prototype._detachPopoverHandler = function (oButton) {
var sButtonId = oButton.getId(),
fnHandler = this._oPressHandlers[sButtonId];
if (fnHandler) {
oButton.detachPress(fnHandler);
this._oPressHandlers[sButtonId] = null;
}
};
/**
 * Press handler of a first-level anchor button that owns a submenu popover.
 * NOTE: `this` is NOT the AnchorBar here - it is the plain context object
 * {oCurrentPopover, oLastFirstLevelButton} bound via jQuery.proxy in
 * _createPopoverSubMenu.
 *
 * @param {sap.ui.base.Event} oEvent the press event (unused)
 */
AnchorBar.prototype._handlePopover = function (oEvent) {
var aPopoverButtons = this.oCurrentPopover.getContent() || [];
//open the popover only if we are in Tablet/Desktop scenario = the button is visible in the anchorBar
if (this.oLastFirstLevelButton.$().is(":visible")) {
//specific use case management: if there are only 1 button in the popover, then we don't display it and navigate directly (= the subsection is "promoted" it to a section level)
//this is a specific behavior asked by UX as of Sep 25, 2014
if (aPopoverButtons.length == 1) {
aPopoverButtons[0].firePress({});
} else {
this.oCurrentPopover.openBy(this.oLastFirstLevelButton);
}
}
};
/**
 * Press handler that scrolls directly to the pressed button's section,
 * closing the submenu popover first when the press originated from one.
 *
 * @param {sap.ui.base.Event} oEvent the press event
 * @private
 */
AnchorBar.prototype._handleDirectScroll = function (oEvent) {
var oSource = oEvent.getSource(),
oParent = oSource.getParent();
if (oParent instanceof Popover) {
oParent.close();
}
this._requestScrollToSection(oSource.data("sectionId"));
};
/**
 * Asks the parent ObjectPageLayout to scroll to the requested (sub)section,
 * marking the scroll as "direct" so intermediate sections are skipped, and
 * records the selected subsection on its parent section.
 *
 * @param {string} sRequestedSectionId id of the section or subsection to scroll to
 * @private
 */
AnchorBar.prototype._requestScrollToSection = function (sRequestedSectionId) {
var oRequestedSection = sap.ui.getCore().byId(sRequestedSectionId),
oRequestedSectionParent = oRequestedSection.getParent();
if (this.getParent() instanceof library.ObjectPageLayout) {
// determine the next section that will appear selected in the anchorBar after the scroll
var sNextSelectedSection = sRequestedSectionId;
// if the requestedSection is a subsection, the nextSelectedSection will be its parent (since anchorBar contains only first-level sections)
if (oRequestedSection instanceof library.ObjectPageSubSection &&
oRequestedSectionParent instanceof library.ObjectPageSection) {
sNextSelectedSection = oRequestedSectionParent.getId();
}
// we set *direct* scrolling by which we instruct the page to *skip* processing of intermediate sections (sections between current and requested)
this.getParent().setDirectScrollingToSection(sNextSelectedSection);
// finally request the page to scroll to the requested section
this.getParent().scrollToSection(oRequestedSection.getId(), null, 0, true);
}
// remember the selected subsection on its section, without re-rendering
if (oRequestedSection instanceof library.ObjectPageSubSection &&
oRequestedSectionParent instanceof library.ObjectPageSection) {
oRequestedSectionParent.setAssociation("selectedSubSection", oRequestedSection, true);
}
};
/**
 * called on phone display only when a user selects a section to navigate to
 * simulate the press on the corresponding button
 * @param {*} oEvent event
 * @private
 */
AnchorBar.prototype._onSelectChange = function (oEvent) {
var oSelectedItem = oEvent.getParameter("selectedItem"),
oSelectedSection = sap.ui.getCore().byId(oSelectedItem.getKey());
if (!oSelectedSection) {
jQuery.sap.log.error("AnchorBar :: cannot find corresponding section", oSelectedItem.getKey());
return;
}
this._requestScrollToSection(oSelectedSection.getId());
};
/**
 * Lazily creates and returns the hidden "_select" aggregation - the
 * HierarchicalSelect used as the phone representation of the bar.
 *
 * @returns {sap.uxap.HierarchicalSelect} the select instance
 * @private
 */
AnchorBar.prototype._getHierarchicalSelect = function () {
var oSelect = this.getAggregation('_select');
if (!oSelect) {
oSelect = new HierarchicalSelect({
width: "100%",
icon: "sap-icon://overflow",
change: jQuery.proxy(this._onSelectChange, this)
});
this.setAggregation('_select', oSelect);
}
return oSelect;
};
/**
 * Creates a new scroll arrow. The scroll arrow consists of two controls:
 * 1. A HorizontalLayout which is used to display the gradient mask and to serve as a container for the arrow.
 * 2. A Button which displays the arrow itself.
 * In bluecrystal theme the button appears when hovering over the gradient mask and is not focusable.
 * In HCB, the button is always visible and can receive focus.
 *
 * @param {boolean} bLeft indicates whether this is the left button
 * @return {sap.ui.layout.HorizontalLayout} a new scroll arrow
 * @private
 */
AnchorBar.prototype._createScrollArrow = function (bLeft) {
var sArrowId,
sIconName,
sArrowClass,
oScrollButton,
that = this;
if (bLeft) {
sArrowId = this.getId() + "-arrowScrollLeft";
sIconName = "slim-arrow-left";
sArrowClass = "anchorBarArrowLeft";
} else {
sArrowId = this.getId() + "-arrowScrollRight";
sIconName = "slim-arrow-right";
sArrowClass = "anchorBarArrowRight";
}
oScrollButton = new Button(sArrowId, {
icon: IconPool.getIconURI(sIconName),
type: "Transparent",
press: function (oEvent) {
oEvent.preventDefault();
that._handleScrollButtonTap(bLeft);
}
});
// keep the arrow out of the tab chain except in the HCB theme,
// where it is always visible and must be focusable
oScrollButton.addEventDelegate({
onAfterRendering: function () {
if (sap.ui.getCore().getConfiguration().getTheme() != "sap_hcb") {
this.$().attr("tabindex", -1);
}
},
onThemeChanged: function () {
if (sap.ui.getCore().getConfiguration().getTheme() == "sap_hcb") {
this.$().removeAttr("tabindex");
} else {
this.$().attr("tabindex", -1);
}
}
}, oScrollButton);
// the layout carries the gradient mask styling and wraps the button
return new HorizontalLayout({
content: [oScrollButton]
}).addStyleClass("anchorBarArrow").addStyleClass(sArrowClass);
};
/**
 * Overwritten getter for aggregation "_scrollArrowLeft".
 * Lazily creates the arrow on first access and caches it in the aggregation.
 *
 * @return {sap.ui.layout.HorizontalLayout} reference to the left scroll arrow instance
 * @private
 */
AnchorBar.prototype._getScrollArrowLeft = function () {
var oArrow = this.getAggregation("_scrollArrowLeft");
if (!oArrow) {
oArrow = this._createScrollArrow(true);
this.setAggregation("_scrollArrowLeft", oArrow);
}
return oArrow;
};
/**
 * Overwritten getter for aggregation "_scrollArrowRight".
 * Lazily creates the arrow on first access and caches it in the aggregation.
 *
 * @return {sap.ui.layout.HorizontalLayout} reference to the right scroll arrow instance
 * @private
 */
AnchorBar.prototype._getScrollArrowRight = function () {
var oArrow = this.getAggregation("_scrollArrowRight");
if (!oArrow) {
oArrow = this._createScrollArrow(false);
this.setAggregation("_scrollArrowRight", oArrow);
}
return oArrow;
};
/*******************************************************************************
 * Horizontal scrolling
 ******************************************************************************/
AnchorBar._hierarchicalSelectModes = {
"Icon": "icon", // Only icon - overview button mode
"Text": "text" // Text - phone mode
};
/**
 * Applies the current hierarchical-select display mode:
 * "icon" shows the scrolling button bar with an icon-only overflow select,
 * "text" hides the button bar and shows the full-width select (phone mode).
 * @private
 */
AnchorBar.prototype._applyHierarchicalSelectMode = function () {
var bIconMode = this._sHierarchicalSelectMode === AnchorBar._hierarchicalSelectModes.Icon;
if (bIconMode) {
this.$().find(".sapUxAPAnchorBarScrollContainer").show();
this._oSelect.setWidth("auto");
this._oSelect.setAutoAdjustWidth(true);
this._oSelect.setType(sap.m.SelectType.IconOnly);
this._computeBarSectionsInfo();
} else {
this.$().find(".sapUxAPAnchorBarScrollContainer").hide();
this._oSelect.setWidth("100%");
this._oSelect.setAutoAdjustWidth(false);
this._oSelect.setType(sap.m.SelectType.Default);
}
this.$().toggleClass("sapUxAPAnchorBarOverflow", bIconMode);
};
/**
 * Re-evaluates the display mode and the overflow state after a size change.
 * Switches between phone ("text") and desktop/tablet ("icon") presentation,
 * then toggles the left/right overflow gradient classes as needed.
 * @private
 */
AnchorBar.prototype._adjustSize = function () {
//size changed => check if switch in display-mode (phone-view vs. desktop-view) needed
var sNewMode = library.Utilities.isPhoneScenario(this._getCurrentMediaContainerRange()) ?
AnchorBar._hierarchicalSelectModes.Text :
AnchorBar._hierarchicalSelectModes.Icon;
if (sNewMode !== this._sHierarchicalSelectMode) {
this._sHierarchicalSelectMode = sNewMode;
this._applyHierarchicalSelectMode();
}
//size changed => check if overflow gradients needed
if (this._sHierarchicalSelectMode === AnchorBar._hierarchicalSelectModes.Icon) {
//don't go any further if the positions of the items are not calculated yet
//(_iMaxPosition is reset to -1 in onAfterRendering until _computeBarSectionsInfo runs)
if (this._iMaxPosition < 0) {
return;
}
var $dom = this.$(),
$scrollContainer = $dom.find(".sapUxAPAnchorBarScrollContainer"),
bNeedScrollingBegin,
bNeedScrollingEnd,
iContainerWidth;
iContainerWidth = $scrollContainer.width();
//do we need to scroll left or right
//in RTL, webkit/firefox report reversed or negative scrollLeft values (see init),
//hence the Math.abs and the browser-specific branches below
if (this._bRtlScenario) {
if (Device.browser.firefox) {
bNeedScrollingEnd = Math.abs($scrollContainer.scrollLeft()) + iContainerWidth < (this._iMaxPosition - this._iTolerance);
bNeedScrollingBegin = Math.abs($scrollContainer.scrollLeft()) >= this._iTolerance;
} else {
bNeedScrollingEnd = Math.abs($scrollContainer.scrollLeft()) >= this._iTolerance;
bNeedScrollingBegin = Math.abs($scrollContainer.scrollLeft()) + iContainerWidth < (this._iMaxPosition - this._iTolerance);
}
} else {
bNeedScrollingEnd = $scrollContainer.scrollLeft() + iContainerWidth < (this._iMaxPosition - this._iTolerance);
bNeedScrollingBegin = $scrollContainer.scrollLeft() >= this._iTolerance;
}
jQuery.sap.log.debug("AnchorBar :: scrolled at " + $scrollContainer.scrollLeft(), "scrollBegin [" + (bNeedScrollingBegin ? "true" : "false") + "] scrollEnd [" + (bNeedScrollingEnd ? "true" : "false") + "]");
//the CSS classes drive the visibility of the overflow gradients/arrows
$dom.toggleClass("sapUxAPAnchorBarScrollLeft", bNeedScrollingBegin);
$dom.toggleClass("sapUxAPAnchorBarScrollRight", bNeedScrollingEnd);
}
};
/**
* Handles scrolling via the scroll buttons.
*
* @param boolean bScrollLeft indicates whether the left arrow button was pressed
* @private
*/
AnchorBar.prototype._handleScrollButtonTap = function (bScrollLeft) {
/* calculate the direction where to scroll
increase if:
- ltr and right arrow was pressed
- rtl and the left arrow was pressed
decrease if:
- ltr and the left arrow was pressed
- rtl and the right arrow was pressed */
var iScrollDirection = ((!this._bRtlScenario && bScrollLeft) || (this._bRtlScenario && !bScrollLeft)) ? -1 : 1;
this._oScroller.scrollTo(this._iMaxPosition * iScrollDirection, 0, AnchorBar.SCROLL_DURATION * 3); //increase scroll duration when scrolling to the other end of the anchorBar (UX requirement)
};
/**
* Scroll to a specific Section.
*
* @param {string} sId The Section ID to scroll to
* @param {int} duration Scroll duration (in ms). Default value is 0
* @public
* @ui5-metamodel This method also will be described in the UI5 (legacy) designtime metamodel
*/
AnchorBar.prototype.scrollToSection = function (sId, duration) {
if (this._bHasButtonsBar) {
var iDuration = duration || AnchorBar.SCROLL_DURATION,
iScrollTo;
if ((this._sHierarchicalSelectMode === AnchorBar._hierarchicalSelectModes.Icon)
&& this._oSectionInfo[sId]) {
if (this._bRtlScenario && Device.browser.firefox) {
// in firefox RTL mode we are working with negative numbers and we have to add the offset in order not to hide the selected item
iScrollTo = this._oSectionInfo[sId].scrollLeft + this._iOffset;
} else {
//scroll to the positionRtl minus the offset (so the gradient never hide the selected item)
iScrollTo = this._oSectionInfo[sId].scrollLeft - this._iOffset;
if (iScrollTo < 0) { //do not allow hiding part of the content if negative value for scroll is calculated here
iScrollTo = 0;
}
}
jQuery.sap.log.debug("AnchorBar :: scrolling to section " + sId + " of " + iScrollTo);
//avoid triggering twice the scrolling onto the same target section
if (this._sCurrentScrollId != sId) {
this._sCurrentScrollId = sId;
if (this._iCurrentScrollTimeout) {
jQuery.sap.clearDelayedCall(this._iCurrentScrollTimeout);
jQuery.sap.byId(this.getId() + "-scroll").parent().stop(true, false);
}
this._iCurrentScrollTimeout = jQuery.sap.delayedCall(duration, this, function () {
this._sCurrentScrollId = undefined;
this._iCurrentScrollTimeout = undefined;
});
this._oScroller.scrollTo(iScrollTo, 0, iDuration);
}
} else {
jQuery.sap.log.debug("AnchorBar :: no need to scroll to " + sId);
}
}
};
// use type 'object' because Metamodel doesn't know ScrollEnablement
/**
* Returns an sap.ui.core.delegate.ScrollEnablement object used to handle scrolling.
*
* @type object
* @public
* @ui5-metamodel This method also will be described in the UI5 (legacy) designtime metamodel
*/
AnchorBar.prototype.getScrollDelegate = function () {
return this._oScroller;
};
/*******************************************************************************
* Keyboard navigation
******************************************************************************/
AnchorBar.PAGEUP_AND_PAGEDOWN_JUMP_SIZE = 5;
/**
* Handles DOWN key, triggered on anchor bar level.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype.onsapdown = function (oEvent) {
oEvent.preventDefault();
if (oEvent.target.nextSibling) {
oEvent.target.nextSibling.focus();
}
};
/**
* Handles RIGHT key, triggered on anchor bar level.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype.onsapright = function (oEvent) {
var sMethodName = this._bRtl ? "onsapup" : "onsapdown";
this[sMethodName](oEvent);
};
/**
* Handles UP key, triggered on anchor bar level.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype.onsapup = function (oEvent) {
oEvent.preventDefault();
if (oEvent.target.previousSibling) {
oEvent.target.previousSibling.focus();
}
};
/**
* Handles LEFT key, triggered on anchor bar level.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype.onsapleft = function (oEvent) {
var sMethodName = this._bRtl ? "onsapdown" : "onsapup";
this[sMethodName](oEvent);
};
/**
* Handles HOME key, triggered on anchor bar level.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype.onsaphome = function (oEvent) {
oEvent.preventDefault();
if (oEvent.target.parentElement.firstChild) {
oEvent.target.parentElement.firstChild.focus();
}
};
/**
* Handles END key, triggered on anchor bar level.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype.onsapend = function (oEvent) {
oEvent.preventDefault();
if (oEvent.target.parentElement.lastChild) {
oEvent.target.parentElement.lastChild.focus();
}
};
/**
* Handles PAGE UP key, triggered on anchor bar level.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype.onsappageup = function (oEvent) {
this._handlePageUp(oEvent);
};
/**
* Handles PAGE DOWN key, triggered on anchor bar level.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype.onsappagedown = function (oEvent) {
this._handlePageDown(oEvent);
};
/**
* Handler for sappageup event.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype._handlePageUp = function (oEvent) {
oEvent.preventDefault();
var iNextIndex;
var aAnchors = this.getContent();
aAnchors.forEach(function (oAnchor, iAnchorIndex) {
if (oAnchor.getId() === oEvent.target.id) {
iNextIndex = iAnchorIndex - (AnchorBar.PAGEUP_AND_PAGEDOWN_JUMP_SIZE + 1);
return;
}
});
if (iNextIndex && aAnchors[iNextIndex]) {
aAnchors[iNextIndex].focus();
} else if (aAnchors[0]) {
aAnchors[0].focus();
}
};
/**
* Handler for sappagedown event.
*
* @param {jQuery.Event} oEvent
* @private
*/
AnchorBar.prototype._handlePageDown = function (oEvent) {
oEvent.preventDefault();
var iNextIndex;
var aAnchors = this.getContent();
aAnchors.forEach(function (oAnchor, iAnchorIndex) {
if (oAnchor.getId() === oEvent.target.id) {
iNextIndex = iAnchorIndex + AnchorBar.PAGEUP_AND_PAGEDOWN_JUMP_SIZE + 1;
return;
}
});
if (iNextIndex && aAnchors[iNextIndex]) {
aAnchors[iNextIndex].focus();
} else if (aAnchors[aAnchors.length - 1]) {
aAnchors[aAnchors.length - 1].focus();
}
};
/**
* handle tab focusing
*/
AnchorBar.prototype._setAnchorButtonsTabFocusValues = function (oSelectedButton) {
var aAnchorBarContent = this.getContent() || [],
$anchorBarItem,
sFocusable = '0',
sNotFocusable = '-1',
sTabIndex = "tabIndex";
aAnchorBarContent.forEach(function (oAnchorBarItem) {
$anchorBarItem = oAnchorBarItem.$();
if (oAnchorBarItem.sId === oSelectedButton.sId) {
$anchorBarItem.attr(sTabIndex, sFocusable);
} else {
$anchorBarItem.attr(sTabIndex, sNotFocusable);
}
});
};
/**
* Handler for F6
*
* @param oEvent - The event object
*/
AnchorBar.prototype.onsapskipforward = function (oEvent) {
this._handleGroupNavigation(oEvent, false);
};
/**
* Handler for F6 and Shift + F6 group navigation
*
* @param {jQuery.Event} oEvent
* @param {boolean} bShiftKey serving as a reference if shift is used
* @private
*/
AnchorBar.prototype._handleGroupNavigation = function (oEvent, bShiftKey) {
var oEventF6 = jQuery.Event("keydown"),
oSettings = {},
aSections = this.getParent().getSections(),
aSubSections = [this.getDomRef()],
aCurrentSubSections;
//this is needed in order to be sure that next F6 group will be found in sub sections
aSections.forEach(function (oSection) {
aCurrentSubSections = oSection.getSubSections().map(function (oSubSection) {
return oSubSection.$().attr("tabindex", -1)[0];
});
aSubSections = aSubSections.concat(aCurrentSubSections);
});
oSettings.scope = aSubSections;
oEvent.preventDefault();
this.$().focus();
oEventF6.target = oEvent.target;
oEventF6.keyCode = jQuery.sap.KeyCodes.F6;
oEventF6.shiftKey = bShiftKey;
jQuery.sap.handleF6GroupNavigation(oEventF6, oSettings);
};
/**
* called for figuring out responsive scenarios
*/
AnchorBar.prototype.onAfterRendering = function () {
var oSelectedButton;
if (Toolbar.prototype.onAfterRendering) {
Toolbar.prototype.onAfterRendering.call(this);
}
oSelectedButton = sap.ui.getCore().byId(this.getSelectedButton());
this._sHierarchicalSelectMode = AnchorBar._hierarchicalSelectModes.Text;
//save max for arrow show/hide management, the max position is the required scroll for the item to be fully visible
this._iMaxPosition = -1;
//show/hide scrolling arrows
this._sResizeListenerId = ResizeHandler.register(this, jQuery.proxy(this._adjustSize, this));
this.$().find(".sapUxAPAnchorBarScrollContainer").scroll(jQuery.proxy(this._onScroll, this));
//restore state from previous rendering
if (oSelectedButton) {
this.setSelectedButton(oSelectedButton);
this._setAnchorButtonsTabFocusValues(oSelectedButton);
}
//initial state
if (this._bHasButtonsBar) {
jQuery.sap.delayedCall(AnchorBar.DOM_CALC_DELAY, this, function () {
this._adjustSize();
});
}
};
AnchorBar.prototype._onScroll = function () {
if (!this._iCurrentSizeCheckTimeout) {
this._iCurrentSizeCheckTimeout = jQuery.sap.delayedCall(AnchorBar.SCROLL_DURATION, this, function () {
this._iCurrentSizeCheckTimeout = undefined;
this._adjustSize();
});
}
};
AnchorBar.prototype._computeBarSectionsInfo = function () {
//reset the max position
this._iMaxPosition = 0;
var aContent = this.getContent() || [];
aContent.forEach(this._computeNextSectionInfo, this);
//post processing based on how browsers implement rtl
//chrome, safari && Device.browser.webkit && firefox
if (this._bRtlScenario && (Device.browser.webkit || Device.browser.firefox)) {
aContent.forEach(this._adjustNextSectionInfo, this); // adjust positions depending of the browser
this._oScroller.scrollTo(this._iMaxPosition, 0, 0);
}
};
AnchorBar.prototype._computeNextSectionInfo = function (oContent) {
// set ARIA has-popup if button opens submenu
if (oContent.data("bHasSubMenu")) {
oContent.$().attr("aria-haspopup", "true");
}
// set ARIA attributes of main buttons
oContent.$().attr("aria-controls", oContent.data("sectionId"));
var iWidth = oContent.$().outerWidth(true);
//store info on the various sections for horizontalScrolling
//scrollLeft is the amount of scroll required for reaching that item in normal mode
this._oSectionInfo[oContent.data("sectionId")] = {
scrollLeft: this._iMaxPosition,
width: iWidth
};
this._iMaxPosition += iWidth;
};
/**
* Adjustment for webkit only
*
* Reverse the position as the scroll 0 is at the far end (first item = maxPosition, last item = 0)
*/
AnchorBar.prototype._adjustNextSectionInfo = function (oContent) {
var oSectionInfo = this._oSectionInfo[oContent.data("sectionId")];
if (Device.browser.firefox) {
// 27.11.2015 fix made for the following issue
// firefox not working yet see internal incident 1570001701
oSectionInfo.scrollLeft = -oSectionInfo.scrollLeft;
} else {
// Reverse all positions as the scroll 0 is at the far end (first item = maxPosition, last item = 0)
oSectionInfo.scrollLeft = this._iMaxPosition - oSectionInfo.scrollLeft - oSectionInfo.width;
}
};
AnchorBar.prototype._destroyPopoverContent = function () {
var aPopovers = this.getAggregation("_popovers");
if (Array.isArray(aPopovers)) {
aPopovers.forEach(function (popover) {
popover.destroyContent();
});
}
};
AnchorBar.prototype._resetControl = function () {
this._destroyPopoverContent();
this.getContent().forEach(this._detachPopoverHandler, this);
this.destroyAggregation('content', true);
return this;
};
/**
* clean created controls and deregister handlers
*/
AnchorBar.prototype.exit = function () {
if (this._sResizeListenerId) {
ResizeHandler.deregister(this._sResizeListenerId);
this._sResizeListenerId = null;
}
if (this._oScroller) {
this._oScroller.destroy();
this._oScroller = null;
}
};
return AnchorBar;
});
|
[INTERNAL][FIX] uxap.ObjectPage F6 navigation for tab navigation fixed
BCP: 1770236058
- fixed F6 behavior in case tab navigation is used
Change-Id: I1bcbfdb930e99133da3c8d8120e3706e0542c5bd
|
src/sap.uxap/src/sap/uxap/AnchorBar.js
|
[INTERNAL][FIX] uxap.ObjectPage F6 navigation for tab navigation fixed
|
<ide><path>rc/sap.uxap/src/sap/uxap/AnchorBar.js
<ide> AnchorBar.prototype._handleGroupNavigation = function (oEvent, bShiftKey) {
<ide> var oEventF6 = jQuery.Event("keydown"),
<ide> oSettings = {},
<del> aSections = this.getParent().getSections(),
<add> oObjectPageLayout = this.getParent(),
<add> bUseIconTabBar = oObjectPageLayout.getUseIconTabBar(),
<add> sCurrentSectionId = oObjectPageLayout.getSelectedSection(),
<add> aSections = oObjectPageLayout.getSections(),
<ide> aSubSections = [this.getDomRef()],
<ide> aCurrentSubSections;
<ide>
<del> //this is needed in order to be sure that next F6 group will be found in sub sections
<del> aSections.forEach(function (oSection) {
<del> aCurrentSubSections = oSection.getSubSections().map(function (oSubSection) {
<add> if (bUseIconTabBar) {
<add> aCurrentSubSections = sap.ui.getCore().byId(sCurrentSectionId).getSubSections().map(function (oSubSection) {
<ide> return oSubSection.$().attr("tabindex", -1)[0];
<ide> });
<del>
<del> aSubSections = aSubSections.concat(aCurrentSubSections);
<del> });
<add> } else {
<add> //this is needed in order to be sure that next F6 group will be found in sub sections
<add> aSections.forEach(function (oSection) {
<add> aCurrentSubSections = oSection.getSubSections().map(function (oSubSection) {
<add> return oSubSection.$().attr("tabindex", -1)[0];
<add> });
<add> });
<add> }
<add> aSubSections = aSubSections.concat(aCurrentSubSections);
<ide> oSettings.scope = aSubSections;
<ide>
<ide> oEvent.preventDefault();
|
|
JavaScript
|
mit
|
error: pathspec 'index.js' did not match any file(s) known to git
|
60556dddea2dd5f04a883f14d1d9b1a98e9c006b
| 1 |
ramhejazi/parsind
|
/**
* A HTTP resource manager and controller-autoloader
* @author Ramin Hejazi
* @copyright MIT © 2016 Ramin Hejazi
*/
const ResourceManager = require('./lib/ResourceManager');
module.exports = function(options) {
return new ResourceManager(options);
}
|
index.js
|
adding main index.js file
|
index.js
|
adding main index.js file
|
<ide><path>ndex.js
<add>/**
<add> * A HTTP resource manager and controller-autoloader
<add> * @author Ramin Hejazi
<add> * @copyright MIT © 2016 Ramin Hejazi
<add> */
<add>
<add>const ResourceManager = require('./lib/ResourceManager');
<add>module.exports = function(options) {
<add> return new ResourceManager(options);
<add>}
|
|
JavaScript
|
bsd-3-clause
|
05b7458c7c93dfaf1f8a7696e05a591990274211
| 0 |
fcapovilla/caporss,fcapovilla/caporss,fcapovilla/caporss
|
var Router = Backbone.Router.extend({
routes: {
"": "clear",
"feed/:id(/search/*query)": "viewFeed",
"folder/:id(/search/*query)": "viewFolder",
"item(/search/*query)": "viewAllItems"
},
initialize: function() {
this.itemListRegion = new Backbone.Marionette.Region({
el: '#item-list'
});
this.currentSelection = null;
this.itemList = null;
},
clear: function() {
$('#item-list').scrollTop(0);
this.itemListRegion.close();
if(this.currentSelection !== null) {
this.currentSelection.set('active', false);
this.currentSelection = null;
}
$('.mobile-item-button').addClass('invisible');
$('#item-list').addClass('hidden-phone');
$('.feed-list').removeClass('hidden-phone');
},
updateItemList : function(model, query) {
var that = this;
$('#item-list').scrollTop(0);
this.itemListRegion.close();
var options = {
reset: true,
reset_pagination: true,
success: function() {
that.itemListRegion.show(that.itemList);
}
};
// Prepare search query
if(query !== null) {
options.data = {};
if(query.match(/^title/)) {
options.data.search_title = true;
options.data.query = query.split('/')[1];
}
else {
options.data.query = query;
}
}
if(this.currentSelection !== null) {
this.currentSelection.set('active', false);
this.currentSelection.items.query = '';
this.currentSelection.items.search_title = false;
}
model.set('active', true);
this.currentSelection = model;
this.itemList = new ItemListView({collection: model.items});
model.items.fetch(options);
$('#item-list').removeClass('hidden-phone');
$('.feed-list').addClass('hidden-phone');
$('.mobile-item-button').removeClass('invisible');
$('#item-list').focus();
},
viewFeed: function(id, query) {
var model = folders.getFeed(id);
this.updateItemList(model, query);
},
viewFolder: function(id, query) {
var model = folders.get(id);
this.updateItemList(model, query);
},
viewAllItems: function(query) {
var model = folderList.allItemsFolder;
this.updateItemList(model, query);
},
goToModel: function(model) {
if(model instanceof Folder) {
this.navigate('folder/' + model.id, {trigger: true});
}
else if(model instanceof Feed) {
this.navigate('feed/' + model.id, {trigger: true});
}
else {
this.navigate('item', {trigger: true});
}
}
});
|
backbone/routers/router.js
|
var Router = Backbone.Router.extend({
routes: {
"": "clear",
"feed/:id(/search/*query)": "viewFeed",
"folder/:id(/search/*query)": "viewFolder",
"item(/search/*query)": "viewAllItems"
},
initialize: function() {
this.itemListRegion = new Backbone.Marionette.Region({
el: '#item-list'
});
this.currentSelection = null;
this.itemList = null;
},
clear: function() {
$('#item-list').scrollTop(0);
this.itemListRegion.close();
if(this.currentSelection !== null) {
this.currentSelection.set('active', false);
this.currentSelection = null;
}
$('.mobile-item-button').addClass('invisible');
$('#item-list').addClass('hidden-phone');
$('.feed-list').removeClass('hidden-phone');
},
updateItemList : function(model, query) {
var that = this;
$('#item-list').scrollTop(0);
var options = {
reset: true,
reset_pagination: true,
success: function() {
that.itemListRegion.show(that.itemList);
}
};
// Prepare search query
if(query !== null) {
options.data = {};
if(query.match(/^title/)) {
options.data.search_title = true;
options.data.query = query.split('/')[1];
}
else {
options.data.query = query;
}
}
if(this.currentSelection !== null) {
this.currentSelection.set('active', false);
this.currentSelection.items.query = '';
this.currentSelection.items.search_title = false;
}
model.set('active', true);
this.currentSelection = model;
this.itemList = new ItemListView({collection: model.items});
model.items.fetch(options);
$('#item-list').removeClass('hidden-phone');
$('.feed-list').addClass('hidden-phone');
$('.mobile-item-button').removeClass('invisible');
$('#item-list').focus();
},
viewFeed: function(id, query) {
var model = folders.getFeed(id);
this.updateItemList(model, query);
},
viewFolder: function(id, query) {
var model = folders.get(id);
this.updateItemList(model, query);
},
viewAllItems: function(query) {
var model = folderList.allItemsFolder;
this.updateItemList(model, query);
},
goToModel: function(model) {
if(model instanceof Folder) {
this.navigate('folder/' + model.id, {trigger: true});
}
else if(model instanceof Feed) {
this.navigate('feed/' + model.id, {trigger: true});
}
else {
this.navigate('item', {trigger: true});
}
}
});
|
Close itemListRegion on feed change
|
backbone/routers/router.js
|
Close itemListRegion on feed change
|
<ide><path>ackbone/routers/router.js
<ide> updateItemList : function(model, query) {
<ide> var that = this;
<ide> $('#item-list').scrollTop(0);
<add> this.itemListRegion.close();
<ide>
<ide> var options = {
<ide> reset: true,
|
|
Java
|
lgpl-2.1
|
32c106a94c24267cab1f378125d33ae2d9ee7790
| 0 |
antonioaraujob/digidoc4j,open-eid/digidoc4j,open-eid/digidoc4j,open-eid/digidoc4j,fazz/digidoc4j,keijokapp/digidoc4j,keijokapp/digidoc4j,fazz/digidoc4j,antonioaraujob/digidoc4j
|
package ee.sk.digidoc4j.utils;
import org.apache.commons.codec.binary.Base64;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class PKCS12SignerTest {
private static PKCS12Signer pkcs12Signer;
@BeforeClass
public static void setUp() {
pkcs12Signer = new PKCS12Signer("signout.p12", "test");
}
@Test
public void testGetPrivateKey() throws Exception {
assertEquals("MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQChn9qVaA+x3RkDBrD5ujwfnreK\n" +
"5/Nb+Nvo9Vg5OLMn3JKUoUhFX6A/q5lBUylK/CU/lNRTv/kicqnu1aCyAiW0XVYk8jrOI1wRbHey\n" +
"BMq/5gVm/vbbRtMi/XGLkgMZ5UDxY0QZfmu8wlRJ8164zRNocuUJLLXWOB6vda2RRXC3Cix4TDvQ\n" +
"wGmPrQQJ8dzDIJEkLS7NCLBTcndm7buQegRc043gKMjUmRhGZEzF4oJa4pMfXqeSa+PUtrNyNNNQ\n" +
"aOwTH29R8aFfGU2xorVvxoUieNipyWMEz8BTUGwwIceapWi77loBV/VQfStXnQNu/s6BC04ss43O\n" +
"6sK70MB1qlRZAgMBAAECggEAT81lGRY7gZ/gpKzeH0AERbyRdaWXdJcIxhq2B/LmCs2PFpIX5CEW\n" +
"N7nbvvR31A1xutYajIuiUI77NvEGGj6TLV5UlYOA451z7Sp4Y06YaW4CxtsnOhfbUlB/iuF6ZIPc\n" +
"sBNKYagZPCdbhPQElgy0A4OPcRtBYVduV0YsgCkgQU+clV93bCALpDkpU6EeeVys8bfBBtk7cLXe\n" +
"TF3IBXykvXi4tFaVDKz8lTYvDt66clhxFNBo+0H2IL4RqZ4sQCfpi8Gpi0yr2kmGDGvYgTOM8sOF\n" +
"sS2iHwPDIOOEY6RINHNBRuMpC1rmkOOK40qnmVfMrGAj3QpqSDeN6HVu/yqhAQKBgQDVCUbOCCsS\n" +
"oyhBPvQXOTesmD1Y/jrqXwPm8J2t9gKYKZwGtuiKeBwkRRg1Bqo8KwD3WABWrIa/qqYm1/NFKQnW\n" +
"GqILLIrvh9ZLL5U3vDyCdPovYZfhYQX5wPwEkmhAdVfgROzNoADQQEM5o8cokoxn+Uz24Fn6Xz5n\n" +
"YYB8kBQnOQKBgQDCOERfUSzbibl6I0Dn2JHwFgWqEdd2ViQoskeBhE9LhpT7bKD2osmCGvSLdt2B\n" +
"hVLYwbM4mu+9QdYdEoIgvx+n42eZ60ycMChOgwTKC2KYb2NE19vpin0rgYt7C3zpxPjOR83ZUii+\n" +
"9mc2zPUKu2oN0/ZBfEozqmRO4nKSm+V2IQKBgFuGTMEfiUHMjvLZFQ0VK/IexdyB/NXMVGTXYybl\n" +
"1l+BIONRmb5Ds/NxK+E8J88JurSJPjv+izW1HwT5Ki7AXtV5Q70BOf+GoG5U1wrG+Egj8YiBqTrO\n" +
"8D5Ixv0/2UI4J7TWZ9Y/s5nEwhz1XA72RxQ0avh1krKaULkhjo31aHMhAoGAa6A8m0ljf0DhzIIO\n" +
"rKvBq3a4qtb6PDReE0NABtCoFGU+19kJlcL9waBoVYSIGQclssIcK8kIAyuhmDiyba0bwLBur8fJ\n" +
"i1/QZjmKhOAsQeav7u1jixZYaKx/+66RCQZDDiSSONSjibcH2UFYpRrYGVOVShKzF9Bbh69K6F2F\n" +
"maECgYALiEqtS4gfy0t4iydbAGxzvAZPwlifgqqjYsm9XoI7U8wJItw5NgWV+c28yuibZd6tKolN\n" +
"vLV5ywqxQ8t3IoMO/mwXFOgHCUErlefeL7y1SOGqTp2OtJnKSoF9y1GLmXiYi2A0i46EEOR6Hapj\n" +
"qRRMT9z0gtZJviW0dhr/VUZXrA==",
Base64.encodeBase64(pkcs12Signer.getPrivateKey().getEncoded()));
}
}
|
test/ee/sk/digidoc4j/utils/PKCS12SignerTest.java
|
package ee.sk.digidoc4j.utils;
import com.sun.org.apache.xml.internal.security.utils.Base64;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class PKCS12SignerTest {
private static PKCS12Signer pkcs12Signer;
@BeforeClass
public static void setUp() {
pkcs12Signer = new PKCS12Signer("signout.p12", "test");
}
@Test
public void testGetPrivateKey() throws Exception {
assertEquals("MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQChn9qVaA+x3RkDBrD5ujwfnreK\n" +
"5/Nb+Nvo9Vg5OLMn3JKUoUhFX6A/q5lBUylK/CU/lNRTv/kicqnu1aCyAiW0XVYk8jrOI1wRbHey\n" +
"BMq/5gVm/vbbRtMi/XGLkgMZ5UDxY0QZfmu8wlRJ8164zRNocuUJLLXWOB6vda2RRXC3Cix4TDvQ\n" +
"wGmPrQQJ8dzDIJEkLS7NCLBTcndm7buQegRc043gKMjUmRhGZEzF4oJa4pMfXqeSa+PUtrNyNNNQ\n" +
"aOwTH29R8aFfGU2xorVvxoUieNipyWMEz8BTUGwwIceapWi77loBV/VQfStXnQNu/s6BC04ss43O\n" +
"6sK70MB1qlRZAgMBAAECggEAT81lGRY7gZ/gpKzeH0AERbyRdaWXdJcIxhq2B/LmCs2PFpIX5CEW\n" +
"N7nbvvR31A1xutYajIuiUI77NvEGGj6TLV5UlYOA451z7Sp4Y06YaW4CxtsnOhfbUlB/iuF6ZIPc\n" +
"sBNKYagZPCdbhPQElgy0A4OPcRtBYVduV0YsgCkgQU+clV93bCALpDkpU6EeeVys8bfBBtk7cLXe\n" +
"TF3IBXykvXi4tFaVDKz8lTYvDt66clhxFNBo+0H2IL4RqZ4sQCfpi8Gpi0yr2kmGDGvYgTOM8sOF\n" +
"sS2iHwPDIOOEY6RINHNBRuMpC1rmkOOK40qnmVfMrGAj3QpqSDeN6HVu/yqhAQKBgQDVCUbOCCsS\n" +
"oyhBPvQXOTesmD1Y/jrqXwPm8J2t9gKYKZwGtuiKeBwkRRg1Bqo8KwD3WABWrIa/qqYm1/NFKQnW\n" +
"GqILLIrvh9ZLL5U3vDyCdPovYZfhYQX5wPwEkmhAdVfgROzNoADQQEM5o8cokoxn+Uz24Fn6Xz5n\n" +
"YYB8kBQnOQKBgQDCOERfUSzbibl6I0Dn2JHwFgWqEdd2ViQoskeBhE9LhpT7bKD2osmCGvSLdt2B\n" +
"hVLYwbM4mu+9QdYdEoIgvx+n42eZ60ycMChOgwTKC2KYb2NE19vpin0rgYt7C3zpxPjOR83ZUii+\n" +
"9mc2zPUKu2oN0/ZBfEozqmRO4nKSm+V2IQKBgFuGTMEfiUHMjvLZFQ0VK/IexdyB/NXMVGTXYybl\n" +
"1l+BIONRmb5Ds/NxK+E8J88JurSJPjv+izW1HwT5Ki7AXtV5Q70BOf+GoG5U1wrG+Egj8YiBqTrO\n" +
"8D5Ixv0/2UI4J7TWZ9Y/s5nEwhz1XA72RxQ0avh1krKaULkhjo31aHMhAoGAa6A8m0ljf0DhzIIO\n" +
"rKvBq3a4qtb6PDReE0NABtCoFGU+19kJlcL9waBoVYSIGQclssIcK8kIAyuhmDiyba0bwLBur8fJ\n" +
"i1/QZjmKhOAsQeav7u1jixZYaKx/+66RCQZDDiSSONSjibcH2UFYpRrYGVOVShKzF9Bbh69K6F2F\n" +
"maECgYALiEqtS4gfy0t4iydbAGxzvAZPwlifgqqjYsm9XoI7U8wJItw5NgWV+c28yuibZd6tKolN\n" +
"vLV5ywqxQ8t3IoMO/mwXFOgHCUErlefeL7y1SOGqTp2OtJnKSoF9y1GLmXiYi2A0i46EEOR6Hapj\n" +
"qRRMT9z0gtZJviW0dhr/VUZXrA==",
Base64.encode(pkcs12Signer.getPrivateKey().getEncoded()));
}
}
|
changes base64 encoder to apache-codec
|
test/ee/sk/digidoc4j/utils/PKCS12SignerTest.java
|
changes base64 encoder to apache-codec
|
<ide><path>est/ee/sk/digidoc4j/utils/PKCS12SignerTest.java
<ide> package ee.sk.digidoc4j.utils;
<ide>
<del>import com.sun.org.apache.xml.internal.security.utils.Base64;
<add>import org.apache.commons.codec.binary.Base64;
<ide> import org.junit.BeforeClass;
<ide> import org.junit.Test;
<ide>
<ide> "maECgYALiEqtS4gfy0t4iydbAGxzvAZPwlifgqqjYsm9XoI7U8wJItw5NgWV+c28yuibZd6tKolN\n" +
<ide> "vLV5ywqxQ8t3IoMO/mwXFOgHCUErlefeL7y1SOGqTp2OtJnKSoF9y1GLmXiYi2A0i46EEOR6Hapj\n" +
<ide> "qRRMT9z0gtZJviW0dhr/VUZXrA==",
<del> Base64.encode(pkcs12Signer.getPrivateKey().getEncoded()));
<add> Base64.encodeBase64(pkcs12Signer.getPrivateKey().getEncoded()));
<ide> }
<ide>
<ide> }
|
|
JavaScript
|
apache-2.0
|
4d2c6c17cfd7331207b9d475882df40f98897124
| 0 |
prebid/Prebid.js,PWyrembak/Prebid.js,gumgum/Prebid.js,PWyrembak/Prebid.js,ImproveDigital/Prebid.js,prebid/Prebid.js,Niksok/Prebid.js,gumgum/Prebid.js,PubWise/Prebid.js,PubWise/Prebid.js,HuddledMasses/Prebid.js,ImproveDigital/Prebid.js,ImproveDigital/Prebid.js,prebid/Prebid.js,PWyrembak/Prebid.js,PubWise/Prebid.js,Niksok/Prebid.js,Niksok/Prebid.js,HuddledMasses/Prebid.js,HuddledMasses/Prebid.js,gumgum/Prebid.js
|
import {registerBidder} from '../src/adapters/bidderFactory.js';
import {BANNER} from '../src/mediaTypes.js';
import {config} from '../src/config.js';
import * as utils from '../src/utils.js';
const BIDDER_CODE = 'interactiveOffers';
const ENDPOINT = 'https://prebid.ioadx.com/bidRequest/?partnerId=4a3bab187a74ac4862920cca864d6eff195ff5e4';
const DEFAULT = {
'OpenRTBBidRequest': {},
'OpenRTBBidRequestSite': {},
'OpenRTBBidRequestSitePublisher': {},
'OpenRTBBidRequestSiteContent': {
language: navigator.language,
},
'OpenRTBBidRequestSource': {},
'OpenRTBBidRequestDevice': {
ua: navigator.userAgent,
language: navigator.language
},
'OpenRTBBidRequestUser': {},
'OpenRTBBidRequestImp': {},
'OpenRTBBidRequestImpBanner': {},
'PrebidBid': {
currency: 'USD',
ttl: 60,
netRevenue: false
}
};
export const spec = {
code: BIDDER_CODE,
supportedMediaTypes: [BANNER],
isBidRequestValid: function(bid) {
let ret = true;
if (bid && bid.params) {
if (!utils.isNumber(bid.params.pubid)) {
utils.logWarn('pubid must be a valid numeric ID');
ret = false;
}
if (bid.params.tmax && !utils.isNumber(bid.params.tmax)) {
utils.logWarn('tmax must be a valid numeric ID');
ret = false;
}
} else {
utils.logWarn('invalid request');
ret = false;
}
return ret;
},
buildRequests: function(validBidRequests, bidderRequest) {
let payload = parseRequestPrebidjsToOpenRTB(bidderRequest);
return {
method: 'POST',
url: ENDPOINT,
data: JSON.stringify(payload),
bidderRequest: bidderRequest
};
},
interpretResponse: function(response, request) {
let bidResponses = [];
if (response.body && response.body.length) {
bidResponses = parseResponseOpenRTBToPrebidjs(response.body);
}
return bidResponses;
}
};
function parseRequestPrebidjsToOpenRTB(prebidRequest) {
let pageURL = window.location.href;
let domain = window.location.hostname;
let secure = (window.location.protocol == 'https:' ? 1 : 0);
let openRTBRequest = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequest']));
openRTBRequest.id = prebidRequest.auctionId;
openRTBRequest.ext = {
auctionstart: Date.now()
};
openRTBRequest.site = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestSite']));
openRTBRequest.site.id = domain;
openRTBRequest.site.name = domain;
openRTBRequest.site.domain = domain;
openRTBRequest.site.page = pageURL;
openRTBRequest.site.ref = prebidRequest.refererInfo.referer;
openRTBRequest.site.publisher = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestSitePublisher']));
openRTBRequest.site.publisher.id = 0;
openRTBRequest.site.publisher.name = config.getConfig('publisherDomain');
openRTBRequest.site.publisher.domain = domain;
openRTBRequest.site.publisher.domain = domain;
openRTBRequest.site.content = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestSiteContent']));
openRTBRequest.source = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestSource']));
openRTBRequest.source.fd = 0;
openRTBRequest.source.tid = prebidRequest.auctionId;
openRTBRequest.source.pchain = '';
openRTBRequest.device = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestDevice']));
openRTBRequest.user = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestUser']));
openRTBRequest.imp = [];
prebidRequest.bids.forEach(function(bid, impId) {
impId++;
let imp = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestImp']));
imp.id = impId;
imp.secure = secure;
imp.tagid = bid.bidId;
openRTBRequest.site.publisher.id = openRTBRequest.site.publisher.id || bid.params.pubid;
openRTBRequest.tmax = openRTBRequest.tmax || bid.params.tmax || 0;
Object.keys(bid.mediaTypes).forEach(function(mediaType) {
if (mediaType == 'banner') {
imp.banner = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestImpBanner']));
imp.banner.w = 0;
imp.banner.h = 0;
imp.banner.format = [];
bid.mediaTypes[mediaType].sizes.forEach(function(adSize) {
if (!imp.banner.w) {
imp.banner.w = adSize[0];
imp.banner.h = adSize[1];
}
imp.banner.format.push({w: adSize[0], h: adSize[1]});
});
}
});
openRTBRequest.imp.push(imp);
});
return openRTBRequest;
}
function parseResponseOpenRTBToPrebidjs(openRTBResponse) {
let prebidResponse = [];
openRTBResponse.forEach(function(response) {
response.seatbid.forEach(function(seatbid) {
seatbid.bid.forEach(function(bid) {
let prebid = JSON.parse(JSON.stringify(DEFAULT['PrebidBid']));
prebid.requestId = bid.ext.tagid;
prebid.ad = bid.adm;
prebid.creativeId = bid.crid;
prebid.cpm = bid.price;
prebid.width = bid.w;
prebid.height = bid.h;
prebid.mediaType = 'banner';
prebid.meta = {
advertiserDomains: bid.adomain,
advertiserId: bid.adid,
mediaType: 'banner',
primaryCatId: bid.cat[0] || '',
secondaryCatIds: bid.cat
}
prebidResponse.push(prebid);
});
});
});
return prebidResponse;
}
registerBidder(spec);
|
modules/interactiveOffersBidAdapter.js
|
import {registerBidder} from '../src/adapters/bidderFactory.js';
import {BANNER} from '../src/mediaTypes.js';
import {config} from '../src/config.js';
import * as utils from '../src/utils.js';
// Bidder identifier used by Prebid.js config and the RTB endpoint this
// adapter POSTs OpenRTB requests to (partnerId is baked into the URL).
const BIDDER_CODE = 'interactiveOffers';
const ENDPOINT = 'https://rtb.ioadx.com/bidRequest/?partnerId=4a3bab187a74ac4862920cca864d6eff195ff5e4';
// Skeleton objects cloned (via JSON round-trip) as the starting point for
// every OpenRTB request section and every Prebid bid this adapter builds.
const DEFAULT = {
  'OpenRTBBidRequest': {},
  'OpenRTBBidRequestSite': {},
  'OpenRTBBidRequestSitePublisher': {},
  'OpenRTBBidRequestSiteContent': {
    language: navigator.language,
  },
  'OpenRTBBidRequestSource': {},
  'OpenRTBBidRequestDevice': {
    ua: navigator.userAgent,
    language: navigator.language
  },
  'OpenRTBBidRequestUser': {},
  'OpenRTBBidRequestImp': {},
  'OpenRTBBidRequestImpBanner': {},
  // Defaults merged into every bid returned to Prebid core.
  'PrebidBid': {
    currency: 'USD',
    ttl: 60,
    netRevenue: false
  }
};
// Prebid.js bidder spec — the three hooks Prebid core calls on this adapter.
export const spec = {
  code: BIDDER_CODE,
  supportedMediaTypes: [BANNER],
  // Validates bid params: pubid is required and numeric; tmax, when present,
  // must be numeric. Logs a warning and rejects the bid otherwise.
  isBidRequestValid: function(bid) {
    let ret = true;
    if (bid && bid.params) {
      if (!utils.isNumber(bid.params.pubid)) {
        utils.logWarn('pubid must be a valid numeric ID');
        ret = false;
      }
      if (bid.params.tmax && !utils.isNumber(bid.params.tmax)) {
        utils.logWarn('tmax must be a valid numeric ID');
        ret = false;
      }
    } else {
      utils.logWarn('invalid request');
      ret = false;
    }
    return ret;
  },
  // Converts the whole bidderRequest to one OpenRTB payload and POSTs it.
  buildRequests: function(validBidRequests, bidderRequest) {
    let payload = parseRequestPrebidjsToOpenRTB(bidderRequest);
    return {
      method: 'POST',
      url: ENDPOINT,
      data: JSON.stringify(payload),
      bidderRequest: bidderRequest
    };
  },
  // Maps the server's OpenRTB body back to Prebid bids; empty body => no bids.
  interpretResponse: function(response, request) {
    let bidResponses = [];
    if (response.body && response.body.length) {
      bidResponses = parseResponseOpenRTBToPrebidjs(response.body);
    }
    return bidResponses;
  }
};
/**
 * Builds an OpenRTB BidRequest from a Prebid.js bidderRequest.
 *
 * Each Prebid bid becomes one OpenRTB imp (1-based ids); publisher id and
 * tmax are taken from the first bid that supplies them. Only banner
 * mediaTypes are mapped; the first declared size becomes imp.banner.w/h and
 * all sizes go into imp.banner.format.
 *
 * @param {Object} prebidRequest the bidderRequest passed to buildRequests
 * @returns {Object} an OpenRTB 2.x BidRequest object
 */
function parseRequestPrebidjsToOpenRTB(prebidRequest) {
  let pageURL = window.location.href;
  let domain = window.location.hostname;
  let secure = (window.location.protocol == 'https:' ? 1 : 0);
  let openRTBRequest = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequest']));
  openRTBRequest.id = prebidRequest.auctionId;
  openRTBRequest.ext = {
    auctionstart: Date.now()
  };
  openRTBRequest.site = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestSite']));
  openRTBRequest.site.id = domain;
  openRTBRequest.site.name = domain;
  openRTBRequest.site.domain = domain;
  openRTBRequest.site.page = pageURL;
  openRTBRequest.site.ref = prebidRequest.refererInfo.referer;
  openRTBRequest.site.publisher = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestSitePublisher']));
  // Placeholder id; replaced below by the first bid's params.pubid.
  openRTBRequest.site.publisher.id = 0;
  openRTBRequest.site.publisher.name = config.getConfig('publisherDomain');
  openRTBRequest.site.publisher.domain = domain; // (was assigned twice; duplicate removed)
  openRTBRequest.site.content = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestSiteContent']));
  openRTBRequest.source = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestSource']));
  openRTBRequest.source.fd = 0;
  openRTBRequest.source.tid = prebidRequest.auctionId;
  openRTBRequest.source.pchain = '';
  openRTBRequest.device = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestDevice']));
  openRTBRequest.user = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestUser']));
  openRTBRequest.imp = [];
  prebidRequest.bids.forEach(function(bid, impId) {
    impId++; // OpenRTB imp ids are 1-based
    let imp = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestImp']));
    imp.id = impId;
    imp.secure = secure;
    // tagid carries the Prebid bidId so the response parser can match bids back.
    imp.tagid = bid.bidId;
    // First bid that provides pubid/tmax wins; later bids cannot override.
    openRTBRequest.site.publisher.id = openRTBRequest.site.publisher.id || bid.params.pubid;
    openRTBRequest.tmax = openRTBRequest.tmax || bid.params.tmax || 0;
    Object.keys(bid.mediaTypes).forEach(function(mediaType) {
      if (mediaType == 'banner') {
        imp.banner = JSON.parse(JSON.stringify(DEFAULT['OpenRTBBidRequestImpBanner']));
        imp.banner.w = 0;
        imp.banner.h = 0;
        imp.banner.format = [];
        bid.mediaTypes[mediaType].sizes.forEach(function(adSize) {
          if (!imp.banner.w) {
            imp.banner.w = adSize[0];
            imp.banner.h = adSize[1];
          }
          imp.banner.format.push({w: adSize[0], h: adSize[1]});
        });
      }
    });
    openRTBRequest.imp.push(imp);
  });
  return openRTBRequest;
}
/**
 * Maps an array of OpenRTB bid-response objects onto Prebid.js bid objects.
 *
 * Walks response -> seatbid -> bid and converts each OpenRTB bid into a
 * Prebid bid cloned from DEFAULT['PrebidBid'] (currency/ttl/netRevenue defaults).
 *
 * @param {Array} openRTBResponse array of OpenRTB BidResponse objects
 * @returns {Array} Prebid.js bid objects
 */
function parseResponseOpenRTBToPrebidjs(openRTBResponse) {
  let prebidResponse = [];
  openRTBResponse.forEach(function(response) {
    response.seatbid.forEach(function(seatbid) {
      seatbid.bid.forEach(function(bid) {
        let prebid = JSON.parse(JSON.stringify(DEFAULT['PrebidBid']));
        // requestId must echo the impression tagid so Prebid can match the bid
        // back to its request (tagid carries bid.bidId — see the request builder).
        prebid.requestId = bid.ext.tagid;
        prebid.ad = bid.adm;
        prebid.creativeId = bid.crid;
        prebid.cpm = bid.price;
        prebid.width = bid.w;
        prebid.height = bid.h;
        prebid.mediaType = 'banner';
        // bid.cat is optional in OpenRTB; guard so a response without
        // categories does not throw a TypeError on bid.cat[0].
        let categories = bid.cat || [];
        prebid.meta = {
          advertiserDomains: bid.adomain,
          advertiserId: bid.adid,
          mediaType: 'banner',
          primaryCatId: categories[0] || '',
          secondaryCatIds: categories
        };
        prebidResponse.push(prebid);
      });
    });
  });
  return prebidResponse;
}
registerBidder(spec); // register this adapter with the Prebid.js core
|
InteractiveOffers BidAdapter: New endpoint (#7243)
Co-authored-by: EC2 Default User <a6c2ad1b181775e2874a89890a3889abc0fdaca8@ip-172-31-93-198.ec2.internal>
|
modules/interactiveOffersBidAdapter.js
|
InteractiveOffers BidAdapter: New endpoint (#7243)
|
<ide><path>odules/interactiveOffersBidAdapter.js
<ide> import * as utils from '../src/utils.js';
<ide>
<ide> const BIDDER_CODE = 'interactiveOffers';
<del>const ENDPOINT = 'https://rtb.ioadx.com/bidRequest/?partnerId=4a3bab187a74ac4862920cca864d6eff195ff5e4';
<add>const ENDPOINT = 'https://prebid.ioadx.com/bidRequest/?partnerId=4a3bab187a74ac4862920cca864d6eff195ff5e4';
<ide>
<ide> const DEFAULT = {
<ide> 'OpenRTBBidRequest': {},
|
|
Java
|
lgpl-2.1
|
b1b7f197a190114539dbae460eca78b43e32d66c
| 0 |
maxbiostat/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,adamallo/beast-mcmc,maxbiostat/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc,adamallo/beast-mcmc,maxbiostat/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,4ment/beast-mcmc,adamallo/beast-mcmc,maxbiostat/beast-mcmc,maxbiostat/beast-mcmc,maxbiostat/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc
|
/*
* GeneralizedLinearModelParser.java
*
* Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.inferencexml.distribution;
import cern.colt.matrix.impl.DenseDoubleMatrix2D;
import cern.colt.matrix.linalg.SingularValueDecomposition;
import dr.inference.distribution.GeneralizedLinearModel;
import dr.inference.distribution.LinearRegression;
import dr.inference.distribution.LogLinearModel;
import dr.inference.distribution.LogisticRegression;
import dr.inference.model.DesignMatrix;
import dr.inference.model.Likelihood;
import dr.inference.model.Parameter;
import dr.xml.*;
/**
*
*/
/**
 * XML parser for {@code <glmModel>} elements: builds a {@link GeneralizedLinearModel}
 * (logistic, normal, log-normal or log-linear) from dependent variables, one or more
 * independent-variable blocks with design matrices, optional random effects and
 * optional scale parameters.
 */
public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {

    public static final String GLM_LIKELIHOOD = "glmModel";
    public static final String DEPENDENT_VARIABLES = "dependentVariables";
    public static final String INDEPENDENT_VARIABLES = "independentVariables";
    public static final String BASIS_MATRIX = "basis";
    public static final String FAMILY = "family";
    public static final String SCALE_VARIABLES = "scaleVariables";
    public static final String INDICATOR = "indicator";
    public static final String LOGISTIC_REGRESSION = "logistic";
    public static final String NORMAL_REGRESSION = "normal";
    public static final String LOG_NORMAL_REGRESSION = "logNormal";
    public static final String LOG_LINEAR = "logLinear";
    public static final String RANDOM_EFFECTS = "randomEffects";
    public static final String CHECK_IDENTIFIABILITY = "checkIdentifiability";
    public static final String CHECK_FULL_RANK = "checkFullRank";

    public String getParserName() {
        return GLM_LIKELIHOOD;
    }

    /**
     * Parses the {@code <glmModel>} element into a concrete GLM.
     *
     * @throws XMLParseException if the family is unknown, required scale parameters
     *         are missing, dimensions are inconsistent, a design matrix is rank
     *         deficient (when checking is enabled), or predictors are unidentifiable.
     */
    public Object parseXMLObject(XMLObject xo) throws XMLParseException {

        XMLObject cxo = xo.getChild(DEPENDENT_VARIABLES);
        Parameter dependentParam = null;
        if (cxo != null) {
            dependentParam = (Parameter) cxo.getChild(Parameter.class);
        }

        String family = xo.getStringAttribute(FAMILY);
        GeneralizedLinearModel glm;
        if (family.compareTo(LOGISTIC_REGRESSION) == 0) {
            glm = new LogisticRegression(dependentParam);
        } else if (family.compareTo(NORMAL_REGRESSION) == 0) {
            glm = new LinearRegression(dependentParam, false);
        } else if (family.compareTo(LOG_NORMAL_REGRESSION) == 0) {
            glm = new LinearRegression(dependentParam, true);
        } else if (family.compareTo(LOG_LINEAR) == 0) {
            glm = new LogLinearModel(dependentParam);
        } else {
            throw new XMLParseException("Family '" + family + "' is not currently implemented");
        }

        if (glm.requiresScale()) {
            cxo = xo.getChild(SCALE_VARIABLES);
            Parameter scaleParameter = null;
            Parameter scaleDesign = null;
            if (cxo != null) {
                scaleParameter = (Parameter) cxo.getChild(Parameter.class);
                XMLObject gxo = cxo.getChild(INDICATOR);
                if (gxo != null) {
                    scaleDesign = (Parameter) gxo.getChild(Parameter.class);
                }
            }
            if (scaleParameter == null) {
                throw new XMLParseException("Family '" + family + "' requires scale parameters");
            }
            if (scaleDesign == null) {
                // Default: every observation maps to the first scale parameter.
                scaleDesign = new Parameter.Default(dependentParam.getDimension(), 0.0);
            } else {
                if (scaleDesign.getDimension() != dependentParam.getDimension()) {
                    throw new XMLParseException("Scale (" + dependentParam.getDimension() + ") and scaleDesign parameters (" + scaleDesign.getDimension() + ") must be the same dimension");
                }
                // Convert from 1-based XML indices to 0-based internal indices,
                // validating each entry against the scale parameter dimension.
                for (int i = 0; i < scaleDesign.getDimension(); i++) {
                    double value = scaleDesign.getParameterValue(i);
                    if (value < 1 || value > scaleParameter.getDimension()) {
                        throw new XMLParseException("Invalid scaleDesign value");
                    }
                    scaleDesign.setParameterValue(i, value - 1);
                }
            }
            glm.addScaleParameter(scaleParameter, scaleDesign);
        }

        // Read the full-rank flag BEFORE adding independent parameters:
        // addIndependentParameters() consults checkFullRankOfMatrix, so assigning it
        // only afterwards (as the previous revision did) left the field at its
        // default of false and silently disabled the rank check on every parse.
        checkFullRankOfMatrix = xo.getAttribute(CHECK_FULL_RANK, true);

        addIndependentParameters(xo, glm, dependentParam);
        addRandomEffects(xo, glm, dependentParam);

        boolean checkIdentifiability = xo.getAttribute(CHECK_IDENTIFIABILITY, true);
        if (checkIdentifiability) {
            if (!glm.getAllIndependentVariablesIdentifiable()) {
                throw new XMLParseException("All design matrix predictors are not identifiable in " + xo.getId());
            }
        }

        return glm;
    }

    /**
     * Adds every {@code <randomEffects>} child to the model, sizing each effect
     * to the dependent parameter when its dimension has not been set explicitly.
     */
    public void addRandomEffects(XMLObject xo, GeneralizedLinearModel glm,
                                 Parameter dependentParam) throws XMLParseException {
        int totalCount = xo.getChildCount();
        for (int i = 0; i < totalCount; i++) {
            if (xo.getChildName(i).compareTo(RANDOM_EFFECTS) == 0) {
                XMLObject cxo = (XMLObject) xo.getChild(i);
                Parameter randomEffect = (Parameter) cxo.getChild(Parameter.class);
                checkRandomEffectsDimensions(randomEffect, dependentParam);
                glm.addRandomEffectsParameter(randomEffect);
            }
        }
    }

    /**
     * Adds every {@code <independentVariables>} child (coefficient parameter,
     * design matrix and optional indicator) to the model, auto-sizing parameters
     * whose dimensions were not specified and optionally enforcing full rank.
     */
    public void addIndependentParameters(XMLObject xo, GeneralizedLinearModel glm,
                                         Parameter dependentParam) throws XMLParseException {
        int totalCount = xo.getChildCount();
        for (int i = 0; i < totalCount; i++) {
            if (xo.getChildName(i).compareTo(INDEPENDENT_VARIABLES) == 0) {
                XMLObject cxo = (XMLObject) xo.getChild(i);
                Parameter independentParam = (Parameter) cxo.getChild(Parameter.class);
                DesignMatrix designMatrix = (DesignMatrix) cxo.getChild(DesignMatrix.class);
                checkDimensions(independentParam, dependentParam, designMatrix);
                cxo = cxo.getChild(INDICATOR);
                Parameter indicator = null;
                if (cxo != null) {
                    indicator = (Parameter) cxo.getChild(Parameter.class);
                    if (indicator.getDimension() == 0) {
                        // if a dimension hasn't been set, then set it automatically
                        indicator.setDimension(independentParam.getDimension());
                    }
                    if (indicator.getDimension() != independentParam.getDimension()) {
                        throw new XMLParseException("dim(" + independentParam.getId() + ") != dim(" + indicator.getId() + ")");
                    }
                }
                if (checkFullRankOfMatrix) {
                    checkFullRank(designMatrix);
                }
                glm.addIndependentParameter(independentParam, designMatrix, indicator);
            }
        }
    }

    // Whether design matrices must be of full column rank; set from the
    // checkFullRank attribute at the start of parseXMLObject.
    private boolean checkFullRankOfMatrix;

    /**
     * Verifies via SVD that the design matrix has full column rank.
     *
     * @throws XMLParseException if the numerical rank is below the column count
     */
    private void checkFullRank(DesignMatrix designMatrix) throws XMLParseException {
        int fullRank = designMatrix.getColumnDimension();
        SingularValueDecomposition svd = new SingularValueDecomposition(
                new DenseDoubleMatrix2D(designMatrix.getParameterAsMatrix()));
        int realRank = svd.rank();
        if (realRank != fullRank) {
            throw new XMLParseException(
                    "rank(" + designMatrix.getId() + ") = " + realRank +
                            ".\nMatrix is not of full rank as colDim(" + designMatrix.getId() + ") = " + fullRank
            );
        }
    }

    /**
     * Checks (auto-sizing when unset) that a random effect matches the
     * dependent parameter's dimension. No-op when there is no dependent parameter.
     */
    private void checkRandomEffectsDimensions(Parameter randomEffect, Parameter dependentParam)
            throws XMLParseException {
        if (dependentParam != null) {
            if (randomEffect.getDimension() == 0) {
                // if a dimension hasn't been set, then set it automatically
                randomEffect.setDimension(dependentParam.getDimension());
            }
            if (randomEffect.getDimension() != dependentParam.getDimension()) {
                throw new XMLParseException(
                        "dim(" + dependentParam.getId() + ") != dim(" + randomEffect.getId() + ")"
                );
            }
        }
    }

    /**
     * Checks (auto-sizing when unset) that the dependent and independent
     * parameters are conformable with the design matrix: dim(dependent) must
     * equal the matrix row count and dim(independent) the column count.
     */
    private void checkDimensions(Parameter independentParam, Parameter dependentParam, DesignMatrix designMatrix)
            throws XMLParseException {
        if (dependentParam != null) {
            if (dependentParam.getDimension() == 0) {
                // if a dimension hasn't been set, then set it automatically
                dependentParam.setDimension(designMatrix.getRowDimension());
            }
            if ((dependentParam.getDimension() != designMatrix.getRowDimension()) ||
                    (independentParam.getDimension() != designMatrix.getColumnDimension()))
                throw new XMLParseException(
                        "dim(" + dependentParam.getId() + ") != dim(" + designMatrix.getId() + " %*% " + independentParam.getId() + ")"
                );
        } else {
            if (independentParam.getDimension() == 0) {
                // if a dimension hasn't been set, then set it automatically
                independentParam.setDimension(designMatrix.getColumnDimension());
            }
            if (independentParam.getDimension() != designMatrix.getColumnDimension()) {
                throw new XMLParseException(
                        "dim(" + independentParam.getId() + ") is incompatible with dim (" + designMatrix.getId() + ")"
                );
            }
        }
    }

    //************************************************************************
    // AbstractXMLObjectParser implementation
    //************************************************************************

    public XMLSyntaxRule[] getSyntaxRules() {
        return rules;
    }

    private final XMLSyntaxRule[] rules = {
            AttributeRule.newStringRule(FAMILY),
            AttributeRule.newBooleanRule(CHECK_IDENTIFIABILITY, true),
            AttributeRule.newBooleanRule(CHECK_FULL_RANK, true),
            new ElementRule(DEPENDENT_VARIABLES,
                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, true),
            new ElementRule(INDEPENDENT_VARIABLES,
                    new XMLSyntaxRule[]{
                            new ElementRule(Parameter.class, true),
                            new ElementRule(DesignMatrix.class),
                            new ElementRule(INDICATOR,
                                    new XMLSyntaxRule[]{
                                            new ElementRule(Parameter.class)
                                    }, true),
                    }, 1, 3),
            new ElementRule(RANDOM_EFFECTS,
                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, 0, 3),
    };

    public String getParserDescription() {
        return "Calculates the generalized linear model likelihood of the dependent parameters given one or more blocks of independent parameters and their design matrix.";
    }

    public Class getReturnType() {
        return Likelihood.class;
    }
}
|
src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
|
/*
* GeneralizedLinearModelParser.java
*
* Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.inferencexml.distribution;
import cern.colt.matrix.impl.DenseDoubleMatrix2D;
import cern.colt.matrix.linalg.SingularValueDecomposition;
import dr.inference.distribution.GeneralizedLinearModel;
import dr.inference.distribution.LinearRegression;
import dr.inference.distribution.LogLinearModel;
import dr.inference.distribution.LogisticRegression;
import dr.inference.model.DesignMatrix;
import dr.inference.model.Likelihood;
import dr.inference.model.Parameter;
import dr.xml.*;
/**
*
*/
// XML parser for <glmModel> elements: builds a GeneralizedLinearModel
// (logistic, normal, log-normal or log-linear) from dependent variables,
// independent-variable blocks with design matrices, optional random effects
// and optional scale parameters.
public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {

    public static final String GLM_LIKELIHOOD = "glmModel";
    public static final String DEPENDENT_VARIABLES = "dependentVariables";
    public static final String INDEPENDENT_VARIABLES = "independentVariables";
    public static final String BASIS_MATRIX = "basis";
    public static final String FAMILY = "family";
    public static final String SCALE_VARIABLES = "scaleVariables";
    public static final String INDICATOR = "indicator";
    public static final String LOGISTIC_REGRESSION = "logistic";
    public static final String NORMAL_REGRESSION = "normal";
    public static final String LOG_NORMAL_REGRESSION = "logNormal";
    public static final String LOG_LINEAR = "logLinear";
    //    public static final String LOG_TRANSFORM = "logDependentTransform";
    public static final String RANDOM_EFFECTS = "randomEffects";
    public static final String CHECK_IDENTIFIABILITY = "checkIdentifiability";
    public static final String CHECK_FULL_RANK = "checkFullRank";

    public String getParserName() {
        return GLM_LIKELIHOOD;
    }

    // Parses the <glmModel> element into a concrete GLM for the requested family.
    public Object parseXMLObject(XMLObject xo) throws XMLParseException {

        //        System.err.println("PASSED 0");
        XMLObject cxo = xo.getChild(DEPENDENT_VARIABLES);
        Parameter dependentParam = null;
        if (cxo != null)
            dependentParam = (Parameter) cxo.getChild(Parameter.class);

        String family = xo.getStringAttribute(FAMILY);
        GeneralizedLinearModel glm;
        if (family.compareTo(LOGISTIC_REGRESSION) == 0) {
            glm = new LogisticRegression(dependentParam);
        } else if (family.compareTo(NORMAL_REGRESSION) == 0) {
            glm = new LinearRegression(dependentParam, false);
        } else if (family.compareTo(LOG_NORMAL_REGRESSION) == 0) {
            glm = new LinearRegression(dependentParam, true);
        } else if (family.compareTo(LOG_LINEAR) == 0) {
            glm = new LogLinearModel(dependentParam);
        } else
            throw new XMLParseException("Family '" + family + "' is not currently implemented");

        if (glm.requiresScale()) {
            cxo = xo.getChild(SCALE_VARIABLES);
            Parameter scaleParameter = null;
            //            DesignMatrix designMatrix = null;
            Parameter scaleDesign = null;
            if (cxo != null) {
                scaleParameter = (Parameter) cxo.getChild(Parameter.class);
                XMLObject gxo = cxo.getChild(INDICATOR);
                if (gxo != null)
                    scaleDesign = (Parameter) gxo.getChild(Parameter.class);
                //                designMatrix = (DesignMatrix) cxo.getChild(DesignMatrix.class);
            }
            if (scaleParameter == null)
                throw new XMLParseException("Family '" + family + "' requires scale parameters");
            if (scaleDesign == null)
                // Default: every observation maps to the first scale parameter.
                scaleDesign = new Parameter.Default(dependentParam.getDimension(), 0.0);
            else {
                if (scaleDesign.getDimension() != dependentParam.getDimension())
                    throw new XMLParseException("Scale ("+dependentParam.getDimension()+") and scaleDesign parameters ("+scaleDesign.getDimension()+") must be the same dimension");
                // Convert 1-based XML indices to 0-based internal indices.
                for (int i = 0; i < scaleDesign.getDimension(); i++) {
                    double value = scaleDesign.getParameterValue(i);
                    if (value < 1 || value > scaleParameter.getDimension())
                        throw new XMLParseException("Invalid scaleDesign value");
                    scaleDesign.setParameterValue(i, value - 1);
                }
            }
            glm.addScaleParameter(scaleParameter, scaleDesign);
        }

        //        System.err.println("START 0");
        addIndependentParameters(xo, glm, dependentParam);
        //        System.err.println("START 1");
        addRandomEffects(xo, glm, dependentParam);
        //        System.err.println("START 2");

        boolean checkIdentifiability = xo.getAttribute(CHECK_IDENTIFIABILITY, true);
        if (checkIdentifiability) {
            if (!glm.getAllIndependentVariablesIdentifiable()) {
                throw new XMLParseException("All design matrix predictors are not identifiable in "+ xo.getId());
            }
        }

        //        System.err.println("PASSED B");

        // NOTE(review): addIndependentParameters() above reads checkFullRankOfMatrix,
        // but the field is only assigned here — so on this pass it still holds its
        // default (false) and the full-rank check is effectively disabled regardless
        // of the checkFullRank attribute. This assignment should precede the call.
        checkFullRankOfMatrix = xo.getAttribute(CHECK_FULL_RANK,true);

        //        System.err.println("PASSED C");

        return glm;
    }

    // Adds every <randomEffects> child to the model after a dimension check.
    public void addRandomEffects(XMLObject xo, GeneralizedLinearModel glm,
                                 Parameter dependentParam) throws XMLParseException {
        int totalCount = xo.getChildCount();
        for (int i = 0; i < totalCount; i++) {
            if (xo.getChildName(i).compareTo(RANDOM_EFFECTS) == 0) {
                XMLObject cxo = (XMLObject) xo.getChild(i);
                Parameter randomEffect = (Parameter) cxo.getChild(Parameter.class);
                checkRandomEffectsDimensions(randomEffect, dependentParam);
                glm.addRandomEffectsParameter(randomEffect);
            }
        }
    }

    // Adds every <independentVariables> child (coefficients, design matrix and
    // optional indicator) to the model, optionally enforcing full column rank.
    public void addIndependentParameters(XMLObject xo, GeneralizedLinearModel glm,
                                         Parameter dependentParam) throws XMLParseException {
        int totalCount = xo.getChildCount();
        //        System.err.println("number of independent parameters = "+totalCount);
        for (int i = 0; i < totalCount; i++) {
            if (xo.getChildName(i).compareTo(INDEPENDENT_VARIABLES) == 0) {
                XMLObject cxo = (XMLObject) xo.getChild(i);
                Parameter independentParam = (Parameter) cxo.getChild(Parameter.class);
                DesignMatrix designMatrix = (DesignMatrix) cxo.getChild(DesignMatrix.class);
                checkDimensions(independentParam, dependentParam, designMatrix);
                cxo = cxo.getChild(INDICATOR);
                Parameter indicator = null;
                if (cxo != null) {
                    indicator = (Parameter) cxo.getChild(Parameter.class);
                    if (indicator.getDimension() != independentParam.getDimension())
                        throw new XMLParseException("dim(" + independentParam.getId() + ") != dim(" + indicator.getId() + ")");
                }
                //                System.err.println("A");
                if (checkFullRankOfMatrix) {
                    checkFullRank(designMatrix);
                }
                //                System.err.println("B");
                //                System.err.println(new Matrix(designMatrix.getParameterAsMatrix()));
                //                System.exit(-1);
                glm.addIndependentParameter(independentParam, designMatrix, indicator);
                //                System.err.println("C");
            }
        }
    }

    // Whether design matrices must be full column rank (see NOTE in parseXMLObject).
    private boolean checkFullRankOfMatrix;

    // Verifies via SVD that the design matrix has full column rank.
    private void checkFullRank(DesignMatrix designMatrix) throws XMLParseException {
        int fullRank = designMatrix.getColumnDimension();
        //        System.err.println("designMatrix getColumnDimension = "+fullRank);
        SingularValueDecomposition svd = new SingularValueDecomposition(
                new DenseDoubleMatrix2D(designMatrix.getParameterAsMatrix()));
        int realRank = svd.rank();
        if (realRank != fullRank) {
            throw new XMLParseException(
                    "rank(" + designMatrix.getId() + ") = " + realRank +
                            ".\nMatrix is not of full rank as colDim(" + designMatrix.getId() + ") = " + fullRank
            );
        }
    }

    // Checks that a random effect matches the dependent parameter's dimension.
    private void checkRandomEffectsDimensions(Parameter randomEffect, Parameter dependentParam)
            throws XMLParseException {
        if (dependentParam != null) {
            if (randomEffect.getDimension() != dependentParam.getDimension()) {
                throw new XMLParseException(
                        "dim(" + dependentParam.getId() + ") != dim(" + randomEffect.getId() + ")"
                );
            }
        }
    }

    // Checks that dependent/independent parameters are conformable with the
    // design matrix: dim(dependent) == rows, dim(independent) == columns.
    private void checkDimensions(Parameter independentParam, Parameter dependentParam, DesignMatrix designMatrix)
            throws XMLParseException {
        if (dependentParam != null) {
            if ((dependentParam.getDimension() != designMatrix.getRowDimension()) ||
                    (independentParam.getDimension() != designMatrix.getColumnDimension()))
                throw new XMLParseException(
                        "dim(" + dependentParam.getId() + ") != dim(" + designMatrix.getId() + " %*% " + independentParam.getId() + ")"
                );
        } else {
            if (independentParam.getDimension() != designMatrix.getColumnDimension()) {
                throw new XMLParseException(
                        "dim(" + independentParam.getId() + ") is incompatible with dim (" + designMatrix.getId() + ")"
                );
            }
            //            System.err.println(independentParam.getId()+" and "+designMatrix.getId());
        }
    }

    //************************************************************************
    // AbstractXMLObjectParser implementation
    //************************************************************************

    public XMLSyntaxRule[] getSyntaxRules() {
        return rules;
    }

    private final XMLSyntaxRule[] rules = {
            AttributeRule.newStringRule(FAMILY),
            AttributeRule.newBooleanRule(CHECK_IDENTIFIABILITY, true),
            AttributeRule.newBooleanRule(CHECK_FULL_RANK, true),
            new ElementRule(DEPENDENT_VARIABLES,
                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, true),
            new ElementRule(INDEPENDENT_VARIABLES,
                    new XMLSyntaxRule[]{
                            new ElementRule(Parameter.class, true),
                            new ElementRule(DesignMatrix.class),
                            new ElementRule(INDICATOR,
                                    new XMLSyntaxRule[]{
                                            new ElementRule(Parameter.class)
                                    }, true),
                    }, 1, 3),
            new ElementRule(RANDOM_EFFECTS,
                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, 0, 3),
            //            new ElementRule(BASIS_MATRIX,
            //                    new XMLSyntaxRule[]{new ElementRule(DesignMatrix.class)})
    };

    public String getParserDescription() {
        return "Calculates the generalized linear model likelihood of the dependent parameters given one or more blocks of independent parameters and their design matrix.";
    }

    public Class getReturnType() {
        return Likelihood.class;
    }
}
|
The parameter dimensions are set automatically if they are not specified explicitly.
|
src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
|
The parameter dimensions are set automatically if they are not specified explicitly.
|
<ide><path>rc/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
<ide> // System.err.println("PASSED 0");
<ide> XMLObject cxo = xo.getChild(DEPENDENT_VARIABLES);
<ide> Parameter dependentParam = null;
<del> if (cxo != null)
<add> if (cxo != null) {
<ide> dependentParam = (Parameter) cxo.getChild(Parameter.class);
<add> }
<ide>
<ide> String family = xo.getStringAttribute(FAMILY);
<ide> GeneralizedLinearModel glm;
<ide> Parameter indicator = null;
<ide> if (cxo != null) {
<ide> indicator = (Parameter) cxo.getChild(Parameter.class);
<add> if (indicator.getDimension() == 0) {
<add> // if a dimension hasn't been set, then set it automatically
<add> indicator.setDimension(independentParam.getDimension());
<add> }
<ide> if (indicator.getDimension() != independentParam.getDimension())
<ide> throw new XMLParseException("dim(" + independentParam.getId() + ") != dim(" + indicator.getId() + ")");
<ide> }
<ide> private void checkRandomEffectsDimensions(Parameter randomEffect, Parameter dependentParam)
<ide> throws XMLParseException {
<ide> if (dependentParam != null) {
<add> if (randomEffect.getDimension() == 0) {
<add> // if a dimension hasn't been set, then set it automatically
<add> randomEffect.setDimension(dependentParam.getDimension());
<add> }
<ide> if (randomEffect.getDimension() != dependentParam.getDimension()) {
<ide> throw new XMLParseException(
<ide> "dim(" + dependentParam.getId() + ") != dim(" + randomEffect.getId() + ")"
<ide> private void checkDimensions(Parameter independentParam, Parameter dependentParam, DesignMatrix designMatrix)
<ide> throws XMLParseException {
<ide> if (dependentParam != null) {
<add> if (dependentParam.getDimension() == 0) {
<add> // if a dimension hasn't been set, then set it automatically
<add> dependentParam.setDimension(designMatrix.getRowDimension());
<add> }
<ide> if ((dependentParam.getDimension() != designMatrix.getRowDimension()) ||
<ide> (independentParam.getDimension() != designMatrix.getColumnDimension()))
<ide> throw new XMLParseException(
<ide> "dim(" + dependentParam.getId() + ") != dim(" + designMatrix.getId() + " %*% " + independentParam.getId() + ")"
<ide> );
<ide> } else {
<add> if (independentParam.getDimension() == 0) {
<add> // if a dimension hasn't been set, then set it automatically
<add> independentParam.setDimension(designMatrix.getColumnDimension());
<add> }
<ide> if (independentParam.getDimension() != designMatrix.getColumnDimension()) {
<ide> throw new XMLParseException(
<ide> "dim(" + independentParam.getId() + ") is incompatible with dim (" + designMatrix.getId() + ")"
|
|
JavaScript
|
agpl-3.0
|
9ecae6d32ebbeafe5e74157a0c1a901dc095032c
| 0 |
imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms
|
/**
 * Starter for text edit view.
 *
 * @author Serhii Maksymchuk from Ubrainians for imCode
 * 16.02.18
 */
Imcms.require(
    ["imcms-text-editor-initializer", "imcms-image-editor-initializer", "tinyMCE"],

    function (textEditorInitializer, imageEditorInitializer, tinyMCE) {
        textEditorInitializer.initEditor();
        imageEditorInitializer.initEditor();
        // tinyMCE.activeEditor may still be null right after initEditor()
        // (editor creation is not guaranteed to be synchronous) — guard to
        // avoid a TypeError instead of failing the whole starter.
        if (tinyMCE.activeEditor) {
            tinyMCE.activeEditor.fire("focus");
        }
    }
);
|
src/main/webapp/js/imcms_new/edit_starters/imcms_text_edit_start.js
|
/**
 * Starter for text edit view: bootstraps the text and image editors.
 *
 * @author Serhii Maksymchuk from Ubrainians for imCode
 * 16.02.18
 */
Imcms.require(
    ["imcms-text-editor-initializer", "imcms-image-editor-initializer"],

    function (textEditor, imageEditor) {
        // Initialize both editors for the edit-text view.
        textEditor.initEditor();
        imageEditor.initEditor();
    }
);
|
IMCMS-233 - Apply new UI to the admin panel and editors:
- Editor focused when on edit text view loaded.
|
src/main/webapp/js/imcms_new/edit_starters/imcms_text_edit_start.js
|
IMCMS-233 - Apply new UI to the admin panel and editors: - Editor focused when on edit text view loaded.
|
<ide><path>rc/main/webapp/js/imcms_new/edit_starters/imcms_text_edit_start.js
<ide> * 16.02.18
<ide> */
<ide> Imcms.require(
<del> ["imcms-text-editor-initializer", "imcms-image-editor-initializer"],
<add> ["imcms-text-editor-initializer", "imcms-image-editor-initializer", "tinyMCE"],
<ide>
<del> function (textEditorInitializer, imageEditorInitializer) {
<add> function (textEditorInitializer, imageEditorInitializer, tinyMCE) {
<ide> textEditorInitializer.initEditor();
<ide> imageEditorInitializer.initEditor();
<add> tinyMCE.activeEditor.fire("focus");
<ide> }
<ide> );
|
|
Java
|
mit
|
e356b17f21b7697b0343d7a62c23b4391d94a819
| 0 |
lhrl/zheng,xiazecheng/zheng,xiazecheng/zheng,shuzheng/zheng,lhrl/zheng,shuzheng/zheng,SeerGlaucus/zheng,lhrl/zheng,SeerGlaucus/zheng,folksuperior/renren-security,xubaifu/zheng,shuzheng/zheng,glacierck/zheng,xubaifu/zheng,SeerGlaucus/zheng,lhrl/zheng,folksuperior/renren-security,xiazecheng/zheng,xubaifu/zheng,xiazecheng/zheng,shuzheng/zheng,xubaifu/zheng,glacierck/zheng,folksuperior/renren-security,glacierck/zheng,SeerGlaucus/zheng,glacierck/zheng
|
package com.zheng.upms.admin.realm;
import com.zheng.common.util.MD5Util;
import com.zheng.upms.dao.model.UpmsUser;
import com.zheng.upms.dao.model.UpmsUserExample;
import com.zheng.upms.rpc.api.UpmsUserService;
import org.apache.shiro.authc.*;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.subject.PrincipalCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.HashSet;
import java.util.Set;
/**
* Created by shuzheng on 2017/1/20.
*/
// Shiro realm backing the UPMS admin console: authenticates users against the
// upms_user table and (for now) grants every permission to any logged-in user.
public class UpmsRealm extends AuthorizingRealm {

    private static Logger _log = LoggerFactory.getLogger(UpmsRealm.class);

    @Autowired
    private UpmsUserService upmsUserService;

    /**
     * Authorization: invoked whenever a permission check is performed.
     * Currently grants the wildcard permission "*:*:*" to every principal.
     *
     * @param principalCollection principals of the current subject
     * @return authorization info holding the granted permission strings
     */
    @Override
    protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principalCollection) {
        // Current user
        UpmsUser upmsUser = (UpmsUser) principalCollection.getPrimaryPrincipal();
        // All permissions TODO: replace with real role/permission lookup
        Set<String> permissions = new HashSet<>();
        permissions.add("*:*:*");
        SimpleAuthorizationInfo simpleAuthorizationInfo = new SimpleAuthorizationInfo();
        simpleAuthorizationInfo.setStringPermissions(permissions);
        return simpleAuthorizationInfo;
    }

    /**
     * Authentication: invoked on login. Looks the user up by username and
     * verifies MD5(password + salt) against the stored hash.
     *
     * NOTE(review): salted MD5 is a weak password hash by current standards —
     * consider migrating to bcrypt/scrypt/argon2; confirm migration feasibility.
     * The String.equals comparison is also not constant-time.
     *
     * @param authenticationToken carries the submitted username and password
     * @return authentication info for the matched user
     * @throws AuthenticationException if the account is unknown, the password
     *         is wrong, or the account is locked
     */
    @Override
    protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken authenticationToken) throws AuthenticationException {
        String username = (String) authenticationToken.getPrincipal();
        String password = new String((char[]) authenticationToken.getCredentials());
        // Look up the user record by username
        UpmsUserExample upmsUserExample = new UpmsUserExample();
        upmsUserExample.createCriteria()
                .andUsernameEqualTo(username);
        UpmsUser upmsUser = upmsUserService.selectFirstByExample(upmsUserExample);
        if (null == upmsUser) {
            throw new UnknownAccountException("帐号不存在!");
        }
        if (!upmsUser.getPassword().equals(MD5Util.MD5(password + upmsUser.getSalt()))) {
            throw new IncorrectCredentialsException("密码错误!");
        }
        // locked == 1 means the account has been administratively disabled
        if (upmsUser.getLocked() == 1) {
            throw new LockedAccountException("账号已被锁定!");
        }
        return new SimpleAuthenticationInfo(upmsUser, password, getName());
    }
}
|
zheng-upms/zheng-upms-server/src/main/java/com/zheng/upms/admin/realm/UpmsRealm.java
|
package com.zheng.upms.admin.realm;
import com.zheng.common.util.MD5Util;
import com.zheng.upms.dao.model.UpmsUser;
import com.zheng.upms.dao.model.UpmsUserExample;
import com.zheng.upms.rpc.api.UpmsUserService;
import org.apache.shiro.authc.*;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.subject.PrincipalCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.HashSet;
import java.util.Set;
/**
* Created by shuzheng on 2017/1/20.
*/
public class UpmsRealm extends AuthorizingRealm {
private static Logger _log = LoggerFactory.getLogger(UpmsRealm.class);
@Autowired
private UpmsUserService upmsUserService;
/**
* 授权:验证权限时调用
* @param principalCollection
* @return
*/
@Override
protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principalCollection) {
// 当前用户
UpmsUser upmsUser = (UpmsUser) principalCollection.getPrimaryPrincipal();
_log.info("授权:upmsUser={}", upmsUser);
// 全部权限 TODO
Set<String> permissions = new HashSet<>();
permissions.add("*:*:*");
SimpleAuthorizationInfo simpleAuthorizationInfo = new SimpleAuthorizationInfo();
simpleAuthorizationInfo.setStringPermissions(permissions);
return simpleAuthorizationInfo;
}
/**
* 认证:登录时调用
* @param authenticationToken
* @return
* @throws AuthenticationException
*/
@Override
protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken authenticationToken) throws AuthenticationException {
String username = (String) authenticationToken.getPrincipal();
String password = new String((char[]) authenticationToken.getCredentials());
_log.info("认证:username={}, password={}", username, password);
// 查询用户信息
UpmsUserExample upmsUserExample = new UpmsUserExample();
upmsUserExample.createCriteria()
.andUsernameEqualTo(username);
UpmsUser upmsUser = upmsUserService.selectFirstByExample(upmsUserExample);
if (null == upmsUser) {
throw new UnknownAccountException("帐号不存在!");
}
if (!upmsUser.getPassword().equals(MD5Util.MD5(password + upmsUser.getSalt()))) {
throw new IncorrectCredentialsException("密码错误!");
}
if (upmsUser.getLocked() == 1) {
throw new LockedAccountException("账号已被锁定!");
}
return new SimpleAuthenticationInfo(upmsUser, password, getName());
}
}
|
删除测试日志
|
zheng-upms/zheng-upms-server/src/main/java/com/zheng/upms/admin/realm/UpmsRealm.java
|
删除测试日志
|
<ide><path>heng-upms/zheng-upms-server/src/main/java/com/zheng/upms/admin/realm/UpmsRealm.java
<ide> protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principalCollection) {
<ide> // 当前用户
<ide> UpmsUser upmsUser = (UpmsUser) principalCollection.getPrimaryPrincipal();
<del> _log.info("授权:upmsUser={}", upmsUser);
<ide>
<ide> // 全部权限 TODO
<ide> Set<String> permissions = new HashSet<>();
<ide> protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken authenticationToken) throws AuthenticationException {
<ide> String username = (String) authenticationToken.getPrincipal();
<ide> String password = new String((char[]) authenticationToken.getCredentials());
<del> _log.info("认证:username={}, password={}", username, password);
<ide>
<ide> // 查询用户信息
<ide> UpmsUserExample upmsUserExample = new UpmsUserExample();
|
|
Java
|
lgpl-2.1
|
646953de8fb30140440e70b54f763fb9db5b8848
| 0 |
dezgeg/titokone,OlliV/titokone,OlliV/titokone,titokone/titokone,titokone/titokone
|
package fi.hu.cs.titokone;
import fi.hu.cs.ttk91.TTK91CompileSource;
import java.util.logging.Logger;
import java.util.ResourceBundle;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.FileReader;
import java.io.FileNotFoundException;
import java.text.ParseException;
import java.net.URL;
import java.net.URLClassLoader;
/** This class transforms files into various sorts of buffer classes
depending on who needs them, and saves these buffer classes to
files when needed. The buffer classes will not be dependent on
files and I/O operations anymore, and therefore will not throw
eg. IOExceptions when read. */
public class FileHandler {
/** This logger will be used for logging the I/O activities. */
private Logger logger;
/** This class has its own logger. */
public static String loggerName = "fi.hu.cs.titokone.filehandler";
/** Read only access to file */
public static final int READ_ACCESS = 1;
/** Append access to file */
public static final int APPEND_ACCESS = 2;
/** Write access to file */
public static final int WRITE_ACCESS = 3;
/** This class sets up a new FileHandler and sets up its Logger. */
public FileHandler() {}
/** This function loads up a Source file from a given file.
@param srcFile The identifier of the file to read from.
@return A source instance which is no longer dependent on I/O.
@throws IOException If an I/O error occurs. Eg. one of the possible
IOExceptions is FileNotFoundException. */
public Source loadSource(File srcFile) throws IOException {
return new Source(loadFileContentsToString(srcFile).toString());
}
/** This method is used to save a source that has been modified.
@param src The source object to save to file.
@param srcFile The file to save the source to.
@throws IOException If an I/O error occurds. */
public void saveSource(Source src, File srcFile) throws IOException {
saveStringToFile(src.getSource(), srcFile);
}
/** This function loads a settings file into a StringBuffer.
@param settingsFile The identifier of the file to read from.
@return A StringBuffer which no longer depends on I/O.
@throws IOException If an I/O error occurs. Eg. one of the possible
IOExceptions is FileNotFoundException. */
public StringBuffer loadSettings(File settingsFile) throws IOException {
return loadFileContentsToString(settingsFile);
}
/** This function loads a settings input stream to a StringBuffer.
@param settingsStream An input stream to read the contents
from.
@return A StringBuffer containing the stream's contents,
linebreaks unmodified, or null if the settingsStream was null.
@throws IOException If an I/O error occurs while reading or
closing the stream. */
public StringBuffer loadSettings(InputStream settingsStream)
throws IOException {
StringBuffer result;
BufferedReader reader;
if(settingsStream == null)
return null;
reader = new BufferedReader(new InputStreamReader(settingsStream));
return loadReaderContentsToString(reader);
}
/** This method saves settings data from a StringBuffer to a file.
The line separator will be
System.getProperty("line.separator", "\n").
@param settingsData The settings data in a StringBuffer in the
form it is to be saved in. The linebreaks in the file will be \ns.
@param settingsFile The identifier of the file to save to.
@throws IOException If an I/O error occurs, eg. the directory
the file should be in does not exist or we cannot write to it. */
public void saveSettings(String settingsData, File settingsFile)
throws IOException {
saveStringToFile(settingsData, settingsFile);
}
/** This function loads a Binary from a binary .b91 file and
returns the result. The Binary class checks itself upon creation
and throws a ParseException if it is not syntactically correct.
@param binaryFile Identifier of the file to read from.
@return An Binary instance containing the contents of the
.b91 file.
@throws IOException If an I/O error occurs. Eg. one of the possible
IOExceptions is FileNotFoundException.
@throws ParseException If the file does not contain a valid
binary. */
public Binary loadBinary(File binaryFile)
throws IOException, ParseException {
return new Binary(loadFileContentsToString(binaryFile).toString());
}
/** This method saves a Binary to file in a .b91 binary format.
@param bin The binary to save to file.
@param binarySaveFile The identifier for the file to save to.
@throws IOException If an I/O error occurs, eg. the given file
cannot be written to. */
public void saveBinary(Binary bin, File binarySaveFile)
throws IOException {
saveStringToFile(bin.toString(), binarySaveFile);
}
/** This method loads a "stdin" file representing the disk into
a string. The contents should be integers delimited by \n,
\r or \r\n, but the loader does not check that this is the
case.
@param stdinFile The identifier for the file to read from.
@return A stringbuffer containing the contents of the file.
@throws IOException If an I/O error occurs, eg. the given
file is not found. */
public StringBuffer loadStdIn(File stdinFile) throws IOException {
return loadFileContentsToString(stdinFile);
}
/** This method appends data to a stdout file. If the file does
not exist, it is created.
@param dataItem The data to append to the file (a newline is
added automagically).
@param stdoutFile The file to append to.
@throws IOException If an I/O error occurs. */
public void appendDataToStdOut(String dataItem, File stdoutFile)
throws IOException {
boolean fileExisted = stdoutFile.exists();
// Open the file in append mode.
BufferedWriter writer = new BufferedWriter(new FileWriter(stdoutFile,
true));
if(fileExisted)
writer.newLine();
writer.write(dataItem, 0, dataItem.length());
writer.flush();
writer.close();
}
/** This method attempts to load a resource bundle from a file
(with an URLClassLoader). It bundles up the various exceptions
possibly created by this into ResourceLoadFailedException.
@param rbFile The filename to load and instantiate the
ResourceBundle from.
@return A ResourceBundle found from the file.
@throws ResourceLoadFailedException If the file load would cast
an IOException, or the class loading would cast a
ClassNotFoundException or the instantiation would cast a
InstantiationException or the cast a ClassCastException. */
public ResourceBundle loadResourceBundle(File rbFile)
throws ResourceLoadFailedException {
Class theClass;
Object translations;
String className, errorMessage;
String errorParams[] = new String[2];
Logger logger = Logger.getLogger(getClass().getPackage().getName());
URL[] url = new URL[1];
// Remove .class from the file. Note that package names will not be added.
// We can't determine them sensibly from the file name.
className = changeExtension(rbFile, "").getName();
try {
url[0] = rbFile.getParentFile().toURL(); // MalformedURLException, anyone?
URLClassLoader loader = new URLClassLoader(url); // SecurityExcp..
theClass = loader.loadClass(className); // ClassNotFoundExcp..?
// InstantiationException or IllegalAccessException, anyone?
translations = theClass.newInstance();
return (ResourceBundle) translations; // ClassCastException?
}
catch(Exception originalException) {
// Throw an exception with a message like "<exception name> in
// loadResourceBundle(): <original exception message>".
errorParams[0] = originalException.getClass().getName();
errorParams[1] = originalException.getMessage();
errorMessage = new Message("{0} in loadResourceBundle(): {1}",
errorParams).toString();
logger.fine(errorMessage);
throw new ResourceLoadFailedException(errorMessage);
}
}
/** This function is a private assistant method for FileHandler and
it loads the contents of the given file into a string and returns
that string. It may throw an IOException in case of a read error.
*/
private StringBuffer loadFileContentsToString(File loadFile)
throws IOException {
BufferedReader loadFileContents =
new BufferedReader(new FileReader(loadFile));
return loadReaderContentsToString(loadFileContents);
}
/** This function is a private assistant method, which loads the
contents of a given reader into a string and returns that string.
The lines will be read using .readLine() and recombined with
\ns.
@throws IOException If an I/O error occurs while reading the file.
*/
private StringBuffer loadReaderContentsToString(BufferedReader reader)
throws IOException {
StringBuffer result;
String line = "";
result = new StringBuffer("");
// readLine() returns null when the end of the stream has been
// reached.
while(line != null) {
result.append(line);
line = reader.readLine();
if(line != null) line += "\n"; // (Result-str is internally used.)
}
reader.close();
return result;
}
/** This method is a private helper method which handles saving strings
to files. */
private void saveStringToFile(String str, File saveFile)
throws IOException {
if(!saveFile.exists())
saveFile.createNewFile();
BufferedWriter saveFileWriter =
new BufferedWriter(new FileWriter(saveFile));
saveFileWriter.write(str, 0, str.length());
saveFileWriter.flush();
saveFileWriter.close();
}
/** This method changes the extension of the given filename to
newExtension and returns the new filename as a File object.
File extensions are considered to be the part after the last period
in the File.getName(). If there are no periods in that part,
the file is considered to be without an extension and newExtension
is added. */
public File changeExtension(File f, String newExtension) {
String filenamestr;
File parent;
filenamestr = f.getName();
parent = f.getParentFile();
return new File(parent, changeExtensionStr(filenamestr,
newExtension));
}
/** This method returns the first string modified so that the part of
it following the last period is removed, including the period,
and the result is this modified followed by newExtension. If
newExtension is not an empty string, the two are separated with a
".". */
private String changeExtensionStr(String filename, String newExtension) {
String result;
int lastPeriod;
lastPeriod = filename.lastIndexOf(".");
if(lastPeriod == -1)
lastPeriod = filename.length();
result = filename.substring(0, lastPeriod);
if(!newExtension.equals(""))
result += "." + newExtension;
return result;
}
/* This function tests if a given file can be accessed for
reading, writing or appending operations. It throws a home-made
IOException accordingly.
@throws FileNotFoundException If a file checked for read access
does not exist.
@throws IOException (Excluding the above.) If a file does not
allow the sort of access which is asked for. */
public void testAccess(File accessedFile, int accessType)
throws IOException {
if(accessedFile.exists() == false && accessType == READ_ACCESS) {
throw new FileNotFoundException(accessedFile.getName());
}
if(accessType == READ_ACCESS && accessedFile.canRead() == false) {
String msg = new Message("No read access to {0}.",
accessedFile.getName()).toString();
throw new IOException(msg);
}
else if((accessType == WRITE_ACCESS || accessType == APPEND_ACCESS) &&
accessedFile.canWrite() == false) {
String msg = new Message("No write access to {0}.",
accessedFile.getName()).toString();
throw new IOException(msg);
}
}
}
|
fi/hu/cs/titokone/FileHandler.java
|
package fi.hu.cs.titokone;
import fi.hu.cs.ttk91.TTK91CompileSource;
import java.util.logging.Logger;
import java.util.ResourceBundle;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.FileReader;
import java.io.FileNotFoundException;
import java.text.ParseException;
import java.net.URL;
import java.net.URLClassLoader;
/** This class transforms files into various sorts of buffer classes
depending on who needs them, and saves these buffer classes to
files when needed. The buffer classes will not be dependent on
files and I/O operations anymore, and therefore will not throw
eg. IOExceptions when read. */
public class FileHandler {
/** This logger will be used for logging the I/O activities. */
private Logger logger;
/** This class has its own logger. */
public static String loggerName = "fi.hu.cs.titokone.filehandler";
/** Read only access to file */
public static final int READ_ACCESS = 1;
/** Append access to file */
public static final int APPEND_ACCESS = 2;
/** Write access to file */
public static final int WRITE_ACCESS = 3;
/** This class sets up a new FileHandler and sets up its Logger. */
public FileHandler() {}
/** This function loads up a Source file from a given file.
@param filename The identifier of the file to read from.
@return A source instance which is no longer dependent on I/O.
@throws IOException If an I/O error occurs. Eg. one of the possible
IOExceptions is FileNotFoundException. */
public Source loadSource(File srcFile) throws IOException {
return new Source(loadFileContentsToString(srcFile).toString());
}
/** This method is used to save a source that has been modified.
@param src The source object to save to file.
@param srcFile The file to save the source to.
@throws IOException If an I/O error occurds. */
public void saveSource(Source src, File srcFile) throws IOException {
saveStringToFile(src.getSource(), srcFile);
}
/** This function loads a settings file into a StringBuffer.
@param filename The identifier of the file to read from.
@return A StringBuffer which no longer depends on I/O.
@throws IOException If an I/O error occurs. Eg. one of the possible
IOExceptions is FileNotFoundException. */
public StringBuffer loadSettings(File settingsFile) throws IOException {
return loadFileContentsToString(settingsFile);
}
/** This function loads a settings input stream to a StringBuffer.
@param settingsStream An input stream to read the contents
from.
@return A StringBuffer containing the stream's contents,
linebreaks unmodified, or null if the settingsStream was null.
@throws IOException If an I/O error occurs while reading or
closing the stream. */
public StringBuffer loadSettings(InputStream settingsStream)
throws IOException {
StringBuffer result;
BufferedReader reader;
if(settingsStream == null)
return null;
reader = new BufferedReader(new InputStreamReader(settingsStream));
return loadReaderContentsToString(reader);
}
/** This method saves settings data from a StringBuffer to a file.
The line separator will be
System.getProperty("line.separator", "\n").
@param settingsData The settings data in a StringBuffer in the
form it is to be saved in. The linebreaks in the file will be \ns.
@param filename The identifier of the file to save to.
@throws IOException If an I/O error occurs, eg. the directory
the file should be in does not exist or we cannot write to it. */
public void saveSettings(String settingsData, File settingsFile)
throws IOException {
saveStringToFile(settingsData, settingsFile);
}
/** This function loads a Binary from a binary .b91 file and
returns the result. The Binary class checks itself upon creation
and throws a ParseException if it is not syntactically correct.
@param filename Identifier of the file to read from.
@return An Binary instance containing the contents of the
.b91 file.
@throws IOException If an I/O error occurs. Eg. one of the possible
IOExceptions is FileNotFoundException.
@throws ParseException If the file does not contain a valid
binary. */
public Binary loadBinary(File binaryFile)
throws IOException, ParseException {
return new Binary(loadFileContentsToString(binaryFile).toString());
}
/** This method saves a Binary to file in a .b91 binary format.
@param binary The binary to save to file.
@param filename The identifier for the file to save to.
@throws IOException If an I/O error occurs, eg. the given file
cannot be written to. */
public void saveBinary(Binary bin, File binarySaveFile)
throws IOException {
saveStringToFile(bin.toString(), binarySaveFile);
}
/** This method loads a "stdin" file representing the disk into
a string. The contents should be integers delimited by \n,
\r or \r\n, but the loader does not check that this is the
case.
@param filename The identifier for the file to read from.
@return A stringbuffer containing the contents of the file.
@throws IOException If an I/O error occurs, eg. the given
file is not found. */
public StringBuffer loadStdIn(File stdinFile) throws IOException {
return loadFileContentsToString(stdinFile);
}
/** This method saves a "stdout" file representing the disk.
The contents to be saved to the file should be integers
delimited by \n, \r or \r\n, but no checking is made.
@param contents The string to save to the file.
@param filename The file to save the given string to.
@throws IOException If an I/O error occurs, eg. the given
file cannot be written to.
This method is not used in practice, as appending the
data one line at a time has been found to be more convenient. */
//public void saveStdOut(String contents, File stdoutFile)
// throws IOException {
// saveStringToFile(contents, stdoutFile);
//}
/** This method appends data to a stdout file. If the file does
not exist, it is created.
@param dataItem The data to append to the file (a newline is
added automagically).
@param stdoutFile The file to append to.
@throws IOException If an I/O error occurs. */
public void appendDataToStdOut(String dataItem, File stdoutFile)
throws IOException {
boolean fileExisted = stdoutFile.exists();
// Open the file in append mode.
BufferedWriter writer = new BufferedWriter(new FileWriter(stdoutFile,
true));
if(fileExisted)
writer.newLine();
writer.write(dataItem, 0, dataItem.length());
writer.flush();
writer.close();
}
/** This method attempts to load a resource bundle from a file
(with an URLClassLoader). It bundles up the various exceptions
possibly created by this into ResourceLoadFailedException.
@param filename The filename to load and instantiate the
ResourceBundle from.
@return A ResourceBundle found from the file.
@throws ResourceLoadFailedException If the file load would cast
an IOException, or the class loading would cast a
ClassNotFoundException or the instantiation would cast a
InstantiationException or the cast a ClassCastException. */
public ResourceBundle loadResourceBundle(File rbFile)
throws ResourceLoadFailedException {
Class theClass;
Object translations;
String className, errorMessage;
String errorParams[] = new String[2];
Logger logger = Logger.getLogger(getClass().getPackage().getName());
URL[] url = new URL[1];
// Remove .class from the file. Note that package names will not be added.
// We can't determine them sensibly from the file name.
className = changeExtension(rbFile, "").getName();
try {
url[0] = rbFile.getParentFile().toURL(); // MalformedURLException, anyone?
URLClassLoader loader = new URLClassLoader(url); // SecurityExcp..
theClass = loader.loadClass(className); // ClassNotFoundExcp..?
// InstantiationException or IllegalAccessException, anyone?
translations = theClass.newInstance();
return (ResourceBundle) translations; // ClassCastException?
}
catch(Exception originalException) {
// Throw an exception with a message like "<exception name> in
// loadResourceBundle(): <original exception message>".
errorParams[0] = originalException.getClass().getName();
errorParams[1] = originalException.getMessage();
errorMessage = new Message("{0} in loadResourceBundle(): {1}",
errorParams).toString();
logger.fine(errorMessage);
throw new ResourceLoadFailedException(errorMessage);
}
}
/** This function is a private assistant method for FileHandler and
it loads the contents of the given file into a string and returns
that string. It may throw an IOException in case of a read error.
*/
private StringBuffer loadFileContentsToString(File loadFile)
throws IOException {
BufferedReader loadFileContents =
new BufferedReader(new FileReader(loadFile));
return loadReaderContentsToString(loadFileContents);
}
/** This function is a private assistant method, which loads the
contents of a given reader into a string and returns that string.
The lines will be read using .readLine() and recombined with
\ns.
@throws IOException If an I/O error occurs while reading the file.
*/
private StringBuffer loadReaderContentsToString(BufferedReader reader)
throws IOException {
StringBuffer result;
String line = "";
result = new StringBuffer("");
// readLine() returns null when the end of the stream has been
// reached.
while(line != null) {
result.append(line);
line = reader.readLine();
if(line != null) line += "\n"; // (Result-str is internally used.)
}
reader.close();
return result;
}
/** This method is a private helper method which handles saving strings
to files. */
private void saveStringToFile(String str, File saveFile)
throws IOException {
if(!saveFile.exists())
saveFile.createNewFile();
BufferedWriter saveFileWriter =
new BufferedWriter(new FileWriter(saveFile));
saveFileWriter.write(str, 0, str.length());
saveFileWriter.flush();
saveFileWriter.close();
}
/** This method changes the extension of the given filename to
newExtension and returns the new filename as a File object.
File extensions are considered to be the part after the last period
in the File.getName(). If there are no periods in that part,
the file is considered to be without an extension and newExtension
is added. */
public File changeExtension(File f, String newExtension) {
String filenamestr;
File parent;
filenamestr = f.getName();
parent = f.getParentFile();
return new File(parent, changeExtensionStr(filenamestr,
newExtension));
}
/** This method returns the first string modified so that the part of
it following the last period is removed, including the period,
and the result is this modified followed by newExtension. If
newExtension is not an empty string, the two are separated with a
".". */
private String changeExtensionStr(String filename, String newExtension) {
String result;
int lastPeriod;
lastPeriod = filename.lastIndexOf(".");
if(lastPeriod == -1)
lastPeriod = filename.length();
result = filename.substring(0, lastPeriod);
if(!newExtension.equals(""))
result += "." + newExtension;
return result;
}
/* This function tests if a given file can be accessed for
reading, writing or appending operations. It throws a home-made
IOException accordingly.
@throws FileNotFoundException If a file checked for read access
does not exist.
@throws IOException (Excluding the above.) If a file does not
allow the sort of access which is asked for. */
public void testAccess(File accessedFile, int accessType)
throws IOException {
if(accessedFile.exists() == false && accessType == READ_ACCESS) {
throw new FileNotFoundException(accessedFile.getName());
}
if(accessType == READ_ACCESS && accessedFile.canRead() == false) {
String msg = new Message("No read access to {0}.",
accessedFile.getName()).toString();
throw new IOException(msg);
}
else if((accessType == WRITE_ACCESS || accessType == APPEND_ACCESS) &&
accessedFile.canWrite() == false) {
String msg = new Message("No write access to {0}.",
accessedFile.getName()).toString();
throw new IOException(msg);
}
}
}
|
Javadoc korjattu.
|
fi/hu/cs/titokone/FileHandler.java
|
Javadoc korjattu.
|
<ide><path>i/hu/cs/titokone/FileHandler.java
<ide> public FileHandler() {}
<ide>
<ide> /** This function loads up a Source file from a given file.
<del> @param filename The identifier of the file to read from.
<add> @param srcFile The identifier of the file to read from.
<ide> @return A source instance which is no longer dependent on I/O.
<ide> @throws IOException If an I/O error occurs. Eg. one of the possible
<ide> IOExceptions is FileNotFoundException. */
<ide> }
<ide>
<ide> /** This function loads a settings file into a StringBuffer.
<del> @param filename The identifier of the file to read from.
<add> @param settingsFile The identifier of the file to read from.
<ide> @return A StringBuffer which no longer depends on I/O.
<ide> @throws IOException If an I/O error occurs. Eg. one of the possible
<ide> IOExceptions is FileNotFoundException. */
<ide> System.getProperty("line.separator", "\n").
<ide> @param settingsData The settings data in a StringBuffer in the
<ide> form it is to be saved in. The linebreaks in the file will be \ns.
<del> @param filename The identifier of the file to save to.
<add> @param settingsFile The identifier of the file to save to.
<ide> @throws IOException If an I/O error occurs, eg. the directory
<ide> the file should be in does not exist or we cannot write to it. */
<ide> public void saveSettings(String settingsData, File settingsFile)
<ide> /** This function loads a Binary from a binary .b91 file and
<ide> returns the result. The Binary class checks itself upon creation
<ide> and throws a ParseException if it is not syntactically correct.
<del> @param filename Identifier of the file to read from.
<add> @param binaryFile Identifier of the file to read from.
<ide> @return An Binary instance containing the contents of the
<ide> .b91 file.
<ide> @throws IOException If an I/O error occurs. Eg. one of the possible
<ide>
<ide>
<ide> /** This method saves a Binary to file in a .b91 binary format.
<del> @param binary The binary to save to file.
<del> @param filename The identifier for the file to save to.
<add> @param bin The binary to save to file.
<add> @param binarySaveFile The identifier for the file to save to.
<ide> @throws IOException If an I/O error occurs, eg. the given file
<ide> cannot be written to. */
<ide> public void saveBinary(Binary bin, File binarySaveFile)
<ide> a string. The contents should be integers delimited by \n,
<ide> \r or \r\n, but the loader does not check that this is the
<ide> case.
<del> @param filename The identifier for the file to read from.
<add> @param stdinFile The identifier for the file to read from.
<ide> @return A stringbuffer containing the contents of the file.
<ide> @throws IOException If an I/O error occurs, eg. the given
<ide> file is not found. */
<ide> public StringBuffer loadStdIn(File stdinFile) throws IOException {
<ide> return loadFileContentsToString(stdinFile);
<ide> }
<del>
<del> /** This method saves a "stdout" file representing the disk.
<del> The contents to be saved to the file should be integers
<del> delimited by \n, \r or \r\n, but no checking is made.
<del> @param contents The string to save to the file.
<del> @param filename The file to save the given string to.
<del> @throws IOException If an I/O error occurs, eg. the given
<del> file cannot be written to.
<del> This method is not used in practice, as appending the
<del> data one line at a time has been found to be more convenient. */
<del> //public void saveStdOut(String contents, File stdoutFile)
<del> // throws IOException {
<del> // saveStringToFile(contents, stdoutFile);
<del> //}
<ide>
<ide> /** This method appends data to a stdout file. If the file does
<ide> not exist, it is created.
<ide> /** This method attempts to load a resource bundle from a file
<ide> (with an URLClassLoader). It bundles up the various exceptions
<ide> possibly created by this into ResourceLoadFailedException.
<del> @param filename The filename to load and instantiate the
<add> @param rbFile The filename to load and instantiate the
<ide> ResourceBundle from.
<ide> @return A ResourceBundle found from the file.
<ide> @throws ResourceLoadFailedException If the file load would cast
|
|
Java
|
bsd-2-clause
|
a4b582594373f67d5697e3371bce6a3178ec210e
| 0 |
skudi/DarkBot,DarkStorm652/DarkBot,NoChanceSD/DarkBot,skudi/DarkBot
|
package org.darkstorm.darkbot.minecraftbot.ai;
import org.darkstorm.darkbot.minecraftbot.MinecraftBot;
import org.darkstorm.darkbot.minecraftbot.world.World;
import org.darkstorm.darkbot.minecraftbot.world.WorldLocation;
import org.darkstorm.darkbot.minecraftbot.world.block.BlockLocation;
import org.darkstorm.darkbot.minecraftbot.world.entity.MainPlayerEntity;
import org.darkstorm.darkbot.minecraftbot.world.pathfinding.PathNode;
import org.darkstorm.darkbot.minecraftbot.world.pathfinding.PathSearch;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
public class WalkTask implements Task {
private final MinecraftBot bot;
private final ExecutorService service = Executors.newSingleThreadExecutor();
private BlockLocation target;
private Future<PathNode> thread;
private PathNode nextStep;
private int ticksSinceStepChange = 0;
private int timeout = 0;
private long startTime = 0;
public WalkTask(MinecraftBot bot) {
this.bot = bot;
}
public synchronized BlockLocation getTarget() {
return target;
}
public synchronized void setTarget(final BlockLocation target) {
this.target = target;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
@Override
public synchronized boolean isPreconditionMet() {
return target != null;
}
@Override
public synchronized boolean start(String... options) {
if (options.length == 3) {
try {
int x = Integer.parseInt(options[0]);
int y = Integer.parseInt(options[1]);
int z = Integer.parseInt(options[2]);
target = new BlockLocation(x, y, z);
} catch (Exception e) {
return false;
}
}
if (target == null)
return false;
System.out.println("Searching for path to " + target);
thread = service.submit(new Callable<PathNode>() {
@Override
public PathNode call() throws Exception {
World world = bot.getWorld();
MainPlayerEntity player = bot.getPlayer();
if (world == null || player == null || target == null)
return null;
BlockLocation ourLocation = new BlockLocation((int) (Math
.round(player.getX() - 0.5)), (int) player.getY(),
(int) (Math.round(player.getZ() - 0.5)));
PathSearch search = world.getPathFinder().provideSearch(
ourLocation, target);
while (!search.isDone() && !Thread.interrupted())
search.step();
return search.getPath();
}
});
startTime = System.currentTimeMillis();
return true;
}
@Override
public synchronized void stop() {
target = null;
if (thread != null && !thread.isDone()) {
thread.cancel(true);
thread = null;
}
nextStep = null;
ticksSinceStepChange = 0;
timeout = 0;
startTime = 0;
}
@Override
public synchronized void run() {
if (thread != null && !thread.isDone()) {
if (timeout > 0 && System.currentTimeMillis() - startTime > timeout)
stop();
return;
} else if (thread != null && thread.isDone()) {
try {
nextStep = thread.get();
} catch (Exception exception) {
}
thread = null;
} else if (nextStep != null) {
MainPlayerEntity player = bot.getPlayer();
if (nextStep.getNext() != null
&& player.getDistanceToSquared(nextStep.getLocation()) > player
.getDistanceToSquared(nextStep.getNext()
.getLocation()))
nextStep = nextStep.getNext();
if (player.getDistanceToSquared(nextStep.getLocation()) > 10) {
stop();
return;
}
ticksSinceStepChange++;
if (ticksSinceStepChange > 80) {
stop();
return;
}
double speed = 0.12;
WorldLocation location = nextStep.getLocation();
double x = location.getX(), y = location.getY(), z = location
.getZ();
if (player.getY() != y) {
speed *= 2;
player.setY(player.getY() + (player.getY() < y ? Math.min(speed, y - player.getY()) : Math.max(-speed, y - player.getY())));
}
if (player.getX() != (x + 0.5D)) {
player.setX(player.getX()
+ (player.getX() < (x + 0.5D) ? Math.min(speed,
(x + 0.5D) - player.getX()) : Math.max(-speed,
(x + 0.5D) - player.getX())));
}
if (player.getZ() != (z + 0.5D)) {
player.setZ(player.getZ()
+ (player.getZ() < (z + 0.5D) ? Math.min(speed,
(z + 0.5D) - player.getZ()) : Math.max(-speed,
(z + 0.5D) - player.getZ())));
}
if (player.getX() == (x + 0.5D) && player.getY() == y
&& player.getZ() == (z + 0.5D)) {
nextStep = nextStep.getNext();
ticksSinceStepChange = 0;
}
}
}
public synchronized boolean isMoving() {
return nextStep != null;
}
@Override
public synchronized boolean isActive() {
return target != null && (thread != null || nextStep != null);
}
@Override
public TaskPriority getPriority() {
return TaskPriority.NORMAL;
}
@Override
public boolean isExclusive() {
return true;
}
@Override
public boolean ignoresExclusive() {
return false;
}
@Override
public String getName() {
return "Walk";
}
@Override
public String getOptionDescription() {
return "[x y z]";
}
}
|
src/minecraft/java/org/darkstorm/darkbot/minecraftbot/ai/WalkTask.java
|
package org.darkstorm.darkbot.minecraftbot.ai;
import java.util.concurrent.*;
import org.darkstorm.darkbot.minecraftbot.MinecraftBot;
import org.darkstorm.darkbot.minecraftbot.world.*;
import org.darkstorm.darkbot.minecraftbot.world.block.BlockLocation;
import org.darkstorm.darkbot.minecraftbot.world.entity.MainPlayerEntity;
import org.darkstorm.darkbot.minecraftbot.world.pathfinding.*;
public class WalkTask implements Task {
private final MinecraftBot bot;
private final ExecutorService service = Executors.newSingleThreadExecutor();
private BlockLocation target;
private Future<PathNode> thread;
private PathNode nextStep;
private int ticksSinceStepChange = 0;
private int timeout = 0;
private long startTime = 0;
public WalkTask(MinecraftBot bot) {
this.bot = bot;
}
public synchronized BlockLocation getTarget() {
return target;
}
public synchronized void setTarget(final BlockLocation target) {
this.target = target;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
@Override
public synchronized boolean isPreconditionMet() {
return target != null;
}
@Override
public synchronized boolean start(String... options) {
if(options.length == 3) {
try {
int x = Integer.parseInt(options[0]);
int y = Integer.parseInt(options[1]);
int z = Integer.parseInt(options[2]);
target = new BlockLocation(x, y, z);
} catch(Exception e) {
return false;
}
}
if(target == null)
return false;
System.out.println("Searching for path to " + target);
thread = service.submit(new Callable<PathNode>() {
@Override
public PathNode call() throws Exception {
World world = bot.getWorld();
MainPlayerEntity player = bot.getPlayer();
if(world == null || player == null || target == null)
return null;
BlockLocation ourLocation = new BlockLocation((int) (Math
.round(player.getX() - 0.5)), (int) player.getY(),
(int) (Math.round(player.getZ() - 0.5)));
PathSearch search = world.getPathFinder().provideSearch(
ourLocation, target);
while(!search.isDone() && !Thread.interrupted())
search.step();
return search.getPath();
}
});
startTime = System.currentTimeMillis();
return true;
}
@Override
public synchronized void stop() {
target = null;
if(thread != null && !thread.isDone()) {
thread.cancel(true);
thread = null;
}
nextStep = null;
ticksSinceStepChange = 0;
timeout = 0;
startTime = 0;
}
@Override
public synchronized void run() {
if(thread != null && !thread.isDone()) {
if(timeout > 0 && System.currentTimeMillis() - startTime > timeout)
stop();
return;
} else if(thread != null && thread.isDone()) {
try {
nextStep = thread.get();
} catch(Exception exception) {}
thread = null;
} else if(nextStep != null) {
MainPlayerEntity player = bot.getPlayer();
if(nextStep.getNext() != null
&& player.getDistanceToSquared(nextStep.getLocation()) > player
.getDistanceToSquared(nextStep.getNext()
.getLocation()))
nextStep = nextStep.getNext();
if(player.getDistanceToSquared(nextStep.getLocation()) > 10) {
stop();
return;
}
ticksSinceStepChange++;
if(ticksSinceStepChange > 80) {
stop();
return;
}
double speed = 0.12;
WorldLocation location = nextStep.getLocation();
double x = location.getX(), y = location.getY(), z = location
.getZ();
if(player.getX() != (x + 0.5D)) {
player.setX(player.getX()
+ (player.getX() < (x + 0.5D) ? Math.min(speed,
(x + 0.5D) - player.getX()) : Math.max(-speed,
(x + 0.5D) - player.getX())));
}
if(player.getZ() != (z + 0.5D)) {
player.setZ(player.getZ()
+ (player.getZ() < (z + 0.5D) ? Math.min(speed,
(z + 0.5D) - player.getZ()) : Math.max(-speed,
(z + 0.5D) - player.getZ())));
}
if(player.getY() != y) {
speed *= 3;
player.setY(player.getY()
+ (player.getY() < y ? Math.min(speed,
y - player.getY()) : Math.max(-speed, y
- player.getY())));
}
if(player.getX() == (x + 0.5D) && player.getY() == y
&& player.getZ() == (z + 0.5D)) {
nextStep = nextStep.getNext();
ticksSinceStepChange = 0;
}
}
}
public synchronized boolean isMoving() {
return nextStep != null;
}
@Override
public synchronized boolean isActive() {
return target != null && (thread != null || nextStep != null);
}
@Override
public TaskPriority getPriority() {
return TaskPriority.NORMAL;
}
@Override
public boolean isExclusive() {
return true;
}
@Override
public boolean ignoresExclusive() {
return false;
}
@Override
public String getName() {
return "Walk";
}
@Override
public String getOptionDescription() {
return "[x y z]";
}
}
|
Fixed jump mechanics for NoCheat compatibility
|
src/minecraft/java/org/darkstorm/darkbot/minecraftbot/ai/WalkTask.java
|
Fixed jump mechanics for NoCheat compatibility
|
<ide><path>rc/minecraft/java/org/darkstorm/darkbot/minecraftbot/ai/WalkTask.java
<ide> package org.darkstorm.darkbot.minecraftbot.ai;
<ide>
<del>import java.util.concurrent.*;
<del>
<ide> import org.darkstorm.darkbot.minecraftbot.MinecraftBot;
<del>import org.darkstorm.darkbot.minecraftbot.world.*;
<add>import org.darkstorm.darkbot.minecraftbot.world.World;
<add>import org.darkstorm.darkbot.minecraftbot.world.WorldLocation;
<ide> import org.darkstorm.darkbot.minecraftbot.world.block.BlockLocation;
<ide> import org.darkstorm.darkbot.minecraftbot.world.entity.MainPlayerEntity;
<del>import org.darkstorm.darkbot.minecraftbot.world.pathfinding.*;
<add>import org.darkstorm.darkbot.minecraftbot.world.pathfinding.PathNode;
<add>import org.darkstorm.darkbot.minecraftbot.world.pathfinding.PathSearch;
<add>
<add>import java.util.concurrent.Callable;
<add>import java.util.concurrent.ExecutorService;
<add>import java.util.concurrent.Executors;
<add>import java.util.concurrent.Future;
<ide>
<ide> public class WalkTask implements Task {
<del> private final MinecraftBot bot;
<del> private final ExecutorService service = Executors.newSingleThreadExecutor();
<del> private BlockLocation target;
<del> private Future<PathNode> thread;
<add> private final MinecraftBot bot;
<add> private final ExecutorService service = Executors.newSingleThreadExecutor();
<add> private BlockLocation target;
<add> private Future<PathNode> thread;
<add> private PathNode nextStep;
<add> private int ticksSinceStepChange = 0;
<add> private int timeout = 0;
<add> private long startTime = 0;
<ide>
<del> private PathNode nextStep;
<del> private int ticksSinceStepChange = 0;
<add> public WalkTask(MinecraftBot bot) {
<add> this.bot = bot;
<add> }
<ide>
<del> private int timeout = 0;
<del> private long startTime = 0;
<add> public synchronized BlockLocation getTarget() {
<add> return target;
<add> }
<ide>
<del> public WalkTask(MinecraftBot bot) {
<del> this.bot = bot;
<del> }
<add> public synchronized void setTarget(final BlockLocation target) {
<add> this.target = target;
<add> }
<ide>
<del> public synchronized BlockLocation getTarget() {
<del> return target;
<del> }
<add> public void setTimeout(int timeout) {
<add> this.timeout = timeout;
<add> }
<ide>
<del> public synchronized void setTarget(final BlockLocation target) {
<del> this.target = target;
<del> }
<add> @Override
<add> public synchronized boolean isPreconditionMet() {
<add> return target != null;
<add> }
<ide>
<del> public void setTimeout(int timeout) {
<del> this.timeout = timeout;
<del> }
<add> @Override
<add> public synchronized boolean start(String... options) {
<add> if (options.length == 3) {
<add> try {
<add> int x = Integer.parseInt(options[0]);
<add> int y = Integer.parseInt(options[1]);
<add> int z = Integer.parseInt(options[2]);
<ide>
<del> @Override
<del> public synchronized boolean isPreconditionMet() {
<del> return target != null;
<del> }
<add> target = new BlockLocation(x, y, z);
<add> } catch (Exception e) {
<add> return false;
<add> }
<add> }
<add> if (target == null)
<add> return false;
<add> System.out.println("Searching for path to " + target);
<add> thread = service.submit(new Callable<PathNode>() {
<add> @Override
<add> public PathNode call() throws Exception {
<add> World world = bot.getWorld();
<add> MainPlayerEntity player = bot.getPlayer();
<add> if (world == null || player == null || target == null)
<add> return null;
<add> BlockLocation ourLocation = new BlockLocation((int) (Math
<add> .round(player.getX() - 0.5)), (int) player.getY(),
<add> (int) (Math.round(player.getZ() - 0.5)));
<add> PathSearch search = world.getPathFinder().provideSearch(
<add> ourLocation, target);
<add> while (!search.isDone() && !Thread.interrupted())
<add> search.step();
<add> return search.getPath();
<add> }
<add> });
<add> startTime = System.currentTimeMillis();
<add> return true;
<add> }
<ide>
<del> @Override
<del> public synchronized boolean start(String... options) {
<del> if(options.length == 3) {
<del> try {
<del> int x = Integer.parseInt(options[0]);
<del> int y = Integer.parseInt(options[1]);
<del> int z = Integer.parseInt(options[2]);
<add> @Override
<add> public synchronized void stop() {
<add> target = null;
<add> if (thread != null && !thread.isDone()) {
<add> thread.cancel(true);
<add> thread = null;
<add> }
<add> nextStep = null;
<add> ticksSinceStepChange = 0;
<ide>
<del> target = new BlockLocation(x, y, z);
<del> } catch(Exception e) {
<del> return false;
<del> }
<del> }
<del> if(target == null)
<del> return false;
<del> System.out.println("Searching for path to " + target);
<del> thread = service.submit(new Callable<PathNode>() {
<del> @Override
<del> public PathNode call() throws Exception {
<del> World world = bot.getWorld();
<del> MainPlayerEntity player = bot.getPlayer();
<del> if(world == null || player == null || target == null)
<del> return null;
<del> BlockLocation ourLocation = new BlockLocation((int) (Math
<del> .round(player.getX() - 0.5)), (int) player.getY(),
<del> (int) (Math.round(player.getZ() - 0.5)));
<del> PathSearch search = world.getPathFinder().provideSearch(
<del> ourLocation, target);
<del> while(!search.isDone() && !Thread.interrupted())
<del> search.step();
<del> return search.getPath();
<del> }
<del> });
<del> startTime = System.currentTimeMillis();
<del> return true;
<del> }
<add> timeout = 0;
<add> startTime = 0;
<add> }
<ide>
<del> @Override
<del> public synchronized void stop() {
<del> target = null;
<del> if(thread != null && !thread.isDone()) {
<del> thread.cancel(true);
<del> thread = null;
<del> }
<del> nextStep = null;
<del> ticksSinceStepChange = 0;
<add> @Override
<add> public synchronized void run() {
<add> if (thread != null && !thread.isDone()) {
<add> if (timeout > 0 && System.currentTimeMillis() - startTime > timeout)
<add> stop();
<add> return;
<add> } else if (thread != null && thread.isDone()) {
<add> try {
<add> nextStep = thread.get();
<add> } catch (Exception exception) {
<add> }
<add> thread = null;
<add> } else if (nextStep != null) {
<add> MainPlayerEntity player = bot.getPlayer();
<add> if (nextStep.getNext() != null
<add> && player.getDistanceToSquared(nextStep.getLocation()) > player
<add> .getDistanceToSquared(nextStep.getNext()
<add> .getLocation()))
<add> nextStep = nextStep.getNext();
<add> if (player.getDistanceToSquared(nextStep.getLocation()) > 10) {
<add> stop();
<add> return;
<add> }
<add> ticksSinceStepChange++;
<add> if (ticksSinceStepChange > 80) {
<add> stop();
<add> return;
<add> }
<add> double speed = 0.12;
<add> WorldLocation location = nextStep.getLocation();
<add> double x = location.getX(), y = location.getY(), z = location
<add> .getZ();
<add> if (player.getY() != y) {
<add> speed *= 2;
<add> player.setY(player.getY() + (player.getY() < y ? Math.min(speed, y - player.getY()) : Math.max(-speed, y - player.getY())));
<add> }
<add> if (player.getX() != (x + 0.5D)) {
<add> player.setX(player.getX()
<add> + (player.getX() < (x + 0.5D) ? Math.min(speed,
<add> (x + 0.5D) - player.getX()) : Math.max(-speed,
<add> (x + 0.5D) - player.getX())));
<add> }
<add> if (player.getZ() != (z + 0.5D)) {
<add> player.setZ(player.getZ()
<add> + (player.getZ() < (z + 0.5D) ? Math.min(speed,
<add> (z + 0.5D) - player.getZ()) : Math.max(-speed,
<add> (z + 0.5D) - player.getZ())));
<add> }
<add> if (player.getX() == (x + 0.5D) && player.getY() == y
<add> && player.getZ() == (z + 0.5D)) {
<add> nextStep = nextStep.getNext();
<add> ticksSinceStepChange = 0;
<add> }
<add> }
<add> }
<ide>
<del> timeout = 0;
<del> startTime = 0;
<del> }
<add> public synchronized boolean isMoving() {
<add> return nextStep != null;
<add> }
<ide>
<del> @Override
<del> public synchronized void run() {
<del> if(thread != null && !thread.isDone()) {
<del> if(timeout > 0 && System.currentTimeMillis() - startTime > timeout)
<del> stop();
<del> return;
<del> } else if(thread != null && thread.isDone()) {
<del> try {
<del> nextStep = thread.get();
<del> } catch(Exception exception) {}
<del> thread = null;
<del> } else if(nextStep != null) {
<del> MainPlayerEntity player = bot.getPlayer();
<del> if(nextStep.getNext() != null
<del> && player.getDistanceToSquared(nextStep.getLocation()) > player
<del> .getDistanceToSquared(nextStep.getNext()
<del> .getLocation()))
<del> nextStep = nextStep.getNext();
<del> if(player.getDistanceToSquared(nextStep.getLocation()) > 10) {
<del> stop();
<del> return;
<del> }
<del> ticksSinceStepChange++;
<del> if(ticksSinceStepChange > 80) {
<del> stop();
<del> return;
<del> }
<del> double speed = 0.12;
<del> WorldLocation location = nextStep.getLocation();
<del> double x = location.getX(), y = location.getY(), z = location
<del> .getZ();
<del> if(player.getX() != (x + 0.5D)) {
<del> player.setX(player.getX()
<del> + (player.getX() < (x + 0.5D) ? Math.min(speed,
<del> (x + 0.5D) - player.getX()) : Math.max(-speed,
<del> (x + 0.5D) - player.getX())));
<del> }
<del> if(player.getZ() != (z + 0.5D)) {
<del> player.setZ(player.getZ()
<del> + (player.getZ() < (z + 0.5D) ? Math.min(speed,
<del> (z + 0.5D) - player.getZ()) : Math.max(-speed,
<del> (z + 0.5D) - player.getZ())));
<del> }
<del> if(player.getY() != y) {
<del> speed *= 3;
<del> player.setY(player.getY()
<del> + (player.getY() < y ? Math.min(speed,
<del> y - player.getY()) : Math.max(-speed, y
<del> - player.getY())));
<del> }
<del> if(player.getX() == (x + 0.5D) && player.getY() == y
<del> && player.getZ() == (z + 0.5D)) {
<del> nextStep = nextStep.getNext();
<del> ticksSinceStepChange = 0;
<del> }
<del> }
<del> }
<add> @Override
<add> public synchronized boolean isActive() {
<add> return target != null && (thread != null || nextStep != null);
<add> }
<ide>
<del> public synchronized boolean isMoving() {
<del> return nextStep != null;
<del> }
<add> @Override
<add> public TaskPriority getPriority() {
<add> return TaskPriority.NORMAL;
<add> }
<ide>
<del> @Override
<del> public synchronized boolean isActive() {
<del> return target != null && (thread != null || nextStep != null);
<del> }
<add> @Override
<add> public boolean isExclusive() {
<add> return true;
<add> }
<ide>
<del> @Override
<del> public TaskPriority getPriority() {
<del> return TaskPriority.NORMAL;
<del> }
<add> @Override
<add> public boolean ignoresExclusive() {
<add> return false;
<add> }
<ide>
<del> @Override
<del> public boolean isExclusive() {
<del> return true;
<del> }
<add> @Override
<add> public String getName() {
<add> return "Walk";
<add> }
<ide>
<del> @Override
<del> public boolean ignoresExclusive() {
<del> return false;
<del> }
<del>
<del> @Override
<del> public String getName() {
<del> return "Walk";
<del> }
<del>
<del> @Override
<del> public String getOptionDescription() {
<del> return "[x y z]";
<del> }
<add> @Override
<add> public String getOptionDescription() {
<add> return "[x y z]";
<add> }
<ide> }
|
|
Java
|
apache-2.0
|
323413ef4ac8eb40ad4bf445561371a3fcaa95b4
| 0 |
smsOrg/Tetris3d
|
package org.sms.tetris3d.views;
/**
* Created by hsh on 2016. 11. 29..
*/
import android.content.*;
import android.graphics.Canvas;
import android.graphics.Color;
import android.os.Handler;
import android.widget.*;
import android.view.*;
import android.util.*;
import android.graphics.*;
import android.graphics.drawable.*;
import com.dexafree.materialList.view.MaterialListView;
import org.sms.tetris3d.GameStatus;
import org.sms.tetris3d.controls.RotateControls;
import org.sms.tetris3d.players.DeviceUser;
public class RotateButtonView extends View implements View.OnTouchListener,Runnable{
protected final static int X_AXIS_BUTTON_COLOR = Color.argb(0xff,0x90,0,0);
protected final static int Y_AXIS_BUTTON_COLOR = Color.argb(0xff,0,0x80,0);
protected final static int Z_AXIS_BUTTON_COLOR = Color.argb(0xff,0x30,0x30,0xa1);
protected final static float AREA_DEGREE =360.0f/3;
protected final static float PREFIX_DEGREE = AREA_DEGREE/2;
protected final static byte UNKNOWN_AXIS_CLICK_INDEX = -1;
protected final static byte X_AXIS_CLICK_INDEX = 2;
protected final static byte Y_AXIS_CLICK_INDEX = 1;
protected final static byte Z_AXIS_CLICK_INDEX = 0;
protected Paint x_pnt=null,y_pnt=null,z_pnt=null,txt_pnt,btnclk_pnt,rm_pnt;
protected RotateControls rc = null;
protected byte mClickState =UNKNOWN_AXIS_CLICK_INDEX;
protected final Handler mHandler = new Handler();
private final Object mSync = new Object();
// protected final Thread mMultiProcessThread = new Thread(new Runnable(){@Override public void run(){mHandler.post(RotateButtonView.this);}});
public RotateButtonView(Context ctx) {
super(ctx);
setOnTouchListener(this);
initPaint();
}
public RotateButtonView(Context ctx, AttributeSet attrs) {
super(ctx,attrs);
setOnTouchListener(this);
initPaint();
}
public RotateButtonView(Context ctx, AttributeSet attrs,int themResId){
super(ctx,attrs,themResId);
setOnTouchListener(this);
initPaint();
}
@Override
public void run(){
synchronized (mSync){
invalidate();
}
}
private void initPaint(){
x_pnt = new Paint();
y_pnt = new Paint();
z_pnt = new Paint();
txt_pnt = new Paint();
btnclk_pnt=new Paint();
rm_pnt=new Paint();
x_pnt.setAntiAlias(true);
y_pnt.setAntiAlias(true);
z_pnt.setAntiAlias(true);
txt_pnt.setAntiAlias(true);
btnclk_pnt.setAntiAlias(true);
rm_pnt.setAntiAlias(true);
x_pnt.setColor(X_AXIS_BUTTON_COLOR);
y_pnt.setColor(Y_AXIS_BUTTON_COLOR);
z_pnt.setColor(Z_AXIS_BUTTON_COLOR);
rm_pnt.setAlpha(0x0);
rm_pnt.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
rm_pnt.setColor(Color.WHITE);
btnclk_pnt.setColor(Color.argb(0x90,0x0,0x0,0x0));
txt_pnt.setColor(Color.WHITE);
txt_pnt.setTextSize(40);
}
Paint[] getPackedPaints(){
final Paint[] rst = {z_pnt,y_pnt,x_pnt};
return rst;
}
String[] getAxisString(){
return new String[]{"Z","y","X"};
}
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
final float height =Math.max(getMeasuredHeight(), canvas.getHeight());
final float width = Math.max(getMeasuredWidth(), canvas.getWidth());
Paint[] pnt_easy_ary = getPackedPaints(); //{x_pnt,y_pnt,z_pnt};
canvas.save();
canvas.rotate(-(90+PREFIX_DEGREE),width/2,height/2);
RectF area = new RectF();
if(height>width){
area.set(0,height/2-width/2,width,height/2+width+2);
}
else if(height<width){
area.set(width/2-height/2,0,width/2+height/2,height);
}
else{
area.set(0,0,width,height);
}
for(int i=0;i<pnt_easy_ary.length;i++){
canvas.drawArc(area,AREA_DEGREE*i,AREA_DEGREE,true ,pnt_easy_ary[i]);
if(mClickState==i){
synchronized (mSync) {
canvas.drawArc(area, AREA_DEGREE * i, AREA_DEGREE, true, btnclk_pnt);
}
}
//android.util.Log.e("rbv color:",i+"th = "+AREA_DEGREE*(i+1));
}
canvas.restore();
String[] strs = getAxisString();
for(int i =0;i<strs.length;i++){
canvas.save();
canvas.rotate(AREA_DEGREE*i,width/2,height/2);
String str = strs[i];
Rect rct = new Rect();
txt_pnt.getTextBounds(str.toCharArray(),0,str.toCharArray().length,rct);
canvas.drawText(strs[i],width/2-rct.width()/2,area.height()/5+rct.height()/2,txt_pnt);
canvas.restore();
}
RectF rct = new RectF();
rct.set(area.width()/2-area.width()*0.05f,area.height()/2-area.height()*0.05f,area.width()/2+area.width()*0.05f,area.height()/2+area.height()*0.05f);
canvas.drawArc(rct,0,360,true,rm_pnt);
}
protected float getClickedDegree(float r,float relative_xPos,float relative_yPos){
final float relative_r = (float)Math.sqrt(Math.pow(relative_xPos,2)+ Math.pow(relative_yPos,2));
float currentDegree =Float.POSITIVE_INFINITY;
if(r>10){
if(relative_xPos!=0){
currentDegree= (float)
Math.atan(relative_yPos/relative_xPos);
if(relative_xPos>0&&relative_yPos>=0){//dai 1
currentDegree=(float)Math.PI/2-currentDegree;
// android.util.Log.e("pos log: ","dai 1 area");
}
else if(relative_xPos>0&&relative_yPos<0){//dai 4
currentDegree=(float)Math.PI/2+Math.abs(currentDegree);
//android.util.Log.e("pos log: ","dai 4 area");
}
else if(relative_xPos<0&&relative_yPos<0){//dai 3
currentDegree=(float)Math.PI+((float)Math.PI/2-currentDegree);
//android.util.Log.e("pos log: ","dai 3 area");
}
else{ //dai 2
currentDegree=(float)Math.PI*3/2+Math.abs(currentDegree);
//android.util.Log.e("pos log: ","dai 2 area");
}
currentDegree=(float)Math.toDegrees(currentDegree);
//android.util.Log.e("rbv original degree: ",currentDegree+" and pos: "+String.format("%f , %f",relative_xPos,relative_yPos));
}
else{
currentDegree = (relative_yPos==0)? 0:(relative_xPos>0)? 0:180;
}
if(currentDegree<0){
currentDegree = (360*3-currentDegree)%360;
}
}
return currentDegree;
}
private float touch_x=0,touch_y = 0,currentDegree=0;
@Override
public boolean onTouch(View v, MotionEvent event) {
switch(event.getAction()){
case MotionEvent.ACTION_DOWN:{
touch_x = event.getX();
touch_y = event.getY();
final float height = Math.max(v.getMeasuredHeight(), v.getHeight());
final float width = Math.max(v.getMeasuredWidth(), v.getWidth());
final float r = Math.min(height,width);
//android.util.Log.e("touch x and y: ",String.format("x= %f y= %f",touch_x,touch_y));
float relative_xPos = width/2- touch_x;
float relative_yPos = height/2-touch_y;
relative_xPos *=-1;
currentDegree = getClickedDegree(r,relative_xPos,relative_yPos);
//android.util.Log.e("rbv degree: ",currentDegree+"");
if(PREFIX_DEGREE<=currentDegree&¤tDegree<PREFIX_DEGREE+AREA_DEGREE){
mClickState=Y_AXIS_CLICK_INDEX;
//rc.rotateY(du);
}
else if(PREFIX_DEGREE+AREA_DEGREE<=currentDegree&¤tDegree<PREFIX_DEGREE+AREA_DEGREE*2){
mClickState=X_AXIS_CLICK_INDEX;
//rc.rotateX(du);
}
else if(!Float.isInfinite(currentDegree)){
mClickState=Z_AXIS_CLICK_INDEX;
//rc.rotateZ(du);
}
else{
mClickState=UNKNOWN_AXIS_CLICK_INDEX;
}
synchronized (mSync) {
invalidate();
}
break;
}
case MotionEvent.ACTION_UP:{
/*touch_x = event.getX();
touch_y = event.getY();
final float height = Math.max(v.getMeasuredHeight(), v.getHeight());
final float width = Math.max(v.getMeasuredWidth(), v.getWidth());
final float r = Math.min(height,width);
//android.util.Log.e("touch x and y: ",String.format("x= %f y= %f",touch_x,touch_y));
float relative_xPos = width/2- touch_x;
float relative_yPos = height/2-touch_y;
relative_xPos *=-1;
float currentDegree = getClickedDegree(r,relative_xPos,relative_yPos);
//android.util.Log.e("rbv degree: ",currentDegree+"");
*/
if(rc==null){
rc = new RotateControls();
}
final DeviceUser du = GameStatus.getDeviceUser();
mClickState=UNKNOWN_AXIS_CLICK_INDEX;
if(PREFIX_DEGREE<=currentDegree&¤tDegree<PREFIX_DEGREE+AREA_DEGREE){
//mMultiProcessThread.start();
mHandler.postDelayed(this,4);
rc.rotateY(du);
}
else if(PREFIX_DEGREE+AREA_DEGREE<=currentDegree&¤tDegree<PREFIX_DEGREE+AREA_DEGREE*2){
// mMultiProcessThread.start();
mHandler.postDelayed(this,4);
rc.rotateX(du);
}
else if(!Float.isInfinite(currentDegree)){
//mMultiProcessThread.start();
mHandler.postDelayed(this,4);
rc.rotateZ(du);
}
break;
}
default:break;
}
return true;
}
}
|
app/src/main/java/org/sms/tetris3d/views/RotateButtonView.java
|
package org.sms.tetris3d.views;
/**
* Created by hsh on 2016. 11. 29..
*/
import android.content.*;
import android.graphics.Canvas;
import android.graphics.Color;
import android.widget.*;
import android.view.*;
import android.util.*;
import android.graphics.*;
import android.graphics.drawable.*;
import com.dexafree.materialList.view.MaterialListView;
import org.sms.tetris3d.GameStatus;
import org.sms.tetris3d.controls.RotateControls;
import org.sms.tetris3d.players.DeviceUser;
public class RotateButtonView extends View implements View.OnTouchListener{
protected final static int X_AXIS_BUTTON_COLOR = Color.argb(0xff,0x90,0,0);
protected final static int Y_AXIS_BUTTON_COLOR = Color.argb(0xff,0,0x80,0);
protected final static int Z_AXIS_BUTTON_COLOR = Color.argb(0xff,0x30,0x30,0xa1);
protected final static float AREA_DEGREE =360.0f/3;
protected final static float PREFIX_DEGREE = 45;
protected Paint x_pnt=null,y_pnt=null,z_pnt=null,txt_pnt;
protected RotateControls rc = null;
public RotateButtonView(Context ctx) {
super(ctx);
setOnTouchListener(this);
initPaint();
}
public RotateButtonView(Context ctx, AttributeSet attrs) {
super(ctx,attrs);
setOnTouchListener(this);
initPaint();
}
public RotateButtonView(Context ctx, AttributeSet attrs,int themResId){
super(ctx,attrs,themResId);
setOnTouchListener(this);
initPaint();
}
private void initPaint(){
x_pnt = new Paint();
y_pnt = new Paint();
z_pnt = new Paint();
txt_pnt = new Paint();
x_pnt.setAntiAlias(true);
y_pnt.setAntiAlias(true);
z_pnt.setAntiAlias(true);
txt_pnt.setAntiAlias(true);
x_pnt.setColor(X_AXIS_BUTTON_COLOR);
y_pnt.setColor(Y_AXIS_BUTTON_COLOR);
z_pnt.setColor(Z_AXIS_BUTTON_COLOR);
txt_pnt.setColor(Color.WHITE);
txt_pnt.setTextSize(40);
}
Paint[] getPackedPaints(){
final Paint[] rst = {z_pnt,y_pnt,x_pnt};
return rst;
}
String[] getAxisString(){
return new String[]{"Z","y","X"};
}
protected int getQuadrant(float x,float y){
if(x>=0&&y>=0){
return 1;
}
else if(x<0&&y>=0){
return 2;
}
else if(x<0&&y<0){
return 3;
}
else {
return 4;
}
}
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
final float height =Math.max(getMeasuredHeight(), canvas.getHeight());
final float width = Math.max(getMeasuredWidth(), canvas.getWidth());
Paint[] pnt_easy_ary = getPackedPaints(); //{x_pnt,y_pnt,z_pnt};
canvas.save();
canvas.rotate(-(90+PREFIX_DEGREE),width/2,height/2);
RectF area = new RectF();
if(height>width){
area.set(0,height/2-width/2,width,height/2+width+2);
}
else if(height<width){
area.set(width/2-height/2,0,width/2+height/2,height);
}
else{
area.set(0,0,width,height);
}
for(int i=0;i<pnt_easy_ary.length;i++){
canvas.drawArc(area,AREA_DEGREE*i,AREA_DEGREE,true ,pnt_easy_ary[i]);
//android.util.Log.e("rbv color:",i+"th = "+AREA_DEGREE*(i+1));
}
canvas.restore();
String[] strs = getAxisString();
for(int i =0;i<strs.length;i++){
canvas.save();
canvas.rotate(AREA_DEGREE*i,width/2,height/2);
canvas.drawText(strs[i],width/2,area.height()/4,txt_pnt);
canvas.restore();
}
}
// Last touch-down / touch-up coordinates in view-local pixels.
float touch_x = 0, touch_y = 0;

/**
 * Handles taps on the circular rotate control. On ACTION_UP the touch point
 * is converted to a clockwise-from-top angle around the view centre and
 * mapped onto one of the three {@code AREA_DEGREE}-wide sectors (Y, X, Z),
 * dispatching the matching rotation to the device user.
 *
 * Fix: removed the unused local {@code relative_r} (computed but never read).
 *
 * @param v     the touched view
 * @param event the motion event
 * @return always {@code true}; this view consumes every touch event
 */
@Override
public boolean onTouch(View v, MotionEvent event) {
    switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN: {
            touch_x = event.getX();
            touch_y = event.getY();
            break;
        }
        case MotionEvent.ACTION_UP: {
            touch_x = event.getX();
            touch_y = event.getY();
            final float height = Math.max(v.getMeasuredHeight(), v.getHeight());
            final float width = Math.max(v.getMeasuredWidth(), v.getWidth());
            final float r = Math.min(height, width);
            // Touch position relative to the view centre; x is negated so it
            // grows to the right while y still grows downward (screen coords).
            float relative_xPos = width / 2 - touch_x;
            float relative_yPos = height / 2 - touch_y;
            relative_xPos *= -1;
            // Ignore touches when the control is too small to hit reliably.
            if (r > 10) {
                float currentDegree = Float.POSITIVE_INFINITY;
                if (relative_xPos != 0) {
                    currentDegree = (float) Math.atan(relative_yPos / relative_xPos);
                    // Normalise the atan result into a full-circle angle, quadrant by quadrant.
                    if (relative_xPos > 0 && relative_yPos >= 0) {          // quadrant 1
                        currentDegree = (float) Math.PI / 2 - currentDegree;
                    } else if (relative_xPos > 0 && relative_yPos < 0) {    // quadrant 4
                        currentDegree = (float) Math.PI / 2 + Math.abs(currentDegree);
                    } else if (relative_xPos < 0 && relative_yPos < 0) {    // quadrant 3
                        currentDegree = (float) Math.PI + ((float) Math.PI / 2 - currentDegree);
                    } else {                                                // quadrant 2
                        currentDegree = (float) Math.PI * 3 / 2 + Math.abs(currentDegree);
                    }
                    currentDegree = (float) Math.toDegrees(currentDegree);
                } else {
                    // Touch exactly on the vertical centre line: 0 or 180 degrees.
                    currentDegree = (relative_yPos == 0) ? 0 : (relative_xPos > 0) ? 0 : 180;
                }
                if (currentDegree < 0) {
                    currentDegree = (360 * 3 - currentDegree) % 360;
                }
                if (rc == null) {
                    rc = new RotateControls();
                }
                final DeviceUser du = GameStatus.getDeviceUser();
                // Map the angle onto the three pie sectors drawn in draw().
                if (PREFIX_DEGREE <= currentDegree && currentDegree < PREFIX_DEGREE + AREA_DEGREE) {
                    rc.rotateY(du);
                } else if (PREFIX_DEGREE + AREA_DEGREE <= currentDegree
                        && currentDegree < PREFIX_DEGREE + AREA_DEGREE * 2) {
                    rc.rotateX(du);
                } else {
                    rc.rotateZ(du);
                }
            }
            break;
        }
        default:
            break;
    }
    return true;
}
}
|
add click motion
|
app/src/main/java/org/sms/tetris3d/views/RotateButtonView.java
|
add click motion
|
<ide><path>pp/src/main/java/org/sms/tetris3d/views/RotateButtonView.java
<ide> import android.content.*;
<ide> import android.graphics.Canvas;
<ide> import android.graphics.Color;
<add>import android.os.Handler;
<ide> import android.widget.*;
<ide> import android.view.*;
<ide> import android.util.*;
<ide> import org.sms.tetris3d.controls.RotateControls;
<ide> import org.sms.tetris3d.players.DeviceUser;
<ide>
<del>public class RotateButtonView extends View implements View.OnTouchListener{
<add>public class RotateButtonView extends View implements View.OnTouchListener,Runnable{
<ide> protected final static int X_AXIS_BUTTON_COLOR = Color.argb(0xff,0x90,0,0);
<ide>
<ide> protected final static int Y_AXIS_BUTTON_COLOR = Color.argb(0xff,0,0x80,0);
<ide>
<ide> protected final static float AREA_DEGREE =360.0f/3;
<ide>
<del> protected final static float PREFIX_DEGREE = 45;
<del>
<del>
<del> protected Paint x_pnt=null,y_pnt=null,z_pnt=null,txt_pnt;
<add> protected final static float PREFIX_DEGREE = AREA_DEGREE/2;
<add>
<add> protected final static byte UNKNOWN_AXIS_CLICK_INDEX = -1;
<add>
<add> protected final static byte X_AXIS_CLICK_INDEX = 2;
<add>
<add> protected final static byte Y_AXIS_CLICK_INDEX = 1;
<add>
<add> protected final static byte Z_AXIS_CLICK_INDEX = 0;
<add>
<add>
<add> protected Paint x_pnt=null,y_pnt=null,z_pnt=null,txt_pnt,btnclk_pnt,rm_pnt;
<ide>
<ide> protected RotateControls rc = null;
<ide>
<add> protected byte mClickState =UNKNOWN_AXIS_CLICK_INDEX;
<add>
<add> protected final Handler mHandler = new Handler();
<add>
<add> private final Object mSync = new Object();
<add>
<add> // protected final Thread mMultiProcessThread = new Thread(new Runnable(){@Override public void run(){mHandler.post(RotateButtonView.this);}});
<ide> public RotateButtonView(Context ctx) {
<ide> super(ctx);
<ide> setOnTouchListener(this);
<ide> setOnTouchListener(this);
<ide> initPaint();
<ide> }
<add>
<add> @Override
<add> public void run(){
<add> synchronized (mSync){
<add> invalidate();
<add> }
<add> }
<add>
<ide> private void initPaint(){
<ide> x_pnt = new Paint();
<ide> y_pnt = new Paint();
<ide> z_pnt = new Paint();
<ide> txt_pnt = new Paint();
<add> btnclk_pnt=new Paint();
<add> rm_pnt=new Paint();
<ide> x_pnt.setAntiAlias(true);
<ide> y_pnt.setAntiAlias(true);
<ide> z_pnt.setAntiAlias(true);
<ide> txt_pnt.setAntiAlias(true);
<add> btnclk_pnt.setAntiAlias(true);
<add> rm_pnt.setAntiAlias(true);
<ide> x_pnt.setColor(X_AXIS_BUTTON_COLOR);
<ide> y_pnt.setColor(Y_AXIS_BUTTON_COLOR);
<ide> z_pnt.setColor(Z_AXIS_BUTTON_COLOR);
<add> rm_pnt.setAlpha(0x0);
<add> rm_pnt.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
<add> rm_pnt.setColor(Color.WHITE);
<add> btnclk_pnt.setColor(Color.argb(0x90,0x0,0x0,0x0));
<add>
<ide> txt_pnt.setColor(Color.WHITE);
<ide> txt_pnt.setTextSize(40);
<ide> }
<ide>
<ide> return new String[]{"Z","y","X"};
<ide> }
<del> protected int getQuadrant(float x,float y){
<del> if(x>=0&&y>=0){
<del> return 1;
<del> }
<del> else if(x<0&&y>=0){
<del> return 2;
<del> }
<del> else if(x<0&&y<0){
<del> return 3;
<del> }
<del> else {
<del> return 4;
<del> }
<del> }
<add>
<ide> @Override
<ide> public void draw(Canvas canvas) {
<ide> super.draw(canvas);
<ide> }
<ide> for(int i=0;i<pnt_easy_ary.length;i++){
<ide> canvas.drawArc(area,AREA_DEGREE*i,AREA_DEGREE,true ,pnt_easy_ary[i]);
<add> if(mClickState==i){
<add> synchronized (mSync) {
<add> canvas.drawArc(area, AREA_DEGREE * i, AREA_DEGREE, true, btnclk_pnt);
<add> }
<add> }
<ide> //android.util.Log.e("rbv color:",i+"th = "+AREA_DEGREE*(i+1));
<ide> }
<ide> canvas.restore();
<ide> String[] strs = getAxisString();
<add>
<ide> for(int i =0;i<strs.length;i++){
<ide> canvas.save();
<ide> canvas.rotate(AREA_DEGREE*i,width/2,height/2);
<del> canvas.drawText(strs[i],width/2,area.height()/4,txt_pnt);
<add> String str = strs[i];
<add> Rect rct = new Rect();
<add> txt_pnt.getTextBounds(str.toCharArray(),0,str.toCharArray().length,rct);
<add> canvas.drawText(strs[i],width/2-rct.width()/2,area.height()/5+rct.height()/2,txt_pnt);
<ide> canvas.restore();
<ide> }
<del> }
<del>float touch_x=0,touch_y = 0;
<add> RectF rct = new RectF();
<add> rct.set(area.width()/2-area.width()*0.05f,area.height()/2-area.height()*0.05f,area.width()/2+area.width()*0.05f,area.height()/2+area.height()*0.05f);
<add> canvas.drawArc(rct,0,360,true,rm_pnt);
<add> }
<add> protected float getClickedDegree(float r,float relative_xPos,float relative_yPos){
<add> final float relative_r = (float)Math.sqrt(Math.pow(relative_xPos,2)+ Math.pow(relative_yPos,2));
<add> float currentDegree =Float.POSITIVE_INFINITY;
<add> if(r>10){
<add>
<add> if(relative_xPos!=0){
<add> currentDegree= (float)
<add> Math.atan(relative_yPos/relative_xPos);
<add>
<add> if(relative_xPos>0&&relative_yPos>=0){//dai 1
<add> currentDegree=(float)Math.PI/2-currentDegree;
<add>
<add> // android.util.Log.e("pos log: ","dai 1 area");
<add>
<add> }
<add> else if(relative_xPos>0&&relative_yPos<0){//dai 4
<add> currentDegree=(float)Math.PI/2+Math.abs(currentDegree);
<add> //android.util.Log.e("pos log: ","dai 4 area");
<add> }
<add> else if(relative_xPos<0&&relative_yPos<0){//dai 3
<add> currentDegree=(float)Math.PI+((float)Math.PI/2-currentDegree);
<add> //android.util.Log.e("pos log: ","dai 3 area");
<add> }
<add> else{ //dai 2
<add> currentDegree=(float)Math.PI*3/2+Math.abs(currentDegree);
<add> //android.util.Log.e("pos log: ","dai 2 area");
<add> }
<add>
<add> currentDegree=(float)Math.toDegrees(currentDegree);
<add> //android.util.Log.e("rbv original degree: ",currentDegree+" and pos: "+String.format("%f , %f",relative_xPos,relative_yPos));
<add>
<add> }
<add> else{
<add> currentDegree = (relative_yPos==0)? 0:(relative_xPos>0)? 0:180;
<add> }
<add> if(currentDegree<0){
<add> currentDegree = (360*3-currentDegree)%360;
<add> }
<add>
<add> }
<add> return currentDegree;
<add> }
<add> private float touch_x=0,touch_y = 0,currentDegree=0;
<ide> @Override
<ide> public boolean onTouch(View v, MotionEvent event) {
<ide> switch(event.getAction()){
<ide> case MotionEvent.ACTION_DOWN:{
<del> touch_x = event.getX();
<del> touch_y = event.getY();
<del> break;
<del> }
<del> case MotionEvent.ACTION_UP:{
<ide> touch_x = event.getX();
<ide> touch_y = event.getY();
<ide> final float height = Math.max(v.getMeasuredHeight(), v.getHeight());
<ide> float relative_xPos = width/2- touch_x;
<ide> float relative_yPos = height/2-touch_y;
<ide> relative_xPos *=-1;
<del> final float relative_r = (float)Math.sqrt(Math.pow(relative_xPos,2)+ Math.pow(relative_yPos,2));
<del> if(r>10){
<del> float currentDegree =Float.POSITIVE_INFINITY;
<del> if(relative_xPos!=0){
<del> currentDegree= (float)
<del> Math.atan(relative_yPos/relative_xPos);
<del>
<del> if(relative_xPos>0&&relative_yPos>=0){//dai 1
<del> currentDegree=(float)Math.PI/2-currentDegree;
<del>
<del> // android.util.Log.e("pos log: ","dai 1 area");
<del>
<del> }
<del> else if(relative_xPos>0&&relative_yPos<0){//dai 4
<del> currentDegree=(float)Math.PI/2+Math.abs(currentDegree);
<del> //android.util.Log.e("pos log: ","dai 4 area");
<del> }
<del> else if(relative_xPos<0&&relative_yPos<0){//dai 3
<del> currentDegree=(float)Math.PI+((float)Math.PI/2-currentDegree);
<del> //android.util.Log.e("pos log: ","dai 3 area");
<del> }
<del> else{ //dai 2
<del> currentDegree=(float)Math.PI*3/2+Math.abs(currentDegree);
<del> //android.util.Log.e("pos log: ","dai 2 area");
<del> }
<del>
<del> currentDegree=(float)Math.toDegrees(currentDegree);
<del> //android.util.Log.e("rbv original degree: ",currentDegree+" and pos: "+String.format("%f , %f",relative_xPos,relative_yPos));
<del>
<del> }
<del> else{
<del> currentDegree = (relative_yPos==0)? 0:(relative_xPos>0)? 0:180;
<del> }
<del> if(currentDegree<0){
<del> currentDegree = (360*3-currentDegree)%360;
<del> }
<add> currentDegree = getClickedDegree(r,relative_xPos,relative_yPos);
<add> //android.util.Log.e("rbv degree: ",currentDegree+"");
<add>
<add> if(PREFIX_DEGREE<=currentDegree&¤tDegree<PREFIX_DEGREE+AREA_DEGREE){
<add>
<add> mClickState=Y_AXIS_CLICK_INDEX;
<add> //rc.rotateY(du);
<add> }
<add> else if(PREFIX_DEGREE+AREA_DEGREE<=currentDegree&¤tDegree<PREFIX_DEGREE+AREA_DEGREE*2){
<add> mClickState=X_AXIS_CLICK_INDEX;
<add> //rc.rotateX(du);
<add> }
<add> else if(!Float.isInfinite(currentDegree)){
<add> mClickState=Z_AXIS_CLICK_INDEX;
<add> //rc.rotateZ(du);
<add> }
<add> else{
<add> mClickState=UNKNOWN_AXIS_CLICK_INDEX;
<add> }
<add> synchronized (mSync) {
<add> invalidate();
<add> }
<add> break;
<add> }
<add> case MotionEvent.ACTION_UP:{
<add> /*touch_x = event.getX();
<add> touch_y = event.getY();
<add> final float height = Math.max(v.getMeasuredHeight(), v.getHeight());
<add> final float width = Math.max(v.getMeasuredWidth(), v.getWidth());
<add> final float r = Math.min(height,width);
<add> //android.util.Log.e("touch x and y: ",String.format("x= %f y= %f",touch_x,touch_y));
<add> float relative_xPos = width/2- touch_x;
<add> float relative_yPos = height/2-touch_y;
<add> relative_xPos *=-1;
<add> float currentDegree = getClickedDegree(r,relative_xPos,relative_yPos);
<ide> //android.util.Log.e("rbv degree: ",currentDegree+"");
<add> */
<add>
<ide> if(rc==null){
<ide> rc = new RotateControls();
<ide> }
<ide> final DeviceUser du = GameStatus.getDeviceUser();
<add> mClickState=UNKNOWN_AXIS_CLICK_INDEX;
<ide> if(PREFIX_DEGREE<=currentDegree&¤tDegree<PREFIX_DEGREE+AREA_DEGREE){
<add> //mMultiProcessThread.start();
<add> mHandler.postDelayed(this,4);
<ide> rc.rotateY(du);
<ide> }
<ide> else if(PREFIX_DEGREE+AREA_DEGREE<=currentDegree&¤tDegree<PREFIX_DEGREE+AREA_DEGREE*2){
<add> // mMultiProcessThread.start();
<add> mHandler.postDelayed(this,4);
<ide> rc.rotateX(du);
<ide> }
<del> else{
<add> else if(!Float.isInfinite(currentDegree)){
<add> //mMultiProcessThread.start();
<add> mHandler.postDelayed(this,4);
<ide> rc.rotateZ(du);
<ide> }
<del> }
<add>
<ide> break;
<ide> }
<ide> default:break;
<ide> }
<ide> return true;
<ide> }
<add>
<ide> }
|
|
JavaScript
|
mit
|
388b0243c61d57ed13ab529a75c54ed3388b22a8
| 0 |
lucian303/silex-skeleton,lucian303/silex-skeleton
|
// app.js — wires up the AJAX test button once the DOM is ready.
(function ($) {
    // Log the payload returned by the search endpoint.
    var onSearchSuccess = function (data) {
        console.log(data);
    };

    // Log the click event, then fire the example API search request.
    var onButtonClick = function (event) {
        console.log(event);
        $.ajax('/api/search/lucian', {
            success: onSearchSuccess
        });
    };

    $(document).ready(function () {
        $('#button-ajax-test').click(onButtonClick);
    });
})(jQuery);
|
web/js/app/app.js
|
// app.js
(function($) {
$(document).ready(function() {
// $('#button-ajax-test').click(function() {
// $.ajax('/api/search/lucian', {
// success: function(data) {
// console.log(data);
// }
// });
// });
});
})(jQuery);
|
Example jQuery ajax call to the API.
|
web/js/app/app.js
|
Example jQuery ajax call to the API.
|
<ide><path>eb/js/app/app.js
<ide> // app.js
<ide> (function($) {
<ide> $(document).ready(function() {
<del>// $('#button-ajax-test').click(function() {
<del>// $.ajax('/api/search/lucian', {
<del>// success: function(data) {
<del>// console.log(data);
<del>// }
<del>// });
<del>// });
<add> $('#button-ajax-test').click(function(event) {
<add> console.log(event);
<add> $.ajax('/api/search/lucian', {
<add> success: function(data) {
<add> console.log(data);
<add> }
<add> });
<add> });
<ide> });
<ide> })(jQuery);
|
|
JavaScript
|
mit
|
8589ad24e653d4643d04873ea55e3832714c45e5
| 0 |
mpatric/react-native-cached-image,kfiroo/react-native-cached-image,mpatric/react-native-cached-image,mpatric/react-native-cached-image,kfiroo/react-native-cached-image
|
'use strict';
const _ = require('lodash');
const React = require('react');
const ReactNative = require('react-native');
const ImageCacheProvider = require('./ImageCacheProvider');
const {
Image,
ActivityIndicator,
NetInfo,
Platform
} = ReactNative;
const {
StyleSheet
} = ReactNative;
const styles = StyleSheet.create({
image: {
backgroundColor: 'transparent'
},
loader: {
backgroundColor: 'transparent'
}
});
const CachedImage = React.createClass({
propTypes: {
renderImage: React.PropTypes.func.isRequired,
activityIndicatorProps: React.PropTypes.object.isRequired,
useQueryParamsInCacheKey: React.PropTypes.oneOfType([
React.PropTypes.bool,
React.PropTypes.array
]).isRequired
},
getDefaultProps() {
return {
renderImage: props => (<Image {...props}/>),
activityIndicatorProps: {},
useQueryParamsInCacheKey: false
};
},
getInitialState() {
this._isMounted = false;
return {
isCacheable: false,
cachedImagePath: null,
networkAvailable: true
};
},
safeSetState(newState) {
if (!this._isMounted) {
return;
}
return this.setState(newState);
},
componentWillMount() {
this._isMounted = true;
NetInfo.isConnected.addEventListener('change', this.handleConnectivityChange);
// initial
NetInfo.isConnected.fetch()
.then(isConnected => {
this.safeSetState({
networkAvailable: isConnected
});
});
this.processSource(this.props.source);
},
componentWillUnmount() {
this._isMounted = false;
NetInfo.isConnected.removeEventListener('change', this.handleConnectivityChange);
},
componentWillReceiveProps(nextProps) {
if (!_.isEqual(this.props.source, nextProps.source)) {
this.processSource(nextProps.source);
}
},
handleConnectivityChange(isConnected) {
this.safeSetState({
networkAvailable: isConnected
});
},
// Resolves the given image source: when the uri is cacheable, looks it up in
// the disk cache (downloading and caching it on a miss) and stores the local
// path in state; otherwise marks the source as not cacheable so render()
// falls through to the plain remote source.
processSource(source) {
    const url = _.get(source, ['uri'], null);
    if (ImageCacheProvider.isCacheable(url)) {
        const options = _.pick(this.props, ['useQueryParamsInCacheKey']);
        // try to get the image path from cache
        ImageCacheProvider.getCachedImagePath(url, options)
            // try to put the image in cache if it is not there yet
            .catch(() => ImageCacheProvider.cacheImage(url, options))
            .then(cachedImagePath => {
                this.safeSetState({
                    cachedImagePath
                });
            })
            .catch(err => {
                // Download/caching failed: clear the path so render() falls
                // back to the original remote source.
                this.safeSetState({
                    cachedImagePath: null
                });
            });
        this.safeSetState({
            isCacheable: true
        });
    } else {
        this.safeSetState({
            isCacheable: false
        });
    }
},
render() {
if (this.state.isCacheable && !this.state.cachedImagePath) {
return this.renderLoader();
}
const props = _.omit(this.props, ['source', 'activityIndicatorProps', 'style']);
const style = this.props.style || styles.image;
const source = this.state.cachedImagePath ? {
uri: 'file://' + this.state.cachedImagePath
} : this.props.source;
return this.props.renderImage({
...props,
style,
source
});
},
renderLoader() {
const props = _.omit(this.props.activityIndicatorProps, ['style']);
const imageProps = _.omit(this.props, ['source', 'activityIndicatorProps', 'style']);
const style = [this.props.style, this.props.activityIndicatorProps.style || styles.loader];
const imageStyle = this.props.style || styles.image;
return (
<Image {...imageProps} style={imageStyle}>
<ActivityIndicator
{...props}
style={style}/>
</image>
);
}
});
/**
 * Same as ReactNative.Image.getSize, except that it will not download the
 * image when a cached copy already exists on disk.
 * @param uri      remote image uri
 * @param success  callback invoked with (width, height)
 * @param failure  callback invoked on error
 * @param options  cache options (e.g. useQueryParamsInCacheKey)
 */
CachedImage.getSize = function getSize(uri, success, failure, options) {
    if (ImageCacheProvider.isCacheable(uri)) {
        ImageCacheProvider.getCachedImagePath(uri, options)
            .then(imagePath => {
                // Android needs an explicit file:// scheme for local paths.
                if (Platform.OS === 'android') {
                    imagePath = 'file://' + imagePath;
                }
                Image.getSize(imagePath, success, failure);
            })
            .catch(err => {
                // Not cached (or cache lookup failed): fall back to the network.
                Image.getSize(uri, success, failure);
            });
    } else {
        Image.getSize(uri, success, failure);
    }
};
module.exports = CachedImage;
|
CachedImage.js
|
'use strict';
const _ = require('lodash');
const React = require('react');
const ReactNative = require('react-native');
const ImageCacheProvider = require('./ImageCacheProvider');
const {
Image,
ActivityIndicator,
NetInfo,
Platform
} = ReactNative;
const {
StyleSheet
} = ReactNative;
const styles = StyleSheet.create({
image: {
backgroundColor: 'transparent'
},
loader: {
backgroundColor: 'transparent'
}
});
const CachedImage = React.createClass({
propTypes: {
renderImage: React.PropTypes.func.isRequired,
activityIndicatorProps: React.PropTypes.object.isRequired,
useQueryParamsInCacheKey: React.PropTypes.oneOfType([
React.PropTypes.bool,
React.PropTypes.array
]).isRequired
},
getDefaultProps() {
return {
renderImage: props => (<Image {...props}/>),
activityIndicatorProps: {},
useQueryParamsInCacheKey: false
};
},
getInitialState() {
this._isMounted = false;
return {
isCacheable: false,
cachedImagePath: null,
networkAvailable: true
};
},
safeSetState(newState) {
if (!this._isMounted) {
return;
}
return this.setState(newState);
},
componentWillMount() {
this._isMounted = true;
NetInfo.isConnected.addEventListener('change', this.handleConnectivityChange);
// initial
NetInfo.isConnected.fetch()
.then(isConnected => {
this.safeSetState({
networkAvailable: isConnected
});
});
this.processSource(this.props.source);
},
componentWillUnmount() {
this._isMounted = false;
NetInfo.isConnected.removeEventListener('change', this.handleConnectivityChange);
},
componentWillReceiveProps(nextProps) {
if (!_.isEqual(this.props.source, nextProps.source)) {
this.processSource(nextProps.source);
}
},
handleConnectivityChange(isConnected) {
this.safeSetState({
networkAvailable: isConnected
});
},
processSource(source) {
const url = _.get(source, ['uri'], null);
if (ImageCacheProvider.isCacheable(url)) {
const options = _.pick(this.props, ['useQueryParamsInCacheKey']);
// try to get the image path from cache
ImageCacheProvider.getCachedImagePath(url, options)
// try to put the image in cache if
.catch(() => ImageCacheProvider.cacheImage(url, options))
.then(cachedImagePath => {
this.safeSetState({
cachedImagePath
});
})
.catch(err => {
this.safeSetState({
cachedImagePath: null
});
});
this.safeSetState({
isCacheable: true
});
} else {
this.safeSetState({
isCacheable: false
});
}
},
render() {
if (this.state.isCacheable && !this.state.cachedImagePath) {
return this.renderLoader();
}
const props = _.omit(this.props, ['source', 'activityIndicatorProps', 'style']);
const style = this.props.style || styles.image;
const source = this.state.cachedImagePath ? {
uri: 'file://' + this.state.cachedImagePath
} : this.props.source;
return this.props.renderImage({
...props,
style,
source
});
},
renderLoader() {
const props = _.omit(this.props.activityIndicatorProps, ['style']);
const style = [this.props.style, this.props.activityIndicatorProps.style || styles.loader];
return (
<ActivityIndicator
{...props}
style={style}
/>
);
}
});
/**
* Same as ReactNaive.Image.getSize only it will not download the image if it has a cached version
* @param uri
* @param success
* @param failure
* @param options
*/
CachedImage.getSize = function getSize(uri, success, failure, options) {
if (ImageCacheProvider.isCacheable(uri)) {
ImageCacheProvider.getCachedImagePath(uri, options)
.then(imagePath => {
if (Platform.OS === 'android') {
imagePath = 'file://' + imagePath;
}
Image.getSize(imagePath, success, failure);
})
.catch(err => {
Image.getSize(uri, success, failure);
});
} else {
Image.getSize(uri, success, failure);
}
};
module.exports = CachedImage;
|
Show the defaultSource image behind the ActivityIndicator until source loaded
|
CachedImage.js
|
Show the defaultSource image behind the ActivityIndicator until source loaded
|
<ide><path>achedImage.js
<ide>
<ide> renderLoader() {
<ide> const props = _.omit(this.props.activityIndicatorProps, ['style']);
<add> const imageProps = _.omit(this.props, ['source', 'activityIndicatorProps', 'style']);
<ide> const style = [this.props.style, this.props.activityIndicatorProps.style || styles.loader];
<add> const imageStyle = this.props.style || styles.image;
<ide> return (
<del> <ActivityIndicator
<del> {...props}
<del> style={style}
<del> />
<add> <Image {...imageProps} style={imageStyle}>
<add> <ActivityIndicator
<add> {...props}
<add> style={style}/>
<add> </image>
<ide> );
<ide> }
<ide> });
|
|
Java
|
apache-2.0
|
694183a1c1ac9d7ff56ff0214fc268e26e69f6bc
| 0 |
sangramjadhav/testrs
|
20c28d06-2ece-11e5-905b-74de2bd44bed
|
hello.java
|
20c1fbfc-2ece-11e5-905b-74de2bd44bed
|
20c28d06-2ece-11e5-905b-74de2bd44bed
|
hello.java
|
20c28d06-2ece-11e5-905b-74de2bd44bed
|
<ide><path>ello.java
<del>20c1fbfc-2ece-11e5-905b-74de2bd44bed
<add>20c28d06-2ece-11e5-905b-74de2bd44bed
|
|
JavaScript
|
mit
|
c4de05f275da49b63e641344285f65892a8d95b0
| 0 |
alexcpendleton/sleep-tight,alexcpendleton/sleep-tight
|
/* eslint global-require: 1, flowtype-errors/show-errors: 0 */
/**
* This module executes inside of electron's main process. You can start
* electron renderer process from here and communicate with the other processes
* through IPC.
*
* When running `npm run build` or `npm run build-main`, this file is compiled to
* `./app/main.prod.js` using webpack. This gives us some performance wins.
*
* @flow
*/
import { app, BrowserWindow } from 'electron';
const MainThreadReceiver = require('./core/mainThreadReceiver.js');
let mainWindow = null;
if (process.env.NODE_ENV === 'production') {
const sourceMapSupport = require('source-map-support');
sourceMapSupport.install();
}
if (process.env.NODE_ENV === 'development' || process.env.DEBUG_PROD === 'true') {
require('electron-debug')();
const path = require('path');
const p = path.join(__dirname, '..', 'app', 'node_modules');
require('module').globalPaths.push(p);
}
const installExtensions = async () => {
const installer = require('electron-devtools-installer');
const forceDownload = !!process.env.UPGRADE_EXTENSIONS;
const extensions = [
'REACT_DEVELOPER_TOOLS',
'REDUX_DEVTOOLS'
];
return Promise
.all(extensions.map(name => installer.default(installer[name], forceDownload)))
.catch(console.log);
};
var hasBeenSetup = false;
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
//let mainWindow;
/**
 * Creates the menubar tray application and its (preloaded) popup window.
 *
 * @returns the menubar instance, which exposes `.window` (the BrowserWindow
 *          the caller attaches webContents listeners to)
 */
function initMenubar() {
    var menubar = require('menubar');
    var IconResolver = require('./core/IconResolver.js');
    var iconPath = new IconResolver(process.platform).resolve();
    var dimensions = getWindowDimensions();
    var mb = menubar({
        dir: __dirname,
        icon: iconPath,
        preloadWindow: true,
        width: dimensions.width,
        height: dimensions.height,
        index: `file://${__dirname}/app.html`
    });
    mb.on('ready', function ready() {
        console.log('app is ready');
    });
    mb.on('after-show', function afterShow() {
        if (hasBeenSetup) return;
        if (process.env.NODE_ENV === 'development') {
            // Open the DevTools.
            mb.window.webContents.openDevTools();
        }
        hasBeenSetup = true;
    });
    // Fix: return the menubar INSTANCE, not the module factory. The caller
    // reads `.window` off the return value, which only exists on `mb`;
    // returning the factory would leave `mainWindow` undefined.
    return mb;
}
/**
* Add event listeners...
*/
// Registers the main-process IPC receiver so renderer messages are handled.
function setupSignaling() {
    new MainThreadReceiver().setup();
}
/**
 * Window size for the menubar popup: a roomy window in development (leaves
 * space for DevTools), a compact one otherwise.
 * @returns {{width: number, height: number}}
 */
function getWindowDimensions() {
    const isDev = process.env.NODE_ENV === 'development';
    return isDev
        ? { width: 1024, height: 768 }
        : { width: 280, height: 240 };
}
// Bootstraps the app once Electron is ready: install dev-tools extensions
// (dev/debug builds only), create the menubar window, and wire up IPC
// signaling between main and renderer processes.
app.on('ready', async () => {
    if (process.env.NODE_ENV === 'development' || process.env.DEBUG_PROD === 'true') {
        await installExtensions();
    }

    const menubar = initMenubar();
    mainWindow = menubar.window;
    setupSignaling();

    // @TODO: Use 'ready-to-show' event
    // https://github.com/electron/electron/blob/master/docs/api/browser-window.md#using-ready-to-show-event
    mainWindow.webContents.on('did-finish-load', () => {
        if (!mainWindow) {
            throw new Error('"mainWindow" is not defined');
        }
    });

    // NOTE(review): empty 'closed' handler — confirm whether any cleanup
    // (e.g. nulling mainWindow) is needed here.
    mainWindow.on('closed', () => {
    });
});
|
app/main.dev.js
|
/* eslint global-require: 1, flowtype-errors/show-errors: 0 */
/**
* This module executes inside of electron's main process. You can start
* electron renderer process from here and communicate with the other processes
* through IPC.
*
* When running `npm run build` or `npm run build-main`, this file is compiled to
* `./app/main.prod.js` using webpack. This gives us some performance wins.
*
* @flow
*/
import { app, BrowserWindow } from 'electron';
const MainThreadReceiver = require('./core/mainThreadReceiver.js');
let mainWindow = null;
if (process.env.NODE_ENV === 'production') {
const sourceMapSupport = require('source-map-support');
sourceMapSupport.install();
}
if (process.env.NODE_ENV === 'development' || process.env.DEBUG_PROD === 'true') {
require('electron-debug')();
const path = require('path');
const p = path.join(__dirname, '..', 'app', 'node_modules');
require('module').globalPaths.push(p);
}
const installExtensions = async () => {
const installer = require('electron-devtools-installer');
const forceDownload = !!process.env.UPGRADE_EXTENSIONS;
const extensions = [
'REACT_DEVELOPER_TOOLS',
'REDUX_DEVTOOLS'
];
return Promise
.all(extensions.map(name => installer.default(installer[name], forceDownload)))
.catch(console.log);
};
var hasBeenSetup = false;
// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
//let mainWindow;
function initMenubar() {
var menubar = require('menubar');
var IconResolver = require('./core/IconResolver.js');
var iconPath = new IconResolver(process.platform).resolve();
var dimensions = getWindowDimensions();
var mb = menubar({
dir:__dirname,
icon:iconPath,
preloadWindow:true,
width: dimensions.width,
height: dimensions.height,
index:`file://${__dirname}/app.html`
});
mb.on('ready', function ready () {
console.log('app is ready');
});
mb.on('after-show', function afterShow() {
if(hasBeenSetup) return;
if(process.env.NODE_ENV === 'development') {
// Open the DevTools.
mb.window.webContents.openDevTools();
}
hasBeenSetup = true;
});
return menubar;
}
/**
* Add event listeners...
*/
function setupSignaling() {
new MainThreadReceiver().setup();
}
function getWindowDimensions() {
if(process.env.NODE_ENV === 'development') {
return {width: 1024, height: 768};
}
return {width:280, height:240};
}
app.on('ready', async () => {
if (process.env.NODE_ENV === 'development' || process.env.DEBUG_PROD === 'true') {
await installExtensions();
}
const menubar = initMenubar();
mainWindow = menubar.window;
// @TODO: Use 'ready-to-show' event
// https://github.com/electron/electron/blob/master/docs/api/browser-window.md#using-ready-to-show-event
mainWindow.webContents.on('did-finish-load', () => {
if (!mainWindow) {
throw new Error('"mainWindow" is not defined');
}
initMenubar();
setupSignaling();
});
mainWindow.on('closed', () => {
});
});
|
Fixed signaling
|
app/main.dev.js
|
Fixed signaling
|
<ide><path>pp/main.dev.js
<ide>
<ide> const menubar = initMenubar();
<ide> mainWindow = menubar.window;
<add> setupSignaling();
<ide>
<ide> // @TODO: Use 'ready-to-show' event
<ide> // https://github.com/electron/electron/blob/master/docs/api/browser-window.md#using-ready-to-show-event
<ide> if (!mainWindow) {
<ide> throw new Error('"mainWindow" is not defined');
<ide> }
<del> initMenubar();
<del> setupSignaling();
<ide> });
<ide>
<ide> mainWindow.on('closed', () => {
|
|
Java
|
apache-2.0
|
3f072aabcb97f336b476e637f93f6b274bf8ea85
| 0 |
scorpionvicky/elasticsearch,uschindler/elasticsearch,nknize/elasticsearch,GlenRSmith/elasticsearch,robin13/elasticsearch,gingerwizard/elasticsearch,gingerwizard/elasticsearch,GlenRSmith/elasticsearch,uschindler/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,GlenRSmith/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,nknize/elasticsearch,robin13/elasticsearch,gingerwizard/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,uschindler/elasticsearch,scorpionvicky/elasticsearch,gingerwizard/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,uschindler/elasticsearch,scorpionvicky/elasticsearch
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.CompositeBytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import java.io.IOException;
abstract class OutboundMessage extends NetworkMessage {
// Payload serialized after the transport header: a request, a response, or
// a RemoteTransportException for error responses.
private final Writeable message;

/**
 * @param threadContext thread context whose headers are written into the variable header
 * @param version       wire protocol version to serialize with
 * @param status        status byte (request/response, compress, handshake flags)
 * @param requestId     correlation id shared by a request and its response
 * @param message       payload to serialize
 */
OutboundMessage(ThreadContext threadContext, Version version, byte status, long requestId, Writeable message) {
    super(threadContext, version, status, requestId);
    this.message = message;
}
/**
 * Serializes this message — fixed header, variable header, and payload —
 * into the given stream and returns the resulting bytes. The fixed header
 * is written last (via {@code seek(0)}) once the content length is known.
 *
 * Fix: removed {@code storedContext.restore();} — {@code storedContext} is
 * not declared anywhere in this method (the try-with-resources variable is
 * named {@code ignore}), so the line did not compile. The stashed context
 * is restored automatically when the try block closes the resource.
 */
BytesReference serialize(BytesStreamOutput bytesStream) throws IOException {
    // Stash the thread context for the duration of serialization; the
    // resource restores the previous context on close.
    try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
        bytesStream.setVersion(version);
        // Reserve room for the fixed-size header, filled in at the end.
        bytesStream.skip(TcpHeader.headerSize(version));

        BytesReference reference;
        int variableHeaderLength = -1;
        final long preHeaderPosition = bytesStream.position();

        // Newer versions write the variable header uncompressed, before the
        // body, and record its length in the fixed header.
        if (version.onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
            writeVariableHeader(bytesStream);
            variableHeaderLength = Math.toIntExact(bytesStream.position() - preHeaderPosition);
        }

        // The compressible bytes stream will not close the underlying bytes stream.
        try (CompressibleBytesOutputStream stream =
                 new CompressibleBytesOutputStream(bytesStream, TransportStatus.isCompress(status))) {
            stream.setVersion(version);
            // Older versions write the variable header inside the (possibly
            // compressed) body instead.
            if (variableHeaderLength == -1) {
                writeVariableHeader(stream);
            }
            reference = writeMessage(stream);
        }

        // Rewind and fill in the fixed header now that sizes are known.
        bytesStream.seek(0);
        final int contentSize = reference.length() - TcpHeader.headerSize(version);
        TcpHeader.writeHeader(bytesStream, requestId, status, version, contentSize, variableHeaderLength);

        return reference;
    }
}
/**
 * Writes the variable-length header. The base implementation writes only the
 * thread context; {@link Request} extends it with features and action name.
 */
protected void writeVariableHeader(StreamOutput stream) throws IOException {
    threadContext.writeTo(stream);
}
/**
 * Writes the payload to the stream and returns the full serialized bytes.
 * {@code BytesTransportRequest} bodies are appended zero-copy (concatenated
 * after the stream's bytes instead of being written through it); exceptions
 * are serialized via {@code writeException}; everything else goes through
 * {@code writeTo}.
 */
protected BytesReference writeMessage(CompressibleBytesOutputStream stream) throws IOException {
    final BytesReference zeroCopyBuffer;
    if (message instanceof BytesTransportRequest) {
        // Write only the thin metadata through the stream; the raw bytes are
        // appended afterwards without copying them through the stream.
        BytesTransportRequest bRequest = (BytesTransportRequest) message;
        bRequest.writeThin(stream);
        zeroCopyBuffer = bRequest.bytes;
    } else if (message instanceof RemoteTransportException) {
        stream.writeException((RemoteTransportException) message);
        zeroCopyBuffer = BytesArray.EMPTY;
    } else {
        message.writeTo(stream);
        zeroCopyBuffer = BytesArray.EMPTY;
    }
    // we have to call materializeBytes() here before accessing the bytes. A CompressibleBytesOutputStream
    // might be implementing compression. And materializeBytes() ensures that some marker bytes (EOS marker)
    // are written. Otherwise we barf on the decompressing end when we read past EOF on purpose in the
    // #validateRequest method. this might be a problem in deflate after all but it's important to write
    // the marker bytes.
    final BytesReference message = stream.materializeBytes();
    if (zeroCopyBuffer.length() == 0) {
        return message;
    } else {
        return CompositeBytesReference.of(message, zeroCopyBuffer);
    }
}
// An outbound request message: carries the transport action name, which is
// appended to the variable header after the base fields.
static class Request extends OutboundMessage {
// The transport action name written into the variable header.
private final String action;
Request(ThreadContext threadContext, Writeable message, Version version, String action, long requestId,
boolean isHandshake, boolean compress) {
super(threadContext, version, setStatus(compress, isHandshake, message), requestId, message);
this.action = action;
}
@Override
protected void writeVariableHeader(StreamOutput stream) throws IOException {
super.writeVariableHeader(stream);
if (version.before(Version.V_8_0_0)) {
// empty features array (legacy wire-format field kept for pre-8.0 peers)
stream.writeStringArray(Strings.EMPTY_ARRAY);
}
stream.writeString(action);
}
// Computes the status byte: request flag, plus compress (only for payloads
// that may be compressed, see OutboundMessage#canCompress) and handshake
// flags as requested.
private static byte setStatus(boolean compress, boolean isHandshake, Writeable message) {
byte status = 0;
status = TransportStatus.setRequest(status);
if (compress && OutboundMessage.canCompress(message)) {
status = TransportStatus.setCompress(status);
}
if (isHandshake) {
status = TransportStatus.setHandshake(status);
}
return status;
}
}
// An outbound response message. Error responses (RemoteTransportException
// payloads) are flagged via the status byte so the receiving side
// deserializes an exception instead of a regular response.
static class Response extends OutboundMessage {
Response(ThreadContext threadContext, Writeable message, Version version, long requestId, boolean isHandshake, boolean compress) {
super(threadContext, version, setStatus(compress, isHandshake, message), requestId, message);
}
// Computes the status byte: response flag, plus error, compress and
// handshake flags as applicable.
private static byte setStatus(boolean compress, boolean isHandshake, Writeable message) {
byte status = 0;
status = TransportStatus.setResponse(status);
if (message instanceof RemoteTransportException) {
status = TransportStatus.setError(status);
}
if (compress) {
status = TransportStatus.setCompress(status);
}
if (isHandshake) {
status = TransportStatus.setHandshake(status);
}
return status;
}
}
/**
 * Returns whether the given payload may be compressed. Raw byte payloads
 * ({@code BytesTransportRequest}) are forwarded as-is and are never
 * compressed; every other payload type is eligible.
 */
private static boolean canCompress(Writeable message) {
    return !(message instanceof BytesTransportRequest);
}
}
|
server/src/main/java/org/elasticsearch/transport/OutboundMessage.java
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.CompositeBytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import java.io.IOException;
abstract class OutboundMessage extends NetworkMessage {
private final Writeable message;
OutboundMessage(ThreadContext threadContext, Version version, byte status, long requestId, Writeable message) {
super(threadContext, version, status, requestId);
this.message = message;
}
BytesReference serialize(BytesStreamOutput bytesStream) throws IOException {
storedContext.restore();
bytesStream.setVersion(version);
bytesStream.skip(TcpHeader.headerSize(version));
// The compressible bytes stream will not close the underlying bytes stream
BytesReference reference;
int variableHeaderLength = -1;
final long preHeaderPosition = bytesStream.position();
if (version.onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
writeVariableHeader(bytesStream);
variableHeaderLength = Math.toIntExact(bytesStream.position() - preHeaderPosition);
}
try (CompressibleBytesOutputStream stream = new CompressibleBytesOutputStream(bytesStream, TransportStatus.isCompress(status))) {
stream.setVersion(version);
if (variableHeaderLength == -1) {
writeVariableHeader(stream);
}
reference = writeMessage(stream);
}
bytesStream.seek(0);
final int contentSize = reference.length() - TcpHeader.headerSize(version);
TcpHeader.writeHeader(bytesStream, requestId, status, version, contentSize, variableHeaderLength);
return reference;
}
protected void writeVariableHeader(StreamOutput stream) throws IOException {
threadContext.writeTo(stream);
}
protected BytesReference writeMessage(CompressibleBytesOutputStream stream) throws IOException {
final BytesReference zeroCopyBuffer;
if (message instanceof BytesTransportRequest) {
BytesTransportRequest bRequest = (BytesTransportRequest) message;
bRequest.writeThin(stream);
zeroCopyBuffer = bRequest.bytes;
} else if (message instanceof RemoteTransportException) {
stream.writeException((RemoteTransportException) message);
zeroCopyBuffer = BytesArray.EMPTY;
} else {
message.writeTo(stream);
zeroCopyBuffer = BytesArray.EMPTY;
}
// we have to call materializeBytes() here before accessing the bytes. A CompressibleBytesOutputStream
// might be implementing compression. And materializeBytes() ensures that some marker bytes (EOS marker)
// are written. Otherwise we barf on the decompressing end when we read past EOF on purpose in the
// #validateRequest method. this might be a problem in deflate after all but it's important to write
// the marker bytes.
final BytesReference message = stream.materializeBytes();
if (zeroCopyBuffer.length() == 0) {
return message;
} else {
return CompositeBytesReference.of(message, zeroCopyBuffer);
}
}
static class Request extends OutboundMessage {
private final String action;
Request(ThreadContext threadContext, Writeable message, Version version, String action, long requestId,
boolean isHandshake, boolean compress) {
super(threadContext, version, setStatus(compress, isHandshake, message), requestId, message);
this.action = action;
}
@Override
protected void writeVariableHeader(StreamOutput stream) throws IOException {
super.writeVariableHeader(stream);
if (version.before(Version.V_8_0_0)) {
// empty features array
stream.writeStringArray(Strings.EMPTY_ARRAY);
}
stream.writeString(action);
}
private static byte setStatus(boolean compress, boolean isHandshake, Writeable message) {
byte status = 0;
status = TransportStatus.setRequest(status);
if (compress && OutboundMessage.canCompress(message)) {
status = TransportStatus.setCompress(status);
}
if (isHandshake) {
status = TransportStatus.setHandshake(status);
}
return status;
}
}
static class Response extends OutboundMessage {
Response(ThreadContext threadContext, Writeable message, Version version, long requestId, boolean isHandshake, boolean compress) {
super(threadContext, version, setStatus(compress, isHandshake, message), requestId, message);
}
private static byte setStatus(boolean compress, boolean isHandshake, Writeable message) {
byte status = 0;
status = TransportStatus.setResponse(status);
if (message instanceof RemoteTransportException) {
status = TransportStatus.setError(status);
}
if (compress) {
status = TransportStatus.setCompress(status);
}
if (isHandshake) {
status = TransportStatus.setHandshake(status);
}
return status;
}
}
private static boolean canCompress(Writeable message) {
return message instanceof BytesTransportRequest == false;
}
}
|
Restore ThreadContext after Serializing OutboundMessage (#57659)
Stash the current context before restoring the stored context on the IO thread
so that its thread context does not get polluted.
Closes #57554
|
server/src/main/java/org/elasticsearch/transport/OutboundMessage.java
|
Restore ThreadContext after Serializing OutboundMessage (#57659)
|
<ide><path>erver/src/main/java/org/elasticsearch/transport/OutboundMessage.java
<ide> }
<ide>
<ide> BytesReference serialize(BytesStreamOutput bytesStream) throws IOException {
<del> storedContext.restore();
<del> bytesStream.setVersion(version);
<del> bytesStream.skip(TcpHeader.headerSize(version));
<add> try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
<add> storedContext.restore();
<add> bytesStream.setVersion(version);
<add> bytesStream.skip(TcpHeader.headerSize(version));
<ide>
<del> // The compressible bytes stream will not close the underlying bytes stream
<del> BytesReference reference;
<del> int variableHeaderLength = -1;
<del> final long preHeaderPosition = bytesStream.position();
<add> // The compressible bytes stream will not close the underlying bytes stream
<add> BytesReference reference;
<add> int variableHeaderLength = -1;
<add> final long preHeaderPosition = bytesStream.position();
<ide>
<del> if (version.onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
<del> writeVariableHeader(bytesStream);
<del> variableHeaderLength = Math.toIntExact(bytesStream.position() - preHeaderPosition);
<add> if (version.onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
<add> writeVariableHeader(bytesStream);
<add> variableHeaderLength = Math.toIntExact(bytesStream.position() - preHeaderPosition);
<add> }
<add>
<add> try (CompressibleBytesOutputStream stream =
<add> new CompressibleBytesOutputStream(bytesStream, TransportStatus.isCompress(status))) {
<add> stream.setVersion(version);
<add> if (variableHeaderLength == -1) {
<add> writeVariableHeader(stream);
<add> }
<add> reference = writeMessage(stream);
<add> }
<add>
<add> bytesStream.seek(0);
<add> final int contentSize = reference.length() - TcpHeader.headerSize(version);
<add> TcpHeader.writeHeader(bytesStream, requestId, status, version, contentSize, variableHeaderLength);
<add> return reference;
<ide> }
<del>
<del> try (CompressibleBytesOutputStream stream = new CompressibleBytesOutputStream(bytesStream, TransportStatus.isCompress(status))) {
<del> stream.setVersion(version);
<del> if (variableHeaderLength == -1) {
<del> writeVariableHeader(stream);
<del> }
<del> reference = writeMessage(stream);
<del> }
<del>
<del> bytesStream.seek(0);
<del> final int contentSize = reference.length() - TcpHeader.headerSize(version);
<del> TcpHeader.writeHeader(bytesStream, requestId, status, version, contentSize, variableHeaderLength);
<del> return reference;
<ide> }
<ide>
<ide> protected void writeVariableHeader(StreamOutput stream) throws IOException {
|
|
Java
|
apache-2.0
|
f7b333f10583639ee3d0f2631fee41c577c60452
| 0 |
apache/solr,apache/solr,apache/solr,apache/solr,apache/solr
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.DocIdSetBuilder;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
/**
 * Randomized test for {@code ReqExclBulkScorer}: scores a required clause
 * minus an excluded clause and checks the collected matches against a
 * bit-set reference implementation (req AND NOT excl).
 */
public class TestReqExclBulkScorer extends LuceneTestCase {
public void testRandom() throws IOException {
final int iters = atLeast(10);
for (int iter = 0; iter < iters; ++iter) {
doTestRandom();
}
}
// One random round: build random required/excluded doc id sets, run the
// scorer either over the whole doc range or in random windows, and compare
// against the expected bit set.
public void doTestRandom() throws IOException {
final int maxDoc = TestUtil.nextInt(random(), 1, 1000);
DocIdSetBuilder reqBuilder = new DocIdSetBuilder(maxDoc);
DocIdSetBuilder exclBuilder = new DocIdSetBuilder(maxDoc);
final int numIncludedDocs = TestUtil.nextInt(random(), 1, maxDoc);
final int numExcludedDocs = TestUtil.nextInt(random(), 1, maxDoc);
DocIdSetBuilder.BulkAdder reqAdder = reqBuilder.grow(numIncludedDocs);
for (int i = 0; i < numIncludedDocs; ++i) {
reqAdder.add(random().nextInt(maxDoc));
}
DocIdSetBuilder.BulkAdder exclAdder = exclBuilder.grow(numExcludedDocs);
for (int i = 0; i < numExcludedDocs; ++i) {
exclAdder.add(random().nextInt(maxDoc));
}
final DocIdSet req = reqBuilder.build();
final DocIdSet excl = exclBuilder.build();
// Minimal bulk scorer over the required set; honors acceptDocs and the
// [min, max) window contract of BulkScorer#score.
final BulkScorer reqBulkScorer = new BulkScorer() {
final DocIdSetIterator iterator = req.iterator();
@Override
public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException {
int doc = iterator.docID();
if (iterator.docID() < min) {
doc = iterator.advance(min);
}
while (doc < max) {
if (acceptDocs == null || acceptDocs.get(doc)) {
collector.collect(doc);
}
doc = iterator.nextDoc();
}
return doc;
}
@Override
public long cost() {
return iterator.cost();
}
};
ReqExclBulkScorer reqExcl = new ReqExclBulkScorer(reqBulkScorer, excl.iterator());
final FixedBitSet actualMatches = new FixedBitSet(maxDoc);
if (random().nextBoolean()) {
// Score the whole doc id space in a single call.
reqExcl.score(new LeafCollector() {
@Override
public void setScorer(Scorer scorer) throws IOException {}
@Override
public void collect(int doc) throws IOException {
actualMatches.set(doc);
}
}, null);
} else {
// Score in small random windows to exercise the windowed contract.
int next = 0;
while (next < maxDoc) {
final int min = next;
final int max = min + random().nextInt(10);
next = reqExcl.score(new LeafCollector() {
@Override
public void setScorer(Scorer scorer) throws IOException {}
@Override
public void collect(int doc) throws IOException {
actualMatches.set(doc);
}
}, null, min, max);
assertTrue(next >= max);
}
}
// Reference implementation: expected = req AND NOT excl.
final FixedBitSet expectedMatches = new FixedBitSet(maxDoc);
expectedMatches.or(req.iterator());
FixedBitSet excludedSet = new FixedBitSet(maxDoc);
excludedSet.or(excl.iterator());
expectedMatches.andNot(excludedSet);
assertArrayEquals(expectedMatches.getBits(), actualMatches.getBits());
}
}
|
lucene/core/src/test/org/apache/lucene/search/TestReqExclBulkScorer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.DocIdSetBuilder;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
public class TestReqExclBulkScorer extends LuceneTestCase {
public void testRandom() throws IOException {
final int iters = atLeast(10);
for (int iter = 0; iter < iters; ++iter) {
doTestRandom();
}
}
public void doTestRandom() throws IOException {
final int maxDoc = TestUtil.nextInt(random(), 1, 1000);
DocIdSetBuilder reqBuilder = new DocIdSetBuilder(maxDoc);
DocIdSetBuilder exclBuilder = new DocIdSetBuilder(maxDoc);
final int numIncludedDocs = TestUtil.nextInt(random(), 1, maxDoc);
final int numExcludedDocs = TestUtil.nextInt(random(), 1, maxDoc);
DocIdSetBuilder.BulkAdder reqAdder = reqBuilder.grow(numIncludedDocs);
for (int i = 0; i < numIncludedDocs; ++i) {
reqAdder.add(random().nextInt(maxDoc));
}
DocIdSetBuilder.BulkAdder exclAdder = exclBuilder.grow(numIncludedDocs);
for (int i = 0; i < numExcludedDocs; ++i) {
exclAdder.add(random().nextInt(maxDoc));
}
final DocIdSet req = reqBuilder.build();
final DocIdSet excl = exclBuilder.build();
final BulkScorer reqBulkScorer = new BulkScorer() {
final DocIdSetIterator iterator = req.iterator();
@Override
public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException {
int doc = iterator.docID();
if (iterator.docID() < min) {
doc = iterator.advance(min);
}
while (doc < max) {
if (acceptDocs == null || acceptDocs.get(doc)) {
collector.collect(doc);
}
doc = iterator.nextDoc();
}
return doc;
}
@Override
public long cost() {
return iterator.cost();
}
};
ReqExclBulkScorer reqExcl = new ReqExclBulkScorer(reqBulkScorer, excl.iterator());
final FixedBitSet actualMatches = new FixedBitSet(maxDoc);
if (random().nextBoolean()) {
reqExcl.score(new LeafCollector() {
@Override
public void setScorer(Scorer scorer) throws IOException {}
@Override
public void collect(int doc) throws IOException {
actualMatches.set(doc);
}
}, null);
} else {
int next = 0;
while (next < maxDoc) {
final int min = next;
final int max = min + random().nextInt(10);
next = reqExcl.score(new LeafCollector() {
@Override
public void setScorer(Scorer scorer) throws IOException {}
@Override
public void collect(int doc) throws IOException {
actualMatches.set(doc);
}
}, null, min, max);
assertTrue(next >= max);
}
}
final FixedBitSet expectedMatches = new FixedBitSet(maxDoc);
expectedMatches.or(req.iterator());
FixedBitSet excludedSet = new FixedBitSet(maxDoc);
excludedSet.or(excl.iterator());
expectedMatches.andNot(excludedSet);
assertArrayEquals(expectedMatches.getBits(), actualMatches.getBits());
}
}
|
LUCENE-7264: Fix test bug in TestReqExclBulkScorer.
|
lucene/core/src/test/org/apache/lucene/search/TestReqExclBulkScorer.java
|
LUCENE-7264: Fix test bug in TestReqExclBulkScorer.
|
<ide><path>ucene/core/src/test/org/apache/lucene/search/TestReqExclBulkScorer.java
<ide> for (int i = 0; i < numIncludedDocs; ++i) {
<ide> reqAdder.add(random().nextInt(maxDoc));
<ide> }
<del> DocIdSetBuilder.BulkAdder exclAdder = exclBuilder.grow(numIncludedDocs);
<add> DocIdSetBuilder.BulkAdder exclAdder = exclBuilder.grow(numExcludedDocs);
<ide> for (int i = 0; i < numExcludedDocs; ++i) {
<ide> exclAdder.add(random().nextInt(maxDoc));
<ide> }
|
|
Java
|
lgpl-2.1
|
5cdfb14d1604de7d7c00ff1fcdb6e3d1167ca32e
| 0 |
allati/eclipse-rbe
|
/*
* Copyright (C) 2003, 2004 Pascal Essiembre, Essiembre Consultant Inc.
*
* This file is part of Essiembre ResourceBundle Editor.
*
* Essiembre ResourceBundle Editor is free software; you can redistribute it
* and/or modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* Essiembre ResourceBundle Editor is distributed in the hope that it will be
* useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Essiembre ResourceBundle Editor; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package com.essiembre.eclipse.rbe.ui.preferences;
import org.eclipse.core.runtime.Preferences;
import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;
import com.essiembre.eclipse.rbe.RBEPlugin;
import com.essiembre.eclipse.rbe.model.workbench.RBEPreferences;
/**
 * Initializes default preferences for the ResourceBundle Editor.
 * @author Pascal Essiembre ([email protected])
 * @version $Author$ $Revision$ $Date$
 */
public class RBEPreferenceInitializer extends
        AbstractPreferenceInitializer {

    /**
     * Constructor.
     */
    public RBEPreferenceInitializer() {
        super();
    }

    /**
     * Sets the default value for every ResourceBundle Editor preference.
     *
     * @see org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer
     *      #initializeDefaultPreferences()
     */
    public void initializeDefaultPreferences() {
        Preferences prefs = RBEPlugin.getDefault().getPluginPreferences();

        // General
        prefs.setDefault(RBEPreferences.CONVERT_ENCODED_TO_UNICODE, true);
        prefs.setDefault(RBEPreferences.FIELD_TAB_INSERTS, true);
        prefs.setDefault(RBEPreferences.KEY_TREE_HIERARCHICAL, true);
        prefs.setDefault(RBEPreferences.KEY_TREE_EXPANDED, true);

        // Formatting
        prefs.setDefault(RBEPreferences.CONVERT_UNICODE_TO_ENCODED, true);
        prefs.setDefault(RBEPreferences.CONVERT_UNICODE_TO_ENCODED_UPPER, true);
        prefs.setDefault(RBEPreferences.SPACES_AROUND_EQUAL_SIGNS, true);
        prefs.setDefault(RBEPreferences.KEY_GROUP_SEPARATOR, "."); //$NON-NLS-1$
        prefs.setDefault(RBEPreferences.ALIGN_EQUAL_SIGNS, true);
        prefs.setDefault(RBEPreferences.SHOW_GENERATOR, true);
        // FIX: removed a duplicated setDefault(KEY_TREE_HIERARCHICAL, true)
        // that already appears in the "General" section above (setDefault is
        // idempotent, so behavior is unchanged).
        prefs.setDefault(RBEPreferences.GROUP_KEYS, true);
        prefs.setDefault(RBEPreferences.GROUP_LEVEL_DEEP, 1);
        prefs.setDefault(RBEPreferences.GROUP_LINE_BREAKS, 1);
        prefs.setDefault(RBEPreferences.GROUP_ALIGN_EQUAL_SIGNS, true);
        prefs.setDefault(RBEPreferences.WRAP_CHAR_LIMIT, 80);
        prefs.setDefault(RBEPreferences.WRAP_INDENT_SPACES, 8);
        prefs.setDefault(RBEPreferences.NEW_LINE_TYPE,
                RBEPreferences.NEW_LINE_UNIX);

        // Reporting/Performance
        prefs.setDefault(RBEPreferences.REPORT_MISSING_VALUES, true);
        prefs.setDefault(RBEPreferences.REPORT_DUPL_VALUES, true);
        prefs.setDefault(RBEPreferences.REPORT_SIM_VALUES_WORD_COMPARE, true);
        prefs.setDefault(RBEPreferences.REPORT_SIM_VALUES_PRECISION, 0.75d);

        // Editor: false means the key tree is shown (not suppressed).
        prefs.setDefault(RBEPreferences.NO_TREE_IN_EDITOR, false);
    }
}
|
src/com/essiembre/eclipse/rbe/ui/preferences/RBEPreferenceInitializer.java
|
/*
* Copyright (C) 2003, 2004 Pascal Essiembre, Essiembre Consultant Inc.
*
* This file is part of Essiembre ResourceBundle Editor.
*
* Essiembre ResourceBundle Editor is free software; you can redistribute it
* and/or modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* Essiembre ResourceBundle Editor is distributed in the hope that it will be
* useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Essiembre ResourceBundle Editor; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*/
package com.essiembre.eclipse.rbe.ui.preferences;
import org.eclipse.core.runtime.Preferences;
import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;
import com.essiembre.eclipse.rbe.RBEPlugin;
import com.essiembre.eclipse.rbe.model.workbench.RBEPreferences;
/**
 * Initializes default preferences for the ResourceBundle Editor.
 * @author Pascal Essiembre ([email protected])
 * @version $Author$ $Revision$ $Date$
 */
public class RBEPreferenceInitializer extends
        AbstractPreferenceInitializer {

    /**
     * Constructor.
     */
    public RBEPreferenceInitializer() {
        super();
    }

    /**
     * Sets the default value for every ResourceBundle Editor preference.
     *
     * @see org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer
     *      #initializeDefaultPreferences()
     */
    public void initializeDefaultPreferences() {
        Preferences prefs = RBEPlugin.getDefault().getPluginPreferences();

        // General
        prefs.setDefault(RBEPreferences.CONVERT_ENCODED_TO_UNICODE, true);
        prefs.setDefault(RBEPreferences.FIELD_TAB_INSERTS, true);
        prefs.setDefault(RBEPreferences.KEY_TREE_HIERARCHICAL, true);
        prefs.setDefault(RBEPreferences.KEY_TREE_EXPANDED, true);

        // Formatting
        prefs.setDefault(RBEPreferences.CONVERT_UNICODE_TO_ENCODED, true);
        prefs.setDefault(RBEPreferences.CONVERT_UNICODE_TO_ENCODED_UPPER, true);
        prefs.setDefault(RBEPreferences.SPACES_AROUND_EQUAL_SIGNS, true);
        prefs.setDefault(RBEPreferences.KEY_GROUP_SEPARATOR, "."); //$NON-NLS-1$
        prefs.setDefault(RBEPreferences.ALIGN_EQUAL_SIGNS, true);
        prefs.setDefault(RBEPreferences.SHOW_GENERATOR, true);
        // FIX: removed a duplicated setDefault(KEY_TREE_HIERARCHICAL, true)
        // that already appears in the "General" section above (setDefault is
        // idempotent, so behavior is unchanged).
        prefs.setDefault(RBEPreferences.GROUP_KEYS, true);
        prefs.setDefault(RBEPreferences.GROUP_LEVEL_DEEP, 1);
        prefs.setDefault(RBEPreferences.GROUP_LINE_BREAKS, 1);
        prefs.setDefault(RBEPreferences.GROUP_ALIGN_EQUAL_SIGNS, true);
        prefs.setDefault(RBEPreferences.WRAP_CHAR_LIMIT, 80);
        prefs.setDefault(RBEPreferences.WRAP_INDENT_SPACES, 8);
        prefs.setDefault(RBEPreferences.NEW_LINE_TYPE,
                RBEPreferences.NEW_LINE_UNIX);

        // Reporting/Performance
        prefs.setDefault(RBEPreferences.REPORT_MISSING_VALUES, true);
        prefs.setDefault(RBEPreferences.REPORT_DUPL_VALUES, true);
        prefs.setDefault(RBEPreferences.REPORT_SIM_VALUES_WORD_COMPARE, true);
        prefs.setDefault(RBEPreferences.REPORT_SIM_VALUES_PRECISION, 0.75d);

        // FIX: previously no default was registered for NO_TREE_IN_EDITOR,
        // leaving the preference undefined. false means the key tree is shown
        // (not suppressed) in the editor.
        prefs.setDefault(RBEPreferences.NO_TREE_IN_EDITOR, false);
    }
}
|
~ Added default value for 'getNoTreeInEditor' which means: do not suppress the tree in the editor.
|
src/com/essiembre/eclipse/rbe/ui/preferences/RBEPreferenceInitializer.java
|
~ Added default value for 'getNoTreeInEditor' which means: do not suppress the tree in the editor.
|
<ide><path>rc/com/essiembre/eclipse/rbe/ui/preferences/RBEPreferenceInitializer.java
<ide> prefs.setDefault(RBEPreferences.REPORT_DUPL_VALUES, true);
<ide> prefs.setDefault(RBEPreferences.REPORT_SIM_VALUES_WORD_COMPARE, true);
<ide> prefs.setDefault(RBEPreferences.REPORT_SIM_VALUES_PRECISION, 0.75d);
<add>
<add> prefs.setDefault(RBEPreferences.NO_TREE_IN_EDITOR, false);
<add>
<ide> }
<ide>
<ide> }
|
|
Java
|
apache-2.0
|
33a458e27f34ce243edfc11558a2c6d1193c15fa
| 0 |
echsylon/atlantis
|
package com.echsylon.atlantis;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import okio.Okio;
import okio.Source;
import static com.echsylon.atlantis.LogUtils.info;
import static com.echsylon.atlantis.Utils.getNative;
import static com.echsylon.atlantis.Utils.getNonNull;
/**
* This class contains the full description of a mock response.
*/
@SuppressWarnings({"WeakerAccess", "unused"})
public class MockResponse {
/**
 * This interface describes the mandatory feature set for providing a mocked
 * response. Implementing classes are responsible for picking a response
 * from a given list of available responses. Any internal state (e.g. a
 * state machine that cycles through responses) is also the responsibility
 * of the implementing class.
 */
public interface Filter {
/**
 * Returns a mocked response of choice. May be null, in which case the
 * calling logic is responsible for deciding what response to serve.
 *
 * @param mockResponses All available responses to pick a candidate from.
 *                      Null and empty lists are acceptable.
 * @return The response candidate, or null.
 */
MockResponse findResponse(final List<MockResponse> mockResponses);
}
/**
 * This interface describes the mandatory feature set for providing a data
 * stream for the body content described by a given text (e.g. inline text
 * or a "file://..." pointer).
 */
public interface SourceHelper {
/**
 * Returns a stream from which the response body content can be read.
 *
 * @param text The description of the content to stream.
 * @return A data stream, or null if the content cannot be opened.
 */
Source open(final String text);
}
/**
 * This class offers means of building a mocked response configuration
 * directly from code (as opposed to configuring one in a JSON file).
 */
public static final class Builder {
// The response instance being configured; returned as-is by build().
private final MockResponse mockResponse;
/**
 * Creates a new builder based on an uninitialized response object.
 */
public Builder() {
mockResponse = new MockResponse();
}
/**
 * Adds a header to the response being built. Any existing keys will be
 * overwritten.
 *
 * @param key The header key.
 * @param value The header value.
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder addHeader(final String key, final String value) {
mockResponse.headers.put(key, value);
return this;
}
/**
 * Adds all headers to the response being built where neither the key
 * nor the value is empty or null. Any existing keys will be
 * overwritten.
 *
 * NOTE(review): no filtering happens in this method itself; the
 * empty/null filtering promised above is presumably implemented by
 * HeaderManager#putAll — confirm.
 *
 * @param headers The headers to add.
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder addHeaders(final Map<String, String> headers) {
mockResponse.headers.putAll(headers);
return this;
}
/**
 * Adds a setting to the response being built. Any existing keys will be
 * overwritten.
 *
 * @param key The setting key.
 * @param value The setting value.
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder addSetting(final String key, final String value) {
mockResponse.settings.put(key, value);
return this;
}
/**
 * Adds all settings to the response being built where neither the key
 * nor the value is empty or null. Any existing keys will be
 * overwritten.
 *
 * NOTE(review): no filtering happens in this method itself; the
 * empty/null filtering promised above is presumably implemented by
 * SettingsManager#putAll — confirm.
 *
 * @param settings The settings to add.
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder addSettings(final Map<String, String> settings) {
mockResponse.settings.putAll(settings);
return this;
}
/**
 * Sets the status of the response being built. Validates neither the
 * given status code nor the phrase. It's up to the caller to ensure
 * they match and make sense in the given context.
 *
 * @param code The new HTTP status code.
 * @param phrase The corresponding human readable status text (e.g.
 * "OK", "Not found", etc).
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder setStatus(final int code, final String phrase) {
mockResponse.code = code;
mockResponse.phrase = phrase;
return this;
}
/**
 * Sets a string as the body content of the response being built.
 *
 * NOTE(review): unlike the other setBody overloads, this only sets the
 * content description (text); no source helper is installed here —
 * presumably one is supplied later via setSourceHelperIfAbsent. Confirm.
 *
 * @param string The new response body content.
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder setBody(final String string) {
mockResponse.text = string;
return this;
}
/**
 * Sets a byte array as the body content of the response being built.
 *
 * @param bytes The new response body content.
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder setBody(final byte[] bytes) {
mockResponse.sourceHelper = text -> Okio.source(new ByteArrayInputStream(bytes));
return this;
}
/**
 * Sets a file (the content of the file to be more specific) as the body
 * content of the response being built. If the file cannot be found when
 * the body is eventually opened, the failure is logged and a null
 * source is produced.
 *
 * @param file The new response body content.
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder setBody(final File file) {
mockResponse.sourceHelper = text -> {
try {
return Okio.source(file);
} catch (FileNotFoundException e) {
info(e, "Couldn't open source: %s", file.getAbsolutePath());
return null;
}
};
return this;
}
/**
 * Sets an InputStream (the data provided by the input stream to be more
 * specific) as the body content of the response being built.
 *
 * @param inputStream The new response body content.
 * @return This builder instance, allowing chaining of method calls.
 */
public Builder setBody(final InputStream inputStream) {
mockResponse.sourceHelper = text -> Okio.source(inputStream);
return this;
}
/**
 * Returns a sealed response object which can not be further built on.
 *
 * NOTE(review): this returns the live instance held by the builder;
 * calling further builder methods afterwards will still mutate the
 * "built" response.
 *
 * @return The final response object.
 */
public MockResponse build() {
return mockResponse;
}
}
// Describes the body content; may be inline text or a pointer such as
// "file://..." that the source helper knows how to open. Null until set.
private String text = null;
// The HTTP status code. Null until set (code() then reports 0).
private Integer code = null;
// The human readable HTTP status phrase, e.g. "OK". Null until set.
private String phrase = null;
// The mocked response headers.
private HeaderManager headers = null;
// Behavior settings for serving this response.
private SettingsManager settings = null;
// Opens a data stream for the content described by 'text'. Transient so it
// is never part of any serialized configuration.
private transient SourceHelper sourceHelper = null;
// Package-private constructor: starts with empty header/settings managers.
MockResponse() {
headers = new HeaderManager();
settings = new SettingsManager();
}
/**
 * Returns the mocked response code.
 *
 * @return The mocked HTTP response code, or 0 when no code has been set
 *         (presumably {@code getNative} substitutes the given default for
 *         a null boxed value — confirm in Utils).
 */
public int code() {
return getNative(code, 0);
}
/**
* Returns the mocked response phrase.
*
* @return The mocked HTTP response phrase.
*/
public String phrase() {
    // getNonNull presumably substitutes "" for an unset phrase, so callers
    // never see null — see Utils.getNonNull.
    return getNonNull(phrase, "");
}
/**
* Returns an unmodifiable map of the mocked response headers. NOTE! that
* {@code Atlantis} may internally decide to add, remove or overwrite some
* headers, depending on the characteristics of the response itself.
* "Content-Length" would be an example of this.
*
* @return The unmodifiable response headers map as per definition in {@link
* Collections#unmodifiableMap(Map)}.
*/
public Map<String, String> headers() {
    // HeaderManager is (or extends) a Map<String, String>; wrap it so callers
    // cannot mutate the mock's headers through this accessor.
    return Collections.unmodifiableMap(headers);
}
/**
* Returns the body content description of this mock response. NOTE! that
* this isn't necessarily the actual data, but rather a description of how
* to get hold of the data, e.g. "file://path/to/file.json" is a perfectly
* valid body content descriptor.
*
* @return A string that describes the mock response body content.
*/
public String body() {
    // Defensive default: never returns null, even when no text body was set.
    return getNonNull(text, "");
}
/**
* Returns the mocked response body stream.
*
* @return The response body.
*/
Source source() {
    // No helper configured means no body stream to hand out.
    if (sourceHelper == null)
        return null;
    return sourceHelper.open(text);
}
/**
* Returns the response behavior settings.
*
* @return The response settings.
*/
SettingsManager settings() {
    // Intentionally package-private; settings are not part of the public API.
    return settings;
}
/**
* Sets the source helper implementation that will help open a data stream
* for any response body content.
*
* @param sourceHelper The source open helper.
*/
void setSourceHelperIfAbsent(final SourceHelper sourceHelper) {
    // First helper wins: a helper installed explicitly (e.g. by one of the
    // Builder.setBody overloads) is never overwritten by a later default.
    if (this.sourceHelper == null)
        this.sourceHelper = sourceHelper;
}
/**
* Returns a boolean flag indicating whether there is a "Content-Length"
* header with a value greater than 0.
*
* @return Boolean true if there is a "Content-Length" header and a
* corresponding value greater than 0, false otherwise.
*/
boolean isExpectedToHaveBody() {
    // Pure delegation; the header bookkeeping lives in HeaderManager.
    return headers.isExpectedToHaveBody();
}
/**
* Returns a boolean flag indicating whether there is an "Expect" header
* with a "100-continue" value.
*
* @return Boolean true if there is an "Expect" header and a corresponding
* "100-Continue" value, false otherwise.
*/
boolean isExpectedToContinue() {
    // Pure delegation; the header bookkeeping lives in HeaderManager.
    return headers.isExpectedToContinue();
}
/**
* Returns a boolean flag indicating whether there is a "Transfer-Encoding"
* header with a "chunked" value.
*
* @return Boolean true if there is a "Transfer-Encoding" header and a
* corresponding "chunked" value, false otherwise.
*/
boolean isExpectedToBeChunked() {
    // Pure delegation; the header bookkeeping lives in HeaderManager.
    return headers.isExpectedToBeChunked();
}
}
|
library/src/main/java/com/echsylon/atlantis/MockResponse.java
|
package com.echsylon.atlantis;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import okio.Okio;
import okio.Source;
import static com.echsylon.atlantis.LogUtils.info;
import static com.echsylon.atlantis.Utils.getNative;
import static com.echsylon.atlantis.Utils.getNonNull;
/**
* This class contains the full description of a mock response.
*/
@SuppressWarnings({"WeakerAccess", "unused"})
public class MockResponse {

    /**
     * This interface describes the mandatory feature set to provide a mocked
     * response. Implementing classes are responsible for picking a response
     * from a given list of available responses. Any state machine etc is also
     * in the scope of the implementing class.
     */
    public interface Filter {

        /**
         * Returns a mocked response of choice. May be null, in which case the
         * calling logic is responsible for deciding what response to serve.
         *
         * @param mockResponses All available response to pick a candidate from.
         *                      Null and empty lists are acceptable.
         * @return The response candidate.
         */
        MockResponse findResponse(final List<MockResponse> mockResponses);
    }

    /**
     * This interface describes the mandatory feature set to provide a data
     * stream for content described in the given text.
     */
    public interface SourceHelper {

        /**
         * Returns a stream from which the response body content can be read.
         *
         * @param text The description of the content to stream.
         * @return A data stream or null.
         */
        Source open(final String text);
    }

    /**
     * This class offers means of building a mocked response configuration
     * directly from code (as opposed to configure one in a JSON file).
     */
    public static final class Builder {
        private final MockResponse mockResponse;

        /**
         * Creates a new builder based on an uninitialized response object.
         */
        public Builder() {
            mockResponse = new MockResponse();
        }

        /**
         * Adds a header to the response being built. Any existing keys will be
         * overwritten.
         *
         * @param key   The header key.
         * @param value The header value.
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder addHeader(final String key, final String value) {
            mockResponse.headers.put(key, value);
            return this;
        }

        /**
         * Adds all headers to the response being built where neither the key
         * nor the value is empty or null. Any existing keys will be
         * overwritten.
         *
         * @param headers The headers to add.
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder addHeaders(final Map<String, String> headers) {
            mockResponse.headers.putAll(headers);
            return this;
        }

        /**
         * Adds a setting to the response being built. Any existing keys will
         * be overwritten.
         *
         * @param key   The setting key.
         * @param value The setting value.
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder addSetting(final String key, final String value) {
            mockResponse.settings.put(key, value);
            return this;
        }

        /**
         * Adds all settings to the response being built where neither the key
         * nor the value is empty or null. Any existing keys will be
         * overwritten.
         *
         * @param settings The settings to add.
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder addSettings(final Map<String, String> settings) {
            mockResponse.settings.putAll(settings);
            return this;
        }

        /**
         * Sets the status of the response being built. Doesn't validate
         * neither the given status code nor the phrase. It's up to the caller
         * to ensure they match and make sense in the given context.
         *
         * @param code   The new HTTP status code.
         * @param phrase The corresponding human readable status text (e.g.
         *               "OK", "Not found", etc).
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder setStatus(final int code, final String phrase) {
            mockResponse.code = code;
            mockResponse.phrase = phrase;
            return this;
        }

        /**
         * Sets a string as the body content of the response being built.
         *
         * @param string The new response body content.
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder setBody(final String string) {
            mockResponse.text = string;
            return this;
        }

        /**
         * Sets a byte array as the body content of the response being built.
         *
         * @param bytes The new response body content.
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder setBody(final byte[] bytes) {
            mockResponse.sourceHelper = text -> Okio.source(new ByteArrayInputStream(bytes));
            return this;
        }

        /**
         * Sets a file (the content of the file to be more specific) as the
         * body content of the response being built.
         *
         * @param file The new response body content.
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder setBody(final File file) {
            // Opening is deferred until the body is streamed; a missing file
            // is logged and surfaces as a null source rather than an exception.
            mockResponse.sourceHelper = text -> {
                try {
                    return Okio.source(file);
                } catch (FileNotFoundException e) {
                    info(e, "Couldn't open source: %s", file.getAbsolutePath());
                    return null;
                }
            };
            return this;
        }

        /**
         * Sets an InputStream (the data provided by the input stream to be
         * more specific) as the body content of the response being built.
         *
         * @param inputStream The new response body content.
         * @return This builder instance, allowing chaining of method calls.
         */
        public Builder setBody(final InputStream inputStream) {
            mockResponse.sourceHelper = text -> Okio.source(inputStream);
            return this;
        }

        /**
         * Returns a sealed response object which can not be further built on.
         *
         * @return The final response object.
         */
        public MockResponse build() {
            return mockResponse;
        }
    }

    // Body text; doubles as the content descriptor handed to SourceHelper.open().
    private String text = null;
    // Boxed so "unset" is distinguishable from an explicit 0.
    private Integer code = null;
    private String phrase = null;
    private HeaderManager headers = null;
    private SettingsManager settings = null;
    // transient: excluded from serialization; restored via setSourceHelperIfAbsent().
    private transient SourceHelper sourceHelper = null;

    MockResponse() {
        headers = new HeaderManager();
        settings = new SettingsManager();
    }

    /**
     * Returns the mocked response code.
     *
     * @return The mocked HTTP response code.
     */
    public int code() {
        return getNative(code, 0);
    }

    /**
     * Returns the mocked response phrase.
     *
     * @return The mocked HTTP response phrase. Never null.
     */
    public String phrase() {
        return getNonNull(phrase, "");
    }

    /**
     * Returns an unmodifiable map of the mocked response headers. NOTE! that
     * {@code Atlantis} may internally decide to add, remove or overwrite some
     * headers, depending on the characteristics of the response itself.
     * "Content-Length" would be an example of this.
     *
     * @return The unmodifiable response headers map as per definition in {@link
     * Collections#unmodifiableMap(Map)}.
     */
    public Map<String, String> headers() {
        return Collections.unmodifiableMap(headers);
    }

    /**
     * Returns the body content description of this mock response. NOTE! that
     * this isn't necessarily the actual data, but rather a description of how
     * to get hold of the data, e.g. "file://path/to/file.json" is a perfectly
     * valid body content descriptor.
     *
     * @return A string that describes the mock response body content. Never
     * null.
     */
    public String body() {
        // BUG FIX: previously returned the raw 'text' field, which is null
        // until a string body is set, forcing every caller to null-check.
        // Fall back to the empty string, consistent with phrase().
        return getNonNull(text, "");
    }

    /**
     * Returns the mocked response body stream.
     *
     * @return The response body, or null when no source helper is configured.
     */
    Source source() {
        return sourceHelper != null ?
                sourceHelper.open(text) :
                null;
    }

    /**
     * Returns the response behavior settings.
     *
     * @return The response settings.
     */
    SettingsManager settings() {
        return settings;
    }

    /**
     * Sets the source helper implementation that will help open a data stream
     * for any response body content. A helper that is already present (e.g.
     * installed by one of the {@code Builder.setBody} overloads) is never
     * overwritten.
     *
     * @param sourceHelper The source open helper.
     */
    void setSourceHelperIfAbsent(final SourceHelper sourceHelper) {
        if (this.sourceHelper == null)
            this.sourceHelper = sourceHelper;
    }

    /**
     * Returns a boolean flag indicating whether there is a "Content-Length"
     * header with a value greater than 0.
     *
     * @return Boolean true if there is a "Content-Length" header and a
     * corresponding value greater than 0, false otherwise.
     */
    boolean isExpectedToHaveBody() {
        return headers.isExpectedToHaveBody();
    }

    /**
     * Returns a boolean flag indicating whether there is an "Expect" header
     * with a "100-continue" value.
     *
     * @return Boolean true if there is an "Expect" header and a corresponding
     * "100-Continue" value, false otherwise.
     */
    boolean isExpectedToContinue() {
        return headers.isExpectedToContinue();
    }

    /**
     * Returns a boolean flag indicating whether there is a "Transfer-Encoding"
     * header with a "chunked" value.
     *
     * @return Boolean true if there is a "Transfer-Encoding" header and a
     * corresponding "chunked" value, false otherwise.
     */
    boolean isExpectedToBeChunked() {
        return headers.isExpectedToBeChunked();
    }
}
|
Prevents returning null pointer mock response body
|
library/src/main/java/com/echsylon/atlantis/MockResponse.java
|
Prevents returning null pointer mock response body
|
<ide><path>ibrary/src/main/java/com/echsylon/atlantis/MockResponse.java
<ide> * @return A string that describes the mock response body content.
<ide> */
<ide> public String body() {
<del> return text;
<add> return getNonNull(text, "");
<ide> }
<ide>
<ide> /**
|
|
Java
|
bsd-3-clause
|
46d3016014d7698608881c7a619cb7686c1ff45d
| 0 |
dries007/TFCraft-NEIplugin
|
/*
* Copyright (c) 2014 Dries007
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted (subject to the limitations in the
* disclaimer below) provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Dries007 nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
* GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
* HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.dries007.tfcnei.recipeHandlers;
import codechicken.nei.PositionedStack;
import codechicken.nei.recipe.TemplateRecipeHandler;
import com.bioxx.tfc.Items.ItemFlatGeneric;
import com.bioxx.tfc.Items.ItemLooseRock;
import com.bioxx.tfc.Reference;
import com.bioxx.tfc.TFCItems;
import com.bioxx.tfc.api.Crafting.CraftingManagerTFC;
import com.bioxx.tfc.api.Crafting.ShapedRecipesTFC;
import net.dries007.tfcnei.util.Helper;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.IRecipe;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
import static cpw.mods.fml.relauncher.ReflectionHelper.getPrivateValue;
/**
* @author Dries007
*/
public class KnappingRecipeHandler extends TemplateRecipeHandler
{
    // Shared across handler instances; lazily populated in newInstance().
    private static List<IRecipe> recipeList;

    @Override
    public String getGuiTexture()
    {
        return Reference.ModID + ":" + Reference.AssetPathGui + "gui_knapping.png";
    }

    @Override
    public String getRecipeName()
    {
        return "Knapping";
    }

    @Override
    public String getOverlayIdentifier()
    {
        // Must match the identifier used by the transfer rect below.
        return "knapping";
    }

    @SuppressWarnings("unchecked")
    @Override
    public TemplateRecipeHandler newInstance()
    {
        // One-time lazy fetch (unchecked: the TFC API exposes a raw List).
        if (recipeList == null) recipeList = CraftingManagerTFC.getInstance().getRecipeList();
        return super.newInstance();
    }

    @Override
    public int recipiesPerPage()
    {
        return 1;
    }

    @Override
    public void loadTransferRects()
    {
        // The whole 5x5 (80x80 px) knapping grid is clickable.
        transferRects.add(new RecipeTransferRect(new Rectangle(0, 0, 5 * 16, 5 * 16), "knapping"));
    }

    /**
     * Registers one knapping recipe with NEI, reflecting the grid dimensions
     * out of the (private) ShapedRecipesTFC fields.
     *
     * @param recipe      The shaped knapping recipe.
     * @param offTexture  Flat item shown in empty grid cells (the clay "dark"
     *                    texture), or null for no off-cell texture.
     * @param inputs      The recipe's flat-item grid contents.
     * @param actualInput The item the player actually spends.
     */
    private void addKnappingRecipe(ShapedRecipesTFC recipe, ItemStack offTexture, ItemStack[] inputs, ItemStack actualInput)
    {
        int width = Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, recipe, "recipeWidth");
        int height = Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, recipe, "recipeHeight");
        arecipes.add(new CachedKnappingRecipe(width, height, offTexture, inputs, recipe.getRecipeOutput(), actualInput));
    }

    /**
     * Maps a flat ("sheet") crafting item back to the non-clay item the player
     * actually spends: leather, a loose rock, or a stack of 5 clay balls.
     * Returns null for unrecognized flat items.
     */
    private static ItemStack actualInputFor(ItemStack inStack)
    {
        if (inStack.getItem() == TFCItems.FlatLeather) return new ItemStack(TFCItems.Leather);
        if (inStack.getItem() == TFCItems.FlatRock) return new ItemStack(TFCItems.LooseRock);
        if (inStack.getItem() == TFCItems.FlatClay) return new ItemStack(TFCItems.ClayBall, 5);
        return null;
    }

    /**
     * Registers the given shaped recipe if it is a knapping recipe, i.e. its
     * first non-null ingredient is a flat item. Clay recipes get a darker
     * "unknapped" texture in the empty cells: meta 0 for normal clay (flat
     * meta 1), meta 2 for fire clay (flat meta 3). This is the shared core of
     * both loadCraftingRecipes overloads.
     */
    private void addIfKnapping(ShapedRecipesTFC recipe)
    {
        ItemStack[] inputs = getPrivateValue(ShapedRecipesTFC.class, recipe, "recipeItems");
        for (ItemStack inStack : inputs)
        {
            if (inStack == null) continue; // Skip empty grid slots
            if (!(inStack.getItem() instanceof ItemFlatGeneric)) break; // Not a knapping recipe
            if (inStack.getItem() == TFCItems.FlatClay)
            {
                if (inStack.getItemDamage() == 1) // Normal clay
                    addKnappingRecipe(recipe, new ItemStack(inStack.getItem(), 1, 0), inputs, new ItemStack(TFCItems.ClayBall, 5, 0));
                else if (inStack.getItemDamage() == 3) // Fire clay
                    addKnappingRecipe(recipe, new ItemStack(inStack.getItem(), 1, 2), inputs, new ItemStack(TFCItems.ClayBall, 5, 1));
            }
            else
            {
                // Leather or stone: no 'dark' off-cell texture.
                addKnappingRecipe(recipe, null, inputs, actualInputFor(inStack));
            }
            break; // Only the first non-null ingredient decides.
        }
    }

    @Override
    public void loadCraftingRecipes(String outputId, Object... results)
    {
        if (outputId.equals("knapping") && getClass() == KnappingRecipeHandler.class)
        {
            for (IRecipe recipe : recipeList)
                // All knapping recipes are shaped and larger than a 3x3 grid.
                if (recipe.getRecipeSize() > 9 && recipe instanceof ShapedRecipesTFC)
                    addIfKnapping((ShapedRecipesTFC) recipe);
        }
        else
            super.loadCraftingRecipes(outputId, results);
    }

    @Override
    public void loadCraftingRecipes(ItemStack result)
    {
        for (IRecipe recipe : recipeList)
        {
            if (Helper.areItemStacksEqual(result, recipe.getRecipeOutput()) && recipe.getRecipeSize() > 9 && recipe instanceof ShapedRecipesTFC)
                addIfKnapping((ShapedRecipesTFC) recipe);
        }
    }

    @Override
    public void loadUsageRecipes(ItemStack ingredient)
    {
        if (!(ingredient.getItem() instanceof ItemLooseRock)) return;
        Item flatType = getPrivateValue(ItemLooseRock.class, (ItemLooseRock) ingredient.getItem(), "specialCraftingType");
        for (IRecipe recipe : recipeList)
        {
            if (recipe.getRecipeSize() > 9 && recipe instanceof ShapedRecipesTFC)
            {
                ItemStack[] inputs = getPrivateValue(ShapedRecipesTFC.class, (ShapedRecipesTFC) recipe, "recipeItems");
                for (ItemStack inStack : inputs)
                {
                    if (inStack == null || flatType != inStack.getItem()) continue;
                    if (flatType == TFCItems.FlatClay)
                    {
                        if (ingredient.getItemDamage() == 0 && inStack.getItemDamage() == 1) // Normal clay
                            addKnappingRecipe((ShapedRecipesTFC) recipe, new ItemStack(flatType, 1, 0), inputs, new ItemStack(TFCItems.ClayBall, 5, 0));
                        else if (ingredient.getItemDamage() == 1 && inStack.getItemDamage() == 3) // Fire clay
                            // CONSISTENCY FIX: fire clay knapping consumes fire clay
                            // balls (ClayBall meta 1); this branch previously used
                            // meta 0, unlike both loadCraftingRecipes overloads.
                            addKnappingRecipe((ShapedRecipesTFC) recipe, new ItemStack(flatType, 1, 2), inputs, new ItemStack(TFCItems.ClayBall, 5, 1));
                    }
                    else if (inStack.getItemDamage() == Short.MAX_VALUE || ingredient.getItemDamage() == inStack.getItemDamage()) // Match stone damage too.
                    {
                        addKnappingRecipe((ShapedRecipesTFC) recipe, null, inputs, actualInputFor(inStack));
                    }
                    break;
                }
            }
        }
    }

    /**
     * NEI cache entry for one knapping recipe: the flat-item grid, the crafted
     * result, and the item actually consumed by the player.
     */
    public class CachedKnappingRecipe extends CachedRecipe
    {
        List<PositionedStack> inputs;
        PositionedStack result;
        PositionedStack actualInput;

        public CachedKnappingRecipe(final int W, final int H, ItemStack off, ItemStack[] inputs, ItemStack recipeOutput, ItemStack actualInput)
        {
            this.inputs = new ArrayList<>();
            // Lay the W x H grid out in 16 px cells; empty cells optionally
            // get the 'off' (dark clay) texture.
            for (int h = 0; h < H; h++)
                for (int w = 0; w < W; w++)
                {
                    if (inputs[h * W + w] != null) this.inputs.add(new PositionedStack(inputs[h * W + w], 16 * w, 16 * h));
                    else if (off != null) this.inputs.add(new PositionedStack(off, 16 * w, 16 * h));
                }
            this.result = new PositionedStack(recipeOutput, 123, 33);
            this.actualInput = new PositionedStack(actualInput, 123, 10);
        }

        @Override
        public List<PositionedStack> getIngredients()
        {
            return inputs;
        }

        @Override
        public PositionedStack getResult()
        {
            return result;
        }

        @Override
        public PositionedStack getOtherStack()
        {
            return actualInput;
        }
    }
}
|
src/main/java/net/dries007/tfcnei/recipeHandlers/KnappingRecipeHandler.java
|
/*
* Copyright (c) 2014 Dries007
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted (subject to the limitations in the
* disclaimer below) provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Dries007 nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
* GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
* HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.dries007.tfcnei.recipeHandlers;
import codechicken.nei.PositionedStack;
import codechicken.nei.recipe.TemplateRecipeHandler;
import com.bioxx.tfc.Items.ItemFlatGeneric;
import com.bioxx.tfc.Items.ItemLooseRock;
import com.bioxx.tfc.Reference;
import com.bioxx.tfc.TFCItems;
import com.bioxx.tfc.api.Crafting.CraftingManagerTFC;
import com.bioxx.tfc.api.Crafting.ShapedRecipesTFC;
import net.dries007.tfcnei.util.Helper;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.IRecipe;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
import static cpw.mods.fml.relauncher.ReflectionHelper.getPrivateValue;
/**
* @author Dries007
*/
public class KnappingRecipeHandler extends TemplateRecipeHandler
{
// Shared across handler instances; lazily populated in newInstance().
private static List<IRecipe> recipeList;
@Override
public String getGuiTexture()
{
    // Background texture shipped under TFC's own GUI asset path.
    String texture = Reference.AssetPathGui + "gui_knapping.png";
    return Reference.ModID + ":" + texture;
}
@Override
public String getRecipeName()
{
    // Display name for this recipe category.
    return "Knapping";
}
@Override
public String getOverlayIdentifier()
{
    // Must match the identifier registered by loadTransferRects().
    return "knapping";
}
@SuppressWarnings("unchecked")
@Override
public TemplateRecipeHandler newInstance()
{
    // One-time lazy fetch of the TFC recipe list (unchecked: the API
    // exposes a raw List).
    if (recipeList == null) recipeList = CraftingManagerTFC.getInstance().getRecipeList();
    return super.newInstance();
}
@Override
public int recipiesPerPage()
{
    // One knapping grid per page. (The method name typo is NEI's, not ours.)
    return 1;
}
@Override
public void loadTransferRects()
{
    // The whole 5x5 (80x80 px) knapping grid is clickable and jumps to
    // the "knapping" overlay.
    transferRects.add(new RecipeTransferRect(new Rectangle(0, 0, 5 * 16, 5 * 16), "knapping"));
}
@Override
public void loadCraftingRecipes(String outputId, Object... results)
{
    // Entry point when the user opens the "knapping" category directly:
    // register every knapping recipe. Knapping recipes are identified by
    // their first non-null ingredient being a flat ("sheet") item.
    if (outputId.equals("knapping") && getClass() == KnappingRecipeHandler.class)
    {
        for (IRecipe recipe : recipeList)
            if (recipe.getRecipeSize() > 9 && recipe instanceof ShapedRecipesTFC) // Filter out junk for optimisation. All knapping recipes are > 9 and are shaped
            {
                // START COPY CODE PART
                ItemStack[] inputs = getPrivateValue(ShapedRecipesTFC.class, (ShapedRecipesTFC) recipe, "recipeItems"); // Get inputs
                for (ItemStack inStack : inputs)
                {
                    if (inStack == null) continue; // Loop over until we find a not null entry
                    if (!(inStack.getItem() instanceof ItemFlatGeneric)) break; // If its not a flat type item, break out now
                    if (inStack.getItem() == TFCItems.FlatClay) // if its clay, we need some special code
                    {
                        if (inStack.getItemDamage() == 1) // Normal clay (damage value 1)
                        {
                            ItemStack actualInput = new ItemStack(TFCItems.ClayBall, 5, 0);
                            ItemStack flatType2 = new ItemStack(inStack.getItem(), 1, 0); // Dark texture has data value 0
                            arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), flatType2, inputs, recipe.getRecipeOutput(), actualInput));
                        }
                        else if (inStack.getItemDamage() == 3) // Fire clay (damage value 3)
                        {
                            ItemStack actualInput = new ItemStack(TFCItems.ClayBall, 5, 1);
                            ItemStack flatType2 = new ItemStack(inStack.getItem(), 1, 2); // Dark texture has data value 3
                            arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), flatType2, inputs, recipe.getRecipeOutput(), actualInput));
                        }
                    }
                    else // If not clay (aka Leather or stone) add the recipe without a 'dark' texture in place.
                    {
                        ItemStack actualInput = null;
                        if (inStack.getItem() == TFCItems.FlatLeather) actualInput = new ItemStack(TFCItems.Leather);
                        else if (inStack.getItem() == TFCItems.FlatRock) actualInput = new ItemStack(TFCItems.LooseRock);
                        else if (inStack.getItem() == TFCItems.FlatClay) actualInput = new ItemStack(TFCItems.ClayBall, 5);
                        arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), null, inputs, recipe.getRecipeOutput(), actualInput));
                    }
                    break; // Only the first non-null ingredient decides.
                }
                // END COPY CODE PART
            }
    }
    else
        super.loadCraftingRecipes(outputId, results);
}
@Override
public void loadCraftingRecipes(ItemStack result)
{
    // Registers every knapping recipe whose crafted output matches 'result'.
    for (IRecipe recipe : recipeList)
    {
        if (Helper.areItemStacksEqual(result, recipe.getRecipeOutput()) && recipe.getRecipeSize() > 9 && recipe instanceof ShapedRecipesTFC)
        {
            // START COPY CODE PART. FOR COMMENTS SEE loadCraftingRecipes
            ItemStack[] inputs = getPrivateValue(ShapedRecipesTFC.class, (ShapedRecipesTFC) recipe, "recipeItems");
            for (ItemStack inStack : inputs)
            {
                if (inStack == null) continue;
                if (!(inStack.getItem() instanceof ItemFlatGeneric)) break;
                if (inStack.getItem() == TFCItems.FlatClay)
                {
                    if (inStack.getItemDamage() == 1)
                    {
                        ItemStack actualInput = new ItemStack(TFCItems.ClayBall, 5, 0);
                        ItemStack flatType2 = new ItemStack(inStack.getItem(), 1, 0);
                        arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), flatType2, inputs, recipe.getRecipeOutput(), actualInput));
                    }
                    else if (inStack.getItemDamage() == 3)
                    {
                        ItemStack actualInput = new ItemStack(TFCItems.ClayBall, 5, 1);
                        ItemStack flatType2 = new ItemStack(inStack.getItem(), 1, 2);
                        arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), flatType2, inputs, recipe.getRecipeOutput(), actualInput));
                    }
                }
                else
                {
                    // BUG FIX: this branch was gated on
                    //   result.getItemDamage() == inStack.getItemDamage()
                    // but 'result' is the crafting OUTPUT, not the ingredient;
                    // the product's damage value is unrelated to the flat
                    // item's, so valid recipes were silently dropped. Register
                    // unconditionally, like the other loadCraftingRecipes does.
                    ItemStack actualInput = null;
                    if (inStack.getItem() == TFCItems.FlatLeather) actualInput = new ItemStack(TFCItems.Leather);
                    else if (inStack.getItem() == TFCItems.FlatRock) actualInput = new ItemStack(TFCItems.LooseRock);
                    else if (inStack.getItem() == TFCItems.FlatClay) actualInput = new ItemStack(TFCItems.ClayBall, 5);
                    arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), null, inputs, recipe.getRecipeOutput(), actualInput));
                }
                break;
            }
            // END COPY CODE PART
        }
    }
}
/**
 * Adds NEI usage entries for every knapping recipe that consumes the queried ingredient.
 *
 * Only {@code ItemLooseRock}-based ingredients (loose rock, clay, leather counterparts)
 * are handled; for each oversized shaped TFC recipe whose grid contains the matching
 * "flat" item, a {@link CachedKnappingRecipe} is registered.
 *
 * @param ingredient the stack NEI is querying usages for
 */
@Override
public void loadUsageRecipes(ItemStack ingredient)
{
    if (!(ingredient.getItem() instanceof ItemLooseRock)) return;
    // The "flat" counterpart item that appears in the knapping grid for this ingredient.
    Item flatType = getPrivateValue(ItemLooseRock.class, (ItemLooseRock) ingredient.getItem(), "specialCraftingType");
    for (IRecipe recipe : recipeList)
    {
        // Knapping recipes are the oversized (> 3x3) shaped TFC recipes.
        if (recipe.getRecipeSize() > 9 && recipe instanceof ShapedRecipesTFC)
        {
            ItemStack[] inputs = getPrivateValue(ShapedRecipesTFC.class, (ShapedRecipesTFC) recipe, "recipeItems");
            for (ItemStack inStack : inputs)
            {
                // Skip empty grid slots and stacks of a different flat item.
                if (inStack == null || flatType != inStack.getItem()) continue;
                if (flatType == TFCItems.FlatClay)
                {
                    if (ingredient.getItemDamage() == 0 && inStack.getItemDamage() == 1) // Compare to see if the ingredient is normal clay
                    {
                        ItemStack actualInput = new ItemStack(TFCItems.ClayBall, 5, 0);
                        ItemStack flatType2 = new ItemStack(flatType, 1, 0);
                        arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), flatType2, inputs, recipe.getRecipeOutput(), actualInput));
                    }
                    else if (ingredient.getItemDamage() == 1 && inStack.getItemDamage() == 3) // Compare to see if the ingredient is fire clay
                    {
                        // Fire clay balls use damage value 1 (consistent with loadCraftingRecipes);
                        // was 0, which displayed normal clay as the consumed input.
                        ItemStack actualInput = new ItemStack(TFCItems.ClayBall, 5, 1);
                        ItemStack flatType2 = new ItemStack(flatType, 1, 2);
                        arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), flatType2, inputs, recipe.getRecipeOutput(), actualInput));
                    }
                }
                else if (inStack.getItemDamage() == Short.MAX_VALUE || ingredient.getItemDamage() == inStack.getItemDamage()) // In this case match damage value of stone too.
                {
                    ItemStack actualInput = null;
                    if (inStack.getItem() == TFCItems.FlatLeather) actualInput = new ItemStack(TFCItems.Leather);
                    else if (inStack.getItem() == TFCItems.FlatRock) actualInput = new ItemStack(TFCItems.LooseRock);
                    else if (inStack.getItem() == TFCItems.FlatClay) actualInput = new ItemStack(TFCItems.ClayBall, 5);
                    arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), null, inputs, recipe.getRecipeOutput(), actualInput));
                }
                // Only the first matching grid stack decides the NEI entry for this recipe.
                break;
            }
        }
    }
}
/**
 * NEI cache entry for a single knapping recipe: the W x H grid of flat stacks,
 * the crafted result, and the item actually consumed from the player's hand.
 */
public class CachedKnappingRecipe extends CachedRecipe
{
    /** Grid stacks rendered in the knapping area, laid out in 16 px cells. */
    List<PositionedStack> inputs;
    /** The crafted result, shown at the fixed result slot (123, 33). */
    PositionedStack result;
    /** The stack actually consumed (e.g. 5 clay balls), shown at (123, 10). */
    PositionedStack actualInput;

    public CachedKnappingRecipe(final int W, final int H, ItemStack off, ItemStack[] inputs, ItemStack recipeOutput, ItemStack actualInput)
    {
        this.inputs = new ArrayList<>();
        // Walk the flattened W x H grid row by row; empty cells fall back to 'off' when one is given.
        for (int cell = 0; cell < W * H; cell++)
        {
            int col = cell % W;
            int row = cell / W;
            ItemStack shown = inputs[cell] != null ? inputs[cell] : off;
            if (shown != null) this.inputs.add(new PositionedStack(shown, 16 * col, 16 * row));
        }
        this.result = new PositionedStack(recipeOutput, 123, 33);
        this.actualInput = new PositionedStack(actualInput, 123, 10);
    }

    @Override
    public List<PositionedStack> getIngredients()
    {
        return inputs;
    }

    @Override
    public PositionedStack getResult()
    {
        return result;
    }

    @Override
    public PositionedStack getOtherStack()
    {
        return actualInput;
    }
}
}
|
fixed 90% of stone tools not showing up
|
src/main/java/net/dries007/tfcnei/recipeHandlers/KnappingRecipeHandler.java
|
fixed 90% of stone tools not showing up
|
<ide><path>src/main/java/net/dries007/tfcnei/recipeHandlers/KnappingRecipeHandler.java
<ide> arecipes.add(new CachedKnappingRecipe(Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeWidth"), Helper.getPrivateValue(ShapedRecipesTFC.class, int.class, (ShapedRecipesTFC) recipe, "recipeHeight"), flatType2, inputs, recipe.getRecipeOutput(), actualInput));
<ide> }
<ide> }
<del> else if (inStack.getItemDamage() == Short.MAX_VALUE || result.getItemDamage() == inStack.getItemDamage()) // In this case match damage value of stone too.
<add> else
<ide> {
<ide> ItemStack actualInput = null;
<ide> if (inStack.getItem() == TFCItems.FlatLeather) actualInput = new ItemStack(TFCItems.Leather);
|
|
Java
|
apache-2.0
|
e382db32e7dbc11c0a4b8cf25b5a18b629288acf
| 0 |
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.services;
import com.intellij.ide.util.treeView.TreeState;
import com.intellij.util.xmlb.annotations.Attribute;
import com.intellij.util.xmlb.annotations.Tag;
import com.intellij.util.xmlb.annotations.Transient;
import org.jdom.Element;
/** Persistent UI state of a single service view: content split proportion and tree expansion state. */
@Tag("serviceView")
class ServiceViewState {
    private static final float DEFAULT_CONTENT_PROPORTION = 0.3f;
    @Attribute("id")
    public String id = "";
    // Fraction of the view width occupied by the contents panel.
    public float contentProportion = DEFAULT_CONTENT_PROPORTION;
    // Serialized form of the tree state; deserialized into treeState after loading.
    @Tag("treeState")
    public Element treeStateElement;
    // Initialized to an empty state so consumers applying it never see null.
    @Transient
    public TreeState treeState = TreeState.createFrom(null);
}
|
platform/lang-impl/src/com/intellij/execution/services/ServiceViewState.java
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.services;
import com.intellij.ide.util.treeView.TreeState;
import com.intellij.util.xmlb.annotations.Attribute;
import com.intellij.util.xmlb.annotations.Tag;
import com.intellij.util.xmlb.annotations.Transient;
import org.jdom.Element;
/** Persistent UI state of a single service view: content split proportion and tree expansion state. */
@Tag("serviceView")
class ServiceViewState {
    private static final float DEFAULT_CONTENT_PROPORTION = 0.3f;
    @Attribute("id")
    public String id = "";
    // Fraction of the view width occupied by the contents panel.
    public float contentProportion = DEFAULT_CONTENT_PROPORTION;
    // Serialized form of the tree state; deserialized into treeState after loading.
    @Tag("treeState")
    public Element treeStateElement;
    // Initialize to an empty state instead of leaving null: consumers apply this
    // field directly and NPE when no persisted state was loaded.
    @Transient
    public TreeState treeState = TreeState.createFrom(null);
}
|
[service-view] fix NPE on applying tree state
|
platform/lang-impl/src/com/intellij/execution/services/ServiceViewState.java
|
[service-view] fix NPE on applying tree state
|
<ide><path>platform/lang-impl/src/com/intellij/execution/services/ServiceViewState.java
<ide> @Tag("treeState")
<ide> public Element treeStateElement;
<ide> @Transient
<del> public TreeState treeState;
<add> public TreeState treeState = TreeState.createFrom(null);
<ide> }
|
|
Java
|
lgpl-2.1
|
b3125cad115fa450cb7a8bb836a2829831c9fead
| 0 |
gallardo/opencms-core,gallardo/opencms-core,alkacon/opencms-core,alkacon/opencms-core,gallardo/opencms-core,gallardo/opencms-core,alkacon/opencms-core,alkacon/opencms-core
|
/*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH & Co. KG (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.configuration;
import org.opencms.file.CmsObject;
import org.opencms.letsencrypt.CmsLetsEncryptConfiguration;
import org.opencms.letsencrypt.CmsLetsEncryptConfiguration.Trigger;
import org.opencms.letsencrypt.CmsSiteConfigToLetsEncryptConfigConverter;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.report.CmsLogReport;
import org.opencms.site.CmsSSLMode;
import org.opencms.site.CmsSite;
import org.opencms.site.CmsSiteManagerImpl;
import org.opencms.site.CmsSiteMatcher;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.commons.digester.Digester;
import org.dom4j.Element;
/**
 * Class to read and write the OpenCms site configuration.<p>
 *
 * Parses <code>opencms-sites.xml</code> via Commons Digester rules, regenerates it
 * via dom4j, and after configuration updates optionally pushes the site setup to a
 * LetsEncrypt certificate configuration (debounced, see {@link #handleUpdate()}).
 */
public class CmsSitesConfiguration extends A_CmsXmlConfiguration implements I_CmsXmlConfigurationWithUpdateHandler {

    /** The "error" attribute. */
    public static final String A_ERROR = "error";

    /** The "errorPage" attribute. */
    public static final String A_ERROR_PAGE = "errorPage";

    /** The "exclusive" attribute. */
    public static final String A_EXCLUSIVE = "exclusive";

    /** The attribute name for the alias offset. */
    public static final String A_OFFSET = "offset";

    /** The "position" attribute. */
    public static final String A_POSITION = "position";

    /** The "server" attribute. */
    public static final String A_SERVER = "server";

    /** The "title" attribute. */
    public static final String A_TITLE = "title";

    /** The ssl mode attribute.*/
    public static final String A_SSL = "sslmode";

    /** The "usePermanentRedirects" attribute. */
    public static final String A_USE_PERMANENT_REDIRECTS = "usePermanentRedirects";

    /** The "webserver" attribute. */
    public static final String A_WEBSERVER = "webserver";

    /** The name of the DTD for this configuration. */
    public static final String CONFIGURATION_DTD_NAME = "opencms-sites.dtd";

    /** The name of the default XML file for this configuration. */
    public static final String DEFAULT_XML_FILE_NAME = "opencms-sites.xml";

    /** The node name for the alias node. */
    public static final String N_ALIAS = "alias";

    /** The node name for the default-uri node. */
    public static final String N_DEFAULT_URI = "default-uri";

    /** The node name for the parameters. */
    public static final String N_PARAMETERS = "parameters";

    /** The node name for the secure site. */
    public static final String N_SECURE = "secure";

    /** Shared folder node name. */
    public static final String N_SHARED_FOLDER = "shared-folder";

    /** New secure modes node. */
    public static final String N_OLD_STYLE_SECURE_SERVER = "oldStyleSecureServer";

    /** The node name for the sites node. */
    public static final String N_SITES = "sites";

    /** The node name which indicates if apache should be configurable in sitemanager. */
    public static final String N_WEBSERVERSCRIPTING = "webserver-scripting";

    /** Configuration node name. */
    public static final String N_WEBSERVERSCRIPTING_CONFIGTEMPLATE = "configtemplate";

    /** Configuration node name. */
    public static final String N_WEBSERVERSCRIPTING_FILENAMEPREFIX = "filenameprefix";

    /** Configuration node name. */
    public static final String N_WEBSERVERSCRIPTING_LOGGINGDIR = "loggingdir";

    /** Configuration node name. */
    public static final String N_WEBSERVERSCRIPTING_SECURETEMPLATE = "securetemplate";

    /** Configuration node name. */
    public static final String N_WEBSERVERSCRIPTING_TARGETPATH = "targetpath";

    /** Configuration node name. */
    public static final String N_WEBSERVERSCRIPTING_WEBSERVERSCRIPT = "webserverscript";

    /** The node name for the workplace-server node. */
    public static final String N_WORKPLACE_SERVER = "workplace-server";

    /** The CmsObject with admin privileges. */
    // NOTE(review): written by setCmsObject() but not read anywhere in this class — confirm before removing.
    private CmsObject m_adminCms;

    /** The configured site manager. */
    private CmsSiteManagerImpl m_siteManager;

    /** Future for the LetsEncrypt async update. */
    // Pending debounce task scheduled by handleUpdate(); cancelled and replaced on each new update.
    private ScheduledFuture<?> m_updateFuture;

    /**
     * Registers the Digester rules that map opencms-sites.xml onto a {@link CmsSiteManagerImpl}.
     *
     * @see org.opencms.configuration.I_CmsXmlConfiguration#addXmlDigesterRules(org.apache.commons.digester.Digester)
     */
    public void addXmlDigesterRules(Digester digester) {

        // add site configuration rule
        digester.addObjectCreate("*/" + N_SITES, CmsSiteManagerImpl.class);
        digester.addCallMethod("*/" + N_SITES + "/" + N_WORKPLACE_SERVER, "addWorkplaceServer", 2);
        digester.addCallParam("*/" + N_SITES + "/" + N_WORKPLACE_SERVER, 0);
        digester.addCallParam("*/" + N_SITES + "/" + N_WORKPLACE_SERVER, 1, A_SSL);
        digester.addCallMethod("*/" + N_SITES + "/" + N_DEFAULT_URI, "setDefaultUri", 0);
        digester.addCallMethod("*/" + N_SITES + "/" + N_OLD_STYLE_SECURE_SERVER, "setOldStyleSecureServerAllowed", 0);
        // optional web server scripting block: 6 parameters collected from child nodes
        String configApachePath = "*/" + N_SITES + "/" + N_WEBSERVERSCRIPTING;
        digester.addCallMethod(configApachePath, "setWebServerScripting", 6);
        digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_WEBSERVERSCRIPT, 0);
        digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_TARGETPATH, 1);
        digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_CONFIGTEMPLATE, 2);
        digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_SECURETEMPLATE, 3);
        digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_FILENAMEPREFIX, 4);
        digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_LOGGINGDIR, 5);
        digester.addSetNext("*/" + N_SITES, "setSiteManager");

        // add site configuration rule: 11 parameters, indices 7-10 come from the nested <secure> node
        String siteXpath = "*/" + N_SITES + "/" + N_SITE;

        digester.addCallMethod(siteXpath, "addSiteInternally", 11);
        digester.addCallParam(siteXpath, 0, A_SERVER);
        digester.addCallParam(siteXpath, 1, A_URI);
        digester.addCallParam(siteXpath, 2, A_TITLE);
        digester.addCallParam(siteXpath, 3, A_POSITION);
        digester.addCallParam(siteXpath, 4, A_ERROR_PAGE);
        digester.addCallParam(siteXpath, 5, A_WEBSERVER);
        digester.addCallParam(siteXpath, 6, A_SSL);
        digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_SECURE, 7, A_SERVER);
        digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_SECURE, 8, A_EXCLUSIVE);
        digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_SECURE, 9, A_ERROR);
        digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_SECURE, 10, A_USE_PERMANENT_REDIRECTS);
        digester.addCallMethod(siteXpath + "/" + N_PARAMETERS + "/" + N_PARAM, "addParamToConfigSite", 2);
        digester.addCallParam(siteXpath + "/" + N_PARAMETERS + "/" + N_PARAM, 0, A_NAME);
        digester.addCallParam(siteXpath + "/" + N_PARAMETERS + "/" + N_PARAM, 1);

        // add an alias to the currently configured site
        digester.addCallMethod("*/" + N_SITES + "/" + N_SITE + "/" + N_ALIAS, "addAliasToConfigSite", 2);
        digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_ALIAS, 0, A_SERVER);
        digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_ALIAS, 1, A_OFFSET);
        digester.addCallMethod("*/" + N_SITES + "/" + N_SHARED_FOLDER, "setSharedFolder", 0);
    }

    /**
     * Generates the <code>&lt;sites&gt;</code> XML element tree from the current site manager state.
     *
     * @see org.opencms.configuration.I_CmsXmlConfiguration#generateXml(org.dom4j.Element)
     */
    public Element generateXml(Element parent) {

        // create <sites> node
        Element sitesElement = parent.addElement(N_SITES);
        if (OpenCms.getRunLevel() >= OpenCms.RUNLEVEL_3_SHELL_ACCESS) {
            // refresh the reference in case the site manager was replaced at runtime
            m_siteManager = OpenCms.getSiteManager();
        }
        // one <workplace-server> element per configured workplace server, with its SSL mode
        Map<String, CmsSSLMode> workplaceMap = m_siteManager.getWorkplaceServersMap();
        for (String server : workplaceMap.keySet()) {
            Element workplaceElement = sitesElement.addElement(N_WORKPLACE_SERVER).addText(server);
            workplaceElement.addAttribute(A_SSL, workplaceMap.get(server).getXMLValue());
        }
        sitesElement.addElement(N_DEFAULT_URI).addText(m_siteManager.getDefaultUri());
        String sharedFolder = m_siteManager.getSharedFolder();
        if (sharedFolder != null) {
            sitesElement.addElement(N_SHARED_FOLDER).addText(sharedFolder);
        }
        String oldStyleSecureAllowed = String.valueOf(m_siteManager.isOldStyleSecureServerAllowed());
        sitesElement.addElement(N_OLD_STYLE_SECURE_SERVER).addText(oldStyleSecureAllowed);
        // optional web server scripting configuration
        if (m_siteManager.isConfigurableWebServer()) {
            Element configServer = sitesElement.addElement(N_WEBSERVERSCRIPTING);
            Map<String, String> configServerMap = m_siteManager.getWebServerConfig();
            configServer.addElement(N_WEBSERVERSCRIPTING_WEBSERVERSCRIPT).addText(
                configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_WEBSERVERSCRIPT));
            configServer.addElement(N_WEBSERVERSCRIPTING_TARGETPATH).addText(
                configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_TARGETPATH));
            configServer.addElement(N_WEBSERVERSCRIPTING_CONFIGTEMPLATE).addText(
                configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_CONFIGTEMPLATE));
            configServer.addElement(N_WEBSERVERSCRIPTING_SECURETEMPLATE).addText(
                configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_SECURETEMPLATE));
            configServer.addElement(N_WEBSERVERSCRIPTING_FILENAMEPREFIX).addText(
                configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_FILENAMEPREFIX));
            configServer.addElement(N_WEBSERVERSCRIPTING_LOGGINGDIR).addText(
                configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_LOGGINGDIR));
        }
        // the HashSet deduplicates sites that are registered under several matchers
        Iterator<CmsSite> siteIterator = new HashSet<CmsSite>(m_siteManager.getSites().values()).iterator();
        while (siteIterator.hasNext()) {
            CmsSite site = siteIterator.next();
            // create <site server="" uri=""/> subnode(s)
            Element siteElement = sitesElement.addElement(N_SITE);
            siteElement.addAttribute(A_SERVER, site.getSiteMatcher().toString());
            siteElement.addAttribute(A_URI, site.getSiteRoot().concat("/"));
            siteElement.addAttribute(A_TITLE, site.getTitle());
            siteElement.addAttribute(A_POSITION, Float.toString(site.getPosition()));
            siteElement.addAttribute(A_ERROR_PAGE, site.getErrorPage());
            siteElement.addAttribute(A_WEBSERVER, String.valueOf(site.isWebserver()));
            siteElement.addAttribute(A_SSL, site.getSSLMode().getXMLValue());
            // create <secure server=""/> subnode
            if (site.hasSecureServer()) {
                Element secureElem = siteElement.addElement(N_SECURE);
                secureElem.addAttribute(A_SERVER, site.getSecureUrl());

                secureElem.addAttribute(A_EXCLUSIVE, String.valueOf(site.isExclusiveUrl()));
                secureElem.addAttribute(A_ERROR, String.valueOf(site.isExclusiveError()));
                if (site.usesPermanentRedirects()) {
                    secureElem.addAttribute(A_USE_PERMANENT_REDIRECTS, Boolean.TRUE.toString());
                }
            }
            if ((site.getParameters() != null) && !site.getParameters().isEmpty()) {
                Element parametersElem = siteElement.addElement(N_PARAMETERS);
                for (Map.Entry<String, String> entry : site.getParameters().entrySet()) {
                    Element paramElem = parametersElem.addElement(N_PARAM);
                    paramElem.addAttribute(A_NAME, entry.getKey());
                    paramElem.addText(entry.getValue());
                }
            }
            // create <alias server=""/> subnode(s)
            Iterator<CmsSiteMatcher> aliasIterator = site.getAliases().iterator();
            while (aliasIterator.hasNext()) {
                CmsSiteMatcher matcher = aliasIterator.next();
                Element aliasElement = siteElement.addElement(N_ALIAS);
                aliasElement.addAttribute(A_SERVER, matcher.getUrl());
                // the offset attribute is only written when a time offset is actually set
                if (matcher.getTimeOffset() != 0) {
                    aliasElement.addAttribute(A_OFFSET, "" + matcher.getTimeOffset());
                }
            }
        }
        return sitesElement;
    }

    /**
     * @see org.opencms.configuration.I_CmsXmlConfiguration#getDtdFilename()
     */
    public String getDtdFilename() {

        return CONFIGURATION_DTD_NAME;
    }

    /**
     * Returns the site manager.<p>
     *
     * @return the site manager
     */
    public CmsSiteManagerImpl getSiteManager() {

        return m_siteManager;
    }

    /**
     * Triggers the (debounced) LetsEncrypt reconfiguration after the site configuration was written.
     *
     * @see org.opencms.configuration.I_CmsXmlConfigurationWithUpdateHandler#handleUpdate()
     *
     * @throws Exception if scheduling the update fails
     */
    public synchronized void handleUpdate() throws Exception {

        CmsLetsEncryptConfiguration config = OpenCms.getLetsEncryptConfig();
        if ((config != null) && config.isValidAndEnabled() && (config.getTrigger() == Trigger.siteConfig)) {

            // the configuration may be written several times in quick succession. We want to update when this
            // happens for the last time, not the first, so we use a scheduled task.
            if (m_updateFuture != null) {
                m_updateFuture.cancel(false);
                m_updateFuture = null;
            }
            m_updateFuture = OpenCms.getExecutor().schedule(new Runnable() {

                @SuppressWarnings("synthetic-access")
                public void run() {

                    m_updateFuture = null;
                    CmsLogReport report = new CmsLogReport(
                        Locale.ENGLISH,
                        org.opencms.letsencrypt.CmsSiteConfigToLetsEncryptConfigConverter.class);
                    CmsSiteConfigToLetsEncryptConfigConverter converter = new CmsSiteConfigToLetsEncryptConfigConverter(
                        config);
                    converter.run(report, OpenCms.getSiteManager());
                }
            }, 5, TimeUnit.SECONDS);
        }

    }

    /**
     * @see org.opencms.configuration.I_CmsXmlConfigurationWithUpdateHandler#setCmsObject(org.opencms.file.CmsObject)
     */
    public void setCmsObject(CmsObject cms) {

        m_adminCms = cms;
    }

    /**
     * Sets the site manager.<p>
     *
     * @param siteManager the site manager to set
     */
    public void setSiteManager(CmsSiteManagerImpl siteManager) {

        m_siteManager = siteManager;
        if (CmsLog.INIT.isInfoEnabled()) {
            CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_SITE_CONFIG_FINISHED_0));
        }
    }

    /**
     * @see org.opencms.configuration.A_CmsXmlConfiguration#initMembers()
     */
    @Override
    protected void initMembers() {

        setXmlFileName(DEFAULT_XML_FILE_NAME);
    }
}
|
src/org/opencms/configuration/CmsSitesConfiguration.java
|
/*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH & Co. KG (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.configuration;
import org.opencms.file.CmsObject;
import org.opencms.letsencrypt.CmsLetsEncryptConfiguration;
import org.opencms.letsencrypt.CmsLetsEncryptConfiguration.Trigger;
import org.opencms.letsencrypt.CmsSiteConfigToLetsEncryptConfigConverter;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.report.CmsLogReport;
import org.opencms.site.CmsSSLMode;
import org.opencms.site.CmsSite;
import org.opencms.site.CmsSiteManagerImpl;
import org.opencms.site.CmsSiteMatcher;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.commons.digester.Digester;
import org.dom4j.Element;
/**
* Class to read and write the OpenCms site configuration.<p>
*/
public class CmsSitesConfiguration extends A_CmsXmlConfiguration implements I_CmsXmlConfigurationWithUpdateHandler {
/** The "error" attribute. */
public static final String A_ERROR = "error";
/** The "errorPage" attribute. */
public static final String A_ERROR_PAGE = "errorPage";
/** The "exclusive" attribute. */
public static final String A_EXCLUSIVE = "exclusive";
/** The attribute name for the alias offset. */
public static final String A_OFFSET = "offset";
/** The "position" attribute. */
public static final String A_POSITION = "position";
/** The "server" attribute. */
public static final String A_SERVER = "server";
/** The "title" attribute. */
public static final String A_TITLE = "title";
/** The ssl mode attribute.*/
public static final String A_SSL = "sslmode";
/** The "usePermanentRedirects" attribute. */
public static final String A_USE_PERMANENT_REDIRECTS = "usePermanentRedirects";
/** The "webserver" attribute. */
public static final String A_WEBSERVER = "webserver";
/** The name of the DTD for this configuration. */
public static final String CONFIGURATION_DTD_NAME = "opencms-sites.dtd";
/** The name of the default XML file for this configuration. */
public static final String DEFAULT_XML_FILE_NAME = "opencms-sites.xml";
/** The node name for the alias node. */
public static final String N_ALIAS = "alias";
/** The node name for the default-uri node. */
public static final String N_DEFAULT_URI = "default-uri";
/** The node name for the parameters. */
public static final String N_PARAMETERS = "parameters";
/** The node name for the secure site. */
public static final String N_SECURE = "secure";
/** Shared folder node name. */
public static final String N_SHARED_FOLDER = "shared-folder";
/** New secure modes node. */
public static final String N_OLD_STYLE_SECURE_SERVER = "oldStyleSecureServer";
/** The node name for the sites node. */
public static final String N_SITES = "sites";
/** The node name which indicates if apache should be configurable in sitemanager. */
public static final String N_WEBSERVERSCRIPTING = "webserver-scripting";
/** Configuration node name. */
public static final String N_WEBSERVERSCRIPTING_CONFIGTEMPLATE = "configtemplate";
/** Configuration node name. */
public static final String N_WEBSERVERSCRIPTING_FILENAMEPREFIX = "filenameprefix";
/** Configuration node name. */
public static final String N_WEBSERVERSCRIPTING_LOGGINGDIR = "loggingdir";
/** Configuration node name. */
public static final String N_WEBSERVERSCRIPTING_SECURETEMPLATE = "securetemplate";
/** Configuration node name. */
public static final String N_WEBSERVERSCRIPTING_TARGETPATH = "targetpath";
/** Configuration node name. */
public static final String N_WEBSERVERSCRIPTING_WEBSERVERSCRIPT = "webserverscript";
/** The node name for the workplace-server node. */
public static final String N_WORKPLACE_SERVER = "workplace-server";
/** The CmsObject with admin privileges. */
@SuppressWarnings("unused")
private CmsObject m_adminCms;
/** The configured site manager. */
private CmsSiteManagerImpl m_siteManager;
/** Future for the LetsEncrypt async update. */
private ScheduledFuture<?> m_updateFuture;
/**
* @see org.opencms.configuration.I_CmsXmlConfiguration#addXmlDigesterRules(org.apache.commons.digester.Digester)
*/
public void addXmlDigesterRules(Digester digester) {
// add site configuration rule
digester.addObjectCreate("*/" + N_SITES, CmsSiteManagerImpl.class);
digester.addCallMethod("*/" + N_SITES + "/" + N_WORKPLACE_SERVER, "addWorkplaceServer", 2);
digester.addCallParam("*/" + N_SITES + "/" + N_WORKPLACE_SERVER, 0);
digester.addCallParam("*/" + N_SITES + "/" + N_WORKPLACE_SERVER, 1, A_SSL);
digester.addCallMethod("*/" + N_SITES + "/" + N_DEFAULT_URI, "setDefaultUri", 0);
digester.addCallMethod("*/" + N_SITES + "/" + N_OLD_STYLE_SECURE_SERVER, "setOldStyleSecureServerAllowed", 0);
String configApachePath = "*/" + N_SITES + "/" + N_WEBSERVERSCRIPTING;
digester.addCallMethod(configApachePath, "setWebServerScripting", 6);
digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_WEBSERVERSCRIPT, 0);
digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_TARGETPATH, 1);
digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_CONFIGTEMPLATE, 2);
digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_SECURETEMPLATE, 3);
digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_FILENAMEPREFIX, 4);
digester.addCallParam(configApachePath + "/" + N_WEBSERVERSCRIPTING_LOGGINGDIR, 5);
digester.addSetNext("*/" + N_SITES, "setSiteManager");
// add site configuration rule
String siteXpath = "*/" + N_SITES + "/" + N_SITE;
digester.addCallMethod(siteXpath, "addSiteInternally", 11);
digester.addCallParam(siteXpath, 0, A_SERVER);
digester.addCallParam(siteXpath, 1, A_URI);
digester.addCallParam(siteXpath, 2, A_TITLE);
digester.addCallParam(siteXpath, 3, A_POSITION);
digester.addCallParam(siteXpath, 4, A_ERROR_PAGE);
digester.addCallParam(siteXpath, 5, A_WEBSERVER);
digester.addCallParam(siteXpath, 6, A_SSL);
digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_SECURE, 7, A_SERVER);
digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_SECURE, 8, A_EXCLUSIVE);
digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_SECURE, 9, A_ERROR);
digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_SECURE, 10, A_USE_PERMANENT_REDIRECTS);
digester.addCallMethod(siteXpath + "/" + N_PARAMETERS + "/" + N_PARAM, "addParamToConfigSite", 2);
digester.addCallParam(siteXpath + "/" + N_PARAMETERS + "/" + N_PARAM, 0, A_NAME);
digester.addCallParam(siteXpath + "/" + N_PARAMETERS + "/" + N_PARAM, 1);
// add an alias to the currently configured site
digester.addCallMethod("*/" + N_SITES + "/" + N_SITE + "/" + N_ALIAS, "addAliasToConfigSite", 2);
digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_ALIAS, 0, A_SERVER);
digester.addCallParam("*/" + N_SITES + "/" + N_SITE + "/" + N_ALIAS, 1, A_OFFSET);
digester.addCallMethod("*/" + N_SITES + "/" + N_SHARED_FOLDER, "setSharedFolder", 0);
}
/**
* @see org.opencms.configuration.I_CmsXmlConfiguration#generateXml(org.dom4j.Element)
*/
public Element generateXml(Element parent) {
// create <sites> node
Element sitesElement = parent.addElement(N_SITES);
if (OpenCms.getRunLevel() >= OpenCms.RUNLEVEL_3_SHELL_ACCESS) {
m_siteManager = OpenCms.getSiteManager();
}
Map<String, CmsSSLMode> workplaceMap = m_siteManager.getWorkplaceServersMap();
for (String server : workplaceMap.keySet()) {
Element workplaceElement = sitesElement.addElement(N_WORKPLACE_SERVER).addText(server);
workplaceElement.addAttribute(A_SSL, workplaceMap.get(server).getXMLValue());
}
sitesElement.addElement(N_DEFAULT_URI).addText(m_siteManager.getDefaultUri());
String sharedFolder = m_siteManager.getSharedFolder();
if (sharedFolder != null) {
sitesElement.addElement(N_SHARED_FOLDER).addText(sharedFolder);
}
String oldStyleSecureAllowed = String.valueOf(m_siteManager.isOldStyleSecureServerAllowed());
sitesElement.addElement(N_OLD_STYLE_SECURE_SERVER).addText(oldStyleSecureAllowed);
if (m_siteManager.isConfigurableWebServer()) {
Element configServer = sitesElement.addElement(N_WEBSERVERSCRIPTING);
Map<String, String> configServerMap = m_siteManager.getWebServerConfig();
configServer.addElement(N_WEBSERVERSCRIPTING_WEBSERVERSCRIPT).addText(
configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_WEBSERVERSCRIPT));
configServer.addElement(N_WEBSERVERSCRIPTING_TARGETPATH).addText(
configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_TARGETPATH));
configServer.addElement(N_WEBSERVERSCRIPTING_CONFIGTEMPLATE).addText(
configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_CONFIGTEMPLATE));
configServer.addElement(N_WEBSERVERSCRIPTING_SECURETEMPLATE).addText(
configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_SECURETEMPLATE));
configServer.addElement(N_WEBSERVERSCRIPTING_FILENAMEPREFIX).addText(
configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_FILENAMEPREFIX));
configServer.addElement(N_WEBSERVERSCRIPTING_LOGGINGDIR).addText(
configServerMap.get(CmsSiteManagerImpl.WEB_SERVER_CONFIG_LOGGINGDIR));
}
Iterator<CmsSite> siteIterator = new HashSet<CmsSite>(m_siteManager.getSites().values()).iterator();
while (siteIterator.hasNext()) {
CmsSite site = siteIterator.next();
// create <site server="" uri=""/> subnode(s)
Element siteElement = sitesElement.addElement(N_SITE);
siteElement.addAttribute(A_SERVER, site.getSiteMatcher().toString());
siteElement.addAttribute(A_URI, site.getSiteRoot().concat("/"));
siteElement.addAttribute(A_TITLE, site.getTitle());
siteElement.addAttribute(A_POSITION, Float.toString(site.getPosition()));
siteElement.addAttribute(A_ERROR_PAGE, site.getErrorPage());
siteElement.addAttribute(A_WEBSERVER, String.valueOf(site.isWebserver()));
siteElement.addAttribute(A_SSL, site.getSSLMode().getXMLValue());
// create <secure server=""/> subnode
if (site.hasSecureServer()) {
Element secureElem = siteElement.addElement(N_SECURE);
secureElem.addAttribute(A_SERVER, site.getSecureUrl());
secureElem.addAttribute(A_EXCLUSIVE, String.valueOf(site.isExclusiveUrl()));
secureElem.addAttribute(A_ERROR, String.valueOf(site.isExclusiveError()));
if (site.usesPermanentRedirects()) {
secureElem.addAttribute(A_USE_PERMANENT_REDIRECTS, Boolean.TRUE.toString());
}
}
if ((site.getParameters() != null) && !site.getParameters().isEmpty()) {
Element parametersElem = siteElement.addElement(N_PARAMETERS);
for (Map.Entry<String, String> entry : site.getParameters().entrySet()) {
Element paramElem = parametersElem.addElement(N_PARAM);
paramElem.addAttribute(A_NAME, entry.getKey());
paramElem.addText(entry.getValue());
}
}
// create <alias server=""/> subnode(s)
Iterator<CmsSiteMatcher> aliasIterator = site.getAliases().iterator();
while (aliasIterator.hasNext()) {
CmsSiteMatcher matcher = aliasIterator.next();
Element aliasElement = siteElement.addElement(N_ALIAS);
aliasElement.addAttribute(A_SERVER, matcher.getUrl());
if (matcher.getTimeOffset() != 0) {
aliasElement.addAttribute(A_OFFSET, "" + matcher.getTimeOffset());
}
}
}
return sitesElement;
}
/**
 * @see org.opencms.configuration.I_CmsXmlConfiguration#getDtdFilename()
 *
 * @return the file name of the DTD used to validate this XML configuration
 */
public String getDtdFilename() {

    return CONFIGURATION_DTD_NAME;
}
/**
 * Returns the site manager this configuration was initialized with.<p>
 *
 * @return the site manager (set via {@link #setSiteManager(CmsSiteManagerImpl)} during configuration digestion)
 */
public CmsSiteManagerImpl getSiteManager() {

    return m_siteManager;
}
/**
 * Triggers the LetsEncrypt configuration update after the site configuration has been written.<p>
 *
 * Synchronized because it both reads and replaces {@code m_updateFuture}, and may be
 * invoked from concurrent configuration writes.
 *
 * @throws Exception if scheduling the update fails
 *
 * @see org.opencms.configuration.I_CmsXmlConfigurationWithUpdateHandler#handleUpdate()
 */
public synchronized void handleUpdate() throws Exception {

    CmsLetsEncryptConfiguration config = OpenCms.getLetsEncryptConfig();
    // only act when LetsEncrypt is enabled and configured to react on site configuration changes
    if ((config != null) && config.isValidAndEnabled() && (config.getTrigger() == Trigger.siteConfig)) {
        // the configuration may be written several times in quick succession. We want to update when this
        // happens for the last time, not the first, so we use a scheduled task.
        if (m_updateFuture != null) {
            // a previous pending update is superseded by this one — cancel it (without interrupting)
            m_updateFuture.cancel(false);
            m_updateFuture = null;
        }
        m_updateFuture = OpenCms.getExecutor().schedule(new Runnable() {

            @SuppressWarnings("synthetic-access")
            public void run() {

                // clear the handle first so a concurrent handleUpdate() schedules a fresh task
                m_updateFuture = null;
                CmsLogReport report = new CmsLogReport(
                    Locale.ENGLISH,
                    org.opencms.letsencrypt.CmsSiteConfigToLetsEncryptConfigConverter.class);
                CmsSiteConfigToLetsEncryptConfigConverter converter = new CmsSiteConfigToLetsEncryptConfigConverter(
                    config);
                converter.run(report, OpenCms.getSiteManager());
            }
        }, 5, TimeUnit.SECONDS);
    }
}
/**
 * Stores an admin-privileged CmsObject for use by later configuration update handling.<p>
 *
 * @param cms the admin CmsObject to store
 *
 * @see org.opencms.configuration.I_CmsXmlConfigurationWithUpdateHandler#setCmsObject(org.opencms.file.CmsObject)
 */
public void setCmsObject(CmsObject cms) {

    // removed leftover System.out debugging output ("CmsObject initialized")
    m_adminCms = cms;
}
/**
 * Sets the site manager.<p>
 *
 * Called by the configuration digester; logs the "site config finished" init message once
 * the manager is available.
 *
 * @param siteManager the site manager to set
 */
public void setSiteManager(CmsSiteManagerImpl siteManager) {

    m_siteManager = siteManager;
    // guard avoids building the localized message when INFO logging is disabled
    if (CmsLog.INIT.isInfoEnabled()) {
        CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_SITE_CONFIG_FINISHED_0));
    }
}
/**
 * Registers the default XML file name for this configuration class.<p>
 *
 * @see org.opencms.configuration.A_CmsXmlConfiguration#initMembers()
 */
@Override
protected void initMembers() {

    setXmlFileName(DEFAULT_XML_FILE_NAME);
}
}
|
Removed debugging output.
|
src/org/opencms/configuration/CmsSitesConfiguration.java
|
Removed debugging output.
|
<ide><path>rc/org/opencms/configuration/CmsSitesConfiguration.java
<ide> public static final String N_WORKPLACE_SERVER = "workplace-server";
<ide>
<ide> /** The CmsObject with admin privileges. */
<del> @SuppressWarnings("unused")
<ide> private CmsObject m_adminCms;
<ide>
<ide> /** The configured site manager. */
<ide> */
<ide> public void setCmsObject(CmsObject cms) {
<ide>
<del> System.out.println("CmsObject initialized");
<ide> m_adminCms = cms;
<ide> }
<ide>
|
|
Java
|
apache-2.0
|
cb476bf5733cc5bb69e9d367b5a40b7ec6ab7408
| 0 |
gucce/citrus,gucce/citrus,hmmlopez/citrus,hmmlopez/citrus,hmmlopez/citrus,christophd/citrus,christophd/citrus
|
/*
* Copyright 2006-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus;
import java.io.*;
import java.util.*;
import org.apache.commons.cli.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.TestNG;
import org.testng.xml.*;
import com.consol.citrus.exceptions.CitrusRuntimeException;
import com.consol.citrus.exceptions.TestEngineFailedException;
/**
 * Citrus command line application. Parses CLI options, assembles a TestNG suite from
 * named tests, packages and/or testng.xml files, runs it and exits with TestNG's status.
 *
 * @author Christoph Deppisch
 * @since 2008
 */
public class Citrus {
    /**
     * Logger
     */
    private static final Logger log = LoggerFactory.getLogger(Citrus.class);

    /**
     * Prevent instantiation.
     */
    private Citrus() {}

    /**
     * Main CLI method.
     * @param args command line arguments
     */
    public static void main(String[] args) {
        log.info("CITRUS TESTFRAMEWORK ");
        log.info("");

        Options options = new CitrusCliOptions();
        CommandLineParser cliParser = new GnuParser();

        CommandLine cmd = null;

        try {
            cmd = cliParser.parse(options, args);

            if (cmd.hasOption("help")) {
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp("CITRUS TestFramework", options);
                return;
            }

            // normalize test directory so it can be used as a path prefix below
            String testDirectory = cmd.getOptionValue("testdir", CitrusConstants.DEFAULT_TEST_DIRECTORY);
            if (!testDirectory.endsWith("/")) {
                testDirectory = testDirectory + "/";
            }

            // true: keep TestNG's default listeners so test reports are generated
            TestNG testNG = new TestNG(true);

            XmlSuite suite = new XmlSuite();
            suite.setName(cmd.getOptionValue("suitename", CitrusConstants.DEFAULT_SUITE_NAME));

            // one XmlTest per explicitly named test, resolved to its class name
            if (cmd.hasOption("test")) {
                for (String testName : cmd.getOptionValues("test")) {
                    XmlTest test = new XmlTest(suite);
                    test.setName(testName);
                    test.setXmlClasses(Collections.singletonList(new XmlClass(getClassNameForTest(testDirectory, testName.trim()))));
                }
            }

            // one XmlTest per requested package
            if (cmd.hasOption("package")) {
                for (String packageName : cmd.getOptionValues("package")) {
                    XmlTest test = new XmlTest(suite);
                    test.setName(packageName);

                    XmlPackage xmlPackage = new XmlPackage();
                    xmlPackage.setName(packageName);
                    test.setXmlPackages(Collections.singletonList(xmlPackage));
                }
            }

            // remaining plain arguments are expected to be testng.xml suite files
            if (!cmd.getArgList().isEmpty()) {
                List<String> testNgXml = new ArrayList<String>();
                for (String testNgXmlFile : cmd.getArgs()) {
                    if (testNgXmlFile.endsWith(".xml")) {
                        testNgXml.add(testNgXmlFile);
                    } else {
                        log.warn("Unrecognized argument '" + testNgXmlFile + "'");
                    }
                }
                testNG.setTestSuites(testNgXml);
            }

            List<XmlSuite> suites = new ArrayList<XmlSuite>();
            suites.add(suite);
            testNG.setXmlSuites(suites);
            testNG.run();

            System.exit(testNG.getStatus());
        } catch (ParseException e) {
            log.error("Failed to parse command line arguments", e);
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("CITRUS TestFramework", options);
        } catch (FileNotFoundException e) {
            log.error("Failed to load test files", e);
            throw new TestEngineFailedException("TestSuite failed with error", e);
        } catch (IOException e) {
            log.error("Error while accessing test file", e);
            throw new TestEngineFailedException("TestSuite failed with error", e);
        }
    }

    /**
     * Method to retrieve the full class name for a test.
     * Hierarchy of folders is supported, too.
     *
     * @param startDir directory where to start the search
     * @param testName test name to search for
     * @return the class name of the test
     * @throws IOException in case of file access errors
     * @throws CitrusRuntimeException if no test with the given name can be found
     */
    public static String getClassNameForTest(final String startDir, final String testName)
        throws IOException {
        /* Stack to hold potential sub directories */
        final Stack<File> dirs = new Stack<File>();
        /* start directory */
        final File startdir = new File(startDir);

        if (startdir.isDirectory()) {
            dirs.push(startdir);
        }

        /* walk through the directories */
        while (!dirs.isEmpty()) {
            File dir = dirs.pop();
            File[] found = dir.listFiles(new FilenameFilter() {
                public boolean accept(File parent, String name) {
                    File tmp = new File(parent.getPath() + "/" + name);
                    /* Only allowing XML files as spring configuration files */
                    return (name.endsWith(".xml") || tmp.isDirectory())
                            && !name.startsWith("CVS") && !name.startsWith(".svn");
                }
            });

            // listFiles() returns null on I/O errors - skip unreadable directories
            if (found == null) {
                continue;
            }

            for (File candidate : found) {
                /* Subfolder support */
                if (candidate.isDirectory()) {
                    dirs.push(candidate);
                } else if ((testName + ".xml").equalsIgnoreCase(candidate.getName())) {
                    String fileName = candidate.getPath();
                    fileName = fileName.substring(0, (fileName.length() - ".xml".length()));

                    if (fileName.startsWith(File.separator)) {
                        fileName = fileName.substring(File.separator.length());
                    }

                    // replace operating system path separator and translate to class package string
                    fileName = fileName.substring(startDir.length()).replace(File.separatorChar, '.');

                    if (log.isDebugEnabled()) {
                        log.debug("Found test '" + fileName + "'");
                    }

                    return fileName;
                }
            }
        }

        throw new CitrusRuntimeException("Could not find test with name '"
                + testName + "'. Test directory is: " + startDir);
    }
}
|
modules/citrus-core/src/main/java/com/consol/citrus/Citrus.java
|
/*
* Copyright 2006-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus;
import java.io.*;
import java.util.*;
import org.apache.commons.cli.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.TestNG;
import org.testng.xml.*;
import com.consol.citrus.exceptions.CitrusRuntimeException;
import com.consol.citrus.exceptions.TestEngineFailedException;
/**
 * Citrus command line application. Parses CLI options, assembles a TestNG suite from
 * named tests, packages and/or testng.xml files, runs it and exits with TestNG's status.
 *
 * @author Christoph Deppisch
 * @since 2008
 */
public class Citrus {
    /**
     * Logger
     */
    private static final Logger log = LoggerFactory.getLogger(Citrus.class);

    /**
     * Prevent instantiation.
     */
    private Citrus() {}

    /**
     * Main CLI method.
     * @param args command line arguments
     */
    public static void main(String[] args) {
        log.info("CITRUS TESTFRAMEWORK ");
        log.info("");

        Options options = new CitrusCliOptions();
        CommandLineParser cliParser = new GnuParser();

        CommandLine cmd = null;

        try {
            cmd = cliParser.parse(options, args);

            if (cmd.hasOption("help")) {
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp("CITRUS TestFramework", options);
                return;
            }

            // normalize test directory so it can be used as a path prefix below
            String testDirectory = cmd.getOptionValue("testdir", CitrusConstants.DEFAULT_TEST_DIRECTORY);
            if (!testDirectory.endsWith("/")) {
                testDirectory = testDirectory + "/";
            }

            // was new TestNG(false): that suppressed TestNG's default listeners, so no
            // reports were generated when launched from ANT - use default listeners instead
            TestNG testNG = new TestNG(true);

            XmlSuite suite = new XmlSuite();
            suite.setName(cmd.getOptionValue("suitename", CitrusConstants.DEFAULT_SUITE_NAME));

            // one XmlTest per explicitly named test, resolved to its class name
            if (cmd.hasOption("test")) {
                for (String testName : cmd.getOptionValues("test")) {
                    XmlTest test = new XmlTest(suite);
                    test.setName(testName);
                    test.setXmlClasses(Collections.singletonList(new XmlClass(getClassNameForTest(testDirectory, testName.trim()))));
                }
            }

            // one XmlTest per requested package
            if (cmd.hasOption("package")) {
                for (String packageName : cmd.getOptionValues("package")) {
                    XmlTest test = new XmlTest(suite);
                    test.setName(packageName);

                    XmlPackage xmlPackage = new XmlPackage();
                    xmlPackage.setName(packageName);
                    test.setXmlPackages(Collections.singletonList(xmlPackage));
                }
            }

            // remaining plain arguments are expected to be testng.xml suite files
            if (!cmd.getArgList().isEmpty()) {
                List<String> testNgXml = new ArrayList<String>();
                for (String testNgXmlFile : cmd.getArgs()) {
                    if (testNgXmlFile.endsWith(".xml")) {
                        testNgXml.add(testNgXmlFile);
                    } else {
                        log.warn("Unrecognized argument '" + testNgXmlFile + "'");
                    }
                }
                testNG.setTestSuites(testNgXml);
            }

            List<XmlSuite> suites = new ArrayList<XmlSuite>();
            suites.add(suite);
            testNG.setXmlSuites(suites);
            testNG.run();

            System.exit(testNG.getStatus());
        } catch (ParseException e) {
            log.error("Failed to parse command line arguments", e);
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("CITRUS TestFramework", options);
        } catch (FileNotFoundException e) {
            log.error("Failed to load test files", e);
            throw new TestEngineFailedException("TestSuite failed with error", e);
        } catch (IOException e) {
            log.error("Error while accessing test file", e);
            throw new TestEngineFailedException("TestSuite failed with error", e);
        }
    }

    /**
     * Method to retrieve the full class name for a test.
     * Hierarchy of folders is supported, too.
     *
     * @param startDir directory where to start the search
     * @param testName test name to search for
     * @return the class name of the test
     * @throws IOException in case of file access errors
     * @throws CitrusRuntimeException if no test with the given name can be found
     */
    public static String getClassNameForTest(final String startDir, final String testName)
        throws IOException {
        /* Stack to hold potential sub directories */
        final Stack<File> dirs = new Stack<File>();
        /* start directory */
        final File startdir = new File(startDir);

        if (startdir.isDirectory()) {
            dirs.push(startdir);
        }

        /* walk through the directories */
        while (!dirs.isEmpty()) {
            File dir = dirs.pop();
            File[] found = dir.listFiles(new FilenameFilter() {
                public boolean accept(File parent, String name) {
                    File tmp = new File(parent.getPath() + "/" + name);
                    /* Only allowing XML files as spring configuration files */
                    return (name.endsWith(".xml") || tmp.isDirectory())
                            && !name.startsWith("CVS") && !name.startsWith(".svn");
                }
            });

            // listFiles() returns null on I/O errors - skip unreadable directories
            if (found == null) {
                continue;
            }

            for (File candidate : found) {
                /* Subfolder support */
                if (candidate.isDirectory()) {
                    dirs.push(candidate);
                } else if ((testName + ".xml").equalsIgnoreCase(candidate.getName())) {
                    String fileName = candidate.getPath();
                    fileName = fileName.substring(0, (fileName.length() - ".xml".length()));

                    if (fileName.startsWith(File.separator)) {
                        fileName = fileName.substring(File.separator.length());
                    }

                    // replace operating system path separator and translate to class package string
                    fileName = fileName.substring(startDir.length()).replace(File.separatorChar, '.');

                    if (log.isDebugEnabled()) {
                        log.debug("Found test '" + fileName + "'");
                    }

                    return fileName;
                }
            }
        }

        throw new CitrusRuntimeException("Could not find test with name '"
                + testName + "'. Test directory is: " + startDir);
    }
}
|
Use default test listeners from TestNG when using ANT
|
modules/citrus-core/src/main/java/com/consol/citrus/Citrus.java
|
Use default test listeners from TestNG when using ANT
|
<ide><path>odules/citrus-core/src/main/java/com/consol/citrus/Citrus.java
<ide> testDirectory = testDirectory + "/";
<ide> }
<ide>
<del> TestNG testNG = new TestNG(false);
<add> TestNG testNG = new TestNG(true);
<ide>
<ide> XmlSuite suite = new XmlSuite();
<ide> suite.setName(cmd.getOptionValue("suitename", CitrusConstants.DEFAULT_SUITE_NAME));
|
|
JavaScript
|
mit
|
b79fbaa8d5ff49e93b8ff8cc52aa6433782df76e
| 0 |
silverbux/rsjs
|
3d039b30-2e9d-11e5-83cc-a45e60cdfd11
|
helloWorld.js
|
3cf41442-2e9d-11e5-ab34-a45e60cdfd11
|
3d039b30-2e9d-11e5-83cc-a45e60cdfd11
|
helloWorld.js
|
3d039b30-2e9d-11e5-83cc-a45e60cdfd11
|
<ide><path>elloWorld.js
<del>3cf41442-2e9d-11e5-ab34-a45e60cdfd11
<add>3d039b30-2e9d-11e5-83cc-a45e60cdfd11
|
|
Java
|
apache-2.0
|
09314d00111d35da53377a2e1fe2036939f6539a
| 0 |
EvilMcJerkface/atlasdb,EvilMcJerkface/atlasdb,palantir/atlasdb,EvilMcJerkface/atlasdb,palantir/atlasdb,palantir/atlasdb
|
/**
* Copyright 2015 Palantir Technologies
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.timestamp;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.concurrent.Synchroniser;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;
import com.palantir.common.concurrent.PTExecutors;
import com.palantir.common.remoting.ServiceNotAvailableException;
import com.palantir.common.time.Clock;
public class PersistentTimestampServiceTest {
private static final long TWO_MINUTES_IN_MILLIS = 120000L;
@Rule
public final ExpectedException expectedException = ExpectedException.none();
@Test
public void testFastForward() {
Mockery m = new Mockery();
m.setThreadingPolicy(new Synchroniser());
final TimestampBoundStore tbsMock = m.mock(TimestampBoundStore.class);
final long initialValue = 1234567L;
final long futureTimestamp = 12345678L;
m.checking(new Expectations() {{
oneOf(tbsMock).getUpperLimit(); will(returnValue(initialValue));
oneOf(tbsMock).storeUpperLimit(initialValue + PersistentTimestampService.ALLOCATION_BUFFER_SIZE);
oneOf(tbsMock).storeUpperLimit(futureTimestamp + PersistentTimestampService.ALLOCATION_BUFFER_SIZE);
}});
final PersistentTimestampService ptsService = PersistentTimestampService.create(tbsMock);
for (int i = 1; i <= 1000; i++) {
assertEquals(initialValue+i, ptsService.getFreshTimestamp());
}
ptsService.fastForwardTimestamp(futureTimestamp);
for (int i = 1; i <= 1000; i++) {
assertEquals(futureTimestamp+i, ptsService.getFreshTimestamp());
}
m.assertIsSatisfied();
}
@Test(expected = ServiceNotAvailableException.class)
public void shouldThrowAServiceNotAvailableExceptionIfMultipleTimestampSerivcesAreRunning() {
final TimestampBoundStore timestampBoundStore = timestampStoreFailingWith(new MultipleRunningTimestampServiceError("error"));
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore);
persistentTimestampService.getFreshTimestamp();
}
@Test
public void incrementUpperLimitIfOneMinuteElapsedSinceLastUpdate() throws InterruptedException {
Clock clock = mock(Clock.class);
when(clock.getTimeMillis()).thenReturn(0L, TWO_MINUTES_IN_MILLIS, 2 * TWO_MINUTES_IN_MILLIS, 3 * TWO_MINUTES_IN_MILLIS);
TimestampBoundStore timestampBoundStore = initialTimestampBoundStore();
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore, clock);
persistentTimestampService.getFreshTimestamp();
Thread.sleep(10);
persistentTimestampService.getFreshTimestamp();
Thread.sleep(10);
verify(timestampBoundStore, times(2)).storeUpperLimit(anyLong());
}
@Test
public void incrementUpperLimitOnFirstFreshTimestampRequest() {
TimestampBoundStore timestampBoundStore = initialTimestampBoundStore();
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore);
persistentTimestampService.getFreshTimestamp();
verify(timestampBoundStore).storeUpperLimit(PersistentTimestampService.ALLOCATION_BUFFER_SIZE);
}
@Test
public void multipleFreshTimestampRequestsShouldIncreaseUpperLimitOnlyOnce() {
TimestampBoundStore timestampBoundStore = initialTimestampBoundStore();
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore);
getFreshTimestampsInParallel(persistentTimestampService, 20);
verify(timestampBoundStore, times(1)).storeUpperLimit(PersistentTimestampService.ALLOCATION_BUFFER_SIZE);
}
@Test
public void throwOnTimestampRequestIfBoundStoreCannotStoreNewUpperLimit() {
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampStoreFailingWith(new RuntimeException()));
expectedException.expect(RuntimeException.class);
persistentTimestampService.getFreshTimestamp();
}
@Test
public void testLimit() throws InterruptedException {
Mockery m = new Mockery();
m.setThreadingPolicy(new Synchroniser());
final TimestampBoundStore tbsMock = m.mock(TimestampBoundStore.class);
final long initialValue = 72;
m.checking(new Expectations() {{
oneOf(tbsMock).getUpperLimit(); will(returnValue(initialValue));
oneOf(tbsMock).storeUpperLimit(with(any(Long.class)));
// Throws exceptions after here, which will prevent allocating more timestamps.
}});
// Use up all initially-allocated timestamps.
final TimestampService tsService = PersistentTimestampService.create(tbsMock);
for (int i = 1; i <= PersistentTimestampService.ALLOCATION_BUFFER_SIZE; ++i) {
assertEquals(initialValue+i, tsService.getFreshTimestamp());
}
ExecutorService exec = PTExecutors.newSingleThreadExecutor();
Future<?> f = exec.submit(new Runnable() {
@Override
public void run() {
// This will block.
tsService.getFreshTimestamp();
}
});
try {
f.get(10, TimeUnit.MILLISECONDS);
fail("We should be blocking");
} catch (ExecutionException e) {
// we expect this failure because we can't allocate timestamps
} catch (TimeoutException e) {
// We expect this timeout, as we're blocking.
} finally {
f.cancel(true);
exec.shutdown();
}
}
private void getFreshTimestampsInParallel(PersistentTimestampService persistentTimestampService, int numTimes) {
ExecutorService executorService = Executors.newFixedThreadPool(numTimes / 2);
try {
List<Future<Long>> futures = Lists.newArrayListWithExpectedSize(numTimes);
for (int i = 0; i < numTimes; i++) {
Future<Long> future = executorService.submit(new Callable<Long>() {
@Override
public Long call() throws Exception {
return persistentTimestampService.getFreshTimestamp();
}
});
futures.add(future);
}
for (int i = 0; i < futures.size(); i++) {
Futures.getUnchecked(futures.get(i));
}
} finally {
executorService.shutdown();
}
}
private TimestampBoundStore initialTimestampBoundStore() {
TimestampBoundStore timestampBoundStore = mock(TimestampBoundStore.class);
when(timestampBoundStore.getUpperLimit()).thenReturn(0L);
return timestampBoundStore;
}
private TimestampBoundStore timestampStoreFailingWith(Throwable throwable) {
TimestampBoundStore timestampBoundStore = mock(TimestampBoundStore.class);
when(timestampBoundStore.getUpperLimit()).thenReturn(0L);
doThrow(throwable).when(timestampBoundStore).storeUpperLimit(anyLong());
return timestampBoundStore;
}
}
|
timestamp-impl/src/test/java/com/palantir/timestamp/PersistentTimestampServiceTest.java
|
/**
* Copyright 2015 Palantir Technologies
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.timestamp;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.concurrent.Synchroniser;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;
import com.palantir.common.concurrent.PTExecutors;
import com.palantir.common.remoting.ServiceNotAvailableException;
import com.palantir.common.time.Clock;
public class PersistentTimestampServiceTest {
private static final long TWO_MINUTES_IN_MILLIS = 120000L;
@Rule
public final ExpectedException expectedException = ExpectedException.none();
@Test
public void testFastForward() {
Mockery m = new Mockery();
m.setThreadingPolicy(new Synchroniser());
final TimestampBoundStore tbsMock = m.mock(TimestampBoundStore.class);
final long initialValue = 1234567L;
final long futureTimestamp = 12345678L;
m.checking(new Expectations() {{
oneOf(tbsMock).getUpperLimit(); will(returnValue(initialValue));
oneOf(tbsMock).storeUpperLimit(initialValue + PersistentTimestampService.ALLOCATION_BUFFER_SIZE);
oneOf(tbsMock).storeUpperLimit(futureTimestamp + PersistentTimestampService.ALLOCATION_BUFFER_SIZE);
}});
final PersistentTimestampService ptsService = PersistentTimestampService.create(tbsMock);
for (int i = 1; i <= 1000; i++) {
assertEquals(initialValue+i, ptsService.getFreshTimestamp());
}
ptsService.fastForwardTimestamp(futureTimestamp);
for (int i = 1; i <= 1000; i++) {
assertEquals(futureTimestamp+i, ptsService.getFreshTimestamp());
}
m.assertIsSatisfied();
}
@Test(expected = ServiceNotAvailableException.class)
public void shouldAServiceNotAvailableExceptionIfMultipleTimestampSerivcesAreRunning() {
final TimestampBoundStore timestampBoundStore = timestampStoreFailingWith(new MultipleRunningTimestampServiceError("error"));
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore);
persistentTimestampService.getFreshTimestamp();
}
@Test
public void incrementUpperLimitIfOneMinuteElapsedSinceLastUpdate() throws InterruptedException {
Clock clock = mock(Clock.class);
when(clock.getTimeMillis()).thenReturn(0L, TWO_MINUTES_IN_MILLIS, 2 * TWO_MINUTES_IN_MILLIS, 3 * TWO_MINUTES_IN_MILLIS);
TimestampBoundStore timestampBoundStore = initialTimestampBoundStore();
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore, clock);
persistentTimestampService.getFreshTimestamp();
Thread.sleep(10);
persistentTimestampService.getFreshTimestamp();
Thread.sleep(10);
verify(timestampBoundStore, times(2)).storeUpperLimit(anyLong());
}
@Test
public void incrementUpperLimitOnFirstFreshTimestampRequest() {
TimestampBoundStore timestampBoundStore = initialTimestampBoundStore();
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore);
persistentTimestampService.getFreshTimestamp();
verify(timestampBoundStore).storeUpperLimit(PersistentTimestampService.ALLOCATION_BUFFER_SIZE);
}
@Test
public void multipleFreshTimestampRequestsShouldIncreaseUpperLimitOnlyOnce() {
TimestampBoundStore timestampBoundStore = initialTimestampBoundStore();
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore);
getFreshTimestampsInParallel(persistentTimestampService, 20);
verify(timestampBoundStore, times(1)).storeUpperLimit(PersistentTimestampService.ALLOCATION_BUFFER_SIZE);
}
@Test
public void throwOnTimestampRequestIfBoundStoreCannotStoreNewUpperLimit() {
PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampStoreFailingWith(new RuntimeException()));
expectedException.expect(RuntimeException.class);
persistentTimestampService.getFreshTimestamp();
}
@Test
public void testLimit() throws InterruptedException {
Mockery m = new Mockery();
m.setThreadingPolicy(new Synchroniser());
final TimestampBoundStore tbsMock = m.mock(TimestampBoundStore.class);
final long initialValue = 72;
m.checking(new Expectations() {{
oneOf(tbsMock).getUpperLimit(); will(returnValue(initialValue));
oneOf(tbsMock).storeUpperLimit(with(any(Long.class)));
// Throws exceptions after here, which will prevent allocating more timestamps.
}});
// Use up all initially-allocated timestamps.
final TimestampService tsService = PersistentTimestampService.create(tbsMock);
for (int i = 1; i <= PersistentTimestampService.ALLOCATION_BUFFER_SIZE; ++i) {
assertEquals(initialValue+i, tsService.getFreshTimestamp());
}
ExecutorService exec = PTExecutors.newSingleThreadExecutor();
Future<?> f = exec.submit(new Runnable() {
@Override
public void run() {
// This will block.
tsService.getFreshTimestamp();
}
});
try {
f.get(10, TimeUnit.MILLISECONDS);
fail("We should be blocking");
} catch (ExecutionException e) {
// we expect this failure because we can't allocate timestamps
} catch (TimeoutException e) {
// We expect this timeout, as we're blocking.
} finally {
f.cancel(true);
exec.shutdown();
}
}
private void getFreshTimestampsInParallel(PersistentTimestampService persistentTimestampService, int numTimes) {
ExecutorService executorService = Executors.newFixedThreadPool(numTimes / 2);
try {
List<Future<Long>> futures = Lists.newArrayListWithExpectedSize(numTimes);
for (int i = 0; i < numTimes; i++) {
Future<Long> future = executorService.submit(new Callable<Long>() {
@Override
public Long call() throws Exception {
return persistentTimestampService.getFreshTimestamp();
}
});
futures.add(future);
}
for (int i = 0; i < futures.size(); i++) {
Futures.getUnchecked(futures.get(i));
}
} finally {
executorService.shutdown();
}
}
private TimestampBoundStore initialTimestampBoundStore() {
TimestampBoundStore timestampBoundStore = mock(TimestampBoundStore.class);
when(timestampBoundStore.getUpperLimit()).thenReturn(0L);
return timestampBoundStore;
}
private TimestampBoundStore timestampStoreFailingWith(Throwable throwable) {
TimestampBoundStore timestampBoundStore = mock(TimestampBoundStore.class);
when(timestampBoundStore.getUpperLimit()).thenReturn(0L);
doThrow(throwable).when(timestampBoundStore).storeUpperLimit(anyLong());
return timestampBoundStore;
}
}
|
Fix test typo
|
timestamp-impl/src/test/java/com/palantir/timestamp/PersistentTimestampServiceTest.java
|
Fix test typo
|
<ide><path>imestamp-impl/src/test/java/com/palantir/timestamp/PersistentTimestampServiceTest.java
<ide> }
<ide>
<ide> @Test(expected = ServiceNotAvailableException.class)
<del> public void shouldAServiceNotAvailableExceptionIfMultipleTimestampSerivcesAreRunning() {
<add> public void shouldThrowAServiceNotAvailableExceptionIfMultipleTimestampSerivcesAreRunning() {
<ide> final TimestampBoundStore timestampBoundStore = timestampStoreFailingWith(new MultipleRunningTimestampServiceError("error"));
<ide>
<ide> PersistentTimestampService persistentTimestampService = PersistentTimestampService.create(timestampBoundStore);
|
|
Java
|
apache-2.0
|
3729769b79b99910947fb13ccc46a6b6169a47a9
| 0 |
GoogleCloudDataproc/hadoop-connectors,GoogleCloudDataproc/hadoop-connectors
|
/*
* Copyright 2013 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.hadoop.fs.gcs;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.BLOCK_SIZE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_CONCURRENT_GLOB_ENABLE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_CONFIG_OVERRIDE_FILE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_CREATE_SYSTEM_BUCKET;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_FILE_CHECKSUM_TYPE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_FLAT_GLOB_ENABLE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_LAZY_INITIALIZATION_ENABLE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_TYPE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_ENABLE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_SYSTEM_BUCKET;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_WORKING_DIRECTORY;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.PATH_CODEC;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.PERMISSIONS_TO_REPORT;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.flogger.LazyArgs.lazy;
import com.google.api.client.auth.oauth2.Credential;
import com.google.cloud.hadoop.gcsio.CreateFileOptions;
import com.google.cloud.hadoop.gcsio.FileInfo;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorage;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorage.ListPage;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystem;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystemOptions;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageItemInfo;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageReadOptions;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageReadOptions.Fadvise;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageReadOptions.GenerationReadConsistency;
import com.google.cloud.hadoop.gcsio.PathCodec;
import com.google.cloud.hadoop.gcsio.StorageResourceId;
import com.google.cloud.hadoop.util.AccessTokenProvider;
import com.google.cloud.hadoop.util.AccessTokenProviderClassFromConfigFactory;
import com.google.cloud.hadoop.util.CredentialFactory;
import com.google.cloud.hadoop.util.CredentialFromAccessTokenProviderClassFactory;
import com.google.cloud.hadoop.util.HadoopCredentialConfiguration;
import com.google.cloud.hadoop.util.HadoopVersionInfo;
import com.google.cloud.hadoop.util.PropertyUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Ascii;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.flogger.GoogleLogger;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.nio.file.DirectoryNotEmptyException;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
import org.apache.commons.codec.binary.Hex;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.GlobPattern;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
/**
* This class provides a Hadoop compatible File System on top of Google Cloud Storage (GCS).
*
* <p>It is implemented as a thin abstraction layer on top of GCS. The layer hides any specific
* characteristics of the underlying store and exposes FileSystem interface understood by the Hadoop
* engine.
*
* <p>Users interact with the files in the storage using fully qualified URIs. The file system
* exposed by this class is identified using the 'gs' scheme. For example, {@code
* gs://dir1/dir2/file1.txt}.
*
* <p>This implementation translates paths between hadoop Path and GCS URI with the convention that
* the Hadoop root directly corresponds to the GCS "root", e.g. gs:/. This is convenient for many
* reasons, such as data portability and close equivalence to gsutil paths, but imposes certain
* inherited constraints, such as files not being allowed in root (only 'directories' can be placed
* in root), and directory names inside root have a more limited set of allowed characters.
*
* <p>One of the main goals of this implementation is to maintain compatibility with behavior of
* HDFS implementation when accessed through FileSystem interface. HDFS implementation is not very
* consistent about the cases when it throws versus the cases when methods return false. We run GHFS
* tests and HDFS tests against the same test data and use that as a guide to decide whether to
* throw or to return false.
*/
public abstract class GoogleHadoopFileSystemBase extends GoogleHadoopFileSystemBaseSpecific
implements FileSystemDescriptor {
private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
/**
 * Available types for use with {@link
 * GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_TYPE}.
 */
public enum OutputStreamType {
  /** Plain output stream, backed by {@code GoogleHadoopOutputStream} in {@code create()}. */
  BASIC,
  /** Stream backed by {@code GoogleHadoopSyncableOutputStream} in {@code create()}. */
  SYNCABLE_COMPOSITE
}
/**
 * Available GCS checksum types for use with {@link
 * GoogleHadoopFileSystemConfiguration#GCS_FILE_CHECKSUM_TYPE}.
 *
 * <p>Note: nested enums are implicitly static, so the redundant {@code static} modifier
 * has been dropped.
 */
public enum GcsFileChecksumType {
  /** No checksum reported; algorithm name is null and the value is zero bytes long. */
  NONE(null, 0),
  /** Composite CRC32C checksum, 4 bytes. */
  CRC32C("COMPOSITE-CRC32C", 4),
  /** MD5 digest, 16 bytes. */
  MD5("MD5", 16);

  private final String algorithmName;
  private final int byteLength;

  GcsFileChecksumType(String algorithmName, int byteLength) {
    this.algorithmName = algorithmName;
    this.byteLength = byteLength;
  }

  /** Returns the algorithm name reported through {@code FileChecksum}, or null for NONE. */
  public String getAlgorithmName() {
    return algorithmName;
  }

  /** Returns the checksum value length in bytes. */
  public int getByteLength() {
    return byteLength;
  }
}
/** Use new URI_ENCODED_PATH_CODEC. */
public static final String PATH_CODEC_USE_URI_ENCODING = "uri-path";
/** Use LEGACY_PATH_CODEC. */
public static final String PATH_CODEC_USE_LEGACY_ENCODING = "legacy";
/** Default value of replication factor. */
public static final short REPLICATION_FACTOR_DEFAULT = 3;
/** Default PathFilter that accepts all paths. */
public static final PathFilter DEFAULT_FILTER = path -> true;
/** Prefix to use for common authentication keys. */
public static final String AUTHENTICATION_PREFIX = "fs.gs";
/** A resource file containing GCS related build properties. */
public static final String PROPERTIES_FILE = "gcs.properties";
/** The key in the PROPERTIES_FILE that contains the version built. */
public static final String VERSION_PROPERTY = "gcs.connector.version";
/** The version returned when one cannot be found in properties. */
public static final String UNKNOWN_VERSION = "0.0.0";
/** Current version. */
public static final String VERSION;
/** Identifies this version of the GoogleHadoopFileSystemBase library. */
public static final String GHFS_ID;
static {
  // Resolve the connector version from the bundled gcs.properties resource, falling back
  // to UNKNOWN_VERSION ("0.0.0") when the resource or property cannot be read.
  VERSION =
      PropertyUtil.getPropertyOrDefault(
          GoogleHadoopFileSystemBase.class, PROPERTIES_FILE, VERSION_PROPERTY, UNKNOWN_VERSION);
  logger.atFine().log("GHFS version: %s", VERSION);
  GHFS_ID = String.format("GHFS/%s", VERSION);
}
private static final ThreadFactory DAEMON_THREAD_FACTORY =
new ThreadFactoryBuilder().setNameFormat("ghfs-thread-%d").setDaemon(true).build();
@VisibleForTesting
boolean enableFlatGlob = GCS_FLAT_GLOB_ENABLE.getDefault();
@VisibleForTesting
boolean enableConcurrentGlob = GCS_CONCURRENT_GLOB_ENABLE.getDefault();
private GcsFileChecksumType checksumType = GCS_FILE_CHECKSUM_TYPE.getDefault();
/** The URI the File System is passed in initialize. */
protected URI initUri;
/**
* The retrieved configuration value for {@link
* GoogleHadoopFileSystemConfiguration#GCS_SYSTEM_BUCKET}. Used as a fallback for a root bucket,
* when required.
*/
@Deprecated protected String systemBucket;
/** Underlying GCS file system object. */
private Supplier<GoogleCloudStorageFileSystem> gcsFsSupplier;
private boolean gcsFsInitialized = false;
protected PathCodec pathCodec;
/**
* Current working directory; overridden in initialize() if {@link
* GoogleHadoopFileSystemConfiguration#GCS_WORKING_DIRECTORY} is set.
*/
private Path workingDirectory;
/**
* Default block size. Note that this is the size that is reported to Hadoop FS clients. It does
* not modify the actual block size of an underlying GCS object, because GCS JSON API does not
* allow modifying or querying the value. Modifying this value allows one to control how many
* mappers are used to process a given file.
*/
protected long defaultBlockSize = BLOCK_SIZE.getDefault();
/** The fixed reported permission of all files. */
private FsPermission reportedPermissions;
/** Map of counter values */
protected final ImmutableMap<Counter, AtomicLong> counters = createCounterMap();
/** Builds an immutable map holding one zeroed {@link AtomicLong} per {@link Counter}. */
protected ImmutableMap<Counter, AtomicLong> createCounterMap() {
  EnumMap<Counter, AtomicLong> counterValues = new EnumMap<>(Counter.class);
  ALL_COUNTERS.forEach(counter -> counterValues.put(counter, new AtomicLong()));
  return Maps.immutableEnumMap(counterValues);
}
/**
 * Behavior of listStatus when a path is not found: Hadoop 1 returned null, while
 * Hadoop 0.23/2.x+ throws {@link FileNotFoundException}.
 */
protected enum ListStatusFileNotFoundBehavior {
  /** Pre-2.0 contract: report a missing path by returning null. */
  Hadoop1 {
    @Override
    public FileStatus[] handle(String path) throws IOException {
      return null;
    }
  },
  /** 0.23/2.x+ contract: report a missing path by throwing FileNotFoundException. */
  Hadoop2 {
    @Override
    public FileStatus[] handle(String path) throws IOException {
      throw new FileNotFoundException(String.format("Path '%s' does not exist.", path));
    }
  };

  /**
   * Perform version specific handling for a missing path.
   * @param path The missing path
   */
  public abstract FileStatus[] handle(String path) throws IOException;

  /**
   * Get the ListStatusFileNotFoundBehavior for the currently running Hadoop version.
   */
  public static ListStatusFileNotFoundBehavior get() {
    return get(HadoopVersionInfo.getInstance());
  }

  /**
   * Get the ListStatusFileNotFoundBehavior for the given hadoop version.
   * @param hadoopVersionInfo The hadoop version.
   */
  public static ListStatusFileNotFoundBehavior get(HadoopVersionInfo hadoopVersionInfo) {
    // Versions >= 2.0 and the 0.23 line share the "throw" behavior.
    if (hadoopVersionInfo.isGreaterThan(2, 0)
        || hadoopVersionInfo.isEqualTo(2, 0)
        || hadoopVersionInfo.isEqualTo(0, 23)) {
      return Hadoop2;
    }
    return Hadoop1;
  }
}
// Behavior when a path is not found in listStatus(); derived from the running Hadoop version.
protected ListStatusFileNotFoundBehavior listStatusFileNotFoundBehavior =
    ListStatusFileNotFoundBehavior.get();

/** Overrides the version-derived not-found behavior; intended for tests only. */
@VisibleForTesting
protected void setListStatusFileNotFoundBehavior(ListStatusFileNotFoundBehavior behavior) {
  this.listStatusFileNotFoundBehavior = behavior;
}
/**
 * Defines names of counters we track for each operation.
 *
 * <p>There are two types of counters:
 *
 * <ul>
 *   <li>METHOD_NAME : Number of successful invocations of method METHOD.
 *   <li>METHOD_NAME_TIME : Total inclusive time spent in method METHOD.
 * </ul>
 */
public enum Counter {
  // Keep each invocation counter adjacent to its _TIME pair.
  APPEND,
  APPEND_TIME,
  CREATE,
  CREATE_TIME,
  DELETE,
  DELETE_TIME,
  GET_FILE_CHECKSUM,
  GET_FILE_CHECKSUM_TIME,
  GET_FILE_STATUS,
  GET_FILE_STATUS_TIME,
  INIT,
  INIT_TIME,
  INPUT_STREAM,
  INPUT_STREAM_TIME,
  LIST_STATUS,
  LIST_STATUS_TIME,
  MKDIRS,
  MKDIRS_TIME,
  OPEN,
  OPEN_TIME,
  OUTPUT_STREAM,
  OUTPUT_STREAM_TIME,
  READ1,
  READ1_TIME,
  READ,
  READ_TIME,
  READ_FROM_CHANNEL,
  READ_FROM_CHANNEL_TIME,
  READ_CLOSE,
  READ_CLOSE_TIME,
  READ_POS,
  READ_POS_TIME,
  RENAME,
  RENAME_TIME,
  SEEK,
  SEEK_TIME,
  SET_WD,
  SET_WD_TIME,
  WRITE1,
  WRITE1_TIME,
  WRITE,
  WRITE_TIME,
  WRITE_CLOSE,
  WRITE_CLOSE_TIME,
}
/**
* Set of all counters.
*
* <p>It is used for performance optimization instead of `Counter.values`, because
* `Counter.values` returns new array on each invocation.
*/
private static final ImmutableSet<Counter> ALL_COUNTERS =
Sets.immutableEnumSet(EnumSet.allOf(Counter.class));
/**
 * GCS {@link FileChecksum} which takes constructor parameters to define the return values of the
 * various abstract methods of {@link FileChecksum}.
 */
private static class GcsFileChecksum extends FileChecksum {
  private final GcsFileChecksumType checksumType;
  private final byte[] bytes;

  /**
   * @param checksumType algorithm this checksum value belongs to
   * @param bytes raw checksum value; may be null (the validity check permits it)
   * @throws IllegalStateException if a non-null value's length does not match the algorithm's
   *     declared byte length
   */
  public GcsFileChecksum(GcsFileChecksumType checksumType, byte[] bytes) {
    this.checksumType = checksumType;
    this.bytes = bytes;
    // checkState evaluates its varargs eagerly, so the null case must be guarded here
    // instead of dereferencing bytes.length unconditionally (the original NPE'd for null
    // bytes). Also pass the value length first so the arguments match the message template.
    checkState(
        bytes == null || bytes.length == checksumType.getByteLength(),
        "Checksum value length (%s) should be equal to the algorithm byte length (%s)",
        bytes == null ? null : bytes.length,
        checksumType.getByteLength());
  }

  @Override
  public String getAlgorithmName() {
    return checksumType.getAlgorithmName();
  }

  @Override
  public int getLength() {
    return checksumType.getByteLength();
  }

  @Override
  public byte[] getBytes() {
    return bytes;
  }

  // NOTE(review): readFields/write assume bytes != null — confirm checksums with a null
  // value (e.g. type NONE) are never (de)serialized through these methods.
  @Override
  public void readFields(DataInput in) throws IOException {
    in.readFully(bytes);
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.write(bytes);
  }

  @Override
  public String toString() {
    return getAlgorithmName() + ": " + (bytes == null ? null : new String(Hex.encodeHex(bytes)));
  }
}
/**
 * A predicate that evaluates fs.gs.parent.timestamp.update.enable,
 * fs.gs.parent.timestamp.update.substrings.include and
 * fs.gs.parent.timestamp.update.substrings.exclude against individual directory paths to decide
 * whether a directory's timestamp should be updated. Includes are consulted before excludes;
 * when updates are enabled and neither list matches, the timestamp is updated.
 */
public static class ParentTimestampUpdateIncludePredicate
    implements GoogleCloudStorageFileSystemOptions.TimestampUpdatePredicate {

  /** Builds a predicate wired from the given Hadoop configuration. */
  public static ParentTimestampUpdateIncludePredicate create(Configuration config) {
    return new ParentTimestampUpdateIncludePredicate(
        GCS_PARENT_TIMESTAMP_UPDATE_ENABLE.get(config, config::getBoolean),
        GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES.getStringCollection(config),
        GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES.getStringCollection(config));
  }

  // Both substring lists are expected to be small and consulted infrequently; switch to
  // Aho-Corasick or a similar matcher if that stops being true.
  private final Collection<String> includeSubstrings;
  private final Collection<String> excludeSubstrings;
  private final boolean enableTimestampUpdates;

  public ParentTimestampUpdateIncludePredicate(
      boolean enableTimestampUpdates,
      Collection<String> includeSubstrings,
      Collection<String> excludeSubstrings) {
    this.enableTimestampUpdates = enableTimestampUpdates;
    this.includeSubstrings = includeSubstrings;
    this.excludeSubstrings = excludeSubstrings;
  }

  /**
   * Determine if updating directory timestamps should be ignored.
   *
   * @return true if the directory timestamp should be updated, false if it should be left alone.
   */
  @Override
  public boolean shouldUpdateTimestamp(URI uri) {
    if (!enableTimestampUpdates) {
      logger.atFine().log("Timestamp updating disabled. Not updating uri %s", uri);
      return false;
    }
    String uriString = uri.toString();
    // An include match wins over any exclude match.
    for (String includeSubstring : includeSubstrings) {
      if (uriString.contains(includeSubstring)) {
        logger.atFine().log(
            "Path %s matched included path %s. Updating timestamps.", uri, includeSubstring);
        return true;
      }
    }
    for (String excludeSubstring : excludeSubstrings) {
      if (uriString.contains(excludeSubstring)) {
        logger.atFine().log(
            "Path %s matched excluded path %s. Not updating timestamps.", uri, excludeSubstring);
        return false;
      }
    }
    // Updates are enabled and no list matched: update by default.
    return true;
  }
}
/**
 * Constructs an instance of GoogleHadoopFileSystemBase; the internal {@link
 * GoogleCloudStorageFileSystem} will be set up with config settings when initialize() is called.
 */
public GoogleHadoopFileSystemBase() {}

/**
 * Constructs an instance of {@link GoogleHadoopFileSystemBase} using the provided
 * GoogleCloudStorageFileSystem; initialize() will not re-initialize it.
 */
// TODO(b/120887495): This @VisibleForTesting annotation was being ignored by prod code.
// Please check that removing it is correct, and remove this comment along with it.
// @VisibleForTesting
GoogleHadoopFileSystemBase(GoogleCloudStorageFileSystem gcsFs) {
  checkNotNull(gcsFs, "gcsFs must not be null");
  setGcsFs(gcsFs);
}

/**
 * Installs a pre-built GCS filesystem, marking this instance as initialized and adopting
 * the filesystem's path codec.
 */
private void setGcsFs(GoogleCloudStorageFileSystem gcsFs) {
  this.gcsFsSupplier = Suppliers.ofInstance(gcsFs);
  this.gcsFsInitialized = true;
  this.pathCodec = gcsFs.getPathCodec();
}
/**
 * Returns an unqualified path without any leading slash, relative to the filesystem root,
 * which serves as the home directory of the current user; see {@code getHomeDirectory} for
 * a description of what the home directory means.
 */
protected abstract String getHomeDirectorySubpath();

/**
 * Gets Hadoop path corresponding to the given GCS path.
 *
 * @param gcsPath Fully-qualified GCS path, of the form {@code gs://<bucket>/<object>}.
 */
public abstract Path getHadoopPath(URI gcsPath);

/**
 * Gets GCS path corresponding to the given Hadoop path, which can be relative or absolute,
 * and can have either {@code gs://<path>} or {@code gs:/<path>} forms.
 *
 * @param hadoopPath Hadoop path.
 */
public abstract URI getGcsPath(Path hadoopPath);

/**
 * Gets the default value of working directory.
 */
public abstract Path getDefaultWorkingDirectory();

// =================================================================
// Methods implementing FileSystemDescriptor interface; these define the way
// paths are translated between Hadoop and GCS.
// =================================================================

@Override
public abstract Path getFileSystemRoot();

@Override
public abstract String getScheme();

/** @deprecated use {@link #getScheme()} instead; this simply delegates to it. */
@Deprecated
@Override
public String getHadoopScheme() {
  return getScheme();
}
/**
 * {@inheritDoc}
 *
 * <p>Overridden so that root is its own parent. This is POSIX compliant, but more importantly
 * guards against poor directory accounting in the PathData class of Hadoop 2's FsShell.
 */
@Override
public Path makeQualified(Path path) {
  logger.atFine().log("GHFS.makeQualified: path: %s", path);
  Path qualifiedPath = super.makeQualified(path);
  URI uri = qualifiedPath.toUri();
  checkState(
      "".equals(uri.getPath()) || qualifiedPath.isAbsolute(),
      "Path '%s' must be fully qualified.",
      qualifiedPath);
  // Drop leading "/.." components one at a time; the leading slash is retained so the
  // path stays absolute.
  String pathString = uri.getPath();
  while (pathString.startsWith("/../")) {
    pathString = pathString.substring(3);
  }
  // Allow a Path of gs://someBucket (empty path) or a fully stripped "/.." to map to the root.
  if (pathString.equals("/..") || pathString.isEmpty()) {
    pathString = "/";
  }
  Path result = new Path(uri.getScheme(), uri.getAuthority(), pathString);
  logger.atFine().log("GHFS.makeQualified:=> %s", result);
  return result;
}
/**
 * Validates that the path's scheme (when present) matches this filesystem's scheme.
 * Authority and path components are validated later by other code paths.
 *
 * @throws IllegalArgumentException if the path carries a foreign scheme.
 */
@Override
protected void checkPath(Path path) {
  URI uri = path.toUri();
  String scheme = uri.getScheme();
  // A null scheme is accepted; only a mismatching one is rejected here.
  if (scheme != null && !scheme.equalsIgnoreCase(getScheme())) {
    throw new IllegalArgumentException(
        String.format(
            "Wrong FS scheme: %s, in path: %s, expected scheme: %s",
            scheme, path, getScheme()));
  }
}
/**
 * See {@link #initialize(URI, Configuration, boolean)} for details; calls with third arg
 * defaulting to 'true' for initializing the superclass.
 *
 * <p>Initializing the superclass registers a global Statistics object for this instance.
 *
 * @param path URI of a file/directory within this file system.
 * @param config Hadoop configuration.
 */
@Override
public void initialize(URI path, Configuration config) throws IOException {
  initialize(path, config, /* initSuperclass= */ true);
}
/**
 * Initializes this file system instance.
 *
 * <p>Note: The path passed to this method could be path of any file/directory. It does not
 * matter because the only thing we check is whether it uses 'gs' scheme. The rest is ignored.
 *
 * @param path URI of a file/directory within this file system.
 * @param config Hadoop configuration.
 * @param initSuperclass if false, doesn't call super.initialize(path, config); avoids
 *     registering a global Statistics object for this instance.
 * @throws IllegalArgumentException if path is null, config is null, or the scheme is
 *     missing or unsupported.
 */
public void initialize(URI path, Configuration config, boolean initSuperclass)
    throws IOException {
  long startTime = System.nanoTime();
  Preconditions.checkArgument(path != null, "path must not be null");
  Preconditions.checkArgument(config != null, "config must not be null");
  Preconditions.checkArgument(path.getScheme() != null, "scheme of path must not be null");
  if (!path.getScheme().equals(getScheme())) {
    throw new IllegalArgumentException("URI scheme not supported: " + path);
  }
  initUri = path;
  logger.atFine().log("GHFS.initialize: %s", path);
  if (initSuperclass) {
    super.initialize(path, config);
  } else {
    logger.atFine().log(
        "Initializing 'statistics' as an instance not attached to the static FileSystem map");
    // Provide an ephemeral Statistics object to avoid NPE, but still avoid registering a global
    // statistics object.
    statistics = new Statistics(getScheme());
  }
  // All configuration-driven setup (including the GCS filesystem wiring) happens here.
  configure(config);
  long duration = System.nanoTime() - startTime;
  increment(Counter.INIT);
  increment(Counter.INIT_TIME, duration);
}
/**
 * Returns a URI of the root of this FileSystem.
 *
 * @return the root path of this filesystem converted to a URI.
 */
@Override
public URI getUri() {
  return getFileSystemRoot().toUri();
}
/**
 * The default port is listed as -1 as an indication that ports are not used.
 */
@Override
protected int getDefaultPort() {
  logger.atFine().log("GHFS.getDefaultPort:");
  int result = -1;  // GCS URIs carry no port component.
  logger.atFine().log("GHFS.getDefaultPort:=> %s", result);
  return result;
}
// TODO(user): Improve conversion of exceptions to 'false'.
// Hadoop is inconsistent about when methods are expected to throw
// and when they should return false. The FileSystem documentation
// is unclear on this and many other aspects. For now, we convert
// all IOExceptions to false which is not the right thing to do.
// We need to find a way to only convert known cases to 'false'
// and let the other exceptions bubble up.

/**
 * Opens the given file for reading.
 *
 * <p>Note: This function overrides the given bufferSize value with a higher number unless further
 * overridden using configuration parameter {@code fs.gs.inputstream.buffer.size}.
 *
 * @param hadoopPath File to open.
 * @param bufferSize Size of buffer to use for IO; ignored — buffering is governed by the
 *     read channel options below.
 * @return A readable stream.
 * @throws FileNotFoundException if the given path does not exist.
 * @throws IOException if an error occurs.
 */
@Override
public FSDataInputStream open(Path hadoopPath, int bufferSize) throws IOException {
  long startTime = System.nanoTime();
  Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
  checkOpen();
  logger.atFine().log("GHFS.open: %s, bufferSize: %d (ignored)", hadoopPath, bufferSize);
  URI gcsPath = getGcsPath(hadoopPath);
  // Read behavior (buffering, fadvise, etc.) comes from the configured channel options.
  GoogleCloudStorageReadOptions readChannelOptions =
      getGcsFs().getOptions().getCloudStorageOptions().getReadChannelOptions();
  GoogleHadoopFSInputStream in =
      new GoogleHadoopFSInputStream(this, gcsPath, readChannelOptions, statistics);
  long duration = System.nanoTime() - startTime;
  increment(Counter.OPEN);
  increment(Counter.OPEN_TIME, duration);
  return new FSDataInputStream(in);
}
/**
 * Opens the given file for writing.
 *
 * <p>Note: This function overrides the given bufferSize value with a higher number unless further
 * overridden using configuration parameter {@code fs.gs.outputstream.buffer.size}.
 *
 * @param hadoopPath The file to open.
 * @param permission Permissions to set on the new file. Ignored.
 * @param overwrite If a file with this name already exists, then if true, the file will be
 *     overwritten, and if false an error will be thrown.
 * @param bufferSize The size of the buffer to use.
 * @param replication Required block replication for the file. Ignored.
 * @param blockSize The block-size to be used for the new file. Ignored.
 * @param progress Progress is reported through this. Ignored.
 * @return A writable stream.
 * @throws IOException if an error occurs.
 * @see #setPermission(Path, FsPermission)
 */
@Override
public FSDataOutputStream create(
    Path hadoopPath,
    FsPermission permission,
    boolean overwrite,
    int bufferSize,
    short replication,
    long blockSize,
    Progressable progress)
    throws IOException {
  long startTime = System.nanoTime();
  Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
  Preconditions.checkArgument(
      replication > 0, "replication must be a positive integer: %s", replication);
  Preconditions.checkArgument(
      blockSize > 0, "blockSize must be a positive integer: %s", blockSize);
  checkOpen();
  logger.atFine().log(
      "GHFS.create: %s, overwrite: %s, bufferSize: %d (ignored)",
      hadoopPath, overwrite, bufferSize);
  URI gcsPath = getGcsPath(hadoopPath);
  // The configured stream type selects the underlying implementation: BASIC uses
  // GoogleHadoopOutputStream; SYNCABLE_COMPOSITE uses GoogleHadoopSyncableOutputStream
  // (presumably to support Hadoop sync semantics — see that class).
  OutputStreamType type = GCS_OUTPUT_STREAM_TYPE.get(getConf(), getConf()::getEnum);
  OutputStream out;
  switch (type) {
    case BASIC:
      out =
          new GoogleHadoopOutputStream(
              this, gcsPath, statistics, new CreateFileOptions(overwrite));
      break;
    case SYNCABLE_COMPOSITE:
      out =
          new GoogleHadoopSyncableOutputStream(
              this, gcsPath, statistics, new CreateFileOptions(overwrite));
      break;
    default:
      // Defensive: only reachable if a new OutputStreamType is added without a case here.
      throw new IOException(
          String.format(
              "Unsupported output stream type given for key '%s': '%s'",
              GCS_OUTPUT_STREAM_TYPE.getKey(), type));
  }
  long duration = System.nanoTime() - startTime;
  increment(Counter.CREATE);
  increment(Counter.CREATE_TIME, duration);
  return new FSDataOutputStream(out, null);
}
/**
 * Appends to an existing file (optional operation). Not supported: this method always
 * throws after recording counters.
 *
 * @param hadoopPath The existing file to be appended.
 * @param bufferSize The size of the buffer to be used.
 * @param progress For reporting progress if it is not null.
 * @return A writable stream.
 * @throws IOException always, since the append operation is not supported.
 */
@Override
public FSDataOutputStream append(Path hadoopPath, int bufferSize, Progressable progress)
    throws IOException {
  long startTime = System.nanoTime();
  Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
  logger.atFine().log("GHFS.append: %s, bufferSize: %d (ignored)", hadoopPath, bufferSize);
  long duration = System.nanoTime() - startTime;
  // Counters are still updated so attempted appends show up in metrics.
  increment(Counter.APPEND);
  increment(Counter.APPEND_TIME, duration);
  throw new IOException("The append operation is not supported.");
}
/**
 * Concat existing files into one file.
 *
 * @param trg the path to the target destination.
 * @param psrcs the paths to the sources to use for the concatenation.
 * @throws IOException IO failure
 * @throws IllegalArgumentException if psrcs is empty or contains the target.
 */
@Override
public void concat(Path trg, Path[] psrcs) throws IOException {
  logger.atFine().log("GHFS.concat: %s, %s", trg, lazy(() -> Arrays.toString(psrcs)));
  checkArgument(psrcs.length > 0, "psrcs must have at least one source");
  URI trgPath = getGcsPath(trg);
  List<URI> srcPaths = Arrays.stream(psrcs).map(this::getGcsPath).collect(toImmutableList());
  checkArgument(!srcPaths.contains(trgPath), "target must not be contained in sources");
  // Each GCS compose call takes at most MAX_COMPOSE_OBJECTS inputs; one slot per
  // partition is reserved for the target itself (added below).
  List<List<URI>> partitions =
      Lists.partition(srcPaths, GoogleCloudStorage.MAX_COMPOSE_OBJECTS - 1);
  logger.atFine().log("GHFS.concat: %s, %d partitions", trg, partitions.size());
  for (List<URI> partition : partitions) {
    // We need to include the target in the list of sources to compose since
    // the GCS FS compose operation will overwrite the target, whereas the Hadoop
    // concat operation appends to the target.
    List<URI> sources = Lists.newArrayList(trgPath);
    sources.addAll(partition);
    logger.atFine().log("GHFS.concat compose: %s, %s", trgPath, sources);
    getGcsFs().compose(sources, trgPath, CreateFileOptions.DEFAULT_CONTENT_TYPE);
  }
  logger.atFine().log("GHFS.concat:=> ");
}
/**
 * Renames src to dst. Src must not be equal to the filesystem root.
 *
 * @param src Source path; must not be null.
 * @param dst Destination path; must not be null.
 * @return true if rename succeeds; false if src is the filesystem root or the underlying
 *     rename fails.
 * @throws FileNotFoundException if src does not exist.
 * @throws IOException if an error occurs.
 */
@Override
public boolean rename(Path src, Path dst) throws IOException {
  // Validate arguments before the first dereference: previously src.makeQualified(this)
  // ran before these checks, so a null src surfaced as an NPE instead of the intended
  // IllegalArgumentException.
  Preconditions.checkArgument(src != null, "src must not be null");
  Preconditions.checkArgument(dst != null, "dst must not be null");
  // Even though the underlying GCSFS will also throw an IAE if src is root, since our filesystem
  // root happens to equal the global root, we want to explicitly check it here since derived
  // classes may not have filesystem roots equal to the global root.
  if (src.makeQualified(this).equals(getFileSystemRoot())) {
    logger.atFine().log("GHFS.rename: src is root: '%s'", src);
    return false;
  }
  long startTime = System.nanoTime();
  checkOpen();
  URI srcPath = getGcsPath(src);
  URI dstPath = getGcsPath(dst);
  logger.atFine().log("GHFS.rename: %s -> %s", src, dst);
  try {
    getGcsFs().rename(srcPath, dstPath);
  } catch (IOException e) {
    // Occasionally log exceptions that have a cause at info level,
    // because they could surface real issues and help with troubleshooting
    (logger.atFine().isEnabled() || e.getCause() == null
            ? logger.atFine()
            : logger.atInfo().atMostEvery(5, TimeUnit.MINUTES))
        .withCause(e)
        .log("Failed GHFS.rename: %s -> %s", src, dst);
    return false;
  }
  long duration = System.nanoTime() - startTime;
  increment(Counter.RENAME);
  increment(Counter.RENAME_TIME, duration);
  return true;
}
/**
 * Delete a file; directories are deleted recursively to match the historical default.
 * @deprecated Use {@code delete(Path, boolean)} instead
 */
@Deprecated
@Override
public boolean delete(Path f)
    throws IOException {
  return delete(f, true);
}
/**
 * Deletes the given file or directory.
 *
 * @param hadoopPath The path to delete.
 * @param recursive If path is a directory and set to
 *     true, the directory is deleted, else throws an exception.
 *     In case of a file, the recursive parameter is ignored.
 * @return true if delete is successful else false.
 * @throws IOException if an error occurs.
 */
@Override
public boolean delete(Path hadoopPath, boolean recursive) throws IOException {
  long startTime = System.nanoTime();
  Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
  checkOpen();
  logger.atFine().log("GHFS.delete: %s, recursive: %s", hadoopPath, recursive);
  URI gcsPath = getGcsPath(hadoopPath);
  try {
    getGcsFs().delete(gcsPath, recursive);
  } catch (DirectoryNotEmptyException e) {
    // Rethrown as-is so callers can distinguish "non-empty directory" from generic failure.
    throw e;
  } catch (IOException e) {
    // Occasionally log exceptions that have a cause at info level,
    // because they could surface real issues and help with troubleshooting
    (logger.atFine().isEnabled() || e.getCause() == null
            ? logger.atFine()
            : logger.atInfo().atMostEvery(5, TimeUnit.MINUTES))
        .withCause(e)
        .log("Failed GHFS.delete: %s, recursive: %s", hadoopPath, recursive);
    return false;
  }
  long duration = System.nanoTime() - startTime;
  increment(Counter.DELETE);
  increment(Counter.DELETE_TIME, duration);
  return true;
}
/**
 * Lists file status. If the given path points to a directory then the status
 * of children is returned, otherwise the status of the given file is returned.
 *
 * @param hadoopPath Given path.
 * @return File status list or null if path does not exist.
 * @throws IOException if an error occurs.
 */
@Override
public FileStatus[] listStatus(Path hadoopPath)
    throws IOException {
  long startTime = System.nanoTime();
  Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
  checkOpen();
  logger.atFine().log("GHFS.listStatus: %s", hadoopPath);
  URI gcsPath = getGcsPath(hadoopPath);
  List<FileStatus> status;
  try {
    List<FileInfo> fileInfos =
        getGcsFs().listFileInfo(gcsPath, isAutoRepairImplicitDirectoriesEnabled());
    status = new ArrayList<>(fileInfos.size());
    String userName = getUgiUserName();
    for (FileInfo fileInfo : fileInfos) {
      status.add(getFileStatus(fileInfo, userName));
    }
  } catch (FileNotFoundException fnfe) {
    logger.atFine().withCause(fnfe).log("Got fnfe: ");
    // Missing path: delegate to the Hadoop-version-specific behavior
    // (Hadoop1 returns null, Hadoop2 rethrows FileNotFoundException).
    return listStatusFileNotFoundBehavior.handle(gcsPath.toString());
  }
  long duration = System.nanoTime() - startTime;
  increment(Counter.LIST_STATUS);
  increment(Counter.LIST_STATUS_TIME, duration);
  return status.toArray(new FileStatus[0]);
}
private boolean isAutoRepairImplicitDirectoriesEnabled() {
GoogleCloudStorageFileSystemOptions gcsFsOptions = getGcsFs().getOptions();
return gcsFsOptions.getCloudStorageOptions().isAutoRepairImplicitDirectoriesEnabled();
}
  /**
   * Sets the current working directory to the given path.
   *
   * <p>The path is normalized to a directory-style GCS path but is NOT checked for existence
   * (see inline comment).
   *
   * @param hadoopPath New working directory.
   */
  @Override
  public void setWorkingDirectory(Path hadoopPath) {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    logger.atFine().log("GHFS.setWorkingDirectory: %s", hadoopPath);
    // Round-trip through the GCS URI form so the stored path has canonical directory formatting.
    URI gcsPath = FileInfo.convertToDirectoryPath(pathCodec, getGcsPath(hadoopPath));
    Path newPath = getHadoopPath(gcsPath);
    // Ideally we should check (as we did earlier) if the given path really points to an existing
    // directory. However, it takes considerable amount of time for that check which hurts perf.
    // Given that HDFS code does not do such checks either, we choose to not do them in favor of
    // better performance.
    workingDirectory = newPath;
    logger.atFine().log("GHFS.setWorkingDirectory: => %s", workingDirectory);
    long duration = System.nanoTime() - startTime;
    increment(Counter.SET_WD);
    increment(Counter.SET_WD_TIME, duration);
  }
/**
* Gets the current working directory.
*
* @return The current working directory.
*/
@Override
public Path getWorkingDirectory() {
logger.atFine().log("GHFS.getWorkingDirectory: %s", workingDirectory);
return workingDirectory;
}
  /**
   * Makes the given path and all non-existent parents directories.
   * Has the semantics of Unix 'mkdir -p'.
   *
   * @param hadoopPath Given path.
   * @param permission Permissions to set on the given directory.
   * @return true on success, false otherwise.
   * @throws IOException if an error occurs.
   */
  @Override
  public boolean mkdirs(Path hadoopPath, FsPermission permission)
      throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    checkOpen();
    // NOTE(review): within this method the permission argument is only logged, never applied --
    // presumably because GCS has no POSIX-style directory permissions; confirm upstream.
    logger.atFine().log("GHFS.mkdirs: %s, perm: %s", hadoopPath, permission);
    URI gcsPath = getGcsPath(hadoopPath);
    try {
      getGcsFs().mkdirs(gcsPath);
    } catch (java.nio.file.FileAlreadyExistsException faee) {
      // Need to convert to the Hadoop flavor of FileAlreadyExistsException.
      throw (FileAlreadyExistsException)
          new FileAlreadyExistsException(faee.getMessage()).initCause(faee);
    }
    long duration = System.nanoTime() - startTime;
    increment(Counter.MKDIRS);
    increment(Counter.MKDIRS_TIME, duration);
    return true;
  }
  /**
   * Gets the default replication factor.
   *
   * <p>Always returns the class-wide constant; NOTE(review): presumably a nominal value since
   * GCS manages redundancy itself -- confirm.
   */
  @Override
  public short getDefaultReplication() {
    return REPLICATION_FACTOR_DEFAULT;
  }
  /**
   * Gets status of the given path item.
   *
   * @param hadoopPath The path we want information about.
   * @return A FileStatus object for the given path.
   * @throws FileNotFoundException when the path does not exist;
   * @throws IOException on other errors.
   */
  @Override
  public FileStatus getFileStatus(Path hadoopPath)
      throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    checkOpen();
    logger.atFine().log("GHFS.getFileStatus: %s", hadoopPath);
    URI gcsPath = getGcsPath(hadoopPath);
    FileInfo fileInfo = getGcsFs().getFileInfo(gcsPath);
    if (!fileInfo.exists()) {
      logger.atFine().log("GHFS.getFileStatus: not found: %s", gcsPath);
      // Message distinguishes directory-shaped vs. file-shaped paths for easier debugging.
      throw new FileNotFoundException(
          (fileInfo.isDirectory() ? "Directory not found : " : "File not found : ") + hadoopPath);
    }
    String userName = getUgiUserName();
    FileStatus status = getFileStatus(fileInfo, userName);
    long duration = System.nanoTime() - startTime;
    increment(Counter.GET_FILE_STATUS);
    increment(Counter.GET_FILE_STATUS_TIME, duration);
    return status;
  }
  /**
   * Gets FileStatus corresponding to the given FileInfo value.
   *
   * @param fileInfo source of size/type/timestamps/path for the status.
   * @param userName reported as both owner and group of the returned status.
   */
  private FileStatus getFileStatus(FileInfo fileInfo, String userName) throws IOException {
    // GCS does not provide modification time. It only provides creation time.
    // It works for objects because they are immutable once created.
    FileStatus status =
        new FileStatus(
            fileInfo.getSize(),
            fileInfo.isDirectory(),
            REPLICATION_FACTOR_DEFAULT,
            defaultBlockSize,
            /* modificationTime= */ fileInfo.getModificationTime(),
            /* accessTime= */ fileInfo.getModificationTime(),
            reportedPermissions,
            /* owner= */ userName,
            /* group= */ userName,
            getHadoopPath(fileInfo.getPath()));
    // lazy(...) defers the (string-building) toString until the fine log level is actually on.
    logger.atFine().log(
        "GHFS.getFileStatus: %s => %s", fileInfo.getPath(), lazy(() -> fileStatusToString(status)));
    return status;
  }
  /**
   * Determines based on suitability of {@code fixedPath} whether to use flat globbing logic where
   * we use a single large listing during globStatus to then perform the core globbing logic
   * in-memory.
   *
   * @return true only if the scheme matches GCS, the pattern contains a wildcard, and the
   *     authority (bucket) is present and wildcard-free.
   */
  @VisibleForTesting
  boolean couldUseFlatGlob(Path fixedPath) {
    // Only works for filesystems where the base Hadoop Path scheme matches the underlying URI
    // scheme for GCS.
    if (!getUri().getScheme().equals(GoogleCloudStorageFileSystem.SCHEME)) {
      logger.atFine().log(
          "Flat glob is on, but doesn't work for scheme '%s'; using default behavior.",
          getUri().getScheme());
      return false;
    }
    // The full pattern should have a wildcard, otherwise there's no point doing the flat glob.
    GlobPattern fullPattern = new GlobPattern(fixedPath.toString());
    if (!fullPattern.hasWildcard()) {
      logger.atFine().log(
          "Flat glob is on, but Path '%s' has no wildcard; using default behavior.", fixedPath);
      return false;
    }
    // To use a flat glob, there must be an authority defined.
    if (Strings.isNullOrEmpty(fixedPath.toUri().getAuthority())) {
      logger.atInfo().log(
          "Flat glob is on, but Path '%s' has a empty authority, using default behavior.",
          fixedPath);
      return false;
    }
    // And the authority must not contain a wildcard.
    GlobPattern authorityPattern = new GlobPattern(fixedPath.toUri().getAuthority());
    if (authorityPattern.hasWildcard()) {
      logger.atInfo().log(
          "Flat glob is on, but Path '%s' has a wildcard authority, using default behavior.",
          fixedPath);
      return false;
    }
    return true;
  }
@VisibleForTesting
String trimToPrefixWithoutGlob(String path) {
char[] wildcardChars = "*?{[".toCharArray();
int trimIndex = path.length();
// Find the first occurrence of any one of the wildcard characters, or just path.length()
// if none are found.
for (char wildcard : wildcardChars) {
int wildcardIndex = path.indexOf(wildcard);
if (wildcardIndex >= 0 && wildcardIndex < trimIndex) {
trimIndex = wildcardIndex;
}
}
return path.substring(0, trimIndex);
}
  /**
   * Returns an array of FileStatus objects whose path names match pathPattern.
   *
   * Return null if pathPattern has no glob and the path does not exist.
   * Return an empty array if pathPattern has a glob and no path matches it.
   *
   * @param pathPattern A regular expression specifying the path pattern.
   * @return An array of FileStatus objects.
   * @throws IOException if an error occurs.
   */
  @Override
  public FileStatus[] globStatus(Path pathPattern) throws IOException {
    // Delegates to the filtered overload using the class-wide DEFAULT_FILTER.
    return globStatus(pathPattern, DEFAULT_FILTER);
  }
  /**
   * Returns an array of FileStatus objects whose path names match pathPattern and is accepted by
   * the user-supplied path filter. Results are sorted by their path names.
   *
   * <p>Return null if pathPattern has no glob and the path does not exist. Return an empty array if
   * pathPattern has a glob and no path matches it.
   *
   * <p>Dispatches to one of three strategies: concurrent (race flat vs. default), flat
   * (single listing, in-memory match), or the default superclass algorithm.
   *
   * @param pathPattern A regular expression specifying the path pattern.
   * @param filter A user-supplied path filter.
   * @return An array of FileStatus objects.
   * @throws IOException if an error occurs.
   */
  @Override
  public FileStatus[] globStatus(Path pathPattern, PathFilter filter) throws IOException {
    checkOpen();
    logger.atFine().log("GHFS.globStatus: %s", pathPattern);
    // URI does not handle glob expressions nicely, for the purpose of
    // fully-qualifying a path we can URI-encode them.
    // Using toString() to avoid Path(URI) constructor.
    Path encodedPath = new Path(pathPattern.toUri().toString());
    // We convert pathPattern to GCS path and then to Hadoop path to ensure that it ends up in
    // the correct format. See note in getHadoopPath for more information.
    Path encodedFixedPath = getHadoopPath(getGcsPath(encodedPath));
    // Decode URI-encoded path back into a glob path.
    Path fixedPath = new Path(URI.create(encodedFixedPath.toString()));
    logger.atFine().log("GHFS.globStatus fixedPath: %s => %s", pathPattern, fixedPath);
    if (enableConcurrentGlob && couldUseFlatGlob(fixedPath)) {
      return concurrentGlobInternal(fixedPath, filter, pathPattern);
    }
    if (enableFlatGlob && couldUseFlatGlob(fixedPath)) {
      return flatGlobInternal(fixedPath, filter);
    }
    return globInternal(fixedPath, filter, pathPattern);
  }
/**
* Use 2 glob algorithms that return the same result but one of them could be significantly faster
* than another one depending on directory layout.
*/
private FileStatus[] concurrentGlobInternal(Path fixedPath, PathFilter filter, Path pathPattern)
throws IOException {
ExecutorService executorService = Executors.newFixedThreadPool(2, DAEMON_THREAD_FACTORY);
Callable<FileStatus[]> flatGlobTask = () -> flatGlobInternal(fixedPath, filter);
Callable<FileStatus[]> nonFlatGlobTask = () -> globInternal(fixedPath, filter, pathPattern);
try {
return executorService.invokeAny(Arrays.asList(flatGlobTask, nonFlatGlobTask));
} catch (InterruptedException | ExecutionException e) {
throw (e.getCause() instanceof IOException) ? (IOException) e.getCause() : new IOException(e);
} finally {
executorService.shutdownNow();
}
}
  /**
   * Glob implementation that performs one large prefix listing (paginated) and then applies the
   * glob/filter in memory via a helper in-memory FileSystem.
   *
   * <p>Returns null when nothing matched and the last page produced no matches (mirroring the
   * null-vs-empty contract of {@link #globStatus(Path, PathFilter)}); de-duplicates statuses that
   * can repeat across pages and between real and implicit directories; optionally repairs
   * implicit directories found in the result.
   */
  private FileStatus[] flatGlobInternal(Path fixedPath, PathFilter filter) throws IOException {
    String pathString = fixedPath.toString();
    String prefixString = trimToPrefixWithoutGlob(pathString);
    Path prefixPath = new Path(prefixString);
    URI prefixUri = getGcsPath(prefixPath);
    if (prefixString.endsWith("/") && !prefixPath.toString().endsWith("/")) {
      // Path strips a trailing slash unless it's the 'root' path. We want to keep the trailing
      // slash so that we don't wastefully list sibling files which may match the directory-name
      // as a strict prefix but would've been omitted due to not containing the '/' at the end.
      prefixUri = FileInfo.convertToDirectoryPath(pathCodec, prefixUri);
    }
    // Get everything matching the non-glob prefix.
    logger.atFine().log("Listing everything with prefix '%s'", prefixUri);
    List<FileStatus> matchedStatuses = null;
    String pageToken = null;
    do {
      ListPage<FileInfo> infoPage = getGcsFs().listAllFileInfoForPrefixPage(prefixUri, pageToken);
      // TODO: Are implicit directories really always needed for globbing?
      //  Probably they should be inferred only when fs.gs.implicit.dir.infer.enable is true.
      Collection<FileStatus> statusPage =
          toFileStatusesWithImplicitDirectories(infoPage.getItems());
      // TODO: refactor to use GlobPattern and PathFilter directly without helper FS
      FileSystem helperFileSystem =
          InMemoryGlobberFileSystem.createInstance(getConf(), getWorkingDirectory(), statusPage);
      FileStatus[] matchedStatusPage = helperFileSystem.globStatus(fixedPath, filter);
      if (matchedStatusPage != null) {
        Collections.addAll(
            (matchedStatuses == null ? matchedStatuses = new ArrayList<>() : matchedStatuses),
            matchedStatusPage);
      }
      pageToken = infoPage.getNextPageToken();
    } while (pageToken != null);
    if (matchedStatuses == null || matchedStatuses.isEmpty()) {
      return matchedStatuses == null ? null : new FileStatus[0];
    }
    matchedStatuses.sort(
        ((Comparator<FileStatus>) Comparator.<FileStatus>naturalOrder())
            // Place duplicate implicit directories after real directory
            .thenComparingInt((FileStatus f) -> isImplicitDirectory(f) ? 1 : 0));
    // Remove duplicate file statuses that could be in the matchedStatuses
    // because of pagination and implicit directories
    List<FileStatus> filteredStatuses = new ArrayList<>(matchedStatuses.size());
    FileStatus lastAdded = null;
    for (FileStatus fileStatus : matchedStatuses) {
      // Works because the list is sorted: equal statuses are adjacent, real dirs sort first.
      if (lastAdded == null || lastAdded.compareTo(fileStatus) != 0) {
        filteredStatuses.add(fileStatus);
        lastAdded = fileStatus;
      }
    }
    FileStatus[] returnList = filteredStatuses.toArray(new FileStatus[0]);
    // If the return list contains directories, we should repair them if they're 'implicit'.
    if (isAutoRepairImplicitDirectoriesEnabled()) {
      List<URI> toRepair = new ArrayList<>();
      for (FileStatus status : returnList) {
        if (isImplicitDirectory(status)) {
          toRepair.add(getGcsPath(status.getPath()));
        }
      }
      if (!toRepair.isEmpty()) {
        logger.atWarning().log(
            "Discovered %s implicit directories to repair within return values.", toRepair.size());
        getGcsFs().repairDirs(toRepair);
      }
    }
    return returnList;
  }
  /**
   * Default glob implementation: delegates to the superclass, and on a null result optionally
   * attempts an implicit-directory repair followed by a single re-glob.
   */
  private FileStatus[] globInternal(Path fixedPath, PathFilter filter, Path pathPattern)
      throws IOException {
    FileStatus[] ret = super.globStatus(fixedPath, filter);
    if (ret == null) {
      if (isAutoRepairImplicitDirectoriesEnabled()) {
        logger.atFine().log(
            "GHFS.globStatus returned null for '%s', attempting possible repair.", pathPattern);
        if (getGcsFs().repairPossibleImplicitDirectory(getGcsPath(fixedPath))) {
          logger.atWarning().log("Success repairing '%s', re-globbing.", pathPattern);
          ret = super.globStatus(fixedPath, filter);
        }
      }
    }
    return ret;
  }
  /**
   * Returns true for directory statuses synthesized for implicit directories.
   *
   * <p>NOTE(review): relies on the convention (see getFileStatus) that real objects carry their
   * GCS creation time, so only inferred directories have modification time 0 -- confirm.
   */
  private static boolean isImplicitDirectory(FileStatus curr) {
    // Modification time of 0 indicates implicit directory.
    return curr.isDir() && curr.getModificationTime() == 0;
  }
  /**
   * Helper method that converts {@link FileInfo} collection to {@link FileStatus} collection.
   *
   * <p>Also synthesizes statuses for every missing ancestor directory (up to, but excluding, the
   * GCS root) so that lookups on parent paths succeed against the returned collection.
   */
  private Collection<FileStatus> toFileStatusesWithImplicitDirectories(
      Collection<FileInfo> fileInfos) throws IOException {
    List<FileStatus> fileStatuses = new ArrayList<>(fileInfos.size());
    // Tracks paths already represented so ancestors are synthesized at most once.
    Set<URI> filePaths = Sets.newHashSetWithExpectedSize(fileInfos.size());
    String userName = getUgiUserName();
    for (FileInfo fileInfo : fileInfos) {
      filePaths.add(fileInfo.getPath());
      fileStatuses.add(getFileStatus(fileInfo, userName));
    }
    // The flow for populating this doesn't bother to populate metadata entries for parent
    // directories but we know the parent directories are expected to exist, so we'll just
    // populate the missing entries explicitly here. Necessary for getFileStatus(parentOfInfo)
    // to work when using an instance of this class.
    for (FileInfo fileInfo : fileInfos) {
      URI parentPath = getGcsFs().getParentPath(fileInfo.getPath());
      while (parentPath != null && !parentPath.equals(GoogleCloudStorageFileSystem.GCS_ROOT)) {
        if (!filePaths.contains(parentPath)) {
          logger.atFine().log("Adding fake entry for missing parent path '%s'", parentPath);
          StorageResourceId id = pathCodec.validatePathAndGetId(parentPath, true);
          GoogleCloudStorageItemInfo fakeItemInfo =
              GoogleCloudStorageItemInfo.createInferredDirectory(id);
          FileInfo fakeFileInfo = FileInfo.fromItemInfo(pathCodec, fakeItemInfo);
          filePaths.add(parentPath);
          fileStatuses.add(getFileStatus(fakeFileInfo, userName));
        }
        parentPath = getGcsFs().getParentPath(parentPath);
      }
    }
    return fileStatuses;
  }
/** Helper method to get the UGI short user name */
private static String getUgiUserName() throws IOException {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
return ugi.getShortUserName();
}
/**
* Returns home directory of the current user.
*
* Note: This directory is only used for Hadoop purposes.
* It is not the same as a user's OS home directory.
*/
@Override
public Path getHomeDirectory() {
Path result = new Path(getFileSystemRoot(), getHomeDirectorySubpath());
logger.atFine().log("GHFS.getHomeDirectory:=> %s", result);
return result;
}
/**
* Converts the given FileStatus to its string representation.
*
* @param stat FileStatus to convert.
* @return String representation of the given FileStatus.
*/
private static String fileStatusToString(FileStatus stat) {
assert stat != null;
return String.format(
"path: %s, isDir: %s, len: %d, owner: %s",
stat.getPath().toString(),
stat.isDir(),
stat.getLen(),
stat.getOwner());
}
  /**
   * Gets system bucket name.
   *
   * <p>May return null when no system bucket was configured (see configure()).
   *
   * @deprecated Use getUri().authority instead.
   */
  @VisibleForTesting
  @Deprecated
  String getSystemBucketName() {
    return systemBucket;
  }
  /**
   * {@inheritDoc}
   *
   * <p>Returns null, because GHFS does not use security tokens.
   */
  @Override
  public String getCanonicalServiceName() {
    // Entry/exit trace pair, matching the logging convention of the other overrides below.
    logger.atFine().log("GHFS.getCanonicalServiceName:");
    logger.atFine().log("GHFS.getCanonicalServiceName:=> null");
    return null;
  }
  /**
   * Gets GCS FS instance.
   *
   * <p>When lazy initialization is enabled, the first call triggers creation via the memoized
   * supplier set up in configure().
   */
  public GoogleCloudStorageFileSystem getGcsFs() {
    return gcsFsSupplier.get();
  }
  /**
   * Increments by 1 the counter indicated by key.
   *
   * @param key the counter to bump.
   */
  void increment(Counter key) {
    increment(key, 1);
  }
  /**
   * Adds value to the counter indicated by key.
   *
   * @param key the counter to update.
   * @param value the amount to add (nanoseconds for the *_TIME counters).
   */
  void increment(Counter key, long value) {
    counters.get(key).addAndGet(value);
  }
/**
* Gets value of all counters as a formatted string.
*/
@VisibleForTesting
String countersToString() {
StringBuilder sb = new StringBuilder();
sb.append("\n");
double numNanoSecPerSec = TimeUnit.SECONDS.toNanos(1);
String timeSuffix = "_TIME";
for (Counter c : Counter.values()) {
String name = c.toString();
if (!name.endsWith(timeSuffix)) {
// Log invocation counter.
long count = counters.get(c).get();
sb.append(String.format("%20s = %d calls\n", name, count));
// Log duration counter.
String timeCounterName = name + timeSuffix;
double totalTime =
counters.get(Enum.valueOf(Counter.class, timeCounterName)).get()
/ numNanoSecPerSec;
sb.append(String.format("%20s = %.2f sec\n", timeCounterName, totalTime));
// Compute and log average duration per call (== total duration / num invocations).
String avgName = name + " avg.";
double avg = totalTime / count;
sb.append(String.format("%20s = %.2f sec / call\n\n", avgName, avg));
}
}
return sb.toString();
}
  /**
   * Logs values of all counters.
   *
   * <p>lazy(...) defers the string building until the fine log level is actually enabled.
   */
  private void logCounters() {
    logger.atFine().log("%s", lazy(this::countersToString));
  }
/**
* Copy the value of the deprecated key to the new key if a value is present for the deprecated
* key, but not the new key.
*/
private static void copyIfNotPresent(Configuration config, String deprecatedKey, String newKey) {
String deprecatedValue = config.get(deprecatedKey);
if (config.get(newKey) == null && deprecatedValue != null) {
logger.atWarning().log(
"Key %s is deprecated. Copying the value of key %s to new key %s",
deprecatedKey, deprecatedKey, newKey);
config.set(newKey, deprecatedValue);
}
}
/**
* Copy deprecated configuration options to new keys, if present.
*/
private static void copyDeprecatedConfigurationOptions(Configuration config) {
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_ENABLE.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.ENABLE_SERVICE_ACCOUNTS_SUFFIX);
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_KEY_FILE.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.SERVICE_ACCOUNT_KEYFILE_SUFFIX);
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_EMAIL.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.SERVICE_ACCOUNT_EMAIL_SUFFIX);
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_CLIENT_ID.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.CLIENT_ID_SUFFIX);
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_CLIENT_SECRET.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.CLIENT_SECRET_SUFFIX);
String oauthClientFileKey =
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.OAUTH_CLIENT_FILE_SUFFIX;
if (config.get(oauthClientFileKey) == null) {
// No property to copy, but we can set this fairly safely (it's only invoked if client ID,
// client secret are set and we're not using service accounts).
config.set(
oauthClientFileKey, System.getProperty("user.home") + "/.credentials/storage.json");
}
}
  /**
   * Retrieve user's Credential. If user implemented {@link AccessTokenProvider} and provided the
   * class name (See {@link AccessTokenProviderClassFromConfigFactory} then build a credential with
   * access token provided by this provider; Otherwise obtain credential through {@link
   * HadoopCredentialConfiguration#getCredential(List)}.
   */
  private static Credential getCredential(
      AccessTokenProviderClassFromConfigFactory providerClassFactory, Configuration config)
      throws IOException, GeneralSecurityException {
    // First preference: a user-supplied AccessTokenProvider implementation.
    Credential credential =
        CredentialFromAccessTokenProviderClassFactory.credential(
            providerClassFactory, config, CredentialFactory.GCS_SCOPES);
    if (credential != null) {
      return credential;
    }
    // Fallback: the standard Hadoop credential configuration chain.
    return HadoopCredentialConfiguration.newBuilder()
        .withConfiguration(config)
        .withOverridePrefix(AUTHENTICATION_PREFIX)
        .build()
        .getCredential(CredentialFactory.GCS_SCOPES);
  }
  /**
   * Configures GHFS using the supplied configuration.
   *
   * <p>Reads all tunables (glob flags, checksum type, block size, reported permissions) and then
   * either creates the GCS FS eagerly, wires up a memoized lazy supplier, or -- on
   * reconfiguration of an existing instance -- only re-runs bucket and working-directory setup.
   *
   * @param config Hadoop configuration object.
   */
  private synchronized void configure(Configuration config) throws IOException {
    logger.atFine().log("GHFS.configure");
    logger.atFine().log("GHFS_ID = %s", GHFS_ID);
    overrideConfigFromFile(config);
    copyDeprecatedConfigurationOptions(config);
    // Set this configuration as the default config for this instance.
    setConf(config);
    systemBucket = emptyToNull(GCS_SYSTEM_BUCKET.get(config, config::get));
    enableFlatGlob = GCS_FLAT_GLOB_ENABLE.get(config, config::getBoolean);
    enableConcurrentGlob = GCS_CONCURRENT_GLOB_ENABLE.get(config, config::getBoolean);
    checksumType = GCS_FILE_CHECKSUM_TYPE.get(config, config::getEnum);
    defaultBlockSize = BLOCK_SIZE.get(config, config::getLong);
    reportedPermissions = new FsPermission(PERMISSIONS_TO_REPORT.get(config, config::get));
    boolean createSystemBucket = GCS_CREATE_SYSTEM_BUCKET.get(config, config::getBoolean);
    if (gcsFsSupplier == null) {
      if (GCS_LAZY_INITIALIZATION_ENABLE.get(config, config::getBoolean)) {
        // Defer FS creation to first use; the supplier performs full setup exactly once.
        gcsFsSupplier =
            Suppliers.memoize(
                () -> {
                  try {
                    GoogleCloudStorageFileSystem gcsFs = createGcsFs(config);
                    pathCodec = gcsFs.getPathCodec();
                    configureBuckets(gcsFs, systemBucket, createSystemBucket);
                    configureWorkingDirectory(config);
                    gcsFsInitialized = true;
                    return gcsFs;
                  } catch (IOException e) {
                    // Supplier.get() cannot throw checked exceptions; wrap for callers.
                    throw new RuntimeException("Failed to create GCS FS", e);
                  }
                });
        // Path codec is needed before the lazy FS exists, so derive it from config directly.
        pathCodec = getPathCodec(config);
      } else {
        setGcsFs(createGcsFs(config));
        configureBuckets(getGcsFs(), systemBucket, createSystemBucket);
        configureWorkingDirectory(config);
      }
    } else {
      configureBuckets(getGcsFs(), systemBucket, createSystemBucket);
      configureWorkingDirectory(config);
    }
    logger.atFine().log("GHFS.configure: done");
  }
  /**
   * If overrides file configured, update properties from override file into {@link Configuration}
   * object
   *
   * <p>NOTE(review): the FileInputStream is handed to Configuration.addResource; presumably
   * Configuration takes ownership and closes it after parsing -- confirm against the Hadoop
   * version in use.
   */
  private void overrideConfigFromFile(Configuration config) throws IOException {
    String configFile = GCS_CONFIG_OVERRIDE_FILE.get(config, config::get);
    if (configFile != null) {
      config.addResource(new FileInputStream(configFile));
    }
  }
private static PathCodec getPathCodec(Configuration config) {
String specifiedPathCodec = Ascii.toLowerCase(PATH_CODEC.get(config, config::get));
switch (specifiedPathCodec) {
case PATH_CODEC_USE_LEGACY_ENCODING:
return GoogleCloudStorageFileSystem.LEGACY_PATH_CODEC;
case PATH_CODEC_USE_URI_ENCODING:
return GoogleCloudStorageFileSystem.URI_ENCODED_PATH_CODEC;
default:
logger.atWarning().log(
"Unknown path codec specified %s. Using default / legacy.", specifiedPathCodec);
return GoogleCloudStorageFileSystem.LEGACY_PATH_CODEC;
}
}
  /**
   * Builds a new {@link GoogleCloudStorageFileSystem} from configuration: resolves credentials,
   * derives FS options (including the path codec), and constructs the FS.
   */
  private static GoogleCloudStorageFileSystem createGcsFs(Configuration config) throws IOException {
    Credential credential;
    try {
      credential =
          getCredential(
              new AccessTokenProviderClassFromConfigFactory().withOverridePrefix("fs.gs"), config);
    } catch (GeneralSecurityException e) {
      // Method signature only allows IOException; surface security failures unchecked.
      throw new RuntimeException(e);
    }
    GoogleCloudStorageFileSystemOptions gcsFsOptions =
        GoogleHadoopFileSystemConfiguration.getGcsFsOptionsBuilder(config)
            .setPathCodec(getPathCodec(config))
            .build();
    return new GoogleCloudStorageFileSystem(credential, gcsFsOptions);
  }
  /**
   * Validates and possibly creates the system bucket. Should be overridden to configure other
   * buckets.
   *
   * @param gcsFs {@link GoogleCloudStorageFileSystem} to configure buckets
   * @param systemBucketName Name of system bucket
   * @param createSystemBucket Whether or not to create systemBucketName if it does not exist.
   * @throws IOException if systemBucketName is invalid or cannot be found and createSystemBucket is
   *     false.
   */
  @VisibleForTesting
  protected void configureBuckets(
      GoogleCloudStorageFileSystem gcsFs, String systemBucketName, boolean createSystemBucket)
      throws IOException {
    logger.atFine().log("GHFS.configureBuckets: %s, %s", systemBucketName, createSystemBucket);
    systemBucket = systemBucketName;
    if (systemBucket != null) {
      logger.atFine().log("GHFS.configureBuckets: Warning fs.gs.system.bucket is deprecated.");
      // Ensure that system bucket exists. It really must be a bucket, not a GCS path.
      URI systemBucketPath =
          gcsFs
              .getPathCodec()
              .getPath(systemBucket, /* objectName= */ null, /* allowEmptyObjectName= */ true);
      if (!gcsFs.exists(systemBucketPath)) {
        if (createSystemBucket) {
          gcsFs.mkdirs(systemBucketPath);
        } else {
          // Missing bucket without permission to create is a hard configuration error.
          throw new FileNotFoundException(
              String.format(
                  "%s: system bucket not found: %s", GCS_SYSTEM_BUCKET.getKey(), systemBucket));
        }
      }
    }
    logger.atFine().log("GHFS.configureBuckets:=>");
  }
  /**
   * Initializes the working directory from configuration, falling back to the default working
   * directory when the config value is absent or empty.
   */
  private void configureWorkingDirectory(Configuration config) {
    // Set initial working directory to root so that any configured value gets resolved
    // against file system root.
    workingDirectory = getFileSystemRoot();
    Path newWorkingDirectory;
    String configWorkingDirectory = GCS_WORKING_DIRECTORY.get(config, config::get);
    if (Strings.isNullOrEmpty(configWorkingDirectory)) {
      newWorkingDirectory = getDefaultWorkingDirectory();
      logger.atWarning().log(
          "No working directory configured, using default: '%s'", newWorkingDirectory);
    } else {
      newWorkingDirectory = new Path(configWorkingDirectory);
    }
    // Use the public method to ensure proper behavior of normalizing and resolving the new
    // working directory relative to the initial filesystem-root directory.
    setWorkingDirectory(newWorkingDirectory);
    logger.atFine().log("%s = %s", GCS_WORKING_DIRECTORY.getKey(), getWorkingDirectory());
  }
/**
* Assert that the FileSystem has been initialized and not close()d.
*/
private void checkOpen() throws IOException {
if (isClosed()) {
throw new IOException("GoogleHadoopFileSystem has been closed or not initialized.");
}
}
  /**
   * Unchecked variant of checkOpen() for call sites that cannot declare IOException.
   */
  protected void checkOpenUnchecked() {
    if (isClosed()) {
      throw new RuntimeException("GoogleHadoopFileSystem has been closed or not initialized.");
    }
  }
  /** Returns true when the FS has been closed (supplier nulled in close()) or never initialized. */
  private boolean isClosed() {
    return gcsFsSupplier == null || gcsFsSupplier.get() == null;
  }
// =================================================================
// Overridden functions for debug tracing. The following functions
// do not change functionality. They just log parameters and call base
// class' function.
// =================================================================
@Override
public boolean deleteOnExit(Path f)
throws IOException {
checkOpen();
logger.atFine().log("GHFS.deleteOnExit: %s", f);
boolean result = super.deleteOnExit(f);
logger.atFine().log("GHFS.deleteOnExit:=> %s", result);
return result;
}
  /** Debug-tracing wrapper around the superclass deleteOnExit processing. */
  @Override
  protected void processDeleteOnExit() {
    logger.atFine().log("GHFS.processDeleteOnExit:");
    super.processDeleteOnExit();
  }
@Override
public ContentSummary getContentSummary(Path f)
throws IOException {
logger.atFine().log("GHFS.getContentSummary: %s", f);
ContentSummary result = super.getContentSummary(f);
logger.atFine().log("GHFS.getContentSummary:=> %s", result);
return result;
}
@Override
public Token<?> getDelegationToken(String renewer)
throws IOException {
logger.atFine().log("GHFS.getDelegationToken: renewer: %s", renewer);
Token<?> result = super.getDelegationToken(renewer);
logger.atFine().log("GHFS.getDelegationToken:=> %s", result);
return result;
}
  /** Debug-tracing wrapper: logs arguments, then delegates the multi-source copy upward. */
  @Override
  public void copyFromLocalFile(boolean delSrc, boolean overwrite,
      Path[] srcs, Path dst)
      throws IOException {
    logger.atFine().log(
        "GHFS.copyFromLocalFile: delSrc: %s, overwrite: %s, #srcs: %s, dst: %s",
        delSrc, overwrite, srcs.length, dst);
    super.copyFromLocalFile(delSrc, overwrite, srcs, dst);
    logger.atFine().log("GHFS.copyFromLocalFile:=> ");
  }
  /** Debug-tracing wrapper: logs arguments, then delegates the single-source copy upward. */
  @Override
  public void copyFromLocalFile(boolean delSrc, boolean overwrite,
      Path src, Path dst)
      throws IOException {
    logger.atFine().log(
        "GHFS.copyFromLocalFile: delSrc: %s, overwrite: %s, src: %s, dst: %s",
        delSrc, overwrite, src, dst);
    super.copyFromLocalFile(delSrc, overwrite, src, dst);
    logger.atFine().log("GHFS.copyFromLocalFile:=> ");
  }
  /** Debug-tracing wrapper: logs arguments, then delegates the copy-to-local upward. */
  @Override
  public void copyToLocalFile(boolean delSrc, Path src, Path dst)
      throws IOException {
    logger.atFine().log("GHFS.copyToLocalFile: delSrc: %s, src: %s, dst: %s", delSrc, src, dst);
    super.copyToLocalFile(delSrc, src, dst);
    logger.atFine().log("GHFS.copyToLocalFile:=> ");
  }
  /** Debug-tracing wrapper: logs arguments and result, then delegates to the superclass. */
  @Override
  public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
      throws IOException {
    logger.atFine().log("GHFS.startLocalOutput: out: %s, tmp: %s", fsOutputFile, tmpLocalFile);
    Path result = super.startLocalOutput(fsOutputFile, tmpLocalFile);
    logger.atFine().log("GHFS.startLocalOutput:=> %s", result);
    return result;
  }
@Override
public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
logger.atFine().log("GHFS.startLocalOutput: out: %s, tmp: %s", fsOutputFile, tmpLocalFile);
super.completeLocalOutput(fsOutputFile, tmpLocalFile);
logger.atFine().log("GHFS.completeLocalOutput:=> ");
}
  /**
   * Closes this FileSystem: runs superclass cleanup first, then closes the underlying GCS FS
   * (only if it was ever initialized) and logs the instrumentation counters.
   */
  @Override
  public void close() throws IOException {
    logger.atFine().log("GHFS.close:");
    super.close();
    // NB: We must *first* have the superclass close() before we close the underlying gcsFsSupplier
    // since the superclass may decide to perform various heavyweight cleanup operations (such as
    // deleteOnExit).
    if (gcsFsSupplier != null) {
      if (gcsFsInitialized) {
        // Avoid forcing lazy initialization just to close; only close an FS that exists.
        getGcsFs().close();
      }
      gcsFsSupplier = null;
    }
    logCounters();
    logger.atFine().log("GHFS.close:=> ");
  }
@Override
public long getUsed()
throws IOException{
logger.atFine().log("GHFS.getUsed:");
long result = super.getUsed();
logger.atFine().log("GHFS.getUsed:=> %s", result);
return result;
}
@Override
public long getDefaultBlockSize() {
logger.atFine().log("GHFS.getDefaultBlockSize:");
long result = defaultBlockSize;
logger.atFine().log("GHFS.getDefaultBlockSize:=> %s", result);
return result;
}
@Override
public FileChecksum getFileChecksum(Path hadoopPath) throws IOException {
long startTime = System.nanoTime();
Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
checkOpen();
URI gcsPath = getGcsPath(hadoopPath);
final FileInfo fileInfo = getGcsFs().getFileInfo(gcsPath);
if (!fileInfo.exists()) {
logger.atFine().log("GHFS.getFileStatus: not found: %s", gcsPath);
throw new FileNotFoundException(
(fileInfo.isDirectory() ? "Directory not found : " : "File not found : ") + hadoopPath);
}
FileChecksum checksum = getFileChecksum(checksumType, fileInfo);
logger.atFine().log("GHFS.getFileChecksum:=> %s", checksum);
long duration = System.nanoTime() - startTime;
increment(Counter.GET_FILE_CHECKSUM);
increment(Counter.GET_FILE_CHECKSUM_TIME, duration);
return checksum;
}
private static FileChecksum getFileChecksum(GcsFileChecksumType type, FileInfo fileInfo)
throws IOException {
switch (type) {
case NONE:
return null;
case CRC32C:
return new GcsFileChecksum(
type, fileInfo.getItemInfo().getVerificationAttributes().getCrc32c());
case MD5:
return new GcsFileChecksum(
type, fileInfo.getItemInfo().getVerificationAttributes().getMd5hash());
}
throw new IOException("Unrecognized GcsFileChecksumType: " + type);
}
  /**
   * Enables or disables checksum verification by delegating to the superclass, with fine-level
   * entry/exit logging.
   */
  @Override
  public void setVerifyChecksum(boolean verifyChecksum) {
    logger.atFine().log("GHFS.setVerifyChecksum:");
    super.setVerifyChecksum(verifyChecksum);
    logger.atFine().log("GHFS.setVerifyChecksum:=> ");
  }
@Override
public void setPermission(Path p, FsPermission permission)
throws IOException {
logger.atFine().log("GHFS.setPermission: path: %s, perm: %s", p, permission);
super.setPermission(p, permission);
logger.atFine().log("GHFS.setPermission:=> ");
}
@Override
public void setOwner(Path p, String username, String groupname)
throws IOException {
logger.atFine().log("GHFS.setOwner: path: %s, user: %s, group: %s", p, username, groupname);
super.setOwner(p, username, groupname);
logger.atFine().log("GHFS.setOwner:=> ");
}
@Override
public void setTimes(Path p, long mtime, long atime)
throws IOException {
logger.atFine().log("GHFS.setTimes: path: %s, mtime: %s, atime: %s", p, mtime, atime);
super.setTimes(p, mtime, atime);
logger.atFine().log("GHFS.setTimes:=> ");
}
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#PERMISSIONS_TO_REPORT} */
@Deprecated
public static final String PERMISSIONS_TO_REPORT_KEY =
GoogleHadoopFileSystemConfiguration.PERMISSIONS_TO_REPORT.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#PERMISSIONS_TO_REPORT} */
@Deprecated
public static final String PERMISSIONS_TO_REPORT_DEFAULT =
GoogleHadoopFileSystemConfiguration.PERMISSIONS_TO_REPORT.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_BUFFER_SIZE} */
@Deprecated
public static final String BUFFERSIZE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_BUFFER_SIZE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_BUFFER_SIZE} */
@Deprecated
public static final int BUFFERSIZE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_BUFFER_SIZE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_BUFFER_SIZE} */
@Deprecated
public static final String WRITE_BUFFERSIZE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_UPLOAD_CHUNK_SIZE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_BUFFER_SIZE} */
@Deprecated
public static final int WRITE_BUFFERSIZE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_UPLOAD_CHUNK_SIZE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#BLOCK_SIZE} */
@Deprecated
public static final String BLOCK_SIZE_KEY =
GoogleHadoopFileSystemConfiguration.BLOCK_SIZE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#BLOCK_SIZE} */
@Deprecated
public static final int BLOCK_SIZE_DEFAULT =
GoogleHadoopFileSystemConfiguration.BLOCK_SIZE.getDefault().intValue();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_SERVICE_ACCOUNT_ENABLE} */
@Deprecated
public static final String ENABLE_GCE_SERVICE_ACCOUNT_AUTH_KEY =
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_SERVICE_ACCOUNT_EMAIL} */
@Deprecated
public static final String SERVICE_ACCOUNT_AUTH_EMAIL_KEY =
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_EMAIL.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_SERVICE_ACCOUNT_KEY_FILE} */
@Deprecated
public static final String SERVICE_ACCOUNT_AUTH_KEYFILE_KEY =
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_KEY_FILE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PROJECT_ID} */
@Deprecated
public static final String GCS_PROJECT_ID_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PROJECT_ID.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_REQUESTER_PAYS_MODE} */
@Deprecated
public static final String GCS_REQUESTER_PAYS_MODE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_REQUESTER_PAYS_MODE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_REQUESTER_PAYS_PROJECT_ID} */
@Deprecated
public static final String GCS_REQUESTER_PAYS_PROJECT_ID_KEY =
GoogleHadoopFileSystemConfiguration.GCS_REQUESTER_PAYS_PROJECT_ID.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_REQUESTER_PAYS_BUCKETS} */
@Deprecated
public static final String GCS_REQUESTER_PAYS_BUCKETS_KEY =
GoogleHadoopFileSystemConfiguration.GCS_REQUESTER_PAYS_BUCKETS.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_CLIENT_ID} */
@Deprecated
public static final String GCS_CLIENT_ID_KEY =
GoogleHadoopFileSystemConfiguration.AUTH_CLIENT_ID.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_CLIENT_SECRET} */
@Deprecated
public static final String GCS_CLIENT_SECRET_KEY =
GoogleHadoopFileSystemConfiguration.AUTH_CLIENT_SECRET.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_SYSTEM_BUCKET} */
@Deprecated
public static final String GCS_SYSTEM_BUCKET_KEY =
GoogleHadoopFileSystemConfiguration.GCS_SYSTEM_BUCKET.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_CREATE_SYSTEM_BUCKET} */
@Deprecated
public static final String GCS_CREATE_SYSTEM_BUCKET_KEY =
GoogleHadoopFileSystemConfiguration.GCS_CREATE_SYSTEM_BUCKET.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_CREATE_SYSTEM_BUCKET} */
@Deprecated
public static final boolean GCS_CREATE_SYSTEM_BUCKET_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_CREATE_SYSTEM_BUCKET.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_WORKING_DIRECTORY} */
@Deprecated
public static final String GCS_WORKING_DIRECTORY_KEY =
GoogleHadoopFileSystemConfiguration.GCS_WORKING_DIRECTORY.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_FILE_SIZE_LIMIT_250GB} */
@Deprecated
public static final String GCS_FILE_SIZE_LIMIT_250GB =
GoogleHadoopFileSystemConfiguration.GCS_FILE_SIZE_LIMIT_250GB.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_FILE_SIZE_LIMIT_250GB} */
@Deprecated
public static final boolean GCS_FILE_SIZE_LIMIT_250GB_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_FILE_SIZE_LIMIT_250GB.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MARKER_FILE_PATTERN} */
@Deprecated
public static final String GCS_MARKER_FILE_PATTERN_KEY =
GoogleHadoopFileSystemConfiguration.GCS_MARKER_FILE_PATTERN.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_ENABLE} */
@Deprecated
public static final String GCS_ENABLE_PERFORMANCE_CACHE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_ENABLE} */
@Deprecated
public static final boolean GCS_ENABLE_PERFORMANCE_CACHE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_ENABLE.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS}
*/
@Deprecated
public static final String GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS}
*/
@Deprecated
public static final long GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE}
*/
@Deprecated
public static final String GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE}
*/
@Deprecated
public static final boolean GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT}
*/
@Deprecated
public static final String GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT
.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT}
*/
@Deprecated
public static final long GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT
.getDefault();
/**
* @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_ENABLE}
*/
@Deprecated
public static final String GCS_PARENT_TIMESTAMP_UPDATE_ENABLE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_ENABLE.getKey();
/**
* @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_ENABLE}
*/
@Deprecated
public static final boolean GCS_PARENT_TIMESTAMP_UPDATE_ENABLE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_ENABLE.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES}
*/
@Deprecated
public static final String GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES}
*/
@Deprecated
public static final String GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_DEFAULT =
Joiner.on(',')
.join(
GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES
.getDefault());
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#MR_JOB_HISTORY_INTERMEDIATE_DONE_DIR_KEY}
*/
@Deprecated
public static final String MR_JOB_HISTORY_INTERMEDIATE_DONE_DIR_KEY =
GoogleHadoopFileSystemConfiguration.MR_JOB_HISTORY_INTERMEDIATE_DONE_DIR_KEY;
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#MR_JOB_HISTORY_DONE_DIR_KEY} */
@Deprecated
public static final String MR_JOB_HISTORY_DONE_DIR_KEY =
GoogleHadoopFileSystemConfiguration.MR_JOB_HISTORY_DONE_DIR_KEY;
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES}
*/
@Deprecated
public static final String GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES}
*/
@Deprecated
public static final String GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_DEFAULT =
Joiner.on(',')
.join(
GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES
.getDefault());
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_REPAIR_IMPLICIT_DIRECTORIES_ENABLE}
*/
@Deprecated
public static final String GCS_ENABLE_REPAIR_IMPLICIT_DIRECTORIES_KEY =
GoogleHadoopFileSystemConfiguration.GCS_REPAIR_IMPLICIT_DIRECTORIES_ENABLE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_REPAIR_IMPLICIT_DIRECTORIES_ENABLE}
*/
@Deprecated
public static final boolean GCS_ENABLE_REPAIR_IMPLICIT_DIRECTORIES_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_REPAIR_IMPLICIT_DIRECTORIES_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#PATH_CODEC} */
@Deprecated
public static final String PATH_CODEC_KEY =
GoogleHadoopFileSystemConfiguration.PATH_CODEC.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#PATH_CODEC} */
@Deprecated
public static final String PATH_CODEC_DEFAULT =
GoogleHadoopFileSystemConfiguration.PATH_CODEC.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INFER_IMPLICIT_DIRECTORIES_ENABLE}
*/
@Deprecated
public static final String GCS_ENABLE_INFER_IMPLICIT_DIRECTORIES_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INFER_IMPLICIT_DIRECTORIES_ENABLE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INFER_IMPLICIT_DIRECTORIES_ENABLE}
*/
@Deprecated
public static final boolean GCS_ENABLE_INFER_IMPLICIT_DIRECTORIES_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INFER_IMPLICIT_DIRECTORIES_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_FLAT_GLOB_ENABLE} */
@Deprecated
public static final String GCS_ENABLE_FLAT_GLOB_KEY =
GoogleHadoopFileSystemConfiguration.GCS_FLAT_GLOB_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_FLAT_GLOB_ENABLE} */
@Deprecated
public static final boolean GCS_ENABLE_FLAT_GLOB_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_FLAT_GLOB_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MARKER_FILE_CREATION_ENABLE} */
@Deprecated
public static final String GCS_ENABLE_MARKER_FILE_CREATION_KEY =
GoogleHadoopFileSystemConfiguration.GCS_MARKER_FILE_CREATION_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MARKER_FILE_CREATION_ENABLE} */
@Deprecated
public static final boolean GCS_ENABLE_MARKER_FILE_CREATION_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_MARKER_FILE_CREATION_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_WITH_REWRITE_ENABLE} */
@Deprecated
public static final String GCS_ENABLE_COPY_WITH_REWRITE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_COPY_WITH_REWRITE_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_WITH_REWRITE_ENABLE} */
@Deprecated
public static final boolean GCS_ENABLE_COPY_WITH_REWRITE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_COPY_WITH_REWRITE_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_MAX_REQUESTS_PER_BATCH} */
@Deprecated
public static final String GCS_COPY_MAX_REQUESTS_PER_BATCH =
GoogleHadoopFileSystemConfiguration.GCS_COPY_MAX_REQUESTS_PER_BATCH.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_MAX_REQUESTS_PER_BATCH} */
@Deprecated
public static final long GCS_COPY_MAX_REQUESTS_PER_BATCH_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_COPY_MAX_REQUESTS_PER_BATCH.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_BATCH_THREADS} */
@Deprecated
public static final String GCS_COPY_BATCH_THREADS =
GoogleHadoopFileSystemConfiguration.GCS_COPY_BATCH_THREADS.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_BATCH_THREADS} */
@Deprecated
public static final int GCS_COPY_BATCH_THREADS_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_COPY_BATCH_THREADS.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MAX_LIST_ITEMS_PER_CALL} */
@Deprecated
public static final String GCS_MAX_LIST_ITEMS_PER_CALL =
GoogleHadoopFileSystemConfiguration.GCS_MAX_LIST_ITEMS_PER_CALL.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MAX_LIST_ITEMS_PER_CALL} */
@Deprecated
public static final long GCS_MAX_LIST_ITEMS_PER_CALL_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_MAX_LIST_ITEMS_PER_CALL.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MAX_REQUESTS_PER_BATCH} */
@Deprecated
public static final String GCS_MAX_REQUESTS_PER_BATCH =
GoogleHadoopFileSystemConfiguration.GCS_MAX_REQUESTS_PER_BATCH.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MAX_REQUESTS_PER_BATCH} */
@Deprecated
public static final long GCS_MAX_REQUESTS_PER_BATCH_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_MAX_REQUESTS_PER_BATCH.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_BATCH_THREADS} */
@Deprecated
public static final String GCS_BATCH_THREADS =
GoogleHadoopFileSystemConfiguration.GCS_BATCH_THREADS.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_BATCH_THREADS} */
@Deprecated
public static final int GCS_BATCH_THREADS_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_BATCH_THREADS.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_MAX_RETRY} */
@Deprecated
public static final String GCS_HTTP_MAX_RETRY_KEY =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_MAX_RETRY.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_MAX_RETRY} */
@Deprecated
public static final int GCS_HTTP_MAX_RETRY_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_MAX_RETRY.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_CONNECT_TIMEOUT} */
@Deprecated
public static final String GCS_HTTP_CONNECT_TIMEOUT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_CONNECT_TIMEOUT.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_CONNECT_TIMEOUT} */
@Deprecated
public static final int GCS_HTTP_CONNECT_TIMEOUT_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_CONNECT_TIMEOUT.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_READ_TIMEOUT} */
@Deprecated
public static final String GCS_HTTP_READ_TIMEOUT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_READ_TIMEOUT.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_READ_TIMEOUT} */
@Deprecated
public static final int GCS_HTTP_READ_TIMEOUT_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_READ_TIMEOUT.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PROXY_ADDRESS} */
@Deprecated
public static final String GCS_PROXY_ADDRESS_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PROXY_ADDRESS.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PROXY_ADDRESS} */
@Deprecated
public static final String GCS_PROXY_ADDRESS_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_PROXY_ADDRESS.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_TRANSPORT} */
@Deprecated
public static final String GCS_HTTP_TRANSPORT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_TRANSPORT.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_TRANSPORT} */
@Deprecated public static final String GCS_HTTP_TRANSPORT_DEFAULT = null;
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_APPLICATION_NAME_SUFFIX} */
@Deprecated
public static final String GCS_APPLICATION_NAME_SUFFIX_KEY =
GoogleHadoopFileSystemConfiguration.GCS_APPLICATION_NAME_SUFFIX.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_APPLICATION_NAME_SUFFIX} */
@Deprecated
public static final String GCS_APPLICATION_NAME_SUFFIX_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_APPLICATION_NAME_SUFFIX.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE}
*/
@Deprecated
public static final String GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE}
*/
@Deprecated
public static final int GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_TYPE} */
@Deprecated
public static final String GCS_OUTPUTSTREAM_TYPE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_TYPE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_TYPE} */
@Deprecated
public static final String GCS_OUTPUTSTREAM_TYPE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_TYPE.getDefault().toString();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_GENERATION_READ_CONSISTENCY} */
@Deprecated
public static final String GCS_GENERATION_READ_CONSISTENCY_KEY =
GoogleHadoopFileSystemConfiguration.GCS_GENERATION_READ_CONSISTENCY.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_GENERATION_READ_CONSISTENCY} */
@Deprecated
public static final GenerationReadConsistency GCS_GENERATION_READ_CONSISTENCY_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_GENERATION_READ_CONSISTENCY.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE}
*/
@Deprecated
public static final String GCS_INPUTSTREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE}
*/
@Deprecated
public static final boolean GCS_INPUTSTREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE
.getDefault();
/**
* @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_INPLACE_SEEK_LIMIT}
*/
@Deprecated
public static final String GCS_INPUTSTREAM_INPLACE_SEEK_LIMIT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_INPLACE_SEEK_LIMIT.getKey();
/**
* @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_INPLACE_SEEK_LIMIT}
*/
@Deprecated
public static final long GCS_INPUTSTREAM_INPLACE_SEEK_LIMIT_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_INPLACE_SEEK_LIMIT.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_FADVISE} */
@Deprecated
public static final String GCS_INPUTSTREAM_FADVISE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_FADVISE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_FADVISE} */
@Deprecated
public static final Fadvise GCS_INPUTSTREAM_FADVISE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_FADVISE.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_MIN_RANGE_REQUEST_SIZE}
*/
@Deprecated
public static final String GCS_INPUTSTREAM_MIN_RANGE_REQUEST_SIZE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_MIN_RANGE_REQUEST_SIZE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_MIN_RANGE_REQUEST_SIZE}
*/
@Deprecated
public static final int GCS_INPUTSTREAM_MIN_RANGE_REQUEST_SIZE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_MIN_RANGE_REQUEST_SIZE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCE_BUCKET_DELETE_ENABLE} */
@Deprecated
public static final String GCE_BUCKET_DELETE_ENABLE_KEY =
GoogleHadoopFileSystemConfiguration.GCE_BUCKET_DELETE_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCE_BUCKET_DELETE_ENABLE} */
@Deprecated
public static final boolean GCE_BUCKET_DELETE_ENABLE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCE_BUCKET_DELETE_ENABLE.getDefault();
}
|
gcs/src/main/java/com/google/cloud/hadoop/fs/gcs/GoogleHadoopFileSystemBase.java
|
/*
* Copyright 2013 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.hadoop.fs.gcs;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.BLOCK_SIZE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_CONCURRENT_GLOB_ENABLE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_CONFIG_OVERRIDE_FILE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_CREATE_SYSTEM_BUCKET;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_FILE_CHECKSUM_TYPE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_FLAT_GLOB_ENABLE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_LAZY_INITIALIZATION_ENABLE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_TYPE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_ENABLE;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_SYSTEM_BUCKET;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.GCS_WORKING_DIRECTORY;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.PATH_CODEC;
import static com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration.PERMISSIONS_TO_REPORT;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.flogger.LazyArgs.lazy;
import com.google.api.client.auth.oauth2.Credential;
import com.google.cloud.hadoop.gcsio.CreateFileOptions;
import com.google.cloud.hadoop.gcsio.FileInfo;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorage;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorage.ListPage;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystem;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystemOptions;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageItemInfo;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageReadOptions;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageReadOptions.Fadvise;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageReadOptions.GenerationReadConsistency;
import com.google.cloud.hadoop.gcsio.PathCodec;
import com.google.cloud.hadoop.gcsio.StorageResourceId;
import com.google.cloud.hadoop.util.AccessTokenProvider;
import com.google.cloud.hadoop.util.AccessTokenProviderClassFromConfigFactory;
import com.google.cloud.hadoop.util.CredentialFactory;
import com.google.cloud.hadoop.util.CredentialFromAccessTokenProviderClassFactory;
import com.google.cloud.hadoop.util.HadoopCredentialConfiguration;
import com.google.cloud.hadoop.util.HadoopVersionInfo;
import com.google.cloud.hadoop.util.PropertyUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Ascii;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.flogger.GoogleLogger;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.nio.file.DirectoryNotEmptyException;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
import org.apache.commons.codec.binary.Hex;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.GlobPattern;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
/**
* This class provides a Hadoop compatible File System on top of Google Cloud Storage (GCS).
*
* <p>It is implemented as a thin abstraction layer on top of GCS. The layer hides any specific
* characteristics of the underlying store and exposes FileSystem interface understood by the Hadoop
* engine.
*
* <p>Users interact with the files in the storage using fully qualified URIs. The file system
* exposed by this class is identified using the 'gs' scheme. For example, {@code
* gs://dir1/dir2/file1.txt}.
*
* <p>This implementation translates paths between hadoop Path and GCS URI with the convention that
* the Hadoop root directly corresponds to the GCS "root", e.g. gs:/. This is convenient for many
* reasons, such as data portability and close equivalence to gsutil paths, but imposes certain
* inherited constraints, such as files not being allowed in root (only 'directories' can be placed
* in root), and directory names inside root have a more limited set of allowed characters.
*
* <p>One of the main goals of this implementation is to maintain compatibility with behavior of
* HDFS implementation when accessed through FileSystem interface. HDFS implementation is not very
* consistent about the cases when it throws versus the cases when methods return false. We run GHFS
* tests and HDFS tests against the same test data and use that as a guide to decide whether to
* throw or to return false.
*/
public abstract class GoogleHadoopFileSystemBase extends GoogleHadoopFileSystemBaseSpecific
implements FileSystemDescriptor {
private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
/**
* Available types for use with {@link
* GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_TYPE}.
*/
public enum OutputStreamType {
BASIC,
SYNCABLE_COMPOSITE
}
/**
* Available GCS checksum types for use with {@link
* GoogleHadoopFileSystemConfiguration#GCS_FILE_CHECKSUM_TYPE}.
*/
public static enum GcsFileChecksumType {
NONE(null, 0),
CRC32C("COMPOSITE-CRC32C", 4),
MD5("MD5", 16);
private final String algorithmName;
private final int byteLength;
GcsFileChecksumType(String algorithmName, int byteLength) {
this.algorithmName = algorithmName;
this.byteLength = byteLength;
}
public String getAlgorithmName() {
return algorithmName;
}
public int getByteLength() {
return byteLength;
}
}
  /** Use new URI_ENCODED_PATH_CODEC. */
  public static final String PATH_CODEC_USE_URI_ENCODING = "uri-path";
  /** Use LEGACY_PATH_CODEC. */
  public static final String PATH_CODEC_USE_LEGACY_ENCODING = "legacy";
  /** Default value of replication factor reported to Hadoop clients. */
  public static final short REPLICATION_FACTOR_DEFAULT = 3;
  /** Default PathFilter that accepts all paths. */
  public static final PathFilter DEFAULT_FILTER = path -> true;
  /** Prefix to use for common authentication keys. */
  public static final String AUTHENTICATION_PREFIX = "fs.gs";
  /** A resource file containing GCS related build properties. */
  public static final String PROPERTIES_FILE = "gcs.properties";
  /** The key in the PROPERTIES_FILE that contains the version built. */
  public static final String VERSION_PROPERTY = "gcs.connector.version";
  /** The version returned when one cannot be found in properties. */
  public static final String UNKNOWN_VERSION = "0.0.0";
  /** Current version, resolved from the properties resource in the static initializer below. */
  public static final String VERSION;
  /** Identifies this version of the GoogleHadoopFileSystemBase library. */
  public static final String GHFS_ID;
  // Resolve the connector version once at class-load time, falling back to UNKNOWN_VERSION when
  // the properties resource or the version property is missing.
  static {
    VERSION =
        PropertyUtil.getPropertyOrDefault(
            GoogleHadoopFileSystemBase.class, PROPERTIES_FILE, VERSION_PROPERTY, UNKNOWN_VERSION);
    logger.atFine().log("GHFS version: %s", VERSION);
    GHFS_ID = String.format("GHFS/%s", VERSION);
  }
@VisibleForTesting
boolean enableFlatGlob = GCS_FLAT_GLOB_ENABLE.getDefault();
@VisibleForTesting
boolean enableConcurrentGlob = GCS_CONCURRENT_GLOB_ENABLE.getDefault();
private GcsFileChecksumType checksumType = GCS_FILE_CHECKSUM_TYPE.getDefault();
/** The URI the File System is passed in initialize. */
protected URI initUri;
/**
* The retrieved configuration value for {@link
* GoogleHadoopFileSystemConfiguration#GCS_SYSTEM_BUCKET}. Used as a fallback for a root bucket,
* when required.
*/
@Deprecated protected String systemBucket;
/** Underlying GCS file system object. */
private Supplier<GoogleCloudStorageFileSystem> gcsFsSupplier;
private boolean gcsFsInitialized = false;
protected PathCodec pathCodec;
/**
* Current working directory; overridden in initialize() if {@link
* GoogleHadoopFileSystemConfiguration#GCS_WORKING_DIRECTORY} is set.
*/
private Path workingDirectory;
/**
* Default block size. Note that this is the size that is reported to Hadoop FS clients. It does
* not modify the actual block size of an underlying GCS object, because GCS JSON API does not
* allow modifying or querying the value. Modifying this value allows one to control how many
* mappers are used to process a given file.
*/
protected long defaultBlockSize = BLOCK_SIZE.getDefault();
/** The fixed reported permission of all files. */
private FsPermission reportedPermissions;
/** Map of counter values */
protected final ImmutableMap<Counter, AtomicLong> counters = createCounterMap();
protected ImmutableMap<Counter, AtomicLong> createCounterMap() {
EnumMap<Counter, AtomicLong> countersMap = new EnumMap<>(Counter.class);
for (Counter counter : ALL_COUNTERS) {
countersMap.put(counter, new AtomicLong());
}
return Maps.immutableEnumMap(countersMap);
}
  /**
   * Behavior of listStatus when a path is not found.
   */
  protected enum ListStatusFileNotFoundBehavior {
    // Hadoop 1 contract: a missing path yields a null listing.
    Hadoop1 {
      @Override
      public FileStatus[] handle(String path) throws IOException {
        return null;
      }
    },
    // Hadoop 2 (and the 0.23 line) contract: a missing path raises FileNotFoundException.
    Hadoop2 {
      @Override
      public FileStatus[] handle(String path) throws IOException {
        throw new FileNotFoundException(String.format("Path '%s' does not exist.", path));
      }
    };
    /**
     * Perform version specific handling for a missing path.
     *
     * @param path The missing path
     * @return the version-appropriate listing result for a missing path (may be null).
     * @throws IOException for versions whose contract is to throw (see {@link #Hadoop2}).
     */
    public abstract FileStatus[] handle(String path) throws IOException;
    /**
     * Get the ListStatusFileNotFoundBehavior for the currently running Hadoop version.
     */
    public static ListStatusFileNotFoundBehavior get() {
      return get(HadoopVersionInfo.getInstance());
    }
    /**
     * Get the ListStatusFileNotFoundBehavior for the given hadoop version.
     *
     * @param hadoopVersionInfo The hadoop version.
     */
    public static ListStatusFileNotFoundBehavior get(HadoopVersionInfo hadoopVersionInfo) {
      // Hadoop >= 2.0, exactly 2.0, or the 0.23 predecessor line all use the throwing behavior.
      if (hadoopVersionInfo.isGreaterThan(2, 0)
          || hadoopVersionInfo.isEqualTo(2, 0)
          || hadoopVersionInfo.isEqualTo(0, 23)) {
        return Hadoop2;
      }
      return Hadoop1;
    }
  }
  // Behavior when a path is not found in listStatus()
  protected ListStatusFileNotFoundBehavior listStatusFileNotFoundBehavior =
      ListStatusFileNotFoundBehavior.get();
  /** Test hook: overrides the missing-path behavior used by listStatus(). */
  @VisibleForTesting
  protected void setListStatusFileNotFoundBehavior(ListStatusFileNotFoundBehavior behavior) {
    this.listStatusFileNotFoundBehavior = behavior;
  }
  /**
   * Defines names of counters we track for each operation.
   *
   * <p>There are two types of counters:
   * -- METHOD_NAME : Number of successful invocations of method METHOD.
   * -- METHOD_NAME_TIME : Total inclusive time spent in method METHOD.
   *
   * <p>Counters are stored in EnumMap-backed structures (see createCounterMap and ALL_COUNTERS).
   */
  public enum Counter {
    APPEND,
    APPEND_TIME,
    CREATE,
    CREATE_TIME,
    DELETE,
    DELETE_TIME,
    GET_FILE_CHECKSUM,
    GET_FILE_CHECKSUM_TIME,
    GET_FILE_STATUS,
    GET_FILE_STATUS_TIME,
    INIT,
    INIT_TIME,
    INPUT_STREAM,
    INPUT_STREAM_TIME,
    LIST_STATUS,
    LIST_STATUS_TIME,
    MKDIRS,
    MKDIRS_TIME,
    OPEN,
    OPEN_TIME,
    OUTPUT_STREAM,
    OUTPUT_STREAM_TIME,
    READ1,
    READ1_TIME,
    READ,
    READ_TIME,
    READ_FROM_CHANNEL,
    READ_FROM_CHANNEL_TIME,
    READ_CLOSE,
    READ_CLOSE_TIME,
    READ_POS,
    READ_POS_TIME,
    RENAME,
    RENAME_TIME,
    SEEK,
    SEEK_TIME,
    SET_WD,
    SET_WD_TIME,
    WRITE1,
    WRITE1_TIME,
    WRITE,
    WRITE_TIME,
    WRITE_CLOSE,
    WRITE_CLOSE_TIME,
  }
  /**
   * Set of all counters.
   *
   * <p>It is used for performance optimization instead of `Counter.values`, because
   * `Counter.values` returns new array on each invocation.
   */
  private static final ImmutableSet<Counter> ALL_COUNTERS =
      Sets.immutableEnumSet(EnumSet.allOf(Counter.class));
/**
* GCS {@link FileChecksum} which takes constructor parameters to define the return values of the
* various abstract methods of {@link FileChecksum}.
*/
private static class GcsFileChecksum extends FileChecksum {
private final GcsFileChecksumType checksumType;
private final byte[] bytes;
public GcsFileChecksum(GcsFileChecksumType checksumType, byte[] bytes) {
this.checksumType = checksumType;
this.bytes = bytes;
checkState(
bytes == null || bytes.length == checksumType.getByteLength(),
"Checksum value length (%s) should be equal to the algorithm byte length (%s)",
checksumType.getByteLength(), bytes.length);
}
@Override
public String getAlgorithmName() {
return checksumType.getAlgorithmName();
}
@Override
public int getLength() {
return checksumType.getByteLength();
}
@Override
public byte[] getBytes() {
return bytes;
}
@Override
public void readFields(DataInput in) throws IOException {
in.readFully(bytes);
}
@Override
public void write(DataOutput out) throws IOException {
out.write(bytes);
}
@Override
public String toString() {
return getAlgorithmName() + ": " + (bytes == null ? null : new String(Hex.encodeHex(bytes)));
}
}
/**
* A predicate that processes individual directory paths and evaluates the conditions set in
* fs.gs.parent.timestamp.update.enable, fs.gs.parent.timestamp.update.substrings.include and
* fs.gs.parent.timestamp.update.substrings.exclude to determine if a path should be ignored
* when running directory timestamp updates. If no match is found in either include or
* exclude and updates are enabled, the directory timestamp will be updated.
*/
public static class ParentTimestampUpdateIncludePredicate
implements GoogleCloudStorageFileSystemOptions.TimestampUpdatePredicate {
/**
* Create a new ParentTimestampUpdateIncludePredicate from the passed Hadoop configuration
* object.
*/
public static ParentTimestampUpdateIncludePredicate create(Configuration config) {
return new ParentTimestampUpdateIncludePredicate(
GCS_PARENT_TIMESTAMP_UPDATE_ENABLE.get(config, config::getBoolean),
GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES.getStringCollection(config),
GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES.getStringCollection(config));
}
// Include and exclude lists are intended to be small N and checked relatively
// infrequently. If that becomes not that case, consider Aho-Corasick or similar matching
// algorithms.
private final Collection<String> includeSubstrings;
private final Collection<String> excludeSubstrings;
private final boolean enableTimestampUpdates;
public ParentTimestampUpdateIncludePredicate(
boolean enableTimestampUpdates,
Collection<String> includeSubstrings,
Collection<String> excludeSubstrings) {
this.includeSubstrings = includeSubstrings;
this.excludeSubstrings = excludeSubstrings;
this.enableTimestampUpdates = enableTimestampUpdates;
}
/**
* Determine if updating directory timestamps should be ignored.
* @return True if the directory timestamp should not be updated. False to indicate it should
* be updated.
*/
@Override
public boolean shouldUpdateTimestamp(URI uri) {
if (!enableTimestampUpdates) {
logger.atFine().log("Timestamp updating disabled. Not updating uri %s", uri);
return false;
}
for (String include : includeSubstrings) {
if (uri.toString().contains(include)) {
logger.atFine().log(
"Path %s matched included path %s. Updating timestamps.", uri, include);
return true;
}
}
for (String exclude : excludeSubstrings) {
if (uri.toString().contains(exclude)) {
logger.atFine().log(
"Path %s matched excluded path %s. Not updating timestamps.", uri, exclude);
return false;
}
}
return true;
}
}
  /**
   * Constructs an instance of GoogleHadoopFileSystemBase; the internal {@link
   * GoogleCloudStorageFileSystem} will be set up with config settings when initialize() is called.
   */
  public GoogleHadoopFileSystemBase() {}
  /**
   * Constructs an instance of {@link GoogleHadoopFileSystemBase} using the provided
   * GoogleCloudStorageFileSystem; initialize() will not re-initialize it.
   */
  // TODO(b/120887495): This @VisibleForTesting annotation was being ignored by prod code.
  // Please check that removing it is correct, and remove this comment along with it.
  // @VisibleForTesting
  GoogleHadoopFileSystemBase(GoogleCloudStorageFileSystem gcsFs) {
    checkNotNull(gcsFs, "gcsFs must not be null");
    setGcsFs(gcsFs);
  }
  /**
   * Installs the given GCS filesystem: stores it behind a constant supplier, marks it as
   * initialized, and adopts its path codec for Hadoop-path/GCS-URI translation.
   */
  private void setGcsFs(GoogleCloudStorageFileSystem gcsFs) {
    this.gcsFsSupplier = Suppliers.ofInstance(gcsFs);
    this.gcsFsInitialized = true;
    this.pathCodec = gcsFs.getPathCodec();
  }
  /**
   * Returns an unqualified path without any leading slash, relative to the filesystem root,
   * which serves as the home directory of the current user; see {@code getHomeDirectory} for
   * a description of what the home directory means.
   *
   * @return the home-directory subpath, relative to the filesystem root.
   */
  protected abstract String getHomeDirectorySubpath();
  /**
   * Gets Hadoop path corresponding to the given GCS path.
   *
   * @param gcsPath Fully-qualified GCS path, of the form gs://<bucket>/<object>.
   * @return the equivalent Hadoop path.
   */
  public abstract Path getHadoopPath(URI gcsPath);
  /**
   * Gets GCS path corresponding to the given Hadoop path, which can be relative or absolute,
   * and can have either gs://<path> or gs:/<path> forms.
   *
   * @param hadoopPath Hadoop path.
   * @return the equivalent fully-qualified GCS path.
   */
  public abstract URI getGcsPath(Path hadoopPath);
  /**
   * Gets the default value of working directory.
   */
  public abstract Path getDefaultWorkingDirectory();
  // =================================================================
  // Methods implementing FileSystemDescriptor interface; these define the way
  // paths are translated between Hadoop and GCS.
  // =================================================================
  @Override
  public abstract Path getFileSystemRoot();
  @Override
  public abstract String getScheme();
  /** @deprecated use {@link #getScheme()} instead; this simply delegates to it. */
  @Deprecated
  @Override
  public String getHadoopScheme() {
    return getScheme();
  }
/**
*
* <p> Overridden to make root it's own parent. This is POSIX compliant, but more importantly
* guards against poor directory accounting in the PathData class of Hadoop 2's FsShell.
*/
@Override
public Path makeQualified(Path path) {
logger.atFine().log("GHFS.makeQualified: path: %s", path);
Path qualifiedPath = super.makeQualified(path);
URI uri = qualifiedPath.toUri();
checkState(
"".equals(uri.getPath()) || qualifiedPath.isAbsolute(),
"Path '%s' must be fully qualified.",
qualifiedPath);
// Strip initial '..'s to make root is its own parent.
StringBuilder sb = new StringBuilder(uri.getPath());
while (sb.indexOf("/../") == 0) {
// Leave a preceding slash, so path is still absolute.
sb.delete(0, 3);
}
String strippedPath = sb.toString();
// Allow a Path of gs://someBucket to map to gs://someBucket/
if (strippedPath.equals("/..") || strippedPath.equals("")) {
strippedPath = "/";
}
Path result = new Path(uri.getScheme(), uri.getAuthority(), strippedPath);
logger.atFine().log("GHFS.makeQualified:=> %s", result);
return result;
}
@Override
protected void checkPath(Path path) {
URI uri = path.toUri();
String scheme = uri.getScheme();
// Only check that the scheme matches. The authority and path will be
// validated later.
if (scheme == null || scheme.equalsIgnoreCase(getScheme())) {
return;
}
String msg = String.format(
"Wrong FS scheme: %s, in path: %s, expected scheme: %s",
scheme, path, getScheme());
throw new IllegalArgumentException(msg);
}
  /**
   * See {@link #initialize(URI, Configuration, boolean)} for details; calls with third arg
   * defaulting to 'true' for initializing the superclass.
   *
   * @param path URI of a file/directory within this file system.
   * @param config Hadoop configuration.
   * @throws IOException if the delegated initialization fails.
   */
  @Override
  public void initialize(URI path, Configuration config) throws IOException {
    initialize(path, config, /* initSuperclass= */ true);
  }
  /**
   * Initializes this file system instance.
   *
   * <p>Note: The path passed to this method could be path of any file/directory. It does not
   * matter because the only thing we check is whether it uses 'gs' scheme. The rest is ignored.
   *
   * @param path URI of a file/directory within this file system.
   * @param config Hadoop configuration.
   * @param initSuperclass if false, doesn't call super.initialize(path, config); avoids
   *     registering a global Statistics object for this instance.
   * @throws IOException if superclass initialization or configuration fails.
   */
  public void initialize(URI path, Configuration config, boolean initSuperclass)
      throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(path != null, "path must not be null");
    Preconditions.checkArgument(config != null, "config must not be null");
    Preconditions.checkArgument(path.getScheme() != null, "scheme of path must not be null");
    if (!path.getScheme().equals(getScheme())) {
      throw new IllegalArgumentException("URI scheme not supported: " + path);
    }
    initUri = path;
    logger.atFine().log("GHFS.initialize: %s", path);
    if (initSuperclass) {
      super.initialize(path, config);
    } else {
      logger.atFine().log(
          "Initializing 'statistics' as an instance not attached to the static FileSystem map");
      // Provide an ephemeral Statistics object to avoid NPE, but still avoid registering a global
      // statistics object.
      statistics = new Statistics(getScheme());
    }
    configure(config);
    long duration = System.nanoTime() - startTime;
    increment(Counter.INIT);
    increment(Counter.INIT_TIME, duration);
  }
  /**
   * Returns a URI of the root of this FileSystem.
   *
   * @return the root URI, derived from {@link #getFileSystemRoot()}.
   */
  @Override
  public URI getUri() {
    return getFileSystemRoot().toUri();
  }
/**
* The default port is listed as -1 as an indication that ports are not used.
*/
@Override
protected int getDefaultPort() {
logger.atFine().log("GHFS.getDefaultPort:");
int result = -1;
logger.atFine().log("GHFS.getDefaultPort:=> %s", result);
return result;
}
  // TODO(user): Improve conversion of exceptions to 'false'.
  // Hadoop is inconsistent about when methods are expected to throw
  // and when they should return false. The FileSystem documentation
  // is unclear on this and many other aspects. For now, we convert
  // all IOExceptions to false which is not the right thing to do.
  // We need to find a way to only convert known cases to 'false'
  // and let the other exceptions bubble up.
  /**
   * Opens the given file for reading.
   *
   * <p>Note: This function overrides the given bufferSize value with a higher number unless further
   * overridden using configuration parameter {@code fs.gs.inputstream.buffer.size}.
   *
   * @param hadoopPath File to open.
   * @param bufferSize Size of buffer to use for IO; ignored here, read buffering is driven by the
   *     GCS read channel options instead.
   * @return A readable stream.
   * @throws FileNotFoundException if the given path does not exist.
   * @throws IOException if an error occurs.
   */
  @Override
  public FSDataInputStream open(Path hadoopPath, int bufferSize) throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    checkOpen();
    logger.atFine().log("GHFS.open: %s, bufferSize: %d (ignored)", hadoopPath, bufferSize);
    URI gcsPath = getGcsPath(hadoopPath);
    // Read behavior (buffering, retries, etc.) comes from the GCS filesystem's channel options,
    // not from the Hadoop-supplied bufferSize.
    GoogleCloudStorageReadOptions readChannelOptions =
        getGcsFs().getOptions().getCloudStorageOptions().getReadChannelOptions();
    GoogleHadoopFSInputStream in =
        new GoogleHadoopFSInputStream(this, gcsPath, readChannelOptions, statistics);
    long duration = System.nanoTime() - startTime;
    increment(Counter.OPEN);
    increment(Counter.OPEN_TIME, duration);
    return new FSDataInputStream(in);
  }
  /**
   * Opens the given file for writing.
   *
   * <p>Note: This function overrides the given bufferSize value with a higher number unless further
   * overridden using configuration parameter {@code fs.gs.outputstream.buffer.size}.
   *
   * @param hadoopPath The file to open.
   * @param permission Permissions to set on the new file. Ignored.
   * @param overwrite If a file with this name already exists, then if true, the file will be
   *     overwritten, and if false an error will be thrown.
   * @param bufferSize The size of the buffer to use.
   * @param replication Required block replication for the file. Ignored.
   * @param blockSize The block-size to be used for the new file. Ignored.
   * @param progress Progress is reported through this. Ignored.
   * @return A writable stream.
   * @throws IOException if an error occurs.
   * @see #setPermission(Path, FsPermission)
   */
  @Override
  public FSDataOutputStream create(
      Path hadoopPath,
      FsPermission permission,
      boolean overwrite,
      int bufferSize,
      short replication,
      long blockSize,
      Progressable progress)
      throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    Preconditions.checkArgument(
        replication > 0, "replication must be a positive integer: %s", replication);
    Preconditions.checkArgument(
        blockSize > 0, "blockSize must be a positive integer: %s", blockSize);
    checkOpen();
    logger.atFine().log(
        "GHFS.create: %s, overwrite: %s, bufferSize: %d (ignored)",
        hadoopPath, overwrite, bufferSize);
    URI gcsPath = getGcsPath(hadoopPath);
    // The configured stream type selects which OutputStream implementation backs the returned
    // stream: BASIC -> GoogleHadoopOutputStream, SYNCABLE_COMPOSITE ->
    // GoogleHadoopSyncableOutputStream.
    OutputStreamType type = GCS_OUTPUT_STREAM_TYPE.get(getConf(), getConf()::getEnum);
    OutputStream out;
    switch (type) {
      case BASIC:
        out =
            new GoogleHadoopOutputStream(
                this, gcsPath, statistics, new CreateFileOptions(overwrite));
        break;
      case SYNCABLE_COMPOSITE:
        out =
            new GoogleHadoopSyncableOutputStream(
                this, gcsPath, statistics, new CreateFileOptions(overwrite));
        break;
      default:
        throw new IOException(
            String.format(
                "Unsupported output stream type given for key '%s': '%s'",
                GCS_OUTPUT_STREAM_TYPE.getKey(), type));
    }
    long duration = System.nanoTime() - startTime;
    increment(Counter.CREATE);
    increment(Counter.CREATE_TIME, duration);
    return new FSDataOutputStream(out, null);
  }
  /**
   * Appends to an existing file (optional operation). Not supported.
   *
   * @param hadoopPath The existing file to be appended.
   * @param bufferSize The size of the buffer to be used.
   * @param progress For reporting progress if it is not null.
   * @return A writable stream.
   * @throws IOException always; append is not supported by this filesystem.
   */
  @Override
  public FSDataOutputStream append(Path hadoopPath, int bufferSize, Progressable progress)
      throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    logger.atFine().log("GHFS.append: %s, bufferSize: %d (ignored)", hadoopPath, bufferSize);
    long duration = System.nanoTime() - startTime;
    // Counters are still recorded for the attempted call before the unconditional throw.
    increment(Counter.APPEND);
    increment(Counter.APPEND_TIME, duration);
    throw new IOException("The append operation is not supported.");
  }
  /**
   * Concat existing files into one file.
   *
   * @param trg the path to the target destination.
   * @param psrcs the paths to the sources to use for the concatenation.
   * @throws IOException IO failure
   */
  @Override
  public void concat(Path trg, Path[] psrcs) throws IOException {
    logger.atFine().log("GHFS.concat: %s, %s", trg, lazy(() -> Arrays.toString(psrcs)));
    checkArgument(psrcs.length > 0, "psrcs must have at least one source");
    URI trgPath = getGcsPath(trg);
    List<URI> srcPaths = Arrays.stream(psrcs).map(this::getGcsPath).collect(toImmutableList());
    checkArgument(!srcPaths.contains(trgPath), "target must not be contained in sources");
    // Each compose call includes the target as its first source (see below), so each partition
    // may hold at most MAX_COMPOSE_OBJECTS - 1 of the user-supplied sources.
    List<List<URI>> partitions =
        Lists.partition(srcPaths, GoogleCloudStorage.MAX_COMPOSE_OBJECTS - 1);
    logger.atFine().log("GHFS.concat: %s, %d partitions", trg, partitions.size());
    for (List<URI> partition : partitions) {
      // We need to include the target in the list of sources to compose since
      // the GCS FS compose operation will overwrite the target, whereas the Hadoop
      // concat operation appends to the target.
      List<URI> sources = Lists.newArrayList(trgPath);
      sources.addAll(partition);
      logger.atFine().log("GHFS.concat compose: %s, %s", trgPath, sources);
      getGcsFs().compose(sources, trgPath, CreateFileOptions.DEFAULT_CONTENT_TYPE);
    }
    logger.atFine().log("GHFS.concat:=> ");
  }
  /**
   * Renames src to dst. Src must not be equal to the filesystem root.
   *
   * @param src Source path.
   * @param dst Destination path.
   * @return true if rename succeeds; false if src is the filesystem root or the underlying
   *     rename throws an IOException (which is logged and swallowed).
   * @throws FileNotFoundException if src does not exist.
   * @throws IOException if an error occurs.
   */
  @Override
  public boolean rename(Path src, Path dst) throws IOException {
    // Even though the underlying GCSFS will also throw an IAE if src is root, since our filesystem
    // root happens to equal the global root, we want to explicitly check it here since derived
    // classes may not have filesystem roots equal to the global root.
    if (src.makeQualified(this).equals(getFileSystemRoot())) {
      logger.atFine().log("GHFS.rename: src is root: '%s'", src);
      return false;
    }
    long startTime = System.nanoTime();
    Preconditions.checkArgument(src != null, "src must not be null");
    Preconditions.checkArgument(dst != null, "dst must not be null");
    checkOpen();
    URI srcPath = getGcsPath(src);
    URI dstPath = getGcsPath(dst);
    logger.atFine().log("GHFS.rename: %s -> %s", src, dst);
    try {
      getGcsFs().rename(srcPath, dstPath);
    } catch (IOException e) {
      // Occasionally log exceptions that have a cause at info level,
      // because they could surface real issues and help with troubleshooting
      // (fine-level logging gets everything; otherwise escalate caused exceptions to a
      // rate-limited info log).
      (logger.atFine().isEnabled() || e.getCause() == null
              ? logger.atFine()
              : logger.atInfo().atMostEvery(5, TimeUnit.MINUTES))
          .withCause(e)
          .log("Failed GHFS.rename: %s -> %s", src, dst);
      return false;
    }
    long duration = System.nanoTime() - startTime;
    increment(Counter.RENAME);
    increment(Counter.RENAME_TIME, duration);
    return true;
  }
  /**
   * Delete a file; equivalent to {@code delete(f, true)} (recursive).
   *
   * @deprecated Use {@code delete(Path, boolean)} instead
   */
  @Deprecated
  @Override
  public boolean delete(Path f)
      throws IOException {
    return delete(f, true);
  }
  /**
   * Deletes the given file or directory.
   *
   * @param hadoopPath The path to delete.
   * @param recursive If path is a directory and set to
   * true, the directory is deleted, else throws an exception.
   * In case of a file, the recursive parameter is ignored.
   * @return true if delete is successful else false.
   * @throws DirectoryNotEmptyException if the path is a non-empty directory and recursive is
   *     false (deliberately rethrown rather than converted to false).
   * @throws IOException if an error occurs.
   */
  @Override
  public boolean delete(Path hadoopPath, boolean recursive) throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    checkOpen();
    logger.atFine().log("GHFS.delete: %s, recursive: %s", hadoopPath, recursive);
    URI gcsPath = getGcsPath(hadoopPath);
    try {
      getGcsFs().delete(gcsPath, recursive);
    } catch (DirectoryNotEmptyException e) {
      // Callers rely on this specific failure mode; do not convert it to a 'false' return.
      throw e;
    } catch (IOException e) {
      // Occasionally log exceptions that have a cause at info level,
      // because they could surface real issues and help with troubleshooting
      (logger.atFine().isEnabled() || e.getCause() == null
              ? logger.atFine()
              : logger.atInfo().atMostEvery(5, TimeUnit.MINUTES))
          .withCause(e)
          .log("Failed GHFS.delete: %s, recursive: %s", hadoopPath, recursive);
      return false;
    }
    long duration = System.nanoTime() - startTime;
    increment(Counter.DELETE);
    increment(Counter.DELETE_TIME, duration);
    return true;
  }
  /**
   * Lists file status. If the given path points to a directory then the status
   * of children is returned, otherwise the status of the given file is returned.
   *
   * @param hadoopPath Given path.
   * @return File status list; on a missing path the result (null or a thrown
   *     FileNotFoundException) is delegated to {@link #listStatusFileNotFoundBehavior}.
   * @throws IOException if an error occurs.
   */
  @Override
  public FileStatus[] listStatus(Path hadoopPath)
      throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    checkOpen();
    logger.atFine().log("GHFS.listStatus: %s", hadoopPath);
    URI gcsPath = getGcsPath(hadoopPath);
    List<FileStatus> status;
    try {
      List<FileInfo> fileInfos =
          getGcsFs().listFileInfo(gcsPath, isAutoRepairImplicitDirectoriesEnabled());
      status = new ArrayList<>(fileInfos.size());
      String userName = getUgiUserName();
      for (FileInfo fileInfo : fileInfos) {
        status.add(getFileStatus(fileInfo, userName));
      }
    } catch (FileNotFoundException fnfe) {
      logger.atFine().withCause(fnfe).log("Got fnfe: ");
      // Hadoop-version-dependent handling: return null (Hadoop 1) or rethrow FNFE (Hadoop 2).
      return listStatusFileNotFoundBehavior.handle(gcsPath.toString());
    }
    long duration = System.nanoTime() - startTime;
    increment(Counter.LIST_STATUS);
    increment(Counter.LIST_STATUS_TIME, duration);
    return status.toArray(new FileStatus[0]);
  }
private boolean isAutoRepairImplicitDirectoriesEnabled() {
GoogleCloudStorageFileSystemOptions gcsFsOptions = getGcsFs().getOptions();
return gcsFsOptions.getCloudStorageOptions().isAutoRepairImplicitDirectoriesEnabled();
}
  /**
   * Sets the current working directory to the given path.
   *
   * <p>Does not verify that the path exists (see inline note below).
   *
   * @param hadoopPath New working directory.
   */
  @Override
  public void setWorkingDirectory(Path hadoopPath) {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    logger.atFine().log("GHFS.setWorkingDirectory: %s", hadoopPath);
    // Normalize to a directory-style GCS path before converting back to a Hadoop path.
    URI gcsPath = FileInfo.convertToDirectoryPath(pathCodec, getGcsPath(hadoopPath));
    Path newPath = getHadoopPath(gcsPath);
    // Ideally we should check (as we did earlier) if the given path really points to an existing
    // directory. However, it takes considerable amount of time for that check which hurts perf.
    // Given that HDFS code does not do such checks either, we choose to not do them in favor of
    // better performance.
    workingDirectory = newPath;
    logger.atFine().log("GHFS.setWorkingDirectory: => %s", workingDirectory);
    long duration = System.nanoTime() - startTime;
    increment(Counter.SET_WD);
    increment(Counter.SET_WD_TIME, duration);
  }
  /**
   * Gets the current working directory.
   *
   * @return The current working directory.
   */
  @Override
  public Path getWorkingDirectory() {
    logger.atFine().log("GHFS.getWorkingDirectory: %s", workingDirectory);
    return workingDirectory;
  }
  /**
   * Makes the given path and all non-existent parents directories.
   * Has the semantics of Unix 'mkdir -p'.
   *
   * @param hadoopPath Given path.
   * @param permission Permissions to set on the given directory. Ignored.
   * @return true on success (this implementation never returns false; failures throw).
   * @throws FileAlreadyExistsException if a file already exists where a directory is requested.
   * @throws IOException if an error occurs.
   */
  @Override
  public boolean mkdirs(Path hadoopPath, FsPermission permission)
      throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    checkOpen();
    logger.atFine().log("GHFS.mkdirs: %s, perm: %s", hadoopPath, permission);
    URI gcsPath = getGcsPath(hadoopPath);
    try {
      getGcsFs().mkdirs(gcsPath);
    } catch (java.nio.file.FileAlreadyExistsException faee) {
      // Need to convert to the Hadoop flavor of FileAlreadyExistsException.
      throw (FileAlreadyExistsException)
          new FileAlreadyExistsException(faee.getMessage()).initCause(faee);
    }
    long duration = System.nanoTime() - startTime;
    increment(Counter.MKDIRS);
    increment(Counter.MKDIRS_TIME, duration);
    return true;
  }
  /**
   * Gets the default replication factor.
   *
   * @return the fixed reported replication factor ({@link #REPLICATION_FACTOR_DEFAULT}).
   */
  @Override
  public short getDefaultReplication() {
    return REPLICATION_FACTOR_DEFAULT;
  }
  /**
   * Gets status of the given path item.
   *
   * @param hadoopPath The path we want information about.
   * @return A FileStatus object for the given path.
   * @throws FileNotFoundException when the path does not exist;
   * @throws IOException on other errors.
   */
  @Override
  public FileStatus getFileStatus(Path hadoopPath)
      throws IOException {
    long startTime = System.nanoTime();
    Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
    checkOpen();
    logger.atFine().log("GHFS.getFileStatus: %s", hadoopPath);
    URI gcsPath = getGcsPath(hadoopPath);
    FileInfo fileInfo = getGcsFs().getFileInfo(gcsPath);
    if (!fileInfo.exists()) {
      logger.atFine().log("GHFS.getFileStatus: not found: %s", gcsPath);
      // Tailor the message to whether the missing entry was looked up as a directory or a file.
      throw new FileNotFoundException(
          (fileInfo.isDirectory() ? "Directory not found : " : "File not found : ") + hadoopPath);
    }
    String userName = getUgiUserName();
    FileStatus status = getFileStatus(fileInfo, userName);
    long duration = System.nanoTime() - startTime;
    increment(Counter.GET_FILE_STATUS);
    increment(Counter.GET_FILE_STATUS_TIME, duration);
    return status;
  }
  /**
   * Gets FileStatus corresponding to the given FileInfo value.
   *
   * @param fileInfo source GCS file metadata.
   * @param userName reported as both owner and group of the file.
   */
  private FileStatus getFileStatus(FileInfo fileInfo, String userName) throws IOException {
    // GCS does not provide modification time. It only provides creation time.
    // It works for objects because they are immutable once created.
    // NOTE(review): fileInfo.getModificationTime() is used for both modification and access time.
    FileStatus status =
        new FileStatus(
            fileInfo.getSize(),
            fileInfo.isDirectory(),
            REPLICATION_FACTOR_DEFAULT,
            defaultBlockSize,
            /* modificationTime= */ fileInfo.getModificationTime(),
            /* accessTime= */ fileInfo.getModificationTime(),
            reportedPermissions,
            /* owner= */ userName,
            /* group= */ userName,
            getHadoopPath(fileInfo.getPath()));
    logger.atFine().log(
        "GHFS.getFileStatus: %s => %s", fileInfo.getPath(), lazy(() -> fileStatusToString(status)));
    return status;
  }
/**
* Determines based on suitability of {@code fixedPath} whether to use flat globbing logic where
* we use a single large listing during globStatus to then perform the core globbing logic
* in-memory.
*/
@VisibleForTesting
boolean couldUseFlatGlob(Path fixedPath) {
// Only works for filesystems where the base Hadoop Path scheme matches the underlying URI
// scheme for GCS.
if (!getUri().getScheme().equals(GoogleCloudStorageFileSystem.SCHEME)) {
logger.atFine().log(
"Flat glob is on, but doesn't work for scheme '%s'; using default behavior.",
getUri().getScheme());
return false;
}
// The full pattern should have a wildcard, otherwise there's no point doing the flat glob.
GlobPattern fullPattern = new GlobPattern(fixedPath.toString());
if (!fullPattern.hasWildcard()) {
logger.atFine().log(
"Flat glob is on, but Path '%s' has no wildcard; using default behavior.", fixedPath);
return false;
}
// To use a flat glob, there must be an authority defined.
if (Strings.isNullOrEmpty(fixedPath.toUri().getAuthority())) {
logger.atInfo().log(
"Flat glob is on, but Path '%s' has a empty authority, using default behavior.",
fixedPath);
return false;
}
// And the authority must not contain a wildcard.
GlobPattern authorityPattern = new GlobPattern(fixedPath.toUri().getAuthority());
if (authorityPattern.hasWildcard()) {
logger.atInfo().log(
"Flat glob is on, but Path '%s' has a wildcard authority, using default behavior.",
fixedPath);
return false;
}
return true;
}
@VisibleForTesting
String trimToPrefixWithoutGlob(String path) {
char[] wildcardChars = "*?{[".toCharArray();
int trimIndex = path.length();
// Find the first occurrence of any one of the wildcard characters, or just path.length()
// if none are found.
for (char wildcard : wildcardChars) {
int wildcardIndex = path.indexOf(wildcard);
if (wildcardIndex >= 0 && wildcardIndex < trimIndex) {
trimIndex = wildcardIndex;
}
}
return path.substring(0, trimIndex);
}
  /**
   * Returns an array of FileStatus objects whose path names match pathPattern.
   *
   * Return null if pathPattern has no glob and the path does not exist.
   * Return an empty array if pathPattern has a glob and no path matches it.
   *
   * @param pathPattern A regular expression specifying the path pattern.
   * @return An array of FileStatus objects.
   * @throws IOException if an error occurs.
   */
  @Override
  public FileStatus[] globStatus(Path pathPattern) throws IOException {
    return globStatus(pathPattern, DEFAULT_FILTER);
  }
  /**
   * Returns an array of FileStatus objects whose path names match pathPattern and is accepted by
   * the user-supplied path filter. Results are sorted by their path names.
   *
   * <p>Return null if pathPattern has no glob and the path does not exist. Return an empty array if
   * pathPattern has a glob and no path matches it.
   *
   * <p>Dispatches to one of three strategies: concurrent (flat and regular glob raced against each
   * other), flat (single large listing), or the regular recursive glob.
   *
   * @param pathPattern A regular expression specifying the path pattern.
   * @param filter A user-supplied path filter.
   * @return An array of FileStatus objects.
   * @throws IOException if an error occurs.
   */
  @Override
  public FileStatus[] globStatus(Path pathPattern, PathFilter filter) throws IOException {
    checkOpen();
    logger.atFine().log("GHFS.globStatus: %s", pathPattern);
    // URI does not handle glob expressions nicely, for the purpose of
    // fully-qualifying a path we can URI-encode them.
    // Using toString() to avoid Path(URI) constructor.
    Path encodedPath = new Path(pathPattern.toUri().toString());
    // We convert pathPattern to GCS path and then to Hadoop path to ensure that it ends up in
    // the correct format. See note in getHadoopPath for more information.
    Path encodedFixedPath = getHadoopPath(getGcsPath(encodedPath));
    // Decode URI-encoded path back into a glob path.
    Path fixedPath = new Path(URI.create(encodedFixedPath.toString()));
    logger.atFine().log("GHFS.globStatus fixedPath: %s => %s", pathPattern, fixedPath);
    if (enableConcurrentGlob && couldUseFlatGlob(fixedPath)) {
      return concurrentGlobInternal(fixedPath, filter, pathPattern);
    }
    if (enableFlatGlob && couldUseFlatGlob(fixedPath)) {
      return flatGlobInternal(fixedPath, filter);
    }
    return globInternal(fixedPath, filter, pathPattern);
  }
private FileStatus[] concurrentGlobInternal(Path fixedPath, PathFilter filter, Path pathPattern)
throws IOException {
ExecutorService executorService = Executors.newFixedThreadPool(2);
Callable<FileStatus[]> flatGlobTask = () -> flatGlobInternal(fixedPath, filter);
Callable<FileStatus[]> nonFlatGlobTask = () -> globInternal(fixedPath, filter, pathPattern);
try {
return executorService.invokeAny(Arrays.asList(flatGlobTask, nonFlatGlobTask));
} catch (InterruptedException | ExecutionException e) {
throw (e.getCause() instanceof IOException) ? (IOException) e.getCause() : new IOException(e);
} finally {
executorService.shutdownNow();
}
}
  /**
   * "Flat glob" strategy: instead of recursively walking directories, bulk-lists (with
   * pagination) every object under the longest non-glob prefix of {@code fixedPath}, then
   * matches the glob against the in-memory listing via a helper FileSystem. Returns null when no
   * page ever produced matches; returns an empty array when matches were produced but all
   * filtered out — mirroring the globStatus contract.
   */
  private FileStatus[] flatGlobInternal(Path fixedPath, PathFilter filter) throws IOException {
    String pathString = fixedPath.toString();
    String prefixString = trimToPrefixWithoutGlob(pathString);
    Path prefixPath = new Path(prefixString);
    URI prefixUri = getGcsPath(prefixPath);
    if (prefixString.endsWith("/") && !prefixPath.toString().endsWith("/")) {
      // Path strips a trailing slash unless it's the 'root' path. We want to keep the trailing
      // slash so that we don't wastefully list sibling files which may match the directory-name
      // as a strict prefix but would've been omitted due to not containing the '/' at the end.
      prefixUri = FileInfo.convertToDirectoryPath(pathCodec, prefixUri);
    }
    // Get everything matching the non-glob prefix.
    logger.atFine().log("Listing everything with prefix '%s'", prefixUri);
    List<FileStatus> matchedStatuses = null;
    String pageToken = null;
    do {
      ListPage<FileInfo> infoPage = getGcsFs().listAllFileInfoForPrefixPage(prefixUri, pageToken);
      // TODO: Are implicit directories really always needed for globbing?
      // Probably they should be inferred only when fs.gs.implicit.dir.infer.enable is true.
      Collection<FileStatus> statusPage =
          toFileStatusesWithImplicitDirectories(infoPage.getItems());
      // TODO: refactor to use GlobPattern and PathFilter directly without helper FS
      FileSystem helperFileSystem =
          InMemoryGlobberFileSystem.createInstance(getConf(), getWorkingDirectory(), statusPage);
      FileStatus[] matchedStatusPage = helperFileSystem.globStatus(fixedPath, filter);
      if (matchedStatusPage != null) {
        // Lazily allocate the result list on the first page that yields matches.
        Collections.addAll(
            (matchedStatuses == null ? matchedStatuses = new ArrayList<>() : matchedStatuses),
            matchedStatusPage);
      }
      pageToken = infoPage.getNextPageToken();
    } while (pageToken != null);
    if (matchedStatuses == null || matchedStatuses.isEmpty()) {
      // null => pattern never matched; empty array => glob matched nothing after filtering.
      return matchedStatuses == null ? null : new FileStatus[0];
    }
    matchedStatuses.sort(
        ((Comparator<FileStatus>) Comparator.<FileStatus>naturalOrder())
            // Place duplicate implicit directories after real directory
            .thenComparingInt((FileStatus f) -> isImplicitDirectory(f) ? 1 : 0));
    // Remove duplicate file statuses that could be in the matchedStatuses
    // because of pagination and implicit directories
    List<FileStatus> filteredStatuses = new ArrayList<>(matchedStatuses.size());
    FileStatus lastAdded = null;
    for (FileStatus fileStatus : matchedStatuses) {
      // Sorted input: duplicates are adjacent, so comparing against the last kept entry suffices.
      if (lastAdded == null || lastAdded.compareTo(fileStatus) != 0) {
        filteredStatuses.add(fileStatus);
        lastAdded = fileStatus;
      }
    }
    FileStatus[] returnList = filteredStatuses.toArray(new FileStatus[0]);
    // If the return list contains directories, we should repair them if they're 'implicit'.
    if (isAutoRepairImplicitDirectoriesEnabled()) {
      List<URI> toRepair = new ArrayList<>();
      for (FileStatus status : returnList) {
        if (isImplicitDirectory(status)) {
          toRepair.add(getGcsPath(status.getPath()));
        }
      }
      if (!toRepair.isEmpty()) {
        logger.atWarning().log(
            "Discovered %s implicit directories to repair within return values.", toRepair.size());
        getGcsFs().repairDirs(toRepair);
      }
    }
    return returnList;
  }
private FileStatus[] globInternal(Path fixedPath, PathFilter filter, Path pathPattern)
throws IOException {
FileStatus[] ret = super.globStatus(fixedPath, filter);
if (ret == null) {
if (isAutoRepairImplicitDirectoriesEnabled()) {
logger.atFine().log(
"GHFS.globStatus returned null for '%s', attempting possible repair.", pathPattern);
if (getGcsFs().repairPossibleImplicitDirectory(getGcsPath(fixedPath))) {
logger.atWarning().log("Success repairing '%s', re-globbing.", pathPattern);
ret = super.globStatus(fixedPath, filter);
}
}
}
return ret;
}
private static boolean isImplicitDirectory(FileStatus curr) {
// Modification time of 0 indicates implicit directory.
return curr.isDir() && curr.getModificationTime() == 0;
}
  /**
   * Helper method that converts {@link FileInfo} collection to {@link FileStatus} collection.
   *
   * <p>Also synthesizes inferred-directory entries for every ancestor path (up to, but not
   * including, the GCS root) that is missing from the listing, so that getFileStatus on a parent
   * works when these statuses are fed into the in-memory globber.
   *
   * @param fileInfos listing to convert; not mutated.
   * @return statuses for the inputs plus fake entries for any missing parent directories.
   * @throws IOException if looking up the current UGI user name fails.
   */
  private Collection<FileStatus> toFileStatusesWithImplicitDirectories(
      Collection<FileInfo> fileInfos) throws IOException {
    List<FileStatus> fileStatuses = new ArrayList<>(fileInfos.size());
    // Tracks every path already represented, so parents are synthesized at most once.
    Set<URI> filePaths = Sets.newHashSetWithExpectedSize(fileInfos.size());
    String userName = getUgiUserName();
    for (FileInfo fileInfo : fileInfos) {
      filePaths.add(fileInfo.getPath());
      fileStatuses.add(getFileStatus(fileInfo, userName));
    }
    // The flow for populating this doesn't bother to populate metadata entries for parent
    // directories but we know the parent directories are expected to exist, so we'll just
    // populate the missing entries explicitly here. Necessary for getFileStatus(parentOfInfo)
    // to work when using an instance of this class.
    for (FileInfo fileInfo : fileInfos) {
      URI parentPath = getGcsFs().getParentPath(fileInfo.getPath());
      while (parentPath != null && !parentPath.equals(GoogleCloudStorageFileSystem.GCS_ROOT)) {
        if (!filePaths.contains(parentPath)) {
          logger.atFine().log("Adding fake entry for missing parent path '%s'", parentPath);
          StorageResourceId id = pathCodec.validatePathAndGetId(parentPath, true);
          GoogleCloudStorageItemInfo fakeItemInfo =
              GoogleCloudStorageItemInfo.createInferredDirectory(id);
          FileInfo fakeFileInfo = FileInfo.fromItemInfo(pathCodec, fakeItemInfo);
          filePaths.add(parentPath);
          fileStatuses.add(getFileStatus(fakeFileInfo, userName));
        }
        parentPath = getGcsFs().getParentPath(parentPath);
      }
    }
    return fileStatuses;
  }
/** Helper method to get the UGI short user name */
private static String getUgiUserName() throws IOException {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
return ugi.getShortUserName();
}
/**
* Returns home directory of the current user.
*
* Note: This directory is only used for Hadoop purposes.
* It is not the same as a user's OS home directory.
*/
@Override
public Path getHomeDirectory() {
Path result = new Path(getFileSystemRoot(), getHomeDirectorySubpath());
logger.atFine().log("GHFS.getHomeDirectory:=> %s", result);
return result;
}
/**
* Converts the given FileStatus to its string representation.
*
* @param stat FileStatus to convert.
* @return String representation of the given FileStatus.
*/
private static String fileStatusToString(FileStatus stat) {
assert stat != null;
return String.format(
"path: %s, isDir: %s, len: %d, owner: %s",
stat.getPath().toString(),
stat.isDir(),
stat.getLen(),
stat.getOwner());
}
  /**
   * Gets system bucket name.
   *
   * <p>May be null when no system bucket is configured (the value is normalized via emptyToNull
   * during configure()).
   *
   * @deprecated Use getUri().authority instead.
   */
  @VisibleForTesting
  @Deprecated
  String getSystemBucketName() {
    return systemBucket;
  }
  /**
   * {@inheritDoc}
   *
   * <p>Returns null, because GHFS does not use security tokens.
   */
  @Override
  public String getCanonicalServiceName() {
    // Entry and result traces are emitted back-to-back since there is no work in between.
    logger.atFine().log("GHFS.getCanonicalServiceName:");
    logger.atFine().log("GHFS.getCanonicalServiceName:=> null");
    return null;
  }
  /**
   * Gets GCS FS instance.
   *
   * <p>When lazy initialization is enabled, the first call triggers creation via the memoized
   * supplier set up in configure().
   */
  public GoogleCloudStorageFileSystem getGcsFs() {
    return gcsFsSupplier.get();
  }
  /**
   * Increments by 1 the counter indicated by key.
   *
   * @param key counter to increment.
   */
  void increment(Counter key) {
    increment(key, 1);
  }
  /**
   * Adds value to the counter indicated by key.
   *
   * @param key counter to update.
   * @param value amount to add (atomically, via the underlying atomic counter).
   */
  void increment(Counter key, long value) {
    counters.get(key).addAndGet(value);
  }
/**
* Gets value of all counters as a formatted string.
*/
@VisibleForTesting
String countersToString() {
StringBuilder sb = new StringBuilder();
sb.append("\n");
double numNanoSecPerSec = TimeUnit.SECONDS.toNanos(1);
String timeSuffix = "_TIME";
for (Counter c : Counter.values()) {
String name = c.toString();
if (!name.endsWith(timeSuffix)) {
// Log invocation counter.
long count = counters.get(c).get();
sb.append(String.format("%20s = %d calls\n", name, count));
// Log duration counter.
String timeCounterName = name + timeSuffix;
double totalTime =
counters.get(Enum.valueOf(Counter.class, timeCounterName)).get()
/ numNanoSecPerSec;
sb.append(String.format("%20s = %.2f sec\n", timeCounterName, totalTime));
// Compute and log average duration per call (== total duration / num invocations).
String avgName = name + " avg.";
double avg = totalTime / count;
sb.append(String.format("%20s = %.2f sec / call\n\n", avgName, avg));
}
}
return sb.toString();
}
  /**
   * Logs values of all counters at FINE level; the report string is built lazily so no work
   * happens when FINE logging is disabled.
   */
  private void logCounters() {
    logger.atFine().log("%s", lazy(this::countersToString));
  }
/**
* Copy the value of the deprecated key to the new key if a value is present for the deprecated
* key, but not the new key.
*/
private static void copyIfNotPresent(Configuration config, String deprecatedKey, String newKey) {
String deprecatedValue = config.get(deprecatedKey);
if (config.get(newKey) == null && deprecatedValue != null) {
logger.atWarning().log(
"Key %s is deprecated. Copying the value of key %s to new key %s",
deprecatedKey, deprecatedKey, newKey);
config.set(newKey, deprecatedValue);
}
}
/**
* Copy deprecated configuration options to new keys, if present.
*/
private static void copyDeprecatedConfigurationOptions(Configuration config) {
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_ENABLE.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.ENABLE_SERVICE_ACCOUNTS_SUFFIX);
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_KEY_FILE.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.SERVICE_ACCOUNT_KEYFILE_SUFFIX);
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_EMAIL.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.SERVICE_ACCOUNT_EMAIL_SUFFIX);
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_CLIENT_ID.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.CLIENT_ID_SUFFIX);
copyIfNotPresent(
config,
GoogleHadoopFileSystemConfiguration.AUTH_CLIENT_SECRET.getKey(),
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.CLIENT_SECRET_SUFFIX);
String oauthClientFileKey =
AUTHENTICATION_PREFIX + HadoopCredentialConfiguration.OAUTH_CLIENT_FILE_SUFFIX;
if (config.get(oauthClientFileKey) == null) {
// No property to copy, but we can set this fairly safely (it's only invoked if client ID,
// client secret are set and we're not using service accounts).
config.set(
oauthClientFileKey, System.getProperty("user.home") + "/.credentials/storage.json");
}
}
/**
* Retrieve user's Credential. If user implemented {@link AccessTokenProvider} and provided the
* class name (See {@link AccessTokenProviderClassFromConfigFactory} then build a credential with
* access token provided by this provider; Otherwise obtain credential through {@link
* HadoopCredentialConfiguration#getCredential(List)}.
*/
private static Credential getCredential(
AccessTokenProviderClassFromConfigFactory providerClassFactory, Configuration config)
throws IOException, GeneralSecurityException {
Credential credential =
CredentialFromAccessTokenProviderClassFactory.credential(
providerClassFactory, config, CredentialFactory.GCS_SCOPES);
if (credential != null) {
return credential;
}
return HadoopCredentialConfiguration.newBuilder()
.withConfiguration(config)
.withOverridePrefix(AUTHENTICATION_PREFIX)
.build()
.getCredential(CredentialFactory.GCS_SCOPES);
}
  /**
   * Configures GHFS using the supplied configuration.
   *
   * <p>Merges any override file, migrates deprecated keys, caches glob/checksum/block-size
   * settings, and then either creates the underlying GoogleCloudStorageFileSystem eagerly or —
   * when lazy initialization is enabled — wraps its creation in a memoized supplier that runs at
   * most once on first access.
   *
   * @param config Hadoop configuration object.
   * @throws IOException if reading the override file or eager FS/bucket setup fails.
   */
  private synchronized void configure(Configuration config) throws IOException {
    logger.atFine().log("GHFS.configure");
    logger.atFine().log("GHFS_ID = %s", GHFS_ID);
    // Apply the override file (if any) before reading any values out of 'config'.
    overrideConfigFromFile(config);
    copyDeprecatedConfigurationOptions(config);
    // Set this configuration as the default config for this instance.
    setConf(config);
    systemBucket = emptyToNull(GCS_SYSTEM_BUCKET.get(config, config::get));
    enableFlatGlob = GCS_FLAT_GLOB_ENABLE.get(config, config::getBoolean);
    enableConcurrentGlob = GCS_CONCURRENT_GLOB_ENABLE.get(config, config::getBoolean);
    checksumType = GCS_FILE_CHECKSUM_TYPE.get(config, config::getEnum);
    defaultBlockSize = BLOCK_SIZE.get(config, config::getLong);
    reportedPermissions = new FsPermission(PERMISSIONS_TO_REPORT.get(config, config::get));
    boolean createSystemBucket = GCS_CREATE_SYSTEM_BUCKET.get(config, config::getBoolean);
    if (gcsFsSupplier == null) {
      if (GCS_LAZY_INITIALIZATION_ENABLE.get(config, config::getBoolean)) {
        // Defer the GCS FS creation until first use; memoize() guarantees it runs at most once.
        gcsFsSupplier =
            Suppliers.memoize(
                () -> {
                  try {
                    GoogleCloudStorageFileSystem gcsFs = createGcsFs(config);
                    pathCodec = gcsFs.getPathCodec();
                    configureBuckets(gcsFs, systemBucket, createSystemBucket);
                    configureWorkingDirectory(config);
                    gcsFsInitialized = true;
                    return gcsFs;
                  } catch (IOException e) {
                    // Supplier.get() cannot throw checked exceptions, so wrap.
                    throw new RuntimeException("Failed to create GCS FS", e);
                  }
                });
        // The path codec is needed before the lazy FS exists; derive it from config now.
        pathCodec = getPathCodec(config);
      } else {
        setGcsFs(createGcsFs(config));
        configureBuckets(getGcsFs(), systemBucket, createSystemBucket);
        configureWorkingDirectory(config);
      }
    } else {
      // FS already exists: only refresh bucket setup and working directory.
      configureBuckets(getGcsFs(), systemBucket, createSystemBucket);
      configureWorkingDirectory(config);
    }
    logger.atFine().log("GHFS.configure: done");
  }
  /**
   * If overrides file configured, update properties from override file into {@link Configuration}
   * object.
   *
   * @throws IOException if the configured override file cannot be opened.
   */
  private void overrideConfigFromFile(Configuration config) throws IOException {
    String configFile = GCS_CONFIG_OVERRIDE_FILE.get(config, config::get);
    if (configFile != null) {
      // NOTE(review): the stream is handed off to Configuration.addResource; presumably it is
      // consumed and closed when the resource is loaded — confirm to rule out a leak.
      config.addResource(new FileInputStream(configFile));
    }
  }
private static PathCodec getPathCodec(Configuration config) {
String specifiedPathCodec = Ascii.toLowerCase(PATH_CODEC.get(config, config::get));
switch (specifiedPathCodec) {
case PATH_CODEC_USE_LEGACY_ENCODING:
return GoogleCloudStorageFileSystem.LEGACY_PATH_CODEC;
case PATH_CODEC_USE_URI_ENCODING:
return GoogleCloudStorageFileSystem.URI_ENCODED_PATH_CODEC;
default:
logger.atWarning().log(
"Unknown path codec specified %s. Using default / legacy.", specifiedPathCodec);
return GoogleCloudStorageFileSystem.LEGACY_PATH_CODEC;
}
}
private static GoogleCloudStorageFileSystem createGcsFs(Configuration config) throws IOException {
Credential credential;
try {
credential =
getCredential(
new AccessTokenProviderClassFromConfigFactory().withOverridePrefix("fs.gs"), config);
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
}
GoogleCloudStorageFileSystemOptions gcsFsOptions =
GoogleHadoopFileSystemConfiguration.getGcsFsOptionsBuilder(config)
.setPathCodec(getPathCodec(config))
.build();
return new GoogleCloudStorageFileSystem(credential, gcsFsOptions);
}
  /**
   * Validates and possibly creates the system bucket. Should be overridden to configure other
   * buckets.
   *
   * @param gcsFs {@link GoogleCloudStorageFileSystem} to configure buckets
   * @param systemBucketName Name of system bucket
   * @param createSystemBucket Whether or not to create systemBucketName if it does not exist.
   * @throws IOException if systemBucketName is invalid or cannot be found and createSystemBucket is
   *     false.
   */
  @VisibleForTesting
  protected void configureBuckets(
      GoogleCloudStorageFileSystem gcsFs, String systemBucketName, boolean createSystemBucket)
      throws IOException {
    logger.atFine().log("GHFS.configureBuckets: %s, %s", systemBucketName, createSystemBucket);
    systemBucket = systemBucketName;
    // systemBucket is null when the deprecated fs.gs.system.bucket setting is absent.
    if (systemBucket != null) {
      logger.atFine().log("GHFS.configureBuckets: Warning fs.gs.system.bucket is deprecated.");
      // Ensure that system bucket exists. It really must be a bucket, not a GCS path.
      URI systemBucketPath =
          gcsFs
              .getPathCodec()
              .getPath(systemBucket, /* objectName= */ null, /* allowEmptyObjectName= */ true);
      if (!gcsFs.exists(systemBucketPath)) {
        if (createSystemBucket) {
          gcsFs.mkdirs(systemBucketPath);
        } else {
          // Caller asked us not to create it, so a missing bucket is a hard error.
          throw new FileNotFoundException(
              String.format(
                  "%s: system bucket not found: %s", GCS_SYSTEM_BUCKET.getKey(), systemBucket));
        }
      }
    }
    logger.atFine().log("GHFS.configureBuckets:=>");
  }
private void configureWorkingDirectory(Configuration config) {
// Set initial working directory to root so that any configured value gets resolved
// against file system root.
workingDirectory = getFileSystemRoot();
Path newWorkingDirectory;
String configWorkingDirectory = GCS_WORKING_DIRECTORY.get(config, config::get);
if (Strings.isNullOrEmpty(configWorkingDirectory)) {
newWorkingDirectory = getDefaultWorkingDirectory();
logger.atWarning().log(
"No working directory configured, using default: '%s'", newWorkingDirectory);
} else {
newWorkingDirectory = new Path(configWorkingDirectory);
}
// Use the public method to ensure proper behavior of normalizing and resolving the new
// working directory relative to the initial filesystem-root directory.
setWorkingDirectory(newWorkingDirectory);
logger.atFine().log("%s = %s", GCS_WORKING_DIRECTORY.getKey(), getWorkingDirectory());
}
  /**
   * Assert that the FileSystem has been initialized and not close()d.
   *
   * @throws IOException if the filesystem is closed or was never initialized.
   */
  private void checkOpen() throws IOException {
    if (isClosed()) {
      throw new IOException("GoogleHadoopFileSystem has been closed or not initialized.");
    }
  }
  /**
   * Same open/initialized check as checkOpen(), but throws an unchecked exception for use in
   * contexts that cannot declare IOException.
   */
  protected void checkOpenUnchecked() {
    if (isClosed()) {
      throw new RuntimeException("GoogleHadoopFileSystem has been closed or not initialized.");
    }
  }
private boolean isClosed() {
return gcsFsSupplier == null || gcsFsSupplier.get() == null;
}
// =================================================================
// Overridden functions for debug tracing. The following functions
// do not change functionality. They just log parameters and call base
// class' function.
// =================================================================
@Override
public boolean deleteOnExit(Path f)
throws IOException {
checkOpen();
logger.atFine().log("GHFS.deleteOnExit: %s", f);
boolean result = super.deleteOnExit(f);
logger.atFine().log("GHFS.deleteOnExit:=> %s", result);
return result;
}
  /** Trace-logging override; delegates deleteOnExit processing to the superclass. */
  @Override
  protected void processDeleteOnExit() {
    logger.atFine().log("GHFS.processDeleteOnExit:");
    super.processDeleteOnExit();
  }
@Override
public ContentSummary getContentSummary(Path f)
throws IOException {
logger.atFine().log("GHFS.getContentSummary: %s", f);
ContentSummary result = super.getContentSummary(f);
logger.atFine().log("GHFS.getContentSummary:=> %s", result);
return result;
}
@Override
public Token<?> getDelegationToken(String renewer)
throws IOException {
logger.atFine().log("GHFS.getDelegationToken: renewer: %s", renewer);
Token<?> result = super.getDelegationToken(renewer);
logger.atFine().log("GHFS.getDelegationToken:=> %s", result);
return result;
}
  /** Trace-logging wrapper around the superclass multi-source {@code copyFromLocalFile}. */
  @Override
  public void copyFromLocalFile(boolean delSrc, boolean overwrite,
      Path[] srcs, Path dst)
      throws IOException {
    logger.atFine().log(
        "GHFS.copyFromLocalFile: delSrc: %s, overwrite: %s, #srcs: %s, dst: %s",
        delSrc, overwrite, srcs.length, dst);
    super.copyFromLocalFile(delSrc, overwrite, srcs, dst);
    logger.atFine().log("GHFS.copyFromLocalFile:=> ");
  }
  /** Trace-logging wrapper around the superclass single-source {@code copyFromLocalFile}. */
  @Override
  public void copyFromLocalFile(boolean delSrc, boolean overwrite,
      Path src, Path dst)
      throws IOException {
    logger.atFine().log(
        "GHFS.copyFromLocalFile: delSrc: %s, overwrite: %s, src: %s, dst: %s",
        delSrc, overwrite, src, dst);
    super.copyFromLocalFile(delSrc, overwrite, src, dst);
    logger.atFine().log("GHFS.copyFromLocalFile:=> ");
  }
  /** Trace-logging wrapper around the superclass {@code copyToLocalFile}. */
  @Override
  public void copyToLocalFile(boolean delSrc, Path src, Path dst)
      throws IOException {
    logger.atFine().log("GHFS.copyToLocalFile: delSrc: %s, src: %s, dst: %s", delSrc, src, dst);
    super.copyToLocalFile(delSrc, src, dst);
    logger.atFine().log("GHFS.copyToLocalFile:=> ");
  }
@Override
public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
logger.atFine().log("GHFS.startLocalOutput: out: %s, tmp: %s", fsOutputFile, tmpLocalFile);
Path result = super.startLocalOutput(fsOutputFile, tmpLocalFile);
logger.atFine().log("GHFS.startLocalOutput:=> %s", result);
return result;
}
@Override
public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
logger.atFine().log("GHFS.startLocalOutput: out: %s, tmp: %s", fsOutputFile, tmpLocalFile);
super.completeLocalOutput(fsOutputFile, tmpLocalFile);
logger.atFine().log("GHFS.completeLocalOutput:=> ");
}
  /**
   * Closes this FileSystem: runs the superclass cleanup first, then closes and releases the
   * underlying GCS filesystem (only if it was actually created), and finally logs the
   * accumulated counters.
   */
  @Override
  public void close() throws IOException {
    logger.atFine().log("GHFS.close:");
    super.close();
    // NB: We must *first* have the superclass close() before we close the underlying gcsFsSupplier
    // since the superclass may decide to perform various heavyweight cleanup operations (such as
    // deleteOnExit).
    if (gcsFsSupplier != null) {
      if (gcsFsInitialized) {
        // Skip closing when lazy initialization never materialized the GCS FS.
        getGcsFs().close();
      }
      gcsFsSupplier = null;
    }
    logCounters();
    logger.atFine().log("GHFS.close:=> ");
  }
@Override
public long getUsed()
throws IOException{
logger.atFine().log("GHFS.getUsed:");
long result = super.getUsed();
logger.atFine().log("GHFS.getUsed:=> %s", result);
return result;
}
@Override
public long getDefaultBlockSize() {
logger.atFine().log("GHFS.getDefaultBlockSize:");
long result = defaultBlockSize;
logger.atFine().log("GHFS.getDefaultBlockSize:=> %s", result);
return result;
}
@Override
public FileChecksum getFileChecksum(Path hadoopPath) throws IOException {
long startTime = System.nanoTime();
Preconditions.checkArgument(hadoopPath != null, "hadoopPath must not be null");
checkOpen();
URI gcsPath = getGcsPath(hadoopPath);
final FileInfo fileInfo = getGcsFs().getFileInfo(gcsPath);
if (!fileInfo.exists()) {
logger.atFine().log("GHFS.getFileStatus: not found: %s", gcsPath);
throw new FileNotFoundException(
(fileInfo.isDirectory() ? "Directory not found : " : "File not found : ") + hadoopPath);
}
FileChecksum checksum = getFileChecksum(checksumType, fileInfo);
logger.atFine().log("GHFS.getFileChecksum:=> %s", checksum);
long duration = System.nanoTime() - startTime;
increment(Counter.GET_FILE_CHECKSUM);
increment(Counter.GET_FILE_CHECKSUM_TIME, duration);
return checksum;
}
private static FileChecksum getFileChecksum(GcsFileChecksumType type, FileInfo fileInfo)
throws IOException {
switch (type) {
case NONE:
return null;
case CRC32C:
return new GcsFileChecksum(
type, fileInfo.getItemInfo().getVerificationAttributes().getCrc32c());
case MD5:
return new GcsFileChecksum(
type, fileInfo.getItemInfo().getVerificationAttributes().getMd5hash());
}
throw new IOException("Unrecognized GcsFileChecksumType: " + type);
}
  /** Trace-logging wrapper around the superclass {@code setVerifyChecksum}. */
  @Override
  public void setVerifyChecksum(boolean verifyChecksum) {
    logger.atFine().log("GHFS.setVerifyChecksum:");
    super.setVerifyChecksum(verifyChecksum);
    logger.atFine().log("GHFS.setVerifyChecksum:=> ");
  }
  /** Trace-logging wrapper around the superclass {@code setPermission}. */
  @Override
  public void setPermission(Path p, FsPermission permission)
      throws IOException {
    logger.atFine().log("GHFS.setPermission: path: %s, perm: %s", p, permission);
    super.setPermission(p, permission);
    logger.atFine().log("GHFS.setPermission:=> ");
  }
  /** Trace-logging wrapper around the superclass {@code setOwner}. */
  @Override
  public void setOwner(Path p, String username, String groupname)
      throws IOException {
    logger.atFine().log("GHFS.setOwner: path: %s, user: %s, group: %s", p, username, groupname);
    super.setOwner(p, username, groupname);
    logger.atFine().log("GHFS.setOwner:=> ");
  }
  /** Trace-logging wrapper around the superclass {@code setTimes}. */
  @Override
  public void setTimes(Path p, long mtime, long atime)
      throws IOException {
    logger.atFine().log("GHFS.setTimes: path: %s, mtime: %s, atime: %s", p, mtime, atime);
    super.setTimes(p, mtime, atime);
    logger.atFine().log("GHFS.setTimes:=> ");
  }
  // ===================================================================
  // Deprecated configuration key/default constants, retained only for
  // backwards compatibility. New code should use the corresponding
  // GoogleHadoopFileSystemConfiguration entries directly.
  // ===================================================================
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#PERMISSIONS_TO_REPORT} */
  @Deprecated
  public static final String PERMISSIONS_TO_REPORT_KEY =
      GoogleHadoopFileSystemConfiguration.PERMISSIONS_TO_REPORT.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#PERMISSIONS_TO_REPORT} */
  @Deprecated
  public static final String PERMISSIONS_TO_REPORT_DEFAULT =
      GoogleHadoopFileSystemConfiguration.PERMISSIONS_TO_REPORT.getDefault();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_BUFFER_SIZE} */
  @Deprecated
  public static final String BUFFERSIZE_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_BUFFER_SIZE.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_BUFFER_SIZE} */
  @Deprecated
  public static final int BUFFERSIZE_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_BUFFER_SIZE.getDefault();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_BUFFER_SIZE} */
  @Deprecated
  public static final String WRITE_BUFFERSIZE_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_UPLOAD_CHUNK_SIZE.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_BUFFER_SIZE} */
  @Deprecated
  public static final int WRITE_BUFFERSIZE_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_UPLOAD_CHUNK_SIZE.getDefault();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#BLOCK_SIZE} */
  @Deprecated
  public static final String BLOCK_SIZE_KEY =
      GoogleHadoopFileSystemConfiguration.BLOCK_SIZE.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#BLOCK_SIZE} */
  @Deprecated
  public static final int BLOCK_SIZE_DEFAULT =
      GoogleHadoopFileSystemConfiguration.BLOCK_SIZE.getDefault().intValue();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_SERVICE_ACCOUNT_ENABLE} */
  @Deprecated
  public static final String ENABLE_GCE_SERVICE_ACCOUNT_AUTH_KEY =
      GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_ENABLE.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_SERVICE_ACCOUNT_EMAIL} */
  @Deprecated
  public static final String SERVICE_ACCOUNT_AUTH_EMAIL_KEY =
      GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_EMAIL.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_SERVICE_ACCOUNT_KEY_FILE} */
  @Deprecated
  public static final String SERVICE_ACCOUNT_AUTH_KEYFILE_KEY =
      GoogleHadoopFileSystemConfiguration.AUTH_SERVICE_ACCOUNT_KEY_FILE.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PROJECT_ID} */
  @Deprecated
  public static final String GCS_PROJECT_ID_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_PROJECT_ID.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_REQUESTER_PAYS_MODE} */
  @Deprecated
  public static final String GCS_REQUESTER_PAYS_MODE_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_REQUESTER_PAYS_MODE.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_REQUESTER_PAYS_PROJECT_ID} */
  @Deprecated
  public static final String GCS_REQUESTER_PAYS_PROJECT_ID_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_REQUESTER_PAYS_PROJECT_ID.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_REQUESTER_PAYS_BUCKETS} */
  @Deprecated
  public static final String GCS_REQUESTER_PAYS_BUCKETS_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_REQUESTER_PAYS_BUCKETS.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_CLIENT_ID} */
  @Deprecated
  public static final String GCS_CLIENT_ID_KEY =
      GoogleHadoopFileSystemConfiguration.AUTH_CLIENT_ID.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#AUTH_CLIENT_SECRET} */
  @Deprecated
  public static final String GCS_CLIENT_SECRET_KEY =
      GoogleHadoopFileSystemConfiguration.AUTH_CLIENT_SECRET.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_SYSTEM_BUCKET} */
  @Deprecated
  public static final String GCS_SYSTEM_BUCKET_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_SYSTEM_BUCKET.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_CREATE_SYSTEM_BUCKET} */
  @Deprecated
  public static final String GCS_CREATE_SYSTEM_BUCKET_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_CREATE_SYSTEM_BUCKET.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_CREATE_SYSTEM_BUCKET} */
  @Deprecated
  public static final boolean GCS_CREATE_SYSTEM_BUCKET_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_CREATE_SYSTEM_BUCKET.getDefault();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_WORKING_DIRECTORY} */
  @Deprecated
  public static final String GCS_WORKING_DIRECTORY_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_WORKING_DIRECTORY.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_FILE_SIZE_LIMIT_250GB} */
  @Deprecated
  public static final String GCS_FILE_SIZE_LIMIT_250GB =
      GoogleHadoopFileSystemConfiguration.GCS_FILE_SIZE_LIMIT_250GB.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_FILE_SIZE_LIMIT_250GB} */
  @Deprecated
  public static final boolean GCS_FILE_SIZE_LIMIT_250GB_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_FILE_SIZE_LIMIT_250GB.getDefault();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MARKER_FILE_PATTERN} */
  @Deprecated
  public static final String GCS_MARKER_FILE_PATTERN_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_MARKER_FILE_PATTERN.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_ENABLE} */
  @Deprecated
  public static final String GCS_ENABLE_PERFORMANCE_CACHE_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_ENABLE.getKey();
  /** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_ENABLE} */
  @Deprecated
  public static final boolean GCS_ENABLE_PERFORMANCE_CACHE_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_ENABLE.getDefault();
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS}
   */
  @Deprecated
  public static final String GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS.getKey();
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS}
   */
  @Deprecated
  public static final long GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_MAX_ENTRY_AGE_MILLIS.getDefault();
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE}
   */
  @Deprecated
  public static final String GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE.getKey();
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE}
   */
  @Deprecated
  public static final boolean GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_LIST_CACHING_ENABLE.getDefault();
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT}
   */
  @Deprecated
  public static final String GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT
          .getKey();
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT}
   */
  @Deprecated
  public static final long GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_PERFORMANCE_CACHE_DIR_METADATA_PREFETCH_LIMIT
          .getDefault();
  /**
   * @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_ENABLE}
   */
  @Deprecated
  public static final String GCS_PARENT_TIMESTAMP_UPDATE_ENABLE_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_ENABLE.getKey();
  /**
   * @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_ENABLE}
   */
  @Deprecated
  public static final boolean GCS_PARENT_TIMESTAMP_UPDATE_ENABLE_DEFAULT =
      GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_ENABLE.getDefault();
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES}
   */
  @Deprecated
  public static final String GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_KEY =
      GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES.getKey();
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES}
   */
  @Deprecated
  public static final String GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES_DEFAULT =
      Joiner.on(',')
          .join(
              GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_EXCLUDES
                  .getDefault());
  /**
   * @deprecated use {@link
   *     GoogleHadoopFileSystemConfiguration#MR_JOB_HISTORY_INTERMEDIATE_DONE_DIR_KEY}
   */
  @Deprecated
  public static final String MR_JOB_HISTORY_INTERMEDIATE_DONE_DIR_KEY =
      GoogleHadoopFileSystemConfiguration.MR_JOB_HISTORY_INTERMEDIATE_DONE_DIR_KEY;
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#MR_JOB_HISTORY_DONE_DIR_KEY} */
@Deprecated
public static final String MR_JOB_HISTORY_DONE_DIR_KEY =
GoogleHadoopFileSystemConfiguration.MR_JOB_HISTORY_DONE_DIR_KEY;
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES}
*/
@Deprecated
public static final String GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES}
*/
@Deprecated
public static final String GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES_DEFAULT =
Joiner.on(',')
.join(
GoogleHadoopFileSystemConfiguration.GCS_PARENT_TIMESTAMP_UPDATE_INCLUDES
.getDefault());
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_REPAIR_IMPLICIT_DIRECTORIES_ENABLE}
*/
@Deprecated
public static final String GCS_ENABLE_REPAIR_IMPLICIT_DIRECTORIES_KEY =
GoogleHadoopFileSystemConfiguration.GCS_REPAIR_IMPLICIT_DIRECTORIES_ENABLE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_REPAIR_IMPLICIT_DIRECTORIES_ENABLE}
*/
@Deprecated
public static final boolean GCS_ENABLE_REPAIR_IMPLICIT_DIRECTORIES_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_REPAIR_IMPLICIT_DIRECTORIES_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#PATH_CODEC} */
@Deprecated
public static final String PATH_CODEC_KEY =
GoogleHadoopFileSystemConfiguration.PATH_CODEC.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#PATH_CODEC} */
@Deprecated
public static final String PATH_CODEC_DEFAULT =
GoogleHadoopFileSystemConfiguration.PATH_CODEC.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INFER_IMPLICIT_DIRECTORIES_ENABLE}
*/
@Deprecated
public static final String GCS_ENABLE_INFER_IMPLICIT_DIRECTORIES_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INFER_IMPLICIT_DIRECTORIES_ENABLE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INFER_IMPLICIT_DIRECTORIES_ENABLE}
*/
@Deprecated
public static final boolean GCS_ENABLE_INFER_IMPLICIT_DIRECTORIES_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INFER_IMPLICIT_DIRECTORIES_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_FLAT_GLOB_ENABLE} */
@Deprecated
public static final String GCS_ENABLE_FLAT_GLOB_KEY =
GoogleHadoopFileSystemConfiguration.GCS_FLAT_GLOB_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_FLAT_GLOB_ENABLE} */
@Deprecated
public static final boolean GCS_ENABLE_FLAT_GLOB_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_FLAT_GLOB_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MARKER_FILE_CREATION_ENABLE} */
@Deprecated
public static final String GCS_ENABLE_MARKER_FILE_CREATION_KEY =
GoogleHadoopFileSystemConfiguration.GCS_MARKER_FILE_CREATION_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MARKER_FILE_CREATION_ENABLE} */
@Deprecated
public static final boolean GCS_ENABLE_MARKER_FILE_CREATION_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_MARKER_FILE_CREATION_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_WITH_REWRITE_ENABLE} */
@Deprecated
public static final String GCS_ENABLE_COPY_WITH_REWRITE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_COPY_WITH_REWRITE_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_WITH_REWRITE_ENABLE} */
@Deprecated
public static final boolean GCS_ENABLE_COPY_WITH_REWRITE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_COPY_WITH_REWRITE_ENABLE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_MAX_REQUESTS_PER_BATCH} */
@Deprecated
public static final String GCS_COPY_MAX_REQUESTS_PER_BATCH =
GoogleHadoopFileSystemConfiguration.GCS_COPY_MAX_REQUESTS_PER_BATCH.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_MAX_REQUESTS_PER_BATCH} */
@Deprecated
public static final long GCS_COPY_MAX_REQUESTS_PER_BATCH_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_COPY_MAX_REQUESTS_PER_BATCH.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_BATCH_THREADS} */
@Deprecated
public static final String GCS_COPY_BATCH_THREADS =
GoogleHadoopFileSystemConfiguration.GCS_COPY_BATCH_THREADS.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_COPY_BATCH_THREADS} */
@Deprecated
public static final int GCS_COPY_BATCH_THREADS_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_COPY_BATCH_THREADS.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MAX_LIST_ITEMS_PER_CALL} */
@Deprecated
public static final String GCS_MAX_LIST_ITEMS_PER_CALL =
GoogleHadoopFileSystemConfiguration.GCS_MAX_LIST_ITEMS_PER_CALL.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MAX_LIST_ITEMS_PER_CALL} */
@Deprecated
public static final long GCS_MAX_LIST_ITEMS_PER_CALL_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_MAX_LIST_ITEMS_PER_CALL.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MAX_REQUESTS_PER_BATCH} */
@Deprecated
public static final String GCS_MAX_REQUESTS_PER_BATCH =
GoogleHadoopFileSystemConfiguration.GCS_MAX_REQUESTS_PER_BATCH.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_MAX_REQUESTS_PER_BATCH} */
@Deprecated
public static final long GCS_MAX_REQUESTS_PER_BATCH_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_MAX_REQUESTS_PER_BATCH.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_BATCH_THREADS} */
@Deprecated
public static final String GCS_BATCH_THREADS =
GoogleHadoopFileSystemConfiguration.GCS_BATCH_THREADS.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_BATCH_THREADS} */
@Deprecated
public static final int GCS_BATCH_THREADS_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_BATCH_THREADS.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_MAX_RETRY} */
@Deprecated
public static final String GCS_HTTP_MAX_RETRY_KEY =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_MAX_RETRY.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_MAX_RETRY} */
@Deprecated
public static final int GCS_HTTP_MAX_RETRY_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_MAX_RETRY.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_CONNECT_TIMEOUT} */
@Deprecated
public static final String GCS_HTTP_CONNECT_TIMEOUT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_CONNECT_TIMEOUT.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_CONNECT_TIMEOUT} */
@Deprecated
public static final int GCS_HTTP_CONNECT_TIMEOUT_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_CONNECT_TIMEOUT.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_READ_TIMEOUT} */
@Deprecated
public static final String GCS_HTTP_READ_TIMEOUT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_READ_TIMEOUT.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_READ_TIMEOUT} */
@Deprecated
public static final int GCS_HTTP_READ_TIMEOUT_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_READ_TIMEOUT.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PROXY_ADDRESS} */
@Deprecated
public static final String GCS_PROXY_ADDRESS_KEY =
GoogleHadoopFileSystemConfiguration.GCS_PROXY_ADDRESS.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_PROXY_ADDRESS} */
@Deprecated
public static final String GCS_PROXY_ADDRESS_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_PROXY_ADDRESS.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_TRANSPORT} */
@Deprecated
public static final String GCS_HTTP_TRANSPORT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_HTTP_TRANSPORT.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_HTTP_TRANSPORT} */
@Deprecated public static final String GCS_HTTP_TRANSPORT_DEFAULT = null;
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_APPLICATION_NAME_SUFFIX} */
@Deprecated
public static final String GCS_APPLICATION_NAME_SUFFIX_KEY =
GoogleHadoopFileSystemConfiguration.GCS_APPLICATION_NAME_SUFFIX.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_APPLICATION_NAME_SUFFIX} */
@Deprecated
public static final String GCS_APPLICATION_NAME_SUFFIX_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_APPLICATION_NAME_SUFFIX.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE}
*/
@Deprecated
public static final String GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE}
*/
@Deprecated
public static final int GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_MAX_WAIT_MILLIS_EMPTY_OBJECT_CREATE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_TYPE} */
@Deprecated
public static final String GCS_OUTPUTSTREAM_TYPE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_TYPE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_OUTPUT_STREAM_TYPE} */
@Deprecated
public static final String GCS_OUTPUTSTREAM_TYPE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_OUTPUT_STREAM_TYPE.getDefault().toString();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_GENERATION_READ_CONSISTENCY} */
@Deprecated
public static final String GCS_GENERATION_READ_CONSISTENCY_KEY =
GoogleHadoopFileSystemConfiguration.GCS_GENERATION_READ_CONSISTENCY.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_GENERATION_READ_CONSISTENCY} */
@Deprecated
public static final GenerationReadConsistency GCS_GENERATION_READ_CONSISTENCY_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_GENERATION_READ_CONSISTENCY.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE}
*/
@Deprecated
public static final String GCS_INPUTSTREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE}
*/
@Deprecated
public static final boolean GCS_INPUTSTREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_FAST_FAIL_ON_NOT_FOUND_ENABLE
.getDefault();
/**
* @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_INPLACE_SEEK_LIMIT}
*/
@Deprecated
public static final String GCS_INPUTSTREAM_INPLACE_SEEK_LIMIT_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_INPLACE_SEEK_LIMIT.getKey();
/**
* @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_INPLACE_SEEK_LIMIT}
*/
@Deprecated
public static final long GCS_INPUTSTREAM_INPLACE_SEEK_LIMIT_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_INPLACE_SEEK_LIMIT.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_FADVISE} */
@Deprecated
public static final String GCS_INPUTSTREAM_FADVISE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_FADVISE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_FADVISE} */
@Deprecated
public static final Fadvise GCS_INPUTSTREAM_FADVISE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_FADVISE.getDefault();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_MIN_RANGE_REQUEST_SIZE}
*/
@Deprecated
public static final String GCS_INPUTSTREAM_MIN_RANGE_REQUEST_SIZE_KEY =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_MIN_RANGE_REQUEST_SIZE.getKey();
/**
* @deprecated use {@link
* GoogleHadoopFileSystemConfiguration#GCS_INPUT_STREAM_MIN_RANGE_REQUEST_SIZE}
*/
@Deprecated
public static final int GCS_INPUTSTREAM_MIN_RANGE_REQUEST_SIZE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCS_INPUT_STREAM_MIN_RANGE_REQUEST_SIZE.getDefault();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCE_BUCKET_DELETE_ENABLE} */
@Deprecated
public static final String GCE_BUCKET_DELETE_ENABLE_KEY =
GoogleHadoopFileSystemConfiguration.GCE_BUCKET_DELETE_ENABLE.getKey();
/** @deprecated use {@link GoogleHadoopFileSystemConfiguration#GCE_BUCKET_DELETE_ENABLE} */
@Deprecated
public static final boolean GCE_BUCKET_DELETE_ENABLE_DEFAULT =
GoogleHadoopFileSystemConfiguration.GCE_BUCKET_DELETE_ENABLE.getDefault();
}
|
Use daemon threads for concurrent globbing.
This will prevent Spark jobs from hanging after all work is done.
Fixes #150:
https://github.com/GoogleCloudPlatform/bigdata-interop/issues/150
Change on 2019/02/11 by idv <[email protected]>
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=233437145
|
gcs/src/main/java/com/google/cloud/hadoop/fs/gcs/GoogleHadoopFileSystemBase.java
|
Use daemon threads for concurrent globbing.
|
<ide><path>cs/src/main/java/com/google/cloud/hadoop/fs/gcs/GoogleHadoopFileSystemBase.java
<ide> import com.google.common.collect.Maps;
<ide> import com.google.common.collect.Sets;
<ide> import com.google.common.flogger.GoogleLogger;
<add>import com.google.common.util.concurrent.ThreadFactoryBuilder;
<ide> import java.io.DataInput;
<ide> import java.io.DataOutput;
<ide> import java.io.FileInputStream;
<ide> import java.util.concurrent.ExecutionException;
<ide> import java.util.concurrent.ExecutorService;
<ide> import java.util.concurrent.Executors;
<add>import java.util.concurrent.ThreadFactory;
<ide> import java.util.concurrent.TimeUnit;
<ide> import java.util.concurrent.atomic.AtomicLong;
<ide> import java.util.function.Supplier;
<ide> logger.atFine().log("GHFS version: %s", VERSION);
<ide> GHFS_ID = String.format("GHFS/%s", VERSION);
<ide> }
<add>
<add> private static final ThreadFactory DAEMON_THREAD_FACTORY =
<add> new ThreadFactoryBuilder().setNameFormat("ghfs-thread-%d").setDaemon(true).build();
<ide>
<ide> @VisibleForTesting
<ide> boolean enableFlatGlob = GCS_FLAT_GLOB_ENABLE.getDefault();
<ide> return globInternal(fixedPath, filter, pathPattern);
<ide> }
<ide>
<add> /**
<add> * Use 2 glob algorithms that return the same result but one of them could be significantly faster
<add> * than another one depending on directory layout.
<add> */
<ide> private FileStatus[] concurrentGlobInternal(Path fixedPath, PathFilter filter, Path pathPattern)
<ide> throws IOException {
<del> ExecutorService executorService = Executors.newFixedThreadPool(2);
<add> ExecutorService executorService = Executors.newFixedThreadPool(2, DAEMON_THREAD_FACTORY);
<ide> Callable<FileStatus[]> flatGlobTask = () -> flatGlobInternal(fixedPath, filter);
<ide> Callable<FileStatus[]> nonFlatGlobTask = () -> globInternal(fixedPath, filter, pathPattern);
<ide>
|
|
JavaScript
|
mit
|
e016cfa6b7add0373148303ca64b13f53216b536
| 0 |
rwaldron/jquery-mobile,startxfr/jquery-mobile,startxfr/jquery-mobile,arschmitz/uglymongrel-jquery-mobile,startxfr/jquery-mobile,arschmitz/uglymongrel-jquery-mobile,rwaldron/jquery-mobile,npmcomponent/cbou-jquery-mobile,arschmitz/jquery-mobile,hinaloe/jqm-demo-ja,arschmitz/uglymongrel-jquery-mobile,arschmitz/jquery-mobile,arschmitz/jquery-mobile,npmcomponent/cbou-jquery-mobile,hinaloe/jqm-demo-ja,npmcomponent/cbou-jquery-mobile,hinaloe/jqm-demo-ja
|
/*
* jQuery Mobile Framework : prototype for "globalnav" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
/*
 * Prototype "globalnav" plugin: turns the matched container into a fixed
 * global navigation bar (header or footer) with evenly-sized tabs.
 * settings.fixedAs: 'footer' (default) or 'header' — where the bar is pinned.
 */
(function($){
	$.fn.globalnav = function(settings){
		// Enhance every matched element (normally just one nav container).
		return $(this).each(function(){
			var $el = $(this);
			// Guard: bail out if this element has already been enhanced.
			if($el.find('.ui-globalnav').length){ return; }
			// Merge caller-supplied settings over the defaults.
			var opts = $.extend({
				fixedAs: 'footer'
			},settings);
			// Wrap the contents in the nav bar and pick the header/footer class.
			// NOTE(review): children(0) presumably relies on jQuery ignoring the
			// non-string selector, i.e. it behaves like .children() — verify.
			var barClass = opts.fixedAs == 'footer' ? 'ui-footer' : 'ui-header';
			var $bar = $el.wrapInner('<div class="ui-globalnav ui-bar-a"></div>').children(0).addClass(barClass);
			// Turn the bar into a fixed header/footer.
			$el.fixHeaderFooter();
			// Give each tab an equal share of the width (to be improved later).
			var $tabs = $bar.find('li');
			$tabs.width(100/$tabs.length+'%');
			// Style anchors as top-icon buttons and move the active state on tap.
			// NOTE: active-tab highlighting at load time is still missing.
			$tabs.find('a')
				.buttonMarkup({corners: false, iconPos: 'top', icon: 'arrow-u'})
				.bind('tap',function(){
					$tabs.find('.ui-btn-active').removeClass('ui-btn-active');
					$(this).addClass('ui-btn-active');
				});
		});
	};
})(jQuery);
|
js/jQuery.mobile.globalnav.js
|
/*
* jQuery Mobile Framework : prototype for "globalnav" plugin
* Copyright (c) jQuery Project
* Dual licensed under the MIT (MIT-LICENSE.txt) and GPL (GPL-LICENSE.txt) licenses.
* Note: Code is in draft form and is subject to change
*/
(function($){
	// Prototype "globalnav" plugin: converts the matched container into a
	// fixed global navigation bar (header or footer) with evenly-sized tabs.
	// settings.fixedAs: 'footer' (default) or 'header' — where the bar is pinned.
	$.fn.globalnav = function(settings){
		return $(this).each(function(){ //there should only ever be one of these... is each necessary?
			// Merge caller-supplied settings over the defaults.
			var o = $.extend({
				fixedAs: 'footer'
			},settings);
			//wrap it with footer classes
			// NOTE(review): children(0) presumably relies on jQuery ignoring the
			// non-string selector, i.e. it behaves like .children() — verify.
			var globalnav = $(this).wrapInner('<div class="ui-globalnav ui-bar-a"></div>').children(0).addClass(o.fixedAs == 'footer' ? 'ui-footer' : 'ui-header');
			//apply fixed footer markup to ui-footer
			$(this).fixHeaderFooter();
			//set up the nav tabs widths (currently evenly divided widths, to be improved later)
			var navtabs = globalnav.find('li');
			navtabs.width(100/navtabs.length+'%');
			//apply state on click and at load
			//NOTE: we'll need to find a way to highlight an active tab at load as well
			// Style each anchor as a top-icon button, then move the
			// ui-btn-active class to the tapped tab on 'tap'.
			navtabs.find('a')
				.buttonMarkup({corners: false, iconPos: 'top', icon: 'arrow-u'})
				.bind('tap',function(){
					navtabs.find('.ui-btn-active').removeClass('ui-btn-active');
					$(this).addClass('ui-btn-active');
				});
		});
	};
})(jQuery);
|
Make sure globalnav is initialized only once per element
|
js/jQuery.mobile.globalnav.js
|
Make sure globalnav is initialized only once per element
|
<ide><path>s/jQuery.mobile.globalnav.js
<ide> (function($){
<ide> $.fn.globalnav = function(settings){
<ide> return $(this).each(function(){ //there should only ever be one of these... is each necessary?
<add> if($(this).find('.ui-globalnav').length){ return; }
<ide> var o = $.extend({
<ide> fixedAs: 'footer'
<ide> },settings);
|
|
Java
|
apache-2.0
|
3b68d4c694fe879f6faa609431b291d8039f269a
| 0 |
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
|
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.jdisc.http.server.jetty;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.entity.GzipCompressingEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.ssl.DefaultHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.entity.mime.FormBodyPart;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* A simple http client for testing
*
* @author Simon Thoresen Hult
* @author bjorncs
*/
/**
 * Small HTTP(S) client used by the jetty server tests: builds requests
 * fluently and validates responses with Hamcrest matchers.
 *
 * @author Simon Thoresen Hult
 * @author bjorncs
 */
public class SimpleHttpClient implements AutoCloseable {

    // Underlying Apache client that performs the actual requests.
    private final CloseableHttpClient delegate;
    // "http" or "https", chosen from whether an SSLContext was supplied.
    private final String scheme;
    // Port the server under test listens on; baked into every request URI.
    private final int listenPort;

    /** Creates a client using the default TLS protocol versions and cipher suites. */
    public SimpleHttpClient(SSLContext sslContext, int listenPort, boolean useCompression) {
        this(sslContext, null, null, listenPort, useCompression);
    }

    /**
     * Creates a client talking to localhost:listenPort.
     *
     * @param sslContext       if non-null, requests use HTTPS with this context; otherwise plain HTTP
     * @param enabledProtocols TLS protocol versions to enable, or null for the defaults
     * @param enabledCiphers   cipher suites to enable, or null for the defaults
     * @param listenPort       target port on localhost
     * @param useCompression   if false, response content compression is disabled
     */
    public SimpleHttpClient(SSLContext sslContext, List<String> enabledProtocols, List<String> enabledCiphers,
                            int listenPort, boolean useCompression) {
        this.listenPort = listenPort;
        HttpClientBuilder clientBuilder = HttpClientBuilder.create();
        if (!useCompression) {
            clientBuilder.disableContentCompression();
        }
        if (sslContext == null) {
            this.scheme = "http";
        } else {
            this.scheme = "https";
            SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(
                    sslContext,
                    toArray(enabledProtocols),
                    toArray(enabledCiphers),
                    new DefaultHostnameVerifier());
            clientBuilder.setSSLSocketFactory(socketFactory);
            // Restrict the connection manager to the https socket factory above.
            Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
                    .register("https", socketFactory)
                    .build();
            clientBuilder.setConnectionManager(new BasicHttpClientConnectionManager(registry));
        }
        this.delegate = clientBuilder.build();
    }

    /** Null-safe list-to-array conversion; null means "use the library defaults". */
    private static String[] toArray(List<String> values) {
        if (values == null) {
            return null;
        }
        return values.toArray(new String[0]);
    }

    /** Builds an absolute URI for the given path on the local test server. */
    public URI newUri(String path) {
        return URI.create(scheme + "://localhost:" + listenPort + path);
    }

    /** Starts building a GET request for the given path. */
    public RequestExecutor newGet(String path) {
        return newRequest(new HttpGet(newUri(path)));
    }

    /** Starts building a POST request for the given path. */
    public RequestExecutor newPost(String path) {
        return newRequest(new HttpPost(newUri(path)));
    }

    /** Wraps an arbitrary request in a {@link RequestExecutor}. */
    public RequestExecutor newRequest(HttpUriRequest request) {
        return new RequestExecutor().setRequest(request);
    }

    /** Executes the given request and returns a validator over the response. */
    public ResponseValidator execute(HttpUriRequest request) throws IOException {
        return newRequest(request).execute();
    }

    /** Convenience: GET the given path and validate the response. */
    public ResponseValidator get(String path) throws IOException {
        return newGet(path).execute();
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }

    /** Fluent builder for a single request: headers and an optional body. */
    public class RequestExecutor {

        private HttpUriRequest request;
        private HttpEntity entity;

        public RequestExecutor setRequest(HttpUriRequest request) {
            this.request = request;
            return this;
        }

        public RequestExecutor addHeader(String name, String value) {
            request.addHeader(name, value);
            return this;
        }

        /** Sets a UTF-8 string body. */
        public RequestExecutor setContent(String content) {
            entity = new StringEntity(content, StandardCharsets.UTF_8);
            return this;
        }

        /** Sets a gzip-compressed UTF-8 string body. */
        public RequestExecutor setGzipContent(String content) {
            entity = new GzipCompressingEntity(new StringEntity(content, StandardCharsets.UTF_8));
            return this;
        }

        /** Sets a raw byte-array body. */
        public RequestExecutor setBinaryContent(byte[] content) {
            entity = new ByteArrayEntity(content);
            return this;
        }

        /** Sets a multipart/form-data body assembled from the given parts. */
        public RequestExecutor setMultipartContent(FormBodyPart... parts) {
            MultipartEntityBuilder multipart = MultipartEntityBuilder.create();
            for (FormBodyPart part : parts) {
                multipart.addPart(part.getName(), part.getBody());
            }
            entity = multipart.build();
            return this;
        }

        /**
         * Executes the request and returns a validator over the response.
         * NOTE(review): a non-null entity is attached via an HttpPost cast,
         * so entity-bearing requests are assumed to be POSTs — anything
         * else would throw ClassCastException here.
         */
        public ResponseValidator execute() throws IOException {
            if (entity != null) {
                ((HttpPost) request).setEntity(entity);
            }
            try (CloseableHttpResponse response = delegate.execute(request)) {
                return new ResponseValidator(response);
            }
        }
    }

    /** Assertion helpers over a completed HTTP response. */
    public static class ResponseValidator {

        private final HttpResponse response;
        // Response body decoded as UTF-8, or null if there was no entity.
        private final String content;

        public ResponseValidator(HttpResponse response) throws IOException {
            this.response = response;
            HttpEntity entity = response.getEntity();
            this.content = (entity == null) ? null : EntityUtils.toString(entity, StandardCharsets.UTF_8);
        }

        /** Asserts on the response status code. */
        public ResponseValidator expectStatusCode(Matcher<Integer> matcher) {
            MatcherAssert.assertThat(response.getStatusLine().getStatusCode(), matcher);
            return this;
        }

        /** Asserts that the named header is present and its value matches. */
        public ResponseValidator expectHeader(String headerName, Matcher<String> matcher) {
            Header header = response.getFirstHeader(headerName);
            String value = (header == null) ? null : header.getValue();
            MatcherAssert.assertThat(value, matcher);
            assertThat(header, is(not(nullValue())));
            return this;
        }

        /** Asserts that the named header is absent. */
        public ResponseValidator expectNoHeader(String headerName) {
            Header header = response.getFirstHeader(headerName);
            assertThat(header, is(nullValue()));
            return this;
        }

        /** Asserts on the response body (decoded as UTF-8). */
        public ResponseValidator expectContent(Matcher<String> matcher) throws IOException {
            MatcherAssert.assertThat(content, matcher);
            return this;
        }
    }
}
|
jdisc_http_service/src/test/java/com/yahoo/jdisc/http/server/jetty/SimpleHttpClient.java
|
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.jdisc.http.server.jetty;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.entity.GzipCompressingEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.ssl.DefaultHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.entity.mime.FormBodyPart;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* A simple http client for testing
*
* @author Simon Thoresen Hult
* @author bjorncs
*/
public class SimpleHttpClient implements AutoCloseable {

    // Underlying Apache client that performs the actual requests.
    private final CloseableHttpClient delegate;
    // "http" or "https", chosen from whether an SSLContext was supplied.
    private final String scheme;
    // Port the server under test listens on; baked into every request URI.
    private final int listenPort;

    /**
     * Creates a client talking to localhost:listenPort.
     *
     * @param sslContext     if non-null, requests use HTTPS with this context; otherwise plain HTTP
     * @param listenPort     target port on localhost
     * @param useCompression if false, response content compression is disabled
     */
    public SimpleHttpClient(final SSLContext sslContext, final int listenPort, final boolean useCompression) {
        HttpClientBuilder builder = HttpClientBuilder.create();
        if (!useCompression) {
            builder.disableContentCompression();
        }
        if (sslContext != null) {
            SSLConnectionSocketFactory sslConnectionFactory = new SSLConnectionSocketFactory(
                    sslContext,
                    new DefaultHostnameVerifier());
            builder.setSSLSocketFactory(sslConnectionFactory);
            // Restrict the connection manager to the https socket factory above.
            Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
                    .register("https", sslConnectionFactory)
                    .build();
            builder.setConnectionManager(new BasicHttpClientConnectionManager(registry));
            scheme = "https";
        } else {
            scheme = "http";
        }
        this.delegate = builder.build();
        this.listenPort = listenPort;
    }

    /** Builds an absolute URI for the given path on localhost:listenPort. */
    public URI newUri(final String path) {
        return URI.create(scheme + "://localhost:" + listenPort + path);
    }

    /** Starts building a GET request for the given path. */
    public RequestExecutor newGet(final String path) {
        return newRequest(new HttpGet(newUri(path)));
    }

    /** Starts building a POST request for the given path. */
    public RequestExecutor newPost(final String path) {
        return newRequest(new HttpPost(newUri(path)));
    }

    /** Wraps an arbitrary request in a {@link RequestExecutor}. */
    public RequestExecutor newRequest(final HttpUriRequest request) {
        return new RequestExecutor().setRequest(request);
    }

    /** Executes the given request and returns a validator over the response. */
    public ResponseValidator execute(final HttpUriRequest request) throws IOException {
        return newRequest(request).execute();
    }

    /** Convenience: GET the given path and return a response validator. */
    public ResponseValidator get(final String path) throws IOException {
        return newGet(path).execute();
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }

    /** Fluent builder for a single request: headers and an optional body. */
    public class RequestExecutor {

        private HttpUriRequest request;
        private HttpEntity entity;

        public RequestExecutor setRequest(final HttpUriRequest request) {
            this.request = request;
            return this;
        }

        public RequestExecutor addHeader(final String name, final String value) {
            this.request.addHeader(name, value);
            return this;
        }

        /** Sets a UTF-8 string body. */
        public RequestExecutor setContent(final String content) {
            this.entity = new StringEntity(content, StandardCharsets.UTF_8);
            return this;
        }

        /** Sets a gzip-compressed UTF-8 string body. */
        public RequestExecutor setGzipContent(String content) {
            this.entity = new GzipCompressingEntity(new StringEntity(content, StandardCharsets.UTF_8));
            return this;
        }

        /** Sets a raw byte-array body. */
        public RequestExecutor setBinaryContent(final byte[] content) {
            this.entity = new ByteArrayEntity(content);
            return this;
        }

        /** Sets a multipart/form-data body assembled from the given parts. */
        public RequestExecutor setMultipartContent(final FormBodyPart... parts) {
            MultipartEntityBuilder builder = MultipartEntityBuilder.create();
            Arrays.stream(parts).forEach(part -> builder.addPart(part.getName(), part.getBody()));
            this.entity = builder.build();
            return this;
        }

        /**
         * Executes the request and returns a validator over the response.
         * NOTE(review): a non-null entity is attached via an HttpPost cast, so
         * entity-bearing requests are assumed to be POSTs — anything else
         * would throw ClassCastException here.
         */
        public ResponseValidator execute() throws IOException {
            if (entity != null) {
                ((HttpPost)request).setEntity(entity);
            }
            try (CloseableHttpResponse response = delegate.execute(request)){
                return new ResponseValidator(response);
            }
        }
    }

    /** Assertion helpers over a completed HTTP response. */
    public static class ResponseValidator {

        private final HttpResponse response;
        // Response body decoded as UTF-8, or null if there was no entity.
        private final String content;

        public ResponseValidator(final HttpResponse response) throws IOException {
            this.response = response;
            final HttpEntity entity = response.getEntity();
            this.content = entity == null ? null :
                           EntityUtils.toString(entity, StandardCharsets.UTF_8);
        }

        /** Asserts on the response status code. */
        public ResponseValidator expectStatusCode(final Matcher<Integer> matcher) {
            MatcherAssert.assertThat(response.getStatusLine().getStatusCode(), matcher);
            return this;
        }

        /** Asserts that the named header is present and its value matches. */
        public ResponseValidator expectHeader(final String headerName, final Matcher<String> matcher) {
            final Header firstHeader = response.getFirstHeader(headerName);
            final String headerValue = firstHeader != null ? firstHeader.getValue() : null;
            MatcherAssert.assertThat(headerValue, matcher);
            assertThat(firstHeader, is(not(nullValue())));
            return this;
        }

        /** Asserts that the named header is absent. */
        public ResponseValidator expectNoHeader(final String headerName) {
            final Header firstHeader = response.getFirstHeader(headerName);
            assertThat(firstHeader, is(nullValue()));
            return this;
        }

        /** Asserts on the response body (decoded as UTF-8). */
        public ResponseValidator expectContent(final Matcher<String> matcher) throws IOException {
            MatcherAssert.assertThat(content, matcher);
            return this;
        }
    }
}
|
Add constructor overload with list of enabled ciphers and protocol versions
|
jdisc_http_service/src/test/java/com/yahoo/jdisc/http/server/jetty/SimpleHttpClient.java
|
Add constructor overload with list of enabled ciphers and protocol versions
|
<ide><path>disc_http_service/src/test/java/com/yahoo/jdisc/http/server/jetty/SimpleHttpClient.java
<ide> import java.net.URI;
<ide> import java.nio.charset.StandardCharsets;
<ide> import java.util.Arrays;
<add>import java.util.List;
<ide>
<ide> import static org.hamcrest.CoreMatchers.is;
<ide> import static org.hamcrest.CoreMatchers.not;
<ide> private final String scheme;
<ide> private final int listenPort;
<ide>
<del> public SimpleHttpClient(final SSLContext sslContext, final int listenPort, final boolean useCompression) {
<add> public SimpleHttpClient(SSLContext sslContext, int listenPort, boolean useCompression) {
<add> this(sslContext, null, null, listenPort, useCompression);
<add> }
<add>
<add> public SimpleHttpClient(SSLContext sslContext, List<String> enabledProtocols, List<String> enabledCiphers,
<add> int listenPort, boolean useCompression) {
<ide> HttpClientBuilder builder = HttpClientBuilder.create();
<ide> if (!useCompression) {
<ide> builder.disableContentCompression();
<ide> if (sslContext != null) {
<ide> SSLConnectionSocketFactory sslConnectionFactory = new SSLConnectionSocketFactory(
<ide> sslContext,
<add> toArray(enabledProtocols),
<add> toArray(enabledCiphers),
<ide> new DefaultHostnameVerifier());
<ide> builder.setSSLSocketFactory(sslConnectionFactory);
<ide>
<ide> this.listenPort = listenPort;
<ide> }
<ide>
<add> private static String[] toArray(List<String> list) {
<add> return list != null ? list.toArray(new String[0]) : null;
<add> }
<add>
<ide> public URI newUri(final String path) {
<ide> return URI.create(scheme + "://localhost:" + listenPort + path);
<ide> }
|
|
Java
|
apache-2.0
|
680569c4e7c91e5cde6a8ec8a7a2d204bd5b72cf
| 0 |
incodehq/isis,apache/isis,incodehq/isis,apache/isis,apache/isis,apache/isis,apache/isis,incodehq/isis,estatio/isis,apache/isis,estatio/isis,estatio/isis,incodehq/isis,estatio/isis
|
/*
*
* Copyright 2012-2014 Eurocommercial Properties NV
*
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.applib.fixturescripts;
import java.util.List;
import com.google.common.collect.Lists;
import org.apache.isis.applib.annotation.Programmatic;
public abstract class BuilderScriptAbstract<T,F extends BuilderScriptAbstract<T,F>>
extends FixtureScript implements FixtureScriptWithExecutionStrategy {
private final FixtureScripts.MultipleExecutionStrategy executionStrategy;
/**
* Typically we expect builders to have value semantics, so this is provided as a convenience.
*/
protected BuilderScriptAbstract() {
this(FixtureScripts.MultipleExecutionStrategy.EXECUTE_ONCE_BY_VALUE);
}
protected BuilderScriptAbstract(final FixtureScripts.MultipleExecutionStrategy executionStrategy) {
this.executionStrategy = executionStrategy;
}
@Override
public FixtureScripts.MultipleExecutionStrategy getMultipleExecutionStrategy() {
return executionStrategy;
}
@Programmatic
public F build(
final FixtureScript parentFixtureScript,
ExecutionContext executionContext) {
// returns the fixture script that is run
// (either this one, or possibly one previously executed).
return (F)executionContext.executeChildT(parentFixtureScript, this);
}
@Override
protected final void execute(final ExecutionContext executionContext) {
final F onFixture = (F) BuilderScriptAbstract.this;
for (final Block<T,F> prereq : prereqs) {
prereq.execute(onFixture, executionContext);
}
doExecute(executionContext);
}
protected abstract void doExecute(final ExecutionContext executionContext);
public abstract T getObject();
public <E extends EnumWithBuilderScript<T, F>, T, F extends BuilderScriptAbstract<T,F>> T objectFor(
final E datum,
final FixtureScript.ExecutionContext ec) {
if(datum == null) {
return null;
}
final F fixtureScript = datum.toFixtureScript();
return ec.executeChildT(this, fixtureScript).getObject();
}
private final List<Block> prereqs = Lists.newArrayList();
public F set(Block<T,F> prereq) {
prereqs.add(prereq);
return (F)this;
}
public interface Block<T,F extends BuilderScriptAbstract<T,F>> {
void execute(final F onFixture, final ExecutionContext executionContext);
}
}
|
core/applib/src/main/java/org/apache/isis/applib/fixturescripts/BuilderScriptAbstract.java
|
/*
*
* Copyright 2012-2014 Eurocommercial Properties NV
*
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.applib.fixturescripts;
import org.apache.isis.applib.annotation.Programmatic;
public abstract class BuilderScriptAbstract<T,F extends BuilderScriptAbstract<T,F>>
extends FixtureScript implements FixtureScriptWithExecutionStrategy {
private final FixtureScripts.MultipleExecutionStrategy executionStrategy;
/**
* Typically we expect builders to have value semantics, so this is provided as a convenience.
*/
protected BuilderScriptAbstract() {
this(FixtureScripts.MultipleExecutionStrategy.EXECUTE_ONCE_BY_VALUE);
}
protected BuilderScriptAbstract(final FixtureScripts.MultipleExecutionStrategy executionStrategy) {
this.executionStrategy = executionStrategy;
}
@Override
public FixtureScripts.MultipleExecutionStrategy getMultipleExecutionStrategy() {
return executionStrategy;
}
@Programmatic
public F build(
final FixtureScript parentFixtureScript,
ExecutionContext executionContext) {
// returns the fixture script that is run
// (either this one, or possibly one previously executed).
return (F)executionContext.executeChildT(parentFixtureScript, this);
}
public abstract T getObject();
protected <E extends EnumWithBuilderScript<T, F>, T, F extends BuilderScriptAbstract<T,F>> T objectFor(
final E datum,
final FixtureScript.ExecutionContext ec) {
if(datum == null) {
return null;
}
final F fixtureScript = datum.toFixtureScript();
return ec.executeChildT(this, fixtureScript).getObject();
}
}
|
ISIS-1784: extends BuilderScriptAbstract further, with new set(...) method to allow prereqs to be run within the execute
|
core/applib/src/main/java/org/apache/isis/applib/fixturescripts/BuilderScriptAbstract.java
|
ISIS-1784: extends BuilderScriptAbstract further, with new set(...) method to allow prereqs to be run within the execute
|
<ide><path>ore/applib/src/main/java/org/apache/isis/applib/fixturescripts/BuilderScriptAbstract.java
<ide> * under the License.
<ide> */
<ide> package org.apache.isis.applib.fixturescripts;
<add>
<add>import java.util.List;
<add>
<add>import com.google.common.collect.Lists;
<ide>
<ide> import org.apache.isis.applib.annotation.Programmatic;
<ide>
<ide> return (F)executionContext.executeChildT(parentFixtureScript, this);
<ide> }
<ide>
<add> @Override
<add> protected final void execute(final ExecutionContext executionContext) {
<add>
<add> final F onFixture = (F) BuilderScriptAbstract.this;
<add> for (final Block<T,F> prereq : prereqs) {
<add> prereq.execute(onFixture, executionContext);
<add> }
<add>
<add> doExecute(executionContext);
<add> }
<add>
<add> protected abstract void doExecute(final ExecutionContext executionContext);
<add>
<ide> public abstract T getObject();
<ide>
<del> protected <E extends EnumWithBuilderScript<T, F>, T, F extends BuilderScriptAbstract<T,F>> T objectFor(
<add> public <E extends EnumWithBuilderScript<T, F>, T, F extends BuilderScriptAbstract<T,F>> T objectFor(
<ide> final E datum,
<ide> final FixtureScript.ExecutionContext ec) {
<ide> if(datum == null) {
<ide> return ec.executeChildT(this, fixtureScript).getObject();
<ide> }
<ide>
<add> private final List<Block> prereqs = Lists.newArrayList();
<add> public F set(Block<T,F> prereq) {
<add> prereqs.add(prereq);
<add> return (F)this;
<add> }
<add>
<add> public interface Block<T,F extends BuilderScriptAbstract<T,F>> {
<add> void execute(final F onFixture, final ExecutionContext executionContext);
<add> }
<ide>
<ide> }
<ide>
|
|
Java
|
mit
|
e712e9b9d7c4c64e756a8bb425351a57bc1b2f48
| 0 |
fvasquezjatar/fermat-unused,fvasquezjatar/fermat-unused
|
package com.bitdubai.fermat_pip_plugin.layer.actor.developer.developer.bitdubai.version_1.structure;
import com.bitdubai.fermat_api.Addon;
import com.bitdubai.fermat_api.Plugin;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogLevel;
import com.bitdubai.fermat_api.layer.all_definition.developer.LogManagerForDevelopers;
import com.bitdubai.fermat_api.layer.all_definition.enums.Addons;
import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins;
import com.bitdubai.fermat_api.layer.pip_actor.developer.ClassHierarchy;
import com.bitdubai.fermat_api.layer.pip_actor.developer.LogTool;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
/**
* Created by ciencias on 6/25/15.
*/
public class DeveloperActorLogTool implements LogTool {
private Map<Plugins,Plugin> LoggingLstPlugins;
private Map<Addons,Addon> LoggingLstAddons;
public DeveloperActorLogTool(Map<Plugins, Plugin> LoggingLstPlugins, Map<Addons, Addon> LoggingLstAddons) {
this.LoggingLstPlugins = LoggingLstPlugins;
this.LoggingLstAddons = LoggingLstAddons;
}
@Override
public List<Plugins> getAvailablePluginList() {
List<Plugins> lstPlugins=new ArrayList<Plugins>();
for(Map.Entry<Plugins, Plugin> entry : LoggingLstPlugins.entrySet()) {
Plugins key = entry.getKey();
lstPlugins.add(key);
}
return lstPlugins;
}
@Override
public List<Addons> getAvailableAddonList() {
List<Addons> lstAddons=new ArrayList<Addons>();
for(Map.Entry<Addons, Addon> entry : LoggingLstAddons.entrySet()) {
Addons key = entry.getKey();
lstAddons.add(key);
}
return lstAddons;
}
@Override
public LogLevel getLogLevel(Plugins plugin) {
return ((LogManagerForDevelopers)this.LoggingLstPlugins.get(plugin)).getLoggingLevel();
}
@Override
public LogLevel getLogLevel(Addons addon) {
return ((LogManagerForDevelopers)this.LoggingLstAddons.get(addon)).getLoggingLevel();
}
@Override
public void setLogLevel(Plugins plugin, LogLevel newLogLevel) {
((LogManagerForDevelopers)this.LoggingLstPlugins.get(plugin)).changeLoggingLevel(newLogLevel);
}
@Override
public void setLogLevel(Addons addon, LogLevel newLogLevel) {
((LogManagerForDevelopers)this.LoggingLstAddons.get(addon)).changeLoggingLevel(newLogLevel);
}
@Override
public List<ClassHierarchy> getClassesHierarchy(Plugins plugin) {
/**
* I get the class full patch from the plug in.
*/
List<String> classes = ((LogManagerForDevelopers)this.LoggingLstPlugins.get(plugin)).getClassesFullPath();
/**
* I need to know the minimun number of packages on the plug in.
* If there are more than three, then I will create only three levels
*/
int minPackages=100, cantPackages = 0;
for (String myClass : classes){
String[] packages = myClass.split(Pattern.quote("."));
cantPackages = packages.length;
if (minPackages > cantPackages)
minPackages = cantPackages;
}
/**
* minPackages holds the minimun number of packages available on the plug in.
*/
/**
* I instantiate the class that will hold the levels of the packages.
* Level 1: root (which may contain a lot of packages)
* Level 2: the last package
* Level 3: the class name.
*/
List<ClassHierarchy> returnedClasses = new ArrayList<ClassHierarchy>();
if (minPackages >= 4){
for (String myClass : classes){
String[] packages = myClass.split(Pattern.quote("."));
StringBuilder splitedPackages = new StringBuilder();
for (int i=0; i<packages.length-2;i++){
splitedPackages.append(packages[i]);
splitedPackages.append(".");
}
/**
* I remove the last dot of the package.
*/
splitedPackages.substring(0, splitedPackages.length() -1);
/**
* I add the packages to each level.
*/
ClassHierarchy classesAndPackages = new ClassHierarchy();
classesAndPackages.setLevel1(splitedPackages.toString());
classesAndPackages.setLevel2(packages[packages.length - 2]);
classesAndPackages.setLevel3(packages[packages.length -1]);
returnedClasses.add(classesAndPackages);
splitedPackages.delete(0,splitedPackages.length()-1 );
}
} else
/**
* If there are less four I add the levels I have.
*/
{
for (String myClass : classes) {
String[] packages = myClass.split(Pattern.quote("."));
ClassHierarchy classesAndPackages = new ClassHierarchy();
classesAndPackages.setLevel1(packages[0]);
/**
* If I had one more level, I will add it
*/
if (packages.length > 1)
classesAndPackages.setLevel2(packages[1]);
if (packages.length > 2)
classesAndPackages.setLevel3(packages[2]);
/**
* I add the class to the returning object
*/
returnedClasses.add(classesAndPackages);
}
}
/**
* I return the object
*/
return returnedClasses;
}
}
|
PIP/plugin/actor/fermat-pip-plugin-actor-developer-bitdubai/src/main/java/com/bitdubai/fermat_pip_plugin/layer/actor/developer/developer/bitdubai/version_1/structure/DeveloperActorLogTool.java
|
package com.bitdubai.fermat_pip_plugin.layer.actor.developer.developer.bitdubai.version_1.structure;
import com.bitdubai.fermat_api.Addon;
import com.bitdubai.fermat_api.Plugin;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogLevel;
import com.bitdubai.fermat_api.layer.all_definition.developer.LogManagerForDevelopers;
import com.bitdubai.fermat_api.layer.all_definition.enums.Addons;
import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins;
import com.bitdubai.fermat_api.layer.pip_actor.developer.LogTool;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Created by ciencias on 6/25/15.
*/
public class DeveloperActorLogTool implements LogTool {
private Map<Plugins,Plugin> LoggingLstPlugins;
private Map<Addons,Addon> LoggingLstAddons;
public DeveloperActorLogTool(Map<Plugins, Plugin> LoggingLstPlugins, Map<Addons, Addon> LoggingLstAddons) {
this.LoggingLstPlugins = LoggingLstPlugins;
this.LoggingLstAddons = LoggingLstAddons;
}
@Override
public List<Plugins> getAvailablePluginList() {
List<Plugins> lstPlugins=new ArrayList<Plugins>();
for(Map.Entry<Plugins, Plugin> entry : LoggingLstPlugins.entrySet()) {
Plugins key = entry.getKey();
lstPlugins.add(key);
}
return lstPlugins;
}
@Override
public List<Addons> getAvailableAddonList() {
List<Addons> lstAddons=new ArrayList<Addons>();
for(Map.Entry<Addons, Addon> entry : LoggingLstAddons.entrySet()) {
Addons key = entry.getKey();
lstAddons.add(key);
}
return lstAddons;
}
@Override
public LogLevel getLogLevel(Plugins plugin) {
return ((LogManagerForDevelopers)this.LoggingLstPlugins.get(plugin)).getLoggingLevel();
}
@Override
public LogLevel getLogLevel(Addons addon) {
return ((LogManagerForDevelopers)this.LoggingLstAddons.get(addon)).getLoggingLevel();
}
@Override
public void setLogLevel(Plugins plugin, LogLevel newLogLevel) {
((LogManagerForDevelopers)this.LoggingLstPlugins.get(plugin)).changeLoggingLevel(newLogLevel);
}
@Override
public void setLogLevel(Addons addon, LogLevel newLogLevel) {
((LogManagerForDevelopers)this.LoggingLstAddons.get(addon)).changeLoggingLevel(newLogLevel);
}
}
|
Added GetClassHierarchy implementation
|
PIP/plugin/actor/fermat-pip-plugin-actor-developer-bitdubai/src/main/java/com/bitdubai/fermat_pip_plugin/layer/actor/developer/developer/bitdubai/version_1/structure/DeveloperActorLogTool.java
|
Added GetClassHierarchy implementation
|
<ide><path>IP/plugin/actor/fermat-pip-plugin-actor-developer-bitdubai/src/main/java/com/bitdubai/fermat_pip_plugin/layer/actor/developer/developer/bitdubai/version_1/structure/DeveloperActorLogTool.java
<ide> import com.bitdubai.fermat_api.layer.all_definition.developer.LogManagerForDevelopers;
<ide> import com.bitdubai.fermat_api.layer.all_definition.enums.Addons;
<ide> import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins;
<add>import com.bitdubai.fermat_api.layer.pip_actor.developer.ClassHierarchy;
<ide> import com.bitdubai.fermat_api.layer.pip_actor.developer.LogTool;
<ide>
<ide> import java.util.ArrayList;
<ide> import java.util.List;
<ide> import java.util.Map;
<add>import java.util.regex.Pattern;
<ide>
<ide> /**
<ide> * Created by ciencias on 6/25/15.
<ide> @Override
<ide> public void setLogLevel(Plugins plugin, LogLevel newLogLevel) {
<ide> ((LogManagerForDevelopers)this.LoggingLstPlugins.get(plugin)).changeLoggingLevel(newLogLevel);
<add>
<ide> }
<ide>
<ide> @Override
<ide> public void setLogLevel(Addons addon, LogLevel newLogLevel) {
<ide> ((LogManagerForDevelopers)this.LoggingLstAddons.get(addon)).changeLoggingLevel(newLogLevel);
<ide> }
<add>
<add> @Override
<add> public List<ClassHierarchy> getClassesHierarchy(Plugins plugin) {
<add> /**
<add> * I get the class full patch from the plug in.
<add> */
<add> List<String> classes = ((LogManagerForDevelopers)this.LoggingLstPlugins.get(plugin)).getClassesFullPath();
<add>
<add> /**
<add> * I need to know the minimun number of packages on the plug in.
<add> * If there are more than three, then I will create only three levels
<add> */
<add> int minPackages=100, cantPackages = 0;
<add> for (String myClass : classes){
<add> String[] packages = myClass.split(Pattern.quote("."));
<add> cantPackages = packages.length;
<add> if (minPackages > cantPackages)
<add> minPackages = cantPackages;
<add> }
<add> /**
<add> * minPackages holds the minimun number of packages available on the plug in.
<add> */
<add>
<add> /**
<add> * I instantiate the class that will hold the levels of the packages.
<add> * Level 1: root (which may contain a lot of packages)
<add> * Level 2: the last package
<add> * Level 3: the class name.
<add> */
<add> List<ClassHierarchy> returnedClasses = new ArrayList<ClassHierarchy>();
<add>
<add> if (minPackages >= 4){
<add> for (String myClass : classes){
<add> String[] packages = myClass.split(Pattern.quote("."));
<add> StringBuilder splitedPackages = new StringBuilder();
<add> for (int i=0; i<packages.length-2;i++){
<add> splitedPackages.append(packages[i]);
<add> splitedPackages.append(".");
<add> }
<add> /**
<add> * I remove the last dot of the package.
<add> */
<add> splitedPackages.substring(0, splitedPackages.length() -1);
<add>
<add> /**
<add> * I add the packages to each level.
<add> */
<add> ClassHierarchy classesAndPackages = new ClassHierarchy();
<add> classesAndPackages.setLevel1(splitedPackages.toString());
<add> classesAndPackages.setLevel2(packages[packages.length - 2]);
<add> classesAndPackages.setLevel3(packages[packages.length -1]);
<add> returnedClasses.add(classesAndPackages);
<add> splitedPackages.delete(0,splitedPackages.length()-1 );
<add> }
<add> } else
<add> /**
<add> * If there are less four I add the levels I have.
<add> */
<add> {
<add> for (String myClass : classes) {
<add> String[] packages = myClass.split(Pattern.quote("."));
<add> ClassHierarchy classesAndPackages = new ClassHierarchy();
<add> classesAndPackages.setLevel1(packages[0]);
<add>
<add> /**
<add> * If I had one more level, I will add it
<add> */
<add> if (packages.length > 1)
<add> classesAndPackages.setLevel2(packages[1]);
<add>
<add> if (packages.length > 2)
<add> classesAndPackages.setLevel3(packages[2]);
<add>
<add> /**
<add> * I add the class to the returning object
<add> */
<add> returnedClasses.add(classesAndPackages);
<add> }
<add> }
<add>
<add> /**
<add> * I return the object
<add> */
<add> return returnedClasses;
<add> }
<ide> }
|
|
Java
|
bsd-3-clause
|
87da34c4f2f311c2631bf01aa01f7ea1d3805516
| 0 |
iig-uni-freiburg/WOLFGANG,iig-uni-freiburg/WOLFGANG,iig-uni-freiburg/WOLFGANG
|
package de.uni.freiburg.iig.telematik.wolfgang.graph;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.geom.Line2D;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import javax.swing.JOptionPane;
import com.mxgraph.canvas.mxGraphics2DCanvas;
import com.mxgraph.canvas.mxICanvas;
import com.mxgraph.canvas.mxImageCanvas;
import com.mxgraph.model.mxGeometry;
import com.mxgraph.model.mxGraphModel;
import com.mxgraph.model.mxICell;
import com.mxgraph.shape.mxIShape;
import com.mxgraph.util.mxConstants;
import com.mxgraph.util.mxEvent;
import com.mxgraph.util.mxEventObject;
import com.mxgraph.util.mxEventSource.mxIEventListener;
import com.mxgraph.util.mxPoint;
import com.mxgraph.util.mxRectangle;
import com.mxgraph.util.mxStyleUtils;
import com.mxgraph.util.mxUtils;
import com.mxgraph.view.mxCellState;
import com.mxgraph.view.mxGraph;
import com.mxgraph.view.mxGraphSelectionModel;
import com.mxgraph.view.mxGraphView;
import de.invation.code.toval.graphic.misc.CircularPointGroup;
import de.invation.code.toval.graphic.misc.PColor;
import de.invation.code.toval.graphic.util.GraphicUtils;
import de.invation.code.toval.properties.PropertyException;
import de.invation.code.toval.types.Multiset;
import de.invation.code.toval.validate.ParameterException;
import de.invation.code.toval.validate.Validate;
import de.uni.freiburg.iig.telematik.sepia.event.PlaceChangeEvent;
import de.uni.freiburg.iig.telematik.sepia.event.RelationChangeEvent;
import de.uni.freiburg.iig.telematik.sepia.event.StructureListener;
import de.uni.freiburg.iig.telematik.sepia.event.TransitionChangeEvent;
import de.uni.freiburg.iig.telematik.sepia.exception.PNException;
import de.uni.freiburg.iig.telematik.sepia.graphic.AbstractGraphicalPN;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.AbstractCPNGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.AnnotationGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.ArcGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.NodeGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.TokenGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.attributes.Dimension;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.attributes.Offset;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.attributes.Position;
import de.uni.freiburg.iig.telematik.sepia.petrinet.AbstractFlowRelation;
import de.uni.freiburg.iig.telematik.sepia.petrinet.AbstractMarking;
import de.uni.freiburg.iig.telematik.sepia.petrinet.AbstractPlace;
import de.uni.freiburg.iig.telematik.sepia.petrinet.AbstractTransition;
import de.uni.freiburg.iig.telematik.wolfgang.editor.properties.WolfgangProperties;
import de.uni.freiburg.iig.telematik.wolfgang.editor.properties.WolfgangPropertyAdapter;
import de.uni.freiburg.iig.telematik.wolfgang.graph.change.StyleChange;
import de.uni.freiburg.iig.telematik.wolfgang.graph.util.MXConstants;
import de.uni.freiburg.iig.telematik.wolfgang.graph.util.Utils;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNProperties;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNProperties.PNComponent;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNPropertiesListener;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNProperty;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNPropertyChangeEvent;
public abstract class PNGraph extends mxGraph implements PNPropertiesListener, mxIEventListener, StructureListener {
private AbstractGraphicalPN<?, ?, ?, ?, ?, ?, ?, ?, ?> netContainer = null;
private PNProperties properties = null;
private boolean labelSelected = false;
private boolean isExecution = false;
protected boolean hideContraintsAsTokens = true;
private boolean containedGraphics = false;
private PNGraphChangeHandler changeHandler;
protected PNGraphListenerSupport graphListenerSupport = new PNGraphListenerSupport();
public PNGraph(AbstractGraphicalPN<?, ?, ?, ?, ?, ?, ?, ?, ?> netContainer, PNProperties properties) {
super();
addWGPropertiesListener();
try {
setGridSize(WolfgangProperties.getInstance().getGridSize());
} catch (PropertyException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
Validate.notNull(netContainer);
Validate.notNull(properties);
this.netContainer = netContainer;
this.netContainer.getPetriNet().addStructureListener(this);
this.properties = properties;
this.properties.addPNPropertiesListener(this);
this.getSelectionModel().addListener(mxEvent.CHANGE, this);
this.addListener(mxEvent.RESIZE_CELLS, this);
this.getModel().addListener(mxEvent.CHANGE, this);
this.getModel().addListener(mxEvent.UNDO, this);
changeHandler = new PNGraphChangeHandler(this);
setCellsBendable(true);
setHtmlLabels(true);
setAllowDanglingEdges(false);
setMultigraph(true);
setCellsEditable(false);
setDisconnectOnMove(false);
setExtendParents(false); // disables extending parents after adding
setVertexLabelsMovable(true);
try {
initialize();
} catch (IOException e) {
JOptionPane.showMessageDialog(null, "Cannot write Graphicsstyle to FileSystem " + e.getMessage(), "IO Exception", JOptionPane.ERROR_MESSAGE);
} catch (PropertyException e) {
JOptionPane.showMessageDialog(null, e.getMessage(), "Property Exception", JOptionPane.ERROR_MESSAGE);
}
}
private void addWGPropertiesListener() {
try {
WolfgangProperties.getInstance().addListener(new WolfgangPropertyAdapter() {
@Override
public void gridSizeChanged(int gridSize) {
try {
setGridSize(WolfgangProperties.getInstance().getGridSize());
} catch (PropertyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
refresh();
}
@Override
public void defaultTokenSizeChanged(int defaultTokenSize) {
refresh();
}
@Override
public void defaultTokenDistanceChanged(int defaultTokenDistance) {
refresh();
}
});
} catch (IOException e2) {
// TODO Auto-generated catch block
e2.printStackTrace();
}
}
@SuppressWarnings("rawtypes")
private void initialize() throws PropertyException, IOException {
// Check if net contains Graphical Info and keep that information
if (netContainer.getPetriNetGraphics().getPlaceGraphics().size() > 0 || netContainer.getPetriNet().isEmpty()) {
containedGraphics = true;
}
// Check if net container is empty.
// If not, add all PN components to the graph.
if (!netContainer.getPetriNet().isEmpty()) {
getModel().beginUpdate();
for (AbstractPlace place : getNetContainer().getPetriNet().getPlaces()) {
if (netContainer.getPetriNetGraphics().getPlaceGraphics().get(place.getName()) == null)
netContainer.getPetriNetGraphics().getPlaceGraphics().put(place.getName(), new NodeGraphics());
if (netContainer.getPetriNetGraphics().getPlaceLabelAnnotationGraphics().get(place.getName()) == null)
netContainer.getPetriNetGraphics().getPlaceLabelAnnotationGraphics().put(place.getName(), new AnnotationGraphics());
addPlaceCell(
place.getName(),
MXConstants.extractNodeStyleFromGraphics(PNComponent.PLACE, netContainer.getPetriNetGraphics().getPlaceGraphics().get(place.getName()), netContainer.getPetriNetGraphics()
.getPlaceLabelAnnotationGraphics().get(place.getName())));
}
for (AbstractTransition transition : getNetContainer().getPetriNet().getTransitions()) {
if (netContainer.getPetriNetGraphics().getTransitionGraphics().get(transition.getName()) == null)
netContainer.getPetriNetGraphics().getTransitionGraphics().put(transition.getName(), new NodeGraphics());
if (netContainer.getPetriNetGraphics().getTransitionLabelAnnotationGraphics().get(transition.getName()) == null)
netContainer.getPetriNetGraphics().getTransitionLabelAnnotationGraphics().put(transition.getName(), new AnnotationGraphics());
addTransitionCell(
transition.getName(),
MXConstants.extractNodeStyleFromGraphics(PNComponent.TRANSITION, netContainer.getPetriNetGraphics().getTransitionGraphics().get(transition.getName()), netContainer
.getPetriNetGraphics().getTransitionLabelAnnotationGraphics().get(transition.getName())));
}
for (AbstractFlowRelation relation : getNetContainer().getPetriNet().getFlowRelations()) {
if (netContainer.getPetriNetGraphics().getArcGraphics().get(relation.getName()) == null)
netContainer.getPetriNetGraphics().getArcGraphics().put(relation.getName(), new ArcGraphics());
if (netContainer.getPetriNetGraphics().getArcAnnotationGraphics().get(relation.getName()) == null)
netContainer.getPetriNetGraphics().getArcAnnotationGraphics().put(relation.getName(), new AnnotationGraphics());
addArcCell(
relation.getName(),
MXConstants.extractArcStyleFromGraphics(netContainer.getPetriNetGraphics().getArcGraphics().get(relation.getName()), netContainer.getPetriNetGraphics()
.getArcAnnotationGraphics().get(relation.getName())));
}
getModel().endUpdate();
}
}
public void setLabelSelected(boolean selected) {
this.labelSelected = selected;
}
public boolean isLabelSelected() {
return labelSelected;
}
public void addPNGraphListener(PNGraphListener listener) {
graphListenerSupport.addPNGraphListener(listener);
}
public void removePNGraphListener(PNGraphListener listener) {
graphListenerSupport.removePNGraphListener(listener);
}
private void ensureValidPlaceSize() {
for (PNGraphCell selectedCell : getSelectedGraphCells()) {
if (selectedCell.getType() == PNComponent.PLACE) {
Rectangle bounds = selectedCell.getGeometry().getRectangle();
if (bounds.getHeight() == bounds.getWidth()) {
return;
}
int tagetSize = (int) Math.round(Math.min(bounds.getWidth(), bounds.getHeight()));
mxRectangle targetBounds = getView().getState(selectedCell).getBoundingBox();
targetBounds.setWidth(tagetSize);
targetBounds.setHeight(tagetSize);
resizeCell(selectedCell, targetBounds);
setSelectionCell(selectedCell);
}
}
}
private Set<PNGraphCell> getSelectedGraphCells() {
Set<PNGraphCell> placeCells = new HashSet<PNGraphCell>();
for (Object selectedObject : getSelectionCells()) {
if (selectedObject instanceof PNGraphCell) {
placeCells.add((PNGraphCell) selectedObject);
}
}
return placeCells;
}
public AbstractGraphicalPN<?, ?, ?, ?, ?, ?, ?, ?, ?> getNetContainer() {
return netContainer;
}
protected PNProperties getPNProperties() {
return properties;
}
/**
 * Adds a new place with default style at the given position.
 *
 * @param point center position for the new place
 * @return the created graph cell, or {@code null} if the place could not be added
 * @throws PropertyException if a default property value cannot be read
 * @throws IOException if the backing properties file cannot be accessed
 */
public PNGraphCell addNewPlace(mxPoint point) throws PropertyException, IOException {
    WolfgangProperties defaults = WolfgangProperties.getInstance();
    Offset offset = new Offset(defaults.getDefaultHorizontalLabelOffset(), defaults.getDefaultVerticalLabelOffset());
    Dimension dimension = new Dimension(defaults.getDefaultPlaceSize(), defaults.getDefaultPlaceSize());
    return addNewPlace(point, MXConstants.getDefaultNodeStyle(PNComponent.PLACE), offset, dimension);
}
/**
 * Adds a new place with an existing style string.
 *
 * @param point center position of the new place
 * @param style mxGraph style string used for node and label rendering
 * @param offset label offset relative to the node
 * @param dimension node size
 * @return the graph cell of the new place, or {@code null} if the place
 *         could not be added to the underlying Petri net
 */
public PNGraphCell addNewPlace(mxPoint point, String style, Offset offset, Dimension dimension) {
String nodeName = getNewPlaceName();
// If a cell with this name already exists, presumably reuse its id — TODO confirm intent.
PNGraphCell newPlaceCell = getNodeCell(nodeName);
if (newPlaceCell != null) {
nodeName = newPlaceCell.getId();
}
if (getNetContainer().getPetriNet().addPlace(nodeName)) {
// Derive node/annotation graphics from the style string, then position them.
NodeGraphics nodeGraphics = Utils.createNodeGraphicsFromStyle(style);
nodeGraphics.setPosition(new Position(point.getX(), point.getY()));
nodeGraphics.setDimension(dimension);
AnnotationGraphics annotationGraphics = Utils.createAnnotationGraphicsFromStyle(style);
annotationGraphics.setOffset(offset);
getNetContainer().getPetriNetGraphics().getPlaceGraphics().put(nodeName, nodeGraphics);
getNetContainer().getPetriNetGraphics().getPlaceLabelAnnotationGraphics().put(nodeName, annotationGraphics);
// Only create a new cell if none existed for this name.
if (newPlaceCell == null) {
newPlaceCell = addPlaceCell(nodeName, style);
}
graphListenerSupport.notifyPlaceAdded(getNetContainer().getPetriNet().getPlace(nodeName));
return newPlaceCell;
}
return null;
}
/**
 * Inserts a graph cell for an already existing place (with existing graphics
 * information in the net container) into the graph.
 *
 * @param nodeName name of the place in the Petri net
 * @param style mxGraph style string for the new cell
 * @return the inserted graph cell
 */
@SuppressWarnings("rawtypes")
public PNGraphCell addPlaceCell(String nodeName, String style) {
AbstractPlace place = getNetContainer().getPetriNet().getPlace(nodeName);
NodeGraphics nodeGraphics = getNetContainer().getPetriNetGraphics().getPlaceGraphics(nodeName);
AnnotationGraphics annotationGraphics = getNetContainer().getPetriNetGraphics().getPlaceLabelAnnotationGraphics(nodeName);
PNGraphCell newCell = createPlaceCell(place.getName(), place.getLabel(), nodeGraphics.getPosition().getX(), nodeGraphics.getPosition().getY(), nodeGraphics.getDimension().getX(), nodeGraphics
.getDimension().getY(), style);
// Transfer the label offset from the annotation graphics to the cell geometry.
double offx = annotationGraphics.getOffset().getX();
double offy = annotationGraphics.getOffset().getY();
mxPoint offset = new mxPoint(offx, offy);
newCell.getGeometry().setOffset(offset);
// if (nodeGraphics == null || annotationGraphics == null) {
// mxCellState state = getView().getState(newCell, true);
// }
addCell(newCell, getDefaultParent());
return newCell;
}
/**
 * Creates (but does not insert) a vertex cell representing a place.
 *
 * @param name unique place name, used as the cell id
 * @param label display label of the place
 * @param posX center x coordinate
 * @param posY center y coordinate
 * @param width cell width
 * @param height cell height
 * @param style mxGraph style string
 * @return the newly created, connectable vertex cell
 */
public PNGraphCell createPlaceCell(String name, String label, double posX, double posY, double width, double height, String style) {
    // Geometry is anchored at the upper-left corner while (posX, posY) is the center.
    mxGeometry geometry = new mxGeometry(posX - width / 2, posY - height / 2, width, height);
    geometry.setRelative(false);
    PNGraphCell placeCell = new PNGraphCell(label, geometry, style, PNComponent.PLACE);
    placeCell.setId(name);
    placeCell.setVertex(true);
    placeCell.setConnectable(true);
    return placeCell;
}
/**
 * Adds a new flow relation (arc) between two cells using the default arc style
 * and the default label offsets.
 *
 * @param sourceCell source node cell
 * @param targetCell target node cell
 * @return the created arc cell, or {@code null} if the relation could not be added
 * @throws PropertyException if a default property value cannot be read
 * @throws IOException if the backing properties file cannot be accessed
 */
public PNGraphCell addNewFlowRelation(PNGraphCell sourceCell, PNGraphCell targetCell) throws PropertyException, IOException {
    WolfgangProperties defaults = WolfgangProperties.getInstance();
    Offset offset = new Offset(defaults.getDefaultHorizontalLabelOffset(), defaults.getDefaultVerticalLabelOffset());
    return addNewFlowRelation(sourceCell, targetCell, offset, null, null, MXConstants.getDefaultArcStyle());
}
/**
 * Adds a new flow relation (arc) between two cells with explicit graphics.
 * Only place→transition and transition→place connections are created
 * (Petri nets are bipartite); any other combination yields {@code null}.
 *
 * @param sourceCell source node cell
 * @param targetCell target node cell
 * @param offset label offset for the arc annotation
 * @param points optional intermediate way points (relative to referencePoint)
 * @param referencePoint origin the way points are relative to; may be null
 * @param style mxGraph style string for the arc
 * @return the created arc cell, or {@code null} if no relation was added
 */
public PNGraphCell addNewFlowRelation(PNGraphCell sourceCell, PNGraphCell targetCell, Offset offset, List<mxPoint> points, mxPoint referencePoint, String style) {
AbstractFlowRelation relation = null;
if (sourceCell.getType() == PNComponent.PLACE && targetCell.getType() == PNComponent.TRANSITION) {
relation = getNetContainer().getPetriNet().addFlowRelationPT(sourceCell.getId(), targetCell.getId());
} else if (sourceCell.getType() == PNComponent.TRANSITION && targetCell.getType() == PNComponent.PLACE) {
relation = getNetContainer().getPetriNet().addFlowRelationTP(sourceCell.getId(), targetCell.getId());
}
if (relation != null) {
PNGraphCell newRelationCell = getNodeCell(relation.getName());
ArcGraphics arcGraphics = Utils.createArcGraphicsFromStyle(style);
// Way points are given relative to the reference point; store them as absolute positions.
if (points != null && !points.isEmpty() && referencePoint != null) {
Vector<Position> vector = new Vector<Position>();
for (mxPoint p : points) {
vector.add(new Position(p.getX() + referencePoint.getX(), p.getY() + referencePoint.getY()));
}
arcGraphics.setPositions(vector);
}
AnnotationGraphics annotationGraphics = Utils.createAnnotationGraphicsFromStyle(style);
annotationGraphics.setOffset(offset);
getNetContainer().getPetriNetGraphics().getArcGraphics().put(relation.getName(), arcGraphics);
getNetContainer().getPetriNetGraphics().getArcAnnotationGraphics().put(relation.getName(), annotationGraphics);
// Only create a new cell if none existed for this relation name.
if (newRelationCell == null) {
newRelationCell = addArcCell(relation.getName(), style);
}
graphListenerSupport.notifyRelationAdded(relation);
return newRelationCell;
}
return null;
}
/**
 * Inserts a graph edge for an already existing flow relation (with existing
 * graphics information in the net container) into the graph.
 *
 * @param arcID name of the flow relation in the Petri net
 * @param style mxGraph style string for the new edge
 * @return the inserted edge cell
 */
@SuppressWarnings("rawtypes")
public PNGraphCell addArcCell(String arcID, String style) {
AbstractFlowRelation relation = getNetContainer().getPetriNet().getFlowRelation(arcID);
ArcGraphics arcGraphics = getNetContainer().getPetriNetGraphics().getArcGraphics(arcID);
AnnotationGraphics annotationGraphics = getNetContainer().getPetriNetGraphics().getArcAnnotationGraphics(arcID);
PNGraphCell newCell = createArcCell(arcID, getArcConstraint(relation), style);
addEdge(newCell, getDefaultParent(), getNodeCell(relation.getSource().getName()), getNodeCell(relation.getTarget().getName()), null);
// Transfer the label offset from the annotation graphics to the cell geometry.
double offx = annotationGraphics.getOffset().getX();
double offy = annotationGraphics.getOffset().getY();
mxPoint offset = new mxPoint(offx, offy);
newCell.getGeometry().setOffset(offset);
// Copy stored way points into the edge geometry.
Vector<Position> positions = arcGraphics.getPositions();
List<mxPoint> points = new ArrayList<mxPoint>();
for (Position position : positions) {
points.add(new mxPoint(position.getX(), position.getY()));
}
newCell.getGeometry().setPoints(points);
return newCell;
}
/**
 * Adds a new transition with default style at the given position.
 *
 * @param point center position for the new transition
 * @return the created graph cell, or {@code null} if the transition could not be added
 * @throws PropertyException if a default property value cannot be read
 * @throws IOException if the backing properties file cannot be accessed
 */
public PNGraphCell addNewTransition(mxPoint point) throws PropertyException, IOException {
    WolfgangProperties defaults = WolfgangProperties.getInstance();
    Offset offset = new Offset(defaults.getDefaultHorizontalLabelOffset(), defaults.getDefaultVerticalLabelOffset());
    Dimension dimension = new Dimension(defaults.getDefaultTransitionWidth(), defaults.getDefaultTransitionHeight());
    return addNewTransition(point, MXConstants.getDefaultNodeStyle(PNComponent.TRANSITION), offset, dimension);
}
/**
 * Adds a new transition with an existing style string.
 *
 * @param point center position of the new transition
 * @param style mxGraph style string used for node and label rendering
 * @param offset label offset relative to the node
 * @param dimension node size
 * @return the graph cell of the new transition, or {@code null} if the
 *         transition could not be added to the underlying Petri net
 */
public PNGraphCell addNewTransition(mxPoint point, String style, Offset offset, Dimension dimension) {
String nodeName = getNewTransitionName();
// If a cell with this name already exists, presumably reuse its id — TODO confirm intent.
PNGraphCell newTransitionCell = getNodeCell(nodeName);
if (newTransitionCell != null) {
nodeName = newTransitionCell.getId();
}
if (getNetContainer().getPetriNet().addTransition(nodeName)) {
// Derive node/annotation graphics from the style string, then position them.
NodeGraphics nodeGraphics = Utils.createNodeGraphicsFromStyle(style);
nodeGraphics.setPosition(new Position(point.getX(), point.getY()));
nodeGraphics.setDimension(dimension);
AnnotationGraphics annotationGraphics = Utils.createAnnotationGraphicsFromStyle(style);
annotationGraphics.setOffset(offset);
getNetContainer().getPetriNetGraphics().getTransitionGraphics().put(nodeName, nodeGraphics);
getNetContainer().getPetriNetGraphics().getTransitionLabelAnnotationGraphics().put(nodeName, annotationGraphics);
// Only create a new cell if none existed for this name.
if (newTransitionCell == null) {
newTransitionCell = addTransitionCell(nodeName, style);
}
graphListenerSupport.notifyTransitionAdded(getNetContainer().getPetriNet().getTransition(nodeName));
return newTransitionCell;
}
return null;
}
/**
 * Inserts a graph cell for an already existing transition (with existing
 * graphics information in the net container) into the graph.
 *
 * @param nodeName name of the transition in the Petri net
 * @param style mxGraph style string for the new cell
 * @return the inserted graph cell
 */
@SuppressWarnings("rawtypes")
public PNGraphCell addTransitionCell(String nodeName, String style) {
AbstractTransition transition = getNetContainer().getPetriNet().getTransition(nodeName);
NodeGraphics nodeGraphics = getNetContainer().getPetriNetGraphics().getTransitionGraphics(nodeName);
AnnotationGraphics annotationGraphics = getNetContainer().getPetriNetGraphics().getTransitionLabelAnnotationGraphics(nodeName);
PNGraphCell newCell = createTransitionCell(transition.getName(), transition.getLabel(), nodeGraphics.getPosition().getX(), nodeGraphics.getPosition().getY(), nodeGraphics.getDimension()
.getX(), nodeGraphics.getDimension().getY(), style);
// Transfer the label offset from the annotation graphics to the cell geometry.
double offx = annotationGraphics.getOffset().getX();
double offy = annotationGraphics.getOffset().getY();
mxPoint offset = new mxPoint(offx, offy);
newCell.getGeometry().setOffset(offset);
// if (nodeGraphics == null || annotationGraphics == null) {
// mxCellState state = getView().getState(newCell, true);
// }
addCell(newCell, getDefaultParent());
return newCell;
}
/**
 * Creates (but does not insert) a vertex cell representing a transition.
 *
 * @param name unique transition name, used as the cell id
 * @param label display label of the transition
 * @param posX center x coordinate
 * @param posY center y coordinate
 * @param width cell width
 * @param height cell height
 * @param style mxGraph style string
 * @return the newly created, connectable vertex cell
 */
public PNGraphCell createTransitionCell(String name, String label, double posX, double posY, double width, double height, String style) {
    // Geometry is anchored at the upper-left corner while (posX, posY) is the center.
    mxGeometry geometry = new mxGeometry(posX - width / 2, posY - height / 2, width, height);
    geometry.setRelative(false);
    PNGraphCell transitionCell = new PNGraphCell(label, geometry, style, PNComponent.TRANSITION);
    transitionCell.setId(name);
    transitionCell.setVertex(true);
    transitionCell.setConnectable(true);
    return transitionCell;
}
/**
 * Creates (but does not insert) an edge cell representing an arc.
 *
 * @param name unique arc name, used as the cell id
 * @param label display label (the arc constraint)
 * @param style mxGraph style string
 * @return the newly created, connectable edge cell
 */
public PNGraphCell createArcCell(String name, String label, String style) {
    // Edge geometry is relative so that the label position follows the edge.
    mxGeometry geometry = new mxGeometry();
    geometry.setRelative(true);
    PNGraphCell arcCell = new PNGraphCell(label, geometry, style, PNComponent.ARC);
    arcCell.setId(name);
    arcCell.setVertex(false);
    arcCell.setEdge(true);
    arcCell.setConnectable(true);
    return arcCell;
}
/** Updates the marking (token multiset) of the place with the given name. */
public abstract void updatePlaceState(String name, Multiset<String> input);
@Override
public boolean isCellLocked(Object cell) {
    // While the net is being executed (token game), all cells are immutable.
    return isExecution || super.isCellLocked(cell);
}

@Override
public boolean isCellConnectable(Object cell) {
    // No new connections may be created while the net is being executed.
    return !isExecution && super.isCellConnectable(cell);
}
/**
 * @return {@code true} if the graph is in execution (token game) mode.
 */
public boolean isExecution() {
return isExecution;
}
/**
 * Switches execution (token game) mode on or off. In execution mode cells
 * are locked and not connectable (see isCellLocked / isCellConnectable).
 */
public void setExecution(boolean isExecution) {
this.isExecution = isExecution;
}
@Override
/**
 * Constructs a new view to be used in this graph.
 *
 * @return a PNGraph-specific {@code GraphView}
 */
protected mxGraphView createGraphView() {
return new GraphView(this);
}
@Override
/**
 * Returns the tooltip to be used for the given cell, dispatching to the
 * component-specific tooltip methods. Non-PNGraphCell objects yield an
 * empty string.
 */
public String getToolTipForCell(Object object) {
    if (!(object instanceof PNGraphCell))
        return "";
    PNGraphCell cell = (PNGraphCell) object;
    switch (cell.getType()) {
    case ARC:
        return getArcToolTip(cell);
    case PLACE:
        return getPlaceToolTip(cell);
    case TRANSITION:
        return getTransitionToolTip(cell);
    default:
        return "";
    }
}
/** Tooltip text for the given place cell. */
protected abstract String getPlaceToolTip(PNGraphCell cell);
/** Tooltip text for the given transition cell. */
protected abstract String getTransitionToolTip(PNGraphCell cell);
/** Tooltip text for the given arc cell. */
protected abstract String getArcToolTip(PNGraphCell cell);
/** Label text (constraint) to display on the edge of the given flow relation. */
protected abstract String getArcConstraint(AbstractFlowRelation relation);
/** Color associated with the token of the given name. */
public abstract Color getTokenColorForName(String name);
/** Assigns a new color to the token of the given name. */
public abstract void updateTokenColor(String name, Color value);
/** Constraint multiset of the arc with the given name. */
public abstract Multiset<String> getConstraintforArc(String name);
/** Replaces the constraint multiset of the arc with the given name. */
public abstract void updateConstraint(String name, Multiset value);
/** Refreshes the token configurer for the component with the given name. */
public abstract void updateTokenConfigurer(String name);
/** Sets the capacity of a place for the given token color. */
public abstract void updatePlaceCapacity(String name, String color, int newCapacity);
/** Capacity of a place for the given token color. */
public abstract int getCapacityforPlace(String name, String color);
/** Triggers a refresh of all dependent views. */
public abstract void updateViews();
/**
 * Returns the marking (token multiset) to paint for the place cell with the
 * given id, using the point group for layout of the token dots.
 *
 * @param id id of the place cell
 * @param circularPointGroup point group used to lay out the token dots
 * @return the marking to display, or {@code null} if there is none
 */
public abstract Multiset<String> getPlaceStateForCell(String id, CircularPointGroup circularPointGroup);
/**
 * Method for incrementing or decrementing the current #AbstractMarking of
 * the given #AbstractPNPlace
 *
 * @param cell place cell whose marking is modified
 * @param wheelRotation mouse wheel delta; sign decides increment vs. decrement
 * @return the resulting marking
 */
public abstract AbstractMarking inOrDecrementPlaceState(PNGraphCell cell, int wheelRotation);
/**
 * Selects all visible leaf cells of the given Petri net component type
 * (all places, all transitions or all arcs).
 *
 * @param type
 *            The component type to select.
 */
public void selectPNGraphCells(final PNComponent type) {
Collection<Object> cells = mxGraphModel.filterDescendants(getModel(), new mxGraphModel.Filter() {
public boolean filter(Object cell) {
// Keep only visible leaf cells of the requested component type.
return view.getState(cell) != null && model.getChildCount(cell) == 0 && ((PNGraphCell) cell).getType() == type;
}
});
setSelectionCells(cells);
}
// Needs to be overridden for token painting.
@Override
/**
 * Draws the cell state with the given label onto the canvas. No
 * children or descendants are painted here. This method invokes
 * cellDrawn after the cell, but not its descendants have been
 * painted.
 *
 * @param canvas Canvas onto which the cell should be drawn.
 * @param state State of the cell to be drawn.
 * @param drawLabel Indicates if the label should be drawn.
 */
public void drawState(mxICanvas canvas, mxCellState state, boolean drawLabel) {
Object cell = (state != null) ? state.getCell() : null;
// Only paint real vertices/edges, never the view or model root.
if (cell != null && cell != view.getCurrentRoot() && cell != model.getRoot() && (model.isVertex(cell) || model.isEdge(cell))) {
// NOTE(review): 'customcell' is declared but never used in this method.
PNGraphCell customcell;
Object obj = null;
// Image canvases delegate to the default drawing; otherwise use the
// custom drawCell that adds Petri-net-specific decorations.
if (canvas instanceof mxImageCanvas)
obj = canvas.drawCell(state);
else
try {
obj = drawCell((mxGraphics2DCanvas) canvas, state);
} catch (IOException e) {
JOptionPane.showMessageDialog(null, "Cannot write Graphicsstyle to FileSystem " + e.getMessage(), "IO Exception", JOptionPane.ERROR_MESSAGE);
} catch (PropertyException e) {
JOptionPane.showMessageDialog(null, e.getMessage(), "Property Exception", JOptionPane.ERROR_MESSAGE);
}
Object lab = null;
// Holds the current clipping region in case the label will be
// clipped
Shape clip = null;
Rectangle newClip = state.getRectangle();
// Indirection for image canvas that contains a graphics canvas
mxICanvas clippedCanvas = (isLabelClipped(state.getCell())) ? canvas : null;
if (clippedCanvas instanceof mxImageCanvas) {
clippedCanvas = ((mxImageCanvas) clippedCanvas).getGraphicsCanvas();
// TODO: Shift newClip to match the image offset
// Point pt = ((mxImageCanvas) canvas).getTranslate();
// newClip.translate(-pt.x, -pt.y);
}
if (clippedCanvas instanceof mxGraphics2DCanvas) {
Graphics g = ((mxGraphics2DCanvas) clippedCanvas).getGraphics();
clip = g.getClip();
// Ensure that our new clip resides within our old clip
if (clip instanceof Rectangle) {
g.setClip(newClip.intersection((Rectangle) clip));
}
// Otherwise, default to original implementation
else {
g.setClip(newClip);
}
}
if (drawLabel) {
String label = state.getLabel();
if (label != null && state.getLabelBounds() != null) {
Graphics2D g = null;
// Use the stroke color and a style-dependent stroke for the label text.
if (canvas instanceof mxGraphics2DCanvas) {
Map<String, Object> style = state.getStyle();
g = ((mxGraphics2DCanvas) canvas).getGraphics();
Color color = mxUtils.getColor(state.getStyle(), mxConstants.STYLE_STROKECOLOR);
g.setColor(color);
g.setStroke(Utils.createLabelStroke(style, canvas.getScale()));
}
lab = canvas.drawLabel(label, state, isHtmlLabel(cell));
// Restore a default stroke after the label was drawn.
if (g != null)
g.setStroke(new BasicStroke((float) 2));
}
}
// Restores the previous clipping region
if (clippedCanvas instanceof mxGraphics2DCanvas) {
((mxGraphics2DCanvas) clippedCanvas).getGraphics().setClip(clip);
}
// Invokes the cellDrawn callback with the object which was created
// by the canvas to represent the cell graphically
if (obj != null) {
cellDrawn(canvas, state, obj, lab);
}
}
}
/**
 * Draws the shape of the given cell state onto the canvas and adds the
 * Petri-net-specific decorations: tokens on places, access-mode graphics on
 * transitions and constraint graphics on arcs.
 *
 * @param canvas canvas to draw on
 * @param state  cell state to be drawn
 * @return the shape that was painted (may be {@code null} if the style has no shape)
 * @throws PropertyException if Wolfgang properties required for drawing cannot be read
 * @throws IOException if the properties file backing the graphics settings cannot be accessed
 */
public Object drawCell(mxGraphics2DCanvas canvas, mxCellState state) throws PropertyException, IOException {
    Map<String, Object> style = state.getStyle();
    mxIShape shape = canvas.getShape(style);
    if (canvas.getGraphics() != null && shape != null) {
        // Creates a temporary graphics instance for drawing this shape.
        float opacity = mxUtils.getFloat(style, mxConstants.STYLE_OPACITY, 100);
        Graphics2D g = canvas.createTemporaryGraphics(style, opacity, state);
        // Paints the shape, then the type-specific decorations.
        shape.paintShape(canvas, state);
        if (state.getCell() instanceof PNGraphCell) {
            PNGraphCell customcell = (PNGraphCell) state.getCell();
            if (customcell.getType() == PNComponent.PLACE) {
                drawAdditionalPlaceGrahpics(canvas, state);
            }
            if (customcell.getType() == PNComponent.TRANSITION) {
                try {
                    drawAdditionalTransitionGrahpics(canvas, state);
                    // FIX: the original passed the non-existent constant JOptionPane.ERROR
                    // as message type; ERROR_MESSAGE is the correct constant.
                } catch (ParameterException e) {
                    JOptionPane.showMessageDialog(null, "Graphic for Access Mode is not avaiable \nReason: " + e.getMessage(), "Parameter Exception", JOptionPane.ERROR_MESSAGE);
                } catch (PropertyException e) {
                    JOptionPane.showMessageDialog(null, "Graphic for Access Mode is not avaiable \nReason: " + e.getMessage(), "Property Exception", JOptionPane.ERROR_MESSAGE);
                } catch (IOException e) {
                    JOptionPane.showMessageDialog(null, "Graphic for Access Mode is not avaiable \nReason: " + e.getMessage(), "IO Exception", JOptionPane.ERROR_MESSAGE);
                }
            }
            if (customcell.getType() == PNComponent.ARC) {
                if (!hideContraintsAsTokens)
                    drawAdditionalArcGrahpics(canvas, state);
            }
        }
        // Dispose of the temporary graphics; the canvas keeps its own instance.
        g.dispose();
    }
    return shape;
}
/** Paints arc-specific decorations (e.g. constraint graphics) for the given state. */
protected abstract void drawAdditionalArcGrahpics(mxGraphics2DCanvas canvas, mxCellState state);
/** Paints transition-specific decorations (e.g. access-mode graphics) for the given state. */
protected abstract void drawAdditionalTransitionGrahpics(mxGraphics2DCanvas canvas, mxCellState state) throws PropertyException, IOException;
/**
 * Paints the tokens of a place onto the canvas. Tokens are drawn as colored
 * dots arranged by a {@link CircularPointGroup}; if the place is too small to
 * hold the dots, the token count is drawn as a number instead.
 *
 * @param canvas canvas to draw on
 * @param state  state of the place cell
 * @throws PropertyException if token size/distance defaults cannot be read
 * @throws IOException if the properties file cannot be accessed
 */
protected void drawAdditionalPlaceGrahpics(mxGraphics2DCanvas canvas, mxCellState state) throws PropertyException, IOException {
    Rectangle temp = state.getRectangle();
    PNGraphCell cell = (PNGraphCell) state.getCell();
    int minDistance = (int) (WolfgangProperties.getInstance().getDefaultTokenDistance() * getView().getScale());
    int pointDiameter = (int) (WolfgangProperties.getInstance().getDefaultTokenSize() * getView().getScale());
    CircularPointGroup circularPointGroup = new CircularPointGroup(minDistance, pointDiameter);
    // TODO Making method more general to be able to handle colored marking in cpn
    Multiset<String> placeState = getPlaceStateForCell(cell.getId(), circularPointGroup);
    if (placeState == null)
        return;
    // Token dots are only populated for colored (CPN) graphics, where a
    // name -> color mapping exists. NOTE(review): for non-CPN graphics the
    // point group stays empty — confirm this is intended.
    if (getNetContainer().getPetriNetGraphics() instanceof AbstractCPNGraphics) {
        AbstractCPNGraphics cpnGraphics = (AbstractCPNGraphics) getNetContainer().getPetriNetGraphics();
        Map<String, Color> colors = cpnGraphics.getColors();
        for (String tokenName : placeState.support()) {
            Color color = colors.get(tokenName);
            int number = placeState.multiplicity(tokenName);
            // Unknown token names default to black.
            PColor pco = (color != null) ? new PColor(color.getRed(), color.getGreen(), color.getBlue()) : PColor.black;
            circularPointGroup.addPoints(pco, number);
        }
    }
    int k = placeState.size();
    Point center = new Point(temp.x + temp.width / 2, temp.y + temp.height / 2);
    // Estimate the width required to paint k token dots.
    // (The original code contained a second, redundant "k == 2" branch with
    // an identical body; it has been removed.)
    int requiredWidth = 0;
    if (k == 1)
        requiredWidth = circularPointGroup.getPointDiameter();
    else if (k == 2 || k == 3)
        requiredWidth = (circularPointGroup.getPointDiameter() + minDistance) * 2;
    else if (k == 4)
        requiredWidth = (circularPointGroup.getPointDiameter() + minDistance * 2) * 2;
    else if (k >= 5)
        requiredWidth = circularPointGroup.getRequiredDiameter();
    if (state.getWidth() >= requiredWidth)
        drawPoints(canvas, temp, circularPointGroup, center);
    else
        drawNumbers(cell, k + "", canvas, temp, center);
}
/**
 * Paints the token count of a place as a number, used when the place is too
 * small to show individual token dots.
 *
 * @param cell    the place cell
 * @param numbers the token count as text
 * @param canvas  canvas to draw on
 * @param temp    bounding rectangle of the place
 * @param center  center point of the place
 */
private void drawNumbers(PNGraphCell cell, String numbers, mxGraphics2DCanvas canvas, Rectangle temp, Point center) {
    Graphics2D g2 = (Graphics2D) canvas.getGraphics();
    // Use the cell's configured font family, falling back to the mxGraph default.
    Object configuredFamily = getCellStyle(cell).get(mxConstants.STYLE_FONTFAMILY);
    String family = (configuredFamily != null) ? configuredFamily.toString() : mxConstants.DEFAULT_FONTFAMILY;
    g2.setFont(new Font(family, Font.PLAIN, (int) (10 * getView().getScale())));
    g2.setPaint(Color.black);
    drawString(g2, numbers + "\n", center.x - (int) (temp.width * 0.1), center.y - (int) (g2.getFontMetrics().getHeight() * 0.8));
}
/**
 * Draws a (possibly multi-line) string, advancing one font height per line.
 * The first line is drawn one font height below {@code y}.
 */
private void drawString(Graphics g, String text, int x, int y) {
    int baseline = y;
    for (String line : text.split("\n")) {
        baseline += g.getFontMetrics().getHeight();
        g.drawString(line, x, baseline);
    }
}
/**
 * Paints the token dots of a place, one filled circle per point in the group,
 * colored per token color. Coordinates in the point group are relative to the
 * place center.
 *
 * @param canvas             canvas to draw on
 * @param temp               bounding rectangle of the place (unused here, kept for symmetry with drawNumbers)
 * @param circularPointGroup layout of the token dots
 * @param center             center point of the place
 */
protected void drawPoints(mxGraphics2DCanvas canvas, Rectangle temp, CircularPointGroup circularPointGroup, Point center) {
    Graphics g = canvas.getGraphics();
    // FIX: removed the unused local 'Set<TokenGraphics> tgSet' from the original.
    for (PColor pointColor : circularPointGroup.getColors()) {
        g.setColor(new Color(pointColor.getRGB()));
        for (de.invation.code.toval.graphic.misc.Position p : circularPointGroup.getCoordinatesFor(pointColor)) {
            GraphicUtils.fillCircle(g, (int) (center.getX() + p.getX()), (int) (center.getY() + p.getY()), circularPointGroup.getPointDiameter());
        }
    }
}
/**
* Sets the positions of place and transition labels according to the<br>
* information contained in the corresponding annotation graphics.<br>
* This method is called when a graph is created with a non-empty Petri net.
*
* @param pnGraphics
* The Petri net graphics
*/
// public void updatePositionPropertiesFromCells() {
// for (PNGraphCell cell : nodeReferences.values()) {
// mxCellState state = getView().getState(cell);
// setPositionProperties((PNGraphCell) state.getCell());
// }
//
// }
/**
 * Pushes the cell's current center coordinates into the Petri net properties,
 * so the net model stays in sync after cells were moved. Negative coordinates
 * are ignored; arcs carry no position properties.
 */
private void setPositionProperties(PNGraphCell cell) {
    mxGeometry geometry = cell.getGeometry();
    switch (cell.getType()) {
    case ARC:
        break;
    case PLACE:
        if (geometry.getCenterX() >= 0)
            properties.setPlacePositionX(this, cell.getId(), (int) geometry.getCenterX());
        if (geometry.getCenterY() >= 0)
            properties.setPlacePositionY(this, cell.getId(), (int) geometry.getCenterY());
        break;
    case TRANSITION:
        if (geometry.getCenterX() >= 0)
            properties.setTransitionPositionX(this, cell.getId(), (int) geometry.getCenterX());
        if (geometry.getCenterY() >= 0)
            properties.setTransitionPositionY(this, cell.getId(), (int) geometry.getCenterY());
        break;
    }
}
// ------- Property change
// handling--------------------------------------------------------------------------------
// These methods are called when some Petri net properties changed by other
// classes. ----------------
/**
 * This method notifies a PNPropertiesListener that a PN component was
 * added.<br>
 * This can be a place, transition or arc.<br>
 * NOTE(review): intentionally left empty here — presumably cell creation is
 * handled elsewhere in this class; confirm before adding logic.
 */
@Override
public void componentAdded(PNComponent component, String name) {
}
/**
 * Notifies this listener that a PN component was removed.
 * NOTE(review): intentionally left empty — confirm before adding logic.
 */
@Override
public void componentRemoved(PNComponent component, String name) {
}
@Override
/**
 * Reacts to Petri net property changes triggered by other classes: the change
 * is dispatched to the matching handler (place/transition/arc) and the graph
 * is refreshed. Changes originating from this graph itself, or changes whose
 * value did not actually change, are ignored.
 */
public void propertyChange(PNPropertyChangeEvent event) {
    boolean valueChanged = !event.getOldValue().equals(event.getNewValue());
    if (valueChanged && event.getSource() != this) {
        switch (event.getFieldType()) {
        case PLACE:
            handlePlacePropertyChange(event.getName(), event.getProperty(), event.getOldValue(), event.getNewValue());
            break;
        case TRANSITION:
            handleTransitionPropertyChange(event.getName(), event.getProperty(), event.getOldValue(), event.getNewValue());
            break;
        case ARC:
            handleArcPropertyChange(event.getName(), event.getProperty(), event.getOldValue(), event.getNewValue());
            break;
        }
        refresh();
    }
}
/**
 * Applies an externally triggered place property change to the corresponding
 * graph cell.
 *
 * @param name     name of the place whose property changed
 * @param property the changed property
 * @param oldValue previous value (Integer for size/positions, String for the label)
 * @param newValue new value
 * @return {@code true} if the change was handled, {@code false} otherwise
 */
private boolean handlePlacePropertyChange(String name, PNProperty property, Object oldValue, Object newValue) {
    PNGraphCell placeCell = getNodeCell(name);
    mxRectangle bounds;
    switch (property) {
    case PLACE_LABEL:
        getModel().setValue(placeCell, newValue);
        return true;
    case PLACE_SIZE: {
        // Places are quadratic: the single size value is both width and height.
        // (Replaced the deprecated 'new Integer(...)' boxing of the original.)
        bounds = getView().getState(placeCell).getBoundingBox();
        double size = ((Integer) newValue).doubleValue();
        bounds.setWidth(size);
        bounds.setHeight(size);
        resizeCell(placeCell, bounds);
        setSelectionCell(placeCell);
        return true;
    }
    case PLACE_POSITION_X:
        // Positions arrive as absolute values; moveCells expects a delta.
        moveCells(new Object[] { placeCell }, ((Integer) newValue).doubleValue() - ((Integer) oldValue).doubleValue(), 0);
        setSelectionCell(placeCell);
        return true;
    case PLACE_POSITION_Y:
        moveCells(new Object[] { placeCell }, 0, ((Integer) newValue).doubleValue() - ((Integer) oldValue).doubleValue());
        setSelectionCell(placeCell);
        return true;
    }
    return false;
}
/**
 * Applies an externally triggered transition property change to the
 * corresponding graph cell.
 *
 * @param name     name of the transition whose property changed
 * @param property the changed property
 * @param oldValue previous value (Integer for sizes/positions, String for the label)
 * @param newValue new value
 * @return {@code true} if the change was handled, {@code false} otherwise
 */
protected boolean handleTransitionPropertyChange(String name, PNProperty property, Object oldValue, Object newValue) {
    PNGraphCell transitionCell = getNodeCell(name);
    mxRectangle bounds;
    switch (property) {
    case TRANSITION_LABEL:
        getModel().setValue(transitionCell, newValue);
        return true;
    case TRANSITION_POSITION_X:
        // Positions arrive as absolute values; moveCells expects a delta.
        // (Replaced the deprecated 'new Integer(...)' boxing of the original.)
        moveCells(new Object[] { transitionCell }, ((Integer) newValue).doubleValue() - ((Integer) oldValue).doubleValue(), 0);
        setSelectionCell(transitionCell);
        return true;
    case TRANSITION_POSITION_Y:
        moveCells(new Object[] { transitionCell }, 0, ((Integer) newValue).doubleValue() - ((Integer) oldValue).doubleValue());
        setSelectionCell(transitionCell);
        return true;
    case TRANSITION_SIZE_X:
        // Width changes keep the current height, and vice versa.
        bounds = getView().getState(transitionCell).getBoundingBox();
        bounds.setWidth(((Integer) newValue).doubleValue());
        bounds.setHeight(transitionCell.getGeometry().getHeight());
        resizeCell(transitionCell, bounds);
        setSelectionCell(transitionCell);
        return true;
    case TRANSITION_SIZE_Y:
        bounds = getView().getState(transitionCell).getBoundingBox();
        bounds.setWidth(transitionCell.getGeometry().getWidth());
        bounds.setHeight(((Integer) newValue).doubleValue());
        resizeCell(transitionCell, bounds);
        setSelectionCell(transitionCell);
        return true;
    }
    return false;
}
/**
 * Applies an externally triggered arc property change to the corresponding
 * graph cell.
 *
 * @param name     name of the arc whose property changed
 * @param property the changed property
 * @param oldValue previous value
 * @param newValue new value
 * @return {@code true} if the change was handled, {@code false} otherwise
 */
protected boolean handleArcPropertyChange(String name, PNProperty property, Object oldValue, Object newValue) {
    PNGraphCell arcCell = getNodeCell(name);
    switch (property) {
    case ARC_WEIGHT:
        getModel().setValue(arcCell, newValue);
        // FIX: report the change as handled, consistent with the place and
        // transition handlers (the original fell through to 'return false').
        return true;
    }
    return false;
}
/** Selects the place cell with the given name, unless it is already the selection. */
public void selectPlace(String name) {
    if (isCellSelected(name))
        return;
    setSelectionCell(getNodeCell(name));
}

/** Selects the transition cell with the given name, unless it is already the selection. */
public void selectTransition(String name) {
    if (isCellSelected(name))
        return;
    setSelectionCell(getNodeCell(name));
}

/** Selects the arc cell with the given name, unless it is already the selection. */
public void selectArc(String name) {
    if (isCellSelected(name))
        return;
    setSelectionCell(getNodeCell(name));
}
/**
 * Checks whether the single current selection cell is the graph cell with the
 * given id.
 *
 * @param id cell id to check
 * @return {@code true} if the current selection cell has the given id
 */
private boolean isCellSelected(String id) {
    Object selection = getSelectionCell();
    if (selection instanceof PNGraphCell) {
        // FIX: the original compared the ids with '==', which only holds for
        // interned strings; compare by value (null-safe) instead.
        return Objects.equals(((PNGraphCell) selection).getId(), id);
    }
    return false;
}
@Override
/**
 * After cells were moved on the canvas, the new center positions are written
 * back into the Petri net position properties.
 */
public void cellsMoved(Object[] cells, double dx, double dy, boolean disconnect, boolean constrain) {
    super.cellsMoved(cells, dx, dy, disconnect, constrain);
    for (Object movedCell : cells) {
        if (movedCell instanceof PNGraphCell) {
            setPositionProperties((PNGraphCell) movedCell);
        }
    }
}
/**
 * After cells were resized on the canvas, the new dimensions are written back
 * into the Petri net size properties (places: single size; transitions:
 * width and height). Arcs carry no size properties.
 */
@Override
public void cellsResized(Object[] cells, mxRectangle[] bounds) {
super.cellsResized(cells, bounds);
for (Object object : cells) {
if (object instanceof PNGraphCell) {
PNGraphCell cell = (PNGraphCell) object;
switch (cell.getType()) {
case ARC:
break;
case PLACE:
properties.setPlaceSize(this, cell.getId(), (int) cell.getGeometry().getWidth());
break;
case TRANSITION:
properties.setTransitionSizeX(this, cell.getId(), (int) cell.getGeometry().getWidth());
properties.setTransitionSizeY(this, cell.getId(), (int) cell.getGeometry().getHeight());
break;
}
}
}
}
@Override
/**
 * After cells were removed from the graph, the corresponding components are
 * removed from the underlying Petri net as well.
 */
public void cellsRemoved(Object[] cells) {
    super.cellsRemoved(cells);
    for (Object removedCell : cells) {
        if (!(removedCell instanceof PNGraphCell))
            continue;
        PNGraphCell cell = (PNGraphCell) removedCell;
        switch (cell.getType()) {
        case ARC:
            removeFlowRelation(cell.getId());
            break;
        case PLACE:
            removePlace(cell.getId());
            break;
        case TRANSITION:
            removeTransition(cell.getId());
            break;
        }
    }
}
@Override
/**
 * Returns true if split is enabled and the given edge may be splitted into
 * two edges with the given cell as a new terminal between the two.
 * <p>
 * NOTE(review): this implementation always returns {@code false} — splitting
 * an edge would insert a node between a place and a transition and thereby
 * break the bipartite Petri net structure (see comment below).
 *
 * @param target Object that represents the edge to be splitted.
 * @param cells Array of cells to add into the given edge.
 * @return Always {@code false}; edge splitting is disabled for Petri nets.
 */
public boolean isSplitTarget(Object target, Object[] cells) {
// since this works only for one cell, this would always hurt the
// Petri-Net order
// if (target != null && cells != null && cells.length == 1)
// {
// Object src = model.getTerminal(target, true);
// Object trg = model.getTerminal(target, false);
//
// return (model.isEdge(target)
// && isCellConnectable(cells[0])
// && getEdgeValidationError(target,
// model.getTerminal(target, true), cells[0]) == null
// && !model.isAncestor(cells[0], src) && !model.isAncestor(
// cells[0], trg));
// }
return false;
}
@Override
/**
 * Returns true if the given target cell is a valid target for source.
 * In addition to the generic mxGraph checks, Petri net bipartiteness is
 * enforced: two components of the same type (place-place or
 * transition-transition) must never be connected.
 *
 * @param source Object that represents the source cell.
 * @param target Object that represents the target cell.
 * @return Returns true if the connection between the given terminals is valid.
 */
public boolean isValidConnection(Object source, Object target) {
    boolean valid = isValidSource(source) && isValidTarget(target) && (isAllowLoops() || source != target);
    if (valid && source instanceof PNGraphCell && target instanceof PNGraphCell) {
        // Petri nets are bipartite: arcs connect a place with a transition.
        if (((PNGraphCell) source).getType() == ((PNGraphCell) target).getType())
            return false;
    }
    return valid;
}
/** Removes the flow relation with the given name from the underlying Petri net. */
protected boolean removeFlowRelation(String name) {
return netContainer.getPetriNet().removeFlowRelation(name);
}
/** Removes the transition with the given name from the underlying Petri net. */
protected boolean removeTransition(String name) {
return netContainer.getPetriNet().removeTransition(name);
}
/** Removes the place with the given name from the underlying Petri net. */
protected boolean removePlace(String name) {
return netContainer.getPetriNet().removePlace(name);
}
@Override
/**
 * mxGraph event callback: CHANGE events are forwarded to the change handler
 * (and, for selection sources, trigger a selection notification); resize
 * events re-validate the quadratic place size constraint.
 */
public void invoke(Object sender, mxEventObject evt) {
    String eventName = evt.getName();
    if (mxEvent.CHANGE.equals(eventName)) {
        changeHandler.handleChange(evt);
        boolean selectionEvent = sender instanceof mxGraphSelectionModel || sender instanceof PNGraphComponent;
        if (selectionEvent) {
            graphListenerSupport.notifyComponentsSelected(getSelectedGraphCells());
        }
    } else if (mxEvent.RESIZE_CELLS.equals(eventName)) {
        ensureValidPlaceSize();
    }
}
/** Sets the label (constraint text) of the arc with the given id. */
protected abstract void setArcLabel(String id, String string);
/**
 * Applies the given font family to the labels of all selected cells. The
 * placeholder value {@code "-"} (shown for mixed selections) is ignored.
 *
 * @param font the font family name; must not be {@code null}
 */
public void setFontOfSelectedCellLabel(String font) {
    Validate.notNull(font);
    // Validate.notNull already rejects null, so the original's additional
    // 'font != null' check was redundant; only the "-" placeholder is filtered.
    if (!font.equals("-")) {
        setCellStyles(mxConstants.STYLE_FONTFAMILY, font);
    }
}
/**
 * Applies the given font size to the labels of all selected cells.
 *
 * @param font the font size as a string (mxGraph stores style values as strings)
 */
public void setFontSizeOfSelectedCellLabel(String font) {
setCellStyles(mxConstants.STYLE_FONTSIZE, font);
}
/**
 * Applies the given stroke width to all selected cells. Depending on whether
 * a label or the component itself is selected, either the label line width or
 * the shape stroke width is changed. Cells whose stroke already matches are
 * left untouched to avoid superfluous model updates.
 *
 * @param strokeWeight the new stroke width as a string
 */
public void setStrokeWeightOfSelectedCell(String strokeWeight) {
    // The target style key is the same for every selected cell.
    String styleKey = isLabelSelected() ? MXConstants.LABEL_LINE_WIDTH : mxConstants.STYLE_STROKEWIDTH;
    for (Object cell : getSelectionCells()) {
        if (!(cell instanceof PNGraphCell))
            continue;
        Map<String, Object> cellStyle = getView().getState(cell).getStyle();
        if (cellStyle.containsKey(styleKey)) {
            // Stored widths may carry a trailing ".0"; strip it before comparing.
            String currentStrokeWidth = mxUtils.getString(cellStyle, styleKey).replace(".0", "");
            if (!currentStrokeWidth.equals(strokeWeight)) {
                setCellStyles(styleKey, strokeWeight, new Object[] { cell });
            }
        }
    }
}
@Override
/**
 * Sets the key to value in the styles of the given cells. This will modify
 * the existing cell styles in-place and override any existing assignment
 * for the given key. If no cells are specified, then the selection cells
 * are changed. If no value is specified, then the respective key is
 * removed from the styles.
 *
 * @param key String representing the key to be assigned.
 * @param value String representing the new value for the key.
 * @param cells Array of cells to change the style for.
 */
public Object[] setCellStyles(String key, String value, Object[] cells) {
    Object[] targetCells = (cells != null) ? cells : getSelectionCells();
    setCellStyles(this, targetCells, key, value);
    return targetCells;
}
/**
 * Assigns the value for the given key in the styles of the given cells, or
 * removes the key from the styles if the value is null. All edits are wrapped
 * in a single begin/endUpdate transaction so they form one undoable step.
 *
 * @param pnGraph
 *            Graph whose model executes the transaction.
 * @param cells
 *            Array of cells to be updated (null entries are skipped).
 * @param key
 *            Key of the style to be changed.
 * @param value
 *            New value for the given key, or null to remove the key.
 */
public static void setCellStyles(PNGraph pnGraph, Object[] cells, String key, String value) {
    if (cells != null && cells.length > 0) {
        pnGraph.getModel().beginUpdate();
        try {
            for (int i = 0; i < cells.length; i++) {
                if (cells[i] != null) {
                    // Merge the key/value pair into the cell's existing style string.
                    String style = mxStyleUtils.setStyle(pnGraph.getModel().getStyle(cells[i]), key, value);
                    setStyle(cells[i], style, key, pnGraph);
                }
            }
        } finally {
            // Always close the transaction, even if a style update throws.
            pnGraph.getModel().endUpdate();
        }
    }
}
/**
 * Applies the given style string to a cell through an undoable {@link StyleChange}
 * command, but only when the style actually differs from the cell's current one.
 *
 * @param cell    Cell whose style is updated.
 * @param style   Complete new style string (may be {@code null}).
 * @param key     Style key that triggered the change (carried into the command).
 * @param pnGraph Graph owning the model that executes the change.
 * @return The style that was passed in.
 * @see com.mxgraph.model.mxIGraphModel#setStyle(Object, String)
 */
public static String setStyle(Object cell, String style, String key, PNGraph pnGraph) {
    String currentStyle = pnGraph.getModel().getStyle(cell);
    boolean differs = (style == null) || !style.equals(currentStyle);
    if (differs) {
        ((mxGraphModel) pnGraph.getModel()).execute(new StyleChange(pnGraph, cell, style, key));
    }
    return style;
}
/**
 * Sets the complete style string of the specified cells. If no cells are given,
 * the current selection cells are changed. All edits run in one model transaction.
 *
 * @param style New style string for the cells.
 * @param cells Cells to change, or {@code null} for the selection cells.
 * @return The cells whose style was set.
 */
@Override
public Object[] setCellStyle(String style, Object[] cells) {
    Object[] targets = (cells != null) ? cells : getSelectionCells();
    if (targets != null) {
        model.beginUpdate();
        try {
            for (Object target : targets) {
                setStyle(target, style, null, this);
            }
        } finally {
            model.endUpdate();
        }
    }
    return targets;
}
/**
 * Generates a fresh transition name of the form {@code <prefix><index>} that is
 * not yet used by any transition of the underlying Petri net.
 *
 * @return An unused transition name.
 */
public String getNewTransitionName() {
    int index = 0;
    String candidate = MXConstants.TRANSITION_NAME_PREFIX + index;
    while (getNetContainer().getPetriNet().containsTransition(candidate)) {
        index++;
        candidate = MXConstants.TRANSITION_NAME_PREFIX + index;
    }
    return candidate;
}
/**
 * Generates a fresh place name of the form {@code <prefix><index>} that is
 * not yet used by any place of the underlying Petri net.
 *
 * @return An unused place name.
 */
public String getNewPlaceName() {
    int index = 0;
    String candidate = MXConstants.PLACE_NAME_PREFIX + index;
    while (getNetContainer().getPetriNet().containsPlace(candidate)) {
        index++;
        candidate = MXConstants.PLACE_NAME_PREFIX + index;
    }
    return candidate;
}
/**
 * Checks whether the given name is a valid generated node name for the given
 * component type, i.e. the type's name prefix followed by an integer.
 *
 * @param name Candidate node name.
 * @param type Component type ({@code PLACE} or {@code TRANSITION}).
 * @return {@code true} if the name matches {@code <prefix><integer>}.
 */
public boolean isValidNodeName(String name, PNComponent type) {
    String prefix;
    switch (type) {
    case PLACE:
        prefix = MXConstants.PLACE_NAME_PREFIX;
        break;
    case TRANSITION:
        prefix = MXConstants.TRANSITION_NAME_PREFIX;
        break;
    default:
        // Arcs (and any future component type) carry no node-name prefix.
        // The former switch had no default, leaving prefix null and causing
        // a NullPointerException on name.startsWith(prefix) below.
        return false;
    }
    if (!name.startsWith(prefix))
        return false;
    String possibleInteger = name.substring(prefix.length());
    try {
        Validate.isInteger(possibleInteger);
    } catch (ParameterException e) {
        // Suffix is not an integer -> not a generated node name; report instead of throwing.
        return false;
    }
    return true;
}
/**
 * Adds a waypoint at the given location to an arc cell.
 * <p>
 * If the arc already has waypoints, the new point is inserted between the two
 * existing waypoints whose connecting segment it lies closest to (within a
 * scale-dependent tolerance), not simply appended at the end. Non-arc cells
 * are ignored.
 *
 * @param cell Graph cell; only processed if it is of type ARC.
 * @param pt   Location of the new waypoint in graph coordinates.
 */
public void addWayPoint(PNGraphCell cell, Point pt) {
    if (cell.getType().equals(PNComponent.ARC)) {
        List<mxPoint> points = cell.getGeometry().getPoints();
        if (points != null) {
            // This code enables adding waypoints in between two existing
            // waypoints, and not just adding it at the end of the given
            // line
            if (points.size() == 0) {
                points.add(new mxPoint(pt.getX(), pt.getY()));
            } else {
                // Temporarily bracket the waypoint list with the source and target
                // node centers so every segment of the drawn polyline is covered.
                double sourceX = cell.getSource().getGeometry().getCenterX();
                double sourceY = cell.getSource().getGeometry().getCenterY();
                double targetX = cell.getTarget().getGeometry().getCenterX();
                double targetY = cell.getTarget().getGeometry().getCenterY();
                points.add(new mxPoint(targetX, targetY));
                points.add(0, new mxPoint(sourceX, sourceY));
                for (int i = 0; i < points.size() - 1; i++) {
                    mxPoint p = points.get(i);
                    double x1 = p.getX();
                    double y1 = p.getY();
                    mxPoint p2 = points.get(i + 1);
                    double x2 = p2.getX();
                    double y2 = p2.getY();
                    mxPoint newPoint = new mxPoint(pt.getX(), pt.getY());
                    double xP = newPoint.getX();
                    double yP = newPoint.getY();
                    // Distance from the click to the segment (x1,y1)-(x2,y2);
                    // 5 px tolerance, scaled with the current zoom level.
                    double comp = Line2D.ptSegDist(x1, y1, x2, y2, xP, yP);
                    if (comp <= 5.0 * getView().getScale()) {
                        points.add(i + 1, newPoint);
                        // Insertion done: force loop exit.
                        i = points.size();
                    }
                }
                // Remove the temporary source/target bracket points again.
                points.remove(points.size() - 1);
                points.remove(0);
            }
        } else {
            points = new ArrayList<mxPoint>();
            points.add(new mxPoint(pt.getX(), pt.getY()));
        }
        cell.getGeometry().setPoints(points);
        // Keep the persisted arc graphics in sync with the cell geometry.
        updatePointsInArcGraphics(cell, points);
    }
}
/**
 * Removes a waypoint from an arc cell and syncs the persisted arc graphics.
 *
 * @param cell  Graph cell; the waypoint list is only touched for ARC cells.
 * @param index 1-based waypoint index — the method removes element {@code index - 1}.
 *              NOTE(review): callers presumably pass index >= 1; an index of 0 would
 *              throw IndexOutOfBoundsException here — confirm against call sites.
 */
public void removePoint(PNGraphCell cell, int index) {
    if (cell.getType().equals(PNComponent.ARC)) {
        List<mxPoint> points = cell.getGeometry().getPoints();
        if (points != null && points.size() > 0) {
            cell.getGeometry().getPoints().remove(index - 1);
        }
    }
    // Graphics are updated unconditionally, even for non-arc cells (no-op there
    // unless an arc-graphics entry exists for the cell id).
    updatePointsInArcGraphics(cell, cell.getGeometry().getPoints());
}
/**
 * Mirrors the given waypoint list into the persisted {@link ArcGraphics} of the
 * arc with the cell's id. Does nothing if there are no arc graphics for the cell
 * or if {@code points} is {@code null}.
 *
 * @param cell   Arc cell whose id identifies the graphics entry.
 * @param points Waypoints to persist; an empty list clears the positions.
 */
protected void updatePointsInArcGraphics(PNGraphCell cell, List<mxPoint> points) {
    ArcGraphics arcGraphics = getNetContainer().getPetriNetGraphics().getArcGraphics().get(cell.getId());
    // Guard clauses replace the former nested ifs; the old "points.size() >= 0"
    // condition was always true and has been dropped.
    if (arcGraphics == null || points == null) {
        return;
    }
    Vector<Position> vector = new Vector<Position>();
    for (mxPoint p : points) {
        vector.add(new Position(p.getX(), p.getY()));
    }
    arcGraphics.setPositions(vector);
}
/**
 * Switches the graph from token-game execution back to editing mode:
 * re-enables cell selection, resets the net marking, and repaints.
 */
public void enterEditingMode() {
    setExecution(false);
    setCellsSelectable(true);
    // Discard the simulation state of the underlying net.
    getNetContainer().getPetriNet().reset();
    refresh();
}
/**
 * Fires the Petri net transition represented by the given cell, notifies
 * registered graph listeners, and repaints the graph.
 *
 * @param cell Cell whose id names the transition to fire.
 * @throws PNException if the transition is not enabled or firing fails.
 */
public void fireTransition(PNGraphCell cell) throws PNException {
    getNetContainer().getPetriNet().fire(cell.getId());
    graphListenerSupport.notifyTransitionFired(cell);
    refresh();
}
/**
 * Sets the "silent" flag of the transition with the given id.
 *
 * @param id        Transition id.
 * @param setSilent New silent state.
 */
public void updateTransitionSilent(String id, boolean setSilent) {
    getNetContainer().getPetriNet().getTransition(id).setSilent(setSilent);
}

/**
 * Returns the "silent" flag of the transition with the given name.
 *
 * @param name Transition name.
 * @return {@code true} if the transition is marked silent.
 */
public boolean getTransitionSilentState(String name) {
    return getNetContainer().getPetriNet().getTransition(name).isSilent();
}
/**
 * Deletes every waypoint of every arc in the net, both from the cell geometry
 * and from the persisted arc graphics, straightening all arcs.
 */
public void removeAllArcPoints() {
    for (AbstractFlowRelation fr : getNetContainer().getPetriNet().getFlowRelations()) {
        PNGraphCell arcCell = getNodeCell(fr.getName());
        arcCell.getGeometry().setPoints(new ArrayList<mxPoint>());
        // NOTE: a former unused local read getView().getState(arcCell).getAbsolutePoint(0)
        // here; it had no effect and could NPE for arcs without a view state, so it was removed.
        ArcGraphics arcGraphics = getNetContainer().getPetriNetGraphics().getArcGraphics().get(arcCell.getId());
        arcGraphics.setPositions(new Vector<Position>());
    }
}
/**
 * Toggles how arc constraints are rendered and repaints the graph.
 *
 * @param b Stored into {@code hideContraintsAsTokens}.
 *          NOTE(review): despite the method name, {@code true} sets the
 *          "hide constraints as tokens" flag — confirm the intended polarity
 *          against the rendering code that reads this field.
 */
public void setTokenOnArcVisibility(boolean b) {
    this.hideContraintsAsTokens = b;
    refresh();
}
/**
 * @return {@code true} if the loaded net already carried graphical layout
 *         information (or was empty) when this graph was initialized.
 */
public boolean containedGraphics() {
    return containedGraphics;
}
/**
 * Returns the default token size from the Wolfgang properties.
 *
 * @return Default token size in pixels.
 * @throws RuntimeException if the properties cannot be read. The previous
 *         implementation fell through to {@code return (Double) null;}, which
 *         is a guaranteed NullPointerException on unboxing with no context;
 *         wrapping the original cause is strictly more informative.
 */
protected double getDefaultTokenSize() {
    try {
        return WolfgangProperties.getInstance().getDefaultTokenSize();
    } catch (PropertyException e) {
        throw new RuntimeException("Cannot read default token size from Wolfgang properties", e);
    } catch (IOException e) {
        throw new RuntimeException("Cannot access Wolfgang properties", e);
    }
}
/**
 * Snaps the given coordinate to the grid, but only when snap-to-grid is
 * enabled in the Wolfgang properties. If the properties cannot be read,
 * the value is returned unchanged (after logging the stack trace).
 *
 * @param value Coordinate to snap.
 * @return The snapped coordinate, or the original value.
 */
@Override
public double snap(double value) {
    try {
        boolean snapEnabled = WolfgangProperties.getInstance().getSnapToGrid();
        if (snapEnabled) {
            return super.snap(value);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return value;
}
/**
 * Listeners are not notified when the underlying Petri reports structure
 * changes (new elements).<br>
 * This is done in the methods
 * {@link #addNewPlace(mxPoint, String, Offset, Dimension)},
 * {@link #addNewTransition(mxPoint, String, Offset, Dimension)} ans
 * {@link #addNewFlowRelation(PNGraphCell, PNGraphCell, Offset, List, mxPoint, String)}
 * .
 */
@Override
public void placeAdded(PlaceChangeEvent event) {
    // Intentionally empty: add-notifications are raised by addNewPlace(...) instead.
}
@Override
public void placeRemoved(PlaceChangeEvent event) {
    graphListenerSupport.notifyPlaceRemoved(event.place);
}
/**
 * Listeners are not notified when the underlying Petri reports structure
 * changes (new elements).<br>
 * This is done in the methods
 * {@link #addNewPlace(mxPoint, String, Offset, Dimension)},
 * {@link #addNewTransition(mxPoint, String, Offset, Dimension)} ans
 * {@link #addNewFlowRelation(PNGraphCell, PNGraphCell, Offset, List, mxPoint, String)}
 * .
 */
@Override
public void transitionAdded(TransitionChangeEvent event) {
    // Intentionally empty: add-notifications are raised by addNewTransition(...) instead.
}
@Override
public void transitionRemoved(TransitionChangeEvent event) {
    graphListenerSupport.notifyTransitionRemoved(event.transition);
}
/**
 * Listeners are not notified when the underlying Petri reports structure
 * changes (new elements).<br>
 * This is done in the methods
 * {@link #addNewPlace(mxPoint, String, Offset, Dimension)},
 * {@link #addNewTransition(mxPoint, String, Offset, Dimension)} ans
 * {@link #addNewFlowRelation(PNGraphCell, PNGraphCell, Offset, List, mxPoint, String)}
 * .
 */
@Override
public void relationAdded(RelationChangeEvent event) {
    // Intentionally empty: add-notifications are raised by addNewFlowRelation(...) instead.
}
@Override
public void relationRemoved(RelationChangeEvent event) {
    graphListenerSupport.notifyRelationRemoved(event.relation);
}
@Override
public void structureChanged() {
    // Intentionally empty: fine-grained callbacks above cover all relevant changes.
}
/**
 * Looks up the graph cell (place, transition or arc) whose id equals the given
 * name by scanning the two-level cell hierarchy below the model root.
 *
 * @param name Cell id to search for.
 * @return The matching cell, or {@code null} if none exists.
 */
public PNGraphCell getNodeCell(String name) {
    mxICell root = (mxICell) model.getRoot();
    for (int layerIdx = 0; layerIdx < root.getChildCount(); layerIdx++) {
        mxICell layer = root.getChildAt(layerIdx);
        for (int cellIdx = 0; cellIdx < layer.getChildCount(); cellIdx++) {
            PNGraphCell candidate = (PNGraphCell) layer.getChildAt(cellIdx);
            if (candidate.getId().equals(name)) {
                return candidate;
            }
        }
    }
    return null;
}
}
|
src/de/uni/freiburg/iig/telematik/wolfgang/graph/PNGraph.java
|
package de.uni.freiburg.iig.telematik.wolfgang.graph;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.geom.Line2D;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import javax.swing.JOptionPane;
import com.mxgraph.canvas.mxGraphics2DCanvas;
import com.mxgraph.canvas.mxICanvas;
import com.mxgraph.canvas.mxImageCanvas;
import com.mxgraph.model.mxGeometry;
import com.mxgraph.model.mxGraphModel;
import com.mxgraph.model.mxICell;
import com.mxgraph.shape.mxIShape;
import com.mxgraph.util.mxConstants;
import com.mxgraph.util.mxEvent;
import com.mxgraph.util.mxEventObject;
import com.mxgraph.util.mxEventSource.mxIEventListener;
import com.mxgraph.util.mxPoint;
import com.mxgraph.util.mxRectangle;
import com.mxgraph.util.mxStyleUtils;
import com.mxgraph.util.mxUtils;
import com.mxgraph.view.mxCellState;
import com.mxgraph.view.mxGraph;
import com.mxgraph.view.mxGraphSelectionModel;
import com.mxgraph.view.mxGraphView;
import de.invation.code.toval.graphic.misc.CircularPointGroup;
import de.invation.code.toval.graphic.misc.PColor;
import de.invation.code.toval.graphic.util.GraphicUtils;
import de.invation.code.toval.properties.PropertyException;
import de.invation.code.toval.types.Multiset;
import de.invation.code.toval.validate.ParameterException;
import de.invation.code.toval.validate.Validate;
import de.uni.freiburg.iig.telematik.sepia.event.PlaceChangeEvent;
import de.uni.freiburg.iig.telematik.sepia.event.RelationChangeEvent;
import de.uni.freiburg.iig.telematik.sepia.event.StructureListener;
import de.uni.freiburg.iig.telematik.sepia.event.TransitionChangeEvent;
import de.uni.freiburg.iig.telematik.sepia.exception.PNException;
import de.uni.freiburg.iig.telematik.sepia.graphic.AbstractGraphicalPN;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.AbstractCPNGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.AnnotationGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.ArcGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.NodeGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.TokenGraphics;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.attributes.Dimension;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.attributes.Offset;
import de.uni.freiburg.iig.telematik.sepia.graphic.netgraphics.attributes.Position;
import de.uni.freiburg.iig.telematik.sepia.petrinet.AbstractFlowRelation;
import de.uni.freiburg.iig.telematik.sepia.petrinet.AbstractMarking;
import de.uni.freiburg.iig.telematik.sepia.petrinet.AbstractPlace;
import de.uni.freiburg.iig.telematik.sepia.petrinet.AbstractTransition;
import de.uni.freiburg.iig.telematik.wolfgang.editor.properties.WolfgangProperties;
import de.uni.freiburg.iig.telematik.wolfgang.editor.properties.WolfgangPropertyAdapter;
import de.uni.freiburg.iig.telematik.wolfgang.graph.change.StyleChange;
import de.uni.freiburg.iig.telematik.wolfgang.graph.util.MXConstants;
import de.uni.freiburg.iig.telematik.wolfgang.graph.util.Utils;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNProperties;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNProperties.PNComponent;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNPropertiesListener;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNProperty;
import de.uni.freiburg.iig.telematik.wolfgang.properties.PNPropertyChangeEvent;
public abstract class PNGraph extends mxGraph implements PNPropertiesListener, mxIEventListener, StructureListener {
// Graphical Petri net (net structure + layout graphics) this graph renders.
private AbstractGraphicalPN<?, ?, ?, ?, ?, ?, ?, ?, ?> netContainer = null;
// Shared property model used to propagate component property changes.
private PNProperties properties = null;
// True while the user has a cell's label (not the cell body) selected.
private boolean labelSelected = false;
// True while the graph is in token-game execution mode (cells locked).
private boolean isExecution = false;
// Rendering flag for arc constraints; see setTokenOnArcVisibility(boolean).
protected boolean hideContraintsAsTokens = true;
// Whether the loaded net already carried layout graphics (or was empty).
private boolean containedGraphics = false;
// Translates mxGraph change events into net/graphics updates.
private PNGraphChangeHandler changeHandler;
// Fan-out helper for PNGraphListener notifications.
protected PNGraphListenerSupport graphListenerSupport = new PNGraphListenerSupport();
/**
 * Creates a graph view for the given graphical Petri net.
 * <p>
 * Registers this instance as structure listener on the net, as property
 * listener on the given properties, and as event listener for selection/model
 * changes; then configures mxGraph behavior flags and populates the graph
 * from the net via {@link #initialize()}.
 *
 * @param netContainer Graphical Petri net to display; must not be {@code null}.
 * @param properties   Property model to observe; must not be {@code null}.
 */
public PNGraph(AbstractGraphicalPN<?, ?, ?, ?, ?, ?, ?, ?, ?> netContainer, PNProperties properties) {
    super();
    addWGPropertiesListener();
    try {
        setGridSize(WolfgangProperties.getInstance().getGridSize());
    } catch (PropertyException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    Validate.notNull(netContainer);
    Validate.notNull(properties);
    this.netContainer = netContainer;
    this.netContainer.getPetriNet().addStructureListener(this);
    this.properties = properties;
    this.properties.addPNPropertiesListener(this);
    // Receive selection changes, resize events and model change/undo events.
    this.getSelectionModel().addListener(mxEvent.CHANGE, this);
    this.addListener(mxEvent.RESIZE_CELLS, this);
    this.getModel().addListener(mxEvent.CHANGE, this);
    this.getModel().addListener(mxEvent.UNDO, this);
    changeHandler = new PNGraphChangeHandler(this);
    // mxGraph behavior configuration.
    setCellsBendable(true);
    setHtmlLabels(true);
    setAllowDanglingEdges(false);
    setMultigraph(true);
    setCellsEditable(false);
    setDisconnectOnMove(false);
    setExtendParents(false); // disables extending parents after adding
    setVertexLabelsMovable(true);
    try {
        initialize();
    } catch (IOException e) {
        JOptionPane.showMessageDialog(null, "Cannot write Graphicsstyle to FileSystem " + e.getMessage(), "IO Exception", JOptionPane.ERROR_MESSAGE);
    } catch (PropertyException e) {
        JOptionPane.showMessageDialog(null, e.getMessage(), "Property Exception", JOptionPane.ERROR_MESSAGE);
    }
}
/**
 * Subscribes to global Wolfgang property changes so the graph follows
 * grid-size, token-size and token-distance adjustments at runtime.
 */
private void addWGPropertiesListener() {
    try {
        WolfgangProperties.getInstance().addListener(new WolfgangPropertyAdapter() {
            @Override
            public void gridSizeChanged(int gridSize) {
                try {
                    // Re-read from properties rather than trusting the event payload.
                    setGridSize(WolfgangProperties.getInstance().getGridSize());
                } catch (PropertyException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
                refresh();
            }
            @Override
            public void defaultTokenSizeChanged(int defaultTokenSize) {
                // Token size is read during painting; a repaint suffices.
                refresh();
            }
            @Override
            public void defaultTokenDistanceChanged(int defaultTokenDistance) {
                refresh();
            }
        });
    } catch (IOException e2) {
        // TODO Auto-generated catch block
        e2.printStackTrace();
    }
}
@SuppressWarnings("rawtypes")
/**
 * Populates the graph from the net container: for every place, transition and
 * flow relation a graph cell is created. Missing graphics entries are filled
 * with defaults first so the style extraction below never sees null graphics.
 *
 * @throws PropertyException if default style properties cannot be read.
 * @throws IOException       if the properties file cannot be accessed.
 */
private void initialize() throws PropertyException, IOException {
    // Check if net contains Graphical Info and keep that information
    if (netContainer.getPetriNetGraphics().getPlaceGraphics().size() > 0 || netContainer.getPetriNet().isEmpty()) {
        containedGraphics = true;
    }
    // Check if net container is empty.
    // If not, add all PN components to the graph.
    if (!netContainer.getPetriNet().isEmpty()) {
        getModel().beginUpdate();
        for (AbstractPlace place : getNetContainer().getPetriNet().getPlaces()) {
            // Backfill missing node/label graphics with defaults before styling.
            if (netContainer.getPetriNetGraphics().getPlaceGraphics().get(place.getName()) == null)
                netContainer.getPetriNetGraphics().getPlaceGraphics().put(place.getName(), new NodeGraphics());
            if (netContainer.getPetriNetGraphics().getPlaceLabelAnnotationGraphics().get(place.getName()) == null)
                netContainer.getPetriNetGraphics().getPlaceLabelAnnotationGraphics().put(place.getName(), new AnnotationGraphics());
            addPlaceCell(
                    place.getName(),
                    MXConstants.extractNodeStyleFromGraphics(PNComponent.PLACE, netContainer.getPetriNetGraphics().getPlaceGraphics().get(place.getName()), netContainer.getPetriNetGraphics()
                            .getPlaceLabelAnnotationGraphics().get(place.getName())));
        }
        for (AbstractTransition transition : getNetContainer().getPetriNet().getTransitions()) {
            if (netContainer.getPetriNetGraphics().getTransitionGraphics().get(transition.getName()) == null)
                netContainer.getPetriNetGraphics().getTransitionGraphics().put(transition.getName(), new NodeGraphics());
            if (netContainer.getPetriNetGraphics().getTransitionLabelAnnotationGraphics().get(transition.getName()) == null)
                netContainer.getPetriNetGraphics().getTransitionLabelAnnotationGraphics().put(transition.getName(), new AnnotationGraphics());
            addTransitionCell(
                    transition.getName(),
                    MXConstants.extractNodeStyleFromGraphics(PNComponent.TRANSITION, netContainer.getPetriNetGraphics().getTransitionGraphics().get(transition.getName()), netContainer
                            .getPetriNetGraphics().getTransitionLabelAnnotationGraphics().get(transition.getName())));
        }
        // Arcs last: addArcCell connects source/target cells created above.
        for (AbstractFlowRelation relation : getNetContainer().getPetriNet().getFlowRelations()) {
            if (netContainer.getPetriNetGraphics().getArcGraphics().get(relation.getName()) == null)
                netContainer.getPetriNetGraphics().getArcGraphics().put(relation.getName(), new ArcGraphics());
            if (netContainer.getPetriNetGraphics().getArcAnnotationGraphics().get(relation.getName()) == null)
                netContainer.getPetriNetGraphics().getArcAnnotationGraphics().put(relation.getName(), new AnnotationGraphics());
            addArcCell(
                    relation.getName(),
                    MXConstants.extractArcStyleFromGraphics(netContainer.getPetriNetGraphics().getArcGraphics().get(relation.getName()), netContainer.getPetriNetGraphics()
                            .getArcAnnotationGraphics().get(relation.getName())));
        }
        getModel().endUpdate();
    }
}
// Marks whether the user currently has a cell's label (not the cell) selected.
public void setLabelSelected(boolean selected) {
    this.labelSelected = selected;
}
// @return true if a cell label, rather than a cell body, is selected.
public boolean isLabelSelected() {
    return labelSelected;
}
// Registers a listener for graph-level events (add/remove/fire/select).
public void addPNGraphListener(PNGraphListener listener) {
    graphListenerSupport.addPNGraphListener(listener);
}
// Unregisters a previously added graph listener.
public void removePNGraphListener(PNGraphListener listener) {
    graphListenerSupport.removePNGraphListener(listener);
}
/**
 * Forces every selected place back to a square shape after a resize, using the
 * smaller of width/height as the new edge length. Transitions and arcs in the
 * selection are ignored.
 */
private void ensureValidPlaceSize() {
    for (PNGraphCell selectedCell : getSelectedGraphCells()) {
        if (selectedCell.getType() != PNComponent.PLACE) {
            continue;
        }
        Rectangle bounds = selectedCell.getGeometry().getRectangle();
        if (bounds.getHeight() == bounds.getWidth()) {
            // Already square — skip this place. (The previous code returned here,
            // which silently aborted the check for all remaining selected places.)
            continue;
        }
        int targetSize = (int) Math.round(Math.min(bounds.getWidth(), bounds.getHeight()));
        mxRectangle targetBounds = getView().getState(selectedCell).getBoundingBox();
        targetBounds.setWidth(targetSize);
        targetBounds.setHeight(targetSize);
        resizeCell(selectedCell, targetBounds);
        setSelectionCell(selectedCell);
    }
}
/**
 * Collects all currently selected objects that are {@link PNGraphCell}s.
 *
 * @return The selected graph cells (possibly empty, never {@code null}).
 */
private Set<PNGraphCell> getSelectedGraphCells() {
    Set<PNGraphCell> result = new HashSet<PNGraphCell>();
    for (Object selected : getSelectionCells()) {
        if (selected instanceof PNGraphCell) {
            result.add((PNGraphCell) selected);
        }
    }
    return result;
}
/** @return The graphical Petri net displayed by this graph. */
public AbstractGraphicalPN<?, ?, ?, ?, ?, ?, ?, ?, ?> getNetContainer() {
    return netContainer;
}
/** @return The shared component property model. */
protected PNProperties getPNProperties() {
    return properties;
}
/**
 * Adds a new place with default style, label offset and dimension taken from
 * the Wolfgang properties.
 *
 * @param point Center position of the new place in graph coordinates.
 * @return The created cell, or {@code null} if the net rejected the place.
 * @throws IOException       if the properties file cannot be accessed.
 * @throws PropertyException if a default property cannot be read.
 */
public PNGraphCell addNewPlace(mxPoint point) throws PropertyException, IOException {
    Offset offset = new Offset(WolfgangProperties.getInstance().getDefaultHorizontalLabelOffset(), WolfgangProperties.getInstance().getDefaultVerticalLabelOffset());
    Dimension dimension = new Dimension(WolfgangProperties.getInstance().getDefaultPlaceSize(), WolfgangProperties.getInstance().getDefaultPlaceSize());
    return addNewPlace(point, MXConstants.getDefaultNodeStyle(PNComponent.PLACE), offset, dimension);
}
/**
 * Adds a new place with an existing style: generates a fresh name, registers
 * the place in the net, stores node and label graphics, creates the graph cell
 * (unless one already exists under the generated name) and notifies listeners.
 *
 * @param point     Center position of the new place.
 * @param style     mxGraph style string for node and label.
 * @param offset    Label offset relative to the node.
 * @param dimension Node dimension.
 * @return The place cell, or {@code null} if the net rejected the place.
 */
public PNGraphCell addNewPlace(mxPoint point, String style, Offset offset, Dimension dimension) {
    String nodeName = getNewPlaceName();
    // A cell may already exist for the generated name (e.g. after undo); reuse its id.
    PNGraphCell newPlaceCell = getNodeCell(nodeName);
    if (newPlaceCell != null) {
        nodeName = newPlaceCell.getId();
    }
    if (getNetContainer().getPetriNet().addPlace(nodeName)) {
        NodeGraphics nodeGraphics = Utils.createNodeGraphicsFromStyle(style);
        nodeGraphics.setPosition(new Position(point.getX(), point.getY()));
        nodeGraphics.setDimension(dimension);
        AnnotationGraphics annotationGraphics = Utils.createAnnotationGraphicsFromStyle(style);
        annotationGraphics.setOffset(offset);
        getNetContainer().getPetriNetGraphics().getPlaceGraphics().put(nodeName, nodeGraphics);
        getNetContainer().getPetriNetGraphics().getPlaceLabelAnnotationGraphics().put(nodeName, annotationGraphics);
        if (newPlaceCell == null) {
            newPlaceCell = addPlaceCell(nodeName, style);
        }
        graphListenerSupport.notifyPlaceAdded(getNetContainer().getPetriNet().getPlace(nodeName));
        return newPlaceCell;
    }
    return null;
}
/**
 * Inserts a cell for an already-registered place into the graph, using the
 * node and label-annotation graphics stored in the net container.
 *
 * @param nodeName Name of the place (must exist in net and graphics maps).
 * @param style    mxGraph style string for the cell.
 * @return The inserted place cell.
 */
@SuppressWarnings("rawtypes")
public PNGraphCell addPlaceCell(String nodeName, String style) {
    AbstractPlace place = getNetContainer().getPetriNet().getPlace(nodeName);
    NodeGraphics nodeGraphics = getNetContainer().getPetriNetGraphics().getPlaceGraphics(nodeName);
    AnnotationGraphics annotationGraphics = getNetContainer().getPetriNetGraphics().getPlaceLabelAnnotationGraphics(nodeName);
    PNGraphCell newCell = createPlaceCell(place.getName(), place.getLabel(), nodeGraphics.getPosition().getX(), nodeGraphics.getPosition().getY(), nodeGraphics.getDimension().getX(), nodeGraphics
            .getDimension().getY(), style);
    // Apply the stored label offset to the cell geometry.
    double offx = annotationGraphics.getOffset().getX();
    double offy = annotationGraphics.getOffset().getY();
    mxPoint offset = new mxPoint(offx, offy);
    newCell.getGeometry().setOffset(offset);
    // if (nodeGraphics == null || annotationGraphics == null) {
    // mxCellState state = getView().getState(newCell, true);
    // }
    addCell(newCell, getDefaultParent());
    return newCell;
}
/**
 * Builds (without inserting) a place vertex cell whose geometry is centered
 * on the given position.
 *
 * @param name   Cell id (the place name).
 * @param label  Display label.
 * @param posX   Center x coordinate.
 * @param posY   Center y coordinate.
 * @param width  Node width.
 * @param height Node height.
 * @param style  mxGraph style string.
 * @return The new, not-yet-inserted cell.
 */
public PNGraphCell createPlaceCell(String name, String label, double posX, double posY, double width, double height, String style) {
    // mxGeometry is anchored top-left; convert from the given center position.
    mxGeometry geometry = new mxGeometry(posX - (width / 2), posY - (height / 2), width, height);
    geometry.setRelative(false);
    PNGraphCell placeCell = new PNGraphCell(label, geometry, style, PNComponent.PLACE);
    placeCell.setId(name);
    placeCell.setVertex(true);
    placeCell.setConnectable(true);
    return placeCell;
}
/**
 * Adds a flow relation between the two cells with the default arc style and
 * default label offset from the Wolfgang properties.
 *
 * @param sourceCell Source node cell.
 * @param targetCell Target node cell.
 * @return The created arc cell, or {@code null} for an invalid combination.
 * @throws IOException       if the properties file cannot be accessed.
 * @throws PropertyException if a default property cannot be read.
 */
public PNGraphCell addNewFlowRelation(PNGraphCell sourceCell, PNGraphCell targetCell) throws PropertyException, IOException {
    Offset offset = new Offset(WolfgangProperties.getInstance().getDefaultHorizontalLabelOffset(), WolfgangProperties.getInstance().getDefaultVerticalLabelOffset());
    return addNewFlowRelation(sourceCell, targetCell, offset, null, null, MXConstants.getDefaultArcStyle());
}
/**
 * Adds a flow relation (place→transition or transition→place) to the net,
 * stores its arc and annotation graphics, creates the arc cell if necessary
 * and notifies listeners.
 *
 * @param sourceCell     Source node cell.
 * @param targetCell     Target node cell.
 * @param offset         Label offset for the arc annotation.
 * @param points         Optional waypoints (relative to {@code referencePoint}).
 * @param referencePoint Optional origin the waypoints are translated by.
 * @param style          mxGraph style string for the arc.
 * @return The arc cell, or {@code null} if the source/target combination is
 *         invalid (place→place or transition→transition).
 */
public PNGraphCell addNewFlowRelation(PNGraphCell sourceCell, PNGraphCell targetCell, Offset offset, List<mxPoint> points, mxPoint referencePoint, String style) {
    AbstractFlowRelation relation = null;
    if (sourceCell.getType() == PNComponent.PLACE && targetCell.getType() == PNComponent.TRANSITION) {
        relation = getNetContainer().getPetriNet().addFlowRelationPT(sourceCell.getId(), targetCell.getId());
    } else if (sourceCell.getType() == PNComponent.TRANSITION && targetCell.getType() == PNComponent.PLACE) {
        relation = getNetContainer().getPetriNet().addFlowRelationTP(sourceCell.getId(), targetCell.getId());
    }
    if (relation == null) {
        // Invalid combination. The former code called relation.getName() before this
        // check, which threw a NullPointerException instead of returning null.
        return null;
    }
    PNGraphCell newRelationCell = getNodeCell(relation.getName());
    ArcGraphics arcGraphics = Utils.createArcGraphicsFromStyle(style);
    if (points != null && !points.isEmpty() && referencePoint != null) {
        // Waypoints are given relative to referencePoint; translate to absolute positions.
        Vector<Position> vector = new Vector<Position>();
        for (mxPoint p : points) {
            vector.add(new Position(p.getX() + referencePoint.getX(), p.getY() + referencePoint.getY()));
        }
        arcGraphics.setPositions(vector);
    }
    AnnotationGraphics annotationGraphics = Utils.createAnnotationGraphicsFromStyle(style);
    annotationGraphics.setOffset(offset);
    getNetContainer().getPetriNetGraphics().getArcGraphics().put(relation.getName(), arcGraphics);
    getNetContainer().getPetriNetGraphics().getArcAnnotationGraphics().put(relation.getName(), annotationGraphics);
    if (newRelationCell == null) {
        newRelationCell = addArcCell(relation.getName(), style);
    }
    graphListenerSupport.notifyRelationAdded(relation);
    return newRelationCell;
}
@SuppressWarnings("rawtypes")
/**
 * Inserts a cell for an already-registered flow relation into the graph,
 * connecting the source and target node cells and applying the stored arc
 * graphics (label offset and waypoints).
 *
 * @param arcID Name of the flow relation (must exist in net and graphics maps).
 * @param style mxGraph style string for the arc.
 * @return The inserted arc cell.
 */
public PNGraphCell addArcCell(String arcID, String style) {
    AbstractFlowRelation relation = getNetContainer().getPetriNet().getFlowRelation(arcID);
    ArcGraphics arcGraphics = getNetContainer().getPetriNetGraphics().getArcGraphics(arcID);
    AnnotationGraphics annotationGraphics = getNetContainer().getPetriNetGraphics().getArcAnnotationGraphics(arcID);
    PNGraphCell newCell = createArcCell(arcID, getArcConstraint(relation), style);
    addEdge(newCell, getDefaultParent(), getNodeCell(relation.getSource().getName()), getNodeCell(relation.getTarget().getName()), null);
    // Apply the stored label offset.
    double offx = annotationGraphics.getOffset().getX();
    double offy = annotationGraphics.getOffset().getY();
    mxPoint offset = new mxPoint(offx, offy);
    newCell.getGeometry().setOffset(offset);
    // Copy persisted waypoints into the cell geometry.
    Vector<Position> positions = arcGraphics.getPositions();
    List<mxPoint> points = new ArrayList<mxPoint>();
    for (Position position : positions) {
        points.add(new mxPoint(position.getX(), position.getY()));
    }
    newCell.getGeometry().setPoints(points);
    return newCell;
}
/**
 * Adds a new transition with default style, label offset and dimension taken
 * from the Wolfgang properties.
 *
 * @param point Center position of the new transition in graph coordinates.
 * @return The created cell, or {@code null} if the net rejected the transition.
 * @throws IOException       if the properties file cannot be accessed.
 * @throws PropertyException if a default property cannot be read.
 */
public PNGraphCell addNewTransition(mxPoint point) throws PropertyException, IOException {
    Offset offset = new Offset(WolfgangProperties.getInstance().getDefaultHorizontalLabelOffset(), WolfgangProperties.getInstance().getDefaultVerticalLabelOffset());
    Dimension dimension = new Dimension(WolfgangProperties.getInstance().getDefaultTransitionWidth(), WolfgangProperties.getInstance().getDefaultTransitionHeight());
    return addNewTransition(point, MXConstants.getDefaultNodeStyle(PNComponent.TRANSITION), offset, dimension);
}
/**
 * Adds a new transition with an existing style: generates a fresh name,
 * registers the transition in the net, stores node and label graphics, creates
 * the graph cell (unless one already exists) and notifies listeners.
 * Mirrors {@code addNewPlace(mxPoint, String, Offset, Dimension)}.
 *
 * @param point     Center position of the new transition.
 * @param style     mxGraph style string for node and label.
 * @param offset    Label offset relative to the node.
 * @param dimension Node dimension.
 * @return The transition cell, or {@code null} if the net rejected it.
 */
public PNGraphCell addNewTransition(mxPoint point, String style, Offset offset, Dimension dimension) {
    String nodeName = getNewTransitionName();
    // A cell may already exist for the generated name (e.g. after undo); reuse its id.
    PNGraphCell newTransitionCell = getNodeCell(nodeName);
    if (newTransitionCell != null) {
        nodeName = newTransitionCell.getId();
    }
    if (getNetContainer().getPetriNet().addTransition(nodeName)) {
        NodeGraphics nodeGraphics = Utils.createNodeGraphicsFromStyle(style);
        nodeGraphics.setPosition(new Position(point.getX(), point.getY()));
        nodeGraphics.setDimension(dimension);
        AnnotationGraphics annotationGraphics = Utils.createAnnotationGraphicsFromStyle(style);
        annotationGraphics.setOffset(offset);
        getNetContainer().getPetriNetGraphics().getTransitionGraphics().put(nodeName, nodeGraphics);
        getNetContainer().getPetriNetGraphics().getTransitionLabelAnnotationGraphics().put(nodeName, annotationGraphics);
        if (newTransitionCell == null) {
            newTransitionCell = addTransitionCell(nodeName, style);
        }
        graphListenerSupport.notifyTransitionAdded(getNetContainer().getPetriNet().getTransition(nodeName));
        return newTransitionCell;
    }
    return null;
}
/**
 * Inserts a cell for an already-registered transition into the graph, using
 * the node and label-annotation graphics stored in the net container.
 * Mirrors {@code addPlaceCell(String, String)}.
 *
 * @param nodeName Name of the transition (must exist in net and graphics maps).
 * @param style    mxGraph style string for the cell.
 * @return The inserted transition cell.
 */
@SuppressWarnings("rawtypes")
public PNGraphCell addTransitionCell(String nodeName, String style) {
    AbstractTransition transition = getNetContainer().getPetriNet().getTransition(nodeName);
    NodeGraphics nodeGraphics = getNetContainer().getPetriNetGraphics().getTransitionGraphics(nodeName);
    AnnotationGraphics annotationGraphics = getNetContainer().getPetriNetGraphics().getTransitionLabelAnnotationGraphics(nodeName);
    PNGraphCell newCell = createTransitionCell(transition.getName(), transition.getLabel(), nodeGraphics.getPosition().getX(), nodeGraphics.getPosition().getY(), nodeGraphics.getDimension()
            .getX(), nodeGraphics.getDimension().getY(), style);
    // Apply the stored label offset to the cell geometry.
    double offx = annotationGraphics.getOffset().getX();
    double offy = annotationGraphics.getOffset().getY();
    mxPoint offset = new mxPoint(offx, offy);
    newCell.getGeometry().setOffset(offset);
    // if (nodeGraphics == null || annotationGraphics == null) {
    // mxCellState state = getView().getState(newCell, true);
    // }
    addCell(newCell, getDefaultParent());
    return newCell;
}
/**
 * Builds (without inserting) a transition vertex cell whose geometry is
 * centered on the given position.
 *
 * @param name   Cell id (the transition name).
 * @param label  Display label.
 * @param posX   Center x coordinate.
 * @param posY   Center y coordinate.
 * @param width  Node width.
 * @param height Node height.
 * @param style  mxGraph style string.
 * @return The new, not-yet-inserted cell.
 */
public PNGraphCell createTransitionCell(String name, String label, double posX, double posY, double width, double height, String style) {
    // mxGeometry is anchored top-left; convert from the given center position.
    mxGeometry geometry = new mxGeometry(posX - (width / 2), posY - (height / 2), width, height);
    geometry.setRelative(false);
    PNGraphCell transitionCell = new PNGraphCell(label, geometry, style, PNComponent.TRANSITION);
    transitionCell.setId(name);
    transitionCell.setVertex(true);
    transitionCell.setConnectable(true);
    return transitionCell;
}
/**
 * Builds (without inserting) an arc edge cell with relative geometry.
 *
 * @param name  Cell id (the flow relation name).
 * @param label Display label (typically the arc constraint).
 * @param style mxGraph style string.
 * @return The new, not-yet-inserted edge cell.
 */
public PNGraphCell createArcCell(String name, String label, String style) {
    mxGeometry geometry = new mxGeometry();
    geometry.setRelative(true);
    PNGraphCell arcCell = new PNGraphCell(label, geometry, style, PNComponent.ARC);
    arcCell.setId(name);
    // Mark the cell as an edge, not a vertex.
    arcCell.setVertex(false);
    arcCell.setEdge(true);
    arcCell.setConnectable(true);
    return arcCell;
}
public abstract void updatePlaceState(String name, Multiset<String> input);
/**
 * During token-game execution all cells are locked; otherwise the
 * default mxGraph locking rules apply.
 */
@Override
public boolean isCellLocked(Object cell) {
    return isExecution || super.isCellLocked(cell);
}

/**
 * No new connections may be drawn while the net is being executed;
 * otherwise the default mxGraph connectability rules apply.
 */
@Override
public boolean isCellConnectable(Object cell) {
    return !isExecution && super.isCellConnectable(cell);
}
/** @return {@code true} while the graph is in token-game execution mode. */
public boolean isExecution() {
    return isExecution;
}
/** Switches token-game execution mode on or off (affects locking/connectability). */
public void setExecution(boolean isExecution) {
    this.isExecution = isExecution;
}
@Override
/**
 * Constructs a new view to be used in this graph.
 * Returns the Wolfgang-specific {@code GraphView} instead of the mxGraph default.
 */
protected mxGraphView createGraphView() {
    return new GraphView(this);
}
/**
 * Returns the tooltip for the given cell, dispatching to the type-specific
 * subclass hooks. Non-{@link PNGraphCell} objects yield an empty tooltip.
 *
 * @param object Candidate cell.
 * @return The tooltip text, or {@code ""}.
 */
@Override
public String getToolTipForCell(Object object) {
    if (!(object instanceof PNGraphCell)) {
        return "";
    }
    PNGraphCell cell = (PNGraphCell) object;
    switch (cell.getType()) {
    case ARC:
        return getArcToolTip(cell);
    case PLACE:
        return getPlaceToolTip(cell);
    case TRANSITION:
        return getTransitionToolTip(cell);
    }
    return "";
}
// Tooltip text for a place cell.
protected abstract String getPlaceToolTip(PNGraphCell cell);
// Tooltip text for a transition cell.
protected abstract String getTransitionToolTip(PNGraphCell cell);
// Tooltip text for an arc cell.
protected abstract String getArcToolTip(PNGraphCell cell);
// Label/constraint string rendered on the given flow relation.
protected abstract String getArcConstraint(AbstractFlowRelation relation);
// Color associated with the given token color name.
public abstract Color getTokenColorForName(String name);
// Changes the color associated with the given token color name.
public abstract void updateTokenColor(String name, Color value);
// Constraint multiset of the arc with the given name.
public abstract Multiset<String> getConstraintforArc(String name);
// Writes a new constraint multiset for the arc with the given name.
public abstract void updateConstraint(String name, Multiset value);
// Refreshes the token configurer UI for the given component.
public abstract void updateTokenConfigurer(String name);
// Sets the capacity of a place for one token color.
public abstract void updatePlaceCapacity(String name, String color, int newCapacity);
// Capacity of a place for one token color.
public abstract int getCapacityforPlace(String name, String color);
// Refreshes all dependent views.
public abstract void updateViews();
/**
 * Returns the token state of the place with the given id, arranged for
 * rendering with the given circular point group.
 *
 * @param id                 Place id.
 * @param circularPointGroup Layout helper for the token dots.
 * @return The place's token multiset.
 */
public abstract Multiset<String> getPlaceStateForCell(String id, CircularPointGroup circularPointGroup);
/**
 * Method for incrementing or decrementing the current #AbstractMarking of
 * the given #AbstractPNPlace
 *
 * @param cell          Place cell whose marking is changed.
 * @param wheelRotation Mouse-wheel delta; sign selects increment vs. decrement.
 * @return The updated marking.
 */
public abstract AbstractMarking inOrDecrementPlaceState(PNGraphCell cell, int wheelRotation);
/**
 * Selects all graph cells of the given component type (PLACE, TRANSITION or
 * ARC) by filtering every descendant of the model root.
 *
 * @param type Component type whose cells should become the selection.
 */
public void selectPNGraphCells(final PNComponent type) {
    Collection<Object> cells = mxGraphModel.filterDescendants(getModel(), new mxGraphModel.Filter() {
        public boolean filter(Object cell) {
            // Only visible leaf cells of the requested type pass the filter.
            return view.getState(cell) != null && model.getChildCount(cell) == 0 && ((PNGraphCell) cell).getType() == type;
        }
    });
    setSelectionCells(cells);
}
// Needs to be overridden for token painting
@Override
/**
 * Draws the cell state with the given label onto the canvas. No
 * children or descendants are painted here. This method invokes
 * cellDrawn after the cell, but not its descendants have been
 * painted.
 *
 * @param canvas Canvas onto which the cell should be drawn.
 * @param state State of the cell to be drawn.
 * @param drawLabel Indicates if the label should be drawn.
 */
public void drawState(mxICanvas canvas, mxCellState state, boolean drawLabel) {
    Object cell = (state != null) ? state.getCell() : null;
    // Skip the root cells; only real vertices and edges are painted.
    if (cell != null && cell != view.getCurrentRoot() && cell != model.getRoot() && (model.isVertex(cell) || model.isEdge(cell))) {
        // NOTE(review): customcell is never used in this method.
        PNGraphCell customcell;
        Object obj = null;
        // Image canvases use the plain mxGraph painting; interactive 2D
        // canvases go through our drawCell() to render token decorations.
        if (canvas instanceof mxImageCanvas)
            obj = canvas.drawCell(state);
        else
            try {
                obj = drawCell((mxGraphics2DCanvas) canvas, state);
            } catch (IOException e) {
                JOptionPane.showMessageDialog(null, "Cannot write Graphicsstyle to FileSystem " + e.getMessage(), "IO Exception", JOptionPane.ERROR_MESSAGE);
            } catch (PropertyException e) {
                JOptionPane.showMessageDialog(null, e.getMessage(), "Property Exception", JOptionPane.ERROR_MESSAGE);
            }
        Object lab = null;
        // Holds the current clipping region in case the label will be
        // clipped
        Shape clip = null;
        Rectangle newClip = state.getRectangle();
        // Indirection for image canvas that contains a graphics canvas
        mxICanvas clippedCanvas = (isLabelClipped(state.getCell())) ? canvas : null;
        if (clippedCanvas instanceof mxImageCanvas) {
            clippedCanvas = ((mxImageCanvas) clippedCanvas).getGraphicsCanvas();
            // TODO: Shift newClip to match the image offset
            // Point pt = ((mxImageCanvas) canvas).getTranslate();
            // newClip.translate(-pt.x, -pt.y);
        }
        if (clippedCanvas instanceof mxGraphics2DCanvas) {
            Graphics g = ((mxGraphics2DCanvas) clippedCanvas).getGraphics();
            clip = g.getClip();
            // Ensure that our new clip resides within our old clip
            if (clip instanceof Rectangle) {
                g.setClip(newClip.intersection((Rectangle) clip));
            }
            // Otherwise, default to original implementation
            else {
                g.setClip(newClip);
            }
        }
        if (drawLabel) {
            String label = state.getLabel();
            if (label != null && state.getLabelBounds() != null) {
                Graphics2D g = null;
                if (canvas instanceof mxGraphics2DCanvas) {
                    // Label text is drawn in the cell's stroke color with a
                    // label-specific stroke derived from the style.
                    Map<String, Object> style = state.getStyle();
                    g = ((mxGraphics2DCanvas) canvas).getGraphics();
                    Color color = mxUtils.getColor(state.getStyle(), mxConstants.STYLE_STROKECOLOR);
                    g.setColor(color);
                    g.setStroke(Utils.createLabelStroke(style, canvas.getScale()));
                }
                lab = canvas.drawLabel(label, state, isHtmlLabel(cell));
                // Restore a plain default stroke after label painting.
                if (g != null)
                    g.setStroke(new BasicStroke((float) 2));
            }
        }
        // Restores the previous clipping region
        if (clippedCanvas instanceof mxGraphics2DCanvas) {
            ((mxGraphics2DCanvas) clippedCanvas).getGraphics().setClip(clip);
        }
        // Invokes the cellDrawn callback with the object which was created
        // by the canvas to represent the cell graphically
        if (obj != null) {
            cellDrawn(canvas, state, obj, lab);
        }
    }
}
/**
 * Draws the given cell state onto the canvas and augments the basic shape
 * with Petri-net-specific decorations: token dots on places, access-mode
 * graphics on transitions and constraint graphics on arcs.
 *
 * @param canvas the graphics canvas to paint on
 * @param state  the cell state to draw
 * @return the {@link mxIShape} resolved from the cell style (also returned
 *         when the canvas has no graphics context and nothing was painted)
 * @throws PropertyException if Wolfgang properties required for the place
 *         decorations cannot be read
 * @throws IOException if the property store cannot be accessed
 */
public Object drawCell(mxGraphics2DCanvas canvas, mxCellState state) throws PropertyException, IOException {
    Map<String, Object> style = state.getStyle();
    mxIShape shape = canvas.getShape(style);
    if (canvas.getGraphics() != null && shape != null) {
        // Creates a temporary graphics instance for drawing this shape.
        float opacity = mxUtils.getFloat(style, mxConstants.STYLE_OPACITY, 100);
        Graphics2D g = canvas.createTemporaryGraphics(style, opacity, state);
        // Paint the base shape first, then the type-specific decorations.
        shape.paintShape(canvas, state);
        if (state.getCell() instanceof PNGraphCell) {
            PNGraphCell customcell = (PNGraphCell) state.getCell();
            if (customcell.getType() == PNComponent.PLACE) {
                drawAdditionalPlaceGrahpics(canvas, state);
            }
            if (customcell.getType() == PNComponent.TRANSITION) {
                try {
                    drawAdditionalTransitionGrahpics(canvas, state);
                } catch (ParameterException e) {
                    // FIX: the original passed JOptionPane.ERROR, which does not
                    // exist; the message-type constant is ERROR_MESSAGE.
                    JOptionPane.showMessageDialog(null, "Graphic for Access Mode is not avaiable \nReason: " + e.getMessage(), "Parameter Exception", JOptionPane.ERROR_MESSAGE);
                } catch (PropertyException e) {
                    JOptionPane.showMessageDialog(null, "Graphic for Access Mode is not avaiable \nReason: " + e.getMessage(), "Property Exception", JOptionPane.ERROR_MESSAGE);
                } catch (IOException e) {
                    JOptionPane.showMessageDialog(null, "Graphic for Access Mode is not avaiable \nReason: " + e.getMessage(), "IO Exception", JOptionPane.ERROR_MESSAGE);
                }
            }
            if (customcell.getType() == PNComponent.ARC) {
                if (!hideContraintsAsTokens)
                    drawAdditionalArcGrahpics(canvas, state);
            }
        }
        // Dispose the temporary graphics; the canvas keeps its own graphics
        // context internally (the original's dead re-assignment of a local
        // "previousGraphics" has been removed).
        g.dispose();
    }
    return shape;
}
/** Paints arc-specific decorations (e.g. constraint graphics) on top of the basic arc shape. */
protected abstract void drawAdditionalArcGrahpics(mxGraphics2DCanvas canvas, mxCellState state);

/** Paints transition-specific decorations (e.g. access-mode graphics) on top of the basic shape. */
protected abstract void drawAdditionalTransitionGrahpics(mxGraphics2DCanvas canvas, mxCellState state) throws PropertyException, IOException;
/**
 * Paints the current marking of a place either as colored token dots
 * (when they fit into the place) or as a numeric token count.
 *
 * @param canvas the canvas to paint on
 * @param state  the view state of the place cell
 * @throws PropertyException if token layout defaults cannot be read from the Wolfgang properties
 * @throws IOException if the property store cannot be accessed
 */
protected void drawAdditionalPlaceGrahpics(mxGraphics2DCanvas canvas, mxCellState state) throws PropertyException, IOException {
    Rectangle temp = state.getRectangle();
    PNGraphCell cell = (PNGraphCell) state.getCell();
    // Token layout parameters scale with the current zoom level.
    int minDistance = (int) (WolfgangProperties.getInstance().getDefaultTokenDistance() * getView().getScale());
    int pointDiameter = (int) (WolfgangProperties.getInstance().getDefaultTokenSize() * getView().getScale());
    CircularPointGroup circularPointGroup = new CircularPointGroup(minDistance, pointDiameter);
    // TODO Making method more general to be able to handle colored marking in cpn
    Multiset<String> placeState = getPlaceStateForCell(cell.getId(), circularPointGroup);
    if (placeState == null) {
        return;
    }
    if (getNetContainer().getPetriNetGraphics() instanceof AbstractCPNGraphics) {
        AbstractCPNGraphics cpnGraphics = (AbstractCPNGraphics) getNetContainer().getPetriNetGraphics();
        Map<String, Color> colors = cpnGraphics.getColors();
        // One dot per token, colored via the CPN color map; unknown token
        // colors fall back to black.
        for (String s : placeState.support()) {
            Color color = colors.get(s);
            int number = placeState.multiplicity(s);
            PColor pco;
            if (color != null)
                pco = new PColor(color.getRed(), color.getGreen(), color.getBlue());
            else {
                pco = PColor.black;
            }
            circularPointGroup.addPoints(pco, number);
        }
    }
    int k = placeState.size();
    Point center = new Point(temp.x + temp.width / 2, temp.y + temp.height / 2);
    // Width needed to render k tokens as dots. The original contained a
    // second, redundant "if (k == 2)" branch assigning the same value;
    // it has been removed.
    int requiredWidth = 0;
    if (k == 1)
        requiredWidth = circularPointGroup.getPointDiameter();
    if (k == 2 || k == 3)
        requiredWidth = (circularPointGroup.getPointDiameter() + minDistance) * 2;
    if (k == 4)
        requiredWidth = (circularPointGroup.getPointDiameter() + minDistance * 2) * 2;
    if (k >= 5)
        requiredWidth = circularPointGroup.getRequiredDiameter();
    // Render dots when they fit, otherwise fall back to a numeric count.
    if (state.getWidth() >= requiredWidth)
        drawPoints(canvas, temp, circularPointGroup, center);
    else
        drawNumbers(cell, k + "", canvas, temp, center);
}
/**
 * Renders the token count of a place as plain black text, using the font
 * family from the cell style (or the mxGraph default) scaled to the
 * current zoom level.
 */
private void drawNumbers(PNGraphCell cell, String numbers, mxGraphics2DCanvas canvas, Rectangle temp, Point center) {
    Graphics2D graphics = (Graphics2D) canvas.getGraphics();
    Object familyStyle = getCellStyle(cell).get(mxConstants.STYLE_FONTFAMILY);
    String fontFamily = (familyStyle != null) ? familyStyle.toString() : mxConstants.DEFAULT_FONTFAMILY;
    graphics.setFont(new Font(fontFamily, Font.PLAIN, (int) (10 * getView().getScale())));
    graphics.setPaint(Color.black);
    int textX = center.x - (int) (temp.width * 0.1);
    int textY = center.y - (int) (graphics.getFontMetrics().getHeight() * 0.8);
    drawString(graphics, numbers + "\n", textX, textY);
}
/**
 * Draws multi-line text: splits the given text at newline characters and
 * renders each line one font height below the previous one, starting one
 * line height below the given y coordinate.
 */
private void drawString(Graphics g, String text, int x, int y) {
    int baseline = y;
    for (String line : text.split("\n")) {
        baseline += g.getFontMetrics().getHeight();
        g.drawString(line, x, baseline);
    }
}
/**
 * Paints the token dots of a place: for every token color in the point
 * group, fills one circle per computed coordinate, centered on the place.
 * The unused local {@code tgSet} of the original implementation (dead
 * code) has been removed.
 */
protected void drawPoints(mxGraphics2DCanvas canvas, Rectangle temp, CircularPointGroup circularPointGroup, Point center) {
    Graphics g = canvas.getGraphics();
    Iterator<PColor> iter = circularPointGroup.getColors().iterator();
    PColor actColor;
    while (iter.hasNext()) {
        actColor = iter.next();
        g.setColor(new Color(actColor.getRGB()));
        for (de.invation.code.toval.graphic.misc.Position p : circularPointGroup.getCoordinatesFor(actColor)) {
            GraphicUtils.fillCircle(g, (int) (center.getX() + p.getX()), (int) (center.getY() + p.getY()), circularPointGroup.getPointDiameter());
        }
    }
}
/**
* Sets the positions of place and transition labels according to the<br>
* information contained in the corresponding annotation graphics.<br>
* This method is called when a graph is created with a non-empty Petri net.
*
* @param pnGraphics
* The Petri net graphics
*/
// public void updatePositionPropertiesFromCells() {
// for (PNGraphCell cell : nodeReferences.values()) {
// mxCellState state = getView().getState(cell);
// setPositionProperties((PNGraphCell) state.getCell());
// }
//
// }
/**
 * Writes the current center coordinates of the given node cell back into
 * the Petri net properties. Arcs carry no own position and are ignored;
 * negative coordinates are skipped.
 */
private void setPositionProperties(PNGraphCell cell) {
    switch (cell.getType()) {
    case ARC:
        break;
    case PLACE:
        if (cell.getGeometry().getCenterX() >= 0)
            properties.setPlacePositionX(this, cell.getId(), (int) cell.getGeometry().getCenterX());
        if (cell.getGeometry().getCenterY() >= 0)
            properties.setPlacePositionY(this, cell.getId(), (int) cell.getGeometry().getCenterY());
        break;
    case TRANSITION:
        if (cell.getGeometry().getCenterX() >= 0)
            properties.setTransitionPositionX(this, cell.getId(), (int) cell.getGeometry().getCenterX());
        if (cell.getGeometry().getCenterY() >= 0)
            properties.setTransitionPositionY(this, cell.getId(), (int) cell.getGeometry().getCenterY());
        break;
    }
}
// ------- Property change handling ---------------------------------------------------------
// These methods are called when Petri net properties are changed by other
// classes.
/**
 * Notifies this PNPropertiesListener that a PN component (place,
 * transition or arc) was added. Currently a no-op.
 */
@Override
public void componentAdded(PNComponent component, String name) {
}

/**
 * Notifies this PNPropertiesListener that a PN component was removed.
 * Currently a no-op.
 */
@Override
public void componentRemoved(PNComponent component, String name) {
}
/**
 * Reacts to Petri net property changes raised by other components:
 * dispatches the change to the type-specific handler and repaints the
 * graph. Changes that originated from this graph itself, and no-op
 * changes (old value equals new value), are ignored.
 */
@Override
public void propertyChange(PNPropertyChangeEvent event) {
    Object oldValue = event.getOldValue();
    Object newValue = event.getNewValue();
    // Null-safe change detection; the original dereferenced oldValue
    // directly and would throw an NPE for a null old value.
    boolean unchanged = (oldValue == null) ? (newValue == null) : oldValue.equals(newValue);
    if (unchanged || event.getSource() == this) {
        return;
    }
    switch (event.getFieldType()) {
    case PLACE:
        handlePlacePropertyChange(event.getName(), event.getProperty(), oldValue, newValue);
        break;
    case TRANSITION:
        handleTransitionPropertyChange(event.getName(), event.getProperty(), oldValue, newValue);
        break;
    case ARC:
        handleArcPropertyChange(event.getName(), event.getProperty(), oldValue, newValue);
        break;
    }
    refresh();
}
/**
 * Applies a single place property change to the corresponding graph cell.
 *
 * @return true if the property was handled, false otherwise
 */
private boolean handlePlacePropertyChange(String name, PNProperty property, Object oldValue, Object newValue) {
    PNGraphCell placeCell = getNodeCell(name);
    mxRectangle bounds;
    switch (property) {
    case PLACE_LABEL:
        getModel().setValue(placeCell, newValue);
        return true;
    case PLACE_SIZE:
        // Places are circular: width and height follow the same size value.
        // (The original's deprecated "new Integer(...)" wrapping is removed.)
        bounds = getView().getState(placeCell).getBoundingBox();
        bounds.setWidth(((Integer) newValue).doubleValue());
        bounds.setHeight(((Integer) newValue).doubleValue());
        resizeCell(placeCell, bounds);
        setSelectionCell(placeCell);
        return true;
    case PLACE_POSITION_X:
        // Positions are absolute; moveCells expects a delta.
        moveCells(new Object[] { placeCell }, ((Integer) newValue).doubleValue() - ((Integer) oldValue).doubleValue(), 0);
        setSelectionCell(placeCell);
        return true;
    case PLACE_POSITION_Y:
        moveCells(new Object[] { placeCell }, 0, ((Integer) newValue).doubleValue() - ((Integer) oldValue).doubleValue());
        setSelectionCell(placeCell);
        return true;
    }
    return false;
}
/**
 * Applies a single transition property change to the corresponding graph
 * cell.
 *
 * @return true if the property was handled, false otherwise
 */
protected boolean handleTransitionPropertyChange(String name, PNProperty property, Object oldValue, Object newValue) {
    PNGraphCell transitionCell = getNodeCell(name);
    mxRectangle bounds;
    switch (property) {
    case TRANSITION_LABEL:
        getModel().setValue(transitionCell, newValue);
        return true;
    case TRANSITION_POSITION_X:
        // Positions are absolute; moveCells expects a delta.
        // (The original's deprecated "new Integer(...)" wrapping is removed.)
        moveCells(new Object[] { transitionCell }, ((Integer) newValue).doubleValue() - ((Integer) oldValue).doubleValue(), 0);
        setSelectionCell(transitionCell);
        return true;
    case TRANSITION_POSITION_Y:
        moveCells(new Object[] { transitionCell }, 0, ((Integer) newValue).doubleValue() - ((Integer) oldValue).doubleValue());
        setSelectionCell(transitionCell);
        return true;
    case TRANSITION_SIZE_X:
        // Width changes keep the current height.
        bounds = getView().getState(transitionCell).getBoundingBox();
        bounds.setWidth(((Integer) newValue).doubleValue());
        bounds.setHeight(transitionCell.getGeometry().getHeight());
        resizeCell(transitionCell, bounds);
        setSelectionCell(transitionCell);
        return true;
    case TRANSITION_SIZE_Y:
        // Height changes keep the current width.
        bounds = getView().getState(transitionCell).getBoundingBox();
        bounds.setWidth(transitionCell.getGeometry().getWidth());
        bounds.setHeight(((Integer) newValue).doubleValue());
        resizeCell(transitionCell, bounds);
        setSelectionCell(transitionCell);
        return true;
    }
    return false;
}
/**
 * Applies a single arc property change to the corresponding graph cell.
 *
 * @return true if the property was handled, false otherwise
 */
protected boolean handleArcPropertyChange(String name, PNProperty property, Object oldValue, Object newValue) {
    PNGraphCell arcCell = getNodeCell(name);
    switch (property) {
    case ARC_WEIGHT:
        getModel().setValue(arcCell, newValue);
        // Report the change as handled; the original fell through and
        // returned false, inconsistent with the place/transition handlers.
        return true;
    }
    return false;
}
/** Selects the place cell with the given name unless it is already selected. */
public void selectPlace(String name) {
    selectNodeCellByName(name);
}

/** Selects the transition cell with the given name unless it is already selected. */
public void selectTransition(String name) {
    selectNodeCellByName(name);
}

/** Selects the arc cell with the given name unless it is already selected. */
public void selectArc(String name) {
    selectNodeCellByName(name);
}

/**
 * Common implementation of the select* methods above (the original
 * triplicated this code): looks up the cell by name and selects it.
 */
private void selectNodeCellByName(String name) {
    if (!isCellSelected(name)) {
        PNGraphCell cell = getNodeCell(name);
        setSelectionCell(cell);
    }
}
/**
 * Returns true if the current single selection is a {@link PNGraphCell}
 * with the given id.
 */
private boolean isCellSelected(String id) {
    PNGraphCell currentSelectionCell = null;
    if (getSelectionCell() instanceof PNGraphCell) {
        currentSelectionCell = (PNGraphCell) getSelectionCell();
    }
    if (currentSelectionCell != null) {
        // FIX: compare String content with equals(); the original used ==,
        // which compares references and fails for non-interned strings.
        if (currentSelectionCell.getId().equals(id)) {
            return true;
        }
    }
    return false;
}
/**
 * Called after cells have been moved on the canvas. Propagates the new
 * center coordinates of every moved Petri net node back into the Petri
 * net properties.
 */
@Override
public void cellsMoved(Object[] cells, double dx, double dy, boolean disconnect, boolean constrain) {
    super.cellsMoved(cells, dx, dy, disconnect, constrain);
    for (Object object : cells) {
        if (object instanceof PNGraphCell) {
            PNGraphCell cell = (PNGraphCell) object;
            setPositionProperties(cell);
        }
    }
}
/**
 * Called after cells have been resized on the canvas. Propagates the new
 * dimensions of places (uniform size) and transitions (width and height)
 * back into the Petri net properties. Arcs carry no own size and are
 * ignored. (The previous javadoc here described cell addition and did not
 * match this method.)
 */
@Override
public void cellsResized(Object[] cells, mxRectangle[] bounds) {
    super.cellsResized(cells, bounds);
    for (Object object : cells) {
        if (object instanceof PNGraphCell) {
            PNGraphCell cell = (PNGraphCell) object;
            switch (cell.getType()) {
            case ARC:
                break;
            case PLACE:
                properties.setPlaceSize(this, cell.getId(), (int) cell.getGeometry().getWidth());
                break;
            case TRANSITION:
                properties.setTransitionSizeX(this, cell.getId(), (int) cell.getGeometry().getWidth());
                properties.setTransitionSizeY(this, cell.getId(), (int) cell.getGeometry().getHeight());
                break;
            }
        }
    }
}
/**
 * Called after cells have been removed from the graph. Removes the
 * corresponding places, transitions and flow relations from the
 * underlying Petri net as well.
 */
@Override
public void cellsRemoved(Object[] cells) {
    super.cellsRemoved(cells);
    for (Object removed : cells) {
        if (!(removed instanceof PNGraphCell)) {
            continue;
        }
        PNGraphCell graphCell = (PNGraphCell) removed;
        if (graphCell.getType() == PNComponent.ARC) {
            removeFlowRelation(graphCell.getId());
        } else if (graphCell.getType() == PNComponent.PLACE) {
            removePlace(graphCell.getId());
        } else if (graphCell.getType() == PNComponent.TRANSITION) {
            removeTransition(graphCell.getId());
        }
    }
}
@Override
/**
 * Returns true if split is enabled and the given edge may be splitted into
 * two edges with the given cell as a new terminal between the two.
 *
 * Intentionally always returns false here: splitting an arc would insert a
 * node between two nodes of the same kind and thereby violate the Petri
 * net's bipartite place/transition structure. The default mxGraph
 * implementation is kept below for reference.
 *
 * @param target Object that represents the edge to be splitted.
 * @param cells Array of cells to add into the given edge.
 * @return Always false (see above).
 */
public boolean isSplitTarget(Object target, Object[] cells) {
    // since this works only for one cell, this would always hurt the
    // Petri-Net order
    // if (target != null && cells != null && cells.length == 1)
    // {
    // Object src = model.getTerminal(target, true);
    // Object trg = model.getTerminal(target, false);
    //
    // return (model.isEdge(target)
    // && isCellConnectable(cells[0])
    // && getEdgeValidationError(target,
    // model.getTerminal(target, true), cells[0]) == null
    // && !model.isAncestor(cells[0], src) && !model.isAncestor(
    // cells[0], trg));
    // }
    return false;
}
@Override
/**
 * Returns true if the given target cell is a valid target for source.
 * In addition to the generic mxGraph checks (valid source, valid target,
 * loop policy), connections between two Petri net cells of the SAME
 * component type are rejected, enforcing the bipartite net structure.
 *
 * @param source Object that represents the source cell.
 * @param target Object that represents the target cell.
 * @return Returns true if the connection between the given terminals is valid.
 */
public boolean isValidConnection(Object source, Object target) {
    if (!isValidSource(source) || !isValidTarget(target)) {
        return false;
    }
    if (!isAllowLoops() && source == target) {
        return false;
    }
    if (source instanceof PNGraphCell && target instanceof PNGraphCell) {
        // place->place and transition->transition arcs are forbidden
        return ((PNGraphCell) source).getType() != ((PNGraphCell) target).getType();
    }
    return true;
}
/** Removes the flow relation (arc) with the given name from the underlying Petri net. */
protected boolean removeFlowRelation(String name) {
    return netContainer.getPetriNet().removeFlowRelation(name);
}

/** Removes the transition with the given name from the underlying Petri net. */
protected boolean removeTransition(String name) {
    return netContainer.getPetriNet().removeTransition(name);
}

/** Removes the place with the given name from the underlying Petri net. */
protected boolean removePlace(String name) {
    return netContainer.getPetriNet().removePlace(name);
}
/**
 * mxGraph event listener callback. CHANGE events are forwarded to the
 * change handler, and selection listeners are notified when the event was
 * raised by a selection model or a graph component. RESIZE_CELLS events
 * trigger a place-size validation.
 */
@Override
public void invoke(Object sender, mxEventObject evt) {
    if (evt.getName().equals(mxEvent.CHANGE)) {
        changeHandler.handleChange(evt);
        if (sender instanceof mxGraphSelectionModel || sender instanceof PNGraphComponent) {
            graphListenerSupport.notifyComponentsSelected(getSelectedGraphCells());
        }
    } else if (evt.getName().equals(mxEvent.RESIZE_CELLS)) {
        ensureValidPlaceSize();
    }
}
/** Sets the label text of the arc with the given id. */
protected abstract void setArcLabel(String id, String string);

/**
 * Applies the given font family to the labels of all selected cells. The
 * placeholder value "-" (shown for mixed selections) is ignored.
 *
 * @param font the font family name; must not be null
 */
public void setFontOfSelectedCellLabel(String font) {
    Validate.notNull(font);
    // Validate.notNull already rejects null, so only the "-" placeholder
    // needs to be filtered here (the original re-checked for null).
    if (!font.equals("-")) {
        setCellStyles(mxConstants.STYLE_FONTFAMILY, font);
    }
}

/**
 * Applies the given font size to the labels of all selected cells.
 *
 * @param font the font size as a string, as expected by mxGraph styles
 */
public void setFontSizeOfSelectedCellLabel(String font) {
    setCellStyles(mxConstants.STYLE_FONTSIZE, font);
}
/**
 * Applies the given stroke weight to all selected Petri net cells. When a
 * label is selected, the label line width is changed instead of the cell
 * border width. Cells whose current stroke width already matches are left
 * untouched to avoid redundant style-change commands.
 *
 * @param strokeWeight the new stroke width as a string, e.g. "2"
 */
public void setStrokeWeightOfSelectedCell(String strokeWeight) {
    for (Object cell : getSelectionCells()) {
        if (cell instanceof PNGraphCell) {
            String styleKey = (isLabelSelected()) ? MXConstants.LABEL_LINE_WIDTH : mxConstants.STYLE_STROKEWIDTH;
            if(getView().getState(cell).getStyle().containsKey(styleKey)){
                // Stored widths may carry a ".0" suffix; strip it before comparing.
                String currentStrokeWidth = mxUtils.getString(getView().getState(cell).getStyle(), styleKey).replace(".0", "");
                if (!currentStrokeWidth.equals(strokeWeight)) {
                    setCellStyles(styleKey, strokeWeight, new Object[] { cell });
                }
            }
        }
    }
}
@Override
/**
 * Sets the key to value in the styles of the given cells. This will modify
 * the existing cell styles in-place and override any existing assignment
 * for the given key. If no cells are specified, then the selection cells
 * are changed. If no value is specified, then the respective key is
 * removed from the styles.
 *
 * @param key String representing the key to be assigned.
 * @param value String representing the new value for the key.
 * @param cells Array of cells to change the style for.
 * @return the array of cells whose styles were changed
 */
public Object[] setCellStyles(String key, String value, Object[] cells) {
    if (cells == null) {
        cells = getSelectionCells();
    }
    // Delegates to the static helper, which wraps the change into an
    // undoable model transaction.
    setCellStyles(this, cells, key, value);
    return cells;
}
/**
 * Assigns the value for the given key in the styles of the given cells, or
 * removes the key from the styles if the value is null. All updates are
 * performed inside a single model transaction so they can be undone as one
 * step.
 *
 * @param pnGraph
 *            Graph whose model executes the transaction.
 * @param cells
 *            Array of cells to be updated (null entries are skipped).
 * @param key
 *            Key of the style to be changed.
 * @param value
 *            New value for the given key.
 */
public static void setCellStyles(PNGraph pnGraph, Object[] cells, String key, String value) {
    if (cells == null || cells.length == 0) {
        return;
    }
    pnGraph.getModel().beginUpdate();
    try {
        for (Object cell : cells) {
            if (cell == null) {
                continue;
            }
            String updatedStyle = mxStyleUtils.setStyle(pnGraph.getModel().getStyle(cell), key, value);
            setStyle(cell, updatedStyle, key, pnGraph);
        }
    } finally {
        pnGraph.getModel().endUpdate();
    }
}
/*
 * (non-Javadoc)
 *
 * @see com.mxgraph.model.mxIGraphModel#setStyle(Object, String)
 */
/**
 * Sets the style of the given cell via an undoable {@link StyleChange}
 * command. The command is executed when the new style is null or differs
 * from the cell's current style; otherwise nothing happens.
 *
 * @return the style that was passed in
 */
public static String setStyle(Object cell, String style, String key, PNGraph pnGraph) {
    if (style == null || !style.equals(pnGraph.getModel().getStyle(cell)))
    {
        ((mxGraphModel) pnGraph.getModel()).execute(new StyleChange(pnGraph, cell, style, key));
    }
    return style;
}
@Override
/**
 * Sets the style of the specified cells inside a single model transaction.
 * If no cells are given, the current selection cells are changed instead.
 *
 * @param style String representing the new style of the cells.
 * @param cells Optional array of cells to set the style for; defaults to
 *              the selection cells.
 * @return the array of cells whose style was set
 */
public Object[] setCellStyle(String style, Object[] cells) {
    Object[] targets = (cells != null) ? cells : getSelectionCells();
    if (targets != null) {
        model.beginUpdate();
        try {
            for (Object target : targets) {
                setStyle(target, style, null, this);
            }
        } finally {
            model.endUpdate();
        }
    }
    return targets;
}
/**
 * Returns the first unused transition name of the form
 * {@code TRANSITION_NAME_PREFIX + index}, counting up from 0.
 *
 * @return a transition name not yet contained in the Petri net
 */
public String getNewTransitionName() {
    String prefix = MXConstants.TRANSITION_NAME_PREFIX;
    // primitive int avoids the repeated autoboxing of the original Integer counter
    int index = 0;
    while (getNetContainer().getPetriNet().containsTransition(prefix + index)) {
        index++;
    }
    return prefix + index;
}

/**
 * Returns the first unused place name of the form
 * {@code PLACE_NAME_PREFIX + index}, counting up from 0.
 *
 * @return a place name not yet contained in the Petri net
 */
public String getNewPlaceName() {
    String prefix = MXConstants.PLACE_NAME_PREFIX;
    int index = 0;
    while (getNetContainer().getPetriNet().containsPlace(prefix + index)) {
        index++;
    }
    return prefix + index;
}
/**
 * Checks whether the given name is a valid generated node name for the
 * given component type, i.e. the type's prefix followed by an integer.
 *
 * @param name the candidate node name
 * @param type the component type (PLACE or TRANSITION; other types have no
 *             node-name prefix and yield false)
 * @return true if the name matches "prefix + integer"
 */
public boolean isValidNodeName(String name, PNComponent type) {
    String prefix;
    switch (type) {
    case PLACE:
        prefix = MXConstants.PLACE_NAME_PREFIX;
        break;
    case TRANSITION:
        prefix = MXConstants.TRANSITION_NAME_PREFIX;
        break;
    default:
        // FIX: the original left prefix null for ARC (or future types) and
        // then dereferenced it in name.startsWith(prefix) -> NPE.
        return false;
    }
    if (!name.startsWith(prefix))
        return false;
    String possibleInteger = name.substring(prefix.length());
    Validate.isInteger(possibleInteger);
    return true;
}
/**
 * Adds a way point at the given position to the geometry of the given arc
 * cell. If the arc already has way points, the new point is inserted
 * between the two consecutive points (including the virtual source and
 * target centers) whose connecting segment is closest to the click
 * position; otherwise it simply becomes the first way point. The change is
 * mirrored into the persistent arc graphics.
 *
 * @param cell the arc cell to modify (non-arc cells are ignored)
 * @param pt   the position of the new way point in graph coordinates
 */
public void addWayPoint(PNGraphCell cell, Point pt) {
    if (cell.getType().equals(PNComponent.ARC)) {
        List<mxPoint> points = cell.getGeometry().getPoints();
        if (points != null) {
            // This code enables adding waypoints in between two existing
            // waypoints, and not just adding it at the end of the given
            // line
            if (points.size() == 0) {
                points.add(new mxPoint(pt.getX(), pt.getY()));
            } else {
                // Temporarily extend the list with the source and target
                // centers so that the end segments are considered too.
                double sourceX = cell.getSource().getGeometry().getCenterX();
                double sourceY = cell.getSource().getGeometry().getCenterY();
                double targetX = cell.getTarget().getGeometry().getCenterX();
                double targetY = cell.getTarget().getGeometry().getCenterY();
                points.add(new mxPoint(targetX, targetY));
                points.add(0, new mxPoint(sourceX, sourceY));
                for (int i = 0; i < points.size() - 1; i++) {
                    mxPoint p = points.get(i);
                    double x1 = p.getX();
                    double y1 = p.getY();
                    mxPoint p2 = points.get(i + 1);
                    double x2 = p2.getX();
                    double y2 = p2.getY();
                    mxPoint newPoint = new mxPoint(pt.getX(), pt.getY());
                    double xP = newPoint.getX();
                    double yP = newPoint.getY();
                    // Distance from the click to segment (p, p2); a hit
                    // within 5px (scaled by zoom) inserts the point here.
                    double comp = Line2D.ptSegDist(x1, y1, x2, y2, xP, yP);
                    if (comp <= 5.0 * getView().getScale()) {
                        points.add(i + 1, newPoint);
                        // terminate the loop after the first matching segment
                        i = points.size();
                    }
                }
                // Strip the temporary source/target entries again.
                points.remove(points.size() - 1);
                points.remove(0);
            }
        } else {
            points = new ArrayList<mxPoint>();
            points.add(new mxPoint(pt.getX(), pt.getY()));
        }
        cell.getGeometry().setPoints(points);
        updatePointsInArcGraphics(cell, points);
    }
}
/**
 * Removes a way point from the arc's geometry and mirrors the change into
 * the persistent arc graphics.
 *
 * @param cell  the arc cell (other cell types are left unchanged)
 * @param index index of the way point to remove; the list is accessed at
 *              {@code index - 1}, so callers presumably pass 1-based
 *              indices — an index of 0 would fail (TODO confirm with callers)
 */
public void removePoint(PNGraphCell cell, int index) {
    if (cell.getType().equals(PNComponent.ARC)) {
        List<mxPoint> points = cell.getGeometry().getPoints();
        if (points != null && points.size() > 0) {
            cell.getGeometry().getPoints().remove(index - 1);
        }
    }
    updatePointsInArcGraphics(cell, cell.getGeometry().getPoints());
}
/**
 * Mirrors the given list of way points into the persistent arc graphics of
 * the given arc cell. Does nothing when the net has no graphics for this
 * arc or when points is null.
 *
 * @param cell   the arc cell whose graphics should be updated
 * @param points the way points to store; may be null
 */
protected void updatePointsInArcGraphics(PNGraphCell cell, List<mxPoint> points) {
    ArcGraphics arcGraphics = getNetContainer().getPetriNetGraphics().getArcGraphics().get(cell.getId());
    // The tautological "points.size() >= 0" check of the original has been removed.
    if (arcGraphics != null && points != null) {
        Vector<Position> positions = new Vector<Position>();
        for (mxPoint p : points) {
            positions.add(new Position(p.getX(), p.getY()));
        }
        arcGraphics.setPositions(positions);
    }
}
/**
 * Switches the graph back into editing mode: disables execution, makes
 * cells selectable again, resets the Petri net marking and repaints.
 */
public void enterEditingMode() {
    setExecution(false);
    setCellsSelectable(true);
    getNetContainer().getPetriNet().reset();
    refresh();
}

/**
 * Fires the transition represented by the given cell in the underlying
 * Petri net, notifies listeners and repaints the graph.
 *
 * @throws PNException if the transition cannot be fired
 */
public void fireTransition(PNGraphCell cell) throws PNException {
    getNetContainer().getPetriNet().fire(cell.getId());
    graphListenerSupport.notifyTransitionFired(cell);
    refresh();
}

/** Marks the transition with the given id as silent (or not). */
public void updateTransitionSilent(String id, boolean setSilent) {
    getNetContainer().getPetriNet().getTransition(id).setSilent(setSilent);
}

/** Returns whether the transition with the given name is silent. */
public boolean getTransitionSilentState(String name) {
    return getNetContainer().getPetriNet().getTransition(name).isSilent();
}
/**
 * Clears all intermediate way points of every arc, both in the graph
 * geometry and in the persistent arc graphics.
 */
public void removeAllArcPoints() {
    for (AbstractFlowRelation fr : getNetContainer().getPetriNet().getFlowRelations()) {
        PNGraphCell arcCell = getNodeCell(fr.getName());
        arcCell.getGeometry().setPoints(new ArrayList<mxPoint>());
        // The unused lookup of the first absolute point in the original
        // implementation (dead code) has been removed.
        ArcGraphics arcGraphics = getNetContainer().getPetriNetGraphics().getArcGraphics().get(arcCell.getId());
        arcGraphics.setPositions(new Vector<Position>());
    }
}
/**
 * Stores the given flag in {@code hideContraintsAsTokens} and repaints.
 * NOTE(review): the method name suggests it controls visibility, but the
 * value is written to a "hide" flag — passing true appears to HIDE the
 * constraint tokens; confirm the intended polarity with callers.
 */
public void setTokenOnArcVisibility(boolean b) {
    this.hideContraintsAsTokens = b;
    refresh();
}

/** Returns whether the loaded net came with graphics information attached. */
public boolean containedGraphics() {
    return containedGraphics;
}
/**
 * Returns the default token size from the Wolfgang properties.
 *
 * @return the configured default token size
 * @throws IllegalStateException if the properties cannot be read. (The
 *         original printed the stack trace and then executed
 *         {@code return (Double) null;}, which always throws a
 *         context-free NullPointerException on unboxing.)
 */
protected double getDefaultTokenSize() {
    try {
        return WolfgangProperties.getInstance().getDefaultTokenSize();
    } catch (PropertyException e) {
        throw new IllegalStateException("Cannot read default token size from Wolfgang properties", e);
    } catch (IOException e) {
        throw new IllegalStateException("Cannot read default token size from Wolfgang properties", e);
    }
}
/**
 * Snaps the given coordinate to the grid, but only when snap-to-grid is
 * enabled in the Wolfgang properties; otherwise the value is returned
 * unchanged. Property access failures are printed and treated as
 * "snapping disabled".
 */
@Override
public double snap(double value) {
    try {
        if (WolfgangProperties.getInstance().getSnapToGrid()) {
            return super.snap(value);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return value;
}
/**
 * Listeners are not notified when the underlying Petri net reports
 * structure changes (new elements).<br>
 * This is done in the methods
 * {@link #addNewPlace(mxPoint, String, Offset, Dimension)},
 * {@link #addNewTransition(mxPoint, String, Offset, Dimension)} and
 * {@link #addNewFlowRelation(PNGraphCell, PNGraphCell, Offset, List, mxPoint, String)}
 * .
 */
@Override
public void placeAdded(PlaceChangeEvent event) {
}

/** Forwards place removals of the underlying net to the graph listeners. */
@Override
public void placeRemoved(PlaceChangeEvent event) {
    graphListenerSupport.notifyPlaceRemoved(event.place);
}

/**
 * Listeners are not notified when the underlying Petri net reports
 * structure changes (new elements).<br>
 * This is done in the methods
 * {@link #addNewPlace(mxPoint, String, Offset, Dimension)},
 * {@link #addNewTransition(mxPoint, String, Offset, Dimension)} and
 * {@link #addNewFlowRelation(PNGraphCell, PNGraphCell, Offset, List, mxPoint, String)}
 * .
 */
@Override
public void transitionAdded(TransitionChangeEvent event) {
}

/** Forwards transition removals of the underlying net to the graph listeners. */
@Override
public void transitionRemoved(TransitionChangeEvent event) {
    graphListenerSupport.notifyTransitionRemoved(event.transition);
}

/**
 * Listeners are not notified when the underlying Petri net reports
 * structure changes (new elements).<br>
 * This is done in the methods
 * {@link #addNewPlace(mxPoint, String, Offset, Dimension)},
 * {@link #addNewTransition(mxPoint, String, Offset, Dimension)} and
 * {@link #addNewFlowRelation(PNGraphCell, PNGraphCell, Offset, List, mxPoint, String)}
 * .
 */
@Override
public void relationAdded(RelationChangeEvent event) {
}

/** Forwards relation removals of the underlying net to the graph listeners. */
@Override
public void relationRemoved(RelationChangeEvent event) {
    graphListenerSupport.notifyRelationRemoved(event.relation);
}

/** Structure-change callback of the underlying net. Currently a no-op. */
@Override
public void structureChanged() {
}
/**
 * Looks up the graph cell with the given id. The model root contains one
 * layer of children; the Petri net cells live one level below, so exactly
 * two levels are searched.
 *
 * @return the matching cell, or null if no cell carries the given id
 */
public PNGraphCell getNodeCell(String name) {
    mxICell rootCell = (mxICell) model.getRoot();
    for (int layerIndex = 0; layerIndex < rootCell.getChildCount(); layerIndex++) {
        mxICell layer = rootCell.getChildAt(layerIndex);
        for (int childIndex = 0; childIndex < layer.getChildCount(); childIndex++) {
            PNGraphCell candidate = (PNGraphCell) layer.getChildAt(childIndex);
            if (candidate.getId().equals(name)) {
                return candidate;
            }
        }
    }
    return null;
}
}
|
Corrected Error: F57
Modification: Moved the statement `PNGraphCell newRelationCell = getNodeCell(relation.getName())` inside the `if (relation != null)` block to avoid a NullPointerException when no relation was created.
|
src/de/uni/freiburg/iig/telematik/wolfgang/graph/PNGraph.java
|
Corrected Error: F57 Modification: Moved the statement `PNGraphCell newRelationCell = getNodeCell(relation.getName())` inside the `if (relation != null)` block to avoid a NullPointerException when no relation was created.
|
<ide><path>rc/de/uni/freiburg/iig/telematik/wolfgang/graph/PNGraph.java
<ide> } else if (sourceCell.getType() == PNComponent.TRANSITION && targetCell.getType() == PNComponent.PLACE) {
<ide> relation = getNetContainer().getPetriNet().addFlowRelationTP(sourceCell.getId(), targetCell.getId());
<ide> }
<del> PNGraphCell newRelationCell = getNodeCell(relation.getName());
<ide> if (relation != null) {
<add> PNGraphCell newRelationCell = getNodeCell(relation.getName());
<ide> ArcGraphics arcGraphics = Utils.createArcGraphicsFromStyle(style);
<ide> if (points != null && !points.isEmpty() && referencePoint != null) {
<ide> Vector<Position> vector = new Vector<Position>();
|
|
Java
|
mit
|
ceb16db812c0fb3b8810067bef8baa3ecf70e078
| 0 |
facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper
|
/*
* Copyright (c) 2018-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the LICENSE
* file in the root directory of this source tree.
*
*/
package com.facebook.sonar.plugins.sharedpreferences;
import static android.content.Context.MODE_PRIVATE;
import android.content.Context;
import android.content.SharedPreferences;
import com.facebook.sonar.core.SonarConnection;
import com.facebook.sonar.core.SonarObject;
import com.facebook.sonar.core.SonarPlugin;
import com.facebook.sonar.core.SonarReceiver;
import com.facebook.sonar.core.SonarResponder;
import java.util.Map;
/**
 * A Sonar plugin that exposes the contents of a {@link SharedPreferences}
 * file to the Sonar desktop app and allows editing existing entries.
 * Preference changes are pushed to the desktop while a connection is active.
 */
public class SharedPreferencesSonarPlugin implements SonarPlugin {

  private SonarConnection mConnection;
  private final SharedPreferences mSharedPreferences;

  /**
   * Strong reference to the change listener. SharedPreferences keeps its
   * listeners only weakly referenced, so the anonymous listener of the
   * original implementation could be garbage collected, silently stopping
   * change notifications. Holding it in a field fixes that.
   */
  private final SharedPreferences.OnSharedPreferenceChangeListener
      onSharedPreferenceChangeListener =
          new SharedPreferences.OnSharedPreferenceChangeListener() {
            @Override
            public void onSharedPreferenceChanged(
                SharedPreferences sharedPreferences, String key) {
              // Only forward changes while the desktop app is connected.
              if (mConnection == null) {
                return;
              }
              mConnection.send(
                  "sharedPreferencesChange",
                  new SonarObject.Builder()
                      .put("name", key)
                      .put("deleted", !mSharedPreferences.contains(key))
                      .put("time", System.currentTimeMillis())
                      .put("value", mSharedPreferences.getAll().get(key))
                      .build());
            }
          };

  /**
   * Creates a {@link android.content.SharedPreferences} plugin for Sonar
   *
   * @param context The context to retrieve the file from. Will use the package name as the file
   *     name with {@link Context#MODE_PRIVATE}.
   */
  public SharedPreferencesSonarPlugin(Context context) {
    this(context, context.getPackageName());
  }

  /**
   * Creates a {@link android.content.SharedPreferences} plugin for Sonar
   *
   * @param context The context to retrieve the file from. Will use the name as the file name with
   *     {@link Context#MODE_PRIVATE}.
   * @param name The preference file name.
   */
  public SharedPreferencesSonarPlugin(Context context, String name) {
    this(context, name, MODE_PRIVATE);
  }

  /**
   * Creates a {@link android.content.SharedPreferences} plugin for Sonar
   *
   * @param context The context to retrieve the file from.
   * @param name The preference file name.
   * @param mode The Context mode to utilize.
   */
  public SharedPreferencesSonarPlugin(Context context, String name, int mode) {
    mSharedPreferences = context.getSharedPreferences(name, mode);
    mSharedPreferences.registerOnSharedPreferenceChangeListener(onSharedPreferenceChangeListener);
  }

  @Override
  public String getId() {
    return "Preferences";
  }

  /** Snapshots all current preference entries into a SonarObject. */
  private SonarObject getSharedPreferencesObject() {
    final SonarObject.Builder builder = new SonarObject.Builder();
    final Map<String, ?> map = mSharedPreferences.getAll();
    for (Map.Entry<String, ?> entry : map.entrySet()) {
      builder.put(entry.getKey(), entry.getValue());
    }
    return builder.build();
  }

  @Override
  public void onConnect(SonarConnection connection) {
    mConnection = connection;
    // Desktop requests the full preference snapshot.
    connection.receive(
        "getSharedPreferences",
        new SonarReceiver() {
          @Override
          public void onReceive(SonarObject params, SonarResponder responder) {
            responder.success(getSharedPreferencesObject());
          }
        });
    // Desktop edits an existing preference; the new value must have the
    // same type as the stored one, which decides the editor method used.
    connection.receive(
        "setSharedPreference",
        new SonarReceiver() {
          @Override
          public void onReceive(SonarObject params, SonarResponder responder)
              throws IllegalArgumentException {
            String preferenceName = params.getString("preferenceName");
            Object originalValue = mSharedPreferences.getAll().get(preferenceName);
            if (originalValue == null) {
              // Distinguish "unknown key" from "unsupported type" for a
              // clearer error on the desktop side.
              throw new IllegalArgumentException("Unknown shared preference: " + preferenceName);
            }
            SharedPreferences.Editor editor = mSharedPreferences.edit();
            if (originalValue instanceof Boolean) {
              editor.putBoolean(preferenceName, params.getBoolean("preferenceValue"));
            } else if (originalValue instanceof Long) {
              editor.putLong(preferenceName, params.getLong("preferenceValue"));
            } else if (originalValue instanceof Integer) {
              editor.putInt(preferenceName, params.getInt("preferenceValue"));
            } else if (originalValue instanceof Float) {
              editor.putFloat(preferenceName, params.getFloat("preferenceValue"));
            } else if (originalValue instanceof String) {
              editor.putString(preferenceName, params.getString("preferenceValue"));
            } else {
              throw new IllegalArgumentException("Type not supported: " + preferenceName);
            }
            editor.apply();
            responder.success(getSharedPreferencesObject());
          }
        });
  }

  @Override
  public void onDisconnect() {
    mConnection = null;
  }
}
|
android/plugins/sharedpreferences/SharedPreferencesSonarPlugin.java
|
/*
* Copyright (c) 2018-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the LICENSE
* file in the root directory of this source tree.
*
*/
package com.facebook.sonar.plugins.sharedpreferences;
import static android.content.Context.MODE_PRIVATE;
import android.content.Context;
import android.content.SharedPreferences;
import com.facebook.sonar.core.SonarConnection;
import com.facebook.sonar.core.SonarObject;
import com.facebook.sonar.core.SonarPlugin;
import com.facebook.sonar.core.SonarReceiver;
import com.facebook.sonar.core.SonarResponder;
import java.util.Map;
/**
 * Sonar plugin exposing the app's default {@link SharedPreferences} file (named after the
 * package, opened with {@code MODE_PRIVATE}): pushes change events to the desktop client
 * and services read/update requests sent back from it.
 */
public class SharedPreferencesSonarPlugin implements SonarPlugin {

  // Active connection to the Sonar desktop client; null while disconnected.
  private SonarConnection mConnection;
  private final SharedPreferences mSharedPreferences;

  /**
   * Creates the plugin for the preference file named after the package.
   *
   * @param context context used to open the shared preferences file.
   */
  public SharedPreferencesSonarPlugin(Context context) {
    mSharedPreferences = context.getSharedPreferences(context.getPackageName(), MODE_PRIVATE);
    // Forward every preference change to the desktop client while connected.
    mSharedPreferences.registerOnSharedPreferenceChangeListener(
        new SharedPreferences.OnSharedPreferenceChangeListener() {
          @Override
          public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
            if (mConnection != null) {
              mConnection.send(
                  "sharedPreferencesChange",
                  new SonarObject.Builder()
                      .put("name", key)
                      // A changed key that is no longer present was removed.
                      .put("deleted", !mSharedPreferences.contains(key))
                      .put("time", System.currentTimeMillis())
                      .put("value", mSharedPreferences.getAll().get(key))
                      .build());
            }
          }
        });
  }

  /** Identifier under which the desktop client addresses this plugin. */
  @Override
  public String getId() {
    return "Preferences";
  }

  // Builds a snapshot of every entry currently in the preferences file.
  private SonarObject getSharedPreferencesObject() {
    final SonarObject.Builder builder = new SonarObject.Builder();
    final Map<String, ?> map = mSharedPreferences.getAll();
    for (Map.Entry<String, ?> entry : map.entrySet()) {
      final Object val = entry.getValue();
      builder.put(entry.getKey(), val);
    }
    return builder.build();
  }

  @Override
  public void onConnect(SonarConnection connection) {
    mConnection = connection;
    // "getSharedPreferences": desktop asks for the full preference snapshot.
    connection.receive(
        "getSharedPreferences",
        new SonarReceiver() {
          @Override
          public void onReceive(SonarObject params, SonarResponder responder) {
            responder.success(getSharedPreferencesObject());
          }
        });
    // "setSharedPreference": update one existing key, preserving its current type.
    connection.receive(
        "setSharedPreference",
        new SonarReceiver() {
          @Override
          public void onReceive(SonarObject params, SonarResponder responder)
              throws IllegalArgumentException {
            String preferenceName = params.getString("preferenceName");
            // The existing value selects the typed setter; an absent key or an
            // unsupported type falls through to the throw below.
            Object originalValue = mSharedPreferences.getAll().get(preferenceName);
            SharedPreferences.Editor editor = mSharedPreferences.edit();
            if (originalValue instanceof Boolean) {
              editor.putBoolean(preferenceName, params.getBoolean("preferenceValue"));
            } else if (originalValue instanceof Long) {
              editor.putLong(preferenceName, params.getLong("preferenceValue"));
            } else if (originalValue instanceof Integer) {
              editor.putInt(preferenceName, params.getInt("preferenceValue"));
            } else if (originalValue instanceof Float) {
              editor.putFloat(preferenceName, params.getFloat("preferenceValue"));
            } else if (originalValue instanceof String) {
              editor.putString(preferenceName, params.getString("preferenceValue"));
            } else {
              throw new IllegalArgumentException("Type not supported: " + preferenceName);
            }
            // apply() persists asynchronously; the reply carries the updated snapshot.
            editor.apply();
            responder.success(getSharedPreferencesObject());
          }
        });
  }

  @Override
  public void onDisconnect() {
    mConnection = null;
  }
}
|
Make the shared preferences plugin more flexible
Summary:
You don't necessarily need / have your shared preferences defined in a file that is your package
name. This adds the ability to pass in the name + mode you need to read the correct shared
preferences file.
Reviewed By: sjkirby
Differential Revision: D8661573
fbshipit-source-id: 49e57b0371228eca7fc4f06e8ba65ff8cc059b11
|
android/plugins/sharedpreferences/SharedPreferencesSonarPlugin.java
|
Make the shared preferences plugin more flexible
|
<ide><path>ndroid/plugins/sharedpreferences/SharedPreferencesSonarPlugin.java
<ide> private SonarConnection mConnection;
<ide> private final SharedPreferences mSharedPreferences;
<ide>
<add> /**
<add> * Creates a {@link android.content.SharedPreferences} plugin for Sonar
<add> *
<add> * @param context The context to retrieve the file from. Will use the package name as the file
<add> * name with {@link Context#MODE_PRIVATE}.
<add> */
<ide> public SharedPreferencesSonarPlugin(Context context) {
<del> mSharedPreferences = context.getSharedPreferences(context.getPackageName(), MODE_PRIVATE);
<add> this(context, context.getPackageName());
<add> }
<add>
<add> /**
<add> * Creates a {@link android.content.SharedPreferences} plugin for Sonar
<add> *
<add> * @param context The context to retrieve the file from. Will use the name as the file name with
<add> * {@link Context#MODE_PRIVATE}.
<add> * @param name The preference file name.
<add> */
<add> public SharedPreferencesSonarPlugin(Context context, String name) {
<add> this(context, name, MODE_PRIVATE);
<add> }
<add>
<add> /**
<add> * Creates a {@link android.content.SharedPreferences} plugin for Sonar
<add> *
<add> * @param context The context to retrieve the file from.
<add> * @param name The preference file name.
<add> * @param mode The Context mode to utilize.
<add> */
<add> public SharedPreferencesSonarPlugin(Context context, String name, int mode) {
<add> mSharedPreferences = context.getSharedPreferences(name, mode);
<ide>
<ide> mSharedPreferences.registerOnSharedPreferenceChangeListener(
<ide> new SharedPreferences.OnSharedPreferenceChangeListener() {
|
|
JavaScript
|
mit
|
2d0d74dd754665d4d250dacedb7d19df7ec49361
| 0 |
pebble/cloudpebble,pebble/cloudpebble,pebble/cloudpebble,pebble/cloudpebble,thunsaker/cloudpebble,thunsaker/cloudpebble,thunsaker/cloudpebble,thunsaker/cloudpebble,pebble/cloudpebble
|
// Project-list page wiring: project creation dialog, zip/GitHub import dialog,
// and deep-link handling for /ide/import/github/... URLs.
$(function() {
    $('#create-project').click(function() {
        // Re-enable the dialog controls in case a previous attempt left them disabled.
        // BUG FIX: 'input button select' was a descendant selector (a <select> inside a
        // <button> inside an <input>) which matches nothing; a comma list is intended.
        $('#create-project').find('input, button, select').removeAttr('disabled');
        $('#project-prompt').modal();
    });

    // Non-native project types have a fixed template and a fixed SDK version.
    $('#project-type').change(function() {
        var val = $(this).val();
        if(val != 'native') {
            $('#project-template').val(0);
            $('#template-holder').hide();
            if (val == 'pebblejs') {
                $('#project-sdk-version').val('3');
            } else {
                $('#project-sdk-version').val('2');
            }
            $('.sdk-version').hide();
        } else {
            $('#template-holder').show();
            $('.sdk-version').show();
        }
    });

    // simplyjs is only available on SDK 2; pebblejs only on SDK 3.
    $('#project-sdk-version').change(function() {
        var val = $(this).val();
        if(val == '3') {
            $('#project-type').find('[value=simplyjs]').attr('disabled', 'disabled');
            $('#project-type').find('[value=pebblejs]').removeAttr('disabled');
        } else {
            $('#project-type').find('[value=simplyjs]').removeAttr('disabled');
            $('#project-type').find('[value=pebblejs]').attr('disabled', 'disabled');
        }
    });

    // Validate the name, then POST the new project and redirect to it on success.
    $('#project-confirm-button').click(function() {
        var value = $('#project-prompt-value').val();
        // BUG FIX: the selector was missing its '#' prefix, so the error box was
        // never reset between attempts.
        $('#project-prompt-errors').addClass("hide");
        if(value === '') {
            $('#project-prompt-errors').removeClass('hide').text(gettext("You must enter a name."));
            return;
        }
        // BUG FIX: comma-separated selector (see note at the top of this handler group).
        $('#create-project').find('input, button, select').attr('disabled', 'disabled');
        $.post('/ide/project/create', {
            name: value,
            template: $('#project-template').val(),
            type: $('#project-type').val(),
            sdk: $('#project-sdk-version').val()
        }, function(data) {
            if(!data.success) {
                $('#project-prompt-errors').removeClass('hide').text(data.error);
            } else {
                window.location.href = "/ide/project/" + data.id;
            }
        }
        );
    });

    // Lock/unlock the entire import dialog while an import request is in flight.
    var disable_import_controls = function() {
        $('#import-prompt').find('input, button').attr('disabled', 'disabled');
        $('#import-prompt').find('.nav > li').addClass('disabled').find('a').removeAttr('data-toggle');
    };
    var enable_import_controls = function() {
        $('#import-prompt').find('input, button').removeAttr('disabled');
        $('#import-prompt').find('.nav > li').removeClass('disabled').find('a').attr('data-toggle', 'tab');
    };

    // Poll the server-side task every second until it succeeds (redirect) or fails
    // (show the error and re-enable the dialog).
    var handle_import_progress = function(active_set, task_id, project_id) {
        var check = function() {
            $.getJSON('/ide/task/' + task_id, function(data) {
                if(data.state.status == 'SUCCESS') {
                    window.location.href = '/ide/project/' + project_id;
                    return;
                } else if(data.state.status == 'FAILURE') {
                    active_set.find('.errors').removeClass('hide').text(interpolate(gettext("Error: %s"), [data.state.result]));
                    enable_import_controls();
                    active_set.find('.progress').addClass('hide');
                    return;
                } else {
                    setTimeout(check, 1000);
                }
            });
        };
        setTimeout(check, 1000);
    };

    // Upload a zip archive as a new project.
    var import_archive = function(active_set) {
        var name = active_set.find('#import-zip-name').val();
        if(name.replace(/\s/g, '') === '') {
            active_set.find('.errors').removeClass('hide').text(gettext("You must specify a project name."));
            return;
        }
        var files = active_set.find('input[type=file]').get(0).files;
        if(files.length != 1) {
            active_set.find('.errors').removeClass('hide').text(gettext("You must upload a zip file."));
            return;
        }
        var file = files[0];
        // This check seems to fail on some systems.
        //if(file.type != 'application/zip' && file.type != 'application/x-zip-compressed') {
        //    active_set.find('.errors').removeClass('hide').text("You must upload a zip file.");
        //    return;
        //}
        disable_import_controls();
        var form_data = new FormData();
        form_data.append('name', name);
        form_data.append('archive', file);
        active_set.find('.progress').removeClass('hide');
        $.ajax({
            url: '/ide/import/zip',
            type: "POST",
            data: form_data,
            processData: false,   // hand the FormData to XHR untouched
            contentType: false,   // let the browser set the multipart boundary
            dataType: 'json',
            success: function(data) {
                if(data.success) {
                    handle_import_progress(active_set, data.task_id, data.project_id);
                } else {
                    active_set.find('.errors').removeClass('hide').text(data.error);
                    enable_import_controls();
                    active_set.find('.progress').addClass('hide');
                }
            }
        });
        ga('send', 'event', 'project', 'import', 'zip');
    };

    // Clone a GitHub repository as a new project.
    var import_github = function(active_set) {
        var name = active_set.find('#import-github-name').val();
        var url = active_set.find('#import-github-url').val();
        var branch = active_set.find('#import-github-branch').val();
        var add_remote = !!active_set.find('#import-github-add-remote').is(':checked');
        if(name.replace(/\s/g, '') === '') {
            active_set.find('.errors').removeClass('hide').text(gettext("You must specify a project name."));
            return;
        }
        // This is identical to the regex used on the server.
        if(!/^(?:https?:\/\/|git@|git:\/\/)?(?:www\.)?github\.com[\/:]([\w.-]+)\/([\w.-]+?)(?:\.git|\/|$)/.test(url)) {
            active_set.find('.errors').removeClass('hide').text(gettext("You must specify a complete GitHub project URL"));
            return;
        }
        if(branch.length == 0) {
            branch = 'master';
        }
        disable_import_controls();
        active_set.find('.progress').removeClass('hide');
        $.post('/ide/import/github', {name: name, repo: url, branch: branch, add_remote: add_remote}, function(data) {
            if(data.success) {
                handle_import_progress(active_set, data.task_id, data.project_id);
            } else {
                active_set.find('.errors').removeClass('hide').text(data.error);
                enable_import_controls();
                active_set.find('.progress').addClass('hide');
            }
        });
        ga('send', 'event', 'project', 'import', 'github');
    };

    // Dispatch to the importer matching the currently active dialog tab.
    var run_project_import = function() {
        var active_set = $('#import-prompt .tab-pane.active');
        active_set.find('.errors').addClass('hide');
        if(active_set.attr('id') == 'import-zip') {
            import_archive(active_set);
        } else if(active_set.attr('id') == 'import-github') {
            import_github(active_set);
        }
    };

    $('#run-import').click(run_project_import);
    $('#import-prompt form').submit(function (e) {
        e.preventDefault();
        $('#run-import').click();
    });
    $('#import-project').click(function() {
        $('#import-prompt').modal();
    });
    $('#project-prompt form').submit(function (e){
        e.preventDefault();
        $('#project-confirm-button').click();
    });

    // Deep link: /ide/import/github/<user>/<repo>[/<branch...>] pre-fills the GitHub tab.
    // Clean up stray forward slashes.
    var path = location.pathname.replace(/\/+/g, '/');
    if (path.indexOf('/ide/import/github/') === 0) {
        var parts = path.substr(1).split('/');
        $('#import-prompt').modal();
        $('#import-github-name').val(parts[3]);
        $('#import-github-url').val('github.com/' + parts[3] + '/' + parts[4]);
        if (parts.length > 5) {
            $('#import-github-branch').val(parts.slice(5).join('/'));
        }
        // BUG FIX: quote the attribute value — unquoted values containing '#' are
        // rejected by Sizzle in newer jQuery versions.
        $('a[href="#import-github"]').tab('show');
    }
    jquery_csrf_setup();
});
|
ide/static/ide/js/project_list.js
|
// Project-list page wiring: project creation dialog, zip/GitHub import dialog,
// and deep-link handling for /ide/import/github/... URLs.
$(function() {
    $('#create-project').click(function() {
        // Re-enable the dialog controls in case a previous attempt left them disabled.
        // BUG FIX: 'input button select' was a descendant selector (a <select> inside a
        // <button> inside an <input>) which matches nothing; a comma list is intended.
        $('#create-project').find('input, button, select').removeAttr('disabled');
        $('#project-prompt').modal();
    });

    // Non-native project types have a fixed template and a fixed SDK version.
    $('#project-type').change(function() {
        var val = $(this).val();
        if(val != 'native') {
            $('#project-template').val(0);
            $('#template-holder').hide();
            if (val == 'pebblejs') {
                $('#project-sdk-version').val('3');
            } else {
                $('#project-sdk-version').val('2');
            }
            $('.sdk-version').hide();
        } else {
            $('#template-holder').show();
            $('.sdk-version').show();
        }
    });

    // simplyjs is only available on SDK 2; pebblejs only on SDK 3.
    $('#project-sdk-version').change(function() {
        var val = $(this).val();
        if(val == '3') {
            $('#project-type').find('[value=simplyjs]').attr('disabled', 'disabled');
            $('#project-type').find('[value=pebblejs]').removeAttr('disabled');
        } else {
            $('#project-type').find('[value=simplyjs]').removeAttr('disabled');
            $('#project-type').find('[value=pebblejs]').attr('disabled', 'disabled');
        }
    });

    // Validate the name, then POST the new project and redirect to it on success.
    $('#project-confirm-button').click(function() {
        var value = $('#project-prompt-value').val();
        // BUG FIX: the selector was missing its '#' prefix, so the error box was
        // never reset between attempts.
        $('#project-prompt-errors').addClass("hide");
        if(value === '') {
            $('#project-prompt-errors').removeClass('hide').text(gettext("You must enter a name."));
            return;
        }
        // BUG FIX: comma-separated selector (see note at the top of this handler group).
        $('#create-project').find('input, button, select').attr('disabled', 'disabled');
        $.post('/ide/project/create', {
            name: value,
            template: $('#project-template').val(),
            type: $('#project-type').val(),
            sdk: $('#project-sdk-version').val()
        }, function(data) {
            if(!data.success) {
                $('#project-prompt-errors').removeClass('hide').text(data.error);
            } else {
                window.location.href = "/ide/project/" + data.id;
            }
        }
        );
    });

    // Lock/unlock the entire import dialog while an import request is in flight.
    var disable_import_controls = function() {
        $('#import-prompt').find('input, button').attr('disabled', 'disabled');
        $('#import-prompt').find('.nav > li').addClass('disabled').find('a').removeAttr('data-toggle');
    };
    var enable_import_controls = function() {
        $('#import-prompt').find('input, button').removeAttr('disabled');
        $('#import-prompt').find('.nav > li').removeClass('disabled').find('a').attr('data-toggle', 'tab');
    };

    // Poll the server-side task every second until it succeeds (redirect) or fails
    // (show the error and re-enable the dialog).
    var handle_import_progress = function(active_set, task_id, project_id) {
        var check = function() {
            $.getJSON('/ide/task/' + task_id, function(data) {
                if(data.state.status == 'SUCCESS') {
                    window.location.href = '/ide/project/' + project_id;
                    return;
                } else if(data.state.status == 'FAILURE') {
                    active_set.find('.errors').removeClass('hide').text(interpolate(gettext("Error: %s"), [data.state.result]));
                    enable_import_controls();
                    active_set.find('.progress').addClass('hide');
                    return;
                } else {
                    setTimeout(check, 1000);
                }
            });
        };
        setTimeout(check, 1000);
    };

    // Upload a zip archive as a new project.
    var import_archive = function(active_set) {
        var name = active_set.find('#import-zip-name').val();
        if(name.replace(/\s/g, '') === '') {
            active_set.find('.errors').removeClass('hide').text(gettext("You must specify a project name."));
            return;
        }
        var files = active_set.find('input[type=file]').get(0).files;
        if(files.length != 1) {
            active_set.find('.errors').removeClass('hide').text(gettext("You must upload a zip file."));
            return;
        }
        var file = files[0];
        // This check seems to fail on some systems.
        //if(file.type != 'application/zip' && file.type != 'application/x-zip-compressed') {
        //    active_set.find('.errors').removeClass('hide').text("You must upload a zip file.");
        //    return;
        //}
        disable_import_controls();
        var form_data = new FormData();
        form_data.append('name', name);
        form_data.append('archive', file);
        active_set.find('.progress').removeClass('hide');
        $.ajax({
            url: '/ide/import/zip',
            type: "POST",
            data: form_data,
            processData: false,   // hand the FormData to XHR untouched
            contentType: false,   // let the browser set the multipart boundary
            dataType: 'json',
            success: function(data) {
                if(data.success) {
                    handle_import_progress(active_set, data.task_id, data.project_id);
                } else {
                    active_set.find('.errors').removeClass('hide').text(data.error);
                    enable_import_controls();
                    active_set.find('.progress').addClass('hide');
                }
            }
        });
        ga('send', 'event', 'project', 'import', 'zip');
    };

    // Clone a GitHub repository as a new project.
    var import_github = function(active_set) {
        var name = active_set.find('#import-github-name').val();
        var url = active_set.find('#import-github-url').val();
        var branch = active_set.find('#import-github-branch').val();
        var add_remote = !!active_set.find('#import-github-add-remote').is(':checked');
        if(name.replace(/\s/g, '') === '') {
            active_set.find('.errors').removeClass('hide').text(gettext("You must specify a project name."));
            return;
        }
        // This is identical to the regex used on the server.
        if(!/^(?:https?:\/\/|git@|git:\/\/)?(?:www\.)?github\.com[\/:]([\w.-]+)\/([\w.-]+?)(?:\.git|\/|$)/.test(url)) {
            active_set.find('.errors').removeClass('hide').text(gettext("You must specify a complete GitHub project URL"));
            return;
        }
        if(branch.length == 0) {
            branch = 'master';
        }
        disable_import_controls();
        active_set.find('.progress').removeClass('hide');
        $.post('/ide/import/github', {name: name, repo: url, branch: branch, add_remote: add_remote}, function(data) {
            if(data.success) {
                handle_import_progress(active_set, data.task_id, data.project_id);
            } else {
                active_set.find('.errors').removeClass('hide').text(data.error);
                enable_import_controls();
                active_set.find('.progress').addClass('hide');
            }
        });
        ga('send', 'event', 'project', 'import', 'github');
    };

    // Dispatch to the importer matching the currently active dialog tab.
    var run_project_import = function() {
        var active_set = $('#import-prompt .tab-pane.active');
        active_set.find('.errors').addClass('hide');
        if(active_set.attr('id') == 'import-zip') {
            import_archive(active_set);
        } else if(active_set.attr('id') == 'import-github') {
            import_github(active_set);
        }
    };

    $('#run-import').click(run_project_import);
    $('#import-prompt form').submit(function(event) {
        event.preventDefault();
        $('#run-import').click();
    });
    $('#import-project').click(function() {
        $('#import-prompt').modal();
    });
    $('#project-prompt form').submit(function (e){
        e.preventDefault();
        $('#project-confirm-button').click();
    });

    // Deep link: /ide/import/github/<user>/<repo>[/<branch...>] pre-fills the GitHub tab.
    // Clean up stray forward slashes.
    var path = location.pathname.replace(/\/+/g, '/');
    if (path.indexOf('/ide/import/github/') === 0) {
        var parts = path.substr(1).split('/');
        $('#import-prompt').modal();
        $('#import-github-name').val(parts[3]);
        $('#import-github-url').val('github.com/' + parts[3] + '/' + parts[4]);
        if (parts.length > 5) {
            $('#import-github-branch').val(parts.slice(5).join('/'));
        }
        // BUG FIX: quote the attribute value — unquoted values containing '#' are
        // rejected by Sizzle in newer jQuery versions.
        $('a[href="#import-github"]').tab('show');
    }
    jquery_csrf_setup();
});
|
Renamed event parameter to e.
|
ide/static/ide/js/project_list.js
|
Renamed event parameter to e.
|
<ide><path>de/static/ide/js/project_list.js
<ide> };
<ide>
<ide> $('#run-import').click(run_project_import);
<del> $('#import-prompt form').submit(function(event) {
<del> event.preventDefault();
<add> $('#import-prompt form').submit(function (e) {
<add> e.preventDefault();
<ide> $('#run-import').click();
<ide> });
<ide>
|
|
Java
|
apache-2.0
|
f6173ec7ec7a795114661f47d9e9bc72f8ff4f62
| 0 |
philliphsu/BottomSheetPickers
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.philliphsu.bottomsheetpickers.date;
import android.app.Activity;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.text.format.DateUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.philliphsu.bottomsheetpickers.HapticFeedbackController;
import com.philliphsu.bottomsheetpickers.R;
import com.philliphsu.bottomsheetpickers.Utils;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import static com.philliphsu.bottomsheetpickers.date.DateFormatHelper.formatDate;
import static com.philliphsu.bottomsheetpickers.date.PagingDayPickerView.DAY_PICKER_INDEX;
/**
* Dialog allowing users to select a date.
*/
public class BottomSheetDatePickerDialog extends DatePickerDialog implements
OnClickListener, DatePickerController {
    private static final String TAG = "DatePickerDialog";

    // Sentinel meaning "no view has been selected yet".
    private static final int UNINITIALIZED = -1;
    // Child indices of the two pickers inside mAnimator.
    private static final int MONTH_AND_DAY_VIEW = 0;
    private static final int YEAR_VIEW = 1;

    // Bundle keys used to persist state across configuration changes.
    private static final String KEY_SELECTED_YEAR = "year";
    private static final String KEY_SELECTED_MONTH = "month";
    private static final String KEY_SELECTED_DAY = "day";
    private static final String KEY_LIST_POSITION = "list_position";
    private static final String KEY_WEEK_START = "week_start";
    private static final String KEY_YEAR_START = "year_start";
    private static final String KEY_YEAR_END = "year_end";
    private static final String KEY_CURRENT_VIEW = "current_view";
    private static final String KEY_LIST_POSITION_OFFSET = "list_position_offset";
    private static final String KEY_DAY_PICKER_CURRENT_INDEX = "day_picker_current_index";
    private static final String KEY_MIN_DATE_MILLIS = "min_date_millis";
    private static final String KEY_MAX_DATE_MILLIS = "max_date_millis";

    // Selectable year range when no explicit min/max date is set.
    private static final int DEFAULT_START_YEAR = 1900;
    private static final int DEFAULT_END_YEAR = 2100;

    private static final int ANIMATION_DURATION = 300;
    private static final int ANIMATION_DELAY = 500;

    // NOTE(review): SimpleDateFormat is not thread-safe; these statics are presumably
    // only touched on the UI thread — confirm before sharing across threads.
    private static SimpleDateFormat YEAR_FORMAT = new SimpleDateFormat("yyyy", Locale.getDefault());
    private static SimpleDateFormat DAY_FORMAT = new SimpleDateFormat("dd", Locale.getDefault());

    // Currently selected date.
    private final Calendar mCalendar = Calendar.getInstance();
    private OnDateSetListener mCallBack;
    private HashSet<OnDateChangedListener> mListeners = new HashSet<OnDateChangedListener>();

    // View references populated in onCreateView.
    private AccessibleDateAnimator mAnimator;
    private TextView mDayOfWeekView;
    private LinearLayout mMonthDayYearView;
    private TextView mFirstTextView;
    private TextView mSecondTextView;
    private PagingDayPickerView mDayPickerView;
    private YearPickerView mYearPickerView;
    private Button mDoneButton;
    private Button mCancelButton;

    private int mCurrentView = UNINITIALIZED;

    private int mWeekStart = mCalendar.getFirstDayOfWeek();
    private int mMinYear = DEFAULT_START_YEAR;
    private int mMaxYear = DEFAULT_END_YEAR;
    // Optional hard bounds on the selectable date; null means unbounded.
    @Nullable
    private Calendar mMinDate;
    @Nullable
    private Calendar mMaxDate;

    private HapticFeedbackController mHapticFeedbackController;

    private CalendarDay mSelectedDay;
    private boolean mDelayAnimation = true;

    // Accessibility strings.
    private String mDayPickerDescription;
    private String mSelectDay;
    private String mYearPickerDescription;
    private String mSelectYear;

    // Relative positions of (MD) and Y in the locale's date formatting style.
    private int mLocaleMonthDayIndex;
    private int mLocaleYearIndex;
    /** Required public no-arg constructor so the framework can re-instantiate the fragment. */
    public BottomSheetDatePickerDialog() {
        // Empty constructor required for dialog fragment.
    }
/**
* @param callBack How the parent is notified that the date is set.
* @param year The initial year of the dialog.
* @param monthOfYear The initial month of the dialog.
* @param dayOfMonth The initial day of the dialog.
*/
public static BottomSheetDatePickerDialog newInstance(OnDateSetListener callBack, int year,
int monthOfYear,
int dayOfMonth) {
BottomSheetDatePickerDialog ret = new BottomSheetDatePickerDialog();
ret.initialize(callBack, year, monthOfYear, dayOfMonth);
return ret;
}
    /**
     * Seeds the dialog with its result listener and initial date.
     *
     * @param callBack notified when the user confirms a date.
     * @param year initial year.
     * @param monthOfYear initial zero-based month.
     * @param dayOfMonth initial day of month.
     */
    public void initialize(OnDateSetListener callBack, int year, int monthOfYear, int dayOfMonth) {
        mCallBack = callBack;
        mCalendar.set(Calendar.YEAR, year);
        mCalendar.set(Calendar.MONTH, monthOfYear);
        mCalendar.set(Calendar.DAY_OF_MONTH, dayOfMonth);
    }
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        final Activity activity = getActivity();
        // Keep the soft keyboard hidden while the picker dialog is up.
        activity.getWindow().setSoftInputMode(
                WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
        // Restore the previously selected date after a configuration change.
        if (savedInstanceState != null) {
            mCalendar.set(Calendar.YEAR, savedInstanceState.getInt(KEY_SELECTED_YEAR));
            mCalendar.set(Calendar.MONTH, savedInstanceState.getInt(KEY_SELECTED_MONTH));
            mCalendar.set(Calendar.DAY_OF_MONTH, savedInstanceState.getInt(KEY_SELECTED_DAY));
        }
    }
    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Selected date and configured bounds.
        outState.putInt(KEY_SELECTED_YEAR, mCalendar.get(Calendar.YEAR));
        outState.putInt(KEY_SELECTED_MONTH, mCalendar.get(Calendar.MONTH));
        outState.putInt(KEY_SELECTED_DAY, mCalendar.get(Calendar.DAY_OF_MONTH));
        outState.putInt(KEY_WEEK_START, mWeekStart);
        outState.putInt(KEY_YEAR_START, mMinYear);
        outState.putInt(KEY_YEAR_END, mMaxYear);
        outState.putInt(KEY_CURRENT_VIEW, mCurrentView);
        // Scroll position of whichever picker is currently showing (-1 if neither).
        int listPosition = -1;
        if (mCurrentView == MONTH_AND_DAY_VIEW) {
            listPosition = mDayPickerView.getPagerPosition();
            outState.putInt(KEY_DAY_PICKER_CURRENT_INDEX, mDayPickerView.getCurrentView());
        } else if (mCurrentView == YEAR_VIEW) {
            listPosition = mYearPickerView.getFirstVisiblePosition();
            outState.putInt(KEY_LIST_POSITION_OFFSET, mYearPickerView.getFirstPositionOffset());
        }
        outState.putInt(KEY_LIST_POSITION, listPosition);
        // Min/max are written independently because either may be unset.
        if (mMinDate != null) {
            outState.putLong(KEY_MIN_DATE_MILLIS, mMinDate.getTimeInMillis());
        }
        if (mMaxDate != null) {
            outState.putLong(KEY_MAX_DATE_MILLIS, mMaxDate.getTimeInMillis());
        }
    }
    /**
     * Inflates and wires the whole picker: header text views, day/year pickers inside the
     * animator, Done/Cancel buttons, theming, and restoration of any saved scroll state.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        final View view = super.onCreateView(inflater, container, savedInstanceState);
        // Header: day-of-week line plus the two tappable (month/day) and (year) fields.
        mDayOfWeekView = (TextView) view.findViewById(R.id.date_picker_header);
        mDayOfWeekView.setTypeface(Utils.SANS_SERIF_LIGHT_BOLD);
        mMonthDayYearView = (LinearLayout) view.findViewById(R.id.date_picker_month_day_year);
        mFirstTextView = (TextView) view.findViewById(R.id.date_picker_first_textview);
        mFirstTextView.setOnClickListener(this);
        mFirstTextView.setTypeface(Utils.SANS_SERIF_LIGHT_BOLD);
        mSecondTextView = (TextView) view.findViewById(R.id.date_picker_second_textview);
        mSecondTextView.setOnClickListener(this);
        mSecondTextView.setTypeface(Utils.SANS_SERIF_LIGHT_BOLD);
        // Defaults used when there is no saved state to restore.
        int listPosition = -1;
        int listPositionOffset = 0;
        int currentView = MONTH_AND_DAY_VIEW;
        int dayPickerCurrentView = DAY_PICKER_INDEX;
        if (savedInstanceState != null) {
            mWeekStart = savedInstanceState.getInt(KEY_WEEK_START);
            mMinYear = savedInstanceState.getInt(KEY_YEAR_START);
            mMaxYear = savedInstanceState.getInt(KEY_YEAR_END);
            currentView = savedInstanceState.getInt(KEY_CURRENT_VIEW);
            listPosition = savedInstanceState.getInt(KEY_LIST_POSITION);
            listPositionOffset = savedInstanceState.getInt(KEY_LIST_POSITION_OFFSET);
            dayPickerCurrentView = savedInstanceState.getInt(KEY_DAY_PICKER_CURRENT_INDEX);
            // Don't restore both in one block because it may well be that only one was set.
            if (savedInstanceState.containsKey(KEY_MIN_DATE_MILLIS)) {
                mMinDate = Calendar.getInstance();
                mMinDate.setTimeInMillis(savedInstanceState.getLong(KEY_MIN_DATE_MILLIS));
            }
            if (savedInstanceState.containsKey(KEY_MAX_DATE_MILLIS)) {
                mMaxDate = Calendar.getInstance();
                mMaxDate.setTimeInMillis(savedInstanceState.getLong(KEY_MAX_DATE_MILLIS));
            }
        }
        // Build the two picker pages.
        final Activity activity = getActivity();
        mDayPickerView = new PagingDayPickerView(activity, this, mThemeDark);
        mYearPickerView = new YearPickerView(activity, this);
        mYearPickerView.setTheme(activity, mThemeDark);
        // Accessibility strings announced when switching pages.
        Resources res = getResources();
        mDayPickerDescription = res.getString(R.string.day_picker_description);
        mSelectDay = res.getString(R.string.select_day);
        mYearPickerDescription = res.getString(R.string.year_picker_description);
        mSelectYear = res.getString(R.string.select_year);
        // Animator hosts both pickers and cross-fades between them.
        mAnimator = (AccessibleDateAnimator) view.findViewById(R.id.animator);
        mAnimator.addView(mDayPickerView);
        mAnimator.addView(mYearPickerView);
        mAnimator.setDateMillis(mCalendar.getTimeInMillis());
        // TODO: Replace with animation decided upon by the design team.
        Animation animation = new AlphaAnimation(0.0f, 1.0f);
        animation.setDuration(ANIMATION_DURATION);
        mAnimator.setInAnimation(animation);
        // TODO: Replace with animation decided upon by the design team.
        Animation animation2 = new AlphaAnimation(1.0f, 0.0f);
        animation2.setDuration(ANIMATION_DURATION);
        mAnimator.setOutAnimation(animation2);
        // Done confirms the current selection and notifies the callback.
        mDoneButton = (Button) view.findViewById(R.id.done);
        mDoneButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                tryVibrate();
                if (mCallBack != null) {
                    mCallBack.onDateSet(BottomSheetDatePickerDialog.this, mCalendar.get(Calendar.YEAR),
                            mCalendar.get(Calendar.MONTH), mCalendar.get(Calendar.DAY_OF_MONTH));
                }
                dismiss();
            }
        });
        mCancelButton = (Button) view.findViewById(R.id.cancel);
        mCancelButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                dismiss();
            }
        });
        // Setup action button text colors.
        mCancelButton.setTextColor(mAccentColor);
        mDoneButton.setTextColor(mAccentColor);
        // Theme-specific configurations.
        if (mThemeDark) {
            // This is so the margin gets colored as well.
            view.setBackgroundColor(mDarkGray);
            mAnimator.setBackgroundColor(mDarkGray);
            int selectableItemBg = ContextCompat.getColor(activity, R.color.selectable_item_background_dark);
            Utils.setColorControlHighlight(mCancelButton, selectableItemBg);
            Utils.setColorControlHighlight(mDoneButton, selectableItemBg);
        }
        // Configurations for both themes.
        View selectedDateLayout = view.findViewById(R.id.day_picker_selected_date_layout);
        selectedDateLayout.setBackgroundColor(mThemeDark ? mLightGray : mAccentColor);
        // Compute locale ordering of the header fields, render, then show the saved page.
        determineLocale_MD_Y_Indices();
        updateDisplay(false);
        setCurrentView(currentView);
        // Restore scroll position of whichever picker was showing.
        if (listPosition != -1) {
            if (currentView == MONTH_AND_DAY_VIEW) {
                mDayPickerView.postSetSelection(listPosition, false);
            } else if (currentView == YEAR_VIEW) {
                mYearPickerView.postSetSelectionFromTop(listPosition, listPositionOffset);
            }
        }
        mDayPickerView.postSetupCurrentView(dayPickerCurrentView, false);
        mHapticFeedbackController = new HapticFeedbackController(activity);
        return view;
    }
    @Override
    public void onResume() {
        super.onResume();
        // Haptics only run while the fragment is in the foreground.
        mHapticFeedbackController.start();
    }
    @Override
    public void onPause() {
        super.onPause();
        // Release the haptic controller when backgrounded (mirrors onResume).
        mHapticFeedbackController.stop();
    }
/**
 * Switches the animator to the requested pane (month-day grid or year list),
 * updates the header highlight, and announces the change for accessibility.
 *
 * @param viewIndex either {@code MONTH_AND_DAY_VIEW} or {@code YEAR_VIEW};
 *                  any other value is silently ignored
 */
private void setCurrentView(final int viewIndex) {
long millis = mCalendar.getTimeInMillis();
switch (viewIndex) {
case MONTH_AND_DAY_VIEW:
mDayPickerView.onDateChanged();
// Only flip the displayed child when the pane actually changes, to avoid
// re-triggering the in/out animations on redundant calls.
if (mCurrentView != viewIndex) {
updateHeaderSelectedView(MONTH_AND_DAY_VIEW);
mAnimator.setDisplayedChild(MONTH_AND_DAY_VIEW);
mCurrentView = viewIndex;
}
String dayString = formatDate(mCalendar, DateUtils.FORMAT_SHOW_DATE);
mAnimator.setContentDescription(mDayPickerDescription + ": " + dayString);
Utils.tryAccessibilityAnnounce(mAnimator, mSelectDay);
break;
case YEAR_VIEW:
mYearPickerView.onDateChanged();
if (mCurrentView != viewIndex) {
updateHeaderSelectedView(YEAR_VIEW);
mAnimator.setDisplayedChild(YEAR_VIEW);
mCurrentView = viewIndex;
}
CharSequence yearString = YEAR_FORMAT.format(millis);
mAnimator.setContentDescription(mYearPickerDescription + ": " + yearString);
Utils.tryAccessibilityAnnounce(mAnimator, mSelectYear);
break;
}
}
/**
 * Highlights the header text view that corresponds to the active pane,
 * taking the locale's (month-day)/year ordering into account: the first and
 * second text views hold whichever of (MD) and (Y) comes first/second in
 * the current locale's date format.
 */
private void updateHeaderSelectedView(final int viewIndex) {
    if (viewIndex == MONTH_AND_DAY_VIEW) {
        mFirstTextView.setSelected(mLocaleMonthDayIndex == 0);
        mSecondTextView.setSelected(mLocaleMonthDayIndex != 0);
    } else if (viewIndex == YEAR_VIEW) {
        mFirstTextView.setSelected(mLocaleYearIndex == 0);
        mSecondTextView.setSelected(mLocaleYearIndex != 0);
    }
    // Any other index leaves the header untouched, matching previous behavior.
}
/**
 * Determine the relative positions of (MD) and Y according to the formatting style
 * of the current locale. Sets {@code mLocaleMonthDayIndex} and
 * {@code mLocaleYearIndex} to 0/1 accordingly.
 */
private void determineLocale_MD_Y_Indices() {
String formattedDate = formatMonthDayYear(mCalendar);
// Get the (MD) and Y parts of the formatted date in the current locale,
// so that we can compare their relative positions.
//
// You may be wondering why we need this method at all.
// "Just split() the formattedDate string around the year delimiter
// to get the two parts in an array already positioned correctly!
// Then setText() on mFirstTextView and mSecondTextView with the contents of that array!"
// That is harder than it sounds.
// Different locales use different year delimiters, and some don't use one at all.
// For example, a fully formatted date in the French locale is "30 juin 2009".
String monthAndDay = formatMonthAndDay(mCalendar);
String year = extractYearFromFormattedDate(formattedDate, monthAndDay);
// All locales format the M and D together; which comes
// first is not a necessary consideration for the comparison.
// NOTE(review): indexOf returns -1 when the standalone (MD) string does not
// appear verbatim in the full date (some locales format them differently);
// in that case the comparison below degrades to a guess — see updateDisplay.
if (formattedDate.indexOf(monthAndDay) < formattedDate.indexOf(year/*not null*/)) {
mLocaleMonthDayIndex = 0;
mLocaleYearIndex = 1;
} else {
mLocaleYearIndex = 0;
mLocaleMonthDayIndex = 1;
}
}
/** Formats {@code calendar} as an abbreviated month, day and year in the current locale. */
private static String formatMonthDayYear(Calendar calendar) {
    return formatDate(calendar,
            DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_ABBREV_MONTH | DateUtils.FORMAT_SHOW_YEAR);
}

/** Formats {@code calendar} as an abbreviated month and day (no year) in the current locale. */
private static String formatMonthAndDay(Calendar calendar) {
    return formatDate(calendar,
            DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_ABBREV_MONTH | DateUtils.FORMAT_NO_YEAR);
}
/**
 * Extracts the formatted year portion of {@code formattedDate} by removing
 * the {@code monthAndDay} portion and returning the non-empty remainder
 * (including any delimiter characters attached to the year).
 *
 * @param formattedDate the full locale-formatted date string
 * @param monthAndDay   the month-day substring of {@code formattedDate}
 * @return the year part of {@code formattedDate}, or a non-localized
 *         fallback if the split unexpectedly yields nothing
 */
private String extractYearFromFormattedDate(String formattedDate, String monthAndDay) {
    // FIX: String.split() interprets its argument as a regular expression.
    // Locales can format the month-day with regex metacharacters (e.g. the
    // German "30. Juni" contains '.', and some formats contain parentheses),
    // which previously mis-split the date or threw PatternSyntaxException.
    // Quote the literal so it is matched verbatim.
    String[] parts = formattedDate.split(java.util.regex.Pattern.quote(monthAndDay));
    for (String part : parts) {
        // If the locale's date format is (MD)Y, then split(MD) = {"", Y}.
        // If it is Y(MD), then split(MD) = {Y}. "Trailing empty strings are
        // [...] not included in the resulting array."
        if (!part.isEmpty()) {
            return part;
        }
    }
    // We will NEVER reach here, as long as the parameters are valid strings.
    // We don't want this because it is not localized.
    return YEAR_FORMAT.format(mCalendar.getTime());
}
/**
 * Refreshes the header views (day-of-week, month-day and year text) from
 * {@code mCalendar}, splitting the locale-formatted full date into its
 * (MD) and (Y) parts so each lands in the correct header text view.
 *
 * @param announce whether to fire an accessibility announcement of the
 *                 newly selected full date
 */
private void updateDisplay(boolean announce) {
if (mDayOfWeekView != null) {
mDayOfWeekView.setText(mCalendar.getDisplayName(Calendar.DAY_OF_WEEK,
Calendar.LONG, Locale.getDefault()));
}
// Locate the standalone (MD) and (Y) strings inside the full formatted date
// so that each part can carry its locale-specific delimiter characters.
String fullDate = formatMonthDayYear(mCalendar);
String monthAndDay = formatMonthAndDay(mCalendar);
String year = YEAR_FORMAT.format(mCalendar.getTime());
int yearStart = fullDate.indexOf(year);
int yearEnd = yearStart + year.length();
int monthDayStart = fullDate.indexOf(monthAndDay);
int monthDayEnd = monthDayStart + monthAndDay.length();
boolean processed = false;
if (monthDayStart != -1 && yearStart != -1) {
// Both parts found verbatim: split the full date at the boundary between
// them, but only when the delimiter between the parts is short (<= 2 chars).
if (mLocaleMonthDayIndex < mLocaleYearIndex) {
if (yearStart - monthDayEnd <= 2) {
monthAndDay = fullDate.substring(monthDayStart, yearStart);
year = fullDate.substring(yearStart, fullDate.length());
processed = true;
}
} else {
if (monthDayStart - yearEnd <= 2) {
year = fullDate.substring(yearStart, monthDayStart);
monthAndDay = fullDate.substring(monthDayStart, fullDate.length());
processed = true;
}
}
} else {
// Some locales format the standalone month-day or standalone year differently
// than it appears in the full date. For instance, Turkey is one such locale.
// TODO: You may want to consider making localized string resources of the
// pattern strings used to format the (MD) and (Y) parts separately.
//
// We can't compare the relative indices of (MD) and (Y) determined earlier,
// because the results are dubious if we're here.
// It is appropriate to assume yearStart != -1. The case where the raw year
// is NOT present in the full date string is hard to imagine. As such,
// even though monthDayStart == -1, we can still determine the relative indices
// of (MD) and (Y) as follows.
//
// If yearStart is non-zero positive, then we can probably guess monthDayStart
// comes before the former.
// NOTE(review): if yearStart were ever -1 here, yearEnd would be computed
// from it and the substrings below would be wrong — TODO confirm the
// "year always present" assumption holds for all supported locales.
if (yearStart > 0) {
monthAndDay = fullDate.substring(0, yearStart);
year = fullDate.substring(yearStart, fullDate.length());
mLocaleMonthDayIndex = 0;
mLocaleYearIndex = 1;
} else {
year = fullDate.substring(0, yearEnd);
monthAndDay = fullDate.substring(yearEnd, fullDate.length());
mLocaleYearIndex = 0;
mLocaleMonthDayIndex = 1;
}
processed = true;
}
// Year delimiters longer than 2 characters, fall back on pre-2.1.1 implementation.
if (!processed) {
// The month-day is already formatted appropriately
year = extractYearFromFormattedDate(fullDate, monthAndDay);
}
// Place each part into the header view matching the locale's ordering.
mFirstTextView.setText(mLocaleMonthDayIndex == 0 ? monthAndDay : year);
mSecondTextView.setText(mLocaleMonthDayIndex == 0 ? year : monthAndDay);
// Accessibility.
long millis = mCalendar.getTimeInMillis();
mAnimator.setDateMillis(millis);
int flags = DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_NO_YEAR;
String monthAndDayText = formatDate(millis, flags);
mMonthDayYearView.setContentDescription(monthAndDayText);
if (announce) {
flags = DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_YEAR;
String fullDateText = formatDate(millis, flags);
Utils.tryAccessibilityAnnounce(mAnimator, fullDateText);
}
}
/**
 * Sets the day on which displayed weeks begin.
 *
 * @param startOfWeek a value from {@link Calendar#SUNDAY SUNDAY}
 *                    through {@link Calendar#SATURDAY SATURDAY}
 * @throws IllegalArgumentException if the value lies outside that range
 */
public void setFirstDayOfWeek(int startOfWeek) {
    final boolean inRange =
            startOfWeek >= Calendar.SUNDAY && startOfWeek <= Calendar.SATURDAY;
    if (!inRange) {
        throw new IllegalArgumentException(
                "Value must be between Calendar.SUNDAY and Calendar.SATURDAY");
    }
    mWeekStart = startOfWeek;
    // The day grid may not exist yet if this is called before onCreateView.
    if (mDayPickerView != null) {
        mDayPickerView.onChange();
    }
}
/**
 * Restricts this picker to years in [{@code startYear}, {@code endYear}].
 * If a {@link #setMinDate(Calendar) minimal date} and/or
 * {@link #setMaxDate(Calendar) maximal date} were set, dates within this
 * year range that fall outside those bounds remain disallowed.
 * <em>This does NOT change the minimal date's year or the maximal date's year.</em>
 *
 * @param startYear the first selectable year
 * @param endYear   the last selectable year; must exceed {@code startYear}
 * @throws IllegalArgumentException if {@code endYear <= startYear}
 */
public void setYearRange(int startYear, int endYear) {
    if (endYear <= startYear) {
        throw new IllegalArgumentException("Year end must be larger than year start");
    }
    mMinYear = startYear;
    mMaxYear = endYear;
    // Refresh the day grid if it has already been built.
    if (mDayPickerView != null) {
        mDayPickerView.onChange();
    }
}
/**
 * Sets the minimal date that can be selected in this date picker. Dates before (but not including)
 * the specified date will be disallowed from being selected.
 *
 * @param calendar a Calendar object set to the year, month, day desired as the mindate.
 */
public void setMinDate(Calendar calendar) {
mMinDate = calendar;
// Narrow the year range so the year picker cannot scroll before the min date's year.
setYearRange(calendar.get(Calendar.YEAR), mMaxYear);
}
/**
 * @return The minimal date supported by this date picker. Null if it has not been set.
 */
@Nullable
@Override
public Calendar getMinDate() {
return mMinDate;
}
/**
 * Sets the maximal date that can be selected in this date picker. Dates after (but not including)
 * the specified date will be disallowed from being selected.
 *
 * @param calendar a Calendar object set to the year, month, day desired as the maxdate.
 */
public void setMaxDate(Calendar calendar) {
mMaxDate = calendar;
// Narrow the year range so the year picker cannot scroll past the max date's year.
setYearRange(mMinYear, calendar.get(Calendar.YEAR));
}
/**
 * @return The maximal date supported by this date picker. Null if it has not been set.
 */
@Nullable
@Override
public Calendar getMaxDate() {
return mMaxDate;
}
/** Registers the callback invoked when the user confirms a date with the done button. */
public void setOnDateSetListener(OnDateSetListener listener) {
mCallBack = listener;
}
// Clamps the currently selected day number so it stays valid for the newly
// selected month / year, i.e. falls back to the last day of that month.
// e.g. Switching from Mar to Apr when Mar 31 is selected -> Apr 30
// e.g. Switching from 2012 to 2013 when Feb 29, 2012 is selected -> Feb 28, 2013
private void adjustDayInMonthIfNeeded(int month, int year) {
    final int selectedDay = mCalendar.get(Calendar.DAY_OF_MONTH);
    final int lastDayOfMonth = Utils.getDaysInMonth(month, year);
    if (selectedDay > lastDayOfMonth) {
        mCalendar.set(Calendar.DAY_OF_MONTH, lastDayOfMonth);
    }
}
/**
 * Handles taps on the two header text views: whichever view was tapped,
 * switch to the pane it represents given the locale's (MD)/(Y) ordering.
 */
@Override
public void onClick(View v) {
    tryVibrate();
    final int id = v.getId();
    if (id == R.id.date_picker_second_textview) {
        setCurrentView(mLocaleMonthDayIndex == 0 ? YEAR_VIEW : MONTH_AND_DAY_VIEW);
    } else if (id == R.id.date_picker_first_textview) {
        setCurrentView(mLocaleMonthDayIndex == 0 ? MONTH_AND_DAY_VIEW : YEAR_VIEW);
    }
}
/** Applies a year chosen in the year list and returns to the day grid. */
@Override
public void onYearSelected(int year) {
// Clamp the day BEFORE changing the year: the clamp reads the currently
// selected day and must run before Calendar could roll an invalid date over.
adjustDayInMonthIfNeeded(mCalendar.get(Calendar.MONTH), year);
mCalendar.set(Calendar.YEAR, year);
updatePickers();
setCurrentView(MONTH_AND_DAY_VIEW);
updateDisplay(true);
}
/** Applies a complete date chosen in the day grid. */
@Override
public void onDayOfMonthSelected(int year, int month, int day) {
mCalendar.set(Calendar.YEAR, year);
mCalendar.set(Calendar.MONTH, month);
mCalendar.set(Calendar.DAY_OF_MONTH, day);
updatePickers();
updateDisplay(true);
}
/** Applies a month/year pair chosen in the month picker. */
@Override
public void onMonthYearSelected(int month, int year) {
// Clamp the day first, while the calendar still holds the old month/year.
adjustDayInMonthIfNeeded(month, year);
mCalendar.set(Calendar.MONTH, month);
mCalendar.set(Calendar.YEAR, year);
updatePickers();
// Even though the MonthPickerView is already contained in this index,
// keep this call here for accessibility announcement of the new selection.
setCurrentView(MONTH_AND_DAY_VIEW);
updateDisplay(true);
}
/** Notifies every registered listener that the selected date has changed. */
private void updatePickers() {
    for (OnDateChangedListener listener : mListeners) {
        listener.onDateChanged();
    }
}
/**
 * Returns the currently selected day, reusing a single cached
 * {@code CalendarDay} instance that is refreshed from {@code mCalendar}
 * on every call.
 */
@Override
public CalendarDay getSelectedDay() {
if (mSelectedDay == null) {
mSelectedDay = new CalendarDay(mCalendar);
} else {
mSelectedDay.setDay(mCalendar.get(Calendar.YEAR),
mCalendar.get(Calendar.MONTH),
mCalendar.get(Calendar.DAY_OF_MONTH));
}
return mSelectedDay;
}
/** @return the first selectable year (inclusive). */
@Override
public int getMinYear() {
return mMinYear;
}
/** @return the last selectable year (inclusive). */
@Override
public int getMaxYear() {
return mMaxYear;
}
/** @return the configured first day of the week (a {@link Calendar} day constant). */
@Override
public int getFirstDayOfWeek() {
return mWeekStart;
}
/** Registers a listener notified whenever the selected date changes. */
@Override
public void registerOnDateChangedListener(OnDateChangedListener listener) {
mListeners.add(listener);
}
/** Unregisters a previously registered date-change listener. */
@Override
public void unregisterOnDateChangedListener(OnDateChangedListener listener) {
mListeners.remove(listener);
}
/** Performs haptic feedback if currently permitted by the controller. */
@Override
public void tryVibrate() {
mHapticFeedbackController.tryVibrate();
}
/** Supplies the layout resource inflated by the base dialog class. */
@Override
protected int contentLayout() {
return R.layout.date_picker_dialog;
}
}
|
bottomsheetpickers/src/main/java/com/philliphsu/bottomsheetpickers/date/BottomSheetDatePickerDialog.java
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.philliphsu.bottomsheetpickers.date;
import android.app.Activity;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.text.format.DateUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.philliphsu.bottomsheetpickers.HapticFeedbackController;
import com.philliphsu.bottomsheetpickers.R;
import com.philliphsu.bottomsheetpickers.Utils;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import static com.philliphsu.bottomsheetpickers.date.DateFormatHelper.formatDate;
import static com.philliphsu.bottomsheetpickers.date.PagingDayPickerView.DAY_PICKER_INDEX;
/**
* Dialog allowing users to select a date.
*/
public class BottomSheetDatePickerDialog extends DatePickerDialog implements
OnClickListener, DatePickerController {
private static final String TAG = "DatePickerDialog";
private static final int UNINITIALIZED = -1;
private static final int MONTH_AND_DAY_VIEW = 0;
private static final int YEAR_VIEW = 1;
private static final String KEY_SELECTED_YEAR = "year";
private static final String KEY_SELECTED_MONTH = "month";
private static final String KEY_SELECTED_DAY = "day";
private static final String KEY_LIST_POSITION = "list_position";
private static final String KEY_WEEK_START = "week_start";
private static final String KEY_YEAR_START = "year_start";
private static final String KEY_YEAR_END = "year_end";
private static final String KEY_CURRENT_VIEW = "current_view";
private static final String KEY_LIST_POSITION_OFFSET = "list_position_offset";
private static final String KEY_DAY_PICKER_CURRENT_INDEX = "day_picker_current_index";
private static final String KEY_MIN_DATE_MILLIS = "min_date_millis";
private static final String KEY_MAX_DATE_MILLIS = "max_date_millis";
private static final int DEFAULT_START_YEAR = 1900;
private static final int DEFAULT_END_YEAR = 2100;
private static final int ANIMATION_DURATION = 300;
private static final int ANIMATION_DELAY = 500;
private static SimpleDateFormat YEAR_FORMAT = new SimpleDateFormat("yyyy", Locale.getDefault());
private static SimpleDateFormat DAY_FORMAT = new SimpleDateFormat("dd", Locale.getDefault());
private final Calendar mCalendar = Calendar.getInstance();
private OnDateSetListener mCallBack;
private HashSet<OnDateChangedListener> mListeners = new HashSet<OnDateChangedListener>();
private AccessibleDateAnimator mAnimator;
private TextView mDayOfWeekView;
private LinearLayout mMonthDayYearView;
private TextView mFirstTextView;
private TextView mSecondTextView;
private PagingDayPickerView mDayPickerView;
private YearPickerView mYearPickerView;
private Button mDoneButton;
private Button mCancelButton;
private int mCurrentView = UNINITIALIZED;
private int mWeekStart = mCalendar.getFirstDayOfWeek();
private int mMinYear = DEFAULT_START_YEAR;
private int mMaxYear = DEFAULT_END_YEAR;
@Nullable
private Calendar mMinDate;
@Nullable
private Calendar mMaxDate;
private HapticFeedbackController mHapticFeedbackController;
private CalendarDay mSelectedDay;
private boolean mDelayAnimation = true;
// Accessibility strings.
private String mDayPickerDescription;
private String mSelectDay;
private String mYearPickerDescription;
private String mSelectYear;
// Relative positions of (MD) and Y in the locale's date formatting style.
private int mLocaleMonthDayIndex;
private int mLocaleYearIndex;
// Empty public constructor: dialog fragments are re-instantiated by the
// framework via this constructor, so initial state is passed through
// initialize()/newInstance() instead.
public BottomSheetDatePickerDialog() {
// Empty constructor required for dialog fragment.
}
/**
 * Creates and initializes a new picker instance.
 *
 * @param callBack How the parent is notified that the date is set.
 * @param year The initial year of the dialog.
 * @param monthOfYear The initial month of the dialog.
 * @param dayOfMonth The initial day of the dialog.
 */
public static BottomSheetDatePickerDialog newInstance(OnDateSetListener callBack, int year,
int monthOfYear,
int dayOfMonth) {
BottomSheetDatePickerDialog ret = new BottomSheetDatePickerDialog();
ret.initialize(callBack, year, monthOfYear, dayOfMonth);
return ret;
}
/**
 * Sets the confirmation callback and the initially selected date.
 *
 * @param callBack    notified when the user confirms a date
 * @param year        initial year
 * @param monthOfYear initial month ({@link Calendar#MONTH} semantics, 0-based)
 * @param dayOfMonth  initial day of month
 */
public void initialize(OnDateSetListener callBack, int year, int monthOfYear, int dayOfMonth) {
mCallBack = callBack;
mCalendar.set(Calendar.YEAR, year);
mCalendar.set(Calendar.MONTH, monthOfYear);
mCalendar.set(Calendar.DAY_OF_MONTH, dayOfMonth);
}
/**
 * Lifecycle: hides the soft keyboard for this window and, after a
 * configuration change, restores the previously selected date.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final Activity activity = getActivity();
activity.getWindow().setSoftInputMode(
WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
if (savedInstanceState != null) {
// Restore only the selected date here; view-related state (scroll
// positions, current pane) is restored later in onCreateView.
mCalendar.set(Calendar.YEAR, savedInstanceState.getInt(KEY_SELECTED_YEAR))<BAD>;
mCalendar.set(Calendar.MONTH, savedInstanceState.getInt(KEY_SELECTED_MONTH));
mCalendar.set(Calendar.DAY_OF_MONTH, savedInstanceState.getInt(KEY_SELECTED_DAY));
}
}
/**
 * Persists selection, year range, week start, current pane, list scroll
 * position and (if set) min/max dates across configuration changes.
 */
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(KEY_SELECTED_YEAR, mCalendar.get(Calendar.YEAR));
outState.putInt(KEY_SELECTED_MONTH, mCalendar.get(Calendar.MONTH));
outState.putInt(KEY_SELECTED_DAY, mCalendar.get(Calendar.DAY_OF_MONTH));
outState.putInt(KEY_WEEK_START, mWeekStart);
outState.putInt(KEY_YEAR_START, mMinYear);
outState.putInt(KEY_YEAR_END, mMaxYear);
outState.putInt(KEY_CURRENT_VIEW, mCurrentView);
// The scroll position saved depends on which pane is showing.
int listPosition = -1;
if (mCurrentView == MONTH_AND_DAY_VIEW) {
listPosition = mDayPickerView.getPagerPosition();
outState.putInt(KEY_DAY_PICKER_CURRENT_INDEX, mDayPickerView.getCurrentView());
} else if (mCurrentView == YEAR_VIEW) {
listPosition = mYearPickerView.getFirstVisiblePosition();
outState.putInt(KEY_LIST_POSITION_OFFSET, mYearPickerView.getFirstPositionOffset());
}
outState.putInt(KEY_LIST_POSITION, listPosition);
// Min and max are stored independently; either may be unset.
if (mMinDate != null) {
outState.putLong(KEY_MIN_DATE_MILLIS, mMinDate.getTimeInMillis());
}
if (mMaxDate != null) {
outState.putLong(KEY_MAX_DATE_MILLIS, mMaxDate.getTimeInMillis());
}
}
/**
 * Inflates and wires the picker UI: header text views, day/year pager panes,
 * done/cancel buttons, theming, and (if present) state restored from
 * {@code savedInstanceState}.
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
final View view = super.onCreateView(inflater, container, savedInstanceState);
// Header: day-of-week label plus the two clickable (MD)/(Y) text views.
mDayOfWeekView = (TextView) view.findViewById(R.id.date_picker_header);
mDayOfWeekView.setTypeface(Utils.SANS_SERIF_LIGHT_BOLD);
mMonthDayYearView = (LinearLayout) view.findViewById(R.id.date_picker_month_day_year);
mFirstTextView = (TextView) view.findViewById(R.id.date_picker_first_textview);
mFirstTextView.setOnClickListener(this);
mFirstTextView.setTypeface(Utils.SANS_SERIF_LIGHT_BOLD);
mSecondTextView = (TextView) view.findViewById(R.id.date_picker_second_textview);
mSecondTextView.setOnClickListener(this);
mSecondTextView.setTypeface(Utils.SANS_SERIF_LIGHT_BOLD);
// Defaults used when there is no saved state to restore.
int listPosition = -1;
int listPositionOffset = 0;
currentView = MONTH_AND_DAY_VIEW;
int dayPickerCurrentView = DAY_PICKER_INDEX;
if (savedInstanceState != null) {
mWeekStart = savedInstanceState.getInt(KEY_WEEK_START);
mMinYear = savedInstanceState.getInt(KEY_YEAR_START);
mMaxYear = savedInstanceState.getInt(KEY_YEAR_END);
currentView = savedInstanceState.getInt(KEY_CURRENT_VIEW);
listPosition = savedInstanceState.getInt(KEY_LIST_POSITION);
listPositionOffset = savedInstanceState.getInt(KEY_LIST_POSITION_OFFSET);
dayPickerCurrentView = savedInstanceState.getInt(KEY_DAY_PICKER_CURRENT_INDEX);
// Don't restore both in one block because it may well be that only one was set.
if (savedInstanceState.containsKey(KEY_MIN_DATE_MILLIS)) {
mMinDate = Calendar.getInstance();
mMinDate.setTimeInMillis(savedInstanceState.getLong(KEY_MIN_DATE_MILLIS))<BAD>;
}
if (savedInstanceState.containsKey(KEY_MAX_DATE_MILLIS)) {
mMaxDate = Calendar.getInstance();
mMaxDate.setTimeInMillis(savedInstanceState.getLong(KEY_MAX_DATE_MILLIS));
}
}
final Activity activity = getActivity();
mDayPickerView = new PagingDayPickerView(activity, this, mThemeDark);
mYearPickerView = new YearPickerView(activity, this);
mYearPickerView.setTheme(activity, mThemeDark);
// Accessibility strings announced when switching panes.
Resources res = getResources();
mDayPickerDescription = res.getString(R.string.day_picker_description);
mSelectDay = res.getString(R.string.select_day);
mYearPickerDescription = res.getString(R.string.year_picker_description);
mSelectYear = res.getString(R.string.select_year);
// The animator flips between the day grid (index 0) and year list (index 1).
mAnimator = (AccessibleDateAnimator) view.findViewById(R.id.animator);
mAnimator.addView(mDayPickerView);
mAnimator.addView(mYearPickerView);
mAnimator.setDateMillis(mCalendar.getTimeInMillis());
// TODO: Replace with animation decided upon by the design team.
Animation animation = new AlphaAnimation(0.0f, 1.0f);
animation.setDuration(ANIMATION_DURATION);
mAnimator.setInAnimation(animation);
// TODO: Replace with animation decided upon by the design team.
Animation animation2 = new AlphaAnimation(1.0f, 0.0f);
animation2.setDuration(ANIMATION_DURATION);
mAnimator.setOutAnimation(animation2);
mDoneButton = (Button) view.findViewById(R.id.done);
mDoneButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
tryVibrate();
// Notify the callback (if any) with the confirmed date, then close.
if (mCallBack != null) {
mCallBack.onDateSet(BottomSheetDatePickerDialog.this, mCalendar.get(Calendar.YEAR),
mCalendar.get(Calendar.MONTH), mCalendar.get(Calendar.DAY_OF_MONTH));
}
dismiss();
}
});
mCancelButton = (Button) view.findViewById(R.id.cancel);
mCancelButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
dismiss();
}
});
// Setup action button text colors.
mCancelButton.setTextColor(mAccentColor);
mDoneButton.setTextColor(mAccentColor);
// Theme-specific configurations.
if (mThemeDark) {
// This is so the margin gets colored as well.
view.setBackgroundColor(mDarkGray);
mAnimator.setBackgroundColor(mDarkGray);
int selectableItemBg = ContextCompat.getColor(activity, R.color.selectable_item_background_dark);
Utils.setColorControlHighlight(mCancelButton, selectableItemBg);
Utils.setColorControlHighlight(mDoneButton, selectableItemBg);
}
// Configurations for both themes.
View selectedDateLayout = view.findViewById(R.id.day_picker_selected_date_layout);
selectedDateLayout.setBackgroundColor(mThemeDark ? mLightGray : mAccentColor);
// Resolve locale ordering, paint the header, and show the initial pane.
determineLocale_MD_Y_Indices();
updateDisplay(false);
setCurrentView(currentView);
// Restore scroll positions saved in onSaveInstanceState, if any.
if (listPosition != -1) {
if (currentView == MONTH_AND_DAY_VIEW) {
mDayPickerView.postSetSelection(listPosition, false);
} else if (currentView == YEAR_VIEW) {
mYearPickerView.postSetSelectionFromTop(listPosition, listPositionOffset);
}
}
mDayPickerView.postSetupCurrentView(dayPickerCurrentView, false);
mHapticFeedbackController = new HapticFeedbackController(activity);
return view;
}
// Lifecycle: keep the haptic feedback controller active only while the
// dialog is in the foreground (paired start/stop below).
@Override
public void onResume() {
super.onResume();
mHapticFeedbackController.start();
}
@Override
public void onPause() {
super.onPause();
mHapticFeedbackController.stop();
}
/**
 * Switches the animator to the requested pane (month-day grid or year list),
 * updates the header highlight, toggles cancelability, and announces the
 * change for accessibility.
 *
 * @param viewIndex either {@code MONTH_AND_DAY_VIEW} or {@code YEAR_VIEW};
 *                  any other value is silently ignored
 */
private void setCurrentView(final int viewIndex) {
long millis = mCalendar.getTimeInMillis();
switch (viewIndex) {
case MONTH_AND_DAY_VIEW:
mDayPickerView.onDateChanged();
// Re-enable dismissal while the day grid is showing.
setCancelable(true);
// Only flip the displayed child when the pane actually changes, to avoid
// re-triggering the in/out animations on redundant calls.
if (mCurrentView != viewIndex) {
updateHeaderSelectedView(MONTH_AND_DAY_VIEW);
mAnimator.setDisplayedChild(MONTH_AND_DAY_VIEW);
mCurrentView = viewIndex;
}
String dayString = formatDate(mCalendar, DateUtils.FORMAT_SHOW_DATE);
mAnimator.setContentDescription(mDayPickerDescription + ": " + dayString);
Utils.tryAccessibilityAnnounce(mAnimator, mSelectDay);
break;
case YEAR_VIEW:
mYearPickerView.onDateChanged();
// Dialog is made non-cancelable while the year list is showing —
// presumably so an accidental outside touch cannot dismiss mid-selection.
setCancelable(false);
if (mCurrentView != viewIndex) {
updateHeaderSelectedView(YEAR_VIEW);
mAnimator.setDisplayedChild(YEAR_VIEW);
mCurrentView = viewIndex;
}
CharSequence yearString = YEAR_FORMAT.format(millis);
mAnimator.setContentDescription(mYearPickerDescription + ": " + yearString);
Utils.tryAccessibilityAnnounce(mAnimator, mSelectYear);
break;
}
}
/**
 * Highlights the header text view that corresponds to the active pane,
 * taking the locale's (month-day)/year ordering into account: the first and
 * second text views hold whichever of (MD) and (Y) comes first/second in
 * the current locale's date format.
 */
private void updateHeaderSelectedView(final int viewIndex) {
    if (viewIndex == MONTH_AND_DAY_VIEW) {
        mFirstTextView.setSelected(mLocaleMonthDayIndex == 0);
        mSecondTextView.setSelected(mLocaleMonthDayIndex != 0);
    } else if (viewIndex == YEAR_VIEW) {
        mFirstTextView.setSelected(mLocaleYearIndex == 0);
        mSecondTextView.setSelected(mLocaleYearIndex != 0);
    }
    // Any other index leaves the header untouched, matching previous behavior.
}
/**
 * Determine the relative positions of (MD) and Y according to the formatting style
 * of the current locale. Sets {@code mLocaleMonthDayIndex} and
 * {@code mLocaleYearIndex} to 0/1 accordingly.
 */
private void determineLocale_MD_Y_Indices() {
String formattedDate = formatMonthDayYear(mCalendar);
// Get the (MD) and Y parts of the formatted date in the current locale,
// so that we can compare their relative positions.
//
// You may be wondering why we need this method at all.
// "Just split() the formattedDate string around the year delimiter
// to get the two parts in an array already positioned correctly!
// Then setText() on mFirstTextView and mSecondTextView with the contents of that array!"
// That is harder than it sounds.
// Different locales use different year delimiters, and some don't use one at all.
// For example, a fully formatted date in the French locale is "30 juin 2009".
String monthAndDay = formatMonthAndDay(mCalendar);
String year = extractYearFromFormattedDate(formattedDate, monthAndDay);
// All locales format the M and D together; which comes
// first is not a necessary consideration for the comparison.
// NOTE(review): indexOf returns -1 when the standalone (MD) string does not
// appear verbatim in the full date (some locales format them differently);
// in that case the comparison below degrades to a guess — see updateDisplay.
if (formattedDate.indexOf(monthAndDay) < formattedDate.indexOf(year/*not null*/)) {
mLocaleMonthDayIndex = 0;
mLocaleYearIndex = 1;
} else {
mLocaleYearIndex = 0;
mLocaleMonthDayIndex = 1;
}
}
/** Formats {@code calendar} as an abbreviated month, day and year in the current locale. */
private static String formatMonthDayYear(Calendar calendar) {
    return formatDate(calendar,
            DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_ABBREV_MONTH | DateUtils.FORMAT_SHOW_YEAR);
}

/** Formats {@code calendar} as an abbreviated month and day (no year) in the current locale. */
private static String formatMonthAndDay(Calendar calendar) {
    return formatDate(calendar,
            DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_ABBREV_MONTH | DateUtils.FORMAT_NO_YEAR);
}
/**
 * Extracts the formatted year portion of {@code formattedDate} by removing
 * the {@code monthAndDay} portion and returning the non-empty remainder
 * (including any delimiter characters attached to the year).
 *
 * @param formattedDate the full locale-formatted date string
 * @param monthAndDay   the month-day substring of {@code formattedDate}
 * @return the year part of {@code formattedDate}, or a non-localized
 *         fallback if the split unexpectedly yields nothing
 */
private String extractYearFromFormattedDate(String formattedDate, String monthAndDay) {
    // FIX: String.split() interprets its argument as a regular expression.
    // Locales can format the month-day with regex metacharacters (e.g. the
    // German "30. Juni" contains '.', and some formats contain parentheses),
    // which previously mis-split the date or threw PatternSyntaxException.
    // Quote the literal so it is matched verbatim.
    String[] parts = formattedDate.split(java.util.regex.Pattern.quote(monthAndDay));
    for (String part : parts) {
        // If the locale's date format is (MD)Y, then split(MD) = {"", Y}.
        // If it is Y(MD), then split(MD) = {Y}. "Trailing empty strings are
        // [...] not included in the resulting array."
        if (!part.isEmpty()) {
            return part;
        }
    }
    // We will NEVER reach here, as long as the parameters are valid strings.
    // We don't want this because it is not localized.
    return YEAR_FORMAT.format(mCalendar.getTime());
}
/**
 * Refreshes the header views (day-of-week, month-day and year text) from
 * {@code mCalendar}, splitting the locale-formatted full date into its
 * (MD) and (Y) parts so each lands in the correct header text view.
 *
 * @param announce whether to fire an accessibility announcement of the
 *                 newly selected full date
 */
private void updateDisplay(boolean announce) {
if (mDayOfWeekView != null) {
mDayOfWeekView.setText(mCalendar.getDisplayName(Calendar.DAY_OF_WEEK,
Calendar.LONG, Locale.getDefault()));
}
// Locate the standalone (MD) and (Y) strings inside the full formatted date
// so that each part can carry its locale-specific delimiter characters.
String fullDate = formatMonthDayYear(mCalendar);
String monthAndDay = formatMonthAndDay(mCalendar);
String year = YEAR_FORMAT.format(mCalendar.getTime());
int yearStart = fullDate.indexOf(year);
int yearEnd = yearStart + year.length();
int monthDayStart = fullDate.indexOf(monthAndDay);
int monthDayEnd = monthDayStart + monthAndDay.length();
boolean processed = false;
if (monthDayStart != -1 && yearStart != -1) {
// Both parts found verbatim: split the full date at the boundary between
// them, but only when the delimiter between the parts is short (<= 2 chars).
if (mLocaleMonthDayIndex < mLocaleYearIndex) {
if (yearStart - monthDayEnd <= 2) {
monthAndDay = fullDate.substring(monthDayStart, yearStart);
year = fullDate.substring(yearStart, fullDate.length());
processed = true;
}
} else {
if (monthDayStart - yearEnd <= 2) {
year = fullDate.substring(yearStart, monthDayStart);
monthAndDay = fullDate.substring(monthDayStart, fullDate.length());
processed = true;
}
}
} else {
// Some locales format the standalone month-day or standalone year differently
// than it appears in the full date. For instance, Turkey is one such locale.
// TODO: You may want to consider making localized string resources of the
// pattern strings used to format the (MD) and (Y) parts separately.
//
// We can't compare the relative indices of (MD) and (Y) determined earlier,
// because the results are dubious if we're here.
// It is appropriate to assume yearStart != -1. The case where the raw year
// is NOT present in the full date string is hard to imagine. As such,
// even though monthDayStart == -1, we can still determine the relative indices
// of (MD) and (Y) as follows.
//
// If yearStart is non-zero positive, then we can probably guess monthDayStart
// comes before the former.
// NOTE(review): if yearStart were ever -1 here, yearEnd would be computed
// from it and the substrings below would be wrong — TODO confirm the
// "year always present" assumption holds for all supported locales.
if (yearStart > 0) {
monthAndDay = fullDate.substring(0, yearStart);
year = fullDate.substring(yearStart, fullDate.length());
mLocaleMonthDayIndex = 0;
mLocaleYearIndex = 1;
} else {
year = fullDate.substring(0, yearEnd);
monthAndDay = fullDate.substring(yearEnd, fullDate.length());
mLocaleYearIndex = 0;
mLocaleMonthDayIndex = 1;
}
processed = true;
}
// Year delimiters longer than 2 characters, fall back on pre-2.1.1 implementation.
if (!processed) {
// The month-day is already formatted appropriately
year = extractYearFromFormattedDate(fullDate, monthAndDay);
}
// Place each part into the header view matching the locale's ordering.
mFirstTextView.setText(mLocaleMonthDayIndex == 0 ? monthAndDay : year);
mSecondTextView.setText(mLocaleMonthDayIndex == 0 ? year : monthAndDay);
// Accessibility.
long millis = mCalendar.getTimeInMillis();
mAnimator.setDateMillis(millis);
int flags = DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_NO_YEAR;
String monthAndDayText = formatDate(millis, flags);
mMonthDayYearView.setContentDescription(monthAndDayText);
if (announce) {
flags = DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_YEAR;
String fullDateText = formatDate(millis, flags);
Utils.tryAccessibilityAnnounce(mAnimator, fullDateText);
}
}
/**
 * Sets the day on which displayed weeks begin.
 *
 * @param startOfWeek a value from {@link Calendar#SUNDAY SUNDAY}
 *                    through {@link Calendar#SATURDAY SATURDAY}
 * @throws IllegalArgumentException if the value lies outside that range
 */
public void setFirstDayOfWeek(int startOfWeek) {
    final boolean inRange =
            startOfWeek >= Calendar.SUNDAY && startOfWeek <= Calendar.SATURDAY;
    if (!inRange) {
        throw new IllegalArgumentException(
                "Value must be between Calendar.SUNDAY and Calendar.SATURDAY");
    }
    mWeekStart = startOfWeek;
    // The day grid may not exist yet if this is called before onCreateView.
    if (mDayPickerView != null) {
        mDayPickerView.onChange();
    }
}
/**
 * Restricts this picker to years in [{@code startYear}, {@code endYear}].
 * If a {@link #setMinDate(Calendar) minimal date} and/or
 * {@link #setMaxDate(Calendar) maximal date} were set, dates within this
 * year range that fall outside those bounds remain disallowed.
 * <em>This does NOT change the minimal date's year or the maximal date's year.</em>
 *
 * @param startYear the first selectable year
 * @param endYear   the last selectable year; must exceed {@code startYear}
 * @throws IllegalArgumentException if {@code endYear <= startYear}
 */
public void setYearRange(int startYear, int endYear) {
    if (endYear <= startYear) {
        throw new IllegalArgumentException("Year end must be larger than year start");
    }
    mMinYear = startYear;
    mMaxYear = endYear;
    // Refresh the day grid if it has already been built.
    if (mDayPickerView != null) {
        mDayPickerView.onChange();
    }
}
/**
* Sets the minimal date that can be selected in this date picker. Dates before (but not including)
* the specified date will be disallowed from being selected.
*
* @param calendar a Calendar object set to the year, month, day desired as the mindate.
*/
public void setMinDate(Calendar calendar) {
mMinDate = calendar;
setYearRange(calendar.get(Calendar.YEAR), mMaxYear);
}
/**
* @return The minimal date supported by this date picker. Null if it has not been set.
*/
@Nullable
@Override
public Calendar getMinDate() {
return mMinDate;
}
/**
* Sets the maximal date that can be selected in this date picker. Dates after (but not including)
* the specified date will be disallowed from being selected.
*
* @param calendar a Calendar object set to the year, month, day desired as the maxdate.
*/
public void setMaxDate(Calendar calendar) {
mMaxDate = calendar;
setYearRange(mMinYear, calendar.get(Calendar.YEAR));
}
    /**
     * @return The maximal date supported by this date picker. Null if it has not been set.
     */
    @Nullable
    @Override
    public Calendar getMaxDate() {
        // Plain accessor for the upper bound assigned in setMaxDate(Calendar).
        return mMaxDate;
    }
    /** Registers the callback invoked when the user confirms a date selection. */
    public void setOnDateSetListener(OnDateSetListener listener) {
        mCallBack = listener;
    }
// If the newly selected month / year does not contain the currently selected day number,
// change the selected day number to the last day of the selected month or year.
// e.g. Switching from Mar to Apr when Mar 31 is selected -> Apr 30
// e.g. Switching from 2012 to 2013 when Feb 29, 2012 is selected -> Feb 28, 2013
private void adjustDayInMonthIfNeeded(int month, int year) {
int day = mCalendar.get(Calendar.DAY_OF_MONTH);
int daysInMonth = Utils.getDaysInMonth(month, year);
if (day > daysInMonth) {
mCalendar.set(Calendar.DAY_OF_MONTH, daysInMonth);
}
}
@Override
public void onClick(View v) {
tryVibrate();
if (v.getId() == R.id.date_picker_second_textview) {
setCurrentView(mLocaleMonthDayIndex == 0 ? YEAR_VIEW : MONTH_AND_DAY_VIEW);
} else if (v.getId() == R.id.date_picker_first_textview) {
setCurrentView(mLocaleMonthDayIndex == 0 ? MONTH_AND_DAY_VIEW : YEAR_VIEW);
}
}
    /** Applies a year chosen from the year list, then returns to the month/day grid. */
    @Override
    public void onYearSelected(int year) {
        // Clamp the day BEFORE changing the year (handles Feb 29 on non-leap years).
        adjustDayInMonthIfNeeded(mCalendar.get(Calendar.MONTH), year);
        mCalendar.set(Calendar.YEAR, year);
        updatePickers();
        setCurrentView(MONTH_AND_DAY_VIEW);
        updateDisplay(true);
    }
    /** Applies a full date picked from the day grid and refreshes listeners/display. */
    @Override
    public void onDayOfMonthSelected(int year, int month, int day) {
        mCalendar.set(Calendar.YEAR, year);
        mCalendar.set(Calendar.MONTH, month);
        mCalendar.set(Calendar.DAY_OF_MONTH, day);
        updatePickers();
        updateDisplay(true);
    }
    /** Applies a month/year selection, clamping the day if it overflows the new month. */
    @Override
    public void onMonthYearSelected(int month, int year) {
        // Clamp BEFORE writing the new month/year into mCalendar.
        adjustDayInMonthIfNeeded(month, year);
        mCalendar.set(Calendar.MONTH, month);
        mCalendar.set(Calendar.YEAR, year);
        updatePickers();
        // Even though the MonthPickerView is already contained in this index,
        // keep this call here for accessibility announcement of the new selection.
        setCurrentView(MONTH_AND_DAY_VIEW);
        updateDisplay(true);
    }
private void updatePickers() {
Iterator<OnDateChangedListener> iterator = mListeners.iterator();
while (iterator.hasNext()) {
iterator.next().onDateChanged();
}
}
@Override
public CalendarDay getSelectedDay() {
if (mSelectedDay == null) {
mSelectedDay = new CalendarDay(mCalendar);
} else {
mSelectedDay.setDay(mCalendar.get(Calendar.YEAR),
mCalendar.get(Calendar.MONTH),
mCalendar.get(Calendar.DAY_OF_MONTH));
}
return mSelectedDay;
}
    /** @return the first year shown by the picker (see {@code setYearRange}). */
    @Override
    public int getMinYear() {
        return mMinYear;
    }
    /** @return the last year shown by the picker (see {@code setYearRange}). */
    @Override
    public int getMaxYear() {
        return mMaxYear;
    }
    /** @return the weekday each week starts on, a {@code Calendar} weekday constant. */
    @Override
    public int getFirstDayOfWeek() {
        return mWeekStart;
    }
    /** Adds a listener to be notified by {@code updatePickers()} on date changes. */
    @Override
    public void registerOnDateChangedListener(OnDateChangedListener listener) {
        mListeners.add(listener);
    }
    /** Removes a listener previously added via {@code registerOnDateChangedListener}. */
    @Override
    public void unregisterOnDateChangedListener(OnDateChangedListener listener) {
        mListeners.remove(listener);
    }
    /** Delegates haptic feedback to the controller (which decides whether to vibrate). */
    @Override
    public void tryVibrate() {
        mHapticFeedbackController.tryVibrate();
    }
    /** @return the layout resource inflated as this dialog's content. */
    @Override
    protected int contentLayout() {
        return R.layout.date_picker_dialog;
    }
}
|
Make date picker always cancelable
|
bottomsheetpickers/src/main/java/com/philliphsu/bottomsheetpickers/date/BottomSheetDatePickerDialog.java
|
Make date picker always cancelable
|
<ide><path>ottomsheetpickers/src/main/java/com/philliphsu/bottomsheetpickers/date/BottomSheetDatePickerDialog.java
<ide> switch (viewIndex) {
<ide> case MONTH_AND_DAY_VIEW:
<ide> mDayPickerView.onDateChanged();
<del> setCancelable(true);
<ide> if (mCurrentView != viewIndex) {
<ide> updateHeaderSelectedView(MONTH_AND_DAY_VIEW);
<ide> mAnimator.setDisplayedChild(MONTH_AND_DAY_VIEW);
<ide> break;
<ide> case YEAR_VIEW:
<ide> mYearPickerView.onDateChanged();
<del> setCancelable(false);
<ide> if (mCurrentView != viewIndex) {
<ide> updateHeaderSelectedView(YEAR_VIEW);
<ide> mAnimator.setDisplayedChild(YEAR_VIEW);
|
|
Java
|
apache-2.0
|
1803a61e5fc6d87f9b952a3b31e8476cad276609
| 0 |
emerssso/SRBase
|
//This Software is distributed under The Apache License, Version 2.0
//The License is available at http://www.apache.org/licenses/LICENSE-2.0
package com.gmail.emerssso.srbase;
import com.gmail.emerssso.srbase.EditDailyActivity.DeleteFragment;
import com.gmail.emerssso.srbase.database.PartContentProvider;
import com.gmail.emerssso.srbase.database.PartTable;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.content.ContentValues;
import android.content.DialogInterface;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Toast;
/**
* The EditPartActivity class implements an activity which provides
* a form for users to enter new parts to be associated with a particular
* SR, and certain information associated with that part.
*/
public class EditPartActivity extends Activity {
/** The part number. */
private EditText partNumber;
/** The number of parts in question. */
private EditText partQuantity;
/** The part source (i.e. work, home, Japan). */
private EditText partSource;
/** The part description. */
private EditText partDescription;
/** Indicates whether the part was used or not. */
private CheckBox partUsed;
/** The Uri to load saved data from. */
private Uri savedUri;
/** The confirm button to save data. */
private Button confirm;
/** The ID of the associated SR. */
private String srId;
/* (non-Javadoc)
* @see android.app.Activity#onCreate(android.os.Bundle)
*/
@Override
public void onCreate(Bundle bundle) {
super.onCreate(bundle);
setContentView(R.layout.edit_part_activity);
partNumber = (EditText) findViewById(R.id.part_number);
partQuantity = (EditText) findViewById(R.id.part_quantity);
partSource = (EditText) findViewById(R.id.part_source);
partDescription = (EditText) findViewById(R.id.part_description);
partUsed = (CheckBox) findViewById(R.id.part_used);
confirm = (Button) findViewById(R.id.part_confirm);
Bundle extras = getIntent().getExtras();
srId = extras.getString(PartTable.COLUMN_SR_ID);
savedUri = (bundle == null) ? null :
(Uri) bundle.getParcelable(
PartContentProvider.CONTENT_ITEM_TYPE);
if (extras != null) {
savedUri = extras
.getParcelable(PartContentProvider.CONTENT_ITEM_TYPE);
if(savedUri != null)
fillData(savedUri);
}
confirm.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (partNumber.getText().toString().length() == 0) {
Toast.makeText(EditPartActivity.this, "Part Number missing",
Toast.LENGTH_LONG).show();
return;
}
else {
setResult(RESULT_OK);
finish();
}
}
});
}
/**
* Fill data from the database entry at Uri into the form.
*
* @param uri the Source of data to be loaded.
*/
private void fillData(Uri uri) {
String[] projection = { PartTable.COLUMN_DESCRIPTION,
PartTable.COLUMN_PART_NUMBER, PartTable.COLUMN_QUANTITY,
PartTable.COLUMN_SOURCE, PartTable.COLUMN_USED,
PartTable.COLUMN_SR_ID};
Cursor cursor = getContentResolver()
.query(uri, projection, null, null,null);
if (cursor != null) {
cursor.moveToFirst();
partNumber.setText(cursor.getString(cursor
.getColumnIndexOrThrow(PartTable.COLUMN_PART_NUMBER)));
String temp = cursor.getString(cursor
.getColumnIndexOrThrow(PartTable.COLUMN_QUANTITY));
if(!temp.equals("Unknown"))
partQuantity.setText(temp);
temp = cursor.getString(cursor
.getColumnIndexOrThrow(PartTable.COLUMN_SOURCE));
if(!temp.equals("Unknown"))
partSource.setText(temp);
temp = cursor.getString(cursor
.getColumnIndexOrThrow(PartTable.COLUMN_DESCRIPTION));
if(!temp.equals("No Description"))
partDescription.setText(temp);
srId = cursor.getString(cursor
.getColumnIndexOrThrow(PartTable.COLUMN_SR_ID));
if(cursor.getString(cursor.getColumnIndexOrThrow
(PartTable.COLUMN_USED)).equals("Used"))
partUsed.setChecked(true);
else
partUsed.setChecked(false);
cursor.close();
}
}
/* (non-Javadoc)
* @see android.app.Activity#onSaveInstanceState(android.os.Bundle)
*/
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
saveState();
outState.putParcelable(
PartContentProvider.CONTENT_ITEM_TYPE, savedUri);
}
/* (non-Javadoc)
* @see android.app.Activity#onPause()
*/
@Override
protected void onPause() {
super.onPause();
saveState();
}
/**
* Save the state of the form into the database.
*/
private void saveState() {
String number = partNumber.getText().toString();
if(partNumber.length() == 0)
return;
String quantity = partQuantity.getText().toString();
String description = partDescription.getText().toString();
String source = partSource.getText().toString();
String used = partUsed.isChecked() ? "Used" : "Unused";
if(quantity.length() == 0) quantity = "Unknown";
if(description.length() == 0) description = "No Description";
if(source.length() == 0) source = "Unknown";
ContentValues values = new ContentValues();
values.put(PartTable.COLUMN_PART_NUMBER, number);
values.put(PartTable.COLUMN_QUANTITY, quantity);
values.put(PartTable.COLUMN_DESCRIPTION, description);
values.put(PartTable.COLUMN_SOURCE, source);
values.put(PartTable.COLUMN_USED, used);
values.put(PartTable.COLUMN_SR_ID, srId);
if (savedUri == null) {
// New Part
savedUri = getContentResolver()
.insert(PartContentProvider.CONTENT_URI, values);
} else {
// Update Part
getContentResolver().update(savedUri, values, null, null);
}
}
/* (non-Javadoc)
* @see android.app.Activity#onCreateOptionsMenu(android.view.Menu)
*/
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.edit_menu, menu);
return super.onCreateOptionsMenu(menu);
}
/* (non-Javadoc)
* @see android.app.Activity#onOptionsItemSelected(android.view.MenuItem)
*/
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch(item.getItemId()) {
case R.id.delete_item:
DeleteFragment dfrag = new DeleteFragment();
dfrag.show(getFragmentManager(), "Delete Fragment");
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/**
* The DeleteFragment implements a dialog fragment
* to ask the user whether they are sure they want to delete
* the Part or not.
*/
public static class DeleteFragment extends DialogFragment {
/* (non-Javadoc)
* @see android.app.DialogFragment#onCreateDialog(android.os.Bundle)
*/
@Override
public Dialog onCreateDialog(Bundle bundle) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setTitle("Delete Part?")
.setMessage("Are you sure you want to delete this Part?")
.setPositiveButton("Yes",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
//Some chicanery to get the SR deleted
//There is probably a better way to do this
if(activity instanceof EditPartActivity) {
((EditPartActivity) activity).deletePart();
activity.finish();
}
else {
Log.w("SRBase:DeleteFragment",
"DeleteFragment called by non" +
"ViewPartActivity!");
dialog.cancel();
}
}
})
.setNegativeButton("No",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
dialog.cancel();
}
}).create();
}
}
/**
* Convenience method to request Daily deletion.
*/
public void deletePart() {
this.deletePart(savedUri);
}
/**
* This method deletes the passed Daily, and all parts and dailies
* associated with it.
* @param uri URI to the Daily to delete
*/
private void deletePart(Uri uri) {
getContentResolver().delete(uri,
null, null);
}
}
|
SRBase/src/com/gmail/emerssso/srbase/EditPartActivity.java
|
//This Software is distributed under The Apache License, Version 2.0
//The License is available at http://www.apache.org/licenses/LICENSE-2.0
package com.gmail.emerssso.srbase;
import com.gmail.emerssso.srbase.database.PartContentProvider;
import com.gmail.emerssso.srbase.database.PartTable;
import android.app.Activity;
import android.content.ContentValues;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Toast;
/**
 * The EditPartActivity class implements an activity which provides
 * a form for users to enter new parts to be associated with a particular
 * SR, and certain information associated with that part.
 */
public class EditPartActivity extends Activity {

    /** The part number (the only mandatory field). */
    private EditText partNumber;

    /** The number of parts in question. */
    private EditText partQuantity;

    /** The part source (i.e. work, home, Japan). */
    private EditText partSource;

    /** The part description. */
    private EditText partDescription;

    /** Indicates whether the part was used or not. */
    private CheckBox partUsed;

    /** The Uri of the existing Part row being edited; null for a new Part. */
    private Uri savedUri;

    /** The confirm button to save data. */
    private Button confirm;

    /** The ID of the associated SR. */
    private String srId;

    /* (non-Javadoc)
     * @see android.app.Activity#onCreate(android.os.Bundle)
     */
    @Override
    public void onCreate(Bundle bundle) {
        super.onCreate(bundle);
        setContentView(R.layout.edit_part_activity);
        partNumber = (EditText) findViewById(R.id.part_number);
        partQuantity = (EditText) findViewById(R.id.part_quantity);
        partSource = (EditText) findViewById(R.id.part_source);
        partDescription = (EditText) findViewById(R.id.part_description);
        partUsed = (CheckBox) findViewById(R.id.part_used);
        confirm = (Button) findViewById(R.id.part_confirm);

        // Restore the Part URI saved across a configuration change, if any.
        savedUri = (bundle == null) ? null :
            (Uri) bundle.getParcelable(PartContentProvider.CONTENT_ITEM_TYPE);

        // Fix: the previous code dereferenced extras before the null check (NPE
        // when launched without extras) and unconditionally overwrote the
        // instance-state URI with a possibly-null extra.
        Bundle extras = getIntent().getExtras();
        if (extras != null) {
            srId = extras.getString(PartTable.COLUMN_SR_ID);
            Uri extraUri = extras
                .getParcelable(PartContentProvider.CONTENT_ITEM_TYPE);
            if (extraUri != null) {
                savedUri = extraUri;
            }
        }
        if (savedUri != null) {
            fillData(savedUri);
        }

        confirm.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // The part number is the only required field; everything else
                // gets a placeholder in saveState().
                if (partNumber.getText().toString().length() == 0) {
                    Toast.makeText(EditPartActivity.this, "Part Number missing",
                        Toast.LENGTH_LONG).show();
                } else {
                    setResult(RESULT_OK);
                    finish();
                }
            }
        });
    }

    /**
     * Fill data from the database entry at Uri into the form. Placeholder
     * values written by {@link #saveState()} ("Unknown", "No Description")
     * are not echoed back into the fields.
     *
     * @param uri the source of the data to be loaded
     */
    private void fillData(Uri uri) {
        String[] projection = { PartTable.COLUMN_DESCRIPTION,
            PartTable.COLUMN_PART_NUMBER, PartTable.COLUMN_QUANTITY,
            PartTable.COLUMN_SOURCE, PartTable.COLUMN_USED,
            PartTable.COLUMN_SR_ID };
        Cursor cursor = getContentResolver()
            .query(uri, projection, null, null, null);
        if (cursor != null) {
            cursor.moveToFirst();
            partNumber.setText(cursor.getString(cursor
                .getColumnIndexOrThrow(PartTable.COLUMN_PART_NUMBER)));
            String temp = cursor.getString(cursor
                .getColumnIndexOrThrow(PartTable.COLUMN_QUANTITY));
            if (!temp.equals("Unknown"))
                partQuantity.setText(temp);
            temp = cursor.getString(cursor
                .getColumnIndexOrThrow(PartTable.COLUMN_SOURCE));
            if (!temp.equals("Unknown"))
                partSource.setText(temp);
            temp = cursor.getString(cursor
                .getColumnIndexOrThrow(PartTable.COLUMN_DESCRIPTION));
            if (!temp.equals("No Description"))
                partDescription.setText(temp);
            srId = cursor.getString(cursor
                .getColumnIndexOrThrow(PartTable.COLUMN_SR_ID));
            // The "used" flag is persisted as the strings "Used"/"Unused".
            partUsed.setChecked("Used".equals(cursor.getString(cursor
                .getColumnIndexOrThrow(PartTable.COLUMN_USED))));
            cursor.close();
        }
    }

    /* (non-Javadoc)
     * @see android.app.Activity#onSaveInstanceState(android.os.Bundle)
     */
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        saveState();
        // Preserve the row URI so the edit can resume after recreation.
        outState.putParcelable(
            PartContentProvider.CONTENT_ITEM_TYPE, savedUri);
    }

    /* (non-Javadoc)
     * @see android.app.Activity#onPause()
     */
    @Override
    protected void onPause() {
        super.onPause();
        saveState();
    }

    /**
     * Save the state of the form into the database. A part number is required;
     * without one nothing is written. Empty optional fields are stored with
     * placeholder values so fillData() can recognize and skip them.
     */
    private void saveState() {
        String number = partNumber.getText().toString();
        if (partNumber.length() == 0)
            return;
        String quantity = partQuantity.getText().toString();
        String description = partDescription.getText().toString();
        String source = partSource.getText().toString();
        String used = partUsed.isChecked() ? "Used" : "Unused";
        if (quantity.length() == 0) quantity = "Unknown";
        if (description.length() == 0) description = "No Description";
        if (source.length() == 0) source = "Unknown";

        ContentValues values = new ContentValues();
        values.put(PartTable.COLUMN_PART_NUMBER, number);
        values.put(PartTable.COLUMN_QUANTITY, quantity);
        values.put(PartTable.COLUMN_DESCRIPTION, description);
        values.put(PartTable.COLUMN_SOURCE, source);
        values.put(PartTable.COLUMN_USED, used);
        values.put(PartTable.COLUMN_SR_ID, srId);

        if (savedUri == null) {
            // New Part: remember the URI so later saves update the same row.
            savedUri = getContentResolver()
                .insert(PartContentProvider.CONTENT_URI, values);
        } else {
            // Update Part
            getContentResolver().update(savedUri, values, null, null);
        }
    }
}
|
Add delete option to EditPartActivity
|
SRBase/src/com/gmail/emerssso/srbase/EditPartActivity.java
|
Add delete option to EditPartActivity
|
<ide><path>RBase/src/com/gmail/emerssso/srbase/EditPartActivity.java
<ide> //The License is available at http://www.apache.org/licenses/LICENSE-2.0
<ide> package com.gmail.emerssso.srbase;
<ide>
<add>import com.gmail.emerssso.srbase.EditDailyActivity.DeleteFragment;
<ide> import com.gmail.emerssso.srbase.database.PartContentProvider;
<ide> import com.gmail.emerssso.srbase.database.PartTable;
<ide>
<ide> import android.app.Activity;
<add>import android.app.AlertDialog;
<add>import android.app.Dialog;
<add>import android.app.DialogFragment;
<ide> import android.content.ContentValues;
<add>import android.content.DialogInterface;
<ide> import android.database.Cursor;
<ide> import android.net.Uri;
<ide> import android.os.Bundle;
<add>import android.util.Log;
<add>import android.view.Menu;
<add>import android.view.MenuInflater;
<add>import android.view.MenuItem;
<ide> import android.view.View;
<ide> import android.widget.Button;
<ide> import android.widget.CheckBox;
<ide> getContentResolver().update(savedUri, values, null, null);
<ide> }
<ide> }
<add>
<add> /* (non-Javadoc)
<add> * @see android.app.Activity#onCreateOptionsMenu(android.view.Menu)
<add> */
<add> @Override
<add> public boolean onCreateOptionsMenu(Menu menu) {
<add> MenuInflater inflater = getMenuInflater();
<add> inflater.inflate(R.menu.edit_menu, menu);
<add> return super.onCreateOptionsMenu(menu);
<add> }
<add>
<add> /* (non-Javadoc)
<add> * @see android.app.Activity#onOptionsItemSelected(android.view.MenuItem)
<add> */
<add> @Override
<add> public boolean onOptionsItemSelected(MenuItem item) {
<add> switch(item.getItemId()) {
<add> case R.id.delete_item:
<add> DeleteFragment dfrag = new DeleteFragment();
<add> dfrag.show(getFragmentManager(), "Delete Fragment");
<add> return true;
<add> default:
<add> return super.onOptionsItemSelected(item);
<add> }
<add> }
<add>
<add> /**
<add> * The DeleteFragment implements a dialog fragment
<add> * to ask the user whether they are sure they want to delete
<add> * the Part or not.
<add> */
<add> public static class DeleteFragment extends DialogFragment {
<add>
<add> /* (non-Javadoc)
<add> * @see android.app.DialogFragment#onCreateDialog(android.os.Bundle)
<add> */
<add> @Override
<add> public Dialog onCreateDialog(Bundle bundle) {
<add> final Activity activity = getActivity();
<add> return new AlertDialog.Builder(activity)
<add> .setTitle("Delete Part?")
<add> .setMessage("Are you sure you want to delete this Part?")
<add> .setPositiveButton("Yes",
<add> new DialogInterface.OnClickListener() {
<add>
<add> @Override
<add> public void onClick(DialogInterface dialog,
<add> int which) {
<add> //Some chicanery to get the SR deleted
<add> //There is probably a better way to do this
<add> if(activity instanceof EditPartActivity) {
<add> ((EditPartActivity) activity).deletePart();
<add> activity.finish();
<add> }
<add> else {
<add> Log.w("SRBase:DeleteFragment",
<add> "DeleteFragment called by non" +
<add> "ViewPartActivity!");
<add> dialog.cancel();
<add> }
<add> }
<add> })
<add> .setNegativeButton("No",
<add> new DialogInterface.OnClickListener() {
<add>
<add> @Override
<add> public void onClick(DialogInterface dialog,
<add> int which) {
<add> dialog.cancel();
<add> }
<add> }).create();
<add> }
<add>
<add> }
<add>
<add> /**
<add> * Convenience method to request Daily deletion.
<add> */
<add> public void deletePart() {
<add> this.deletePart(savedUri);
<add> }
<add>
<add> /**
<add> * This method deletes the passed Daily, and all parts and dailies
<add> * associated with it.
<add> * @param uri URI to the Daily to delete
<add> */
<add> private void deletePart(Uri uri) {
<add> getContentResolver().delete(uri,
<add> null, null);
<add> }
<ide> }
|
|
Java
|
apache-2.0
|
cf2c4b2d57e1c31b98ffccf275f38bcd8feee5a5
| 0 |
xuzhongxing/deeplearning4j,crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,xuzhongxing/deeplearning4j,crockpotveggies/deeplearning4j,xuzhongxing/deeplearning4j,crockpotveggies/deeplearning4j,xuzhongxing/deeplearning4j,xuzhongxing/deeplearning4j,crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,xuzhongxing/deeplearning4j
|
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.optimize.solvers;
import org.deeplearning4j.berkeley.Pair;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.Model;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.optimize.api.IterationListener;
import org.deeplearning4j.optimize.api.StepFunction;
import org.deeplearning4j.optimize.api.TerminationCondition;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.Collection;
import java.util.Map;
/**
 * Stochastic Gradient Descent optimizer: applies a fixed-size step along the
 * gradient on every iteration, with no line search.
 * @author Adam Gibson
 */
public class StochasticGradientDescent extends BaseOptimizer {

    /** Builds an SGD optimizer using the default termination conditions. */
    public StochasticGradientDescent(NeuralNetConfiguration conf, StepFunction stepFunction, Collection<IterationListener> iterationListeners, Model model) {
        super(conf, stepFunction, iterationListeners, model);
    }

    /** Builds an SGD optimizer with explicit termination conditions. */
    public StochasticGradientDescent(NeuralNetConfiguration conf, StepFunction stepFunction, Collection<IterationListener> iterationListeners, Collection<TerminationCondition> terminationConditions, Model model) {
        super(conf, stepFunction, iterationListeners, terminationConditions, model);
    }

    /**
     * Runs the configured number of SGD iterations. Each iteration computes the
     * gradient and score, applies one fixed step, notifies listeners, and checks
     * termination conditions.
     *
     * @return true when the configured iterations have completed
     */
    @Override
    public boolean optimize() {
        for(int i = 0; i < conf.getNumIterations(); i++) {
            // Compute gradient and score for the current parameters in one pass.
            Pair<Gradient,Double> pair = gradientAndScore();
            Gradient gradient = pair.getFirst();

            INDArray params = model.params();
            stepFunction.step(params,gradient.gradient());
            //Note: model.params() is always in-place for MultiLayerNetwork and ComputationGraph, hence no setParams is necessary there
            //However: for pretrain layers, params are NOT a view. Thus a setParams call is necessary
            //But setParams should be a no-op for MLN and CG
            model.setParams(params);

            for(IterationListener listener : iterationListeners)
                listener.iterationDone(model, i);

            checkTerminalConditions(pair.getFirst().gradient(), oldScore, score, i);

            // Keep the optimizer-level iteration counter in sync with the loop.
            iteration++;
        }
        return true;
    }

    /** No per-line preprocessing is needed for fixed-step SGD. */
    @Override
    public void preProcessLine() {
    }

    /** No post-step work is needed for fixed-step SGD. */
    @Override
    public void postStep(INDArray gradient) {
    }
}
|
deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/solvers/StochasticGradientDescent.java
|
/*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.optimize.solvers;
import org.deeplearning4j.berkeley.Pair;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.Model;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.optimize.api.IterationListener;
import org.deeplearning4j.optimize.api.StepFunction;
import org.deeplearning4j.optimize.api.TerminationCondition;
import org.nd4j.linalg.api.ndarray.INDArray;
import java.util.Collection;
import java.util.Map;
/**
 * Stochastic Gradient Descent optimizer: applies a fixed-size step along the
 * gradient on every iteration, with no line search.
 * @author Adam Gibson
 */
public class StochasticGradientDescent extends BaseOptimizer {

    /** Builds an SGD optimizer using the default termination conditions. */
    public StochasticGradientDescent(NeuralNetConfiguration conf, StepFunction stepFunction, Collection<IterationListener> iterationListeners, Model model) {
        super(conf, stepFunction, iterationListeners, model);
    }

    /** Builds an SGD optimizer with explicit termination conditions. */
    public StochasticGradientDescent(NeuralNetConfiguration conf, StepFunction stepFunction, Collection<IterationListener> iterationListeners, Collection<TerminationCondition> terminationConditions, Model model) {
        super(conf, stepFunction, iterationListeners, terminationConditions, model);
    }

    /**
     * Runs the configured number of SGD iterations. Each iteration computes the
     * gradient and score, applies one fixed step, notifies listeners, and checks
     * termination conditions.
     *
     * @return true when the configured iterations have completed
     */
    @Override
    public boolean optimize() {
        for(int i = 0; i < conf.getNumIterations(); i++) {
            // Compute gradient and score for the current parameters in one pass.
            Pair<Gradient,Double> pair = gradientAndScore();
            Gradient gradient = pair.getFirst();

            INDArray params = model.params();
            stepFunction.step(params,gradient.gradient());
            //Note: model.params() is always in-place for MultiLayerNetwork and ComputationGraph, hence no setParams is necessary there
            //However: for pretrain layers, params are NOT a view. Thus a setParams call is necessary
            //But setParams should be a no-op for MLN and CG
            model.setParams(params);

            for(IterationListener listener : iterationListeners)
                listener.iterationDone(model, i);

            checkTerminalConditions(pair.getFirst().gradient(), oldScore, score, i);

            // Fix: the inherited iteration counter was never advanced, so any
            // logic keyed off BaseOptimizer.iteration always saw iteration 0.
            iteration++;
        }
        return true;
    }

    /** No per-line preprocessing is needed for fixed-step SGD. */
    @Override
    public void preProcessLine() {
    }

    /** No post-step work is needed for fixed-step SGD. */
    @Override
    public void postStep(INDArray gradient) {
    }
}
|
Ensure iteration count in SGD is updated
|
deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/solvers/StochasticGradientDescent.java
|
Ensure iteration count in SGD is updated
|
<ide><path>eeplearning4j-core/src/main/java/org/deeplearning4j/optimize/solvers/StochasticGradientDescent.java
<ide>
<ide> checkTerminalConditions(pair.getFirst().gradient(), oldScore, score, i);
<ide>
<del>
<add> iteration++;
<ide> }
<ide> return true;
<ide> }
|
|
Java
|
apache-2.0
|
8f3962e18c5370ef8f18f0cd0badcc51cdb2ffb0
| 0 |
kiritbasu/datacollector,streamsets/datacollector,WgStreamsets/datacollector,rockmkd/datacollector,SandishKumarHN/datacollector,SandishKumarHN/datacollector,streamsets/datacollector,SandishKumarHN/datacollector,studanshu/datacollector,kiritbasu/datacollector,studanshu/datacollector,z123/datacollector,rockmkd/datacollector,studanshu/datacollector,z123/datacollector,rockmkd/datacollector,kunickiaj/datacollector,WgStreamsets/datacollector,studanshu/datacollector,WgStreamsets/datacollector,streamsets/datacollector,kunickiaj/datacollector,rockmkd/datacollector,WgStreamsets/datacollector,rockmkd/datacollector,WgStreamsets/datacollector,z123/datacollector,studanshu/datacollector,streamsets/datacollector,kunickiaj/datacollector,kiritbasu/datacollector,SandishKumarHN/datacollector,kiritbasu/datacollector,streamsets/datacollector,kunickiaj/datacollector,SandishKumarHN/datacollector,z123/datacollector,kiritbasu/datacollector,z123/datacollector,kunickiaj/datacollector
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.validation;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.streamsets.pipeline.config.ConfigConfiguration;
import com.streamsets.pipeline.config.ConfigDefinition;
import com.streamsets.pipeline.config.PipelineConfiguration;
import com.streamsets.pipeline.config.StageConfiguration;
import com.streamsets.pipeline.config.StageDefinition;
import com.streamsets.pipeline.config.StageType;
import com.streamsets.pipeline.stagelibrary.StageLibrary;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class PipelineConfigurationValidator {
private static final String PIPELINE_IS_EMPTY_KEY = "validation.pipeline.is.empty";
private static final String PIPELINE_IS_EMPTY_DEFAULT = "The pipeline is empty";
private static final String FIRST_STAGE_MUST_BE_A_SOURCE_KEY = "validation.first.stage.must.be.source";
private static final String FIRST_STAGE_MUST_BE_A_SOURCE_DEFAULT = "The first stage must be a Source";
private static final String STAGE_CANNOT_BE_SOURCE_KEY = "validation.stage.cannot.be.source";
private static final String STAGE_CANNOT_BE_SOURCE_DEFAULT = "Instance '%s' cannot be a Source";
private static final String INSTANCE_ALREADY_DEFINED_KEY = "validation.instance.already.defined";
private static final String INSTANCE_ALREADY_DEFINED_DEFAULT = "Instance name '%s' already defined";
private static final String STAGE_DOES_NOT_EXIST_KEY = "validation.stage.does.not.exist";
private static final String STAGE_DOES_NOT_EXIST_DEFAULT =
"Instance '%s', stage does not exist, library '%s' name '%s' version '%s'";
private static final String STAGE_MISSING_CONFIGURATION_KEY = "validation.stage.missing.configuration";
private static final String STAGE_MISSING_CONFIGURATION_DEFAULT =
"Instance '%s', configuration '%s' value is required";
private static final String STAGE_CONFIGURATION_INVALID_TYPE_KEY = "validation.stage.configuration.invalidType";
private static final String STAGE_CONFIGURATION_INVALID_TYPE_DEFAULT =
"Instance '%s', configuration '%s' should be a '%s'";
private static final String INSTANCE_OPEN_OUTPUT_LANE_KEY = "validation.instance.open.output.lane";
private static final String INSTANCE_OPEN_OUTPUT_LANE_DEFAULT = "Instance '%s' has an open lane '%s'";
private final StageLibrary stageLibrary;
private final PipelineConfiguration pipelineConfiguration;
private final Issues issues;
private final List<String> openLanes;
private boolean validated;
private boolean canPreview = true;
public PipelineConfigurationValidator(StageLibrary stageLibrary, PipelineConfiguration pipelineConfiguration) {
Preconditions.checkNotNull(stageLibrary, "stageLibrary cannot be null");
Preconditions.checkNotNull(pipelineConfiguration, "pipelineConfiguration cannot be null");
this.stageLibrary = stageLibrary;
this.pipelineConfiguration = pipelineConfiguration;
issues = new Issues();
openLanes = new ArrayList<String>();
}
/**
 * Runs all validations over the pipeline configuration.
 * <p>
 * Must be called before {@link #getIssues()}.
 *
 * @return true if the pipeline is valid (no issues were found), false otherwise
 */
public boolean validate() {
    validated = true;
    if (pipelineConfiguration.getStages().isEmpty()) {
        issues.addP(new Issue(PIPELINE_IS_EMPTY_KEY, PIPELINE_IS_EMPTY_DEFAULT));
        canPreview = false;
    }
    validatePipelineConfiguration();
    // BUG FIX: hasIssues() reports true when problems were found, so both the
    // preview flag and the return value must be negated. The previous code made
    // validation "succeed" exactly when issues existed.
    canPreview = canPreview && !issues.hasIssues();
    validatePipelineLanes();
    return !issues.hasIssues();
}
/**
 * Returns whether the pipeline can be previewed, as determined by the last
 * {@link #validate()} run.
 */
public boolean canPreview() {
    return canPreview;
}

/**
 * Returns the issues collected by {@link #validate()}.
 *
 * @throws IllegalStateException if validate() has not been called yet
 */
public Issues getIssues() {
    // The no-argument String.format() wrapper was redundant; pass the message directly.
    Preconditions.checkState(validated, "validate() has not been called");
    return issues;
}

/** Returns the output lanes that are produced but not consumed by any stage. */
public List<String> getOpenLanes() {
    return openLanes;
}
// Validates each configured stage: unique instance names, existence in the
// stage library, Source placement (first and only first stage must be a
// Source), required configurations being present and non-null, and
// configuration value types matching their definitions.
@VisibleForTesting
void validatePipelineConfiguration() {
    // Instance names seen so far, used to detect duplicates.
    Set<String> stageNames = new HashSet<String>();
    // Only the first stage in the pipeline may (and must) be a Source.
    boolean shouldBeSource = true;
    for (StageConfiguration stage : pipelineConfiguration.getStages()) {
        // Duplicate instance-name check.
        if (stageNames.contains(stage.getInstanceName())) {
            issues.add(new StageIssue(stage.getInstanceName(),
                INSTANCE_ALREADY_DEFINED_KEY, INSTANCE_ALREADY_DEFINED_DEFAULT,
                stage.getInstanceName()));
        }
        // Resolve the stage definition by (library, name, version).
        StageDefinition stageDef = stageLibrary.getStage(stage.getLibrary(), stage.getStageName(),
            stage.getStageVersion());
        if (stageDef == null) {
            issues.add(new StageIssue(stage.getInstanceName(),
                STAGE_DOES_NOT_EXIST_KEY, STAGE_DOES_NOT_EXIST_DEFAULT,
                stage.getInstanceName(), stage.getLibrary(), stage.getStageName(),
                stage.getStageVersion()));
        } else {
            if (shouldBeSource) {
                // First stage must be a Source.
                if (stageDef.getType() != StageType.SOURCE) {
                    issues.add(new StageIssue(stage.getInstanceName(),
                        FIRST_STAGE_MUST_BE_A_SOURCE_KEY, FIRST_STAGE_MUST_BE_A_SOURCE_DEFAULT,
                        stage.getInstanceName()));
                }
            } else {
                // Any subsequent stage must NOT be a Source.
                if (stageDef.getType() == StageType.SOURCE) {
                    issues.add(new StageIssue(stage.getInstanceName(),
                        STAGE_CANNOT_BE_SOURCE_KEY, STAGE_CANNOT_BE_SOURCE_DEFAULT,
                        stage.getInstanceName()));
                }
            }
            shouldBeSource = false;
            // Required configurations that are entirely absent from the stage.
            for (ConfigDefinition confDef : stageDef.getConfigDefinitions()) {
                if (stage.getConfig(confDef.getName()) == null && confDef.isRequired()) {
                    issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                        STAGE_MISSING_CONFIGURATION_KEY, STAGE_MISSING_CONFIGURATION_DEFAULT,
                        stage.getInstanceName(), confDef.getName()));
                }
            }
            for (ConfigConfiguration conf : stage.getConfiguration()) {
                // NOTE(review): getConfigDefinition() presumably returns null for an
                // unknown configuration name, which would NPE below — confirm that
                // upstream guarantees every configured name has a definition.
                ConfigDefinition confDef = stageDef.getConfigDefinition(conf.getName());
                // Required configurations that are present but have a null value.
                if (conf.getValue() == null && confDef.isRequired()) {
                    issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                        STAGE_MISSING_CONFIGURATION_KEY, STAGE_MISSING_CONFIGURATION_DEFAULT,
                        stage.getInstanceName(), confDef.getName()));
                }
                // Type-check non-null configuration values against the definition.
                if (conf.getValue() != null) {
                    switch (confDef.getType()) {
                        case BOOLEAN:
                            if (!(conf.getValue() instanceof Boolean)) {
                                issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                                    STAGE_CONFIGURATION_INVALID_TYPE_KEY,
                                    STAGE_CONFIGURATION_INVALID_TYPE_DEFAULT,
                                    stage.getInstanceName(), confDef.getName(), confDef.getType()));
                            }
                            break;
                        case INTEGER:
                            // Both Long and Integer are accepted (JSON deserializers may produce either).
                            if (!(conf.getValue() instanceof Long || conf.getValue() instanceof Integer)) {
                                issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                                    STAGE_CONFIGURATION_INVALID_TYPE_KEY,
                                    STAGE_CONFIGURATION_INVALID_TYPE_DEFAULT,
                                    stage.getInstanceName(), confDef.getName(), confDef.getType()));
                            }
                            break;
                        case STRING:
                            //NOP
                            break;
                        case MODEL:
                            // Models are represented as Map or List structures.
                            if (!(conf.getValue() instanceof Map || conf.getValue() instanceof List)) {
                                issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                                    STAGE_CONFIGURATION_INVALID_TYPE_KEY,
                                    STAGE_CONFIGURATION_INVALID_TYPE_DEFAULT,
                                    stage.getInstanceName(), confDef.getName(), confDef.getType()));
                            }
                            break;
                    }
                }
            }
        }
        stageNames.add(stage.getInstanceName());
    }
}
// Detects "open" lanes: output lanes produced by some stage that no stage
// consumes. Each producing stage gets an issue, and the lanes are recorded
// in the openLanes list.
@VisibleForTesting
void validatePipelineLanes() {
    // Collect every produced and every consumed lane across all stages.
    Set<String> producedLanes = new HashSet<String>();
    Set<String> consumedLanes = new HashSet<String>();
    for (StageConfiguration stageConf : pipelineConfiguration.getStages()) {
        producedLanes.addAll(stageConf.getOutputLanes());
        consumedLanes.addAll(stageConf.getInputLanes());
    }
    // A lane is open when it is produced but never consumed.
    Set<String> unconsumed = new HashSet<String>(producedLanes);
    unconsumed.removeAll(consumedLanes);
    openLanes.addAll(unconsumed);
    // Report every stage that produces an open lane.
    for (String lane : unconsumed) {
        for (StageConfiguration stageConf : pipelineConfiguration.getStages()) {
            if (stageConf.getOutputLanes().contains(lane)) {
                issues.add(new StageIssue(stageConf.getInstanceName(), INSTANCE_OPEN_OUTPUT_LANE_KEY,
                    INSTANCE_OPEN_OUTPUT_LANE_DEFAULT, stageConf.getInstanceName(), lane));
            }
        }
    }
}
}
|
container/src/main/java/com/streamsets/pipeline/validation/PipelineConfigurationValidator.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.validation;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.streamsets.pipeline.config.ConfigConfiguration;
import com.streamsets.pipeline.config.ConfigDefinition;
import com.streamsets.pipeline.config.PipelineConfiguration;
import com.streamsets.pipeline.config.StageConfiguration;
import com.streamsets.pipeline.config.StageDefinition;
import com.streamsets.pipeline.config.StageType;
import com.streamsets.pipeline.stagelibrary.StageLibrary;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Validates a {@link PipelineConfiguration} against a {@link StageLibrary}:
 * stage existence, Source placement, instance-name uniqueness, required
 * configurations, configuration value types, and open output lanes.
 */
public class PipelineConfigurationValidator {
    // Message-bundle keys and English default texts for reported issues.
    private static final String PIPELINE_IS_EMPTY_KEY = "validation.pipeline.is.empty";
    private static final String PIPELINE_IS_EMPTY_DEFAULT = "The pipeline is empty";
    private static final String FIRST_STAGE_MUST_BE_A_SOURCE_KEY = "validation.first.stage.must.be.source";
    private static final String FIRST_STAGE_MUST_BE_A_SOURCE_DEFAULT = "The first stage must be a Source";
    private static final String STAGE_CANNOT_BE_SOURCE_KEY = "validation.stage.cannot.be.source";
    private static final String STAGE_CANNOT_BE_SOURCE_DEFAULT = "Instance '%s' cannot be a Source";
    private static final String INSTANCE_ALREADY_DEFINED_KEY = "validation.instance.already.defined";
    private static final String INSTANCE_ALREADY_DEFINED_DEFAULT = "Instance name '%s' already defined";
    private static final String STAGE_DOES_NOT_EXIST_KEY = "validation.stage.does.not.exist";
    private static final String STAGE_DOES_NOT_EXIST_DEFAULT =
        "Instance '%s', stage does not exist, library '%s' name '%s' version '%s'";
    private static final String STAGE_MISSING_CONFIGURATION_KEY = "validation.stage.missing.configuration";
    private static final String STAGE_MISSING_CONFIGURATION_DEFAULT =
        "Instance '%s', configuration '%s' value is required";
    private static final String STAGE_CONFIGURATION_INVALID_TYPE_KEY = "validation.stage.configuration.invalidType";
    private static final String STAGE_CONFIGURATION_INVALID_TYPE_DEFAULT =
        "Instance '%s', configuration '%s' should be a '%s'";
    private static final String INSTANCE_OPEN_OUTPUT_LANE_KEY = "validation.instance.open.output.lane";
    private static final String INSTANCE_OPEN_OUTPUT_LANE_DEFAULT = "Instance '%s' has an open lane '%s'";

    // Library used to resolve configured stages to their definitions.
    private final StageLibrary stageLibrary;
    // The pipeline configuration under validation (never null).
    private final PipelineConfiguration pipelineConfiguration;
    // Accumulator for validation issues.
    private final Issues issues;
    // Output lanes produced but not consumed; filled by validatePipelineLanes().
    private final List<String> openLanes;
    // Set once validate() has run; guards getIssues().
    private boolean validated;
    // Whether the pipeline may be previewed; updated during validate().
    private boolean canPreview = true;

    /**
     * Creates a validator for the given pipeline configuration.
     *
     * @param stageLibrary          library used to look up stage definitions; must not be null
     * @param pipelineConfiguration pipeline configuration to validate; must not be null
     */
    public PipelineConfigurationValidator(StageLibrary stageLibrary, PipelineConfiguration pipelineConfiguration) {
        Preconditions.checkNotNull(stageLibrary, "stageLibrary cannot be null");
        Preconditions.checkNotNull(pipelineConfiguration, "pipelineConfiguration cannot be null");
        this.stageLibrary = stageLibrary;
        this.pipelineConfiguration = pipelineConfiguration;
        issues = new Issues();
        openLanes = new ArrayList<String>();
    }

    /**
     * Runs all validations over the pipeline configuration.
     *
     * @return true if the pipeline is valid (no issues were found), false otherwise
     */
    public boolean validate() {
        validated = true;
        if (pipelineConfiguration.getStages().isEmpty()) {
            issues.addP(new Issue(PIPELINE_IS_EMPTY_KEY, PIPELINE_IS_EMPTY_DEFAULT));
            canPreview = false;
        }
        validatePipelineConfiguration();
        // BUG FIX: hasIssues() reports true when problems were found, so both the
        // preview flag and the return value must be negated.
        canPreview = canPreview && !issues.hasIssues();
        validatePipelineLanes();
        return !issues.hasIssues();
    }

    /** Returns whether the pipeline can be previewed, per the last {@link #validate()} run. */
    public boolean canPreview() {
        return canPreview;
    }

    /**
     * Returns the issues collected by {@link #validate()}.
     *
     * @throws IllegalStateException if validate() has not been called yet
     */
    public Issues getIssues() {
        // Redundant no-arg String.format() removed; message passed directly.
        Preconditions.checkState(validated, "validate() has not been called");
        return issues;
    }

    /** Returns the output lanes that are produced but not consumed by any stage. */
    public List<String> getOpenLanes() {
        return openLanes;
    }

    // Validates each configured stage: unique names, existence in the library,
    // Source placement, required configurations, and configuration value types.
    @VisibleForTesting
    void validatePipelineConfiguration() {
        Set<String> stageNames = new HashSet<String>();
        // Only the first stage may (and must) be a Source.
        boolean shouldBeSource = true;
        for (StageConfiguration stage : pipelineConfiguration.getStages()) {
            if (stageNames.contains(stage.getInstanceName())) {
                issues.add(new StageIssue(stage.getInstanceName(),
                    INSTANCE_ALREADY_DEFINED_KEY, INSTANCE_ALREADY_DEFINED_DEFAULT,
                    stage.getInstanceName()));
            }
            StageDefinition stageDef = stageLibrary.getStage(stage.getLibrary(), stage.getStageName(),
                stage.getStageVersion());
            if (stageDef == null) {
                issues.add(new StageIssue(stage.getInstanceName(),
                    STAGE_DOES_NOT_EXIST_KEY, STAGE_DOES_NOT_EXIST_DEFAULT,
                    stage.getInstanceName(), stage.getLibrary(), stage.getStageName(),
                    stage.getStageVersion()));
            } else {
                if (shouldBeSource) {
                    if (stageDef.getType() != StageType.SOURCE) {
                        issues.add(new StageIssue(stage.getInstanceName(),
                            FIRST_STAGE_MUST_BE_A_SOURCE_KEY, FIRST_STAGE_MUST_BE_A_SOURCE_DEFAULT,
                            stage.getInstanceName()));
                    }
                } else {
                    if (stageDef.getType() == StageType.SOURCE) {
                        issues.add(new StageIssue(stage.getInstanceName(),
                            STAGE_CANNOT_BE_SOURCE_KEY, STAGE_CANNOT_BE_SOURCE_DEFAULT,
                            stage.getInstanceName()));
                    }
                }
                shouldBeSource = false;
                // Required configurations entirely absent from the stage.
                for (ConfigDefinition confDef : stageDef.getConfigDefinitions()) {
                    if (stage.getConfig(confDef.getName()) == null && confDef.isRequired()) {
                        issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                            STAGE_MISSING_CONFIGURATION_KEY, STAGE_MISSING_CONFIGURATION_DEFAULT,
                            stage.getInstanceName(), confDef.getName()));
                    }
                }
                for (ConfigConfiguration conf : stage.getConfiguration()) {
                    ConfigDefinition confDef = stageDef.getConfigDefinition(conf.getName());
                    // BUG FIX (re-added per commit "Re-adding validation for when config
                    // is null but required"): a present-but-null value for a required
                    // configuration must be reported as missing.
                    if (conf.getValue() == null && confDef.isRequired()) {
                        issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                            STAGE_MISSING_CONFIGURATION_KEY, STAGE_MISSING_CONFIGURATION_DEFAULT,
                            stage.getInstanceName(), confDef.getName()));
                    }
                    // Type-check non-null configuration values against the definition.
                    if (conf.getValue() != null) {
                        switch (confDef.getType()) {
                            case BOOLEAN:
                                if (!(conf.getValue() instanceof Boolean)) {
                                    issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                                        STAGE_CONFIGURATION_INVALID_TYPE_KEY,
                                        STAGE_CONFIGURATION_INVALID_TYPE_DEFAULT,
                                        stage.getInstanceName(), confDef.getName(), confDef.getType()));
                                }
                                break;
                            case INTEGER:
                                // Both Long and Integer are accepted.
                                if (!(conf.getValue() instanceof Long || conf.getValue() instanceof Integer)) {
                                    issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                                        STAGE_CONFIGURATION_INVALID_TYPE_KEY,
                                        STAGE_CONFIGURATION_INVALID_TYPE_DEFAULT,
                                        stage.getInstanceName(), confDef.getName(), confDef.getType()));
                                }
                                break;
                            case STRING:
                                //NOP
                                break;
                            case MODEL:
                                // Models are represented as Map or List structures.
                                if (!(conf.getValue() instanceof Map || conf.getValue() instanceof List)) {
                                    issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
                                        STAGE_CONFIGURATION_INVALID_TYPE_KEY,
                                        STAGE_CONFIGURATION_INVALID_TYPE_DEFAULT,
                                        stage.getInstanceName(), confDef.getName(), confDef.getType()));
                                }
                                break;
                        }
                    }
                }
            }
            stageNames.add(stage.getInstanceName());
        }
    }

    // Detects output lanes no stage consumes and reports each producing stage.
    @VisibleForTesting
    void validatePipelineLanes() {
        Set<String> output = new HashSet<String>();
        Set<String> input = new HashSet<String>();
        for (StageConfiguration stage : pipelineConfiguration.getStages()) {
            output.addAll(stage.getOutputLanes());
            input.addAll(stage.getInputLanes());
        }
        // A lane is open when it is produced but never consumed.
        Set<String> open = new HashSet<String>(output);
        open.removeAll(input);
        openLanes.addAll(open);
        if (!open.isEmpty()) {
            for (String lane : open) {
                for (StageConfiguration stage : pipelineConfiguration.getStages()) {
                    if (stage.getOutputLanes().contains(lane)) {
                        issues.add(new StageIssue(stage.getInstanceName(), INSTANCE_OPEN_OUTPUT_LANE_KEY,
                            INSTANCE_OPEN_OUTPUT_LANE_DEFAULT, stage.getInstanceName(), lane));
                    }
                }
            }
        }
    }
}
|
Re-adding validation for when config is null but required
|
container/src/main/java/com/streamsets/pipeline/validation/PipelineConfigurationValidator.java
|
Re-adding validation for when config is null but required
|
<ide><path>ontainer/src/main/java/com/streamsets/pipeline/validation/PipelineConfigurationValidator.java
<ide> }
<ide> for (ConfigConfiguration conf : stage.getConfiguration()) {
<ide> ConfigDefinition confDef = stageDef.getConfigDefinition(conf.getName());
<add> if (conf.getValue() == null && confDef.isRequired()) {
<add> issues.add(new StageIssue(stage.getInstanceName(), confDef.getName(),
<add> STAGE_MISSING_CONFIGURATION_KEY, STAGE_MISSING_CONFIGURATION_DEFAULT,
<add> stage.getInstanceName(), confDef.getName()));
<add> }
<ide> if (conf.getValue() != null) {
<ide> switch (confDef.getType()) {
<ide> case BOOLEAN:
|
|
Java
|
epl-1.0
|
6e4a461cc584f37743315f40b34bc5fd2ca5155c
| 0 |
Techjar/ForgeEssentials,liachmodded/ForgeEssentials,ForgeEssentials/ForgeEssentialsMain,CityOfLearning/ForgeEssentials
|
package com.forgeessentials.protection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimerTask;
import java.util.UUID;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.boss.EntityWither;
import net.minecraft.entity.monster.EntityBlaze;
import net.minecraft.entity.monster.EntityCreeper;
import net.minecraft.entity.monster.EntityEnderman;
import net.minecraft.entity.monster.EntityGiantZombie;
import net.minecraft.entity.monster.EntityIronGolem;
import net.minecraft.entity.monster.EntityPigZombie;
import net.minecraft.entity.monster.EntitySilverfish;
import net.minecraft.entity.monster.EntitySkeleton;
import net.minecraft.entity.monster.EntitySnowman;
import net.minecraft.entity.monster.EntitySpider;
import net.minecraft.entity.monster.EntityWitch;
import net.minecraft.entity.monster.EntityZombie;
import net.minecraft.entity.passive.EntityChicken;
import net.minecraft.entity.passive.EntityCow;
import net.minecraft.entity.passive.EntityHorse;
import net.minecraft.entity.passive.EntityMooshroom;
import net.minecraft.entity.passive.EntityOcelot;
import net.minecraft.entity.passive.EntityPig;
import net.minecraft.entity.passive.EntitySquid;
import net.minecraft.entity.passive.EntityVillager;
import net.minecraft.entity.passive.EntityWolf;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.event.ClickEvent;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.ContainerPlayer;
import net.minecraft.inventory.ContainerWorkbench;
import net.minecraft.inventory.InventoryCrafting;
import net.minecraft.inventory.SlotCrafting;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.DamageSource;
import net.minecraft.world.World;
import net.minecraftforge.permission.PermissionLevel;
import net.minecraftforge.permission.PermissionManager;
import com.forgeessentials.api.APIRegistry;
import com.forgeessentials.api.permissions.Zone;
import com.forgeessentials.core.ForgeEssentials;
import com.forgeessentials.core.misc.FECommandManager;
import com.forgeessentials.core.misc.TaskRegistry;
import com.forgeessentials.core.moduleLauncher.FEModule;
import com.forgeessentials.protection.commands.CommandItemPermission;
import com.forgeessentials.protection.commands.CommandProtectionDebug;
import com.forgeessentials.util.ServerUtil;
import com.forgeessentials.util.events.FEModuleEvent.FEModuleInitEvent;
import com.forgeessentials.util.events.FEModuleEvent.FEModuleServerInitEvent;
import com.forgeessentials.util.events.FEModuleEvent.FEModuleServerPostInitEvent;
import com.forgeessentials.util.output.ChatOutputHandler;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.registry.GameData;
import cpw.mods.fml.relauncher.ReflectionHelper;
@FEModule(name = "Protection", parentMod = ForgeEssentials.class, isCore = true, canDisable = false)
public class ModuleProtection
{
public final static String BASE_PERM = "fe.protection";
public final static String PERM_PVP = BASE_PERM + ".pvp";
public final static String PERM_SLEEP = BASE_PERM + ".sleep";
public final static String PERM_GAMEMODE = BASE_PERM + ".gamemode";
public final static String PERM_INVENTORY_GROUP = BASE_PERM + ".inventorygroup";
public final static String PERM_USE = BASE_PERM + ".use";
public final static String PERM_BREAK = BASE_PERM + ".break";
public final static String PERM_PLACE = BASE_PERM + ".place";
public final static String PERM_FIRE = BASE_PERM + ".fire";
public final static String PERM_FIRE_DESTROY = PERM_FIRE + ".destroy";
public final static String PERM_FIRE_SPREAD = PERM_FIRE + ".spread";
public final static String PERM_INTERACT = BASE_PERM + ".interact";
public final static String PERM_INTERACT_ENTITY = BASE_PERM + ".interact.entity";
public final static String PERM_DAMAGE_TO = BASE_PERM + ".damageto";
public final static String PERM_DAMAGE_BY = BASE_PERM + ".damageby";
public final static String PERM_INVENTORY = BASE_PERM + ".inventory";
public final static String PERM_EXIST = BASE_PERM + ".exist";
public static final String PERM_CRAFT = BASE_PERM + ".craft";
public final static String PERM_EXPLOSION = BASE_PERM + ".explosion";
public final static String PERM_EXPLOSION_BLOCKDMG = PERM_EXPLOSION + ".blockdmg";
public final static String PERM_NEEDSFOOD = BASE_PERM + ".needsfood";
public static final String PERM_PRESSUREPLATE = BASE_PERM + ".pressureplate";
public final static String PERM_MOBSPAWN = BASE_PERM + ".mobspawn";
public final static String PERM_MOBSPAWN_NATURAL = PERM_MOBSPAWN + ".natural";
public final static String PERM_MOBSPAWN_FORCED = PERM_MOBSPAWN + ".forced";
public static final String ZONE = BASE_PERM + ".zone";
public static final String ZONE_KNOCKBACK = ZONE + ".knockback";
public static final String ZONE_DAMAGE = ZONE + ".damage";
public static final String ZONE_DAMAGE_INTERVAL = ZONE_DAMAGE + ".interval";
public static final String ZONE_COMMAND = ZONE + ".command";
public static final String ZONE_COMMAND_INTERVAL = ZONE_COMMAND + ".interval";
public static final String ZONE_POTION = ZONE + ".potion";
public static final String ZONE_POTION_INTERVAL = ZONE_POTION + ".interval";
public static final String MSG_ZONE_DENIED = "You are not allowed to enter this area!";
private static final Class<?>[] damageEntityClasses = new Class<?>[] {
// EntityAgeable
EntityVillager.class,
// EntityAnimal
EntityChicken.class, EntityCow.class, EntityMooshroom.class, EntityHorse.class, EntityPig.class,
// EntityTameable
EntityOcelot.class, EntityWolf.class,
// EntityMob
EntityBlaze.class, EntityCreeper.class, EntityEnderman.class, EntityGiantZombie.class, EntitySilverfish.class, EntitySkeleton.class,
EntitySpider.class, EntityWitch.class, EntityWither.class, EntityZombie.class, EntityPigZombie.class,
// EntityGolem
EntityIronGolem.class, EntitySnowman.class,
// EntityWaterMob
EntitySquid.class,
/* -- end of list -- */
};
private static final DamageSource[] damageByTypes = new DamageSource[] { DamageSource.anvil, DamageSource.cactus, DamageSource.drown, DamageSource.fall,
DamageSource.fallingBlock, DamageSource.generic, DamageSource.inFire, DamageSource.inWall, DamageSource.lava, DamageSource.magic,
DamageSource.onFire, DamageSource.outOfWorld, DamageSource.starve, DamageSource.wither };
public static Map<UUID, String> debugModePlayers = new HashMap<>();
/* ------------------------------------------------------------ */
@SuppressWarnings("unused")
private ProtectionEventHandler protectionHandler;
@SubscribeEvent
public void load(FEModuleInitEvent e)
{
protectionHandler = new ProtectionEventHandler();
FECommandManager.registerCommand(new CommandItemPermission());
FECommandManager.registerCommand(new CommandProtectionDebug());
// FECommandManager.registerCommand(new CommandPlaceblock());
}
public static String getItemName(Item item)
{
try
{
return item.getItemStackDisplayName(new ItemStack(item));
}
catch (Exception | NoClassDefFoundError e)
{
return item.getUnlocalizedName();
}
}
@SuppressWarnings("unchecked")
@SubscribeEvent
public void registerPermissions(FEModuleServerInitEvent event)
{
// ----------------------------------------
// Other
APIRegistry.perms.registerPermission(PERM_SLEEP, PermissionLevel.TRUE, "Allow players to sleep in beds");
APIRegistry.perms.registerPermission(PERM_NEEDSFOOD, PermissionLevel.TRUE, "If denied to a player, their hunger bar will not deplete.");
APIRegistry.perms.registerPermission(PERM_PVP, PermissionLevel.TRUE, "If denied for at least one of two fighting players, PvP will be disabled");
APIRegistry.perms.registerPermissionProperty(PERM_GAMEMODE, "-1", "Force gamemode (-1 = none / default, 0 = survival, 1 = creative, 2 = adventure)");
APIRegistry.perms.registerPermissionProperty(PERM_INVENTORY_GROUP, "default",
"Inventory group property - can be set to any identifier to separate inventories for certain regions");
APIRegistry.perms.registerPermission(PERM_INTERACT_ENTITY, PermissionLevel.TRUE, "Allow interacting with entities (villagers, dogs, horses)");
APIRegistry.perms.registerPermission(PERM_EXPLOSION, PermissionLevel.TRUE, "(global) Allows explosions");
APIRegistry.perms.registerPermission(PERM_EXPLOSION_BLOCKDMG, PermissionLevel.TRUE, "(global) Allows explosions to damage blocks");
APIRegistry.perms.registerPermission(PERM_PRESSUREPLATE, PermissionLevel.TRUE, "Prevent players from triggering pressure plates");
APIRegistry.perms.registerPermission(PERM_FIRE_DESTROY, PermissionLevel.TRUE, "Allow fire to destroy blocks");
APIRegistry.perms.registerPermission(PERM_FIRE_SPREAD, PermissionLevel.TRUE, "Allow fire to spread");
// ----------------------------------------
// Damage
APIRegistry.perms.registerPermission(PERM_DAMAGE_TO + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow damaging entities");
APIRegistry.perms.registerPermission(PERM_DAMAGE_BY + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow getting hurt by entities");
for (Class<?> entityClass : damageEntityClasses)
{
APIRegistry.perms.registerPermission(PERM_DAMAGE_TO + "." + entityClass.getSimpleName(), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(PERM_DAMAGE_BY + "." + entityClass.getSimpleName(), PermissionLevel.TRUE);
}
for (DamageSource dmgType : damageByTypes)
{
APIRegistry.perms.registerPermission(PERM_DAMAGE_BY + "." + dmgType.getDamageType(), PermissionLevel.TRUE);
}
// ----------------------------------------
// Register mobs
APIRegistry.perms.registerPermission(PERM_MOBSPAWN + Zone.PERMISSION_ASTERIX, PermissionLevel.TRUE, "(global) Allow spawning of mobs");
APIRegistry.perms.registerPermission(PERM_MOBSPAWN_NATURAL + Zone.ALL_PERMS, PermissionLevel.TRUE,
"(global) Allow natural spawning of mobs (random spawn)");
APIRegistry.perms.registerPermission(PERM_MOBSPAWN_FORCED + Zone.ALL_PERMS, PermissionLevel.TRUE,
"(global) Allow forced spawning of mobs (mob-spawners)");
for (Entry<String, Class<? extends Entity>> e : ((Map<String, Class<? extends Entity>>) EntityList.stringToClassMapping).entrySet())
if (EntityLiving.class.isAssignableFrom(e.getValue()))
{
APIRegistry.perms.registerPermission(PERM_MOBSPAWN_NATURAL + "." + e.getKey(), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(PERM_MOBSPAWN_FORCED + "." + e.getKey(), PermissionLevel.TRUE);
}
for (MobType mobType : MobType.values())
{
APIRegistry.perms.registerPermission(mobType.getSpawnPermission(false), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(mobType.getSpawnPermission(true), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(mobType.getDamageByPermission(), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(mobType.getDamageToPermission(), PermissionLevel.TRUE);
}
// ----------------------------------------
// Register items
APIRegistry.perms.registerPermission(PERM_USE + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow using items");
APIRegistry.perms.registerPermission(PERM_INVENTORY + Zone.ALL_PERMS, PermissionLevel.TRUE,
"Allow having item in inventory. Item will be dropped if not allowed.");
APIRegistry.perms.registerPermission(PERM_EXIST + Zone.ALL_PERMS, PermissionLevel.TRUE,
"Allow having item in inventory. Item will be destroyed if not allowed.");
APIRegistry.perms.registerPermission(PERM_CRAFT + Zone.ALL_PERMS, PermissionLevel.TRUE,
"Allow crafting of items. Not necessarily works with modded crafting tables");
for (Item item : GameData.getItemRegistry().typeSafeIterable())
if (!(item instanceof ItemBlock))
{
String itemPerm = "." + getItemId(item) + Zone.ALL_PERMS;
String itemName = getItemName(item);
APIRegistry.perms.registerPermission(PERM_USE + itemPerm, PermissionLevel.TRUE, "USE " + itemName);
APIRegistry.perms.registerPermission(PERM_CRAFT + itemPerm, PermissionLevel.TRUE, "CRAFT " + itemName);
APIRegistry.perms.registerPermission(PERM_EXIST + itemPerm, PermissionLevel.TRUE, "EXIST " + itemName);
APIRegistry.perms.registerPermission(PERM_INVENTORY + itemPerm, PermissionLevel.TRUE, "INVENTORY " + itemName);
}
// ----------------------------------------
// Register blocks
APIRegistry.perms.registerPermission(PERM_BREAK + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow breaking blocks");
APIRegistry.perms.registerPermission(PERM_PLACE + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow placing blocks");
APIRegistry.perms.registerPermission(PERM_INTERACT + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow interacting with blocks (button, chest, workbench)");
for (Block block : GameData.getBlockRegistry().typeSafeIterable())
{
String blockPerm = "." + getBlockId(block) + Zone.ALL_PERMS;
String blockName = block.getLocalizedName();
APIRegistry.perms.registerPermission(PERM_BREAK + blockPerm, PermissionLevel.TRUE, "BREAK " + blockName);
APIRegistry.perms.registerPermission(PERM_PLACE + blockPerm, PermissionLevel.TRUE, "PLACE " + blockName);
APIRegistry.perms.registerPermission(PERM_INTERACT + blockPerm, PermissionLevel.TRUE, "INTERACT " + blockName);
}
// ----------------------------------------
// Register zone permissions
APIRegistry.perms.registerPermissionDescription(ZONE, "Worldborder permissions");
APIRegistry.perms.registerPermission(ZONE_KNOCKBACK, PermissionLevel.FALSE, "Deny players from entering this area");
APIRegistry.perms.registerPermissionProperty(ZONE_DAMAGE, null, "Apply this amount of damage to players, if they are in this area");
APIRegistry.perms.registerPermissionProperty(ZONE_DAMAGE_INTERVAL, "1000",
"Time interval in milliseconds for applying damage-effect. Zero = once only.");
APIRegistry.perms.registerPermissionProperty(ZONE_COMMAND, null, "Execute this command if a player enters the area");
APIRegistry.perms.registerPermissionProperty(ZONE_COMMAND_INTERVAL, "0", "Time interval in milliseconds for executing command. Zero = once only.");
APIRegistry.perms
.registerPermissionProperty(
ZONE_POTION,
null,
"Apply potion effects to players who enter this area. Comma separated list of \"ID:duration:amplifier\" pairs. See http://www.minecraftwiki.net/wiki/Potion_effects#Parameters");
APIRegistry.perms.registerPermissionProperty(ZONE_POTION_INTERVAL, "2000",
"Time interval in milliseconds for applying potion-effects. Zero = once only.");
}
@SubscribeEvent
public void postServerStart(FEModuleServerPostInitEvent e)
{
TaskRegistry.scheduleRepeated(new TimerTask() {
@Override
public void run()
{
for (EntityPlayerMP p : ServerUtil.getPlayerList())
if (!APIRegistry.perms.checkPermission(p, PERM_NEEDSFOOD))
p.getFoodStats().addStats(20, 1.0F);
}
}, 60 * 1000);
}
/* ------------------------------------------------------------ */
public static void setDebugMode(EntityPlayer player, String commandBase)
{
if (commandBase != null)
debugModePlayers.put(player.getPersistentID(), commandBase);
else
debugModePlayers.remove(player.getPersistentID());
}
public static boolean isDebugMode(EntityPlayer player)
{
return debugModePlayers.containsKey(player.getPersistentID());
}
public static void debugPermission(EntityPlayer player, String permission)
{
if (player == null)
return;
String cmdBase = debugModePlayers.get(player.getPersistentID());
if (cmdBase == null)
return;
ChatComponentTranslation msg = new ChatComponentTranslation(permission);
msg.getChatStyle().setChatClickEvent(new ClickEvent(ClickEvent.Action.SUGGEST_COMMAND, cmdBase + permission));
msg.getChatStyle().setColor(ChatOutputHandler.chatNotificationColor);
msg.getChatStyle().setUnderlined(true);
ChatOutputHandler.sendMessage(player, msg);
}
/* ------------------------------------------------------------ */
public static String getBlockId(Block block)
{
return GameData.getBlockRegistry().getNameForObject(block).replace(':', '.').replace(' ', '_');
}
public static String getBlockPermission(Block block, int meta)
{
if (meta == 0 || meta == 32767)
return getBlockId(block);
else
return getBlockId(block) + "." + meta;
}
public static String getBlockPermission(Block block, World world, int x, int y, int z)
{
return getBlockPermission(block, block.getDamageValue(world, x, y, z));
}
public static String getBlockBreakPermission(Block block, World world, int x, int y, int z)
{
return PERM_BREAK + "." + getBlockPermission(block, world, x, y, z);
}
public static String getBlockPlacePermission(Block block, World world, int x, int y, int z)
{
return PERM_PLACE + "." + getBlockPermission(block, world, x, y, z);
}
public static String getBlockInteractPermission(Block block, World world, int x, int y, int z)
{
return PERM_INTERACT + "." + getBlockPermission(block, world, x, y, z);
}
public static String getBlockBreakPermission(Block block, int meta)
{
return PERM_BREAK + "." + getBlockPermission(block, meta);
}
public static String getBlockPlacePermission(Block block, int meta)
{
return PERM_PLACE + "." + getBlockPermission(block, meta);
}
public static String getBlockInteractPermission(Block block, int meta)
{
return PERM_INTERACT + "." + getBlockPermission(block, meta);
}
/* ------------------------------------------------------------ */
public static String getItemId(Item item)
{
return GameData.getItemRegistry().getNameForObject(item).replace(':', '.').replace(' ', '_');
}
public static String getItemPermission(ItemStack stack, boolean checkMeta)
{
try
{
int dmg = stack.getItemDamage();
if (!checkMeta || dmg == 0 || dmg == 32767)
return getItemId(stack.getItem());
else
return getItemId(stack.getItem()) + "." + dmg;
}
catch (Exception e)
{
if (stack.getItem() == null)
throw new RuntimeException("Error getting item permission. Stack item is null");
else
throw new RuntimeException(String.format("Error getting item permission for item %s", stack.getItem().getClass().getName()));
}
}
public static String getItemPermission(ItemStack stack)
{
return getItemPermission(stack, true);
}
public static String getItemUsePermission(ItemStack stack)
{
return PERM_USE + "." + getItemPermission(stack);
}
public static String getItemBanPermission(ItemStack stack)
{
return PERM_EXIST + "." + getItemPermission(stack);
}
public static String getItemInventoryPermission(ItemStack stack)
{
return PERM_INVENTORY + "." + getItemPermission(stack);
}
/* ------------------------------------------------------------ */
public static EntityPlayer getCraftingPlayer(InventoryCrafting inventory)
{
Container abstractContainer = ReflectionHelper.getPrivateValue(InventoryCrafting.class, inventory, "field_70465_c", "eventHandler");
if (abstractContainer instanceof ContainerPlayer)
{
ContainerPlayer container = (ContainerPlayer) abstractContainer;
return ReflectionHelper.getPrivateValue(ContainerPlayer.class, container, "field_82862_h", "thePlayer");
}
else if (abstractContainer instanceof ContainerWorkbench)
{
SlotCrafting slot = (SlotCrafting) abstractContainer.getSlot(0);
return ReflectionHelper.getPrivateValue(SlotCrafting.class, slot, "field_75238_b", "thePlayer");
}
return null;
}
/** Permission node for crafting the given result stack (meta included). */
public static String getCraftingPermission(ItemStack stack)
{
    return PERM_CRAFT + "." + getItemPermission(stack, true);
}
/**
 * Checks whether the player may craft the given result item.
 * A null result (empty crafting output) is always allowed.
 */
public static boolean canCraft(EntityPlayer player, ItemStack result)
{
    if (result == null)
        return true;
    final String perm = getCraftingPermission(result);
    debugPermission(player, perm);
    return PermissionManager.checkPermission(player, perm);
}
}
|
src/main/java/com/forgeessentials/protection/ModuleProtection.java
|
package com.forgeessentials.protection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimerTask;
import java.util.UUID;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.boss.EntityWither;
import net.minecraft.entity.monster.EntityBlaze;
import net.minecraft.entity.monster.EntityCreeper;
import net.minecraft.entity.monster.EntityEnderman;
import net.minecraft.entity.monster.EntityGiantZombie;
import net.minecraft.entity.monster.EntityIronGolem;
import net.minecraft.entity.monster.EntityPigZombie;
import net.minecraft.entity.monster.EntitySilverfish;
import net.minecraft.entity.monster.EntitySkeleton;
import net.minecraft.entity.monster.EntitySnowman;
import net.minecraft.entity.monster.EntitySpider;
import net.minecraft.entity.monster.EntityWitch;
import net.minecraft.entity.monster.EntityZombie;
import net.minecraft.entity.passive.EntityChicken;
import net.minecraft.entity.passive.EntityCow;
import net.minecraft.entity.passive.EntityHorse;
import net.minecraft.entity.passive.EntityMooshroom;
import net.minecraft.entity.passive.EntityOcelot;
import net.minecraft.entity.passive.EntityPig;
import net.minecraft.entity.passive.EntitySquid;
import net.minecraft.entity.passive.EntityVillager;
import net.minecraft.entity.passive.EntityWolf;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.event.ClickEvent;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.ContainerPlayer;
import net.minecraft.inventory.ContainerWorkbench;
import net.minecraft.inventory.InventoryCrafting;
import net.minecraft.inventory.SlotCrafting;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.DamageSource;
import net.minecraft.world.World;
import net.minecraftforge.permission.PermissionLevel;
import net.minecraftforge.permission.PermissionManager;
import com.forgeessentials.api.APIRegistry;
import com.forgeessentials.api.permissions.Zone;
import com.forgeessentials.core.ForgeEssentials;
import com.forgeessentials.core.misc.FECommandManager;
import com.forgeessentials.core.misc.TaskRegistry;
import com.forgeessentials.core.moduleLauncher.FEModule;
import com.forgeessentials.protection.commands.CommandItemPermission;
import com.forgeessentials.protection.commands.CommandProtectionDebug;
import com.forgeessentials.util.ServerUtil;
import com.forgeessentials.util.events.FEModuleEvent.FEModuleInitEvent;
import com.forgeessentials.util.events.FEModuleEvent.FEModuleServerInitEvent;
import com.forgeessentials.util.events.FEModuleEvent.FEModuleServerPostInitEvent;
import com.forgeessentials.util.output.ChatOutputHandler;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.registry.GameData;
import cpw.mods.fml.relauncher.ReflectionHelper;
@FEModule(name = "Protection", parentMod = ForgeEssentials.class, isCore = true, canDisable = false)
public class ModuleProtection
{
public final static String BASE_PERM = "fe.protection";
public final static String PERM_PVP = BASE_PERM + ".pvp";
public final static String PERM_SLEEP = BASE_PERM + ".sleep";
public final static String PERM_GAMEMODE = BASE_PERM + ".gamemode";
public final static String PERM_INVENTORY_GROUP = BASE_PERM + ".inventorygroup";
public final static String PERM_USE = BASE_PERM + ".use";
public final static String PERM_BREAK = BASE_PERM + ".break";
public final static String PERM_PLACE = BASE_PERM + ".place";
public final static String PERM_FIRE = BASE_PERM + ".fire";
public final static String PERM_FIRE_DESTROY = PERM_FIRE + ".destroy";
public final static String PERM_FIRE_SPREAD = PERM_FIRE + ".spread";
public final static String PERM_INTERACT = BASE_PERM + ".interact";
public final static String PERM_INTERACT_ENTITY = BASE_PERM + ".interact.entity";
public final static String PERM_DAMAGE_TO = BASE_PERM + ".damageto";
public final static String PERM_DAMAGE_BY = BASE_PERM + ".damageby";
public final static String PERM_INVENTORY = BASE_PERM + ".inventory";
public final static String PERM_EXIST = BASE_PERM + ".exist";
public static final String PERM_CRAFT = BASE_PERM + ".craft";
public final static String PERM_EXPLOSION = BASE_PERM + ".explosion";
public final static String PERM_EXPLOSION_BLOCKDMG = PERM_EXPLOSION + ".blockdmg";
public final static String PERM_NEEDSFOOD = BASE_PERM + ".needsfood";
public static final String PERM_PRESSUREPLATE = BASE_PERM + ".pressureplate";
public final static String PERM_MOBSPAWN = BASE_PERM + ".mobspawn";
public final static String PERM_MOBSPAWN_NATURAL = PERM_MOBSPAWN + ".natural";
public final static String PERM_MOBSPAWN_FORCED = PERM_MOBSPAWN + ".forced";
public static final String ZONE = BASE_PERM + ".zone";
public static final String ZONE_KNOCKBACK = ZONE + ".knockback";
public static final String ZONE_DAMAGE = ZONE + ".damage";
public static final String ZONE_DAMAGE_INTERVAL = ZONE_DAMAGE + ".interval";
public static final String ZONE_COMMAND = ZONE + ".command";
public static final String ZONE_COMMAND_INTERVAL = ZONE_COMMAND + ".interval";
public static final String ZONE_POTION = ZONE + ".potion";
public static final String ZONE_POTION_INTERVAL = ZONE_POTION + ".interval";
public static final String MSG_ZONE_DENIED = "You are not allowed to enter this area!";
private static final Class<?>[] damageEntityClasses = new Class<?>[] {
// EntityAgeable
EntityVillager.class,
// EntityAnimal
EntityChicken.class, EntityCow.class, EntityMooshroom.class, EntityHorse.class, EntityPig.class,
// EntityTameable
EntityOcelot.class, EntityWolf.class,
// EntityMob
EntityBlaze.class, EntityCreeper.class, EntityEnderman.class, EntityGiantZombie.class, EntitySilverfish.class, EntitySkeleton.class,
EntitySpider.class, EntityWitch.class, EntityWither.class, EntityZombie.class, EntityPigZombie.class,
// EntityGolem
EntityIronGolem.class, EntitySnowman.class,
// EntityWaterMob
EntitySquid.class,
/* -- end of list -- */
};
private static final DamageSource[] damageByTypes = new DamageSource[] { DamageSource.anvil, DamageSource.cactus, DamageSource.drown, DamageSource.fall,
DamageSource.fallingBlock, DamageSource.generic, DamageSource.inFire, DamageSource.inWall, DamageSource.lava, DamageSource.magic,
DamageSource.onFire, DamageSource.outOfWorld, DamageSource.starve, DamageSource.wither };
public static Map<UUID, String> debugModePlayers = new HashMap<>();
/* ------------------------------------------------------------ */
@SuppressWarnings("unused")
private ProtectionEventHandler protectionHandler;
/** Module init: creates the protection event handler and registers commands. */
@SubscribeEvent
public void load(FEModuleInitEvent e)
{
    protectionHandler = new ProtectionEventHandler();
    FECommandManager.registerCommand(new CommandItemPermission());
    FECommandManager.registerCommand(new CommandProtectionDebug());
    // FECommandManager.registerCommand(new CommandPlaceblock());
}
/**
 * Human readable item name for permission descriptions. Falls back to the
 * unlocalized name if building the display name fails — note the explicit
 * NoClassDefFoundError catch, which the original code relies on (presumably
 * for client-only code paths on a dedicated server — TODO confirm).
 */
public static String getItemName(Item item)
{
    try
    {
        return item.getItemStackDisplayName(new ItemStack(item));
    }
    catch (Exception | NoClassDefFoundError e)
    {
        return item.getUnlocalizedName();
    }
}
@SuppressWarnings("unchecked")
@SubscribeEvent
public void registerPermissions(FEModuleServerInitEvent event)
{
// ----------------------------------------
// Other
APIRegistry.perms.registerPermission(PERM_SLEEP, PermissionLevel.TRUE, "Allow players to sleep in beds");
APIRegistry.perms.registerPermission(PERM_NEEDSFOOD, PermissionLevel.TRUE, "If denied to a player, their hunger bar will not deplete.");
APIRegistry.perms.registerPermission(PERM_PVP, PermissionLevel.TRUE, "If denied for at least one of two fighting players, PvP will be disabled");
APIRegistry.perms.registerPermissionProperty(PERM_GAMEMODE, "-1", "Force gamemode (-1 = none / default, 0 = survival, 1 = creative, 2 = adventure)");
APIRegistry.perms.registerPermissionProperty(PERM_INVENTORY_GROUP, "default",
"Inventory group property - can be set to any identifier to separate inventories for certain regions");
APIRegistry.perms.registerPermission(PERM_INTERACT_ENTITY, PermissionLevel.TRUE, "Allow interacting with entities (villagers, dogs, horses)");
APIRegistry.perms.registerPermission(PERM_EXPLOSION, PermissionLevel.TRUE, "(global) Allows explosions");
APIRegistry.perms.registerPermission(PERM_EXPLOSION_BLOCKDMG, PermissionLevel.TRUE, "(global) Allows explosions to damage blocks");
APIRegistry.perms.registerPermission(PERM_PRESSUREPLATE, PermissionLevel.TRUE, "Prevent players from triggering pressure plates");
APIRegistry.perms.registerPermission(PERM_FIRE_DESTROY, PermissionLevel.TRUE, "Allow fire to destroy blocks");
APIRegistry.perms.registerPermission(PERM_FIRE_SPREAD, PermissionLevel.TRUE, "Allow fire to spread");
// ----------------------------------------
// Damage
APIRegistry.perms.registerPermission(PERM_DAMAGE_TO + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow damaging entities");
APIRegistry.perms.registerPermission(PERM_DAMAGE_BY + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow getting hurt by entities");
for (Class<?> entityClass : damageEntityClasses)
{
APIRegistry.perms.registerPermission(PERM_DAMAGE_TO + "." + entityClass.getSimpleName(), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(PERM_DAMAGE_BY + "." + entityClass.getSimpleName(), PermissionLevel.TRUE);
}
for (DamageSource dmgType : damageByTypes)
{
APIRegistry.perms.registerPermission(PERM_DAMAGE_BY + "." + dmgType.getDamageType(), PermissionLevel.TRUE);
}
// ----------------------------------------
// Register mobs
APIRegistry.perms.registerPermission(PERM_MOBSPAWN + Zone.PERMISSION_ASTERIX, PermissionLevel.TRUE, "(global) Allow spawning of mobs");
APIRegistry.perms.registerPermission(PERM_MOBSPAWN_NATURAL + Zone.ALL_PERMS, PermissionLevel.TRUE,
"(global) Allow natural spawning of mobs (random spawn)");
APIRegistry.perms.registerPermission(PERM_MOBSPAWN_FORCED + Zone.ALL_PERMS, PermissionLevel.TRUE,
"(global) Allow forced spawning of mobs (mob-spawners)");
for (Entry<String, Class<? extends Entity>> e : ((Map<String, Class<? extends Entity>>) EntityList.stringToClassMapping).entrySet())
if (EntityLiving.class.isAssignableFrom(e.getValue()))
{
APIRegistry.perms.registerPermission(PERM_MOBSPAWN_NATURAL + "." + e.getKey(), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(PERM_MOBSPAWN_FORCED + "." + e.getKey(), PermissionLevel.TRUE);
}
for (MobType mobType : MobType.values())
{
APIRegistry.perms.registerPermission(mobType.getSpawnPermission(false), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(mobType.getSpawnPermission(true), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(mobType.getDamageByPermission(), PermissionLevel.TRUE);
APIRegistry.perms.registerPermission(mobType.getDamageToPermission(), PermissionLevel.TRUE);
}
// ----------------------------------------
// Register items
APIRegistry.perms.registerPermission(PERM_USE + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow using items");
APIRegistry.perms.registerPermission(PERM_INVENTORY + Zone.ALL_PERMS, PermissionLevel.TRUE,
"Allow having item in inventory. Item will be dropped if not allowed.");
APIRegistry.perms.registerPermission(PERM_EXIST + Zone.ALL_PERMS, PermissionLevel.TRUE,
"Allow having item in inventory. Item will be destroyed if not allowed.");
APIRegistry.perms.registerPermission(PERM_CRAFT + Zone.ALL_PERMS, PermissionLevel.TRUE,
"Allow crafting of items. Not necessarily works with modded crafting tables");
for (Item item : GameData.getItemRegistry().typeSafeIterable())
if (!(item instanceof ItemBlock))
{
String itemPerm = "." + getItemId(item) + Zone.ALL_PERMS;
String itemName = getItemName(item);
APIRegistry.perms.registerPermission(PERM_USE + itemPerm, PermissionLevel.TRUE, "USE " + itemName);
APIRegistry.perms.registerPermission(PERM_CRAFT + itemPerm, PermissionLevel.TRUE, "CRAFT " + itemName);
APIRegistry.perms.registerPermission(PERM_EXIST + itemPerm, PermissionLevel.TRUE, "EXIST " + itemName);
APIRegistry.perms.registerPermission(PERM_INVENTORY + itemPerm, PermissionLevel.TRUE, "INVENTORY " + itemName);
}
// ----------------------------------------
// Register blocks
APIRegistry.perms.registerPermission(PERM_BREAK + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow breaking blocks");
APIRegistry.perms.registerPermission(PERM_PLACE + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow placing blocks");
APIRegistry.perms.registerPermission(PERM_INTERACT + Zone.ALL_PERMS, PermissionLevel.TRUE, "Allow interacting with blocks (button, chest, workbench)");
for (Block block : GameData.getBlockRegistry().typeSafeIterable())
{
String blockPerm = "." + getBlockId(block) + Zone.ALL_PERMS;
String blockName = block.getLocalizedName();
APIRegistry.perms.registerPermission(PERM_BREAK + blockPerm, PermissionLevel.TRUE, "BREAK " + blockName);
APIRegistry.perms.registerPermission(PERM_PLACE + blockPerm, PermissionLevel.TRUE, "PLACE " + blockName);
APIRegistry.perms.registerPermission(PERM_INTERACT + blockPerm, PermissionLevel.TRUE, "INTERACT " + blockName);
}
// ----------------------------------------
// Register zone permissions
APIRegistry.perms.registerPermissionDescription(ZONE, "Worldborder permissions");
APIRegistry.perms.registerPermission(ZONE_KNOCKBACK, PermissionLevel.FALSE, "Deny players from entering this area");
APIRegistry.perms.registerPermissionProperty(ZONE_DAMAGE, null, "Apply this amount of damage to players, if they are in this area");
APIRegistry.perms.registerPermissionProperty(ZONE_DAMAGE_INTERVAL, "1000",
"Time interval in milliseconds for applying damage-effect. Zero = once only.");
APIRegistry.perms.registerPermissionProperty(ZONE_COMMAND, null, "Execute this command if a player enters the area");
APIRegistry.perms.registerPermissionProperty(ZONE_COMMAND_INTERVAL, "0", "Time interval in milliseconds for executing command. Zero = once only.");
APIRegistry.perms
.registerPermissionProperty(
ZONE_POTION,
null,
"Apply potion effects to players who enter this area. Comma separated list of \"ID:duration:amplifier\" pairs. See http://www.minecraftwiki.net/wiki/Potion_effects#Parameters");
APIRegistry.perms.registerPermissionProperty(ZONE_POTION_INTERVAL, "2000",
"Time interval in milliseconds for applying potion-effects. Zero = once only.");
}
/**
 * After server start, schedules a repeating task (every 60 s) that refills
 * the food bar of players who are denied PERM_NEEDSFOOD.
 */
@SubscribeEvent
public void postServerStart(FEModuleServerPostInitEvent e)
{
    TaskRegistry.scheduleRepeated(new TimerTask() {
        @Override
        public void run()
        {
            for (EntityPlayerMP p : ServerUtil.getPlayerList())
                if (!APIRegistry.perms.checkPermission(p, PERM_NEEDSFOOD))
                    p.getFoodStats().addStats(20, 1.0F);
        }
    }, 60 * 1000);
}
/* ------------------------------------------------------------ */
/**
 * Enables or disables permission-debug mode for a player.
 *
 * @param player      player to toggle
 * @param commandBase command prefix suggested on chat click, or null to disable
 */
public static void setDebugMode(EntityPlayer player, String commandBase)
{
    UUID id = player.getPersistentID();
    if (commandBase == null)
        debugModePlayers.remove(id);
    else
        debugModePlayers.put(id, commandBase);
}
/** Returns true if permission-debug mode is active for the player. */
public static boolean isDebugMode(EntityPlayer player)
{
    return debugModePlayers.containsKey(player.getPersistentID());
}
/**
 * If the player has debug mode enabled, sends the checked permission node
 * as a clickable chat message; clicking suggests "&lt;commandBase&gt;&lt;permission&gt;"
 * in the chat input.
 */
public static void debugPermission(EntityPlayer player, String permission)
{
    if (player == null)
        return;
    String cmdBase = debugModePlayers.get(player.getPersistentID());
    if (cmdBase == null)
        return; // debug mode not enabled for this player
    ChatComponentTranslation msg = new ChatComponentTranslation(permission);
    msg.getChatStyle().setChatClickEvent(new ClickEvent(ClickEvent.Action.SUGGEST_COMMAND, cmdBase + permission));
    msg.getChatStyle().setColor(ChatOutputHandler.chatNotificationColor);
    msg.getChatStyle().setUnderlined(true);
    ChatOutputHandler.sendMessage(player, msg);
}
/* ------------------------------------------------------------ */
/**
 * Returns the block's registry name converted to a permission-safe id:
 * ':' becomes '.' and spaces become '_'.
 */
public static String getBlockId(Block block)
{
    return GameData.getBlockRegistry().getNameForObject(block).replace(':', '.').replace(' ', '_');
}
/**
 * Permission id for a block with metadata. Meta 0 and the 32767 wildcard
 * map to the bare block id; any other meta is appended as a suffix.
 */
public static String getBlockPermission(Block block, int meta)
{
    String id = getBlockId(block);
    return (meta != 0 && meta != 32767) ? id + "." + meta : id;
}
/** Permission id for the block at a world position, using its damage value as meta. */
public static String getBlockPermission(Block block, World world, int x, int y, int z)
{
    return getBlockPermission(block, block.getDamageValue(world, x, y, z));
}
/** Break permission node for the block at a world position. */
public static String getBlockBreakPermission(Block block, World world, int x, int y, int z)
{
    return PERM_BREAK + "." + getBlockPermission(block, world, x, y, z);
}
/** Place permission node for the block at a world position. */
public static String getBlockPlacePermission(Block block, World world, int x, int y, int z)
{
    return PERM_PLACE + "." + getBlockPermission(block, world, x, y, z);
}
/** Interact permission node for the block at a world position. */
public static String getBlockInteractPermission(Block block, World world, int x, int y, int z)
{
    return PERM_INTERACT + "." + getBlockPermission(block, world, x, y, z);
}
/** Break permission node for a block/meta combination. */
public static String getBlockBreakPermission(Block block, int meta)
{
    return PERM_BREAK + "." + getBlockPermission(block, meta);
}
/** Place permission node for a block/meta combination. */
public static String getBlockPlacePermission(Block block, int meta)
{
    return PERM_PLACE + "." + getBlockPermission(block, meta);
}
/** Interact permission node for a block/meta combination. */
public static String getBlockInteractPermission(Block block, int meta)
{
    return PERM_INTERACT + "." + getBlockPermission(block, meta);
}
/* ------------------------------------------------------------ */
/**
 * Returns the item's registry name converted to a permission-safe id:
 * ':' becomes '.' and spaces become '_'.
 */
public static String getItemId(Item item)
{
    return GameData.getItemRegistry().getNameForObject(item).replace(':', '.').replace(' ', '_');
}
/**
 * Builds the permission id for an item stack. When checkMeta is set and the
 * damage value is meaningful (not 0 and not the 32767 wildcard), the damage
 * is appended as a meta suffix.
 *
 * @param stack     stack to build the id for
 * @param checkMeta whether to include the damage value in the id
 * @return item id, optionally suffixed with ".&lt;damage&gt;"
 * @throws RuntimeException with diagnostic context if the lookup fails
 *         (e.g. null stack item), preserving the original cause
 */
public static String getItemPermission(ItemStack stack, boolean checkMeta)
{
    try
    {
        int dmg = stack.getItemDamage();
        if (!checkMeta || dmg == 0 || dmg == 32767)
            return getItemId(stack.getItem());
        else
            return getItemId(stack.getItem()) + "." + dmg;
    }
    catch (Exception e)
    {
        // Without this wrapper a broken stack produces an uninformative NPE;
        // rethrow with context and keep the cause for the real stack trace.
        if (stack.getItem() == null)
            throw new RuntimeException("Error getting item permission. Stack item is null", e);
        else
            throw new RuntimeException(String.format("Error getting item permission for item %s", stack.getItem().getClass().getName()), e);
    }
}
/** Convenience overload: builds the item permission id including meta. */
public static String getItemPermission(ItemStack stack)
{
    return getItemPermission(stack, true);
}
/** Permission node controlling use (right-click) of the given item. */
public static String getItemUsePermission(ItemStack stack)
{
    return PERM_USE + "." + getItemPermission(stack);
}
/** Permission node for possessing the item; denial destroys the item. */
public static String getItemBanPermission(ItemStack stack)
{
    return PERM_EXIST + "." + getItemPermission(stack);
}
/** Permission node for keeping the item in inventory; denial drops it. */
public static String getItemInventoryPermission(ItemStack stack)
{
    return PERM_INVENTORY + "." + getItemPermission(stack);
}
/* ------------------------------------------------------------ */
/**
 * Resolves the player performing a craft from an InventoryCrafting by
 * reflecting into vanilla container internals (SRG field name plus
 * deobfuscated fallback). Returns null for unrecognized container types.
 */
public static EntityPlayer getCraftingPlayer(InventoryCrafting inventory)
{
    // InventoryCrafting.eventHandler (field_70465_c) is the owning container
    Container abstractContainer = ReflectionHelper.getPrivateValue(InventoryCrafting.class, inventory, "field_70465_c", "eventHandler");
    if (abstractContainer instanceof ContainerPlayer)
    {
        ContainerPlayer container = (ContainerPlayer) abstractContainer;
        return ReflectionHelper.getPrivateValue(ContainerPlayer.class, container, "field_82862_h", "thePlayer");
    }
    else if (abstractContainer instanceof ContainerWorkbench)
    {
        // Slot 0 is assumed to be the crafting result slot (cast to SlotCrafting)
        SlotCrafting slot = (SlotCrafting) abstractContainer.getSlot(0);
        return ReflectionHelper.getPrivateValue(SlotCrafting.class, slot, "field_75238_b", "thePlayer");
    }
    return null;
}
/** Permission node for crafting the given result stack (meta included). */
public static String getCraftingPermission(ItemStack stack)
{
    return PERM_CRAFT + "." + getItemPermission(stack, true);
}
/**
 * Returns whether the player is permitted to craft the given result.
 * Null results (empty crafting output) are always permitted.
 */
public static boolean canCraft(EntityPlayer player, ItemStack result)
{
    if (result == null)
        return true;
    final String node = ModuleProtection.getCraftingPermission(result);
    debugPermission(player, node);
    return PermissionManager.checkPermission(player, node);
}
}
|
Added catch block to help debugging item permission issues. References #1854
|
src/main/java/com/forgeessentials/protection/ModuleProtection.java
|
Added catch block to help debugging item permission issues. References #1854
|
<ide><path>rc/main/java/com/forgeessentials/protection/ModuleProtection.java
<ide>
<ide> public static String getItemPermission(ItemStack stack, boolean checkMeta)
<ide> {
<del> int dmg = stack.getItemDamage();
<del> if (!checkMeta || dmg == 0 || dmg == 32767)
<del> return getItemId(stack.getItem());
<del> else
<del> return getItemId(stack.getItem()) + "." + dmg;
<add> try
<add> {
<add> int dmg = stack.getItemDamage();
<add> if (!checkMeta || dmg == 0 || dmg == 32767)
<add> return getItemId(stack.getItem());
<add> else
<add> return getItemId(stack.getItem()) + "." + dmg;
<add> }
<add> catch (Exception e)
<add> {
<add> if (stack.getItem() == null)
<add> throw new RuntimeException("Error getting item permission. Stack item is null");
<add> else
<add> throw new RuntimeException(String.format("Error getting item permission for item %s", stack.getItem().getClass().getName()));
<add> }
<ide> }
<ide>
<ide> public static String getItemPermission(ItemStack stack)
|
|
JavaScript
|
mit
|
ab016727bb0c6b238c28e10cd61b7bd9434c3750
| 0 |
rt2zz/redux-persist,rt2zz/redux-persist
|
// @flow
import React, { PureComponent } from 'react' // eslint-disable-line import/no-unresolved
import type { Node } from 'react' // eslint-disable-line import/no-unresolved
import type { Persistor } from '../types'
type Props = {
onBeforeLift?: Function,
children?: Node,
loading: Node,
persistor: Persistor,
}
type State = {
bootstrapped: boolean,
}
// Gate component: blocks rendering of `children` until the persistor
// reports that the redux store has been rehydrated ("bootstrapped");
// renders the `loading` node in the meantime.
export class PersistGate extends PureComponent<Props, State> {
  state = {
    bootstrapped: false, // flips to true once persistor.getState().bootstrapped is set
  }
  _unsubscribe: ?Function // persistor subscription teardown, set in componentDidMount
  componentDidMount() {
    // Check immediately in case the persistor already bootstrapped before
    // mount, then subscribe for the transition.
    this.handlePersistorState()
    this._unsubscribe = this.props.persistor.subscribe(
      this.handlePersistorState
    )
  }
  handlePersistorState = () => {
    const { persistor } = this.props
    let { bootstrapped } = persistor.getState()
    if (bootstrapped) {
      // NOTE(review): onBeforeLift's return value is ignored; if it returns
      // a Promise the gate lifts without awaiting it — confirm intended.
      this.props.onBeforeLift && this.props.onBeforeLift()
      this.setState({ bootstrapped: true })
      this._unsubscribe && this._unsubscribe()
    }
  }
  componentWillUnmount() {
    this._unsubscribe && this._unsubscribe()
  }
  render() {
    return this.state.bootstrapped ? this.props.children : this.props.loading
  }
}
|
src/integration/react.js
|
// @flow
import React, { PureComponent, type Node } from 'react' // eslint-disable-line import/no-unresolved
import type { Persistor } from '../types'
type Props = {
onBeforeLift?: Function,
children?: Node,
loading: Node,
persistor: Persistor,
}
type State = {
bootstrapped: boolean,
}
// Gate component: delays rendering of `children` until the persistor has
// rehydrated the redux store ("bootstrapped"); renders `loading` until then.
export class PersistGate extends PureComponent<Props, State> {
  state = {
    bootstrapped: false, // flips to true once persistor.getState().bootstrapped is set
  }
  _unsubscribe: ?Function // persistor subscription teardown, set in componentDidMount
  componentDidMount() {
    // Run once immediately (persistor may already be bootstrapped),
    // then subscribe for the state change.
    this.handlePersistorState()
    this._unsubscribe = this.props.persistor.subscribe(
      this.handlePersistorState
    )
  }
  handlePersistorState = () => {
    const { persistor } = this.props
    let { bootstrapped } = persistor.getState()
    if (bootstrapped) {
      // NOTE(review): onBeforeLift is fired synchronously and its return
      // value is ignored (no Promise support here) — confirm intended.
      this.props.onBeforeLift && this.props.onBeforeLift()
      this.setState({ bootstrapped: true })
      this._unsubscribe && this._unsubscribe()
    }
  }
  componentWillUnmount() {
    this._unsubscribe && this._unsubscribe()
  }
  render() {
    return this.state.bootstrapped ? this.props.children : this.props.loading
  }
}
|
(integration): separate react type import
|
src/integration/react.js
|
(integration): separate react type import
|
<ide><path>rc/integration/react.js
<ide> // @flow
<del>import React, { PureComponent, type Node } from 'react' // eslint-disable-line import/no-unresolved
<del>
<add>import React, { PureComponent } from 'react' // eslint-disable-line import/no-unresolved
<add>import type { Node } from 'react' // eslint-disable-line import/no-unresolved
<ide> import type { Persistor } from '../types'
<ide>
<ide> type Props = {
|
|
Java
|
mit
|
3c6e09b8df6399cf9ec9b4020fe3e6177eee3ad2
| 0 |
PixelRunStudios/ChemHelper
|
package com.github.pixelrunstudios.ChemHelper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class EquationBalancer{
private static final boolean DEBUG = true;
/** Prints o to stdout without a newline when debug output is enabled. */
public static void print(Object o){
	if(!DEBUG){
		return;
	}
	System.out.print(o);
}
/** Prints o followed by a newline when debug output is enabled. */
public static void println(Object o){
	if(!DEBUG){
		return;
	}
	System.out.println(o);
}
/** Prints a bare newline when debug output is enabled. */
public static void println(){
	if(!DEBUG){
		return;
	}
	System.out.println();
}
/**
 * Ad-hoc manual test: balances C8H18 + O2 -> H2O + CO2
 * (expected coefficients 2, 25, 18, 16 per the comment in balance()).
 */
public static void main(String[] args){
	/*Map<ChemistryUnit, Integer> map1 = new LinkedHashMap<ChemistryUnit, Integer>();
	map1.put(ChemistryUnit.mk("Fe"), 1);
	map1.put(ChemistryUnit.mk("Br"), 3);
	ChemistryUnit c1 = ChemistryUnit.mk(map1);
	Map<ChemistryUnit, Integer> map2 = new LinkedHashMap<ChemistryUnit, Integer>();
	map2.put(ChemistryUnit.mk("H"), 2);
	map2.put(ChemistryUnit.mk("S"), 1);
	map2.put(ChemistryUnit.mk("O"), 4);
	ChemistryUnit c2 = ChemistryUnit.mk(map2);
	ChemistryUnit in = ChemistryUnit.mk(Pair.make(c1, 1), Pair.make(c2, 1));
	println(in);
	Map<ChemistryUnit, Integer> map3 = new LinkedHashMap<ChemistryUnit, Integer>();
	map3.put(ChemistryUnit.mk("Fe"), 2);
	map3.put(ChemistryUnit.mk("S"), 3);
	map3.put(ChemistryUnit.mk("O"), 12);
	ChemistryUnit c3 = ChemistryUnit.mk(map3);
	Map<ChemistryUnit, Integer> map4 = new LinkedHashMap<ChemistryUnit, Integer>();
	map4.put(ChemistryUnit.mk("H"), 1);
	map4.put(ChemistryUnit.mk("Br"), 1);
	ChemistryUnit c4 = ChemistryUnit.mk(map4);
	ChemistryUnit out = ChemistryUnit.mk(Pair.make(c3, 1), Pair.make(c4, 1));
	println(out);*/
	// Reactants: octane (C8H18) and oxygen (O2), coefficient 1 each
	Map<ChemistryUnit, Integer> map1 = new LinkedHashMap<ChemistryUnit, Integer>();
	map1.put(ChemistryUnit.mk("C"), 8);
	map1.put(ChemistryUnit.mk("H"), 18);
	ChemistryUnit c1 = ChemistryUnit.mk(map1);
	Map<ChemistryUnit, Integer> map2 = new LinkedHashMap<ChemistryUnit, Integer>();
	map2.put(ChemistryUnit.mk("O"), 2);
	ChemistryUnit c2 = ChemistryUnit.mk(map2);
	ChemistryUnit in = ChemistryUnit.mk(Pair.make(c1, 1), Pair.make(c2, 1));
	println(in);
	// Products: water (H2O) and carbon dioxide (CO2)
	Map<ChemistryUnit, Integer> map3 = new LinkedHashMap<ChemistryUnit, Integer>();
	map3.put(ChemistryUnit.mk("H"), 2);
	map3.put(ChemistryUnit.mk("O"), 1);
	ChemistryUnit c3 = ChemistryUnit.mk(map3);
	Map<ChemistryUnit, Integer> map4 = new LinkedHashMap<ChemistryUnit, Integer>();
	map4.put(ChemistryUnit.mk("C"), 1);
	map4.put(ChemistryUnit.mk("O"), 2);
	ChemistryUnit c4 = ChemistryUnit.mk(map4);
	ChemistryUnit out = ChemistryUnit.mk(Pair.make(c3, 1), Pair.make(c4, 1));
	println(out);
	balance(in,out);
}
/**
 * Balances the chemical equation in -> out, starting without the
 * "apart" (flatten nested units) preprocessing step.
 */
public static Pair<ChemistryUnit, ChemistryUnit>
		balance(ChemistryUnit inX, ChemistryUnit outX){
	return balance(inX, outX, false);
}
public static Pair<ChemistryUnit, ChemistryUnit>
balance(ChemistryUnit inX, ChemistryUnit outX, boolean apart){
BigFraction.setAutoSimplify(true);
if(apart){
ChemistryUnit inNewX = new ChemistryUnit();
for(Map.Entry<ChemistryUnit, Integer> cur : inX.getUnits().entrySet()){
ChemistryUnit cnr = apart(cur.getKey());
inNewX.putUnit(cnr, cur.getValue());
}
inX = inNewX;
ChemistryUnit outNewX = new ChemistryUnit();
for(Map.Entry<ChemistryUnit, Integer> cur : outX.getUnits().entrySet()){
ChemistryUnit cnr = apart(cur.getKey());
outNewX.putUnit(cnr, cur.getValue());
}
outX = outNewX;
}
//{Fe=1,Br=3}, {H=2,S=1,O=4} -> {Fe=2,S=3,O=12} + {H=1,Br=1}
//
// Map1 Map2 Map3 Map4
// Br 3 0 0 1
// Fe 1 2 2 0
// S 0 1 3 0
// H 0 2 0 1
// O 0 4 12 0
//
// Final: 2, 3, 1, 6
//{C=8,H=18}, {O=2} -> {H=2,O=1} + {C=1,O=2}
//
// Map1 Map2 Map3 Map4
// C 8 0 0 1
// H 18 0 2 0
// O 0 2 1 2
//
// Final: 2, 25, 18, 16
ArrayList<ChemistryUnit> elements = new ArrayList<ChemistryUnit>();
int mapNum = 0;
int numOfEle = 0;
for(Map.Entry<ChemistryUnit, Integer> map : inX.getUnits().entrySet()){
for(Map.Entry<ChemistryUnit, Integer> entry : map.getKey().getUnits().entrySet()){
boolean yesEle = false;
for(int i = 0; i<elements.size();i++){
if(elements.get(i).equals(entry.getKey())){
yesEle = true;
break;
}
}
if(!yesEle){
println("hi");
elements.add(entry.getKey());
}
}
}
for(Map.Entry<ChemistryUnit, Integer> map : outX.getUnits().entrySet()){
for(Map.Entry<ChemistryUnit, Integer> outEntry : map.getKey().getUnits().entrySet()){
if(!elements.contains(outEntry.getKey())){
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
}
}
for(int i = 0; i<elements.size();i++){
println(elements.get(i));
}
println();
mapNum = inX.getSubUnits().size() + outX.getSubUnits().size();
numOfEle = elements.size();
println(numOfEle + " " + mapNum);
int[][] system = new int[numOfEle][mapNum];
int newArrayRow = numOfEle-(numOfEle-mapNum+1);
int counter = add(true, inX, elements, system, 0);
add(false, outX, elements, system, counter);
println("newArrayRow: "+newArrayRow);
BigFraction[] finale = new BigFraction[mapNum];
//boolean[] finalePut = new boolean[mapNum];
finale[0] = new BigFraction(1, 1);
//finalePut[0] = true;
//boolean finalePutFull = false;
Set<Integer> set = new HashSet<Integer>();
for(int i = 0; i < elements.size(); i++){
set.add(i);
}
Set<Set<Integer>> is = SubsetHelper.subsets(set);
Set<Set<Integer>> ins = new HashSet<Set<Integer>>();
for(Set<Integer> s : is){
if(!(!(s.size() == newArrayRow) || !s.contains(1))){
println(s);
ins.add(s);
}
}
for(Set<Integer> s : ins){
Integer[] ia = new Integer[newArrayRow];
int ooo = 0;
for(Integer i : s){
ia[ooo] = i;
ooo++;
}
Pair<Integer[], BigFraction[][]> pair = solve(ia, newArrayRow, mapNum, system, numOfEle);
if(pair == null){
continue;
}
BigFraction[][] systemX = pair.getValueTwo();
int i = 0;
boolean win = true;
for(BigFraction[] sa : systemX){
if(sa[0] == null || sa[0].compareTo(new BigFraction(0, 1)) <= 0){
win = false;
}
finale[i+1] = sa[0];
i++;
}
if(win){
break;
}
/*Integer[] mX = pair.getValueOne();
BigFraction[][] systemX = pair.getValueTwo();
int i = 0;
BigFraction multiplicant = new BigFraction(1, 1);
boolean mset = false;
for(BigFraction[] sa : systemX){
if(sa[0].compareTo(new BigFraction(0, 1)) < 0){
i++;
continue;
}
println("hax: " + mX[i]);
if(finalePut[mX[i]] == true && !mset){
mset = true;
//multiplicant = finale[mX[i]].divide(sa[0]);
}
//else{
finale[mX[i]] = sa[0].multiply(multiplicant);
finalePut[mX[i]] = true;
//}
i++;
}
finalePutFull = true;
int finaleCounter = 0;
for(boolean b : finalePut){
if(!b){
finalePutFull = false;
}
else{
finaleCounter++;
}
}
println("WHoos");
print("HO ");
for(BigFraction bf : finale){
print(bf + " ");
}
println();
println(finaleCounter);*/
}
for(BigFraction bf : finale){
println("----" + bf);
}
int finDenProd = 1;
for(BigFraction bf : finale){
if(bf == null){
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
finDenProd *= bf.getDenominator();
}
for(int i = 0; i < finale.length; i++){
finale[i] = finale[i].multiply(new BigFraction(finDenProd, 1));
}
for(BigFraction bf : finale){
println("----" + bf);
}
int[] finalOutOne = new int[finale.length];
for(int i = 0; i < finale.length; i++){
finalOutOne[i] = (int) finale[i].getNumerator();
}
int gcdX = gcd(finalOutOne);
for(int i = 0; i < finale.length; i++){
finalOutOne[i] = finalOutOne[i] / gcdX;
}
for(int i : finalOutOne){
println("------ " + i);
}
int finalOutX = 0;
Map<ChemistryUnit, Integer> mapOfIn = new HashMap<ChemistryUnit, Integer>();
for(Map.Entry<ChemistryUnit, Integer> map : inX.getUnits().entrySet()){
int fOut = finalOutOne[finalOutX];
if(fOut == 0){
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
mapOfIn.put(map.getKey(), fOut);
finalOutX++;
}
Map<ChemistryUnit, Integer> mapOfOut = new HashMap<ChemistryUnit, Integer>();
for(Map.Entry<ChemistryUnit, Integer> map : outX.getUnits().entrySet()){
int fOut = finalOutOne[finalOutX];
if(fOut == 0){
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
mapOfOut.put(map.getKey(), fOut);
finalOutX++;
}
ChemistryUnit inRet = ChemistryUnit.mk(mapOfIn);
ChemistryUnit outRet = ChemistryUnit.mk(mapOfOut);
if(!balanced(inRet, outRet)){
println("Balance failure");
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
println(inRet);
println(outRet);
return new Pair<ChemistryUnit,
ChemistryUnit>(inRet, outRet);
}
/*protected static ChemistryUnit apart(ChemistryUnit inX){
ChemistryUnit inNew = new ChemistryUnit();
ChemistryUnit cn = apart(inX, inNew).getValueOne();
println(cn);
return cn;
}*/
/*private static Pair<ChemistryUnit, Boolean> apart(ChemistryUnit inX, ChemistryUnit inNew){
if(inX.getType() == ChemistryUnit.TYPE_BASE){
return Pair.make(inX, true);
}
else{
for(Map.Entry<ChemistryUnit, Integer> pair : inX.getUnits().entrySet()){
Pair<ChemistryUnit, Boolean> pcb = apart(pair.getKey(), inNew);
ChemistryUnit v1 = pcb.getValueOne();
if(pcb.getValueTwo()){
if(inNew.containsUnitKey(v1)){
inNew.putUnit(v1, inNew.getUnit(v1) + pair.getValue());
}
else{
inNew.putUnit(v1, pair.getValue());
}
}
else{
for(Map.Entry<ChemistryUnit, Integer> cu :
v1.getUnits().entrySet()){
v1.putUnit(cu.getKey(), cu.getValue() * pair.getValue());
}
}
}
return Pair.make(inNew, false);
}
}*/
/**
 * Recursively flattens a (possibly nested) ChemistryUnit into a flat
 * multiset of its base elements, multiplying element counts through each
 * nesting level. A TYPE_BASE unit is returned unchanged.
 *
 * @param inZ the unit to flatten; may be nested arbitrarily deep
 * @return a flat ChemistryUnit mapping base units to total element counts
 */
public static ChemistryUnit apart(ChemistryUnit inZ){
	/*for(Map.Entry<String, Integer> entry : inZ.entrySet()){
	mass += Double.parseDouble(data.get(entry.getKey().toLowerCase()+"_atomic-mass"))*entry.getValue();
	}*/
	// Base elements flatten to themselves.
	if(inZ.getType() == ChemistryUnit.TYPE_BASE){
		return inZ;
	}
	ChemistryUnit valOut = new ChemistryUnit();
	for(Map.Entry<ChemistryUnit, Integer> unit : inZ.getUnits().entrySet()){
		// 'val' collects the flattened contents of this one sub-unit before
		// it is scaled by the sub-unit's multiplicity below.
		ChemistryUnit val = new ChemistryUnit();
		ChemistryUnit x = apart(unit.getKey());
		if(x.getType() == ChemistryUnit.TYPE_BASE){
			val.putUnit(x, 1);
		}
		else{
			for(Map.Entry<ChemistryUnit, Integer> xZero : x.getUnits().entrySet()){
				ChemistryUnit xKey = xZero.getKey();
				if(val.containsUnitKey(xKey)){
					// NOTE(review): map keys are unique, so this branch looks
					// unreachable from this loop alone ('val' starts empty);
					// it also reads x.getUnit rather than val.getUnit —
					// verify intent if this ever fires.
					val.putUnit(xKey, x.getUnit(xKey) + xZero.getValue());
				}
				else{
					val.putUnit(xKey, xZero.getValue());
				}
			}
		}
		// Merge 'val' into the running total, scaled by this sub-unit's count.
		for(Map.Entry<ChemistryUnit, Integer> un : val.getUnits().entrySet()){
			ChemistryUnit unKey = un.getKey();
			if(valOut.containsUnitKey(unKey)){
				valOut.putUnit(unKey, valOut.getUnit(unKey) + val.getUnit(unKey) * unit.getValue());
			}
			else{
				valOut.putUnit(unKey, val.getUnit(unKey) * unit.getValue());
			}
		}
	}
	return valOut;
}
/**
 * Greatest common divisor via Euclid's algorithm.
 * Returns {@code a} unchanged when {@code b <= 0} (so gcd of an
 * all-non-positive pair is the first argument, matching the iterative
 * original).
 */
private static int gcd(int a, int b){
	return b > 0 ? gcd(b, a % b) : a;
}
/**
 * GCD of all entries of {@code ia}, folding the two-argument gcd across
 * the array. Requires a non-empty array.
 * NOTE(review): an all-zero array yields 0, which callers divide by —
 * confirm inputs always contain a non-zero entry.
 */
private static int gcd(int[] ia){
	int acc = ia[0];
	for(int value : ia){
		acc = gcd(acc, value);
	}
	return acc;
}
/**
 * Attempts to solve the balancing system for one particular selection of
 * equation rows. Rows are permuted until each coefficient column gets a
 * distinct pivot row, then Gaussian elimination and back-substitution are
 * run with exact BigFraction arithmetic. A division by zero during
 * elimination abandons the current permutation and tries the next.
 *
 * @param randomY     row indices to use, or null to let pickRandom choose
 * @param newArrayRow number of equation rows to solve with
 * @param mapNum      number of molecules (columns, incl. the fixed column 0)
 * @param system      full element-by-molecule coefficient matrix
 * @param numOfEle    total number of elements (unused here beyond context)
 * @return pair of (row indices reordered to pivot order, reduced matrix
 *         whose column 0 holds the solved coefficients), or null when no
 *         permutation of the chosen rows works
 */
public static Pair<Integer[], BigFraction[][]> solve(Integer[] randomY, int newArrayRow, int mapNum, int[][] system, int numOfEle){
	int[][] system2 = new int[newArrayRow][mapNum];
	int[][] system3 = new int[newArrayRow][mapNum-1];
	Pair<Integer[], int[][]> px = pickRandom(randomY, system, newArrayRow, mapNum);
	system2 = px.getValueTwo();
	// system3 drops column 0 (the fixed first coefficient) from system2.
	for(int i = 0; i<newArrayRow;i++){
		for(int j = 0; j<mapNum-1;j++){
			system3[i][j] = system2[i][j+1];
		}
	}
	/*int[][] numOfNonZero = new int[mapNum-1][2];
	for(int i = 0; i<newArrayRow;i++){
		for(int j = 0; j<mapNum-1;j++){
			if(system3[i][j] != 0){
				numOfNonZero[i][0] = i;
				numOfNonZero[i][1]++;
			}
		}
	}
	List<int[]> list = Arrays.asList(numOfNonZero);
	list.sort(new EquationComparator());
	numOfNonZero = list.toArray(numOfNonZero);*/
	int[] target = new int[mapNum-1];
	Integer[] ia = new Integer[newArrayRow];
	for(int i = 0; i < ia.length; i++){
		ia[i] = i;
	}
	// Every ordering of the selected rows is a candidate pivot assignment.
	Set<List<Integer>> sli = CombinatoricHelper.permutations(
			Arrays.asList(ia), ia.length);
	//boolean broken = false;
	printArray(system3);
	megaFor: for(List<Integer> li : sli){
		boolean fail = false;
		for(int i = 0; i < target.length; i++){
			target[i] = -1;
		}
		// Assign one distinct row (with a non-zero entry) per column.
		superFor: for(int i = 0; i < mapNum - 1; i++){
			outerFor: for(int lx : li){
				if(system3[lx][i] != 0){
					for(int k = 0; k<mapNum-1;k++){
						if(target[k] == lx){
							continue outerFor;
						}
					}
					target[i] = lx;
					continue superFor;
				}
			}
			if(target[i] == -1){
				fail = true;
				break;
			}
		}
		//if(!fail){
		//	broken = true;
		//}
		if(fail){
			continue;
		}
		for(int i : target){
			print("aa" + i + " ");
		}
		println();
		/*
		for(List<Integer> li : sli){
			println("SLI" + li);
			for(int i = 0; i < target.length; i++){
				target[i] = -1;
			}
			superFor: for(int i : li){
				outerFor: for(int j = 0; j<mapNum-1;j++){
					if(system3[i][j] != 0){
						//println("Ar:" + numOfNonZero[i][0]);
						for(int x = 0; x < system3[i].length; x++){
							//print("o:" + system3[numOfNonZero[i][0]][x] + " ");
						}
						//println("I:" + i + "J:" + j + "---" + system3[numOfNonZero[i][0]][j]);
						for(int k = 0; k<mapNum-1;k++){
							if(target[k] == i){
								continue outerFor;
							}
						}
						target[j] = i;
						continue superFor;
					}
				}
			}
			for(int i : target){
				print("aa" + i + " ");
			}
			println();
			boolean fail = false;
			for(int i = 0; i < target.length; i++){
				if(target[i] == -1){
					fail = true;
				}
			}
			if(!fail){
				broken = true;
				break;
			}
		}
		if(!broken){
			return null;
		}
		*/
		// Rebuild the matrix in pivot order using exact fractions.
		BigFraction[][] system4 = new BigFraction[newArrayRow][mapNum];
		for(int i = 0; i<newArrayRow;i++){
			for(int j = 0; j<mapNum;j++){
				print(system2[i][j] + " ");
				system4[i][j] = new BigFraction(system2[target[i]][j], 1);
			}
			println();
		}
		for(int i = 0;i<newArrayRow;i++){
			for(int j = 0; j<mapNum;j++){
				print(system4[i][j] + " ");
			}
			println();
		}
		// Forward elimination; a zero pivot aborts this permutation.
		for(int i = 0; i<newArrayRow;i++){
			for(int j = i+1; j<newArrayRow; j++){
				BigFraction temp = system4[j][i+1];
				for(int k = 0; k<mapNum;k++){
					//println("i:" + i + "j:" + j + "---" + system4[i][i+1]);
					try{
						system4[j][k] = system4[j][k].subtract(temp.multiply(
								system4[i][k].divide(system4[i][i+1])));
					}
					catch(ArithmeticException e){
						continue megaFor;
					}
				}
			}
		}
		println();
		for(int i = 0;i<newArrayRow;i++){
			for(int j = 0; j<mapNum;j++){
				print(system4[i][j] + " ");
			}
			println();
		}
		println();
		// Back-substitution from the last row up; column 0 accumulates the
		// solved coefficient values.
		int counter2 = 0;
		for(int i = newArrayRow-1; i>=0;i--){
			try{
				system4[i][0] = system4[i][0].divide(system4[i][mapNum-1-counter2]);
			}
			catch(ArithmeticException e){
				continue megaFor;
			}
			system4[i][mapNum-1-counter2] = system4[i][mapNum-1-counter2].divide(system4[i][mapNum-1-counter2]);
			for(int j = i-1; j>=0; j--){
				system4[j][0] = system4[j][0].subtract(system4[j][mapNum-1-counter2].multiply(system4[i][0]));
				system4[j][mapNum-1-counter2] = system4[j][mapNum-1-counter2].subtract(system4[j][mapNum-1-counter2].multiply(system4[i][mapNum-1-counter2]));
			}
			counter2++;
		}
		println();
		for(int i = 0;i<newArrayRow;i++){
			for(int j = 0; j<mapNum;j++){
				print(system4[i][j] + " ");
			}
			println();
		}
		println();
		// NOTE(review): the result of check(...) is discarded — verify
		// whether a failed check should reject this permutation.
		check(newArrayRow, mapNum, target, system3);
		// Map the chosen row indices back into pivot order for the caller.
		Integer[] iax = px.getValueOne();
		Integer[] outIax = new Integer[iax.length];
		int i = 0;
		for(Integer inx : iax){
			outIax[target[i]] = inx;
			println(inx);
			i++;
		}
		for(Integer inx : outIax){
			println(inx);
		}
		/*for(int i = 0;i<numOfEle;i++){
			print(elements.get(i)+" ");
			for(int j = 0; j<mapNum;j++){
				print(system[i][j] + " ");
			}
			println();
		}
		println();
		for(int i = 0;i<newArrayRow;i++){
			for(int j = 0; j<mapNum;j++){
				print(system2[i][j] + " ");
			}
			println();
		}
		println();
		for(int i = 0;i<newArrayRow;i++){
			for(int j = 0; j<mapNum-1;j++){
				print(system3[i][j] + " ");
			}
			println();
		}
		println();
		for(int i = 0;i<mapNum-1;i++){
			for(int j = 0; j<2;j++){
				print(numOfNonZero[i][j] + " ");
			}
			println();
		}
		println();
		for(int i = 0; i<mapNum-1;i++){
			print(target[i] + " ");
		}
		println();*/
		//TODO
		return new Pair<Integer[], BigFraction[][]>(outIax, system4);
	}
	// No permutation of the selected rows produced a solvable system.
	return null;
}
/**
 * Selects {@code newArrayRow} rows from {@code system}. When
 * {@code randomY} is null a random selection is generated: row 1 is
 * always kept first and indices 2.. are shuffled — note this makes the
 * caller nondeterministic. Otherwise the given indices are used as-is.
 *
 * @return pair of (row indices used, the selected rows — shared, not
 *         copied, with {@code system})
 */
private static Pair<Integer[], int[][]> pickRandom(Integer[] randomY, int[][] system, int newArrayRow, int mapNum){
	if(randomY == null){
		Integer[] randomX = new Integer[newArrayRow - 1];
		for(int i = 0; i < randomX.length; i++){
			randomX[i] = i + 2;
		}
		// Arrays.asList returns a view backed by randomX, so shuffling the
		// list shuffles the array in place.
		List<Integer> ia = Arrays.<Integer>asList(randomX);
		Collections.shuffle(ia);
		randomX = ia.toArray(randomX);
		randomY = new Integer[newArrayRow];
		System.arraycopy(randomX, 0, randomY, 1, randomX.length);
		randomY[0] = 1;
	}
	println(Arrays.asList(randomY));
	// Rows are aliased from 'system', not deep-copied.
	int[][] system2 = new int[newArrayRow][mapNum];
	for(int i = 0; i<newArrayRow;i++){
		system2[i] = system[randomY[i]];
	}
	Integer[] outX = new Integer[newArrayRow];
	System.arraycopy(randomY, 0, outX, 0, newArrayRow);
	return new Pair<Integer[], int[][]>(outX, system2);
}
/**
 * Writes the element counts of each molecule in {@code inX} into one
 * column of the linear system, one column per molecule starting at
 * {@code initCounter}. When {@code b} is true (reactant side) counts are
 * negated for every column except column 0.
 *
 * @return the next free column index
 */
public static int add(boolean b, ChemistryUnit inX, ArrayList<ChemistryUnit> elements, int[][] system, int initCounter){
	int column = initCounter;
	for(Map.Entry<ChemistryUnit, Integer> molecule : inX.getUnits().entrySet()){
		for(Map.Entry<ChemistryUnit, Integer> element : molecule.getKey().getUnits().entrySet()){
			// Row index of this element. Scanning backwards with a break
			// keeps the original's "last match wins, default 0" semantics.
			int row = 0;
			for(int i = elements.size() - 1; i >= 0; i--){
				if(elements.get(i).equals(element.getKey())){
					row = i;
					break;
				}
			}
			int value = element.getValue();
			if(b && column != 0){
				value = -value;
			}
			system[row][column] = value;
		}
		column++;
	}
	return column;
}
/**
 * Verifies the reduced system is in solved (identity-like) form: for each
 * row i, the pivot column target[i] holds exactly 1 and every other
 * coefficient column holds 0.
 *
 * <p>Bug fix: the original tested {@code target[i] != 1}, comparing the
 * pivot's column <em>index</em> against 1 instead of the pivot
 * <em>value</em> {@code system3[i][j]}; the method could therefore only
 * succeed when every pivot happened to sit in column 1.
 *
 * @param newArrayRow number of rows in the reduced system
 * @param mapNum      original column count (system3 has mapNum-1 columns)
 * @param target      pivot column index for each row
 * @param system3     coefficient matrix to verify
 * @return true iff every row is fully solved
 */
public static boolean check(int newArrayRow, int mapNum, int[] target, int[][] system3){
	int rowDone = 0;
	outerFor: for(int i = 0; i < newArrayRow; i++){
		boolean onTarget = false;
		int zeroes = 0;
		for(int j = 0; j < mapNum - 1; j++){
			if(j != target[i]){
				// Non-pivot columns must be zero.
				if(system3[i][j] != 0){
					break outerFor;
				}
				zeroes++;
			}
			else{
				// The pivot entry itself must be exactly 1.
				if(system3[i][j] != 1){
					break outerFor;
				}
				onTarget = true;
			}
		}
		if(zeroes == mapNum - 2 && onTarget){
			rowDone++;
		}
	}
	return rowDone == newArrayRow;
}
/**
 * Debug helper: prints each row of {@code arr} space-separated, one row
 * per line, followed by a trailing blank line.
 */
private static void printArray(int[][] arr){
	for(int r = 0; r < arr.length; r++){
		for(int c = 0; c < arr[r].length; c++){
			print(arr[r][c] + " ");
		}
		println();
	}
	println();
}
/**
 * Orders int[] equation descriptors by their second field (the
 * non-zero-coefficient count), ascending. Returns the same -1/0/1 values
 * as Integer.compare.
 */
public static class EquationComparator implements Comparator<int[]>{
	@Override
	public int compare(int[] left, int[] right){
		if(left[1] < right[1]){
			return -1;
		}
		if(left[1] > right[1]){
			return 1;
		}
		return 0;
	}
}
/**
 * Checks whether the two sides of an equation contain identical element
 * multisets. Both sides are flattened with {@link #apart} and compared in
 * both directions.
 *
 * <p>Bug fix: the second loop originally tested
 * {@code outZ.containsUnitKey(inKey)} with keys drawn from {@code outZ}'s
 * own entry set — tautologically true — so elements present only on the
 * output side were never detected. It now checks membership in
 * {@code inZ}.
 *
 * @throws IllegalArgumentException if flattening left a nested unit on
 *         the input side
 * @return true iff both sides contain exactly the same elements with the
 *         same counts
 */
public static boolean balanced(ChemistryUnit inX, ChemistryUnit outX){
	ChemistryUnit inZ = apart(inX);
	ChemistryUnit outZ = apart(outX);
	for(Map.Entry<ChemistryUnit, Integer> outZero : outZ.getUnits().entrySet()){
		System.out.println(outZero);
	}
	// Every input-side element must appear with the same count on the
	// output side. (.equals, not ==, for boxed Integer counts.)
	for(Map.Entry<ChemistryUnit, Integer> inZero : inZ.getUnits().entrySet()){
		System.out.println(inZero);
		ChemistryUnit inKey = inZero.getKey();
		if(inKey.getType() == ChemistryUnit.TYPE_NEST){
			throw new IllegalArgumentException("Illegal in - separation!");
		}
		if(!outZ.containsUnitKey(inKey) || !outZ.getUnit(inKey).equals(inZero.getValue())){
			println(inKey + ":" + outZ.getUnit(inKey) + ":" + inZero.getValue());
			println("Not Balanced! 01");
			return false;
		}
	}
	// Every output-side element must also exist on the input side.
	for(Map.Entry<ChemistryUnit, Integer> outZero : outZ.getUnits().entrySet()){
		ChemistryUnit outKey = outZero.getKey();
		if(!inZ.containsUnitKey(outKey)){
			println("Not Balanced! 02");
			return false;
		}
	}
	println("Balanced!");
	return true;
}
}
|
src/com/github/pixelrunstudios/ChemHelper/EquationBalancer.java
|
package com.github.pixelrunstudios.ChemHelper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class EquationBalancer{
private static final boolean DEBUG = true;
/**
 * Prints {@code o} without a newline when DEBUG logging is enabled;
 * otherwise does nothing.
 */
public static void print(Object o){
	if(!DEBUG){
		return;
	}
	System.out.print(o);
}
/**
 * Prints {@code o} followed by a newline when DEBUG logging is enabled;
 * otherwise does nothing.
 */
public static void println(Object o){
	if(!DEBUG){
		return;
	}
	System.out.println(o);
}
/**
 * Prints a bare newline when DEBUG logging is enabled; otherwise does
 * nothing.
 */
public static void println(){
	if(!DEBUG){
		return;
	}
	System.out.println();
}
public static void main(String[] args){
/*Map<ChemistryUnit, Integer> map1 = new LinkedHashMap<ChemistryUnit, Integer>();
map1.put(ChemistryUnit.mk("Fe"), 1);
map1.put(ChemistryUnit.mk("Br"), 3);
ChemistryUnit c1 = ChemistryUnit.mk(map1);
Map<ChemistryUnit, Integer> map2 = new LinkedHashMap<ChemistryUnit, Integer>();
map2.put(ChemistryUnit.mk("H"), 2);
map2.put(ChemistryUnit.mk("S"), 1);
map2.put(ChemistryUnit.mk("O"), 4);
ChemistryUnit c2 = ChemistryUnit.mk(map2);
ChemistryUnit in = ChemistryUnit.mk(Pair.make(c1, 1), Pair.make(c2, 1));
println(in);
Map<ChemistryUnit, Integer> map3 = new LinkedHashMap<ChemistryUnit, Integer>();
map3.put(ChemistryUnit.mk("Fe"), 2);
map3.put(ChemistryUnit.mk("S"), 3);
map3.put(ChemistryUnit.mk("O"), 12);
ChemistryUnit c3 = ChemistryUnit.mk(map3);
Map<ChemistryUnit, Integer> map4 = new LinkedHashMap<ChemistryUnit, Integer>();
map4.put(ChemistryUnit.mk("H"), 1);
map4.put(ChemistryUnit.mk("Br"), 1);
ChemistryUnit c4 = ChemistryUnit.mk(map4);
ChemistryUnit out = ChemistryUnit.mk(Pair.make(c3, 1), Pair.make(c4, 1));
println(out);*/
Map<ChemistryUnit, Integer> map1 = new LinkedHashMap<ChemistryUnit, Integer>();
map1.put(ChemistryUnit.mk("C"), 8);
map1.put(ChemistryUnit.mk("H"), 18);
ChemistryUnit c1 = ChemistryUnit.mk(map1);
Map<ChemistryUnit, Integer> map2 = new LinkedHashMap<ChemistryUnit, Integer>();
map2.put(ChemistryUnit.mk("O"), 2);
ChemistryUnit c2 = ChemistryUnit.mk(map2);
ChemistryUnit in = ChemistryUnit.mk(Pair.make(c1, 1), Pair.make(c2, 1));
println(in);
Map<ChemistryUnit, Integer> map3 = new LinkedHashMap<ChemistryUnit, Integer>();
map3.put(ChemistryUnit.mk("H"), 2);
map3.put(ChemistryUnit.mk("O"), 1);
ChemistryUnit c3 = ChemistryUnit.mk(map3);
Map<ChemistryUnit, Integer> map4 = new LinkedHashMap<ChemistryUnit, Integer>();
map4.put(ChemistryUnit.mk("C"), 1);
map4.put(ChemistryUnit.mk("O"), 2);
ChemistryUnit c4 = ChemistryUnit.mk(map4);
ChemistryUnit out = ChemistryUnit.mk(Pair.make(c3, 1), Pair.make(c4, 1));
println(out);
balance(in,out);
}
/**
 * Balances the equation inX -> outX, first attempting without separating
 * nested units (delegates to the 3-argument overload with apart=false).
 *
 * @return the balanced (input, output) pair, or null when no solution
 *         was found
 */
public static Pair<ChemistryUnit, ChemistryUnit>
balance(ChemistryUnit inX, ChemistryUnit outX){
	return balance(inX, outX, false);
}
public static Pair<ChemistryUnit, ChemistryUnit>
balance(ChemistryUnit inX, ChemistryUnit outX, boolean apart){
BigFraction.setAutoSimplify(true);
if(apart){
ChemistryUnit inNewX = new ChemistryUnit();
for(Map.Entry<ChemistryUnit, Integer> cur : inX.getUnits().entrySet()){
ChemistryUnit cnr = apart(cur.getKey());
inNewX.putUnit(cnr, cur.getValue());
}
inX = inNewX;
ChemistryUnit outNewX = new ChemistryUnit();
for(Map.Entry<ChemistryUnit, Integer> cur : outX.getUnits().entrySet()){
ChemistryUnit cnr = apart(cur.getKey());
outNewX.putUnit(cnr, cur.getValue());
}
outX = outNewX;
}
/*
* Complexity of thing
*
*/
//{Fe=1,Br=3}, {H=2,S=1,O=4} -> {Fe=2,S=3,O=12} + {H=1,Br=1}
//
// Map1 Map2 Map3 Map4
// Br 3 0 0 1
// Fe 1 2 2 0
// S 0 1 3 0
// H 0 2 0 1
// O 0 4 12 0
//
// Final: 2, 3, 1, 6
ArrayList<ChemistryUnit> elements = new ArrayList<ChemistryUnit>();
int mapNum = 0;
int numOfEle = 0;
for(Map.Entry<ChemistryUnit, Integer> map : inX.getUnits().entrySet()){
for(Map.Entry<ChemistryUnit, Integer> entry : map.getKey().getUnits().entrySet()){
boolean yesEle = false;
for(int i = 0; i<elements.size();i++){
if(elements.get(i).equals(entry.getKey())){
yesEle = true;
break;
}
}
if(!yesEle){
println("hi");
elements.add(entry.getKey());
}
}
}
for(Map.Entry<ChemistryUnit, Integer> map : outX.getUnits().entrySet()){
for(Map.Entry<ChemistryUnit, Integer> outEntry : map.getKey().getUnits().entrySet()){
if(!elements.contains(outEntry.getKey())){
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
}
}
for(int i = 0; i<elements.size();i++){
println(elements.get(i));
}
println();
mapNum = inX.getSubUnits().size() + outX.getSubUnits().size();
numOfEle = elements.size();
println(numOfEle + " " + mapNum);
int[][] system = new int[numOfEle][mapNum];
int newArrayRow = numOfEle-(numOfEle-mapNum+1);
int counter = add(true, inX, elements, system, 0);
add(false, outX, elements, system, counter);
println("newArrayRow: "+newArrayRow);
BigFraction[] finale = new BigFraction[mapNum];
//boolean[] finalePut = new boolean[mapNum];
finale[0] = new BigFraction(1, 1);
//finalePut[0] = true;
//boolean finalePutFull = false;
Set<Integer> set = new HashSet<Integer>();
for(int i = 0; i < elements.size(); i++){
set.add(i);
}
Set<Set<Integer>> is = SubsetHelper.subsets(set);
Set<Set<Integer>> ins = new HashSet<Set<Integer>>();
for(Set<Integer> s : is){
if(!(!(s.size() == newArrayRow) || !s.contains(1))){
println(s);
ins.add(s);
}
}
for(Set<Integer> s : ins){
Integer[] ia = new Integer[newArrayRow];
int ooo = 0;
for(Integer i : s){
ia[ooo] = i;
ooo++;
}
Pair<Integer[], BigFraction[][]> pair = solve(ia, newArrayRow, mapNum, system, numOfEle);
if(pair == null){
continue;
}
BigFraction[][] systemX = pair.getValueTwo();
int i = 0;
boolean win = true;
for(BigFraction[] sa : systemX){
if(sa[0] == null || sa[0].compareTo(new BigFraction(0, 1)) <= 0){
win = false;
}
finale[i+1] = sa[0];
i++;
}
if(win){
break;
}
/*Integer[] mX = pair.getValueOne();
BigFraction[][] systemX = pair.getValueTwo();
int i = 0;
BigFraction multiplicant = new BigFraction(1, 1);
boolean mset = false;
for(BigFraction[] sa : systemX){
if(sa[0].compareTo(new BigFraction(0, 1)) < 0){
i++;
continue;
}
println("hax: " + mX[i]);
if(finalePut[mX[i]] == true && !mset){
mset = true;
//multiplicant = finale[mX[i]].divide(sa[0]);
}
//else{
finale[mX[i]] = sa[0].multiply(multiplicant);
finalePut[mX[i]] = true;
//}
i++;
}
finalePutFull = true;
int finaleCounter = 0;
for(boolean b : finalePut){
if(!b){
finalePutFull = false;
}
else{
finaleCounter++;
}
}
println("WHoos");
print("HO ");
for(BigFraction bf : finale){
print(bf + " ");
}
println();
println(finaleCounter);*/
}
for(BigFraction bf : finale){
println("----" + bf);
}
int finDenProd = 1;
for(BigFraction bf : finale){
if(bf == null){
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
finDenProd *= bf.getDenominator();
}
for(int i = 0; i < finale.length; i++){
finale[i] = finale[i].multiply(new BigFraction(finDenProd, 1));
}
for(BigFraction bf : finale){
println("----" + bf);
}
int[] finalOutOne = new int[finale.length];
for(int i = 0; i < finale.length; i++){
finalOutOne[i] = (int) finale[i].getNumerator();
}
int gcdX = gcd(finalOutOne);
for(int i = 0; i < finale.length; i++){
finalOutOne[i] = finalOutOne[i] / gcdX;
}
for(int i : finalOutOne){
println("------ " + i);
}
int finalOutX = 0;
Map<ChemistryUnit, Integer> mapOfIn = new HashMap<ChemistryUnit, Integer>();
for(Map.Entry<ChemistryUnit, Integer> map : inX.getUnits().entrySet()){
int fOut = finalOutOne[finalOutX];
if(fOut == 0){
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
mapOfIn.put(map.getKey(), fOut);
finalOutX++;
}
Map<ChemistryUnit, Integer> mapOfOut = new HashMap<ChemistryUnit, Integer>();
for(Map.Entry<ChemistryUnit, Integer> map : outX.getUnits().entrySet()){
int fOut = finalOutOne[finalOutX];
if(fOut == 0){
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
mapOfOut.put(map.getKey(), fOut);
finalOutX++;
}
ChemistryUnit inRet = ChemistryUnit.mk(mapOfIn);
ChemistryUnit outRet = ChemistryUnit.mk(mapOfOut);
if(!balanced(inRet, outRet)){
println("Balance failure");
if(apart){
return null;
}
else{
println("ModeSwitch");
return balance(inX, outX, true);
}
}
println(inRet);
println(outRet);
return new Pair<ChemistryUnit,
ChemistryUnit>(inRet, outRet);
}
/*protected static ChemistryUnit apart(ChemistryUnit inX){
ChemistryUnit inNew = new ChemistryUnit();
ChemistryUnit cn = apart(inX, inNew).getValueOne();
println(cn);
return cn;
}*/
/*private static Pair<ChemistryUnit, Boolean> apart(ChemistryUnit inX, ChemistryUnit inNew){
if(inX.getType() == ChemistryUnit.TYPE_BASE){
return Pair.make(inX, true);
}
else{
for(Map.Entry<ChemistryUnit, Integer> pair : inX.getUnits().entrySet()){
Pair<ChemistryUnit, Boolean> pcb = apart(pair.getKey(), inNew);
ChemistryUnit v1 = pcb.getValueOne();
if(pcb.getValueTwo()){
if(inNew.containsUnitKey(v1)){
inNew.putUnit(v1, inNew.getUnit(v1) + pair.getValue());
}
else{
inNew.putUnit(v1, pair.getValue());
}
}
else{
for(Map.Entry<ChemistryUnit, Integer> cu :
v1.getUnits().entrySet()){
v1.putUnit(cu.getKey(), cu.getValue() * pair.getValue());
}
}
}
return Pair.make(inNew, false);
}
}*/
public static ChemistryUnit apart(ChemistryUnit inZ){
/*for(Map.Entry<String, Integer> entry : inZ.entrySet()){
mass += Double.parseDouble(data.get(entry.getKey().toLowerCase()+"_atomic-mass"))*entry.getValue();
}*/
if(inZ.getType() == ChemistryUnit.TYPE_BASE){
return inZ;
}
ChemistryUnit valOut = new ChemistryUnit();
for(Map.Entry<ChemistryUnit, Integer> unit : inZ.getUnits().entrySet()){
ChemistryUnit val = new ChemistryUnit();
ChemistryUnit x = apart(unit.getKey());
if(x.getType() == ChemistryUnit.TYPE_BASE){
val.putUnit(x, 1);
}
else{
for(Map.Entry<ChemistryUnit, Integer> xZero : x.getUnits().entrySet()){
ChemistryUnit xKey = xZero.getKey();
if(val.containsUnitKey(xKey)){
val.putUnit(xKey, x.getUnit(xKey) + xZero.getValue());
}
else{
val.putUnit(xKey, xZero.getValue());
}
}
}
for(Map.Entry<ChemistryUnit, Integer> un : val.getUnits().entrySet()){
ChemistryUnit unKey = un.getKey();
if(valOut.containsUnitKey(unKey)){
valOut.putUnit(unKey, valOut.getUnit(unKey) + val.getUnit(unKey) * unit.getValue());
}
else{
valOut.putUnit(unKey, val.getUnit(unKey) * unit.getValue());
}
}
}
return valOut;
}
/**
 * Greatest common divisor via Euclid's algorithm.
 * Returns {@code a} unchanged when {@code b <= 0}.
 */
private static int gcd(int a, int b){
	return b > 0 ? gcd(b, a % b) : a;
}
/**
 * GCD of all entries of {@code ia}, folding the two-argument gcd across
 * the array. Requires a non-empty array.
 */
private static int gcd(int[] ia){
	int acc = ia[0];
	for(int value : ia){
		acc = gcd(acc, value);
	}
	return acc;
}
public static Pair<Integer[], BigFraction[][]> solve(Integer[] randomY, int newArrayRow, int mapNum, int[][] system, int numOfEle){
int[][] system2 = new int[newArrayRow][mapNum];
int[][] system3 = new int[newArrayRow][mapNum-1];
Pair<Integer[], int[][]> px = pickRandom(randomY, system, newArrayRow, mapNum);
system2 = px.getValueTwo();
for(int i = 0; i<newArrayRow;i++){
for(int j = 0; j<mapNum-1;j++){
system3[i][j] = system2[i][j+1];
}
}
/*int[][] numOfNonZero = new int[mapNum-1][2];
for(int i = 0; i<newArrayRow;i++){
for(int j = 0; j<mapNum-1;j++){
if(system3[i][j] != 0){
numOfNonZero[i][0] = i;
numOfNonZero[i][1]++;
}
}
}
List<int[]> list = Arrays.asList(numOfNonZero);
list.sort(new EquationComparator());
numOfNonZero = list.toArray(numOfNonZero);*/
int[] target = new int[mapNum-1];
Integer[] ia = new Integer[newArrayRow];
for(int i = 0; i < ia.length; i++){
ia[i] = i;
}
Set<List<Integer>> sli = CombinatoricHelper.permutations(
Arrays.asList(ia), ia.length);
//boolean broken = false;
printArray(system3);
megaFor: for(List<Integer> li : sli){
boolean fail = false;
for(int i = 0; i < target.length; i++){
target[i] = -1;
}
superFor: for(int i = 0; i < mapNum - 1; i++){
outerFor: for(int lx : li){
if(system3[lx][i] != 0){
for(int k = 0; k<mapNum-1;k++){
if(target[k] == lx){
continue outerFor;
}
}
target[i] = lx;
continue superFor;
}
}
if(target[i] == -1){
fail = true;
break;
}
}
//if(!fail){
// broken = true;
//}
if(fail){
continue;
}
for(int i : target){
print("aa" + i + " ");
}
println();
/*
for(List<Integer> li : sli){
println("SLI" + li);
for(int i = 0; i < target.length; i++){
target[i] = -1;
}
superFor: for(int i : li){
outerFor: for(int j = 0; j<mapNum-1;j++){
if(system3[i][j] != 0){
//println("Ar:" + numOfNonZero[i][0]);
for(int x = 0; x < system3[i].length; x++){
//print("o:" + system3[numOfNonZero[i][0]][x] + " ");
}
//println("I:" + i + "J:" + j + "---" + system3[numOfNonZero[i][0]][j]);
for(int k = 0; k<mapNum-1;k++){
if(target[k] == i){
continue outerFor;
}
}
target[j] = i;
continue superFor;
}
}
}
for(int i : target){
print("aa" + i + " ");
}
println();
boolean fail = false;
for(int i = 0; i < target.length; i++){
if(target[i] == -1){
fail = true;
}
}
if(!fail){
broken = true;
break;
}
}
if(!broken){
return null;
}
*/
BigFraction[][] system4 = new BigFraction[newArrayRow][mapNum];
for(int i = 0; i<newArrayRow;i++){
for(int j = 0; j<mapNum;j++){
print(system2[i][j] + " ");
system4[i][j] = new BigFraction(system2[target[i]][j], 1);
}
println();
}
for(int i = 0;i<newArrayRow;i++){
for(int j = 0; j<mapNum;j++){
print(system4[i][j] + " ");
}
println();
}
for(int i = 0; i<newArrayRow;i++){
for(int j = i+1; j<newArrayRow; j++){
BigFraction temp = system4[j][i+1];
for(int k = 0; k<mapNum;k++){
//println("i:" + i + "j:" + j + "---" + system4[i][i+1]);
try{
system4[j][k] = system4[j][k].subtract(temp.multiply(
system4[i][k].divide(system4[i][i+1])));
}
catch(ArithmeticException e){
continue megaFor;
}
}
}
}
println();
for(int i = 0;i<newArrayRow;i++){
for(int j = 0; j<mapNum;j++){
print(system4[i][j] + " ");
}
println();
}
println();
int counter2 = 0;
for(int i = newArrayRow-1; i>=0;i--){
try{
system4[i][0] = system4[i][0].divide(system4[i][mapNum-1-counter2]);
}
catch(ArithmeticException e){
continue megaFor;
}
system4[i][mapNum-1-counter2] = system4[i][mapNum-1-counter2].divide(system4[i][mapNum-1-counter2]);
for(int j = i-1; j>=0; j--){
system4[j][0] = system4[j][0].subtract(system4[j][mapNum-1-counter2].multiply(system4[i][0]));
system4[j][mapNum-1-counter2] = system4[j][mapNum-1-counter2].subtract(system4[j][mapNum-1-counter2].multiply(system4[i][mapNum-1-counter2]));
}
counter2++;
}
println();
for(int i = 0;i<newArrayRow;i++){
for(int j = 0; j<mapNum;j++){
print(system4[i][j] + " ");
}
println();
}
println();
check(newArrayRow, mapNum, target, system3);
Integer[] iax = px.getValueOne();
Integer[] outIax = new Integer[iax.length];
int i = 0;
for(Integer inx : iax){
outIax[target[i]] = inx;
println(inx);
i++;
}
for(Integer inx : outIax){
println(inx);
}
/*for(int i = 0;i<numOfEle;i++){
print(elements.get(i)+" ");
for(int j = 0; j<mapNum;j++){
print(system[i][j] + " ");
}
println();
}
println();
for(int i = 0;i<newArrayRow;i++){
for(int j = 0; j<mapNum;j++){
print(system2[i][j] + " ");
}
println();
}
println();
for(int i = 0;i<newArrayRow;i++){
for(int j = 0; j<mapNum-1;j++){
print(system3[i][j] + " ");
}
println();
}
println();
for(int i = 0;i<mapNum-1;i++){
for(int j = 0; j<2;j++){
print(numOfNonZero[i][j] + " ");
}
println();
}
println();
for(int i = 0; i<mapNum-1;i++){
print(target[i] + " ");
}
println();*/
//TODO
return new Pair<Integer[], BigFraction[][]>(outIax, system4);
}
return null;
}
private static Pair<Integer[], int[][]> pickRandom(Integer[] randomY, int[][] system, int newArrayRow, int mapNum){
if(randomY == null){
Integer[] randomX = new Integer[newArrayRow - 1];
for(int i = 0; i < randomX.length; i++){
randomX[i] = i + 2;
}
List<Integer> ia = Arrays.<Integer>asList(randomX);
Collections.shuffle(ia);
randomX = ia.toArray(randomX);
randomY = new Integer[newArrayRow];
System.arraycopy(randomX, 0, randomY, 1, randomX.length);
randomY[0] = 1;
}
println(Arrays.asList(randomY));
int[][] system2 = new int[newArrayRow][mapNum];
for(int i = 0; i<newArrayRow;i++){
system2[i] = system[randomY[i]];
}
Integer[] outX = new Integer[newArrayRow];
System.arraycopy(randomY, 0, outX, 0, newArrayRow);
return new Pair<Integer[], int[][]>(outX, system2);
}
/**
 * Writes the element counts of each molecule in {@code inX} into one
 * column of the linear system, one column per molecule starting at
 * {@code initCounter}. When {@code b} is true (reactant side) counts are
 * negated for every column except column 0.
 *
 * @return the next free column index
 */
public static int add(boolean b, ChemistryUnit inX, ArrayList<ChemistryUnit> elements, int[][] system, int initCounter){
	int column = initCounter;
	for(Map.Entry<ChemistryUnit, Integer> molecule : inX.getUnits().entrySet()){
		for(Map.Entry<ChemistryUnit, Integer> element : molecule.getKey().getUnits().entrySet()){
			// Row index of this element. Scanning backwards with a break
			// keeps the original's "last match wins, default 0" semantics.
			int row = 0;
			for(int i = elements.size() - 1; i >= 0; i--){
				if(elements.get(i).equals(element.getKey())){
					row = i;
					break;
				}
			}
			int value = element.getValue();
			if(b && column != 0){
				value = -value;
			}
			system[row][column] = value;
		}
		column++;
	}
	return column;
}
/**
 * Verifies the reduced system is in solved (identity-like) form: for each
 * row i, the pivot column target[i] holds exactly 1 and every other
 * coefficient column holds 0.
 *
 * <p>Bug fix: the original tested {@code target[i] != 1}, comparing the
 * pivot's column <em>index</em> against 1 instead of the pivot
 * <em>value</em> {@code system3[i][j]}.
 *
 * @return true iff every row is fully solved
 */
public static boolean check(int newArrayRow, int mapNum, int[] target, int[][] system3){
	int rowDone = 0;
	outerFor: for(int i = 0; i < newArrayRow; i++){
		boolean onTarget = false;
		int zeroes = 0;
		for(int j = 0; j < mapNum - 1; j++){
			if(j != target[i]){
				// Non-pivot columns must be zero.
				if(system3[i][j] != 0){
					break outerFor;
				}
				zeroes++;
			}
			else{
				// The pivot entry itself must be exactly 1.
				if(system3[i][j] != 1){
					break outerFor;
				}
				onTarget = true;
			}
		}
		if(zeroes == mapNum - 2 && onTarget){
			rowDone++;
		}
	}
	return rowDone == newArrayRow;
}
/**
 * Debug helper: prints each row of {@code arr} space-separated, one row
 * per line, followed by a trailing blank line.
 */
private static void printArray(int[][] arr){
	for(int r = 0; r < arr.length; r++){
		for(int c = 0; c < arr[r].length; c++){
			print(arr[r][c] + " ");
		}
		println();
	}
	println();
}
/**
 * Orders int[] equation descriptors by their second field (the
 * non-zero-coefficient count), ascending. Returns the same -1/0/1 values
 * as Integer.compare.
 */
public static class EquationComparator implements Comparator<int[]>{
	@Override
	public int compare(int[] left, int[] right){
		if(left[1] < right[1]){
			return -1;
		}
		if(left[1] > right[1]){
			return 1;
		}
		return 0;
	}
}
/**
 * Checks whether the two sides of an equation contain identical element
 * multisets. Both sides are flattened with {@link #apart} and compared in
 * both directions.
 *
 * <p>Bug fixes: (1) the count comparison used {@code !=} on boxed
 * {@code Integer}s, which compares identity and spuriously fails for
 * counts outside the JVM's small-integer cache — replaced with
 * {@code .equals}; (2) the second loop tested
 * {@code outZ.containsUnitKey} with keys from {@code outZ}'s own entry
 * set — tautologically true — and now checks membership in {@code inZ}.
 *
 * @throws IllegalArgumentException if flattening left a nested unit on
 *         the input side
 * @return true iff both sides contain exactly the same elements with the
 *         same counts
 */
public static boolean balanced(ChemistryUnit inX, ChemistryUnit outX){
	ChemistryUnit inZ = apart(inX);
	ChemistryUnit outZ = apart(outX);
	for(Map.Entry<ChemistryUnit, Integer> outZero : outZ.getUnits().entrySet()){
		System.out.println(outZero);
	}
	// Every input-side element must appear with the same count on the
	// output side.
	for(Map.Entry<ChemistryUnit, Integer> inZero : inZ.getUnits().entrySet()){
		System.out.println(inZero);
		ChemistryUnit inKey = inZero.getKey();
		if(inKey.getType() == ChemistryUnit.TYPE_NEST){
			throw new IllegalArgumentException("Illegal in - separation!");
		}
		if(!outZ.containsUnitKey(inKey) || !outZ.getUnit(inKey).equals(inZero.getValue())){
			println(inKey + ":" + outZ.getUnit(inKey) + ":" + inZero.getValue());
			println("Not Balanced! 01");
			return false;
		}
	}
	// Every output-side element must also exist on the input side.
	for(Map.Entry<ChemistryUnit, Integer> outZero : outZ.getUnits().entrySet()){
		ChemistryUnit outKey = outZero.getKey();
		if(!inZ.containsUnitKey(outKey)){
			println("Not Balanced! 02");
			return false;
		}
	}
	println("Balanced!");
	return true;
}
}
|
Minor fixes to balance checking
|
src/com/github/pixelrunstudios/ChemHelper/EquationBalancer.java
|
Minor fixes to balance checking
|
<ide><path>rc/com/github/pixelrunstudios/ChemHelper/EquationBalancer.java
<ide> outX = outNewX;
<ide> }
<ide>
<del> /*
<del> * Complexity of thing
<del> *
<del> */
<ide> //{Fe=1,Br=3}, {H=2,S=1,O=4} -> {Fe=2,S=3,O=12} + {H=1,Br=1}
<ide> //
<ide> // Map1 Map2 Map3 Map4
<ide> // O 0 4 12 0
<ide> //
<ide> // Final: 2, 3, 1, 6
<add>
<add> //{C=8,H=18}, {O=2} -> {H=2,O=1} + {C=1,O=2}
<add> //
<add> // Map1 Map2 Map3 Map4
<add> // C 8 0 0 1
<add> // H 18 0 2 0
<add> // O 0 2 1 2
<add> //
<add> // Final: 2, 25, 18, 16
<ide>
<ide> ArrayList<ChemistryUnit> elements = new ArrayList<ChemistryUnit>();
<ide> int mapNum = 0;
<ide> if(inKey.getType() == ChemistryUnit.TYPE_NEST){
<ide> throw new IllegalArgumentException("Illegal in - separation!");
<ide> }
<del> if(!outZ.containsUnitKey(inKey) || outZ.getUnit(inKey) != inZero.getValue()){
<add> if(!outZ.containsUnitKey(inKey) || !outZ.getUnit(inKey).equals(inZero.getValue())){
<ide> println(inKey + ":" + outZ.getUnit(inKey) + ":" + inZero.getValue());
<ide> println("Not Balanced! 01");
<ide> return false;
|
|
JavaScript
|
mit
|
697554a2227e418663028bc6688a03eff3644ba9
| 0 |
mesmotronic/conbo,mesmotronic/conbo-js,mesmotronic/conbo-js,mesmotronic/conbo,mesmotronic/conbo
|
/**
* HTTP Service
*
* Base class for HTTP data services, with default configuration designed
* for use with JSON REST APIs.
*
* For XML data sources, you will need to override decodeFunction to parse
* response data, change the contentType and implement encodeFunction if
* you're using RPC.
*
* @class conbo.HttpService
* @augments conbo.EventDispatcher
* @author Neil Rackett
* @param {object} options - Object containing optional initialisation options, including 'rootUrl', 'contentType', 'dataType', 'headers', 'encodeFunction', 'decodeFunction', 'resultClass','makeObjectsBindable'
* @fires conbo.ConboEvent#RESULT
* @fires conbo.ConboEvent#FAULT
*/
conbo.HttpService = conbo.EventDispatcher.extend(
/** @lends conbo.HttpService.prototype */
{
__construct: function(options)
{
	// Default to JSON content unless the caller specifies otherwise.
	options = conbo.setDefaults(options,
	{
		contentType: conbo.CONTENT_TYPE_JSON
	});

	// Copy only the whitelisted options onto this instance, defaulting
	// dataType to 'json'.
	conbo.setValues(this, conbo.setDefaults(conbo.pick(options,
		'rootUrl',
		'contentType',
		'dataType',
		'headers',
		'encodeFunction',
		'decodeFunction',
		'resultClass',
		'makeObjectsBindable'
	), {
		dataType: 'json'
	}));

	// Generate one lower-case convenience method per HTTP verb
	// (post, get, put, patch, delete), each delegating to call().
	// NOTE(review): each generated method accepts a 'method' argument but
	// ignores it, always using its own verb — confirm this is intended.
	var verbs = ['POST', 'GET', 'PUT', 'PATCH', 'DELETE'];

	verbs.forEach(function(verb)
	{
		this[verb.toLowerCase()] = function(command, data, method, resultClass)
		{
			return this.call(command, data, verb, resultClass);
		};
	},
	this);

	conbo.EventDispatcher.prototype.__construct.apply(this, arguments);
},
/**
* The root URL of the web service
*/
get rootUrl()
{
return this._rootUrl || '';
},
set rootUrl(value)
{
value = String(value);
if (value && value.slice(-1) != '/')
{
value += '/';
}
this._rootUrl = value;
},
/**
 * Call a method of the web service using the specified verb
 *
 * @param {String} command - The name of the command
 * @param {Object} data - Object containing the data to send to the web service
 * @param {String} method - GET, POST, etc (default: GET)
 * @param {Class} resultClass - Optional
 * @returns {conbo.Promise}
 */
call: function(command, data, method, resultClass)
{
	// Work on a shallow copy so parseUrl/encodeFunction cannot mutate
	// the caller's object.
	data = conbo.clone(data || {});

	// NOTE(review): parseUrl presumably substitutes tokens in the command
	// from 'data' — verify against its implementation.
	command = this.parseUrl(command, data);
	data = this.encodeFunction(data, method);

	var promise = conbo.httpRequest
	({
		data: data,
		type: method || 'GET',
		headers: this.headers,
		url: this.rootUrl+command,
		contentType: this.contentType || conbo.CONTENT_TYPE_JSON,
		dataType: this.dataType,
		dataFilter: this.decodeFunction,
		resultClass: resultClass || this.resultClass,
		makeObjectsBindable: this.makeObjectsBindable
	});

	// Redispatch both success and failure outcomes as events from this
	// service instance.
	promise.then(this.dispatchEvent, this.dispatchEvent, this);

	return promise;
},
/**
* @member {function} post - Call a method of the web service using the POST verb
* @param {String} command - The name of the command
* @param {Object} data - Object containing the data to send to the web service
* @param {Class} resultClass - Optional
* @returns {conbo.Promise}
*/
/**
* @member {function} get - Call a method of the web service using the GET verb
* @param {String} command - The name of the command
* @param {Object} data - Object containing the data to send to the web service
* @param {Class} resultClass - Optional
* @returns {conbo.Promise}
*/
/**
* @member {function} put - Call a method of the web service using the PUT verb
* @param {String} command - The name of the command
* @param {Object} data - Object containing the data to send to the web service
* @param {Class} resultClass - Optional
* @returns {conbo.Promise}
*/
/**
* @member {function} patch - Call a method of the web service using the PATCH verb
* @param {String} command - The name of the command
* @param {Object} data - Object containing the data to send to the web service
* @param {Class} resultClass - Optional
* @returns {conbo.Promise}
*/
/**
* @member {function} delete - Call a method of the web service using the DELETE verb
* @param {String} command - The name of the command
* @param {Object} data - Object containing the data to send to the web service
* @param {Class} resultClass - Optional
* @returns {conbo.Promise}
*/
/**
* Add one or more remote commands as methods of this class instance
* @param {String} command - The name of the command
* @param {String} method - GET, POST, etc (default: GET)
* @param {Class} resultClass - Optional
*/
addCommand: function(command, method, resultClass)
{
if (conbo.isObject(command))
{
method = command.method;
resultClass = command.resultClass;
command = command.command;
}
this[conbo.toCamelCase(command)] = function(data)
{
return this.call(command, data, method, resultClass);
};
return this;
},
/**
* Add multiple commands as methods of this class instance
* @param {Array} commands
*/
addCommands: function(commands)
{
if (!conbo.isArray(commands))
{
return this;
}
commands.forEach(function(command)
{
this.addCommand(command);
},
this);
return this;
},
/**
* Method that encodes data to be sent to the API
*
* @param {object} data - Object containing the data to be sent to the API
* @param {String} method - GET, POST, etc (default: GET)
*/
encodeFunction: function(data, method)
{
return data;
},
/**
* Splice data into URL and remove spliced properties from data object
*/
parseUrl: function(url, data)
{
var parsedUrl = url,
matches = parsedUrl.match(/:\b\w+\b/g);
if (!!matches)
{
matches.forEach(function(key)
{
key = key.substr(1);
if (!(key in data))
{
throw new Error('Property "'+key+'" required but not found in data');
}
});
}
conbo.keys(data).forEach(function(key)
{
var regExp = new RegExp(':\\b'+key+'\\b', 'g');
if (regExp.test(parsedUrl))
{
parsedUrl = parsedUrl.replace(regExp, data[key]);
delete data[key];
}
});
return parsedUrl;
},
toString: function()
{
return 'conbo.HttpService';
}
})
.implement(conbo.IInjectable);
|
src/conbo/net/HttpService.js
|
/**
* HTTP Service
*
* Base class for HTTP data services, with default configuration designed
* for use with JSON REST APIs.
*
* For XML data sources, you will need to override decodeFunction to parse
* response data, change the contentType and implement encodeFunction if
* you're using RPC.
*
* @class conbo.HttpService
* @augments conbo.EventDispatcher
* @author Neil Rackett
* @param {object} options - Object containing optional initialisation options, including 'rootUrl', 'contentType', 'dataType', 'headers', 'encodeFunction', 'decodeFunction', 'resultClass','makeObjectsBindable'
* @fires conbo.ConboEvent#RESULT
* @fires conbo.ConboEvent#FAULT
*/
conbo.HttpService = conbo.EventDispatcher.extend(
/** @lends conbo.HttpService.prototype */
{
__construct: function(options)
{
options = conbo.setDefaults(options,
{
contentType: conbo.CONTENT_TYPE_JSON
});
conbo.setValues(this, conbo.setDefaults(conbo.pick(options,
'rootUrl',
'contentType',
'dataType',
'headers',
'encodeFunction',
'decodeFunction',
'resultClass',
'makeObjectsBindable'
), {
dataType: 'json'
}));
conbo.EventDispatcher.prototype.__construct.apply(this, arguments);
},
/**
* The root URL of the web service
*/
get rootUrl()
{
return this._rootUrl || '';
},
set rootUrl(value)
{
value = String(value);
if (value && value.slice(-1) != '/')
{
value += '/';
}
this._rootUrl = value;
},
/**
* Call a method of the web service
*
* @param {String} command - The name of the command
* @param {Object} data - Object containing the data to send to the web service
* @param {String} method - GET, POST, etc (default: GET)
* @param {Class} resultClass - Optional
* @returns {conbo.Promise}
*/
call: function(command, data, method, resultClass)
{
data = conbo.clone(data || {});
command = this.parseUrl(command, data);
data = this.encodeFunction(data, method);
var promise = conbo.httpRequest
({
data: data,
type: method || 'GET',
headers: this.headers,
url: this.rootUrl+command,
contentType: this.contentType || conbo.CONTENT_TYPE_JSON,
dataType: this.dataType,
dataFilter: this.decodeFunction,
resultClass: resultClass || this.resultClass,
makeObjectsBindable: this.makeObjectsBindable
});
promise.then(this.dispatchEvent, this.dispatchEvent, this);
return promise;
},
/**
* Add one or more remote commands as methods of this class instance
* @param {String} command - The name of the command
* @param {String} method - GET, POST, etc (default: GET)
* @param {Class} resultClass - Optional
*/
addCommand: function(command, method, resultClass)
{
if (conbo.isObject(command))
{
method = command.method;
resultClass = command.resultClass;
command = command.command;
}
this[conbo.toCamelCase(command)] = function(data)
{
return this.call(command, data, method, resultClass);
};
return this;
},
/**
* Add multiple commands as methods of this class instance
* @param {Array} commands
*/
addCommands: function(commands)
{
if (!conbo.isArray(commands))
{
return this;
}
commands.forEach(function(command)
{
this.addCommand(command);
},
this);
return this;
},
/**
* Method that encodes data to be sent to the API
*
* @param {object} data - Object containing the data to be sent to the API
* @param {String} method - GET, POST, etc (default: GET)
*/
encodeFunction: function(data, method)
{
return data;
},
/**
* Splice data into URL and remove spliced properties from data object
*/
parseUrl: function(url, data)
{
var parsedUrl = url,
matches = parsedUrl.match(/:\b\w+\b/g);
if (!!matches)
{
matches.forEach(function(key)
{
key = key.substr(1);
if (!(key in data))
{
throw new Error('Property "'+key+'" required but not found in data');
}
});
}
conbo.keys(data).forEach(function(key)
{
var regExp = new RegExp(':\\b'+key+'\\b', 'g');
if (regExp.test(parsedUrl))
{
parsedUrl = parsedUrl.replace(regExp, data[key]);
delete data[key];
}
});
return parsedUrl;
},
toString: function()
{
return 'conbo.HttpService';
}
})
.implement(conbo.IInjectable);
|
Added HTTP verbs as shortcuts to call method
|
src/conbo/net/HttpService.js
|
Added HTTP verbs as shortcuts to call method
|
<ide><path>rc/conbo/net/HttpService.js
<ide> dataType: 'json'
<ide> }));
<ide>
<add> var verbs = ['POST', 'GET', 'PUT', 'PATCH', 'DELETE'];
<add>
<add> verbs.forEach(function(verb)
<add> {
<add> this[verb.toLowerCase()] = function(command, data, method, resultClass)
<add> {
<add> return this.call(command, data, verb, resultClass);
<add> };
<add> },
<add> this);
<add>
<ide> conbo.EventDispatcher.prototype.__construct.apply(this, arguments);
<ide> },
<ide>
<ide> },
<ide>
<ide> /**
<del> * Call a method of the web service
<add> * Call a method of the web service using the specified verb
<ide> *
<ide> * @param {String} command - The name of the command
<ide> * @param {Object} data - Object containing the data to send to the web service
<ide> },
<ide>
<ide> /**
<add> * @member {function} post - Call a method of the web service using the POST verb
<add> * @param {String} command - The name of the command
<add> * @param {Object} data - Object containing the data to send to the web service
<add> * @param {Class} resultClass - Optional
<add> * @returns {conbo.Promise}
<add> */
<add>
<add> /**
<add> * @member {function} get - Call a method of the web service using the GET verb
<add> * @param {String} command - The name of the command
<add> * @param {Object} data - Object containing the data to send to the web service
<add> * @param {Class} resultClass - Optional
<add> * @returns {conbo.Promise}
<add> */
<add>
<add> /**
<add> * @member {function} put - Call a method of the web service using the PUT verb
<add> * @param {String} command - The name of the command
<add> * @param {Object} data - Object containing the data to send to the web service
<add> * @param {Class} resultClass - Optional
<add> * @returns {conbo.Promise}
<add> */
<add>
<add> /**
<add> * @member {function} patch - Call a method of the web service using the PATCH verb
<add> * @param {String} command - The name of the command
<add> * @param {Object} data - Object containing the data to send to the web service
<add> * @param {Class} resultClass - Optional
<add> * @returns {conbo.Promise}
<add> */
<add>
<add> /**
<add> * @member {function} delete - Call a method of the web service using the DELETE verb
<add> * @param {String} command - The name of the command
<add> * @param {Object} data - Object containing the data to send to the web service
<add> * @param {Class} resultClass - Optional
<add> * @returns {conbo.Promise}
<add> */
<add>
<add> /**
<ide> * Add one or more remote commands as methods of this class instance
<ide> * @param {String} command - The name of the command
<ide> * @param {String} method - GET, POST, etc (default: GET)
|
|
Java
|
mit
|
907fd1f5794d0b9b6b4e17b8cb93441dd823bac8
| 0 |
chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster
|
package fi.csc.chipster.web.adminweb.data;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.jms.JMSException;
import org.apache.log4j.Logger;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import com.vaadin.server.Page;
import com.vaadin.ui.Notification;
import com.vaadin.ui.Notification.Type;
import fi.csc.chipster.web.adminweb.ChipsterConfiguration;
import fi.csc.microarray.config.ConfigurationLoader.IllegalConfigurationException;
import fi.csc.microarray.exception.MicroarrayException;
import fi.csc.microarray.messaging.JMSMessagingEndpoint;
import fi.csc.microarray.messaging.MessagingEndpoint;
import fi.csc.microarray.messaging.MessagingTopic;
import fi.csc.microarray.messaging.MessagingTopic.AccessMode;
import fi.csc.microarray.messaging.NodeBase;
import fi.csc.microarray.messaging.SuccessMessageListener;
import fi.csc.microarray.messaging.TempTopicMessagingListenerBase;
import fi.csc.microarray.messaging.Topics;
import fi.csc.microarray.messaging.message.ChipsterMessage;
import fi.csc.microarray.messaging.message.CommandMessage;
import fi.csc.microarray.messaging.message.ParameterMessage;
import fi.csc.microarray.messaging.message.SuccessMessage;
/**
* This class uses JMS messages to send data queries and converts result messages to
* Java objects. The methods wait for the results, turning asynchronous messages to
* blocking method calls.
*
* @author klemela
*/
public class StorageAdminAPI {
private static final Logger logger = Logger.getLogger(StorageAdminAPI.class);
public interface StorageEntryListener {
public void process(List<StorageEntry> entries);
}
NodeBase nodeSupport = new NodeBase() {
public String getName() {
return "admin";
}
};
private static final long TIMEOUT = 30;
private final TimeUnit TIMEOUT_UNIT = TimeUnit.SECONDS;
private MessagingTopic filebrokerAdminTopic;
private MessagingEndpoint messagingEndpoint;
public StorageAdminAPI() throws IOException, IllegalConfigurationException, MicroarrayException, JMSException {
ChipsterConfiguration.init();
messagingEndpoint = new JMSMessagingEndpoint(nodeSupport);
filebrokerAdminTopic = messagingEndpoint.createTopic(Topics.Name.FILEBROKER_ADMIN_TOPIC, AccessMode.WRITE);
}
public List<StorageEntry> listStorageUsageOfSessions(String username) throws JMSException, InterruptedException {
StorageEntryMessageListener listener = new StorageEntryMessageListener();
return listener.query(username);
}
public List<StorageAggregate> listStorageUsageOfUsers() throws JMSException, InterruptedException {
StorageAggregateMessageListener listener = new StorageAggregateMessageListener();
return listener.query();
}
public void deleteRemoteSession(String sessionID) throws JMSException {
SuccessMessageListener replyListener = new SuccessMessageListener();
try {
CommandMessage removeRequestMessage = new CommandMessage(CommandMessage.COMMAND_REMOVE_SESSION);
removeRequestMessage.addNamedParameter(ParameterMessage.PARAMETER_SESSION_UUID, sessionID);
filebrokerAdminTopic.sendReplyableMessage(removeRequestMessage, replyListener);
SuccessMessage reply = replyListener.waitForReply(TIMEOUT, TIMEOUT_UNIT);
if (reply == null ) {
showFailNotification("Delete session failed", "No reply before timeout");
} else if (!reply.success()) {
showFailNotification("Delete session failed", reply);
}
} finally {
replyListener.cleanUp();
}
}
private class StorageEntryMessageListener extends TempTopicMessagingListenerBase {
private List<StorageEntry> entries;
private CountDownLatch latch;
public List<StorageEntry> query(String username) throws JMSException, InterruptedException {
latch = new CountDownLatch(1);
CommandMessage request = new CommandMessage(CommandMessage.COMMAND_LIST_STORAGE_USAGE_OF_SESSIONS);
request.addNamedParameter("username", username);
filebrokerAdminTopic.sendReplyableMessage(request, this);
latch.await(TIMEOUT, TIMEOUT_UNIT);
return entries;
}
public void onChipsterMessage(ChipsterMessage msg) {
ParameterMessage resultMessage = (ParameterMessage) msg;
String usernamesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_USERNAME_LIST);
String namesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_SESSION_NAME_LIST);
String sizesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_SIZE_LIST);
String datesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_DATE_LIST);
String idsString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_SESSION_UUID_LIST);
String[] usernames = usernamesString.split("\t");
String[] names = namesString.split("\t");
String[] sizes = sizesString.split("\t");
String[] dates = datesString.split("\t");
String[] ids = idsString.split("\t");
DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.dateTime();
entries = new LinkedList<StorageEntry>();
for (int i = 0; i < names.length; i++) {
StorageEntry entry = new StorageEntry();
entry.setDate(dateTimeFormatter.parseDateTime(dates[i]).toDate());
entry.setUsername(usernames[i]);
entry.setSize(Long.parseLong(sizes[i]));
entry.setName(names[i]);
entry.setID(ids[i]);
entries.add(entry);
}
latch.countDown();
}
}
private class StorageAggregateMessageListener extends TempTopicMessagingListenerBase {
private CountDownLatch latch;
private List<StorageAggregate> entries;
public List<StorageAggregate> query() throws JMSException, InterruptedException {
latch = new CountDownLatch(1);
CommandMessage request = new CommandMessage(CommandMessage.COMMAND_LIST_STORAGE_USAGE_OF_USERS);
filebrokerAdminTopic.sendReplyableMessage(request, this);
latch.await(TIMEOUT, TIMEOUT_UNIT);
return entries;
}
public void onChipsterMessage(ChipsterMessage msg) {
ParameterMessage resultMessage = (ParameterMessage) msg;
String namesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_USERNAME_LIST);
String sizesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_SIZE_LIST);
String[] names = namesString.split("\t");
String[] sizes = sizesString.split("\t");
entries = new LinkedList<StorageAggregate>();
for (int i = 0; i < names.length && i < sizes.length; i++) {
StorageAggregate entry = new StorageAggregate();
entry.setUsername(names[i]);
entry.setSize(Long.parseLong(sizes[i]));
entries.add(entry);
}
latch.countDown();
}
}
public void clean() {
if (filebrokerAdminTopic != null) {
try {
filebrokerAdminTopic.delete();
} catch (JMSException e) {
logger.error(e);
}
}
if (messagingEndpoint != null) {
try {
messagingEndpoint.close();
} catch (JMSException e) {
logger.error(e);
}
}
}
private void showFailNotification(String title, String description) {
Notification notification = new Notification(title + "\n", description, Type.WARNING_MESSAGE);
notification.setDelayMsec(-1);
notification.setHtmlContentAllowed(false);
notification.show(Page.getCurrent());
}
private void showFailNotification(String title, SuccessMessage message) {
String description = "";
String lineBreak = "\n\n";
if (message.getErrorMessage() != null && !message.getErrorMessage().isEmpty()) {
description += message.getErrorMessage() + lineBreak;
}
if (message.getDetails() != null && !message.getDetails().isEmpty()) {
description += message.getDetails() + lineBreak;
}
if (message.getExceptionString() != null && !message.getExceptionString().isEmpty()) {
description += message.getExceptionString() + lineBreak;
}
if (description.endsWith(lineBreak)) {
description = description.substring(0, description.length() - lineBreak.length());
}
showFailNotification(title, description);
}
}
|
src/main/web/admin-web/src/fi/csc/chipster/web/adminweb/data/StorageAdminAPI.java
|
package fi.csc.chipster.web.adminweb.data;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.jms.JMSException;
import org.apache.log4j.Logger;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import fi.csc.chipster.web.adminweb.ChipsterConfiguration;
import fi.csc.microarray.config.ConfigurationLoader.IllegalConfigurationException;
import fi.csc.microarray.exception.MicroarrayException;
import fi.csc.microarray.messaging.JMSMessagingEndpoint;
import fi.csc.microarray.messaging.MessagingEndpoint;
import fi.csc.microarray.messaging.MessagingTopic;
import fi.csc.microarray.messaging.MessagingTopic.AccessMode;
import fi.csc.microarray.messaging.NodeBase;
import fi.csc.microarray.messaging.SuccessMessageListener;
import fi.csc.microarray.messaging.TempTopicMessagingListenerBase;
import fi.csc.microarray.messaging.Topics;
import fi.csc.microarray.messaging.message.ChipsterMessage;
import fi.csc.microarray.messaging.message.CommandMessage;
import fi.csc.microarray.messaging.message.ParameterMessage;
import fi.csc.microarray.messaging.message.SuccessMessage;
/**
* This class uses JMS messages to send data queries and converts result messages to
* Java objects. The methods wait for the results, turning asynchronous messages to
* blocking method calls.
*
* @author klemela
*/
public class StorageAdminAPI {
private static final Logger logger = Logger.getLogger(StorageAdminAPI.class);
public interface StorageEntryListener {
public void process(List<StorageEntry> entries);
}
NodeBase nodeSupport = new NodeBase() {
public String getName() {
return "admin";
}
};
private static final long TIMEOUT = 30;
private final TimeUnit TIMEOUT_UNIT = TimeUnit.SECONDS;
private MessagingTopic filebrokerAdminTopic;
private MessagingEndpoint messagingEndpoint;
public StorageAdminAPI() throws IOException, IllegalConfigurationException, MicroarrayException, JMSException {
ChipsterConfiguration.init();
messagingEndpoint = new JMSMessagingEndpoint(nodeSupport);
filebrokerAdminTopic = messagingEndpoint.createTopic(Topics.Name.FILEBROKER_ADMIN_TOPIC, AccessMode.WRITE);
}
public List<StorageEntry> listStorageUsageOfSessions(String username) throws JMSException, InterruptedException {
StorageEntryMessageListener listener = new StorageEntryMessageListener();
return listener.query(username);
}
public List<StorageAggregate> listStorageUsageOfUsers() throws JMSException, InterruptedException {
StorageAggregateMessageListener listener = new StorageAggregateMessageListener();
return listener.query();
}
public void deleteRemoteSession(String sessionID) throws JMSException {
SuccessMessageListener replyListener = new SuccessMessageListener();
try {
CommandMessage removeRequestMessage = new CommandMessage(CommandMessage.COMMAND_REMOVE_SESSION);
removeRequestMessage.addNamedParameter(ParameterMessage.PARAMETER_SESSION_UUID, sessionID);
filebrokerAdminTopic.sendReplyableMessage(removeRequestMessage, replyListener);
SuccessMessage reply = replyListener.waitForReply(TIMEOUT, TIMEOUT_UNIT);
if (reply == null || !reply.success()) {
// FIX ME, communicate properly
throw new JMSException("failed to remove session");
}
} finally {
replyListener.cleanUp();
}
}
private class StorageEntryMessageListener extends TempTopicMessagingListenerBase {
private List<StorageEntry> entries;
private CountDownLatch latch;
public List<StorageEntry> query(String username) throws JMSException, InterruptedException {
latch = new CountDownLatch(1);
CommandMessage request = new CommandMessage(CommandMessage.COMMAND_LIST_STORAGE_USAGE_OF_SESSIONS);
request.addNamedParameter("username", username);
filebrokerAdminTopic.sendReplyableMessage(request, this);
latch.await(TIMEOUT, TIMEOUT_UNIT);
return entries;
}
public void onChipsterMessage(ChipsterMessage msg) {
ParameterMessage resultMessage = (ParameterMessage) msg;
String usernamesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_USERNAME_LIST);
String namesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_SESSION_NAME_LIST);
String sizesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_SIZE_LIST);
String datesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_DATE_LIST);
String idsString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_SESSION_UUID_LIST);
String[] usernames = usernamesString.split("\t");
String[] names = namesString.split("\t");
String[] sizes = sizesString.split("\t");
String[] dates = datesString.split("\t");
String[] ids = idsString.split("\t");
DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.dateTime();
entries = new LinkedList<StorageEntry>();
for (int i = 0; i < names.length; i++) {
StorageEntry entry = new StorageEntry();
entry.setDate(dateTimeFormatter.parseDateTime(dates[i]).toDate());
entry.setUsername(usernames[i]);
entry.setSize(Long.parseLong(sizes[i]));
entry.setName(names[i]);
entry.setID(ids[i]);
entries.add(entry);
}
latch.countDown();
}
}
private class StorageAggregateMessageListener extends TempTopicMessagingListenerBase {
private CountDownLatch latch;
private List<StorageAggregate> entries;
public List<StorageAggregate> query() throws JMSException, InterruptedException {
latch = new CountDownLatch(1);
CommandMessage request = new CommandMessage(CommandMessage.COMMAND_LIST_STORAGE_USAGE_OF_USERS);
filebrokerAdminTopic.sendReplyableMessage(request, this);
latch.await(TIMEOUT, TIMEOUT_UNIT);
return entries;
}
public void onChipsterMessage(ChipsterMessage msg) {
ParameterMessage resultMessage = (ParameterMessage) msg;
String namesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_USERNAME_LIST);
String sizesString = resultMessage.getNamedParameter(ParameterMessage.PARAMETER_SIZE_LIST);
String[] names = namesString.split("\t");
String[] sizes = sizesString.split("\t");
entries = new LinkedList<StorageAggregate>();
for (int i = 0; i < names.length && i < sizes.length; i++) {
StorageAggregate entry = new StorageAggregate();
entry.setUsername(names[i]);
entry.setSize(Long.parseLong(sizes[i]));
entries.add(entry);
}
latch.countDown();
}
}
public void clean() {
if (filebrokerAdminTopic != null) {
try {
filebrokerAdminTopic.delete();
} catch (JMSException e) {
logger.error(e);
}
}
if (messagingEndpoint != null) {
try {
messagingEndpoint.close();
} catch (JMSException e) {
logger.error(e);
}
}
}
}
|
Add notification for delete session fail in admin web
|
src/main/web/admin-web/src/fi/csc/chipster/web/adminweb/data/StorageAdminAPI.java
|
Add notification for delete session fail in admin web
|
<ide><path>rc/main/web/admin-web/src/fi/csc/chipster/web/adminweb/data/StorageAdminAPI.java
<ide> import org.apache.log4j.Logger;
<ide> import org.joda.time.format.DateTimeFormatter;
<ide> import org.joda.time.format.ISODateTimeFormat;
<add>
<add>import com.vaadin.server.Page;
<add>import com.vaadin.ui.Notification;
<add>import com.vaadin.ui.Notification.Type;
<ide>
<ide> import fi.csc.chipster.web.adminweb.ChipsterConfiguration;
<ide> import fi.csc.microarray.config.ConfigurationLoader.IllegalConfigurationException;
<ide>
<ide> SuccessMessage reply = replyListener.waitForReply(TIMEOUT, TIMEOUT_UNIT);
<ide>
<del> if (reply == null || !reply.success()) {
<del> // FIX ME, communicate properly
<del> throw new JMSException("failed to remove session");
<del> }
<del>
<add> if (reply == null ) {
<add> showFailNotification("Delete session failed", "No reply before timeout");
<add> } else if (!reply.success()) {
<add> showFailNotification("Delete session failed", reply);
<add> }
<ide> } finally {
<ide> replyListener.cleanUp();
<ide> }
<ide> }
<ide> }
<ide> }
<add>
<add>
<add> private void showFailNotification(String title, String description) {
<add> Notification notification = new Notification(title + "\n", description, Type.WARNING_MESSAGE);
<add> notification.setDelayMsec(-1);
<add> notification.setHtmlContentAllowed(false);
<add> notification.show(Page.getCurrent());
<add>
<add>
<add> }
<add>
<add> private void showFailNotification(String title, SuccessMessage message) {
<add> String description = "";
<add> String lineBreak = "\n\n";
<add> if (message.getErrorMessage() != null && !message.getErrorMessage().isEmpty()) {
<add> description += message.getErrorMessage() + lineBreak;
<add> }
<add>
<add> if (message.getDetails() != null && !message.getDetails().isEmpty()) {
<add> description += message.getDetails() + lineBreak;
<add> }
<add>
<add> if (message.getExceptionString() != null && !message.getExceptionString().isEmpty()) {
<add> description += message.getExceptionString() + lineBreak;
<add> }
<add>
<add> if (description.endsWith(lineBreak)) {
<add> description = description.substring(0, description.length() - lineBreak.length());
<add> }
<add> showFailNotification(title, description);
<add> }
<add>
<ide> }
|
|
Java
|
mit
|
54fdfe39065c279e98d31a489fb9becbc888b598
| 0 |
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
package generatedtest;
import java.util.Collection;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Queue;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;
import org.apache.commons.collections4.ArrayStack;
import org.apache.commons.collections4.Bag;
import org.apache.commons.collections4.BagUtils;
import org.apache.commons.collections4.BidiMap;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.EnumerationUtils;
import org.apache.commons.collections4.Factory;
import org.apache.commons.collections4.FluentIterable;
import org.apache.commons.collections4.Get;
import org.apache.commons.collections4.IterableGet;
import org.apache.commons.collections4.IterableMap;
import org.apache.commons.collections4.IterableSortedMap;
import org.apache.commons.collections4.IterableUtils;
import org.apache.commons.collections4.IteratorUtils;
import org.apache.commons.collections4.KeyValue;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.collections4.ListValuedMap;
import org.apache.commons.collections4.MapIterator;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.collections4.MultiMap;
import org.apache.commons.collections4.MultiMapUtils;
import org.apache.commons.collections4.MultiSet;
import org.apache.commons.collections4.MultiSetUtils;
import org.apache.commons.collections4.MultiValuedMap;
import org.apache.commons.collections4.OrderedIterator;
import org.apache.commons.collections4.OrderedMap;
import org.apache.commons.collections4.OrderedMapIterator;
import org.apache.commons.collections4.Predicate;
import org.apache.commons.collections4.Put;
import org.apache.commons.collections4.QueueUtils;
import org.apache.commons.collections4.ResettableIterator;
import org.apache.commons.collections4.ResettableListIterator;
import org.apache.commons.collections4.SetUtils;
import org.apache.commons.collections4.SetValuedMap;
import org.apache.commons.collections4.SortedBag;
import org.apache.commons.collections4.SplitMapUtils;
import org.apache.commons.collections4.Transformer;
import org.apache.commons.collections4.Trie;
import org.apache.commons.collections4.TrieUtils;
import org.apache.commons.collections4.bag.TreeBag;
import org.apache.commons.collections4.bidimap.TreeBidiMap;
import org.apache.commons.collections4.iterators.BoundedIterator;
import org.apache.commons.collections4.iterators.IteratorEnumeration;
import org.apache.commons.collections4.iterators.SkippingIterator;
import org.apache.commons.collections4.iterators.ZippingIterator;
import org.apache.commons.collections4.keyvalue.AbstractKeyValue;
import org.apache.commons.collections4.keyvalue.AbstractMapEntry;
import org.apache.commons.collections4.keyvalue.AbstractMapEntryDecorator;
import org.apache.commons.collections4.keyvalue.DefaultKeyValue;
import org.apache.commons.collections4.keyvalue.DefaultMapEntry;
import org.apache.commons.collections4.keyvalue.TiedMapEntry;
import org.apache.commons.collections4.keyvalue.UnmodifiableMapEntry;
import org.apache.commons.collections4.map.AbstractIterableMap;
import org.apache.commons.collections4.map.AbstractMapDecorator;
import org.apache.commons.collections4.map.HashedMap;
import org.apache.commons.collections4.map.LinkedMap;
import org.apache.commons.collections4.map.MultiValueMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
import org.apache.commons.collections4.multimap.HashSetValuedHashMap;
import org.apache.commons.collections4.multiset.HashMultiSet;
import org.apache.commons.collections4.trie.PatriciaTrie;
// Test case generated by GenerateFlowTestCase.ql
public class Test {
    // ------------------------------------------------------------------------
    // Read helpers: each overload extracts a single element, map key or map
    // value from a container. The generated flow tests below use these as the
    // read step that carries a tainted value from a container into sink().
    // NOTE(review): bodies are intentionally minimal and must keep their exact
    // call shape — CodeQL matches these access paths when checking flow models.
    // ------------------------------------------------------------------------

    /** First element of an array. */
    static Object getArrayElement(Object[] container) { return container[0]; }
    /** Next element of an Enumeration. */
    static Object getElement(Enumeration container) { return container.nextElement(); }
    /** First element of an Iterable. */
    static <T> T getElement(Iterable<T> container) { return container.iterator().next(); }
    /** Next element of an Iterator. */
    static Object getElement(Iterator container) { return container.next(); }
    /** Element held by a MultiSet.Entry. */
    static Object getElement(MultiSet.Entry container) { return container.getElement(); }
    /** Key of an AbstractKeyValue pair. */
    static Object getMapKey(AbstractKeyValue container) { return container.getKey(); }
    /** Key of a Map.Entry. */
    static Object getMapKeyFromEntry(Map.Entry container) { return container.getKey(); }
    /** Key of a decorated map entry. */
    static Object getMapKey(AbstractMapEntryDecorator container) { return container.getKey(); }
    /** First key of a Map. */
    static Object getMapKey(Map container) { return container.keySet().iterator().next(); }
    /** First key of a MultiValuedMap. */
    static Object getMapKey(MultiValuedMap container) { return container.keySet().iterator().next(); }
    /** Key of a Put, read via its Map view. */
    static Object getMapKeyFromPut(Put container) { return getMapKey((Map)container); }
    /** Value of an AbstractKeyValue pair. */
    static Object getMapValue(AbstractKeyValue container) { return container.getValue(); }
    /** Value of a Map.Entry. */
    static Object getMapValueFromEntry(Map.Entry container) { return container.getValue(); }
    /** Value of a decorated map entry. */
    static Object getMapValue(AbstractMapEntryDecorator container) { return container.getValue(); }
    /** Value under the null key (the builders below always insert at key null). */
    static Object getMapValue(Map container) { return container.get(null); }
    /** Value at the current position of a MapIterator. */
    static Object getMapValue(MapIterator container) { return container.getValue(); }
    /** Values under the null key of a MultiValuedMap. */
    static Collection getMapValue(MultiValuedMap container) { return container.get(null); }
    /** Value of a Put, read via its Map view. */
    static Object getMapValueFromPut(Put container) { return getMapValue((Map)container); }
    // ------------------------------------------------------------------------
    // Write helpers (element containers): each builder constructs a container
    // holding the given element, providing the "write step" for flow tests that
    // model Element-of content.
    // ------------------------------------------------------------------------

    /** Single-element array. */
    Object[] newWithArrayElement(Object element) { return new Object[] {element}; }
    /** ArrayStack with one pushed element. */
    ArrayStack<String> newArrayStackWithElement(String element) { ArrayStack<String> a = new ArrayStack<String>(); a.push(element); return a; }
    /** Enumeration over a single-element Vector. */
    Enumeration<String> newEnumerationWithElement(String element) { return new IteratorEnumeration<String>(newVectorWithElement(element).iterator()); }
    /** FluentIterable over a single element. */
    FluentIterable<String> newFluentIterableWithElement(String element) { return FluentIterable.of(element); }
    /** ListIterator over a single-element Vector. */
    ListIterator<String> newListIteratorWithElement(String element) { return newVectorWithElement(element).listIterator(); }
    /** MultiSet.Entry taken from a single-element MultiSet. */
    MultiSet.Entry<String> newMultiSetEntryWithElement(String element) { return getElement(newMultiSetWithElement(element).entrySet()); }
    /** HashMultiSet with one element. */
    MultiSet<String> newMultiSetWithElement(String element) { HashMultiSet<String> h = new HashMultiSet<String>(); h.add(element); return h; }
    /** Queue (backed by a LinkedList) with one element. */
    Queue<String> newQueueWithElement(String element) { LinkedList<String> q = new LinkedList<String>(); q.add(element); return q; }
    /** MySetView (see inner class below) with one element. */
    MySetView<String> newSetViewWithElement(String element) { MySetView<String> s = new MySetView<String>(); s.add(element); return s; }
    /** TreeBag with one element. */
    TreeBag<String> newTreeBagWithElement(String element) { TreeBag<String> b = new TreeBag<String>(); b.add(element); return b; }
    /** TreeSet with one element. */
    TreeSet<String> newTreeSetWithElement(String element) { TreeSet<String> h = new TreeSet<String>(); h.add(element); return h; }
    /** Vector with one String element. */
    Vector<String> newVectorWithElement(String element) { Vector<String> v = new Vector<String>(); v.add(element); return v; }
    /** Vector with one Iterable element (overload for nested-container tests). */
    Vector<Iterable<String>> newVectorWithElement(Iterable<String> element) { Vector<Iterable<String>> v = new Vector<Iterable<String>>(); v.add(element); return v; }
    // ------------------------------------------------------------------------
    // Write helpers (map keys): each builder constructs a map-like container
    // whose KEY is the given element (value is null), providing the write step
    // for MapKey-of flow tests.
    // ------------------------------------------------------------------------

    /** TreeBidiMap keyed by the element. */
    TreeBidiMap newTreeBidiMapWithMapKey(Object element) { TreeBidiMap m = new TreeBidiMap(); m.put(element,null); return m; }
    /** MyAbstractKeyValue keyed by the element. */
    MyAbstractKeyValue newMAKVWithMapKey(Object element) { return new MyAbstractKeyValue(element,null); }
    /** DefaultKeyValue keyed by the element. */
    DefaultKeyValue newDKVWithMapKey(Object element) { return new DefaultKeyValue(element,null); }
    /** HashedMap keyed by the element. */
    HashedMap newHashedMapWithMapKey(Object element) { HashedMap m = new HashedMap(); m.put(element,null); return m; }
    /** MyAbstractMapEntry keyed by the element. */
    MyAbstractMapEntry newMAMEWithMapKey(Object element) { return new MyAbstractMapEntry(element,null); }
    /** Decorator wrapping an entry keyed by the element. */
    MyAbstractMapEntryDecorator newMAMEDWithMapKey(Object element) { return new MyAbstractMapEntryDecorator(newMAMEWithMapKey(element)); }
    /** MultiValueMap keyed by the element. */
    MultiValueMap newMVMWithMapKey(Object element) { MultiValueMap m = new MultiValueMap(); m.put(element,null); return m; }
    /** ArrayListValuedHashMap keyed by the element. */
    ArrayListValuedHashMap newALVHMWithMapKey(Object element) { ArrayListValuedHashMap m = new ArrayListValuedHashMap(); m.put(element,null); return m; }
    /** OrderedMapIterator positioned over a map keyed by the element. */
    OrderedMapIterator newOMIWithElement(Object element) { LinkedMap m = new LinkedMap(); m.put(element,null); return m.mapIterator(); }
    /** Placeholder: ResourceBundle cannot be built here, so this returns null by design. */
    ResourceBundle newRBWithMapKey(Object element) { return (ResourceBundle)null; }
    /** TreeMap keyed by the element. */
    SortedMap newTreeMapWithMapKey(Object element) { SortedMap m = new TreeMap(); m.put(element,null); return m; }
    /** PatriciaTrie keyed by the element. */
    Trie newTrieWithMapKey(Object element) { Trie m = new PatriciaTrie(); m.put(element,null); return m; }
    /** TiedMapEntry whose tied key is the element (backing map is empty). */
    TiedMapEntry newTMEWithMapKey(Object element) { return new TiedMapEntry(new TreeMap(),element); }
    // ------------------------------------------------------------------------
    // Write helpers (map values): each builder constructs a map-like container
    // whose VALUE is the given element (key is null), providing the write step
    // for MapValue-of flow tests.
    // ------------------------------------------------------------------------

    /** TreeBidiMap with the element as its value. */
    TreeBidiMap newTreeBidiMapWithMapValue(Object element) { TreeBidiMap m = new TreeBidiMap(); m.put(null,element); return m; }
    /** MyAbstractKeyValue with the element as its value. */
    MyAbstractKeyValue newMAKVWithMapValue(Object element) { return new MyAbstractKeyValue(null,element); }
    /** DefaultKeyValue with the element as its value. */
    DefaultKeyValue newDKVWithMapValue(Object element) { return new DefaultKeyValue(null,element); }
    /** HashedMap with the element as its value. */
    HashedMap newHashedMapWithMapValue(Object element) { HashedMap m = new HashedMap(); m.put(null,element); return m; }
    /** MyAbstractMapEntry with the element as its value. */
    MyAbstractMapEntry newMAMEWithMapValue(Object element) { return new MyAbstractMapEntry(null,element); }
    /** Decorator wrapping an entry whose value is the element. */
    MyAbstractMapEntryDecorator newMAMEDWithMapValue(Object element) { return new MyAbstractMapEntryDecorator(newMAMEWithMapValue(element)); }
    /** MultiValueMap with the element as a value. */
    MultiValueMap newMVMWithMapValue(Object element) { MultiValueMap m = new MultiValueMap(); m.put(null,element); return m; }
    /** MultiMap (via MultiValueMap) with the element as a value. */
    MultiMap newMMWithMapValue(Object element) { MultiMap m = new MultiValueMap(); m.put(null,element); return m; }
    /** ArrayListValuedHashMap with the element as a value. */
    ArrayListValuedHashMap newALVHMWithMapValue(Object element) { ArrayListValuedHashMap m = new ArrayListValuedHashMap(); m.put(null,element); return m; }
    /** HashSetValuedHashMap with the element as a value. */
    HashSetValuedHashMap newHSVHMWithMapValue(Object element) { HashSetValuedHashMap m = new HashSetValuedHashMap(); m.put(null,element); return m; }
    /** OrderedMapIterator positioned over a map whose value is the element. */
    OrderedMapIterator newOMIWithMapValue(Object element) { LinkedMap m = new LinkedMap(); m.put(null,element); return m.mapIterator(); }
    /** Placeholder: ResourceBundle cannot be built here, so this returns null by design. */
    ResourceBundle newRBWithMapValue(Object element) { return (ResourceBundle)null; }
    /** TreeMap with the element as its value. */
    SortedMap newTreeMapWithMapValue(Object element) { SortedMap m = new TreeMap(); m.put(null,element); return m; }
    /** PatriciaTrie with the element as its value. */
    Trie newTrieWithMapValue(Object element) { Trie m = new PatriciaTrie(); m.put(null,element); return m; }
    /** TiedMapEntry tied to a map whose value is the element. */
    TiedMapEntry newTMEWithMapValue(Object element) { return new TiedMapEntry(newTreeMapWithMapValue(element),null); }
    /** UnmodifiableMapEntry with the element as its value. */
    UnmodifiableMapEntry newUMEWithMapValue(Object element) { return new UnmodifiableMapEntry(null,element); }
    /** Flow source: the test harness treats every value returned from here as tainted. */
    Object source() { return null; }
    /** Flow sink: the "// $ hasValueFlow" annotations assert tainted values reaching this call. */
    void sink(Object o) { }
    /**
     * Concrete subclass of AbstractKeyValue that re-exposes the protected
     * setKey/setValue mutators so the flow tests can exercise the modeled
     * AbstractKeyValue API directly.
     */
    class MyAbstractKeyValue<K, V> extends AbstractKeyValue<K, V> {
        MyAbstractKeyValue(K key, V value) {
            super(key, value);
        }
        // Delegates to the protected AbstractKeyValue.setKey (returns the old value per its contract — TODO confirm against commons-collections4 javadoc).
        K mySetKey(final K key) {
            return super.setKey(key);
        }
        // Delegates to the protected AbstractKeyValue.setValue.
        V mySetValue(final V value) {
            return super.setValue(value);
        }
    }
    /**
     * Minimal concrete AbstractMapEntry. getKey/getValue are stubbed to null
     * because the tests only rely on the constructor's flow into the entry,
     * never on reading back through these overrides.
     */
    class MyAbstractMapEntry<K, V> extends AbstractMapEntry<K, V> {
        MyAbstractMapEntry(final K key, final V value) {
            super(key, value);
        }
        @Override
        public K getKey() { return null; }
        @Override
        public V getValue() { return null; }
    }
    /**
     * Concrete AbstractMapEntryDecorator that re-exposes the protected
     * getMapEntry accessor so tests can observe flow through the decorator.
     */
    class MyAbstractMapEntryDecorator<K, V> extends AbstractMapEntryDecorator<K, V> {
        MyAbstractMapEntryDecorator(final Map.Entry<K, V> entry) {
            super(entry);
        }
        // Delegates to the protected AbstractMapEntryDecorator.getMapEntry.
        Map.Entry<K, V> myGetMapEntry() {
            return super.getMapEntry();
        }
    }
    /**
     * Concrete SetUtils.SetView used as an addable set fixture.
     * createIterator is stubbed to null; the tests only need add() and the
     * protected-method exposure below, not actual iteration.
     */
    class MySetView<E> extends SetUtils.SetView<E> {
        MySetView() { super(); }
        @Override
        protected Iterator<E> createIterator() { return null; }
        // Re-exposes the protected createIterator for the flow tests.
        Iterator<E> myCreateIterator() { return createIterator(); }
    }
public void test() {
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;AbstractKeyValue;;;Argument[0];MapKey of Argument[-1];value"
AbstractKeyValue out = null;
Object in = source();
out = new MyAbstractKeyValue(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;AbstractKeyValue;;;Argument[1];MapValue of Argument[-1];value"
AbstractKeyValue out = null;
Object in = source();
out = new MyAbstractKeyValue(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;Argument[0];MapKey of Argument[-1];value"
DefaultKeyValue out = null;
Object in = source();
out.setKey(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;Argument[0];MapKey of Argument[-1];value"
MyAbstractKeyValue out = null;
Object in = source();
out.mySetKey(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
DefaultKeyValue in = newDKVWithMapValue((String)source());
out = in.setKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MyAbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.mySetKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MyAbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.mySetKey((Object)null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
UnmodifiableMapEntry out = null;
Object in = source();
out.setValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
DefaultKeyValue out = null;
Object in = source();
out.setValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
AbstractMapEntry out = null;
Object in = source();
out.setValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
MyAbstractKeyValue out = null;
Object in = source();
out.mySetValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
UnmodifiableMapEntry in = newUMEWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
DefaultKeyValue in = newDKVWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapEntry in = newMAMEWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapEntry in = newMAMEWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MyAbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.mySetValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MyAbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.mySetValue((Object)null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntry;true;AbstractMapEntry;;;Argument[0];MapKey of Argument[-1];value"
AbstractMapEntry out = null;
Object in = source();
out = new MyAbstractMapEntry(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntry;true;AbstractMapEntry;;;Argument[1];MapValue of Argument[-1];value"
AbstractMapEntry out = null;
Object in = source();
out = new MyAbstractMapEntry(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntryDecorator;true;AbstractMapEntryDecorator;;;MapKey of Argument[0];MapKey of Argument[-1];value"
AbstractMapEntryDecorator out = null;
Map.Entry<String,String> in = newMAMEWithMapKey((String)source());
out = new MyAbstractMapEntryDecorator(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntryDecorator;true;AbstractMapEntryDecorator;;;MapValue of Argument[0];MapValue of Argument[-1];value"
AbstractMapEntryDecorator out = null;
Map.Entry<String,String> in = newMAMEWithMapValue((String)source());
out = new MyAbstractMapEntryDecorator(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntryDecorator;true;getMapEntry;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
Map.Entry<String,String> out = null;
MyAbstractMapEntryDecorator in = newMAMEDWithMapKey((String)source());
out = in.myGetMapEntry();
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntryDecorator;true;getMapEntry;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
Map.Entry<String,String> out = null;
MyAbstractMapEntryDecorator in = newMAMEDWithMapValue((String)source());
out = in.myGetMapEntry();
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(Entry);;MapKey of Argument[0];MapKey of Argument[-1];value"
DefaultKeyValue out = null;
Map.Entry<String,String> in = newMAMEWithMapKey((String)source());
out = new DefaultKeyValue(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(Entry);;MapValue of Argument[0];MapValue of Argument[-1];value"
DefaultKeyValue out = null;
Map.Entry<String,String> in = newMAMEWithMapValue((String)source());
out = new DefaultKeyValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(KeyValue);;MapKey of Argument[0];MapKey of Argument[-1];value"
DefaultKeyValue out = null;
KeyValue in = newMAKVWithMapKey((String)source());
out = new DefaultKeyValue(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(KeyValue);;MapValue of Argument[0];MapValue of Argument[-1];value"
DefaultKeyValue out = null;
KeyValue in = newMAKVWithMapValue((String)source());
out = new DefaultKeyValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(Object,Object);;Argument[0];MapKey of Argument[-1];value"
DefaultKeyValue out = null;
Object in = source();
out = new DefaultKeyValue(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(Object,Object);;Argument[1];MapValue of Argument[-1];value"
DefaultKeyValue out = null;
Object in = source();
out = new DefaultKeyValue(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;toMapEntry;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
Map.Entry<String,String> out = null;
DefaultKeyValue in = newDKVWithMapKey((String)source());
out = in.toMapEntry();
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;toMapEntry;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
Map.Entry<String,String> out = null;
DefaultKeyValue in = newDKVWithMapValue((String)source());
out = in.toMapEntry();
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(Entry);;MapKey of Argument[0];MapKey of Argument[-1];value"
DefaultMapEntry out = null;
Map.Entry<String,String> in = newMAMEWithMapKey((String)source());
out = new DefaultMapEntry(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(Entry);;MapValue of Argument[0];MapValue of Argument[-1];value"
DefaultMapEntry out = null;
Map.Entry<String,String> in = newMAMEWithMapValue((String)source());
out = new DefaultMapEntry(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(KeyValue);;MapKey of Argument[0];MapKey of Argument[-1];value"
DefaultMapEntry out = null;
KeyValue in = newMAKVWithMapKey((String)source());
out = new DefaultMapEntry(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(KeyValue);;MapValue of Argument[0];MapValue of Argument[-1];value"
DefaultMapEntry out = null;
KeyValue in = newMAKVWithMapValue((String)source());
out = new DefaultMapEntry(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(Object,Object);;Argument[0];MapKey of Argument[-1];value"
DefaultMapEntry out = null;
Object in = source();
out = new DefaultMapEntry(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(Object,Object);;Argument[1];MapValue of Argument[-1];value"
DefaultMapEntry out = null;
Object in = source();
out = new DefaultMapEntry(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;TiedMapEntry;true;TiedMapEntry;;;Argument[1];MapKey of Argument[-1];value"
TiedMapEntry out = null;
Object in = source();
out = new TiedMapEntry(null, in);
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;TiedMapEntry;true;TiedMapEntry;;;MapValue of Argument[0];MapValue of Argument[-1];value"
TiedMapEntry out = null;
Map in = newTreeMapWithMapValue((String)source());
out = new TiedMapEntry(in, null);
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(Entry);;MapKey of Argument[0];MapKey of Argument[-1];value"
UnmodifiableMapEntry out = null;
Map.Entry<String,String> in = newMAMEWithMapKey((String)source());
out = new UnmodifiableMapEntry(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(Entry);;MapValue of Argument[0];MapValue of Argument[-1];value"
UnmodifiableMapEntry out = null;
Map.Entry<String,String> in = newMAMEWithMapValue((String)source());
out = new UnmodifiableMapEntry(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(KeyValue);;MapKey of Argument[0];MapKey of Argument[-1];value"
UnmodifiableMapEntry out = null;
KeyValue in = newMAKVWithMapKey((String)source());
out = new UnmodifiableMapEntry(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(KeyValue);;MapValue of Argument[0];MapValue of Argument[-1];value"
UnmodifiableMapEntry out = null;
KeyValue in = newMAKVWithMapValue((String)source());
out = new UnmodifiableMapEntry(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(Object,Object);;Argument[0];MapKey of Argument[-1];value"
UnmodifiableMapEntry out = null;
Object in = source();
out = new UnmodifiableMapEntry(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(Object,Object);;Argument[1];MapValue of Argument[-1];value"
UnmodifiableMapEntry out = null;
Object in = source();
out = new UnmodifiableMapEntry(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
TiedMapEntry in = newTMEWithMapKey((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
KeyValue in = newMAKVWithMapKey((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapEntryDecorator in = newMAMEDWithMapKey((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
AbstractKeyValue in = newMAKVWithMapKey((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
TiedMapEntry in = newTMEWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
KeyValue in = newMAKVWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapEntryDecorator in = newMAMEDWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
Map out = null;
Map in = (Map)source();
out = MapUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;fixedSizeMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.fixedSizeMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;fixedSizeMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.fixedSizeMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;fixedSizeSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.fixedSizeSortedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;fixedSizeSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.fixedSizeSortedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getMap;;;Argument[2];ReturnValue;value"
Map out = null;
Map in = (Map)source();
out = MapUtils.getMap(null, null, in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getMap;;;MapValue of Argument[0];ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getMap(in, null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getMap;;;MapValue of Argument[0];ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getMap(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getObject;;;Argument[2];ReturnValue;value"
Object out = null;
Object in = source();
out = MapUtils.getObject(null, null, in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getObject;;;MapValue of Argument[0];ReturnValue;value"
Object out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getObject(in, null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getObject;;;MapValue of Argument[0];ReturnValue;value"
Object out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getObject(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getString;;;Argument[2];ReturnValue;value"
String out = null;
String in = (String)source();
out = MapUtils.getString(null, null, in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getString;;;MapValue of Argument[0];ReturnValue;value"
String out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getString(in, null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getString;;;MapValue of Argument[0];ReturnValue;value"
String out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getString(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;invertMap;;;MapKey of Argument[0];MapValue of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.invertMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;invertMap;;;MapValue of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.invertMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;iterableMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.iterableMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;iterableMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.iterableMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;iterableSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableSortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.iterableSortedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;iterableSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableSortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.iterableSortedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazyMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.lazyMap(in, (Transformer)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazyMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.lazyMap(in, (Factory)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazyMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.lazyMap(in, (Transformer)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazyMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.lazyMap(in, (Factory)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazySortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.lazySortedMap(in, (Transformer)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazySortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.lazySortedMap(in, (Factory)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazySortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.lazySortedMap(in, (Transformer)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazySortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.lazySortedMap(in, (Factory)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.multiValueMap(in, (Factory)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.multiValueMap(in, (Class)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.multiValueMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.multiValueMap(in, (Factory)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.multiValueMap(in, (Class)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.multiValueMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;orderedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
OrderedMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.orderedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;orderedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
OrderedMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.orderedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;populateMap;(Map,Iterable,Transformer);;Element of Argument[1];MapValue of Argument[0];value"
Map out = null;
Iterable in = newVectorWithElement((String)source());
MapUtils.populateMap(out, in, (Transformer)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// Note it is tricky to get this to compile - the compiler thinks it is ambiguous
// which overload it should choose unless you put the generic types in correctly
// "org.apache.commons.collections4;MapUtils;true;populateMap;(MultiMap,Iterable,Transformer);;Element of Argument[1];MapValue of Argument[0];value"
MultiMap<Integer, String> out = null;
Iterable<String> in = newVectorWithElement((String)source());
MapUtils.populateMap(out, in, (Transformer<String, Integer>)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;predicatedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.predicatedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;predicatedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.predicatedMap(in, null, null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;predicatedSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.predicatedSortedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;predicatedSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.predicatedSortedMap(in, null, null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of Argument[1];MapKey of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement((String)source());
MapUtils.putAll(out, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of Argument[1];MapKey of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement((String)source());
out = MapUtils.putAll(null, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of Argument[1];MapValue of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement((String)source());
MapUtils.putAll(out, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of Argument[1];MapValue of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement((String)source());
out = MapUtils.putAll(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of ArrayElement of Argument[1];MapKey of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement(newWithArrayElement((String)source()));
MapUtils.putAll(out, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of ArrayElement of Argument[1];MapKey of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement(newWithArrayElement((String)source()));
out = MapUtils.putAll(null, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of ArrayElement of Argument[1];MapValue of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement(newWithArrayElement((String)source()));
MapUtils.putAll(out, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of ArrayElement of Argument[1];MapValue of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement(newWithArrayElement((String)source()));
out = MapUtils.putAll(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;MapKey of ArrayElement of Argument[1];MapKey of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement(newMAKVWithMapKey((String)source()));
MapUtils.putAll(out, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;MapKey of ArrayElement of Argument[1];MapKey of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement(newMAKVWithMapKey((String)source()));
out = MapUtils.putAll(null, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;MapValue of ArrayElement of Argument[1];MapValue of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement(newMAKVWithMapValue((String)source()));
MapUtils.putAll(out, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;MapValue of ArrayElement of Argument[1];MapValue of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement(newMAKVWithMapValue((String)source()));
out = MapUtils.putAll(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;safeAddToMap;;;Argument[1];MapKey of Argument[0];value"
Map out = null;
Object in = source();
MapUtils.safeAddToMap(out, in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;safeAddToMap;;;Argument[2];MapValue of Argument[0];value"
Map out = null;
Object in = source();
MapUtils.safeAddToMap(out, null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;synchronizedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.synchronizedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;synchronizedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.synchronizedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;synchronizedSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.synchronizedSortedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;synchronizedSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.synchronizedSortedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;toMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Map out = null;
ResourceBundle in = newRBWithMapKey((String)source());
out = MapUtils.toMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;toMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Map out = null;
ResourceBundle in = newRBWithMapValue((String)source());
out = MapUtils.toMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;transformedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.transformedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;transformedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.transformedMap(in, null, null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;transformedSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.transformedSortedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;transformedSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.transformedSortedMap(in, null, null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;unmodifiableMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.unmodifiableMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;unmodifiableMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.unmodifiableMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;unmodifiableSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.unmodifiableSortedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;unmodifiableSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.unmodifiableSortedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ArrayStack;true;peek;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
ArrayStack in = newArrayStackWithElement((String)source());
out = in.peek(0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ArrayStack;true;peek;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
ArrayStack in = newArrayStackWithElement((String)source());
out = in.peek();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ArrayStack;true;pop;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
ArrayStack in = newArrayStackWithElement((String)source());
out = in.pop();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ArrayStack;true;push;;;Argument[0];Element of Argument[-1];value"
ArrayStack out = null;
Object in = source();
out.push(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Bag;true;add;;;Argument[0];Element of Argument[-1];value"
Bag out = null;
Object in = source();
out.add(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Bag;true;add;;;Argument[0];Element of Argument[-1];value"
Bag out = null;
Object in = source();
out.add(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Bag;true;uniqueSet;;;Element of Argument[-1];Element of ReturnValue;value"
Set out = null;
Bag in = newTreeBagWithElement((String)source());
out = in.uniqueSet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;collectionBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.collectionBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;predicatedBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.predicatedBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;predicatedSortedBag;;;Element of Argument[0];Element of ReturnValue;value"
SortedBag out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = BagUtils.predicatedSortedBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;synchronizedBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.synchronizedBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;synchronizedSortedBag;;;Element of Argument[0];Element of ReturnValue;value"
SortedBag out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = BagUtils.synchronizedSortedBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;transformingBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.transformingBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;transformingSortedBag;;;Element of Argument[0];Element of ReturnValue;value"
SortedBag out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = BagUtils.transformingSortedBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;unmodifiableBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.unmodifiableBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;unmodifiableSortedBag;;;Element of Argument[0];Element of ReturnValue;value"
SortedBag out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = BagUtils.unmodifiableSortedBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BidiMap;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
BidiMap in = newTreeBidiMapWithMapKey((String)source());
out = in.getKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BidiMap;true;inverseBidiMap;;;MapKey of Argument[-1];MapValue of ReturnValue;value"
BidiMap out = null;
BidiMap in = newTreeBidiMapWithMapKey((String)source());
out = in.inverseBidiMap();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BidiMap;true;inverseBidiMap;;;MapValue of Argument[-1];MapKey of ReturnValue;value"
BidiMap out = null;
BidiMap in = newTreeBidiMapWithMapValue((String)source());
out = in.inverseBidiMap();
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BidiMap;true;removeValue;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
BidiMap in = newTreeBidiMapWithMapKey((String)source());
out = in.removeValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addAll;(Collection,Enumeration);;Element of Argument[1];Element of Argument[0];value"
Collection out = null;
Enumeration in = newEnumerationWithElement((String)source());
CollectionUtils.addAll(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addAll;(Collection,Iterable);;Element of Argument[1];Element of Argument[0];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.addAll(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addAll;(Collection,Iterator);;Element of Argument[1];Element of Argument[0];value"
Collection out = null;
Iterator in = newListIteratorWithElement((String)source());
CollectionUtils.addAll(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addAll;(Collection,Object[]);;ArrayElement of Argument[1];Element of Argument[0];value"
Collection out = null;
Object[] in = newWithArrayElement((String)source());
CollectionUtils.addAll(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addIgnoreNull;;;Argument[1];Element of Argument[0];value"
Collection out = null;
Object in = source();
CollectionUtils.addIgnoreNull(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(in, null, null, false);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(in, (Iterable)null, false);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(in, (Iterable)null, (Comparator)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(null, in, null, false);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate((Iterable)null, in, false);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate((Iterable)null, in, (Comparator)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;disjunction;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.disjunction(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;disjunction;;;Element of Argument[1];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.disjunction(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
Collection out = null;
Collection in = (Collection)source();
out = CollectionUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;extractSingleton;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.extractSingleton(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;find;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.find(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Iterator,int);;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterator in = newListIteratorWithElement((String)source());
out = CollectionUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Iterable,int);;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Map,int);;MapKey of Argument[0];MapKey of ReturnValue;value"
Map.Entry out = null;
Map in = newTreeMapWithMapKey((String)source());
out = CollectionUtils.get(in, 0);
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Map,int);;MapValue of Argument[0];MapValue of ReturnValue;value"
Map.Entry out = null;
Map in = newTreeMapWithMapValue((String)source());
out = CollectionUtils.get(in, 0);
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Object,int);;ArrayElement of Argument[0];ReturnValue;value"
Object out = null;
Object in = newWithArrayElement((String)source());
out = CollectionUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Object,int);;Element of Argument[0];ReturnValue;value"
Object out = null;
Object in = newVectorWithElement((String)source());
out = CollectionUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Object,int);;MapKey of Argument[0];MapKey of ReturnValue;value"
Map.Entry out = null;
Object in = newTreeMapWithMapKey((String)source());
out = (Map.Entry)CollectionUtils.get(in, 0);
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Object,int);;MapValue of Argument[0];MapValue of ReturnValue;value"
Map.Entry out = null;
Object in = newTreeMapWithMapValue((String)source());
out = (Map.Entry)CollectionUtils.get(in, 0);
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;getCardinalityMap;;;Element of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.getCardinalityMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;intersection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.intersection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;intersection;;;Element of Argument[1];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.intersection(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;permutations;;;Element of Argument[0];Element of Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.permutations(in);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;predicatedCollection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.predicatedCollection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;removeAll;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.removeAll(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;removeAll;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.removeAll(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;retainAll;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.retainAll(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;retainAll;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.retainAll(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of Argument[2];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.select(in, null, out, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of Argument[2];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.select(in, null, out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of Argument[3];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.select(in, null, null, out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.select(in, null, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.select(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.select(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;selectRejected;;;Element of Argument[0];Element of Argument[2];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.selectRejected(in, null, out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;selectRejected;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.selectRejected(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;selectRejected;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.selectRejected(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;subtract;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.subtract(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;subtract;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.subtract(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;synchronizedCollection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.synchronizedCollection(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;transformingCollection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.transformingCollection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;union;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.union(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;union;;;Element of Argument[1];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.union(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;unmodifiableCollection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.unmodifiableCollection(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;EnumerationUtils;true;get;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Enumeration in = newEnumerationWithElement((String)source());
out = EnumerationUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;EnumerationUtils;true;toList;(Enumeration);;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Enumeration in = newEnumerationWithElement((String)source());
out = EnumerationUtils.toList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;EnumerationUtils;true;toList;(StringTokenizer);;Argument[0];Element of ReturnValue;taint"
List out = null;
StringTokenizer in = (StringTokenizer)source();
out = EnumerationUtils.toList(in);
sink(getElement(out)); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;append;(Iterable);;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.append((Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;append;(Iterable);;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
FluentIterable instance = null;
out = instance.append(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;append;(Object[]);;ArrayElement of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Object in = source();
FluentIterable instance = null;
out = instance.append(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;append;(Object[]);;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.append();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;asEnumeration;;;Element of Argument[-1];Element of ReturnValue;value"
Enumeration out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.asEnumeration();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;collate;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.collate(null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;collate;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.collate(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
FluentIterable instance = null;
out = instance.collate(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
FluentIterable instance = null;
out = instance.collate(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;copyInto;;;Element of Argument[-1];Element of Argument[0];value"
Collection out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
in.copyInto(out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;eval;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.eval();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;filter;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.filter(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;get;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.get(0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;limit;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.limit(0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;loop;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.loop();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;of;(Iterable);;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
out = FluentIterable.of(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;of;(Object);;Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Object in = source();
out = FluentIterable.of(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;of;(Object[]);;ArrayElement of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Object[] in = newWithArrayElement((String)source());
out = FluentIterable.of(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;reverse;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.reverse();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;skip;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.skip(0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;toArray;;;Element of Argument[-1];ArrayElement of ReturnValue;value"
Object[] out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.toArray(null);
sink(getArrayElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;toList;;;Element of Argument[-1];Element of ReturnValue;value"
List out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.toList();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;unique;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.unique();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;unmodifiable;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.unmodifiable();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;zip;(Iterable);;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.zip((Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;zip;(Iterable);;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
FluentIterable instance = null;
out = instance.zip(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;zip;(Iterable[]);;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.zip((Iterable)null, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;zip;(Iterable[]);;Element of ArrayElement of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newVectorWithElement((String)source());
FluentIterable instance = null;
out = instance.zip(in, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapKey of Argument[-1];MapKey of Element of ReturnValue;value"
Set<Map.Entry> out = null;
MultiValueMap in = newMVMWithMapKey((String)source());
out = in.entrySet();
sink(getMapKeyFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapKey of Argument[-1];MapKey of Element of ReturnValue;value"
Set<Map.Entry> out = null;
Get in = newTrieWithMapKey((String)source());
out = in.entrySet();
sink(getMapKeyFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapKey of Argument[-1];MapKey of Element of ReturnValue;value"
Set<Map.Entry> out = null;
AbstractMapDecorator in = newMVMWithMapKey((String)source());
out = in.entrySet();
sink(getMapKeyFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapValue of Argument[-1];MapValue of Element of ReturnValue;value"
Set<Map.Entry> out = null;
MultiValueMap in = newMVMWithMapValue((String)source());
out = in.entrySet();
sink(getMapValueFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapValue of Argument[-1];MapValue of Element of ReturnValue;value"
Set<Map.Entry> out = null;
Get in = newTrieWithMapValue((String)source());
out = in.entrySet();
sink(getMapValueFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapValue of Argument[-1];MapValue of Element of ReturnValue;value"
Set<Map.Entry> out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.entrySet();
sink(getMapValueFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;get;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.get(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;get;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
Get in = newTrieWithMapValue((String)source());
out = in.get(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;get;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.get(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;keySet;();;MapKey of Argument[-1];Element of ReturnValue;value"
Set out = null;
Get in = newTrieWithMapKey((String)source());
out = in.keySet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;keySet;();;MapKey of Argument[-1];Element of ReturnValue;value"
Set out = null;
AbstractMapDecorator in = newMVMWithMapKey((String)source());
out = in.keySet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;remove;(Object);;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.remove(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;remove;(Object);;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
Get in = newTrieWithMapValue((String)source());
out = in.remove(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;remove;(Object);;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.remove(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Set out = null;
BidiMap in = newTreeBidiMapWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValueMap in = newMVMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
Get in = newTrieWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
OrderedMapIterator out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.mapIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
MapIterator out = null;
IterableGet in = newHashedMapWithMapKey((String)source());
out = in.mapIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
MapIterator out = null;
AbstractIterableMap in = newMVMWithMapKey((String)source());
out = in.mapIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
OrderedMapIterator out = null;
OrderedMap in = newTreeBidiMapWithMapValue((String)source());
out = in.mapIterator();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
MapIterator out = null;
IterableGet in = newHashedMapWithMapValue((String)source());
out = in.mapIterator();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
MapIterator out = null;
AbstractIterableMap in = newMVMWithMapValue((String)source());
out = in.mapIterator();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;boundedIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.boundedIterable(in, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable[]);;Element of ArrayElement of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(in, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable);;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable);;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable);;Element of Argument[2];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable,Iterable);;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(in, null, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable,Iterable);;Element of Argument[2];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable,Iterable);;Element of Argument[3];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;collatedIterable;(Comparator,Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.collatedIterable(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;collatedIterable;(Comparator,Iterable,Iterable);;Element of Argument[2];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.collatedIterable(null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;collatedIterable;(Iterable,Iterable);;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.collatedIterable(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;collatedIterable;(Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.collatedIterable(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
Iterable out = null;
Iterable in = (Iterable)source();
out = IterableUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;filteredIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.filteredIterable(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;find;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.find(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;first;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.first(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;get;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;loopingIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.loopingIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;partition;;;Element of Argument[0];Element of Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.partition(in, (Factory)null, (Predicate)null, (Predicate)null);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;partition;;;Element of Argument[0];Element of Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.partition(in, (Predicate)null, (Predicate)null);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;partition;;;Element of Argument[0];Element of Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.partition(in, (Predicate)null, (Predicate)null);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;reversedIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.reversedIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;skippingIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.skippingIterable(in, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;toList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.toList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;toString;;;Argument[2];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IterableUtils.toString(null, null, in, null, null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;toString;;;Argument[3];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IterableUtils.toString(null, null, null, in, null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;toString;;;Argument[4];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IterableUtils.toString(null, null, null, null, in);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;uniqueIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.uniqueIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;unmodifiableIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.unmodifiableIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;zippingIterable;(Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.zippingIterable((Iterable)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;zippingIterable;(Iterable,Iterable[]);;Element of ArrayElement of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.zippingIterable((Iterable)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;zippingIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.zippingIterable(in, (Iterable)null, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;zippingIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.zippingIterable(in, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object[] in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in, 0, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object[] in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = source();
out = IteratorUtils.arrayIterator(in, (Object)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in, 0, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object[] in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in, 0, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object[] in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object in = source();
out = IteratorUtils.arrayListIterator(in, (Object)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in, 0, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asEnumeration;;;Element of Argument[0];Element of ReturnValue;value"
Enumeration out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.asEnumeration(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.asIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Enumeration in = newEnumerationWithElement((String)source());
out = IteratorUtils.asIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Enumeration in = newEnumerationWithElement((String)source());
out = IteratorUtils.asIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asMultipleUseIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.asMultipleUseIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;boundedIterator;;;Element of Argument[0];Element of ReturnValue;value"
BoundedIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.boundedIterator(in, 0L, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;boundedIterator;;;Element of Argument[0];Element of ReturnValue;value"
BoundedIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.boundedIterator(in, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;chainedIterator;(Collection);;Element of Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Collection in = newVectorWithElement(newVectorWithElement((String)source()));
out = IteratorUtils.chainedIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;chainedIterator;(Iterator[]);;Element of ArrayElement of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.chainedIterator(in, (Iterator)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;chainedIterator;(Iterator,Iterator);;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.chainedIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;chainedIterator;(Iterator,Iterator);;Element of Argument[1];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.chainedIterator(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;collatedIterator;(Comparator,Collection);;Element of Element of Argument[1];Element of ReturnValue;value"
Iterator out = null;
Collection in = newVectorWithElement(newVectorWithElement((String)source()));
out = IteratorUtils.collatedIterator((Comparator)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;collatedIterator;(Comparator,Iterator[]);;Element of ArrayElement of Argument[1];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.collatedIterator((Comparator)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;collatedIterator;(Comparator,Iterator,Iterator);;Element of Argument[1];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.collatedIterator(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;collatedIterator;(Comparator,Iterator,Iterator);;Element of Argument[2];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.collatedIterator(null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;filteredIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.filteredIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;filteredListIterator;;;Element of Argument[0];Element of ReturnValue;value"
ListIterator out = null;
ListIterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.filteredListIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;find;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.find(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;first;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.first(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;get;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;getIterator;;;Argument[0];Element of ReturnValue;value"
Iterator out = null;
Object in = source();
out = IteratorUtils.getIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;getIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.getIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;getIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Object in = newVectorWithElement((String)source());
out = IteratorUtils.getIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;getIterator;;;MapValue of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Map in = newTreeMapWithMapValue((String)source());
out = IteratorUtils.getIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;loopingIterator;;;Element of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Collection in = newVectorWithElement((String)source());
out = IteratorUtils.loopingIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;loopingListIterator;;;Element of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
List in = newVectorWithElement((String)source());
out = IteratorUtils.loopingListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;peekingIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.peekingIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;pushbackIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.pushbackIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;singletonIterator;;;Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = source();
out = IteratorUtils.singletonIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;singletonListIterator;;;Argument[0];Element of ReturnValue;value"
ListIterator out = null;
Object in = source();
out = IteratorUtils.singletonListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;skippingIterator;;;Element of Argument[0];Element of ReturnValue;value"
SkippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.skippingIterator(in, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toArray;;;Element of Argument[0];ArrayElement of ReturnValue;value"
Object[] out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toArray(in, null);
sink(getArrayElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toArray;;;Element of Argument[0];ArrayElement of ReturnValue;value"
Object[] out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toArray(in);
sink(getArrayElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toList(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toListIterator;;;Element of Argument[0];Element of ReturnValue;value"
ListIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toString;;;Argument[2];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IteratorUtils.toString(null, null, in, null, null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toString;;;Argument[3];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IteratorUtils.toString(null, null, null, in, null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toString;;;Argument[4];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IteratorUtils.toString(null, null, null, null, in);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;unmodifiableIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.unmodifiableIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;unmodifiableListIterator;;;Element of Argument[0];Element of ReturnValue;value"
ListIterator out = null;
ListIterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.unmodifiableListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;unmodifiableMapIterator;;;Element of Argument[0];Element of ReturnValue;value"
MapIterator out = null;
MapIterator in = newOMIWithElement((String)source());
out = IteratorUtils.unmodifiableMapIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;unmodifiableMapIterator;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MapIterator out = null;
MapIterator in = newOMIWithMapValue((String)source());
out = IteratorUtils.unmodifiableMapIterator(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator[]);;Element of ArrayElement of Argument[0];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator);;Element of Argument[0];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator);;Element of Argument[1];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator,Iterator);;Element of Argument[0];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator,Iterator);;Element of Argument[1];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator,Iterator);;Element of Argument[2];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;defaultIfNull;;;Argument[0];ReturnValue;value"
List out = null;
List in = (List)source();
out = ListUtils.defaultIfNull(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;defaultIfNull;;;Argument[1];ReturnValue;value"
List out = null;
List in = (List)source();
out = ListUtils.defaultIfNull(null, in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
List out = null;
List in = (List)source();
out = ListUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;fixedSizeList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.fixedSizeList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;intersection;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.intersection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;intersection;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.intersection(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;lazyList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.lazyList(in, (Transformer)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;lazyList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.lazyList(in, (Factory)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(CharSequence,CharSequence);;Argument[0];ReturnValue;taint"
String out = null;
CharSequence in = (CharSequence)source();
out = ListUtils.longestCommonSubsequence(in, (CharSequence)null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(CharSequence,CharSequence);;Argument[1];ReturnValue;taint"
String out = null;
CharSequence in = (CharSequence)source();
out = ListUtils.longestCommonSubsequence((CharSequence)null, in);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(List,List);;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.longestCommonSubsequence(in, (List)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(List,List);;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.longestCommonSubsequence((List)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(List,List,Equator);;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.longestCommonSubsequence(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(List,List,Equator);;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.longestCommonSubsequence(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;partition;;;Element of Argument[0];Element of Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.partition(in, 0);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;predicatedList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.predicatedList(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;removeAll;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Collection in = newVectorWithElement((String)source());
out = ListUtils.removeAll(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;retainAll;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Collection in = newVectorWithElement((String)source());
out = ListUtils.retainAll(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;select;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Collection in = newVectorWithElement((String)source());
out = ListUtils.select(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;selectRejected;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Collection in = newVectorWithElement((String)source());
out = ListUtils.selectRejected(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;subtract;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.subtract(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;sum;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.sum(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;sum;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.sum(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;synchronizedList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.synchronizedList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;transformedList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.transformedList(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;union;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.union(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;union;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.union(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;unmodifiableList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.unmodifiableList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapIterator;true;getKey;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
MapIterator in = newOMIWithElement((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapIterator;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MapIterator in = newOMIWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapIterator;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
MapIterator out = null;
Object in = source();
out.setValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapIterator;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MapIterator in = newOMIWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;get;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiMap in = newMMWithMapValue((String)source());
out = (Collection)in.get(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiValueMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;put;;;Argument[1];Element of MapValue of Argument[-1];value"
MultiValueMap out = null;
Object in = source();
out.put(null, in);
sink(getElement((Collection)getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;put;;;Argument[1];Element of MapValue of Argument[-1];value"
MultiMap out = null;
Object in = source();
out.put(null, in);
sink(getElement((Collection)getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;values;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValueMap in = newMVMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;values;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = (MultiValuedMap)source();
out = MultiMapUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;getCollection;;;MapValue of Argument[0];ReturnValue;value"
Collection out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.getCollection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;getValuesAsBag;;;MapValue of Argument[0];ReturnValue;value"
Bag out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.getValuesAsBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;getValuesAsList;;;MapValue of Argument[0];ReturnValue;value"
List out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.getValuesAsList(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;getValuesAsSet;;;MapValue of Argument[0];ReturnValue;value"
Set out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.getValuesAsSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;transformedMultiValuedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapKey((String)source());
out = MultiMapUtils.transformedMultiValuedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;transformedMultiValuedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.transformedMultiValuedMap(in, null, null);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;unmodifiableMultiValuedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapKey((String)source());
out = MultiMapUtils.unmodifiableMultiValuedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;unmodifiableMultiValuedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.unmodifiableMultiValuedMap(in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet$Entry;true;getElement;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
MultiSet.Entry in = newMultiSetEntryWithElement((String)source());
out = in.getElement();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet;true;add;;;Argument[0];Element of Argument[-1];value"
MultiSet out = null;
Object in = source();
out.add(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet;true;add;;;Argument[0];Element of Argument[-1];value"
MultiSet out = null;
Object in = source();
out.add(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet;true;entrySet;;;Element of Argument[-1];Element of Element of ReturnValue;value"
Set<MultiSet.Entry> out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = in.entrySet();
sink(getElement(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet;true;uniqueSet;;;Element of Argument[-1];Element of ReturnValue;value"
Set out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = in.uniqueSet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSetUtils;true;predicatedMultiSet;;;Element of Argument[0];Element of ReturnValue;value"
MultiSet out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = MultiSetUtils.predicatedMultiSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSetUtils;true;synchronizedMultiSet;;;Element of Argument[0];Element of ReturnValue;value"
MultiSet out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = MultiSetUtils.synchronizedMultiSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSetUtils;true;unmodifiableMultiSet;;;Element of Argument[0];Element of ReturnValue;value"
MultiSet out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = MultiSetUtils.unmodifiableMultiSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;asMap;;;Element of MapValue of Argument[-1];Element of MapValue of ReturnValue;value"
Map out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.asMap();
sink(getElement((Collection)getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;asMap;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
Map out = null;
MultiValuedMap in = newALVHMWithMapKey((String)source());
out = in.asMap();
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;get;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Set out = null;
SetValuedMap in = newHSVHMWithMapValue((String)source());
out = in.get(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;get;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
List out = null;
ListValuedMap in = newALVHMWithMapValue((String)source());
out = in.get(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;get;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.get(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;keySet;;;MapKey of Argument[-1];Element of ReturnValue;value"
Set out = null;
MultiValuedMap in = newALVHMWithMapKey((String)source());
out = in.keySet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;keys;;;MapKey of Argument[-1];Element of ReturnValue;value"
MultiSet out = null;
MultiValuedMap in = newALVHMWithMapKey((String)source());
out = in.keys();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;mapIterator;;;Element of MapValue of Argument[-1];MapValue of ReturnValue;value"
MapIterator out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.mapIterator();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
MapIterator out = null;
MultiValuedMap in = newALVHMWithMapKey((String)source());
out = in.mapIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiValuedMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;put;;;Argument[1];Element of MapValue of Argument[-1];value"
MultiValuedMap out = null;
Object in = source();
out.put(null, in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(Map);;MapKey of Argument[0];MapKey of Argument[-1];value"
MultiValuedMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out.putAll(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(Map);;MapValue of Argument[0];Element of MapValue of Argument[-1];value"
MultiValuedMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out.putAll(in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(MultiValuedMap);;Element of MapValue of Argument[0];Element of MapValue of Argument[-1];value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out.putAll(in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(MultiValuedMap);;MapKey of Argument[0];MapKey of Argument[-1];value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapKey((String)source());
out.putAll(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(Object,Iterable);;Argument[0];MapKey of Argument[-1];value"
MultiValuedMap out = null;
Object in = source();
out.putAll(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(Object,Iterable);;Element of Argument[1];Element of MapValue of Argument[-1];value"
MultiValuedMap out = null;
Iterable in = newFluentIterableWithElement((String)source());
out.putAll(null, in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;remove;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Set out = null;
SetValuedMap in = newHSVHMWithMapValue((String)source());
out = in.remove(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;remove;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
List out = null;
ListValuedMap in = newALVHMWithMapValue((String)source());
out = in.remove(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;remove;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.remove(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;values;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedIterator;true;previous;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMapIterator in = newOMIWithElement((String)source());
out = in.previous();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedIterator;true;previous;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
OrderedIterator in = newOMIWithElement((String)source());
out = in.previous();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedMap;true;firstKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.firstKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedMap;true;lastKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.lastKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedMap;true;nextKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.nextKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedMap;true;previousKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.previousKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
Put out = null;
Object in = source();
out.put(in, null);
sink(getMapKeyFromPut(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiValueMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
BidiMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
AbstractMapDecorator out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
Put out = null;
Object in = source();
out.put(null, in);
sink(getMapValueFromPut(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
MultiValueMap out = null;
Object in = source();
out.put(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
MultiMap out = null;
Object in = source();
out.put(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
BidiMap out = null;
Object in = source();
out.put(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
AbstractMapDecorator out = null;
Object in = source();
out.put(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
Put in = newHashedMapWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MultiValueMap in = newMVMWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
BidiMap in = newTreeBidiMapWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapKey of Argument[0];MapKey of Argument[-1];value"
Put out = null;
Map in = newTreeMapWithMapKey((String)source());
out.putAll(in);
sink(getMapKeyFromPut(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapKey of Argument[0];MapKey of Argument[-1];value"
MultiValueMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out.putAll(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapKey of Argument[0];MapKey of Argument[-1];value"
AbstractMapDecorator out = null;
Map in = newTreeMapWithMapKey((String)source());
out.putAll(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapValue of Argument[0];MapValue of Argument[-1];value"
Put out = null;
Map in = newTreeMapWithMapValue((String)source());
out.putAll(in);
sink(getMapValueFromPut(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapValue of Argument[0];MapValue of Argument[-1];value"
MultiValueMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out.putAll(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapValue of Argument[0];MapValue of Argument[-1];value"
AbstractMapDecorator out = null;
Map in = newTreeMapWithMapValue((String)source());
out.putAll(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;QueueUtils;true;predicatedQueue;;;Element of Argument[0];Element of ReturnValue;value"
Queue out = null;
Queue in = newQueueWithElement((String)source());
out = QueueUtils.predicatedQueue(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;QueueUtils;true;synchronizedQueue;;;Element of Argument[0];Element of ReturnValue;value"
Queue out = null;
Queue in = newQueueWithElement((String)source());
out = QueueUtils.synchronizedQueue(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;QueueUtils;true;transformingQueue;;;Element of Argument[0];Element of ReturnValue;value"
Queue out = null;
Queue in = newQueueWithElement((String)source());
out = QueueUtils.transformingQueue(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;QueueUtils;true;unmodifiableQueue;;;Element of Argument[0];Element of ReturnValue;value"
Queue out = null;
Queue in = newQueueWithElement((String)source());
out = QueueUtils.unmodifiableQueue(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils$SetView;true;copyInto;;;Element of Argument[-1];Element of Argument[0];value"
Set out = null;
SetUtils.SetView in = newSetViewWithElement((String)source());
in.copyInto(out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils$SetView;true;createIterator;;;Element of Argument[-1];Element of ReturnValue;value"
Iterator out = null;
MySetView in = newSetViewWithElement((String)source());
out = in.myCreateIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils$SetView;true;toSet;;;Element of Argument[-1];Element of ReturnValue;value"
Set out = null;
MySetView in = newSetViewWithElement((String)source());
out = in.toSet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;difference;;;Element of Argument[0];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.difference(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;disjunction;;;Element of Argument[0];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.disjunction(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;disjunction;;;Element of Argument[1];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.disjunction(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
Set out = null;
Set in = (Set)source();
out = SetUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;hashSet;;;ArrayElement of Argument[0];Element of ReturnValue;value"
HashSet out = null;
Object in = source();
out = SetUtils.hashSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;intersection;;;Element of Argument[0];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.intersection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;intersection;;;Element of Argument[1];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.intersection(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;orderedSet;;;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.orderedSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;predicatedNavigableSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
NavigableSet in = newTreeSetWithElement((String)source());
out = SetUtils.predicatedNavigableSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;predicatedSet;;;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.predicatedSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;predicatedSortedSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
SortedSet in = newTreeSetWithElement((String)source());
out = SetUtils.predicatedSortedSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;synchronizedSet;;;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.synchronizedSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;synchronizedSortedSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
SortedSet in = newTreeSetWithElement((String)source());
out = SetUtils.synchronizedSortedSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;transformedNavigableSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
NavigableSet in = newTreeSetWithElement((String)source());
out = SetUtils.transformedNavigableSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;transformedSet;;;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.transformedSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;transformedSortedSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
SortedSet in = newTreeSetWithElement((String)source());
out = SetUtils.transformedSortedSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;union;;;Element of Argument[0];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.union(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;union;;;Element of Argument[1];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.union(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;unmodifiableNavigableSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
NavigableSet in = newTreeSetWithElement((String)source());
out = SetUtils.unmodifiableNavigableSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;unmodifiableSet;(Object[]);;ArrayElement of Argument[0];Element of ReturnValue;value"
Set out = null;
Object in = source();
out = SetUtils.unmodifiableSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;unmodifiableSet;(Set);;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.unmodifiableSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;unmodifiableSortedSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
SortedSet in = newTreeSetWithElement((String)source());
out = SetUtils.unmodifiableSortedSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SortedBag;true;first;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = in.first();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SortedBag;true;last;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = in.last();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SplitMapUtils;true;readableMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Get in = newHashedMapWithMapKey((String)source());
out = SplitMapUtils.readableMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SplitMapUtils;true;readableMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Get in = newHashedMapWithMapValue((String)source());
out = SplitMapUtils.readableMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SplitMapUtils;true;writableMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Put in = newHashedMapWithMapKey((String)source());
out = SplitMapUtils.writableMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SplitMapUtils;true;writableMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Map out = null;
Put in = newHashedMapWithMapValue((String)source());
out = SplitMapUtils.writableMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Trie;true;prefixMap;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
SortedMap out = null;
Trie in = newTrieWithMapKey((String)source());
out = in.prefixMap(null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Trie;true;prefixMap;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
SortedMap out = null;
Trie in = newTrieWithMapValue((String)source());
out = in.prefixMap(null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;TrieUtils;true;unmodifiableTrie;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Trie out = null;
Trie in = newTrieWithMapKey((String)source());
out = TrieUtils.unmodifiableTrie(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;TrieUtils;true;unmodifiableTrie;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Trie out = null;
Trie in = newTrieWithMapValue((String)source());
out = TrieUtils.unmodifiableTrie(in);
sink(getMapValue(out)); // $ hasValueFlow
}
}
}
|
java/ql/test/library-tests/frameworks/apache-collections/Test.java
|
package generatedtest;
import java.util.Collection;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Queue;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;
import org.apache.commons.collections4.ArrayStack;
import org.apache.commons.collections4.Bag;
import org.apache.commons.collections4.BagUtils;
import org.apache.commons.collections4.BidiMap;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.EnumerationUtils;
import org.apache.commons.collections4.Factory;
import org.apache.commons.collections4.FluentIterable;
import org.apache.commons.collections4.Get;
import org.apache.commons.collections4.IterableGet;
import org.apache.commons.collections4.IterableMap;
import org.apache.commons.collections4.IterableSortedMap;
import org.apache.commons.collections4.IterableUtils;
import org.apache.commons.collections4.IteratorUtils;
import org.apache.commons.collections4.KeyValue;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.collections4.ListValuedMap;
import org.apache.commons.collections4.MapIterator;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.collections4.MultiMap;
import org.apache.commons.collections4.MultiMapUtils;
import org.apache.commons.collections4.MultiSet;
import org.apache.commons.collections4.MultiSetUtils;
import org.apache.commons.collections4.MultiValuedMap;
import org.apache.commons.collections4.OrderedIterator;
import org.apache.commons.collections4.OrderedMap;
import org.apache.commons.collections4.OrderedMapIterator;
import org.apache.commons.collections4.Predicate;
import org.apache.commons.collections4.Put;
import org.apache.commons.collections4.QueueUtils;
import org.apache.commons.collections4.ResettableIterator;
import org.apache.commons.collections4.ResettableListIterator;
import org.apache.commons.collections4.SetUtils;
import org.apache.commons.collections4.SetValuedMap;
import org.apache.commons.collections4.SortedBag;
import org.apache.commons.collections4.SplitMapUtils;
import org.apache.commons.collections4.Transformer;
import org.apache.commons.collections4.Trie;
import org.apache.commons.collections4.TrieUtils;
import org.apache.commons.collections4.bag.TreeBag;
import org.apache.commons.collections4.bidimap.TreeBidiMap;
import org.apache.commons.collections4.iterators.BoundedIterator;
import org.apache.commons.collections4.iterators.IteratorEnumeration;
import org.apache.commons.collections4.iterators.SkippingIterator;
import org.apache.commons.collections4.iterators.ZippingIterator;
import org.apache.commons.collections4.keyvalue.AbstractKeyValue;
import org.apache.commons.collections4.keyvalue.AbstractMapEntry;
import org.apache.commons.collections4.keyvalue.AbstractMapEntryDecorator;
import org.apache.commons.collections4.keyvalue.DefaultKeyValue;
import org.apache.commons.collections4.keyvalue.DefaultMapEntry;
import org.apache.commons.collections4.keyvalue.TiedMapEntry;
import org.apache.commons.collections4.keyvalue.UnmodifiableMapEntry;
import org.apache.commons.collections4.map.AbstractIterableMap;
import org.apache.commons.collections4.map.AbstractMapDecorator;
import org.apache.commons.collections4.map.HashedMap;
import org.apache.commons.collections4.map.LinkedMap;
import org.apache.commons.collections4.map.MultiValueMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
import org.apache.commons.collections4.multimap.HashSetValuedHashMap;
import org.apache.commons.collections4.multiset.HashMultiSet;
import org.apache.commons.collections4.trie.PatriciaTrie;
// Test case generated by GenerateFlowTestCase.ql
public class Test {
// --- Read-side helpers -----------------------------------------------------
// Pull a single element / key / value back out of a container so generated
// test cases can pass it to sink(). Each helper models one access-path step
// ("Element of ...", "MapKey of ...", "MapValue of ...") from the CSV rows
// quoted in the test bodies. These tests are analyzed statically, never run,
// so only the call shapes matter, not runtime values.
static Object getArrayElement(Object[] container) { return container[0]; }
static Object getElement(Enumeration container) { return container.nextElement(); }
static <T> T getElement(Iterable<T> container) { return container.iterator().next(); }
static Object getElement(Iterator container) { return container.next(); }
static Object getElement(MultiSet.Entry container) { return container.getElement(); }
static Object getMapKey(AbstractKeyValue container) { return container.getKey(); }
static Object getMapKeyFromEntry(Map.Entry container) { return container.getKey(); }
static Object getMapKey(AbstractMapEntryDecorator container) { return container.getKey(); }
static Object getMapKey(Map container) { return container.keySet().iterator().next(); }
static Object getMapKey(MultiValuedMap container) { return container.keySet().iterator().next(); }
// Put exposes no read API; the Put implementations used by the tests are also Maps.
static Object getMapKeyFromPut(Put container) { return getMapKey((Map)container); }
static Object getMapValue(AbstractKeyValue container) { return container.getValue(); }
static Object getMapValueFromEntry(Map.Entry container) { return container.getValue(); }
static Object getMapValue(AbstractMapEntryDecorator container) { return container.getValue(); }
// Generated fixtures store tainted values under the null key, hence get(null).
static Object getMapValue(Map container) { return container.get(null); }
static Object getMapValue(MapIterator container) { return container.getValue(); }
static Collection getMapValue(MultiValuedMap container) { return container.get(null); }
static Object getMapValueFromPut(Put container) { return getMapValue((Map)container); }
// --- Write-side factories: element containers ------------------------------
// Each factory builds a container of one tested collection type holding a
// single (tainted) element, so that "Element of Argument[...]" flow can be
// exercised by the generated test cases below.
Object[] newWithArrayElement(Object element) { return new Object[] {element}; }
ArrayStack<String> newArrayStackWithElement(String element) { ArrayStack<String> a = new ArrayStack<String>(); a.push(element); return a; }
// Wraps a Vector's iterator so Enumeration-based models can be tested.
Enumeration<String> newEnumerationWithElement(String element) { return new IteratorEnumeration<String>(newVectorWithElement(element).iterator()); }
FluentIterable<String> newFluentIterableWithElement(String element) { return FluentIterable.of(element); }
ListIterator<String> newListIteratorWithElement(String element) { return newVectorWithElement(element).listIterator(); }
MultiSet.Entry<String> newMultiSetEntryWithElement(String element) { return getElement(newMultiSetWithElement(element).entrySet()); }
MultiSet<String> newMultiSetWithElement(String element) { HashMultiSet<String> h = new HashMultiSet<String>(); h.add(element); return h; }
Queue<String> newQueueWithElement(String element) { LinkedList<String> q = new LinkedList<String>(); q.add(element); return q; }
MySetView<String> newSetViewWithElement(String element) { MySetView<String> s = new MySetView<String>(); s.add(element); return s; }
TreeBag<String> newTreeBagWithElement(String element) { TreeBag<String> b = new TreeBag<String>(); b.add(element); return b; }
TreeSet<String> newTreeSetWithElement(String element) { TreeSet<String> h = new TreeSet<String>(); h.add(element); return h; }
Vector<String> newVectorWithElement(String element) { Vector<String> v = new Vector<String>(); v.add(element); return v; }
Vector<Iterable<String>> newVectorWithElement(Iterable<String> element) { Vector<Iterable<String>> v = new Vector<Iterable<String>>(); v.add(element); return v; }
// --- Write-side factories: map-like containers with a tainted KEY ----------
// Each factory stores the tainted value in the key position (value is null),
// so "MapKey of Argument[...]" flow can be exercised.
TreeBidiMap newTreeBidiMapWithMapKey(Object element) { TreeBidiMap m = new TreeBidiMap(); m.put(element,null); return m; }
MyAbstractKeyValue newMAKVWithMapKey(Object element) { return new MyAbstractKeyValue(element,null); }
DefaultKeyValue newDKVWithMapKey(Object element) { return new DefaultKeyValue(element,null); }
HashedMap newHashedMapWithMapKey(Object element) { HashedMap m = new HashedMap(); m.put(element,null); return m; }
MyAbstractMapEntry newMAMEWithMapKey(Object element) { return new MyAbstractMapEntry(element,null); }
MyAbstractMapEntryDecorator newMAMEDWithMapKey(Object element) { return new MyAbstractMapEntryDecorator(newMAMEWithMapKey(element)); }
MultiValueMap newMVMWithMapKey(Object element) { MultiValueMap m = new MultiValueMap(); m.put(element,null); return m; }
MultiValuedMap newMVdMWithMapKey(Object element) { MultiValuedMap m = new ArrayListValuedHashMap(); m.put(element,null); return m; }
// Iterator positioned over a map whose single key is the tainted value.
OrderedMapIterator newOMIWithElement(Object element) { LinkedMap m = new LinkedMap(); m.put(element,null); return m.mapIterator(); }
// Stub: ResourceBundle cannot be constructed directly here; null suffices for static analysis.
ResourceBundle newRBWithMapKey(Object element) { return (ResourceBundle)null; }
SortedMap newTreeMapWithMapKey(Object element) { SortedMap m = new TreeMap(); m.put(element,null); return m; }
Trie newTrieWithMapKey(Object element) { Trie m = new PatriciaTrie(); m.put(element,null); return m; }
// TiedMapEntry ties the tainted value as the entry's key over an empty map.
TiedMapEntry newTMEWithMapKey(Object element) { return new TiedMapEntry(new TreeMap(),element); }
// --- Write-side factories: map-like containers with a tainted VALUE --------
// Mirror of the MapKey factories above: the tainted value is stored in the
// value position under the null key, so "MapValue of Argument[...]" flow can
// be exercised (and read back via getMapValue(...)'s get(null)).
TreeBidiMap newTreeBidiMapWithMapValue(Object element) { TreeBidiMap m = new TreeBidiMap(); m.put(null,element); return m; }
MyAbstractKeyValue newMAKVWithMapValue(Object element) { return new MyAbstractKeyValue(null,element); }
DefaultKeyValue newDKVWithMapValue(Object element) { return new DefaultKeyValue(null,element); }
HashedMap newHashedMapWithMapValue(Object element) { HashedMap m = new HashedMap(); m.put(null,element); return m; }
MyAbstractMapEntry newMAMEWithMapValue(Object element) { return new MyAbstractMapEntry(null,element); }
MyAbstractMapEntryDecorator newMAMEDWithMapValue(Object element) { return new MyAbstractMapEntryDecorator(newMAMEWithMapValue(element)); }
MultiValueMap newMVMWithMapValue(Object element) { MultiValueMap m = new MultiValueMap(); m.put(null,element); return m; }
MultiMap newMMWithMapValue(Object element) { MultiMap m = new MultiValueMap(); m.put(null,element); return m; }
ArrayListValuedHashMap newALVHMWithMapValue(Object element) { ArrayListValuedHashMap m = new ArrayListValuedHashMap(); m.put(null,element); return m; }
HashSetValuedHashMap newHSVHMWithMapValue(Object element) { HashSetValuedHashMap m = new HashSetValuedHashMap(); m.put(null,element); return m; }
OrderedMapIterator newOMIWithMapValue(Object element) { LinkedMap m = new LinkedMap(); m.put(null,element); return m.mapIterator(); }
// Stub: ResourceBundle cannot be constructed directly here; null suffices for static analysis.
ResourceBundle newRBWithMapValue(Object element) { return (ResourceBundle)null; }
SortedMap newTreeMapWithMapValue(Object element) { SortedMap m = new TreeMap(); m.put(null,element); return m; }
Trie newTrieWithMapValue(Object element) { Trie m = new PatriciaTrie(); m.put(null,element); return m; }
TiedMapEntry newTMEWithMapValue(Object element) { return new TiedMapEntry(newTreeMapWithMapValue(element),null); }
UnmodifiableMapEntry newUMEWithMapValue(Object element) { return new UnmodifiableMapEntry(null,element); }
// Taint source placeholder: the flow test treats this call's return value as tainted.
Object source() { return null; }
// Taint sink placeholder: flow into the argument is what the "$ hasValueFlow" annotations assert.
void sink(Object o) { }
// Concrete subclass exposing AbstractKeyValue's protected setKey/setValue so the
// generated tests can exercise flow summaries for those protected methods.
class MyAbstractKeyValue<K, V> extends AbstractKeyValue<K, V> {
MyAbstractKeyValue(K key, V value) {
super(key, value);
}
// Delegates to the protected setKey; returns whatever the superclass returns.
K mySetKey(final K key) {
return super.setKey(key);
}
// Delegates to the protected setValue; returns whatever the superclass returns.
V mySetValue(final V value) {
return super.setValue(value);
}
}
// Concrete subclass of the abstract AbstractMapEntry so it can be instantiated.
// getKey/getValue return null: these tests are analyzed statically, never run,
// so only the constructor's stored flow matters, not the runtime accessors.
class MyAbstractMapEntry<K, V> extends AbstractMapEntry<K, V> {
MyAbstractMapEntry(final K key, final V value) {
super(key, value);
}
@Override
public K getKey() { return null; }
@Override
public V getValue() { return null; }
}
// Concrete subclass exposing AbstractMapEntryDecorator's protected getMapEntry
// so the generated tests can exercise flow through that protected accessor.
class MyAbstractMapEntryDecorator<K, V> extends AbstractMapEntryDecorator<K, V> {
MyAbstractMapEntryDecorator(final Map.Entry<K, V> entry) {
super(entry);
}
// Delegates to the protected getMapEntry.
Map.Entry<K, V> myGetMapEntry() {
return super.getMapEntry();
}
}
// Concrete subclass exposing SetUtils.SetView's protected createIterator.
// createIterator returns null (stub): only the static flow summary for the
// protected method is under test, the view is never actually iterated.
class MySetView<E> extends SetUtils.SetView<E> {
MySetView() { super(); }
@Override
protected Iterator<E> createIterator() { return null; }
// Delegates to the protected createIterator.
Iterator<E> myCreateIterator() { return createIterator(); }
}
public void test() {
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;AbstractKeyValue;;;Argument[0];MapKey of Argument[-1];value"
AbstractKeyValue out = null;
Object in = source();
out = new MyAbstractKeyValue(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;AbstractKeyValue;;;Argument[1];MapValue of Argument[-1];value"
AbstractKeyValue out = null;
Object in = source();
out = new MyAbstractKeyValue(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;Argument[0];MapKey of Argument[-1];value"
DefaultKeyValue out = null;
Object in = source();
out.setKey(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;Argument[0];MapKey of Argument[-1];value"
MyAbstractKeyValue out = null;
Object in = source();
out.mySetKey(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
DefaultKeyValue in = newDKVWithMapValue((String)source());
out = in.setKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MyAbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.mySetKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setKey;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MyAbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.mySetKey((Object)null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
UnmodifiableMapEntry out = null;
Object in = source();
out.setValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
DefaultKeyValue out = null;
Object in = source();
out.setValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
AbstractMapEntry out = null;
Object in = source();
out.setValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
MyAbstractKeyValue out = null;
Object in = source();
out.mySetValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
UnmodifiableMapEntry in = newUMEWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
DefaultKeyValue in = newDKVWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapEntry in = newMAMEWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapEntry in = newMAMEWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MyAbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.mySetValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractKeyValue;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MyAbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.mySetValue((Object)null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntry;true;AbstractMapEntry;;;Argument[0];MapKey of Argument[-1];value"
AbstractMapEntry out = null;
Object in = source();
out = new MyAbstractMapEntry(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntry;true;AbstractMapEntry;;;Argument[1];MapValue of Argument[-1];value"
AbstractMapEntry out = null;
Object in = source();
out = new MyAbstractMapEntry(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntryDecorator;true;AbstractMapEntryDecorator;;;MapKey of Argument[0];MapKey of Argument[-1];value"
AbstractMapEntryDecorator out = null;
Map.Entry<String,String> in = newMAMEWithMapKey((String)source());
out = new MyAbstractMapEntryDecorator(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntryDecorator;true;AbstractMapEntryDecorator;;;MapValue of Argument[0];MapValue of Argument[-1];value"
AbstractMapEntryDecorator out = null;
Map.Entry<String,String> in = newMAMEWithMapValue((String)source());
out = new MyAbstractMapEntryDecorator(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntryDecorator;true;getMapEntry;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
Map.Entry<String,String> out = null;
MyAbstractMapEntryDecorator in = newMAMEDWithMapKey((String)source());
out = in.myGetMapEntry();
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;AbstractMapEntryDecorator;true;getMapEntry;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
Map.Entry<String,String> out = null;
MyAbstractMapEntryDecorator in = newMAMEDWithMapValue((String)source());
out = in.myGetMapEntry();
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(Entry);;MapKey of Argument[0];MapKey of Argument[-1];value"
DefaultKeyValue out = null;
Map.Entry<String,String> in = newMAMEWithMapKey((String)source());
out = new DefaultKeyValue(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(Entry);;MapValue of Argument[0];MapValue of Argument[-1];value"
DefaultKeyValue out = null;
Map.Entry<String,String> in = newMAMEWithMapValue((String)source());
out = new DefaultKeyValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(KeyValue);;MapKey of Argument[0];MapKey of Argument[-1];value"
DefaultKeyValue out = null;
KeyValue in = newMAKVWithMapKey((String)source());
out = new DefaultKeyValue(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(KeyValue);;MapValue of Argument[0];MapValue of Argument[-1];value"
DefaultKeyValue out = null;
KeyValue in = newMAKVWithMapValue((String)source());
out = new DefaultKeyValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(Object,Object);;Argument[0];MapKey of Argument[-1];value"
DefaultKeyValue out = null;
Object in = source();
out = new DefaultKeyValue(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;DefaultKeyValue;(Object,Object);;Argument[1];MapValue of Argument[-1];value"
DefaultKeyValue out = null;
Object in = source();
out = new DefaultKeyValue(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;toMapEntry;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
Map.Entry<String,String> out = null;
DefaultKeyValue in = newDKVWithMapKey((String)source());
out = in.toMapEntry();
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultKeyValue;true;toMapEntry;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
Map.Entry<String,String> out = null;
DefaultKeyValue in = newDKVWithMapValue((String)source());
out = in.toMapEntry();
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(Entry);;MapKey of Argument[0];MapKey of Argument[-1];value"
DefaultMapEntry out = null;
Map.Entry<String,String> in = newMAMEWithMapKey((String)source());
out = new DefaultMapEntry(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(Entry);;MapValue of Argument[0];MapValue of Argument[-1];value"
DefaultMapEntry out = null;
Map.Entry<String,String> in = newMAMEWithMapValue((String)source());
out = new DefaultMapEntry(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(KeyValue);;MapKey of Argument[0];MapKey of Argument[-1];value"
DefaultMapEntry out = null;
KeyValue in = newMAKVWithMapKey((String)source());
out = new DefaultMapEntry(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(KeyValue);;MapValue of Argument[0];MapValue of Argument[-1];value"
DefaultMapEntry out = null;
KeyValue in = newMAKVWithMapValue((String)source());
out = new DefaultMapEntry(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(Object,Object);;Argument[0];MapKey of Argument[-1];value"
DefaultMapEntry out = null;
Object in = source();
out = new DefaultMapEntry(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;DefaultMapEntry;true;DefaultMapEntry;(Object,Object);;Argument[1];MapValue of Argument[-1];value"
DefaultMapEntry out = null;
Object in = source();
out = new DefaultMapEntry(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;TiedMapEntry;true;TiedMapEntry;;;Argument[1];MapKey of Argument[-1];value"
TiedMapEntry out = null;
Object in = source();
out = new TiedMapEntry(null, in);
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;TiedMapEntry;true;TiedMapEntry;;;MapValue of Argument[0];MapValue of Argument[-1];value"
TiedMapEntry out = null;
Map in = newTreeMapWithMapValue((String)source());
out = new TiedMapEntry(in, null);
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(Entry);;MapKey of Argument[0];MapKey of Argument[-1];value"
UnmodifiableMapEntry out = null;
Map.Entry<String,String> in = newMAMEWithMapKey((String)source());
out = new UnmodifiableMapEntry(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(Entry);;MapValue of Argument[0];MapValue of Argument[-1];value"
UnmodifiableMapEntry out = null;
Map.Entry<String,String> in = newMAMEWithMapValue((String)source());
out = new UnmodifiableMapEntry(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(KeyValue);;MapKey of Argument[0];MapKey of Argument[-1];value"
UnmodifiableMapEntry out = null;
KeyValue in = newMAKVWithMapKey((String)source());
out = new UnmodifiableMapEntry(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(KeyValue);;MapValue of Argument[0];MapValue of Argument[-1];value"
UnmodifiableMapEntry out = null;
KeyValue in = newMAKVWithMapValue((String)source());
out = new UnmodifiableMapEntry(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(Object,Object);;Argument[0];MapKey of Argument[-1];value"
UnmodifiableMapEntry out = null;
Object in = source();
out = new UnmodifiableMapEntry(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4.keyvalue;UnmodifiableMapEntry;true;UnmodifiableMapEntry;(Object,Object);;Argument[1];MapValue of Argument[-1];value"
UnmodifiableMapEntry out = null;
Object in = source();
out = new UnmodifiableMapEntry(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
TiedMapEntry in = newTMEWithMapKey((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
KeyValue in = newMAKVWithMapKey((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapEntryDecorator in = newMAMEDWithMapKey((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
AbstractKeyValue in = newMAKVWithMapKey((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
TiedMapEntry in = newTMEWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
KeyValue in = newMAKVWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapEntryDecorator in = newMAMEDWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;KeyValue;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractKeyValue in = newMAKVWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
Map out = null;
Map in = (Map)source();
out = MapUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;fixedSizeMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.fixedSizeMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;fixedSizeMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.fixedSizeMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;fixedSizeSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.fixedSizeSortedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;fixedSizeSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.fixedSizeSortedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getMap;;;Argument[2];ReturnValue;value"
Map out = null;
Map in = (Map)source();
out = MapUtils.getMap(null, null, in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getMap;;;MapValue of Argument[0];ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getMap(in, null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getMap;;;MapValue of Argument[0];ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getMap(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getObject;;;Argument[2];ReturnValue;value"
Object out = null;
Object in = source();
out = MapUtils.getObject(null, null, in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getObject;;;MapValue of Argument[0];ReturnValue;value"
Object out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getObject(in, null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getObject;;;MapValue of Argument[0];ReturnValue;value"
Object out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getObject(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getString;;;Argument[2];ReturnValue;value"
String out = null;
String in = (String)source();
out = MapUtils.getString(null, null, in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getString;;;MapValue of Argument[0];ReturnValue;value"
String out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getString(in, null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;getString;;;MapValue of Argument[0];ReturnValue;value"
String out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.getString(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;invertMap;;;MapKey of Argument[0];MapValue of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.invertMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;invertMap;;;MapValue of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.invertMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;iterableMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.iterableMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;iterableMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.iterableMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;iterableSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableSortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.iterableSortedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;iterableSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableSortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.iterableSortedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazyMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.lazyMap(in, (Transformer)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazyMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.lazyMap(in, (Factory)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazyMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.lazyMap(in, (Transformer)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazyMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.lazyMap(in, (Factory)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazySortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.lazySortedMap(in, (Transformer)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazySortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.lazySortedMap(in, (Factory)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazySortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.lazySortedMap(in, (Transformer)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;lazySortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.lazySortedMap(in, (Factory)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.multiValueMap(in, (Factory)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.multiValueMap(in, (Class)null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.multiValueMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.multiValueMap(in, (Factory)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.multiValueMap(in, (Class)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;multiValueMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValueMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.multiValueMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;orderedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
OrderedMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.orderedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;orderedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
OrderedMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.orderedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;populateMap;(Map,Iterable,Transformer);;Element of Argument[1];MapValue of Argument[0];value"
Map out = null;
Iterable in = newVectorWithElement((String)source());
MapUtils.populateMap(out, in, (Transformer)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// Note: this call is tricky to compile — without explicit generic type
// arguments, the compiler considers the overload resolution ambiguous.
// "org.apache.commons.collections4;MapUtils;true;populateMap;(MultiMap,Iterable,Transformer);;Element of Argument[1];MapValue of Argument[0];value"
MultiMap<Integer, String> out = null;
Iterable<String> in = newVectorWithElement((String)source());
MapUtils.populateMap(out, in, (Transformer<String, Integer>)null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;predicatedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.predicatedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;predicatedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.predicatedMap(in, null, null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;predicatedSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.predicatedSortedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;predicatedSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.predicatedSortedMap(in, null, null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of Argument[1];MapKey of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement((String)source());
MapUtils.putAll(out, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of Argument[1];MapKey of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement((String)source());
out = MapUtils.putAll(null, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of Argument[1];MapValue of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement((String)source());
MapUtils.putAll(out, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of Argument[1];MapValue of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement((String)source());
out = MapUtils.putAll(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of ArrayElement of Argument[1];MapKey of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement(newWithArrayElement((String)source()));
MapUtils.putAll(out, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of ArrayElement of Argument[1];MapKey of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement(newWithArrayElement((String)source()));
out = MapUtils.putAll(null, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of ArrayElement of Argument[1];MapValue of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement(newWithArrayElement((String)source()));
MapUtils.putAll(out, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;ArrayElement of ArrayElement of Argument[1];MapValue of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement(newWithArrayElement((String)source()));
out = MapUtils.putAll(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;MapKey of ArrayElement of Argument[1];MapKey of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement(newMAKVWithMapKey((String)source()));
MapUtils.putAll(out, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;MapKey of ArrayElement of Argument[1];MapKey of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement(newMAKVWithMapKey((String)source()));
out = MapUtils.putAll(null, in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;MapValue of ArrayElement of Argument[1];MapValue of Argument[0];value"
Map out = null;
Object[] in = newWithArrayElement(newMAKVWithMapValue((String)source()));
MapUtils.putAll(out, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;putAll;;;MapValue of ArrayElement of Argument[1];MapValue of ReturnValue;value"
Map out = null;
Object[] in = newWithArrayElement(newMAKVWithMapValue((String)source()));
out = MapUtils.putAll(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;safeAddToMap;;;Argument[1];MapKey of Argument[0];value"
Map out = null;
Object in = source();
MapUtils.safeAddToMap(out, in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;safeAddToMap;;;Argument[2];MapValue of Argument[0];value"
Map out = null;
Object in = source();
MapUtils.safeAddToMap(out, null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;synchronizedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.synchronizedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;synchronizedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.synchronizedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;synchronizedSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.synchronizedSortedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;synchronizedSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.synchronizedSortedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;toMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Map out = null;
ResourceBundle in = newRBWithMapKey((String)source());
out = MapUtils.toMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;toMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Map out = null;
ResourceBundle in = newRBWithMapValue((String)source());
out = MapUtils.toMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;transformedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.transformedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;transformedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.transformedMap(in, null, null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;transformedSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.transformedSortedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;transformedSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.transformedSortedMap(in, null, null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;unmodifiableMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapKey((String)source());
out = MapUtils.unmodifiableMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;unmodifiableMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Map out = null;
Map in = newTreeMapWithMapValue((String)source());
out = MapUtils.unmodifiableMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;unmodifiableSortedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapKey((String)source());
out = MapUtils.unmodifiableSortedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapUtils;true;unmodifiableSortedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
SortedMap out = null;
SortedMap in = newTreeMapWithMapValue((String)source());
out = MapUtils.unmodifiableSortedMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ArrayStack;true;peek;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
ArrayStack in = newArrayStackWithElement((String)source());
out = in.peek(0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ArrayStack;true;peek;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
ArrayStack in = newArrayStackWithElement((String)source());
out = in.peek();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ArrayStack;true;pop;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
ArrayStack in = newArrayStackWithElement((String)source());
out = in.pop();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ArrayStack;true;push;;;Argument[0];Element of Argument[-1];value"
ArrayStack out = null;
Object in = source();
out.push(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Bag;true;add;;;Argument[0];Element of Argument[-1];value"
Bag out = null;
Object in = source();
out.add(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Bag;true;add;;;Argument[0];Element of Argument[-1];value"
Bag out = null;
Object in = source();
out.add(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Bag;true;uniqueSet;;;Element of Argument[-1];Element of ReturnValue;value"
Set out = null;
Bag in = newTreeBagWithElement((String)source());
out = in.uniqueSet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;collectionBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.collectionBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;predicatedBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.predicatedBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;predicatedSortedBag;;;Element of Argument[0];Element of ReturnValue;value"
SortedBag out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = BagUtils.predicatedSortedBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;synchronizedBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.synchronizedBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;synchronizedSortedBag;;;Element of Argument[0];Element of ReturnValue;value"
SortedBag out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = BagUtils.synchronizedSortedBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;transformingBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.transformingBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;transformingSortedBag;;;Element of Argument[0];Element of ReturnValue;value"
SortedBag out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = BagUtils.transformingSortedBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;unmodifiableBag;;;Element of Argument[0];Element of ReturnValue;value"
Bag out = null;
Bag in = newTreeBagWithElement((String)source());
out = BagUtils.unmodifiableBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BagUtils;true;unmodifiableSortedBag;;;Element of Argument[0];Element of ReturnValue;value"
SortedBag out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = BagUtils.unmodifiableSortedBag(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BidiMap;true;getKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
BidiMap in = newTreeBidiMapWithMapKey((String)source());
out = in.getKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BidiMap;true;inverseBidiMap;;;MapKey of Argument[-1];MapValue of ReturnValue;value"
BidiMap out = null;
BidiMap in = newTreeBidiMapWithMapKey((String)source());
out = in.inverseBidiMap();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BidiMap;true;inverseBidiMap;;;MapValue of Argument[-1];MapKey of ReturnValue;value"
BidiMap out = null;
BidiMap in = newTreeBidiMapWithMapValue((String)source());
out = in.inverseBidiMap();
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;BidiMap;true;removeValue;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
BidiMap in = newTreeBidiMapWithMapKey((String)source());
out = in.removeValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addAll;(Collection,Enumeration);;Element of Argument[1];Element of Argument[0];value"
Collection out = null;
Enumeration in = newEnumerationWithElement((String)source());
CollectionUtils.addAll(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addAll;(Collection,Iterable);;Element of Argument[1];Element of Argument[0];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.addAll(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addAll;(Collection,Iterator);;Element of Argument[1];Element of Argument[0];value"
Collection out = null;
Iterator in = newListIteratorWithElement((String)source());
CollectionUtils.addAll(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addAll;(Collection,Object[]);;ArrayElement of Argument[1];Element of Argument[0];value"
Collection out = null;
Object[] in = newWithArrayElement((String)source());
CollectionUtils.addAll(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;addIgnoreNull;;;Argument[1];Element of Argument[0];value"
Collection out = null;
Object in = source();
CollectionUtils.addIgnoreNull(out, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(in, null, null, false);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(in, (Iterable)null, false);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(in, (Iterable)null, (Comparator)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(null, in, null, false);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate((Iterable)null, in, false);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;collate;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.collate((Iterable)null, in, (Comparator)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;disjunction;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.disjunction(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;disjunction;;;Element of Argument[1];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.disjunction(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
Collection out = null;
Collection in = (Collection)source();
out = CollectionUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;extractSingleton;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.extractSingleton(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;find;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.find(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Iterator,int);;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterator in = newListIteratorWithElement((String)source());
out = CollectionUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Iterable,int);;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Map,int);;MapKey of Argument[0];MapKey of ReturnValue;value"
Map.Entry out = null;
Map in = newTreeMapWithMapKey((String)source());
out = CollectionUtils.get(in, 0);
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Map,int);;MapValue of Argument[0];MapValue of ReturnValue;value"
Map.Entry out = null;
Map in = newTreeMapWithMapValue((String)source());
out = CollectionUtils.get(in, 0);
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Object,int);;ArrayElement of Argument[0];ReturnValue;value"
Object out = null;
Object in = newWithArrayElement((String)source());
out = CollectionUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Object,int);;Element of Argument[0];ReturnValue;value"
Object out = null;
Object in = newVectorWithElement((String)source());
out = CollectionUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Object,int);;MapKey of Argument[0];MapKey of ReturnValue;value"
Map.Entry out = null;
Object in = newTreeMapWithMapKey((String)source());
out = (Map.Entry)CollectionUtils.get(in, 0);
sink(getMapKeyFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;get;(Object,int);;MapValue of Argument[0];MapValue of ReturnValue;value"
Map.Entry out = null;
Object in = newTreeMapWithMapValue((String)source());
out = (Map.Entry)CollectionUtils.get(in, 0);
sink(getMapValueFromEntry(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;getCardinalityMap;;;Element of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.getCardinalityMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;intersection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.intersection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;intersection;;;Element of Argument[1];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.intersection(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;permutations;;;Element of Argument[0];Element of Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.permutations(in);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;predicatedCollection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.predicatedCollection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;removeAll;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.removeAll(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;removeAll;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.removeAll(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;retainAll;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.retainAll(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;retainAll;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.retainAll(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of Argument[2];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.select(in, null, out, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of Argument[2];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.select(in, null, out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of Argument[3];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.select(in, null, null, out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.select(in, null, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.select(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;select;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.select(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;selectRejected;;;Element of Argument[0];Element of Argument[2];value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
CollectionUtils.selectRejected(in, null, out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;selectRejected;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.selectRejected(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;selectRejected;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.selectRejected(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;subtract;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.subtract(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;subtract;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.subtract(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;synchronizedCollection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.synchronizedCollection(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;transformingCollection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.transformingCollection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;union;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.union(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;union;;;Element of Argument[1];Element of ReturnValue;value"
Collection out = null;
Iterable in = newVectorWithElement((String)source());
out = CollectionUtils.union(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;CollectionUtils;true;unmodifiableCollection;;;Element of Argument[0];Element of ReturnValue;value"
Collection out = null;
Collection in = newVectorWithElement((String)source());
out = CollectionUtils.unmodifiableCollection(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;EnumerationUtils;true;get;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Enumeration in = newEnumerationWithElement((String)source());
out = EnumerationUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;EnumerationUtils;true;toList;(Enumeration);;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Enumeration in = newEnumerationWithElement((String)source());
out = EnumerationUtils.toList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;EnumerationUtils;true;toList;(StringTokenizer);;Argument[0];Element of ReturnValue;taint"
List out = null;
StringTokenizer in = (StringTokenizer)source();
out = EnumerationUtils.toList(in);
sink(getElement(out)); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;append;(Iterable);;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.append((Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;append;(Iterable);;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
FluentIterable instance = null;
out = instance.append(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;append;(Object[]);;ArrayElement of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Object in = source();
FluentIterable instance = null;
out = instance.append(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;append;(Object[]);;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.append();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;asEnumeration;;;Element of Argument[-1];Element of ReturnValue;value"
Enumeration out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.asEnumeration();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;collate;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.collate(null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;collate;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.collate(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
FluentIterable instance = null;
out = instance.collate(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;collate;;;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
FluentIterable instance = null;
out = instance.collate(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;copyInto;;;Element of Argument[-1];Element of Argument[0];value"
Collection out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
in.copyInto(out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;eval;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.eval();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;filter;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.filter(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;get;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.get(0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;limit;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.limit(0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;loop;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.loop();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;of;(Iterable);;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
out = FluentIterable.of(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;of;(Object);;Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Object in = source();
out = FluentIterable.of(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;of;(Object[]);;ArrayElement of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Object[] in = newWithArrayElement((String)source());
out = FluentIterable.of(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;reverse;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.reverse();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;skip;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.skip(0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;toArray;;;Element of Argument[-1];ArrayElement of ReturnValue;value"
Object[] out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.toArray(null);
sink(getArrayElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;toList;;;Element of Argument[-1];Element of ReturnValue;value"
List out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.toList();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;unique;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.unique();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;unmodifiable;;;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.unmodifiable();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;zip;(Iterable);;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.zip((Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;zip;(Iterable);;Element of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newFluentIterableWithElement((String)source());
FluentIterable instance = null;
out = instance.zip(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;zip;(Iterable[]);;Element of Argument[-1];Element of ReturnValue;value"
FluentIterable out = null;
FluentIterable in = newFluentIterableWithElement((String)source());
out = in.zip((Iterable)null, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;FluentIterable;true;zip;(Iterable[]);;Element of ArrayElement of Argument[0];Element of ReturnValue;value"
FluentIterable out = null;
Iterable in = newVectorWithElement((String)source());
FluentIterable instance = null;
out = instance.zip(in, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapKey of Argument[-1];MapKey of Element of ReturnValue;value"
Set<Map.Entry> out = null;
MultiValueMap in = newMVMWithMapKey((String)source());
out = in.entrySet();
sink(getMapKeyFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapKey of Argument[-1];MapKey of Element of ReturnValue;value"
Set<Map.Entry> out = null;
Get in = newTrieWithMapKey((String)source());
out = in.entrySet();
sink(getMapKeyFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapKey of Argument[-1];MapKey of Element of ReturnValue;value"
Set<Map.Entry> out = null;
AbstractMapDecorator in = newMVMWithMapKey((String)source());
out = in.entrySet();
sink(getMapKeyFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapValue of Argument[-1];MapValue of Element of ReturnValue;value"
Set<Map.Entry> out = null;
MultiValueMap in = newMVMWithMapValue((String)source());
out = in.entrySet();
sink(getMapValueFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapValue of Argument[-1];MapValue of Element of ReturnValue;value"
Set<Map.Entry> out = null;
Get in = newTrieWithMapValue((String)source());
out = in.entrySet();
sink(getMapValueFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;entrySet;;;MapValue of Argument[-1];MapValue of Element of ReturnValue;value"
Set<Map.Entry> out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.entrySet();
sink(getMapValueFromEntry(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;get;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.get(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;get;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
Get in = newTrieWithMapValue((String)source());
out = in.get(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;get;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.get(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;keySet;();;MapKey of Argument[-1];Element of ReturnValue;value"
Set out = null;
Get in = newTrieWithMapKey((String)source());
out = in.keySet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;keySet;();;MapKey of Argument[-1];Element of ReturnValue;value"
Set out = null;
AbstractMapDecorator in = newMVMWithMapKey((String)source());
out = in.keySet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;remove;(Object);;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.remove(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;remove;(Object);;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
Get in = newTrieWithMapValue((String)source());
out = in.remove(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;remove;(Object);;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.remove(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Set out = null;
BidiMap in = newTreeBidiMapWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValueMap in = newMVMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
Get in = newTrieWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Get;true;values;();;MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
OrderedMapIterator out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.mapIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
MapIterator out = null;
IterableGet in = newHashedMapWithMapKey((String)source());
out = in.mapIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
MapIterator out = null;
AbstractIterableMap in = newMVMWithMapKey((String)source());
out = in.mapIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
OrderedMapIterator out = null;
OrderedMap in = newTreeBidiMapWithMapValue((String)source());
out = in.mapIterator();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
MapIterator out = null;
IterableGet in = newHashedMapWithMapValue((String)source());
out = in.mapIterator();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableGet;true;mapIterator;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
MapIterator out = null;
AbstractIterableMap in = newMVMWithMapValue((String)source());
out = in.mapIterator();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;boundedIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.boundedIterable(in, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable[]);;Element of ArrayElement of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(in, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable);;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable);;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable);;Element of Argument[2];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable,Iterable);;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(in, null, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable,Iterable);;Element of Argument[2];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;chainedIterable;(Iterable,Iterable,Iterable,Iterable);;Element of Argument[3];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.chainedIterable(null, null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;collatedIterable;(Comparator,Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.collatedIterable(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;collatedIterable;(Comparator,Iterable,Iterable);;Element of Argument[2];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.collatedIterable(null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;collatedIterable;(Iterable,Iterable);;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.collatedIterable(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;collatedIterable;(Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.collatedIterable(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
Iterable out = null;
Iterable in = (Iterable)source();
out = IterableUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;filteredIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.filteredIterable(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;find;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.find(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;first;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.first(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;get;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;loopingIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.loopingIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;partition;;;Element of Argument[0];Element of Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.partition(in, (Factory)null, (Predicate)null, (Predicate)null);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;partition;;;Element of Argument[0];Element of Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.partition(in, (Predicate)null, (Predicate)null);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;partition;;;Element of Argument[0];Element of Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.partition(in, (Predicate)null, (Predicate)null);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;reversedIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.reversedIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;skippingIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.skippingIterable(in, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;toList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.toList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;toString;;;Argument[2];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IterableUtils.toString(null, null, in, null, null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;toString;;;Argument[3];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IterableUtils.toString(null, null, null, in, null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;toString;;;Argument[4];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IterableUtils.toString(null, null, null, null, in);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;uniqueIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.uniqueIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;unmodifiableIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.unmodifiableIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;zippingIterable;(Iterable,Iterable);;Element of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.zippingIterable((Iterable)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;zippingIterable;(Iterable,Iterable[]);;Element of ArrayElement of Argument[1];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.zippingIterable((Iterable)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;zippingIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.zippingIterable(in, (Iterable)null, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IterableUtils;true;zippingIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterable in = newVectorWithElement((String)source());
out = IterableUtils.zippingIterable(in, (Iterable)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object[] in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in, 0, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object[] in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = source();
out = IteratorUtils.arrayIterator(in, (Object)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in, 0, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object[] in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in, 0, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object[] in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object in = source();
out = IteratorUtils.arrayListIterator(in, (Object)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in, 0, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;arrayListIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.arrayListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asEnumeration;;;Element of Argument[0];Element of ReturnValue;value"
Enumeration out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.asEnumeration(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.asIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Enumeration in = newEnumerationWithElement((String)source());
out = IteratorUtils.asIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Enumeration in = newEnumerationWithElement((String)source());
out = IteratorUtils.asIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;asMultipleUseIterable;;;Element of Argument[0];Element of ReturnValue;value"
Iterable out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.asMultipleUseIterable(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;boundedIterator;;;Element of Argument[0];Element of ReturnValue;value"
BoundedIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.boundedIterator(in, 0L, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;boundedIterator;;;Element of Argument[0];Element of ReturnValue;value"
BoundedIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.boundedIterator(in, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;chainedIterator;(Collection);;Element of Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Collection in = newVectorWithElement(newVectorWithElement((String)source()));
out = IteratorUtils.chainedIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;chainedIterator;(Iterator[]);;Element of ArrayElement of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.chainedIterator(in, (Iterator)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;chainedIterator;(Iterator,Iterator);;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.chainedIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;chainedIterator;(Iterator,Iterator);;Element of Argument[1];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.chainedIterator(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;collatedIterator;(Comparator,Collection);;Element of Element of Argument[1];Element of ReturnValue;value"
Iterator out = null;
Collection in = newVectorWithElement(newVectorWithElement((String)source()));
out = IteratorUtils.collatedIterator((Comparator)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;collatedIterator;(Comparator,Iterator[]);;Element of ArrayElement of Argument[1];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.collatedIterator((Comparator)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;collatedIterator;(Comparator,Iterator,Iterator);;Element of Argument[1];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.collatedIterator(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;collatedIterator;(Comparator,Iterator,Iterator);;Element of Argument[2];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.collatedIterator(null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;filteredIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.filteredIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;filteredListIterator;;;Element of Argument[0];Element of ReturnValue;value"
ListIterator out = null;
ListIterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.filteredListIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;find;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.find(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;first;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.first(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;get;;;Element of Argument[0];ReturnValue;value"
Object out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.get(in, 0);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;getIterator;;;Argument[0];Element of ReturnValue;value"
Iterator out = null;
Object in = source();
out = IteratorUtils.getIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;getIterator;;;ArrayElement of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Object in = newWithArrayElement((String)source());
out = IteratorUtils.getIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;getIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Object in = newVectorWithElement((String)source());
out = IteratorUtils.getIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;getIterator;;;MapValue of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Map in = newTreeMapWithMapValue((String)source());
out = IteratorUtils.getIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;loopingIterator;;;Element of Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Collection in = newVectorWithElement((String)source());
out = IteratorUtils.loopingIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;loopingListIterator;;;Element of Argument[0];Element of ReturnValue;value"
ResettableListIterator out = null;
List in = newVectorWithElement((String)source());
out = IteratorUtils.loopingListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;peekingIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.peekingIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;pushbackIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.pushbackIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;singletonIterator;;;Argument[0];Element of ReturnValue;value"
ResettableIterator out = null;
Object in = source();
out = IteratorUtils.singletonIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;singletonListIterator;;;Argument[0];Element of ReturnValue;value"
ListIterator out = null;
Object in = source();
out = IteratorUtils.singletonListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;skippingIterator;;;Element of Argument[0];Element of ReturnValue;value"
SkippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.skippingIterator(in, 0L);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toArray;;;Element of Argument[0];ArrayElement of ReturnValue;value"
Object[] out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toArray(in, null);
sink(getArrayElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toArray;;;Element of Argument[0];ArrayElement of ReturnValue;value"
Object[] out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toArray(in);
sink(getArrayElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toList(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toListIterator;;;Element of Argument[0];Element of ReturnValue;value"
ListIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.toListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toString;;;Argument[2];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IteratorUtils.toString(null, null, in, null, null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toString;;;Argument[3];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IteratorUtils.toString(null, null, null, in, null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;toString;;;Argument[4];ReturnValue;taint"
String out = null;
String in = (String)source();
out = IteratorUtils.toString(null, null, null, null, in);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;unmodifiableIterator;;;Element of Argument[0];Element of ReturnValue;value"
Iterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.unmodifiableIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;unmodifiableListIterator;;;Element of Argument[0];Element of ReturnValue;value"
ListIterator out = null;
ListIterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.unmodifiableListIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;unmodifiableMapIterator;;;Element of Argument[0];Element of ReturnValue;value"
MapIterator out = null;
MapIterator in = newOMIWithElement((String)source());
out = IteratorUtils.unmodifiableMapIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;unmodifiableMapIterator;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MapIterator out = null;
MapIterator in = newOMIWithMapValue((String)source());
out = IteratorUtils.unmodifiableMapIterator(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator[]);;Element of ArrayElement of Argument[0];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator);;Element of Argument[0];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator);;Element of Argument[1];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator,Iterator);;Element of Argument[0];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator,Iterator);;Element of Argument[1];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;IteratorUtils;true;zippingIterator;(Iterator,Iterator,Iterator);;Element of Argument[2];Element of ReturnValue;value"
ZippingIterator out = null;
Iterator in = newListIteratorWithElement((String)source());
out = IteratorUtils.zippingIterator(null, null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;defaultIfNull;;;Argument[0];ReturnValue;value"
List out = null;
List in = (List)source();
out = ListUtils.defaultIfNull(in, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;defaultIfNull;;;Argument[1];ReturnValue;value"
List out = null;
List in = (List)source();
out = ListUtils.defaultIfNull(null, in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
List out = null;
List in = (List)source();
out = ListUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;fixedSizeList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.fixedSizeList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;intersection;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.intersection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;intersection;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.intersection(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;lazyList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.lazyList(in, (Transformer)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;lazyList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.lazyList(in, (Factory)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(CharSequence,CharSequence);;Argument[0];ReturnValue;taint"
String out = null;
CharSequence in = (CharSequence)source();
out = ListUtils.longestCommonSubsequence(in, (CharSequence)null);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(CharSequence,CharSequence);;Argument[1];ReturnValue;taint"
String out = null;
CharSequence in = (CharSequence)source();
out = ListUtils.longestCommonSubsequence((CharSequence)null, in);
sink(out); // $ hasTaintFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(List,List);;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.longestCommonSubsequence(in, (List)null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(List,List);;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.longestCommonSubsequence((List)null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(List,List,Equator);;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.longestCommonSubsequence(in, null, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;longestCommonSubsequence;(List,List,Equator);;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.longestCommonSubsequence(null, in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;partition;;;Element of Argument[0];Element of Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.partition(in, 0);
sink(getElement((Iterable)getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;predicatedList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.predicatedList(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;removeAll;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Collection in = newVectorWithElement((String)source());
out = ListUtils.removeAll(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;retainAll;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Collection in = newVectorWithElement((String)source());
out = ListUtils.retainAll(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;select;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Collection in = newVectorWithElement((String)source());
out = ListUtils.select(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;selectRejected;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
Collection in = newVectorWithElement((String)source());
out = ListUtils.selectRejected(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;subtract;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.subtract(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;sum;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.sum(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;sum;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.sum(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;synchronizedList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.synchronizedList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;transformedList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.transformedList(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;union;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.union(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;union;;;Element of Argument[1];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.union(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;ListUtils;true;unmodifiableList;;;Element of Argument[0];Element of ReturnValue;value"
List out = null;
List in = newVectorWithElement((String)source());
out = ListUtils.unmodifiableList(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapIterator;true;getKey;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
MapIterator in = newOMIWithElement((String)source());
out = in.getKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapIterator;true;getValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MapIterator in = newOMIWithMapValue((String)source());
out = in.getValue();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapIterator;true;setValue;;;Argument[0];MapValue of Argument[-1];value"
MapIterator out = null;
Object in = source();
out.setValue(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MapIterator;true;setValue;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MapIterator in = newOMIWithMapValue((String)source());
out = in.setValue(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;get;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiMap in = newMMWithMapValue((String)source());
out = (Collection)in.get(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiValueMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;put;;;Argument[1];Element of MapValue of Argument[-1];value"
MultiValueMap out = null;
Object in = source();
out.put(null, in);
sink(getElement((Collection)getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;put;;;Argument[1];Element of MapValue of Argument[-1];value"
MultiMap out = null;
Object in = source();
out.put(null, in);
sink(getElement((Collection)getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;values;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValueMap in = newMVMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMap;true;values;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = (MultiValuedMap)source();
out = MultiMapUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;getCollection;;;MapValue of Argument[0];ReturnValue;value"
Collection out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.getCollection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;getValuesAsBag;;;MapValue of Argument[0];ReturnValue;value"
Bag out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.getValuesAsBag(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;getValuesAsList;;;MapValue of Argument[0];ReturnValue;value"
List out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.getValuesAsList(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;getValuesAsSet;;;MapValue of Argument[0];ReturnValue;value"
Set out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.getValuesAsSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;transformedMultiValuedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = newMVdMWithMapKey((String)source());
out = MultiMapUtils.transformedMultiValuedMap(in, null, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;transformedMultiValuedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.transformedMultiValuedMap(in, null, null);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;unmodifiableMultiValuedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = newMVdMWithMapKey((String)source());
out = MultiMapUtils.unmodifiableMultiValuedMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiMapUtils;true;unmodifiableMultiValuedMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = MultiMapUtils.unmodifiableMultiValuedMap(in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet$Entry;true;getElement;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
MultiSet.Entry in = newMultiSetEntryWithElement((String)source());
out = in.getElement();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet;true;add;;;Argument[0];Element of Argument[-1];value"
MultiSet out = null;
Object in = source();
out.add(in, 0);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet;true;add;;;Argument[0];Element of Argument[-1];value"
MultiSet out = null;
Object in = source();
out.add(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet;true;entrySet;;;Element of Argument[-1];Element of Element of ReturnValue;value"
Set<MultiSet.Entry> out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = in.entrySet();
sink(getElement(getElement(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSet;true;uniqueSet;;;Element of Argument[-1];Element of ReturnValue;value"
Set out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = in.uniqueSet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSetUtils;true;predicatedMultiSet;;;Element of Argument[0];Element of ReturnValue;value"
MultiSet out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = MultiSetUtils.predicatedMultiSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSetUtils;true;synchronizedMultiSet;;;Element of Argument[0];Element of ReturnValue;value"
MultiSet out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = MultiSetUtils.synchronizedMultiSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiSetUtils;true;unmodifiableMultiSet;;;Element of Argument[0];Element of ReturnValue;value"
MultiSet out = null;
MultiSet in = newMultiSetWithElement((String)source());
out = MultiSetUtils.unmodifiableMultiSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;asMap;;;Element of MapValue of Argument[-1];Element of MapValue of ReturnValue;value"
Map out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.asMap();
sink(getElement((Collection)getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;asMap;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
Map out = null;
MultiValuedMap in = newMVdMWithMapKey((String)source());
out = in.asMap();
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;get;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Set out = null;
SetValuedMap in = newHSVHMWithMapValue((String)source());
out = in.get(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;get;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
List out = null;
ListValuedMap in = newALVHMWithMapValue((String)source());
out = in.get(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;get;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.get(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;keySet;;;MapKey of Argument[-1];Element of ReturnValue;value"
Set out = null;
MultiValuedMap in = newMVdMWithMapKey((String)source());
out = in.keySet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;keys;;;MapKey of Argument[-1];Element of ReturnValue;value"
MultiSet out = null;
MultiValuedMap in = newMVdMWithMapKey((String)source());
out = in.keys();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;mapIterator;;;Element of MapValue of Argument[-1];MapValue of ReturnValue;value"
MapIterator out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.mapIterator();
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
MapIterator out = null;
MultiValuedMap in = newMVdMWithMapKey((String)source());
out = in.mapIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiValuedMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;put;;;Argument[1];Element of MapValue of Argument[-1];value"
MultiValuedMap out = null;
Object in = source();
out.put(null, in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(Map);;MapKey of Argument[0];MapKey of Argument[-1];value"
MultiValuedMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out.putAll(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(Map);;MapValue of Argument[0];Element of MapValue of Argument[-1];value"
MultiValuedMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out.putAll(in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(MultiValuedMap);;Element of MapValue of Argument[0];Element of MapValue of Argument[-1];value"
MultiValuedMap out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out.putAll(in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(MultiValuedMap);;MapKey of Argument[0];MapKey of Argument[-1];value"
MultiValuedMap out = null;
MultiValuedMap in = newMVdMWithMapKey((String)source());
out.putAll(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(Object,Iterable);;Argument[0];MapKey of Argument[-1];value"
MultiValuedMap out = null;
Object in = source();
out.putAll(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;putAll;(Object,Iterable);;Element of Argument[1];Element of MapValue of Argument[-1];value"
MultiValuedMap out = null;
Iterable in = newFluentIterableWithElement((String)source());
out.putAll(null, in);
sink(getElement(getMapValue(out))); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;remove;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Set out = null;
SetValuedMap in = newHSVHMWithMapValue((String)source());
out = in.remove(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;remove;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
List out = null;
ListValuedMap in = newALVHMWithMapValue((String)source());
out = in.remove(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;remove;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.remove(null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;MultiValuedMap;true;values;;;Element of MapValue of Argument[-1];Element of ReturnValue;value"
Collection out = null;
MultiValuedMap in = newALVHMWithMapValue((String)source());
out = in.values();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedIterator;true;previous;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMapIterator in = newOMIWithElement((String)source());
out = in.previous();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedIterator;true;previous;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
OrderedIterator in = newOMIWithElement((String)source());
out = in.previous();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedMap;true;firstKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.firstKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedMap;true;lastKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.lastKey();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedMap;true;nextKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.nextKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;OrderedMap;true;previousKey;;;MapKey of Argument[-1];ReturnValue;value"
Object out = null;
OrderedMap in = newTreeBidiMapWithMapKey((String)source());
out = in.previousKey(null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
Put out = null;
Object in = source();
out.put(in, null);
sink(getMapKeyFromPut(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiValueMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
MultiMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
BidiMap out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[0];MapKey of Argument[-1];value"
AbstractMapDecorator out = null;
Object in = source();
out.put(in, null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
Put out = null;
Object in = source();
out.put(null, in);
sink(getMapValueFromPut(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
MultiValueMap out = null;
Object in = source();
out.put(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
MultiMap out = null;
Object in = source();
out.put(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
BidiMap out = null;
Object in = source();
out.put(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;Argument[1];MapValue of Argument[-1];value"
AbstractMapDecorator out = null;
Object in = source();
out.put(null, in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
Put in = newHashedMapWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MultiValueMap in = newMVMWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
MultiMap in = newMMWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
BidiMap in = newTreeBidiMapWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;put;;;MapValue of Argument[-1];ReturnValue;value"
Object out = null;
AbstractMapDecorator in = newMVMWithMapValue((String)source());
out = in.put(null, null);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapKey of Argument[0];MapKey of Argument[-1];value"
Put out = null;
Map in = newTreeMapWithMapKey((String)source());
out.putAll(in);
sink(getMapKeyFromPut(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapKey of Argument[0];MapKey of Argument[-1];value"
MultiValueMap out = null;
Map in = newTreeMapWithMapKey((String)source());
out.putAll(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapKey of Argument[0];MapKey of Argument[-1];value"
AbstractMapDecorator out = null;
Map in = newTreeMapWithMapKey((String)source());
out.putAll(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapValue of Argument[0];MapValue of Argument[-1];value"
Put out = null;
Map in = newTreeMapWithMapValue((String)source());
out.putAll(in);
sink(getMapValueFromPut(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapValue of Argument[0];MapValue of Argument[-1];value"
MultiValueMap out = null;
Map in = newTreeMapWithMapValue((String)source());
out.putAll(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Put;true;putAll;(Map);;MapValue of Argument[0];MapValue of Argument[-1];value"
AbstractMapDecorator out = null;
Map in = newTreeMapWithMapValue((String)source());
out.putAll(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;QueueUtils;true;predicatedQueue;;;Element of Argument[0];Element of ReturnValue;value"
Queue out = null;
Queue in = newQueueWithElement((String)source());
out = QueueUtils.predicatedQueue(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;QueueUtils;true;synchronizedQueue;;;Element of Argument[0];Element of ReturnValue;value"
Queue out = null;
Queue in = newQueueWithElement((String)source());
out = QueueUtils.synchronizedQueue(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;QueueUtils;true;transformingQueue;;;Element of Argument[0];Element of ReturnValue;value"
Queue out = null;
Queue in = newQueueWithElement((String)source());
out = QueueUtils.transformingQueue(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;QueueUtils;true;unmodifiableQueue;;;Element of Argument[0];Element of ReturnValue;value"
Queue out = null;
Queue in = newQueueWithElement((String)source());
out = QueueUtils.unmodifiableQueue(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils$SetView;true;copyInto;;;Element of Argument[-1];Element of Argument[0];value"
Set out = null;
SetUtils.SetView in = newSetViewWithElement((String)source());
in.copyInto(out);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils$SetView;true;createIterator;;;Element of Argument[-1];Element of ReturnValue;value"
Iterator out = null;
MySetView in = newSetViewWithElement((String)source());
out = in.myCreateIterator();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils$SetView;true;toSet;;;Element of Argument[-1];Element of ReturnValue;value"
Set out = null;
MySetView in = newSetViewWithElement((String)source());
out = in.toSet();
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;difference;;;Element of Argument[0];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.difference(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;disjunction;;;Element of Argument[0];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.disjunction(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;disjunction;;;Element of Argument[1];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.disjunction(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;emptyIfNull;;;Argument[0];ReturnValue;value"
Set out = null;
Set in = (Set)source();
out = SetUtils.emptyIfNull(in);
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;hashSet;;;ArrayElement of Argument[0];Element of ReturnValue;value"
HashSet out = null;
Object in = source();
out = SetUtils.hashSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;intersection;;;Element of Argument[0];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.intersection(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;intersection;;;Element of Argument[1];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.intersection(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;orderedSet;;;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.orderedSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;predicatedNavigableSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
NavigableSet in = newTreeSetWithElement((String)source());
out = SetUtils.predicatedNavigableSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;predicatedSet;;;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.predicatedSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;predicatedSortedSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
SortedSet in = newTreeSetWithElement((String)source());
out = SetUtils.predicatedSortedSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;synchronizedSet;;;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.synchronizedSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;synchronizedSortedSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
SortedSet in = newTreeSetWithElement((String)source());
out = SetUtils.synchronizedSortedSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;transformedNavigableSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
NavigableSet in = newTreeSetWithElement((String)source());
out = SetUtils.transformedNavigableSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;transformedSet;;;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.transformedSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;transformedSortedSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
SortedSet in = newTreeSetWithElement((String)source());
out = SetUtils.transformedSortedSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;union;;;Element of Argument[0];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.union(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;union;;;Element of Argument[1];Element of ReturnValue;value"
SetUtils.SetView out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.union(null, in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;unmodifiableNavigableSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
NavigableSet in = newTreeSetWithElement((String)source());
out = SetUtils.unmodifiableNavigableSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;unmodifiableSet;(Object[]);;ArrayElement of Argument[0];Element of ReturnValue;value"
Set out = null;
Object in = source();
out = SetUtils.unmodifiableSet(in, null);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;unmodifiableSet;(Set);;Element of Argument[0];Element of ReturnValue;value"
Set out = null;
Set in = newTreeSetWithElement((String)source());
out = SetUtils.unmodifiableSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SetUtils;true;unmodifiableSortedSet;;;Element of Argument[0];Element of ReturnValue;value"
SortedSet out = null;
SortedSet in = newTreeSetWithElement((String)source());
out = SetUtils.unmodifiableSortedSet(in);
sink(getElement(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SortedBag;true;first;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = in.first();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SortedBag;true;last;;;Element of Argument[-1];ReturnValue;value"
Object out = null;
SortedBag in = newTreeBagWithElement((String)source());
out = in.last();
sink(out); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SplitMapUtils;true;readableMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
IterableMap out = null;
Get in = newHashedMapWithMapKey((String)source());
out = SplitMapUtils.readableMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SplitMapUtils;true;readableMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
IterableMap out = null;
Get in = newHashedMapWithMapValue((String)source());
out = SplitMapUtils.readableMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SplitMapUtils;true;writableMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Map out = null;
Put in = newHashedMapWithMapKey((String)source());
out = SplitMapUtils.writableMap(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;SplitMapUtils;true;writableMap;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Map out = null;
Put in = newHashedMapWithMapValue((String)source());
out = SplitMapUtils.writableMap(in);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Trie;true;prefixMap;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
SortedMap out = null;
Trie in = newTrieWithMapKey((String)source());
out = in.prefixMap(null);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;Trie;true;prefixMap;;;MapValue of Argument[-1];MapValue of ReturnValue;value"
SortedMap out = null;
Trie in = newTrieWithMapValue((String)source());
out = in.prefixMap(null);
sink(getMapValue(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;TrieUtils;true;unmodifiableTrie;;;MapKey of Argument[0];MapKey of ReturnValue;value"
Trie out = null;
Trie in = newTrieWithMapKey((String)source());
out = TrieUtils.unmodifiableTrie(in);
sink(getMapKey(out)); // $ hasValueFlow
}
{
// "org.apache.commons.collections4;TrieUtils;true;unmodifiableTrie;;;MapValue of Argument[0];MapValue of ReturnValue;value"
Trie out = null;
Trie in = newTrieWithMapValue((String)source());
out = TrieUtils.unmodifiableTrie(in);
sink(getMapValue(out)); // $ hasValueFlow
}
}
}
|
Make helper functions more consistent
|
java/ql/test/library-tests/frameworks/apache-collections/Test.java
|
Make helper functions more consistent
|
<ide><path>ava/ql/test/library-tests/frameworks/apache-collections/Test.java
<ide> MyAbstractMapEntry newMAMEWithMapKey(Object element) { return new MyAbstractMapEntry(element,null); }
<ide> MyAbstractMapEntryDecorator newMAMEDWithMapKey(Object element) { return new MyAbstractMapEntryDecorator(newMAMEWithMapKey(element)); }
<ide> MultiValueMap newMVMWithMapKey(Object element) { MultiValueMap m = new MultiValueMap(); m.put(element,null); return m; }
<del> MultiValuedMap newMVdMWithMapKey(Object element) { MultiValuedMap m = new ArrayListValuedHashMap(); m.put(element,null); return m; }
<add> ArrayListValuedHashMap newALVHMWithMapKey(Object element) { ArrayListValuedHashMap m = new ArrayListValuedHashMap(); m.put(element,null); return m; }
<ide> OrderedMapIterator newOMIWithElement(Object element) { LinkedMap m = new LinkedMap(); m.put(element,null); return m.mapIterator(); }
<ide> ResourceBundle newRBWithMapKey(Object element) { return (ResourceBundle)null; }
<ide> SortedMap newTreeMapWithMapKey(Object element) { SortedMap m = new TreeMap(); m.put(element,null); return m; }
<ide> {
<ide> // "org.apache.commons.collections4;MultiMapUtils;true;transformedMultiValuedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
<ide> MultiValuedMap out = null;
<del> MultiValuedMap in = newMVdMWithMapKey((String)source());
<add> MultiValuedMap in = newALVHMWithMapKey((String)source());
<ide> out = MultiMapUtils.transformedMultiValuedMap(in, null, null);
<ide> sink(getMapKey(out)); // $ hasValueFlow
<ide> }
<ide> {
<ide> // "org.apache.commons.collections4;MultiMapUtils;true;unmodifiableMultiValuedMap;;;MapKey of Argument[0];MapKey of ReturnValue;value"
<ide> MultiValuedMap out = null;
<del> MultiValuedMap in = newMVdMWithMapKey((String)source());
<add> MultiValuedMap in = newALVHMWithMapKey((String)source());
<ide> out = MultiMapUtils.unmodifiableMultiValuedMap(in);
<ide> sink(getMapKey(out)); // $ hasValueFlow
<ide> }
<ide> {
<ide> // "org.apache.commons.collections4;MultiValuedMap;true;asMap;;;MapKey of Argument[-1];MapKey of ReturnValue;value"
<ide> Map out = null;
<del> MultiValuedMap in = newMVdMWithMapKey((String)source());
<add> MultiValuedMap in = newALVHMWithMapKey((String)source());
<ide> out = in.asMap();
<ide> sink(getMapKey(out)); // $ hasValueFlow
<ide> }
<ide> {
<ide> // "org.apache.commons.collections4;MultiValuedMap;true;keySet;;;MapKey of Argument[-1];Element of ReturnValue;value"
<ide> Set out = null;
<del> MultiValuedMap in = newMVdMWithMapKey((String)source());
<add> MultiValuedMap in = newALVHMWithMapKey((String)source());
<ide> out = in.keySet();
<ide> sink(getElement(out)); // $ hasValueFlow
<ide> }
<ide> {
<ide> // "org.apache.commons.collections4;MultiValuedMap;true;keys;;;MapKey of Argument[-1];Element of ReturnValue;value"
<ide> MultiSet out = null;
<del> MultiValuedMap in = newMVdMWithMapKey((String)source());
<add> MultiValuedMap in = newALVHMWithMapKey((String)source());
<ide> out = in.keys();
<ide> sink(getElement(out)); // $ hasValueFlow
<ide> }
<ide> {
<ide> // "org.apache.commons.collections4;MultiValuedMap;true;mapIterator;;;MapKey of Argument[-1];Element of ReturnValue;value"
<ide> MapIterator out = null;
<del> MultiValuedMap in = newMVdMWithMapKey((String)source());
<add> MultiValuedMap in = newALVHMWithMapKey((String)source());
<ide> out = in.mapIterator();
<ide> sink(getElement(out)); // $ hasValueFlow
<ide> }
<ide> {
<ide> // "org.apache.commons.collections4;MultiValuedMap;true;putAll;(MultiValuedMap);;MapKey of Argument[0];MapKey of Argument[-1];value"
<ide> MultiValuedMap out = null;
<del> MultiValuedMap in = newMVdMWithMapKey((String)source());
<add> MultiValuedMap in = newALVHMWithMapKey((String)source());
<ide> out.putAll(in);
<ide> sink(getMapKey(out)); // $ hasValueFlow
<ide> }
|
|
Java
|
apache-2.0
|
6b001a95c6ab50a79458b1b0f5f21a6270586c2f
| 0 |
artemprokopov/aprokopov
|
package ru.job4j.simplearray;
import java.util.Arrays;
/**
* Класс простого контейнера основанного на массиве(За аналог взят ArrayList).
* @param <T> тип контейнера.
*/
public class SimpleArray<T> {
/**
* Смещение максимального размера массива относительно {@link Integer#MAX_VALUE}.
*/
private static final int ARRAY_INDEX_MAX_VALUE_OFFSET = 5;
/**
* Максимальный размер массива принимается как
* {@link Integer#MAX_VALUE} - {@link SimpleArray#ARRAY_INDEX_MAX_VALUE_OFFSET}.
*/
private static final int ARRAY_MAX_SIZE = Integer.MAX_VALUE - ARRAY_INDEX_MAX_VALUE_OFFSET;
/**
* Размер которым инициализируется массив {@link SimpleArray#array} по умолчанию.
*/
private static final int ARRAY_INIT_SIZE = 10;
/**
* Хранилище элементов контейнера.
*/
private Object[] array;
/**
* Номер последнего элемента в контейнере, при пустом контейнере равен -1.
*/
private int currentItem = -1;
/**
* Текущий размер хранилища {@link SimpleArray#array}.
*/
private int size;
/**
* Конструктор по умолчанию, вызывает конструктор с параметром {@link SimpleArray#SimpleArray(int)}
* с значением {@link SimpleArray#ARRAY_INIT_SIZE}.
*/
public SimpleArray() {
this(ARRAY_INIT_SIZE);
}
/**
* Конструктор с параметром инициализации размера массива {@link SimpleArray#array}.
* @param initSize параметр инициализации размера массива {@link SimpleArray#array}.
*/
public SimpleArray(int initSize) {
this.size = initSize;
this.array = new Object[initSize];
}
/**
* Метод добавления элемента в контейнер.
* Добавляет элемент в хвост массива {@link SimpleArray#array}
* @param addItem добавляемый в контейнер элемент тип T.
* @return возвращает true если операция выполняется успешно.
*/
public boolean add(T addItem) {
checkAddSizeArray();
++currentItem;
this.array[currentItem] = addItem;
return true;
}
/**
* Добавляет элемент в контейнер по индексу, хвост массива сдвигаетс в право.
* @param indexAddItem индекс элемента вставки.
* @param addItem элемент вставки.
*/
@SuppressWarnings("unused")
private void add(int indexAddItem, T addItem) {
checkIndex(indexAddItem);
checkAddSizeArray();
copyTailArrayWhenAddItem(indexAddItem);
this.currentItem++;
array[indexAddItem] = addItem;
}
/**
* Заменяет элемент в контейнере.
* @param indexUpdateItem индекс заменяемого элемента.
* @param itemUpdate обновляемый элемент.
* @return если операция добавления завершилась успешно возвращает true.
*/
public boolean update(int indexUpdateItem, T itemUpdate) {
checkIndex(indexUpdateItem);
this.array[indexUpdateItem] = itemUpdate;
return true;
}
/**
* Удаляет элемент по индексу.
* @param indexDeleteItem индекс удаляемого элемента.
* @return в слуучае успеха возвращает удаленный элемент.
*/
@SuppressWarnings("unchecked")
public T delete(int indexDeleteItem) {
checkIndex(indexDeleteItem);
T oldValue = (T) array[indexDeleteItem];
copyTailArrayWhenDeleteItem(indexDeleteItem);
this.currentItem--;
return oldValue;
}
/**
* Удаляет первый найденный с начала контейнера элемент.
* @param deleteItem Удаляемый элемент.
* @return в случае успеха возвращает удалённый элемент, в противном случае если такого элемента нет null.
*/
@SuppressWarnings("unchecked")
public T delete(T deleteItem) {
T oldValue = null;
int indexDeleteItem = findItem(deleteItem);
if (indexDeleteItem != -1) {
oldValue = (T) array[indexDeleteItem];
copyTailArrayWhenDeleteItem(indexDeleteItem);
this.currentItem--;
}
return oldValue;
}
/**
* Возвращает элемент контейнера по индексу.
* @param indexItem индекс элемента.
* @return элемент контейнера.
*/
@SuppressWarnings("unchecked")
public T get(int indexItem) {
checkIndex(indexItem);
return (T) array[indexItem];
}
/**
* Проверка есть ли элементы в контейнере.
* @return true если контейнер не содержит ни одного элемента.
*/
public boolean isEmpty() {
return this.currentItem == -1;
}
/**
* Уменьшает размер массива {@link SimpleArray#array}
* до размера {@link SimpleArray#currentItem} + 1.
*/
public void trim() {
this.array = Arrays.copyOf(array, this.currentItem + 1);
this.size = this.currentItem + 1;
}
/**
* Виртуально число размера массива(общее число элементов размещённых в контейнере).
* @return число элементов в контейнере.
*/
public int size() {
return currentItem + 1;
}
/**
* Проверка индекса на принадлежность диапазону 0 <= i <= {@link SimpleArray#currentItem}.
* @param checkIndex проверяемый индекс.
*/
private void checkIndex(int checkIndex) {
if (isEmpty()) {
throw new IndexOutOfBoundsException(outOfBoundsMsg(checkIndex));
}
if (checkIndex > currentItem || checkIndex < 0) {
throw new IndexOutOfBoundsException(outOfBoundsMsg(checkIndex));
}
}
/**
* Поиск элемента в контейнере.
* @param searchItem искомый элемент.
* @return индекс найденного элемента, в противном случае если элемент не найден -1.
*/
private int findItem(T searchItem) {
if (isEmpty()) {
return -1;
}
for (int i = 0; i <= currentItem; i++) {
if (searchItem.equals(array[i])) {
return i;
}
}
return -1;
}
/**
* Проверка на то что размер массива {@link SimpleArray#array} позволяет провести добавление элемента
* если не позволяет, массив расширяется.
*/
private void checkAddSizeArray() {
if (currentItem + 1 >= size) {
size = checkMaxSizeArray();
array = Arrays.copyOf(array, size);
}
}
/**
* Проверка на то что размер массива {@link SimpleArray#array} не выходит за максимально возможный размер
* {@link SimpleArray#ARRAY_MAX_SIZE}, если не выходит возвращает новый возможный размер массива.
* Если увеличение размера не возможно, то генерирует исключение {@link OutOfMemoryError}
* @return новый допустимый размер массива.
*/
private int checkMaxSizeArray() {
if (ARRAY_MAX_SIZE - (currentItem + 1) == 0) {
throw new OutOfMemoryError("The array index is greater than the maximum possible values");
}
int newSize = size + size;
return ARRAY_MAX_SIZE - size > size ? newSize : ARRAY_MAX_SIZE;
}
/**
* Копирует хвост массива начиная indexDeleteItem + 1 на один элемент влево.
* @param indexDeleteItem индекс удаляемого элемента.
*/
private void copyTailArrayWhenDeleteItem(int indexDeleteItem) {
System.arraycopy(array, indexDeleteItem + 1,
array, indexDeleteItem, currentItem + 1 - indexDeleteItem + 1);
}
/**
* Копирует хвост массива начиная indexAddItem на один элемент влево, освобождает элемент для добавления нового.
* @param indexAddItem индекс добавляемого элемента.
*/
private void copyTailArrayWhenAddItem(int indexAddItem) {
System.arraycopy(array, indexAddItem,
array, indexAddItem + 1, currentItem + 1 - indexAddItem + 1);
}
/**
* Метод формирует сообщение для генерируемых исключений в методе {@link SimpleArray#checkIndex(int)}.
* @param index индекс для формирования строки сообщения.
* @return сформированную строку.
*/
private String outOfBoundsMsg(int index) {
return "Index: " + index + ", Size: " + (currentItem + 1);
}
}
|
chapter_005/src/main/java/ru/job4j/simplearray/SimpleArray.java
|
package ru.job4j.simplearray;
import java.util.Arrays;
/**
* Клас простого контейнера основанного на массиве(За аналог взят ArrayList).
* @param <T> тип контейнера.
*/
public class SimpleArray<T> {
/**
* Смещение максимального размера массива относительно {@link Integer#MAX_VALUE}.
*/
private static final int ARRAY_INDEX_MAX_VALUE_OFFSET = 5;
/**
* Максимальный размер массива принимается как
* {@link Integer#MAX_VALUE} - {@link SimpleArray#ARRAY_INDEX_MAX_VALUE_OFFSET}.
*/
private static final int ARRAY_MAX_SIZE = Integer.MAX_VALUE - ARRAY_INDEX_MAX_VALUE_OFFSET;
/**
* Размер которым инициализируется массив {@link SimpleArray#array} по умолчанию.
*/
private static final int ARRAY_INIT_SIZE = 10;
/**
* Хранилище элементов контейнера.
*/
private Object[] array;
/**
* Номер последнего элемента в контейнере, при пустом контейнере равен -1.
*/
private int currentItem = -1;
/**
* Текущий размер хранилища {@link SimpleArray#array}.
*/
private int size;
/**
* Конструктор по умолчанию, вызывает конструктор с параметром {@link SimpleArray#SimpleArray(int)}
* с значением {@link SimpleArray#ARRAY_INIT_SIZE}.
*/
public SimpleArray() {
this(ARRAY_INIT_SIZE);
}
/**
* Конструктор с параметром инициализации размера массива {@link SimpleArray#array}.
* @param initSize параметр инициализации размера массива {@link SimpleArray#array}.
*/
public SimpleArray(int initSize) {
this.size = initSize;
this.array = new Object[initSize];
}
/**
* Метод добавления элемента в контейнер.
* Добавляет элемент в хвост массива {@link SimpleArray#array}
* @param addItem добавляемый в контейнер элемент тип T.
* @return возвращает true если операция выполняется успешно.
*/
public boolean add(T addItem) {
checkAddSizeArray();
++currentItem;
this.array[currentItem] = addItem;
return true;
}
/**
* Добавляет элемент в конрейнер по индексу, хвост массива сдвигаетс в право.
* @param indexAddItem индекс элемента вставки.
* @param addItem элемент вставки.
*/
@SuppressWarnings("unused")
private void add(int indexAddItem, T addItem) {
checkIndex(indexAddItem);
checkAddSizeArray();
copyTailArrayWhenAddItem(indexAddItem);
this.currentItem++;
array[indexAddItem] = addItem;
}
/**
* Заменяет элеммент в контейнере.
* @param indexUpdateItem индекс заменяемого элемента.
* @param itemUpdate обновляемы элемент.
* @return если операция добавления завершилась успешно возвращает true.
*/
public boolean update(int indexUpdateItem, T itemUpdate) {
checkIndex(indexUpdateItem);
this.array[indexUpdateItem] = itemUpdate;
return true;
}
/**
* Удаляет элемент по индексу.
* @param indexDeleteItem индекс удаляемого элемента.
* @return в слуучае успеха возвращает удаленный элемент.
*/
@SuppressWarnings("unchecked")
public T delete(int indexDeleteItem) {
checkIndex(indexDeleteItem);
T oldValue = (T) array[indexDeleteItem];
copyTailArrayWhenDeleteItem(indexDeleteItem);
this.currentItem--;
return oldValue;
}
/**
* Удаляет первый найденный с началаконтейнера элемент.
* @param deleteItem Удаляемый элемент.
* @return в случае успеха возвращает удаленный элемент, в противном случае если такого элемента нет null.
*/
@SuppressWarnings("unchecked")
public T delete(T deleteItem) {
T oldValue = null;
int indexDeleteItem = findItem(deleteItem);
if (indexDeleteItem != -1) {
oldValue = (T) array[indexDeleteItem];
copyTailArrayWhenDeleteItem(indexDeleteItem);
this.currentItem--;
}
return oldValue;
}
/**
* Возвращает элемент контейнера по индексу.
* @param indexItem индекс элемента.
* @return элемент контейнера.
*/
@SuppressWarnings("unchecked")
public T get(int indexItem) {
checkIndex(indexItem);
return (T) array[indexItem];
}
/**
* Проверка есть ли элементы в контейнере.
* @return true если контейнер не содержит ни одного элемента.
*/
public boolean isEmpty() {
return this.currentItem == -1;
}
/**
* Уменьшает размер массива {@link SimpleArray#array}
* до размера {@link SimpleArray#currentItem} + 1.
*/
public void trim() {
this.array = Arrays.copyOf(array, this.currentItem + 1);
this.size = this.currentItem + 1;
}
/**
* Виртуально число размера массива(общее число элементов размещенных в контейнере).
* @return число элементов в контейнере.
*/
public int size() {
return currentItem + 1;
}
/**
* Проверка индекса на принадлежность диапазону 0 <= i <= {@link SimpleArray#currentItem}.
* @param checkIndex проверяемый индекс.
*/
private void checkIndex(int checkIndex) {
if (isEmpty()) {
throw new IndexOutOfBoundsException(outOfBoundsMsg(checkIndex));
}
if (checkIndex > currentItem || checkIndex < 0) {
throw new IndexOutOfBoundsException(outOfBoundsMsg(checkIndex));
}
}
/**
* Поиск элемента в контейнере.
* @param searchItem искомый элемент.
* @return индекс найденого элемента, в противном случае если элемент не найден -1.
*/
private int findItem(T searchItem) {
if (isEmpty()) {
return -1;
}
for (int i = 0; i <= currentItem; i++) {
if (searchItem.equals(array[i])) {
return i;
}
}
return -1;
}
/**
* Провернка на то что размер массива {@link SimpleArray#array} позволяет провести добавление элемента
* если не позволяет, массив расширяется.
*/
private void checkAddSizeArray() {
if (currentItem + 1 >= size) {
size = checkMaxSizeArray();
array = Arrays.copyOf(array, size);
}
}
/**
* Провернка на то что размер массива {@link SimpleArray#array} не выходит за максимально возможный размер
* {@link SimpleArray#ARRAY_MAX_SIZE}, если не выходит возвращает новый возможный размер массива.
* Если увеличение размара не возможно, то генерирует исключение {@link OutOfMemoryError}
* @return новый допустимый размер массива.
*/
private int checkMaxSizeArray() {
if (ARRAY_MAX_SIZE - (currentItem + 1) == 0) {
throw new OutOfMemoryError("The array index is greater than the maximum possible values");
}
int newSize = size + size;
return ARRAY_MAX_SIZE - size > size ? newSize : ARRAY_MAX_SIZE;
}
/**
* Копирует хвост массива начиная indexDeleteItem + 1 на один эемент в лево.
* @param indexDeleteItem индекс удаляемого элемента.
*/
private void copyTailArrayWhenDeleteItem(int indexDeleteItem) {
System.arraycopy(array, indexDeleteItem + 1,
array, indexDeleteItem, currentItem + 1 - indexDeleteItem + 1);
}
/**
* Копирует хвост массива начиная indexAddItem на один эемент в лево, осовобождает элемент для добавления нового.
* @param indexAddItem индекс добавляемого элемента.
*/
private void copyTailArrayWhenAddItem(int indexAddItem) {
System.arraycopy(array, indexAddItem,
array, indexAddItem + 1, currentItem + 1 - indexAddItem + 1);
}
/**
* Метод формирует сообщение для генерируемых исключений в методе {@link SimpleArray#checkIndex(int)}.
* @param index индекс для формированя строки сообщения.
* @return сформированную строку.
*/
private String outOfBoundsMsg(int index) {
return "Index: " + index + ", Size: " + (currentItem + 1);
}
}
|
Исправление орфографических ошибок.
|
chapter_005/src/main/java/ru/job4j/simplearray/SimpleArray.java
|
Исправление орфографических ошибок.
|
<ide><path>hapter_005/src/main/java/ru/job4j/simplearray/SimpleArray.java
<ide> import java.util.Arrays;
<ide>
<ide> /**
<del> * Клас простого контейнера основанного на массиве(За аналог взят ArrayList).
<add> * Класс простого контейнера основанного на массиве(За аналог взят ArrayList).
<ide> * @param <T> тип контейнера.
<ide> */
<ide> public class SimpleArray<T> {
<ide> }
<ide>
<ide> /**
<del> * Добавляет элемент в конрейнер по индексу, хвост массива сдвигаетс в право.
<add> * Добавляет элемент в контейнер по индексу, хвост массива сдвигаетс в право.
<ide> * @param indexAddItem индекс элемента вставки.
<ide> * @param addItem элемент вставки.
<ide> */
<ide> }
<ide>
<ide> /**
<del> * Заменяет элеммент в контейнере.
<add> * Заменяет элемент в контейнере.
<ide> * @param indexUpdateItem индекс заменяемого элемента.
<del> * @param itemUpdate обновляемы элемент.
<add> * @param itemUpdate обновляемый элемент.
<ide> * @return если операция добавления завершилась успешно возвращает true.
<ide> */
<ide> public boolean update(int indexUpdateItem, T itemUpdate) {
<ide> }
<ide>
<ide> /**
<del> * Удаляет первый найденный с началаконтейнера элемент.
<add> * Удаляет первый найденный с начала контейнера элемент.
<ide> * @param deleteItem Удаляемый элемент.
<del> * @return в случае успеха возвращает удаленный элемент, в противном случае если такого элемента нет null.
<add> * @return в случае успеха возвращает удалённый элемент, в противном случае если такого элемента нет null.
<ide> */
<ide> @SuppressWarnings("unchecked")
<ide> public T delete(T deleteItem) {
<ide> checkIndex(indexItem);
<ide> return (T) array[indexItem];
<ide> }
<del>
<del> /**
<del> * Проверка есть ли элементы в контейнере.
<del> * @return true если контейнер не содержит ни одного элемента.
<del> */
<del> public boolean isEmpty() {
<del> return this.currentItem == -1;
<del> }
<del>
<del> /**
<add> /**
<add> * Проверка есть ли элементы в контейнере.
<add> * @return true если контейнер не содержит ни одного элемента.
<add> */
<add> public boolean isEmpty() {
<add> return this.currentItem == -1;
<add> }
<add>
<add> /**
<ide> * Уменьшает размер массива {@link SimpleArray#array}
<ide> * до размера {@link SimpleArray#currentItem} + 1.
<ide> */
<ide> }
<ide>
<ide> /**
<del> * Виртуально число размера массива(общее число элементов размещенных в контейнере).
<add> * Виртуально число размера массива(общее число элементов размещённых в контейнере).
<ide> * @return число элементов в контейнере.
<ide> */
<ide> public int size() {
<ide> /**
<ide> * Поиск элемента в контейнере.
<ide> * @param searchItem искомый элемент.
<del> * @return индекс найденого элемента, в противном случае если элемент не найден -1.
<add> * @return индекс найденного элемента, в противном случае если элемент не найден -1.
<ide> */
<ide> private int findItem(T searchItem) {
<ide> if (isEmpty()) {
<ide> }
<ide>
<ide> /**
<del> * Провернка на то что размер массива {@link SimpleArray#array} позволяет провести добавление элемента
<add> * Проверка на то что размер массива {@link SimpleArray#array} позволяет провести добавление элемента
<ide> * если не позволяет, массив расширяется.
<ide> */
<ide> private void checkAddSizeArray() {
<ide> }
<ide>
<ide> /**
<del> * Провернка на то что размер массива {@link SimpleArray#array} не выходит за максимально возможный размер
<add> * Проверка на то что размер массива {@link SimpleArray#array} не выходит за максимально возможный размер
<ide> * {@link SimpleArray#ARRAY_MAX_SIZE}, если не выходит возвращает новый возможный размер массива.
<del> * Если увеличение размара не возможно, то генерирует исключение {@link OutOfMemoryError}
<add> * Если увеличение размера не возможно, то генерирует исключение {@link OutOfMemoryError}
<ide> * @return новый допустимый размер массива.
<ide> */
<ide> private int checkMaxSizeArray() {
<ide> }
<ide>
<ide> /**
<del> * Копирует хвост массива начиная indexDeleteItem + 1 на один эемент в лево.
<add> * Копирует хвост массива начиная indexDeleteItem + 1 на один элемент влево.
<ide> * @param indexDeleteItem индекс удаляемого элемента.
<ide> */
<ide> private void copyTailArrayWhenDeleteItem(int indexDeleteItem) {
<ide> }
<ide>
<ide> /**
<del> * Копирует хвост массива начиная indexAddItem на один эемент в лево, осовобождает элемент для добавления нового.
<add> * Копирует хвост массива начиная indexAddItem на один элемент влево, освобождает элемент для добавления нового.
<ide> * @param indexAddItem индекс добавляемого элемента.
<ide> */
<ide> private void copyTailArrayWhenAddItem(int indexAddItem) {
<ide>
<ide> /**
<ide> * Метод формирует сообщение для генерируемых исключений в методе {@link SimpleArray#checkIndex(int)}.
<del> * @param index индекс для формированя строки сообщения.
<add> * @param index индекс для формирования строки сообщения.
<ide> * @return сформированную строку.
<ide> */
<ide> private String outOfBoundsMsg(int index) {
|
|
Java
|
apache-2.0
|
error: pathspec 'org.jenetics.example/src/main/java/org/jenetics/example/CodecExample.java' did not match any file(s) known to git
|
d0cbb8a9e674f53376560587c7432373bc8cf208
| 1 |
jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics
|
/*
* Java Genetic Algorithm Library (@__identifier__@).
* Copyright (c) @__year__@ Franz Wilhelmstötter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author:
* Franz Wilhelmstötter ([email protected])
*/
package org.jenetics.example;
import org.jenetics.DoubleChromosome;
import org.jenetics.DoubleGene;
import org.jenetics.Genotype;
import org.jenetics.engine.Codec;
import org.jenetics.engine.Engine;
import org.jenetics.engine.EvolutionResult;
import org.jenetics.util.DoubleRange;
import org.jenetics.util.IntRange;
import org.jenetics.util.LongRange;
/**
* @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a>
* @version !__version__!
* @since !__version__!
*/
public class CodecExample {

	// Plain value holder for the three problem parameters. The GA machinery
	// never touches this type directly; only the Codec maps to and from it.
	final static class Tuple {
		final int _1;
		final long _2;
		final double _3;

		Tuple(final int v1, final long v2, final double v3) {
			_1 = v1;
			_2 = v2;
			_3 = v3;
		}
	}

	// The fitness function: pure domain code, fully decoupled from any GA types.
	static double f(final Tuple param) {
		final double fitness = param._1 + param._2 + param._3;
		return fitness;
	}

	// Every detail of encoding/decoding the problem domain is defined here,
	// in ONE place: one double chromosome per tuple component.
	static Codec<Tuple, DoubleGene> codec(
		final IntRange v1Domain,
		final LongRange v2Domain,
		final DoubleRange v3Domain
	) {
		final DoubleRange r1 = DoubleRange.of(v1Domain.getMin(), v1Domain.getMax());
		final DoubleRange r2 = DoubleRange.of(v2Domain.getMin(), v2Domain.getMax());
		return Codec.of(
			Genotype.of(
				DoubleChromosome.of(r1),
				DoubleChromosome.of(r2),
				DoubleChromosome.of(v3Domain)
			),
			gt -> {
				// Decode each chromosome back into its natural component type.
				final int v1 = gt.getChromosome(0).getGene().intValue();
				final long v2 = gt.getChromosome(1).getGene().longValue();
				final double v3 = gt.getChromosome(2).getGene().doubleValue();
				return new Tuple(v1, v2, v3);
			}
		);
	}

	public static void main(final String[] args) {
		// The value ranges the fitness function is defined over.
		final IntRange range1 = IntRange.of(0, 100);
		final LongRange range2 = LongRange.of(0, 1_000_000_000_000L);
		final DoubleRange range3 = DoubleRange.of(0, 1);

		// Problem-domain encoder/decoder, built once and shared.
		final Codec<Tuple, DoubleGene> codec = codec(range1, range2, range3);

		final Engine<DoubleGene, Double> engine = Engine
			.builder(CodecExample::f, codec)
			.build();

		final Genotype<DoubleGene> best = engine.stream()
			.limit(100)
			.collect(EvolutionResult.toBestGenotype());

		final Tuple param = codec.decoder().apply(best);
		System.out.println(param);
	}

}
|
org.jenetics.example/src/main/java/org/jenetics/example/CodecExample.java
|
Add 'Codec' example.
|
org.jenetics.example/src/main/java/org/jenetics/example/CodecExample.java
|
Add 'Codec' example.
|
<ide><path>rg.jenetics.example/src/main/java/org/jenetics/example/CodecExample.java
<add>/*
<add> * Java Genetic Algorithm Library (@__identifier__@).
<add> * Copyright (c) @__year__@ Franz Wilhelmstötter
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> *
<add> * Author:
<add> * Franz Wilhelmstötter ([email protected])
<add> */
<add>package org.jenetics.example;
<add>
<add>import org.jenetics.DoubleChromosome;
<add>import org.jenetics.DoubleGene;
<add>import org.jenetics.Genotype;
<add>import org.jenetics.engine.Codec;
<add>import org.jenetics.engine.Engine;
<add>import org.jenetics.engine.EvolutionResult;
<add>import org.jenetics.util.DoubleRange;
<add>import org.jenetics.util.IntRange;
<add>import org.jenetics.util.LongRange;
<add>
<add>/**
<add> * @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a>
<add> * @version !__version__!
<add> * @since !__version__!
<add> */
<add>public class CodecExample {
<add>
<add> // The domain class
<add> final static class Tuple {
<add> final int _1;
<add> final long _2;
<add> final double _3;
<add>
<add> Tuple(final int v1, final long v2, final double v3) {
<add> _1 = v1;
<add> _2 = v2;
<add> _3 = v3;
<add> }
<add> }
<add>
<add> // The fitness function. No need to know anything about GAs. Decoupling of
<add> // the problem function from the GA usage.
<add> static double f(final Tuple param) {
<add> return param._1 + param._2 + param._3;
<add> }
<add>
<add> // The encoding/decoding of the problem domain is defined at ONE place.
<add> static Codec<Tuple, DoubleGene> codec(
<add> final IntRange v1Domain,
<add> final LongRange v2Domain,
<add> final DoubleRange v3Domain
<add> ) {
<add> return Codec.of(
<add> Genotype.of(
<add> DoubleChromosome.of(DoubleRange.of(v1Domain.getMin(), v1Domain.getMax())),
<add> DoubleChromosome.of(DoubleRange.of(v2Domain.getMin(), v2Domain.getMax())),
<add> DoubleChromosome.of(v3Domain)
<add> ),
<add> gt -> new Tuple(
<add> gt.getChromosome(0).getGene().intValue(),
<add> gt.getChromosome(1).getGene().longValue(),
<add> gt.getChromosome(2).getGene().doubleValue()
<add> )
<add> );
<add> }
<add>
<add> public static void main(final String[] args) {
<add> // The domain of your fitness function.
<add> final IntRange domain1 = IntRange.of(0, 100);
<add> final LongRange domain2 = LongRange.of(0, 1_000_000_000_000L);
<add> final DoubleRange domain3 = DoubleRange.of(0, 1);
<add>
<add> // The problem domain encoder/decoder.
<add> final Codec<Tuple, DoubleGene> codec = codec(domain1, domain2, domain3);
<add>
<add> final Engine<DoubleGene, Double> engine = Engine
<add> .builder(CodecExample::f, codec)
<add> .build();
<add>
<add> final Genotype<DoubleGene> gt = engine.stream()
<add> .limit(100)
<add> .collect(EvolutionResult.toBestGenotype());
<add>
<add> final Tuple param = codec.decoder().apply(gt);
<add> System.out.println(param);
<add> }
<add>
<add>}
|
|
Java
|
apache-2.0
|
5944f51977ff72108be240d9aa76f6903f9a8591
| 0 |
mathemage/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,spennihana/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,spennihana/h2o-3,mathemage/h2o-3,h2oai/h2o-3,h2oai/h2o-3,spennihana/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,mathemage/h2o-3
|
package ai.h2o.automl;
import hex.Model;
import hex.ModelMetrics;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.util.Log;
import java.util.Arrays;
import java.util.List;
import static water.DKV.getGet;
import static water.Key.make;
/**
* Utility to track all the models built for a given dataset type.
* <p>
* Note that if a new Leaderboard is made for the same project it'll
* keep using the old model list, which allows us to run AutoML multiple
* times and keep adding to the leaderboard.
* <p>
* TODO: make this robust against removal of models from the DKV.
*/
public class Leaderboard extends Keyed {
/**
* Identifier for the models that should be grouped together in the leaderboard
* (e.g., "airlines" and "iris").
*/
private final String project;
/**
* List of models for this leaderboard, sorted by metric so that the best is on top,
* according to the standard metric for the given model type. NOTE: callers should
* access this through #models() to make sure they don't get a stale copy.
*/
private Key<Model>[] models;
/**
* Metric used to sort this leaderboard.
*/
private String metric;
/** HIDEME! */
private Leaderboard() {
throw new NotImplementedException();
}
/**
*
*/
public Leaderboard(String project) {
super(make("AutoML_Leaderboard_" + project, (byte) 0, (byte) 2 /*builtin key*/, false));
this.project = project;
Leaderboard old = DKV.getGet(this._key);
if (null == old) {
this.models = new Key[0];
DKV.put(this);
}
}
public String getProject() {
return project;
}
public void addModels(final Key<Model>[] newModels) {
if (null == this._key)
throw new H2OIllegalArgumentException("Can't add models to a Leaderboard which isn't in the DKV.");
new TAtomic<Leaderboard>() {
@Override
public Leaderboard atomic(Leaderboard old) {
if (old == null) old = new Leaderboard();
Key<Model>[] oldModels = old.models;
old.models = new Key[oldModels.length + newModels.length];
System.arraycopy(oldModels, 0, old.models, 0, oldModels.length);
System.arraycopy(newModels, 0, old.models, oldModels.length, newModels.length);
Model m = DKV.getGet(old.models[0]);
// Sort by metric.
// TODO: allow the metric to be passed in. Note that this assumes the validation (or training) frame is the same.
// If we want to train on different frames and then compare we need to score all the models and sort on the new metrics.
List<Key<Model>> newModelsSorted = null;
try {
if (m._output.isBinomialClassifier())
newModelsSorted = ModelMetrics.sortModelsByMetric("auc", true, Arrays.asList(old.models));
else if (m._output.isClassifier())
newModelsSorted = ModelMetrics.sortModelsByMetric("mean_per_class_error", false, Arrays.asList(old.models));
else if (m._output.isSupervised())
newModelsSorted = ModelMetrics.sortModelsByMetric("mean_residual_deviance", false, Arrays.asList(old.models));
}
catch (H2OIllegalArgumentException e) {
Log.warn("ModelMetrics.sortModelsByMetric failed: " + e);
throw e;
}
old.models = newModelsSorted.toArray(new Key[0]);
return old;
} // atomic
}.invoke(this._key);
}
public void addModel(final Key<Model> key) {
Key<Model>keys[] = new Key[1];
keys[0] = key;
addModels(keys);
}
public void addModel(final Model model) {
Key<Model>keys[] = new Key[1];
keys[0] = model._key;
addModels(keys);
}
private static Model[] modelsForModelKeys(Key<Model>[] modelKeys, Model[] models) {
assert models.length >= modelKeys.length;
int i = 0;
for (Key<Model> modelKey : modelKeys)
models[i++] = getGet(modelKey);
return models;
}
/**
* @return list of keys of models sorted by the default metric for the model category, fetched from the DKV
*/
public Key<Model>[] modelKeys() {
return ((Leaderboard)DKV.getGet(this._key)).models;
}
/**
* @return list of models sorted by the default metric for the model category
*/
public Model[] models() {
Key<Model>[] modelKeys = modelKeys();
if (modelKeys == null || 0 == modelKeys.length) return new Model[0];
Model[] models = new Model[modelKeys.length];
return modelsForModelKeys(modelKeys, models);
}
public Model leader() {
Key<Model>[] modelKeys = modelKeys();
if (modelKeys == null || 0 == modelKeys.length) return null;
return modelKeys[0].get();
}
/**
* Delete everything in the DKV that this points to. We currently need to be able to call this after deleteWithChildren().
*/
public void delete() {
remove();
}
public void deleteWithChildren() {
for (Model m : models())
m.delete();
delete();
}
public static String toString(Model[] models) {
return toString(null, models, "\n");
}
public static String toString(String project, Model[] models) {
return toString(project, models, "\n");
}
public static String toString(String project, Model[] models, String separator) {
StringBuilder sb = new StringBuilder("Leaderboard for project \"" + project + "\": ");
if (models.length == 0) {
sb.append("<empty>");
return sb.toString();
}
sb.append(separator);
for (Model m : models) {
sb.append(m._key.toString());
sb.append(" ");
// TODO: allow the metric to be passed in. Note that this assumes the validation (or training) frame is the same.
// TODO: if validation metrics are available, print those.
if (m._output.isBinomialClassifier()) {
sb.append("auc: ");
sb.append(m.auc());
} else if (m._output.isClassifier()) {
sb.append("mean per class error: ");
sb.append(m.mean_per_class_error());
} else if (m._output.isSupervised()) {
sb.append("mean residual deviance: ");
sb.append(m.deviance());
}
sb.append(separator);
}
return sb.toString();
}
public String toString(String separator) {
return toString(project, models(), separator);
}
@Override
public String toString() {
return toString(" | ");
}
}
|
h2o-automl/src/main/java/ai/h2o/automl/Leaderboard.java
|
package ai.h2o.automl;
import hex.Model;
import hex.ModelMetrics;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.util.Log;
import java.util.Arrays;
import java.util.List;
import static water.DKV.getGet;
import static water.Key.make;
/**
* Utility to track all the models built for a given dataset type.
* <p>
* Note that if a new Leaderboard is made for the same project it'll
* keep using the old model list, which allows us to run AutoML multiple
* times and keep adding to the leaderboard.
* <p>
* TODO: make this robust against removal of models from the DKV.
*/
public class Leaderboard extends Keyed {
/**
* Identifier for the models that should be grouped together in the leaderboard
* (e.g., "airlines" and "iris").
*/
private final String project;
/**
* List of models for this leaderboard, sorted by metric so that the best is on top,
* according to the standard metric for the given model type. NOTE: callers should
* access this through #models() to make sure they don't get a stale copy.
*/
private Key<Model>[] models;
/**
* Metric used to sort this leaderboard.
*/
private String metric;
/** HIDEME! */
private Leaderboard() {
throw new NotImplementedException();
}
/**
*
*/
public Leaderboard(String project) {
super(make("AutoML_Leaderboard_" + project, (byte) 0, (byte) 2 /*builtin key*/, false));
this.project = project;
this.models = new Key[0];
DKV.put(this);
}
public String getProject() {
return project;
}
public void addModels(final Key<Model>[] newModels) {
if (null == this._key)
throw new H2OIllegalArgumentException("Can't add models to a Leaderboard which isn't in the DKV.");
new TAtomic<Leaderboard>() {
@Override
public Leaderboard atomic(Leaderboard old) {
if (old == null) old = new Leaderboard();
Key<Model>[] oldModels = old.models;
old.models = new Key[oldModels.length + newModels.length];
System.arraycopy(oldModels, 0, old.models, 0, oldModels.length);
System.arraycopy(newModels, 0, old.models, oldModels.length, newModels.length);
Model m = DKV.getGet(old.models[0]);
// Sort by metric.
// TODO: allow the metric to be passed in. Note that this assumes the validation (or training) frame is the same.
// If we want to train on different frames and then compare we need to score all the models and sort on the new metrics.
List<Key<Model>> newModelsSorted = null;
try {
if (m._output.isBinomialClassifier())
newModelsSorted = ModelMetrics.sortModelsByMetric("auc", true, Arrays.asList(old.models));
else if (m._output.isClassifier())
newModelsSorted = ModelMetrics.sortModelsByMetric("mean_per_class_error", false, Arrays.asList(old.models));
else if (m._output.isSupervised())
newModelsSorted = ModelMetrics.sortModelsByMetric("mean_residual_deviance", false, Arrays.asList(old.models));
}
catch (H2OIllegalArgumentException e) {
Log.warn("ModelMetrics.sortModelsByMetric failed: " + e);
throw e;
}
old.models = newModelsSorted.toArray(new Key[0]);
return old;
} // atomic
}.invoke(this._key);
}
public void addModel(final Key<Model> key) {
Key<Model>keys[] = new Key[1];
keys[0] = key;
addModels(keys);
}
public void addModel(final Model model) {
Key<Model>keys[] = new Key[1];
keys[0] = model._key;
addModels(keys);
}
private static Model[] modelsForModelKeys(Key<Model>[] modelKeys, Model[] models) {
assert models.length >= modelKeys.length;
int i = 0;
for (Key<Model> modelKey : modelKeys)
models[i++] = getGet(modelKey);
return models;
}
/**
* @return list of keys of models sorted by the default metric for the model category, fetched from the DKV
*/
public Key<Model>[] modelKeys() {
return ((Leaderboard)DKV.getGet(this._key)).models;
}
/**
* @return list of models sorted by the default metric for the model category
*/
public Model[] models() {
Key<Model>[] modelKeys = modelKeys();
if (modelKeys == null || 0 == modelKeys.length) return new Model[0];
Model[] models = new Model[modelKeys.length];
return modelsForModelKeys(modelKeys, models);
}
public Model leader() {
Key<Model>[] modelKeys = modelKeys();
if (modelKeys == null || 0 == modelKeys.length) return null;
return modelKeys[0].get();
}
/**
* Delete everything in the DKV that this points to. We currently need to be able to call this after deleteWithChildren().
*/
public void delete() {
remove();
}
public void deleteWithChildren() {
for (Model m : models())
m.delete();
delete();
}
public static String toString(Model[] models) {
return toString(null, models, "\n");
}
public static String toString(String project, Model[] models) {
return toString(project, models, "\n");
}
public static String toString(String project, Model[] models, String separator) {
StringBuilder sb = new StringBuilder("Leaderboard for project \"" + project + "\": ");
if (models.length == 0) {
sb.append("<empty>");
return sb.toString();
}
sb.append(separator);
for (Model m : models) {
sb.append(m._key.toString());
sb.append(" ");
// TODO: allow the metric to be passed in. Note that this assumes the validation (or training) frame is the same.
// TODO: if validation metrics are available, print those.
if (m._output.isBinomialClassifier()) {
sb.append("auc: ");
sb.append(m.auc());
} else if (m._output.isClassifier()) {
sb.append("mean per class error: ");
sb.append(m.mean_per_class_error());
} else if (m._output.isSupervised()) {
sb.append("mean residual deviance: ");
sb.append(m.deviance());
}
sb.append(separator);
}
return sb.toString();
}
public String toString(String separator) {
return toString(project, models(), separator);
}
@Override
public String toString() {
return toString(" | ");
}
}
|
AUTOML-121: allow leaderboards to accumulate across runs again.
|
h2o-automl/src/main/java/ai/h2o/automl/Leaderboard.java
|
AUTOML-121: allow leaderboards to accumulate across runs again.
|
<ide><path>2o-automl/src/main/java/ai/h2o/automl/Leaderboard.java
<ide> public Leaderboard(String project) {
<ide> super(make("AutoML_Leaderboard_" + project, (byte) 0, (byte) 2 /*builtin key*/, false));
<ide> this.project = project;
<del> this.models = new Key[0];
<del> DKV.put(this);
<add>
<add> Leaderboard old = DKV.getGet(this._key);
<add>
<add> if (null == old) {
<add> this.models = new Key[0];
<add> DKV.put(this);
<add> }
<ide> }
<ide>
<ide> public String getProject() {
|
|
JavaScript
|
apache-2.0
|
39e71d8c6e6a3f847cdff99df7631a00e40c6ce0
| 0 |
Zodia/poc-tijari-mobile,Zodia/poc-tijari-mobile,Zodia/poc-tijari-mobile,Zodia/poc-tijari-mobile,Zodia/poc-tijari-mobile,Zodia/poc-tijari-mobile,Zodia/poc-tijari-mobile
|
$(".applications").click(function() {
loadPage ("apps-list-view.html");
});
$(".new-mortgage").click(function() {
loadPage ("customers-list-view.html");
});
$(".mortgage-simulator").click(function() {
loadPage ("mortgage-simulator-view.html");
});
$(".refinancing").click(function() {
//loadPage ("mortgage-center-view.html");
});
|
attijari-poc-mobile-app/apps/attijari_poc_mobile_app/common/js/pages/mortgage-center-view.js
|
/**
*
*/
$("#existingapp").click(function() {
loadPage("../pages/apps-list-view.html");
});
$("#newmortgages").click(function() {
loadPage("../pages/customers-list-view.html");
});
$("#mortgagesimulator").click(function() {
loadPage("../pages/mortgage-simulator-view.html");
});
$("#mortgages").click(function() {
loadPage("../spages/mortgage-center-view.html");
});
|
Update mortgage-center-view.js
|
attijari-poc-mobile-app/apps/attijari_poc_mobile_app/common/js/pages/mortgage-center-view.js
|
Update mortgage-center-view.js
|
<ide><path>ttijari-poc-mobile-app/apps/attijari_poc_mobile_app/common/js/pages/mortgage-center-view.js
<del>/**
<del> *
<del> */
<del>
<del>$("#existingapp").click(function() {
<del>
<del>loadPage("../pages/apps-list-view.html");
<del>
<add>$(".applications").click(function() {
<add> loadPage ("apps-list-view.html");
<ide> });
<ide>
<add>$(".new-mortgage").click(function() {
<add> loadPage ("customers-list-view.html");
<add>});
<ide>
<add>$(".mortgage-simulator").click(function() {
<add> loadPage ("mortgage-simulator-view.html");
<add>});
<ide>
<del>$("#newmortgages").click(function() {
<del>
<del> loadPage("../pages/customers-list-view.html");
<del>
<del> });
<del>
<del>$("#mortgagesimulator").click(function() {
<del>
<del> loadPage("../pages/mortgage-simulator-view.html");
<del>
<del> });
<del>
<del>
<del>
<del>$("#mortgages").click(function() {
<del>
<del>loadPage("../spages/mortgage-center-view.html");
<del>
<del>
<add>$(".refinancing").click(function() {
<add> //loadPage ("mortgage-center-view.html");
<ide> });
|
|
Java
|
apache-2.0
|
93a3cb82385954392d60bbd26bcdcf440c6d7164
| 0 |
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
|
// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.provision;
import com.yahoo.collections.AbstractFilteringList;
import com.yahoo.component.Version;
import com.yahoo.config.provision.ApplicationId;
import com.yahoo.config.provision.ClusterSpec;
import com.yahoo.config.provision.NodeResources;
import com.yahoo.config.provision.NodeType;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.stream.Collectors.collectingAndThen;
/**
* A filterable node list. The result of a filter operation is immutable.
*
* @author bratseth
* @author mpolden
*/
public class NodeList extends AbstractFilteringList<Node, NodeList> {
protected NodeList(List<Node> nodes, boolean negate) {
super(nodes, negate, NodeList::new);
}
/** Returns the subset of nodes which are retired */
public NodeList retired() {
return matching(node -> node.allocation().isPresent() && node.allocation().get().membership().retired());
}
/** Returns the subset of nodes that are being deprovisioned */
public NodeList deprovisioning() {
return matching(node -> node.status().wantToRetire() && node.status().wantToDeprovision());
}
/** Returns the subset of nodes which are removable */
public NodeList removable() {
return matching(node -> node.allocation().isPresent() && node.allocation().get().isRemovable());
}
/** Returns the subset of nodes having exactly the given resources */
public NodeList resources(NodeResources resources) { return matching(node -> node.resources().equals(resources)); }
/** Returns the subset of nodes which satisfy the given resources */
public NodeList satisfies(NodeResources resources) { return matching(node -> node.resources().satisfies(resources)); }
/** Returns the subset of nodes of the given flavor */
public NodeList flavor(String flavor) {
return matching(node -> node.flavor().name().equals(flavor));
}
/** Returns the subset of nodes not in the given set */
public NodeList except(Set<Node> nodes) {
return matching(node -> ! nodes.contains(node));
}
/** Returns the subset of nodes assigned to the given cluster type */
public NodeList type(ClusterSpec.Type type) {
return matching(node -> node.allocation().isPresent() && node.allocation().get().membership().cluster().type().equals(type));
}
/** Returns the subset of nodes that run containers */
public NodeList container() {
return matching(node -> node.allocation().isPresent() && node.allocation().get().membership().cluster().type().isContainer());
}
/** Returns the subset of nodes that are currently changing their Vespa version */
public NodeList changingVersion() {
return matching(node -> node.status().vespaVersion().isPresent() &&
node.allocation().isPresent() &&
!node.status().vespaVersion().get().equals(node.allocation().get().membership().cluster().vespaVersion()));
}
/** Returns the subset of nodes that are currently changing their OS version to given version */
public NodeList changingOsVersionTo(Version version) {
return matching(node -> node.status().osVersion().changingTo(version));
}
/** Returns the subset of nodes that are currently changing their OS version */
public NodeList changingOsVersion() {
return matching(node -> node.status().osVersion().changing());
}
/** Returns a copy of this sorted by current OS version (lowest to highest) */
public NodeList byIncreasingOsVersion() {
return sortedBy(Comparator.comparing(node -> node.status()
.osVersion()
.current()
.orElse(Version.emptyVersion)));
}
/** Returns the subset of nodes that are currently on a lower version than the given version */
public NodeList osVersionIsBefore(Version version) {
return matching(node -> node.status().osVersion().isBefore(version));
}
/** Returns the subset of nodes that are currently on the given OS version */
public NodeList onOsVersion(Version version) {
return matching(node -> node.status().osVersion().matches(version));
}
/** Returns the subset of nodes assigned to the given cluster */
public NodeList cluster(ClusterSpec.Id cluster) {
return matching(node -> node.allocation().isPresent() && node.allocation().get().membership().cluster().id().equals(cluster));
}
/** Returns the subset of nodes owned by the given application */
public NodeList owner(ApplicationId application) {
return matching(node -> node.allocation().map(a -> a.owner().equals(application)).orElse(false));
}
/** Returns the subset of nodes matching the given node type(s) */
public NodeList nodeType(NodeType first, NodeType... rest) {
if (rest.length == 0) {
return matching(node -> node.type() == first);
}
EnumSet<NodeType> nodeTypes = EnumSet.of(first, rest);
return matching(node -> nodeTypes.contains(node.type()));
}
/** Returns the subset of nodes of the host type */
public NodeList hosts() {
return nodeType(NodeType.host);
}
/** Returns the subset of nodes that are parents */
public NodeList parents() {
return matching(node -> node.parentHostname().isEmpty());
}
/** Returns the child nodes of the given parent node */
public NodeList childrenOf(String hostname) {
return matching(node -> node.hasParent(hostname));
}
public NodeList childrenOf(Node parent) {
return childrenOf(parent.hostname());
}
/** Returns the subset of nodes that are in any of the given state(s) */
public NodeList state(Node.State first, Node.State... rest) {
if (rest.length == 0) {
return matching(node -> node.state() == first);
}
return state(EnumSet.of(first, rest));
}
/** Returns the subset of nodes that are in any of the given state(s) */
public NodeList state(Set<Node.State> nodeStates) {
return matching(node -> nodeStates.contains(node.state()));
}
/** Returns the subset of nodes which wantToRetire set true */
public NodeList wantToRetire() {
return matching(node -> node.status().wantToRetire());
}
/** Returns the parent nodes of the given child nodes */
public NodeList parentsOf(NodeList children) {
return children.stream()
.map(this::parentOf)
.filter(Optional::isPresent)
.flatMap(Optional::stream)
.collect(collectingAndThen(Collectors.toList(), NodeList::copyOf));
}
/** Returns the nodes contained in the group identified by given index */
public NodeList group(int index) {
return matching(n -> n.allocation().isPresent() &&
n.allocation().get().membership().cluster().group().equals(Optional.of(ClusterSpec.Group.from(index))));
}
/** Returns the parent node of the given child node */
public Optional<Node> parentOf(Node child) {
return child.parentHostname()
.flatMap(parentHostname -> stream().filter(node -> node.hostname().equals(parentHostname))
.findFirst());
}
/** Returns the nodes of this as a stream */
public Stream<Node> stream() { return asList().stream(); }
public static NodeList copyOf(List<Node> nodes) {
return new NodeList(nodes, false);
}
@Override
public String toString() {
return asList().toString();
}
}
|
node-repository/src/main/java/com/yahoo/vespa/hosted/provision/NodeList.java
|
// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.provision;
import com.yahoo.collections.AbstractFilteringList;
import com.yahoo.component.Version;
import com.yahoo.config.provision.ApplicationId;
import com.yahoo.config.provision.ClusterSpec;
import com.yahoo.config.provision.NodeResources;
import com.yahoo.config.provision.NodeType;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.stream.Collectors.collectingAndThen;
/**
* A filterable node list. The result of a filter operation is immutable.
*
* @author bratseth
* @author mpolden
*/
public class NodeList extends AbstractFilteringList<Node, NodeList> {
protected NodeList(List<Node> nodes, boolean negate) {
super(nodes, negate, NodeList::new);
}
/** Returns the subset of nodes which are retired */
public NodeList retired() {
return matching(node -> node.allocation().isPresent() && node.allocation().get().membership().retired());
}
/** Returns the subset of nodes that are being deprovisioned */
public NodeList deprovisioning() {
return matching(node -> node.status().wantToRetire() && node.status().wantToDeprovision());
}
/** Returns the subset of nodes which are removable */
public NodeList removable() {
return matching(node -> node.allocation().isPresent() && node.allocation().get().isRemovable());
}
/** Returns the subset of nodes having exactly the given resources */
public NodeList resources(NodeResources resources) { return matching(node -> node.resources().equals(resources)); }
/** Returns the subset of nodes which satisfy the given resources */
public NodeList satisfies(NodeResources resources) { return matching(node -> node.resources().satisfies(resources)); }
/** Returns the subset of nodes of the given flavor */
public NodeList flavor(String flavor) {
return matching(node -> node.flavor().name().equals(flavor));
}
/** Returns the subset of nodes not in the given set */
public NodeList except(Set<Node> nodes) {
return matching(node -> ! nodes.contains(node));
}
/** Returns the subset of nodes assigned to the given cluster type */
public NodeList type(ClusterSpec.Type type) {
return matching(node -> node.allocation().isPresent() && node.allocation().get().membership().cluster().type().equals(type));
}
/** Returns the subset of nodes that run containers */
public NodeList container() {
return matching(node -> node.allocation().isPresent() && node.allocation().get().membership().cluster().type().isContainer());
}
/** Returns the subset of nodes that are currently changing their Vespa version */
public NodeList changingVersion() {
return matching(node -> node.status().vespaVersion().isPresent() &&
node.allocation().isPresent() &&
!node.status().vespaVersion().get().equals(node.allocation().get().membership().cluster().vespaVersion()));
}
/** Returns the subset of nodes that are currently changing their OS version to given version */
public NodeList changingOsVersionTo(Version version) {
return matching(node -> node.status().osVersion().changingTo(version));
}
/** Returns the subset of nodes that are currently changing their OS version */
public NodeList changingOsVersion() {
return matching(node -> node.status().osVersion().changing());
}
/** Returns a copy of this sorted by current OS version (lowest to highest) */
public NodeList byIncreasingOsVersion() {
return sortedBy(Comparator.comparing(node -> node.status()
.osVersion()
.current()
.orElse(Version.emptyVersion)));
}
/** Returns the subset of nodes that are currently on a lower version than the given version */
public NodeList osVersionIsBefore(Version version) {
return matching(node -> node.status().osVersion().isBefore(version));
}
/** Returns the subset of nodes that are currently on the given OS version */
public NodeList onOsVersion(Version version) {
return matching(node -> node.status().osVersion().matches(version));
}
/** Returns the subset of nodes assigned to the given cluster */
public NodeList cluster(ClusterSpec.Id cluster) {
return matching(node -> node.allocation().isPresent() && node.allocation().get().membership().cluster().id().equals(cluster));
}
/** Returns the subset of nodes owned by the given application */
public NodeList owner(ApplicationId application) {
return matching(node -> node.allocation().map(a -> a.owner().equals(application)).orElse(false));
}
/** Returns the subset of nodes matching the given node type(s) */
public NodeList nodeType(NodeType first, NodeType... rest) {
if (rest.length == 0) {
return matching(node -> node.type() == first);
}
EnumSet<NodeType> nodeTypes = EnumSet.of(first, rest);
return matching(node -> nodeTypes.contains(node.type()));
}
/** Returns the subset of nodes of the host type */
public NodeList hosts() {
return nodeType(NodeType.host);
}
/** Returns the subset of nodes that are parents */
public NodeList parents() {
return matching(node -> node.parentHostname().isEmpty());
}
/** Returns the child nodes of the given parent node */
public NodeList childrenOf(String hostname) {
return matching(node -> node.hasParent(hostname));
}
public NodeList childrenOf(Node parent) {
return childrenOf(parent.hostname());
}
/** Returns the subset of nodes that are in any of the given state(s) */
public NodeList state(Node.State first, Node.State... rest) {
return state(EnumSet.of(first, rest));
}
/** Returns the subset of nodes that are in any of the given state(s) */
public NodeList state(Set<Node.State> nodeStates) {
return matching(node -> nodeStates.contains(node.state()));
}
/** Returns the subset of nodes which wantToRetire set true */
public NodeList wantToRetire() {
return matching(node -> node.status().wantToRetire());
}
/** Returns the parent nodes of the given child nodes */
public NodeList parentsOf(NodeList children) {
return children.stream()
.map(this::parentOf)
.filter(Optional::isPresent)
.flatMap(Optional::stream)
.collect(collectingAndThen(Collectors.toList(), NodeList::copyOf));
}
/** Returns the nodes contained in the group identified by given index */
public NodeList group(int index) {
return matching(n -> n.allocation().isPresent() &&
n.allocation().get().membership().cluster().group().equals(Optional.of(ClusterSpec.Group.from(index))));
}
/** Returns the parent node of the given child node */
public Optional<Node> parentOf(Node child) {
return child.parentHostname()
.flatMap(parentHostname -> stream().filter(node -> node.hostname().equals(parentHostname))
.findFirst());
}
/** Returns the nodes of this as a stream */
public Stream<Node> stream() { return asList().stream(); }
public static NodeList copyOf(List<Node> nodes) {
return new NodeList(nodes, false);
}
@Override
public String toString() {
return asList().toString();
}
}
|
Avoid creating set for the common case
|
node-repository/src/main/java/com/yahoo/vespa/hosted/provision/NodeList.java
|
Avoid creating set for the common case
|
<ide><path>ode-repository/src/main/java/com/yahoo/vespa/hosted/provision/NodeList.java
<ide>
<ide> /** Returns the subset of nodes that are in any of the given state(s) */
<ide> public NodeList state(Node.State first, Node.State... rest) {
<add> if (rest.length == 0) {
<add> return matching(node -> node.state() == first);
<add> }
<ide> return state(EnumSet.of(first, rest));
<ide> }
<ide>
|
|
JavaScript
|
agpl-3.0
|
7c3cff120a4bf5727983c9f890daf583addf00b3
| 0 |
cancerit/cgpJBrowseToolkit,cancerit/cgpJBrowseToolkit
|
#!/usr/bin/env node
/**
* Copyright (c) 2016-2018 Genome Research Ltd.
*
* Author: CASM/Cancer IT <[email protected]>
*
* This file is part of cgpJBrowseToolkit.
*
* cgpJBrowseToolkit is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation; either version 3 of the License, or (at your option) any
* later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* 1. The usage of a range of years within a copyright statement contained within
* this distribution should be interpreted as being equivalent to a list of years
* including the first and last year specified and all consecutive years between
* them. For example, a copyright statement that reads ‘Copyright (c) 2005, 2007-
* 2009, 2011-2012’ should be interpreted as being identical to a statement that
* reads ‘Copyright (c) 2005, 2007, 2008, 2009, 2011, 2012’ and a copyright
* statement that reads ‘Copyright (c) 2005-2012’ should be interpreted as being
* identical to a statement that reads ‘Copyright (c) 2005, 2006, 2007, 2008,
* 2009, 2010, 2011, 2012’."
*/
process.on('unhandledRejection', (reason) => {
console.log('Unhandled Rejection at:', reason.stack || reason);
process.exit(1);
})
const VERSION = require('./version.js');
const puppeteer = require('puppeteer');
const path = require('path');
const colon = encodeURIComponent(':');
const fs = require('fs');
const mkdirp = require('mkdirp');
const Mode = require('stat-mode');
/**
 * Process command line args and check validity.
 *
 * Numeric options are coerced to numbers in place; invalid values abort
 * via throwErr().  With no options (or stray positional args) commander's
 * help is printed and the process exits.
 *
 * @return {object} A commander object
 */
function cliChecks() {
const program = require('commander');
program
.description('Generate images against a JBrowse server')
.option('-l, --locs <file>', 'Bed file of locations, see --help')
.option('-b, --baseUrl [value]', 'URL from pre configured JBrowse webpage, omit if provided in BED file')
.option('-w, --width [n]', 'Width of image', 600)
.option('-i, --imgType [value]', 'Type of image [jpeg|pdf|png]', 'png')
.option('-o, --outdir [value]', 'Output folder', './')
.option('-n, --navOff', 'Remove nav bars', false)
.option('-d, --dMode [value]', 'Change default display of alignment tracks [normal|compact|collapsed]')
.option(' --highlight', 'Highlight region (for short events)', false)
.option('-q, --quality [n]', 'Image resolution [1,2,3]', '3')
.option('-z, --zoom [n]', 'Zoom factor', 1)
.option('-p, --passwdFile [file]', 'User password for httpBasic')
.option('-t, --timeout [n]', 'For each track allow upto N sec.', 10)
.version(VERSION, '-v, --version')
.on('--help', function() {
console.log("\n Additional information:");
console.log(imageHelp);
console.log(bedHelp);
})
.parse(process.argv);
// no options at all, or unexpected positional args: show usage and exit
if (process.argv.length < 3 || program.args.length > 0) program.help();
if (program.dMode !== undefined && !program.dMode.match(/^(normal|compact|collapsed)$/)) {
throwErr("ERROR: -d|--dMode only accepts values of: normal, compact, collapsed");
}
if (!program.imgType.match(/^(jpeg|pdf|png)$/)) {
throwErr("ERROR: -i|--imgType only accepts values of: jpeg, pdf, png");
}
// NOTE: a value of 0 parses to falsy and is reported as "not an int/float"
program.width = parseInt(program.width) || throwErr("ERROR: -w|--width not an int");
program.timeout = parseInt(program.timeout) || throwErr("ERROR: -t|--timeout not an int");
program.quality = parseInt(program.quality) || throwErr("ERROR: -q|--quality not an int");
program.zoom = parseFloat(program.zoom) || throwErr("ERROR: -z|--zoom not a float");
// BUGFIX: was `< 0`, which did not enforce the 1..3 range stated in the message
if(program.quality < 1 || program.quality > 3) throwErr("ERROR: -q|--quality not 1, 2 or 3");
return program;
}
// Verbatim text appended to --help: notes on image quality and zoom.
const imageHelp = `
Image quality:
Best image quality is achieved with pdf, but ~5x larger than png.
Zoom:
To allow capturing same region in a wider image as JBrowse has a maximum width per base.
`
// Verbatim text appended to --help: expected format of the --locs BED file.
const bedHelp = `
--locs bed file:
Can include comment lines to switch the baseUrl used for the next block of
coordinates.
Any comment line will be processed into a dataset ($DS) name and URL. Files generated will be
output to a subfolder of the specified --output area as:
$OUTPUT/$DS/$CHR-$START_$END.
FORMAT:
# DATASET_NAME URL
CHR START END
# DATASET_NAME2 URL
CHR START END
...
Comment/URL separator lines can be space or tab separated elements.
BED formatted lines must be tab separated and only have 3 elements.
`
/**
 * So that we can throw custom error in an expression.
 *
 * A bare `throw` is a statement, so it cannot appear on the right-hand
 * side of `x = parse(v) || ...`; wrapping it in a function call can.
 *
 * @param {string} message - Error message
 */
function throwErr(message) {
throw new Error(message);
}
/**
* Cleans and configures baseUrl according to options
*
* @param {object} - commander object
* @param {string} - url to be processed
* @param {string} - subdirectory to append to options.outdir (or null)
*
* @return {object} - Url entities keyed as outloc, url, timeout.
*/
function urlCleaning(options, url, subdir) {
// Handle standard cleaning of the URL
let address = url
.replace(/loc=[^&]?/, '')
.replace(/&tracklist=[^&]?/, '')
.replace(/&nav=[^&]?/, '')
.replace(/&fullviewlink=[^&]?/, '')
.replace(/&highres=[^&]?/, '')
.replace(/&highlight=[^&]?/, '');
// handle sometimes flaky 0.0.0.0 loopback
address = address.replace(/[/]{2}0\.0\.0\.0/, '//localhost');
// turn off track list and fullview
address += '&tracklist=0&fullviewlink=0&highres='+options.quality;
if(options.navOff) { // optionally turn of the navigation tools
address += '&nav=0';
}
if(options.highlight) {
fullAddress += '&highlight='+loc.urlElement;
}
// cleanup any multiples of &&
address = address.replace(/[&]+/g,'&');
const tracks = address.match(/tracks=[^&]+/)[0].split(/%2C/g);
let outloc;
if(subdir != null) {
outloc = path.join(options.outdir, subdir);
}
else {
outloc = options.outdir;
}
return {
outloc: outloc,
url: address,
timeout: (30 + (options.timeout * tracks.length)) * 1000
}
}
/**
 * Load the locations file and embed the required URLs
 *
 * Comment lines ("# NAME URL") switch the active dataset/URL for the BED
 * records that follow; plain lines must be 3-column tab-separated BED.
 * BED starts are shifted from 0-based to the 1-based coordinates JBrowse
 * expects.
 *
 * @param {object} options - commander object
 * @return {array} - array of objects, {urlElement, realElement, fileElement} or {outloc, url, timeout}
 */
function loadLocs(options) {
let locations = [];
if(options.baseUrl) {
locations.push(urlCleaning(options, options.baseUrl, null));
}
// read in the bed locations
const rawLocs = fs.readFileSync(options.locs, "utf-8").split(/\r?\n/)
for(let rawLoc of rawLocs) {
if(rawLoc.length === 0) continue;
if(rawLoc.startsWith('#')) {
if(options.baseUrl) {
throwErr('ERROR: Dataset/URL cannot be defined in BED file when --baseUrl provided');
}
rawLoc = rawLoc.replace(/^#\s+/, '');
const groups = rawLoc.match(/([^\s]+)\s+(http.+)/);
// BUGFIX: a malformed comment line used to raise a TypeError on groups[2]
if(groups === null) {
throwErr('ERROR: Unable to parse dataset/URL line: ' + rawLoc);
}
locations.push(urlCleaning(options, groups[2], groups[1]));
continue;
}
const elements = rawLoc.split(/\t/);
if(elements.length !== 3) continue;
let start = parseInt(elements[1]);
const end = parseInt(elements[2]);
// BUGFIX: also reject non-numeric coordinates (NaN slipped past `start >= end`)
if(isNaN(start) || isNaN(end) || start >= end) {
console.warn('Skipping: bed location malformed: ' + rawLoc);
continue;
}
start += 1; // BED is 0-based half-open; JBrowse wants 1-based inclusive
locations.push({
urlElement: elements[0] + colon + start + '..' + end,
realElement: elements[0] + ':' + start + '..' + end,
fileElement: elements[0] + '_' + start + '-' + end
});
}
return locations;
}
/**
 * Get the height required for non-Track elements
 *
 * Fixed JBrowse chrome above the data tracks:
 *   menubar 27 + navbox 33 + overview 22 + static_track 14 ~= 96px.
 * With the nav bars hidden only the menubar (plus padding) remains.
 *
 * @param {object} options - commander object
 * @return {number} - height in pixels
 */
function headerHeight(options) {
return options.navOff ? 26 : 96;
}
/**
 * Load password for httpBasic from file when provided.
 *
 * On POSIX systems the file is tightened to owner-only access if it is
 * readable by group/others (with a warning); on Windows the permission
 * check is skipped because the POSIX mode bits are meaningless there.
 *
 * @param {object} options - commander object
 * @return {string|null} - Loaded password (line endings stripped) or null
 */
function loadPw(options) {
if(options.passwdFile) {
if(process.platform == 'win32') {
console.warn("Windows system, cannot check or correct file permissions of --passwdFile");
}
else {
const mode = new Mode(fs.statSync(options.passwdFile));
if(mode.group.read || mode.others.read) {
console.warn("File provided to --passwdFile is readable by people other than you, changing permissions...");
// strip everything except the owner's read/write bits
mode.owner.execute = false;
mode.group.read = false;
mode.group.write = false;
mode.group.execute = false;
mode.others.read = false;
mode.others.write = false;
mode.others.execute = false;
fs.chmodSync(options.passwdFile, mode.stat.mode);
}
}
// strip any newline(s) so the password is usable verbatim
return fs.readFileSync(options.passwdFile, "utf-8").replace(/\r?\n/g, '');
}
return null;
}
/**
 * Entry point.  Parses the command line, loads the BED locations and
 * drives a headless Chromium over each region, writing one image per
 * BED record to the active output folder.  Failures are retried up to
 * three times per location before that location is skipped.
 */
function main() {
const program = cliChecks();
const locations = loadLocs(program);
const minHeight = headerHeight(program);
const passwd = loadPw(program);
(async () => {
const browser = await puppeteer.launch({ignoreHTTPSErrors: true, headless: true});
const page = await browser.newPage();
await page.setCacheEnabled(true);
// httpBasic: username from the environment, password from --passwdFile
if(passwd) await page.authenticate({username: process.env.USER, password: passwd});
// BUGFIX: was `let {address, timeout, outloc} = ['', 0, ''];`, which
// object-destructures an array and leaves all three undefined.
let [address, timeout, outloc] = ['', 0, ''];
locLoop: for (const loc of locations) {
if(loc.url) {
// dataset/URL switch entry: applies to all following BED records
address = loc.url;
timeout = loc.timeout;
outloc = loc.outloc;
// make sure we have somewhere to write to; synchronous so the folder
// exists before the first image is saved (the callback form could race)
mkdirp.sync(outloc);
continue;
}
let fullAddress = address+'&loc='+loc.urlElement;
if(program.highlight) {
// BUGFIX: --highlight was never applied here; highlight the captured region
fullAddress += '&highlight='+loc.urlElement;
}
process.stdout.write('Processing: '+fullAddress);
const started = Date.now();
let rendered = false;
let tries = 1;
while(!rendered) {
try {
// need to reset each time
await page.setViewport({width: program.width, height: 2000});
const response = await page.goto(
fullAddress, {
timeout: timeout,
waitUntil: ['load', 'domcontentloaded', 'networkidle0']
}
);
if(! response.ok()) {
throwErr("ERROR: Check your connection and if you need to provide a password (http error code: "+response.status()+')');
}
if(program.dMode !== undefined) {
// force the requested display mode onto every alignment track and redraw
const tracks = await page.$$('.track_jbrowse_view_track_alignments2');
for (let t of tracks) {
await page.evaluate((t, mode) => {
t.track.displayMode = mode;
t.track.layout = null;
t.track.redraw();
}, t, program.dMode);
}
await page.waitFor(500); // allow time for redraw
}
} catch(err) {
if(tries === 1) console.warn();
console.warn(err.message);
if(tries++ < 3) {
console.log("\tTry "+tries);
continue;
}
console.error("Image not generated for: "+fullAddress);
continue locLoop;
}
rendered=true;
}
// final height = fixed chrome + each real data track's bounding box
let trackHeight = minHeight;
const divs = await page.$$('.track');
for (const d of divs) {
const propId = await d.getProperty('id')
const id = await propId.jsonValue();
if(id == 'gridtrack' || id == 'overviewtrack_overview_loc_track' || id == 'static_track') {
continue;
}
const bb = await d.boundingBox();
trackHeight += bb.height;
}
await page.setViewport({width: program.width, height: trackHeight, deviceScaleFactor: program.zoom});
const finalPath = path.join(outloc, loc.fileElement+'.'+program.imgType);
if(program.imgType === 'pdf') {
await page.pdf({path: finalPath, scale: program.zoom, width: parseInt(program.width * program.zoom), height: parseInt(trackHeight * program.zoom)})
}
else {
let shotOpts = {
path: finalPath,
fullPage: false
}
if(program.imgType === 'jpeg' && program.quality === 3) shotOpts['quality'] = 100;
await page.screenshot(shotOpts);
}
const took = Date.now() - started;
console.log(' ('+took/1000+' sec.)')
}
await browser.close();
})();
}
// Synchronous failures (bad CLI values, unreadable files) are reported here;
// errors raised inside the async IIFE in main() are NOT caught by this block
// and surface via the 'unhandledRejection' handler at the top of the file.
try {
main();
} catch(err) {
console.error("\n"+err.message+"\n");
process.exit(1);
}
|
js/jbrowse_rasterize.js
|
#!/usr/bin/env node
/**
* Copyright (c) 2016-2018 Genome Research Ltd.
*
* Author: CASM/Cancer IT <[email protected]>
*
* This file is part of cgpJBrowseToolkit.
*
* cgpJBrowseToolkit is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation; either version 3 of the License, or (at your option) any
* later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* 1. The usage of a range of years within a copyright statement contained within
* this distribution should be interpreted as being equivalent to a list of years
* including the first and last year specified and all consecutive years between
* them. For example, a copyright statement that reads ‘Copyright (c) 2005, 2007-
* 2009, 2011-2012’ should be interpreted as being identical to a statement that
* reads ‘Copyright (c) 2005, 2007, 2008, 2009, 2011, 2012’ and a copyright
* statement that reads ‘Copyright (c) 2005-2012’ should be interpreted as being
* identical to a statement that reads ‘Copyright (c) 2005, 2006, 2007, 2008,
* 2009, 2010, 2011, 2012’."
*/
// Fail fast (and non-zero) on any unhandled promise rejection; the async
// IIFE inside main() is never awaited, so its errors can only surface here.
process.on('unhandledRejection', (reason) => {
console.log('Unhandled Rejection at:', reason.stack || reason);
process.exit(1);
})
const VERSION = require('./version.js');
const puppeteer = require('puppeteer');
const path = require('path');
const colon = encodeURIComponent(':');
const fs = require('fs');
const mkdirp = require('mkdirp');
const Mode = require('stat-mode');
/**
 * Process command line args and check validity.
 *
 * Numeric options are coerced to numbers in place; invalid values abort
 * via throwErr().  With no options (or stray positional args) commander's
 * help is printed and the process exits.
 *
 * @return {object} A commander object
 */
function cliChecks() {
const program = require('commander');
program
.description('Generate images against a JBrowse server')
.option('-l, --locs <file>', 'Bed file of locations, see --help')
.option('-b, --baseUrl [value]', 'URL from pre configured JBrowse webpage, omit if provided in BED file')
.option('-w, --width [n]', 'Width of image', 600)
.option('-i, --imgType [value]', 'Type of image [jpeg|pdf|png]', 'png')
.option('-o, --outdir [value]', 'Output folder', './')
.option('-n, --navOff', 'Remove nav bars', false)
.option('-d, --dMode [value]', 'Change default display of alignment tracks [normal|compact|collapsed]')
.option(' --highlight', 'Highlight region (for short events)', false)
.option('-q, --quality [n]', 'Image resolution [1,2,3]', '3')
.option('-z, --zoom [n]', 'Zoom factor', 1)
.option('-p, --passwdFile [file]', 'User password for httpBasic')
.option('-t, --timeout [n]', 'For each track allow upto N sec.', 10)
.version(VERSION, '-v, --version')
.on('--help', function() {
console.log("\n Additional information:");
console.log(imageHelp);
console.log(bedHelp);
})
.parse(process.argv);
// no options at all, or unexpected positional args: show usage and exit
if (process.argv.length < 3 || program.args.length > 0) program.help();
if (program.dMode !== undefined && !program.dMode.match(/^(normal|compact|collapsed)$/)) {
throwErr("ERROR: -d|--dMode only accepts values of: normal, compact, collapsed");
}
if (!program.imgType.match(/^(jpeg|pdf|png)$/)) {
throwErr("ERROR: -i|--imgType only accepts values of: jpeg, pdf, png");
}
// NOTE: a value of 0 parses to falsy and is reported as "not an int/float"
program.width = parseInt(program.width) || throwErr("ERROR: -w|--width not an int");
program.timeout = parseInt(program.timeout) || throwErr("ERROR: -t|--timeout not an int");
program.quality = parseInt(program.quality) || throwErr("ERROR: -q|--quality not an int");
program.zoom = parseFloat(program.zoom) || throwErr("ERROR: -z|--zoom not a float");
// BUGFIX: was `< 0`, which did not enforce the 1..3 range stated in the message
if(program.quality < 1 || program.quality > 3) throwErr("ERROR: -q|--quality not 1, 2 or 3");
return program;
}
// Verbatim text appended to --help: notes on image quality and zoom.
const imageHelp = `
Image quality:
Best image quality is achieved with pdf, but ~5x larger than png.
Zoom:
To allow capturing same region in a wider image as JBrowse has a maximum width per base.
`
// Verbatim text appended to --help: expected format of the --locs BED file.
const bedHelp = `
--locs bed file:
Can include comment lines to switch the baseUrl used for the next block of
coordinates.
Any comment line will be processed into a dataset ($DS) name and URL. Files generated will be
output to a subfolder of the specified --output area as:
$OUTPUT/$DS/$CHR-$START_$END.
FORMAT:
# DATASET_NAME URL
CHR START END
# DATASET_NAME2 URL
CHR START END
...
Comment/URL separator lines can be space or tab separated elements.
BED formatted lines must be tab separated and only have 3 elements.
`
/**
 * So that we can throw custom error in an expression.
 *
 * A bare `throw` is a statement, so it cannot appear on the right-hand
 * side of `x = parse(v) || ...`; wrapping it in a function call can.
 *
 * @param {string} message - Error message
 */
function throwErr(message) {
throw new Error(message);
}
/**
* Cleans and configures baseUrl according to options
*
* @param {object} - commander object
* @param {string} - url to be processed
* @param {string} - subdirectory to append to options.outdir (or null)
*
* @return {object} - Url entities keyed as outloc, url, timeout.
*/
function urlCleaning(options, url, subdir) {
// Handle standard cleaning of the URL
let address = url
.replace(/loc=[^&]?/, '')
.replace(/&tracklist=[^&]?/, '')
.replace(/&nav=[^&]?/, '')
.replace(/&fullviewlink=[^&]?/, '')
.replace(/&highres=[^&]?/, '')
.replace(/&highlight=[^&]?/, '');
// handle sometimes flaky 0.0.0.0 loopback
address = address.replace(/[/]{2}0\.0\.0\.0/, '//localhost');
// turn off track list and fullview
address += '&tracklist=0&fullviewlink=0&highres='+options.quality;
if(options.navOff) { // optionally turn of the navigation tools
address += '&nav=0';
}
if(options.highlight) {
fullAddress += '&highlight='+loc.urlElement;
}
// cleanup any multiples of &&
address = address.replace(/[&]+/g,'&');
const tracks = address.match(/tracks=[^&]+/)[0].split(/%2C/g);
let outloc;
if(subdir != null) {
outloc = path.join(options.outdir, subdir);
}
else {
outloc = options.outdir;
}
return {
outloc: outloc,
url: address,
timeout: (30 + (options.timeout * tracks.length)) * 1000
}
}
/**
 * Load the locations file and embed the required URLs
 *
 * Comment lines ("# NAME URL") switch the active dataset/URL for the BED
 * records that follow; plain lines must be 3-column tab-separated BED.
 * BED starts are shifted from 0-based to the 1-based coordinates JBrowse
 * expects.
 *
 * @param {object} options - commander object
 * @return {array} - array of objects, {urlElement, realElement, fileElement} or {outloc, url, timeout}
 */
function loadLocs(options) {
let locations = [];
if(options.baseUrl) {
locations.push(urlCleaning(options, options.baseUrl, null));
}
// read in the bed locations
const rawLocs = fs.readFileSync(options.locs, "utf-8").split(/\r?\n/)
for(let rawLoc of rawLocs) {
if(rawLoc.length === 0) continue;
if(rawLoc.startsWith('#')) {
if(options.baseUrl) {
throwErr('ERROR: Dataset/URL cannot be defined in BED file when --baseUrl provided');
}
rawLoc = rawLoc.replace(/^#\s+/, '');
const groups = rawLoc.match(/([^\s]+)\s+(http.+)/);
// BUGFIX: a malformed comment line used to raise a TypeError on groups[2]
if(groups === null) {
throwErr('ERROR: Unable to parse dataset/URL line: ' + rawLoc);
}
locations.push(urlCleaning(options, groups[2], groups[1]));
continue;
}
const elements = rawLoc.split(/\t/);
if(elements.length !== 3) continue;
let start = parseInt(elements[1]);
const end = parseInt(elements[2]);
// BUGFIX: also reject non-numeric coordinates (NaN slipped past `start >= end`)
if(isNaN(start) || isNaN(end) || start >= end) {
console.warn('Skipping: bed location malformed: ' + rawLoc);
continue;
}
start += 1; // BED is 0-based half-open; JBrowse wants 1-based inclusive
locations.push({
urlElement: elements[0] + colon + start + '..' + end,
realElement: elements[0] + ':' + start + '..' + end,
fileElement: elements[0] + '_' + start + '-' + end
});
}
return locations;
}
/**
 * Get the height required for non-Track elements
 *
 * Fixed JBrowse chrome above the data tracks:
 *   menubar 27 + navbox 33 + overview 22 + static_track 14 ~= 96px.
 * With the nav bars hidden only the menubar (plus padding) remains.
 *
 * @param {object} options - commander object
 * @return {number} - height in pixels
 */
function headerHeight(options) {
return options.navOff ? 26 : 96;
}
/**
 * Load password for httpBasic from file when provided.
 *
 * On POSIX systems the file is tightened to owner-only access if it is
 * readable by group/others (with a warning); on Windows the permission
 * check is skipped because the POSIX mode bits are meaningless there.
 *
 * @param {object} options - commander object
 * @return {string|null} - Loaded password (line endings stripped) or null
 */
function loadPw(options) {
if(options.passwdFile) {
if(process.platform == 'win32') {
console.warn("Windows system, cannot check or correct file permissions of --passwdFile");
}
else {
// LINT FIX: `mode` is never reassigned, so declare it const (was `var`)
const mode = new Mode(fs.statSync(options.passwdFile));
if(mode.group.read || mode.others.read) {
console.warn("File provided to --passwdFile is readable by people other than you, changing permissions...");
// strip everything except the owner's read/write bits
mode.owner.execute = false;
mode.group.read = false;
mode.group.write = false;
mode.group.execute = false;
mode.others.read = false;
mode.others.write = false;
mode.others.execute = false;
fs.chmodSync(options.passwdFile, mode.stat.mode);
}
}
// strip any newline(s) so the password is usable verbatim
return fs.readFileSync(options.passwdFile, "utf-8").replace(/\r?\n/g, '');
}
return null;
}
/**
 * Entry point.  Parses the command line, loads the BED locations and
 * drives a headless Chromium over each region, writing one image per
 * BED record to the active output folder.  Failures are retried up to
 * three times per location before that location is skipped.
 */
function main() {
const program = cliChecks();
const locations = loadLocs(program);
const minHeight = headerHeight(program);
const passwd = loadPw(program);
(async () => {
const browser = await puppeteer.launch({ignoreHTTPSErrors: true, headless: true});
const page = await browser.newPage();
await page.setCacheEnabled(true);
// httpBasic: username from the environment, password from --passwdFile
if(passwd) await page.authenticate({username: process.env.USER, password: passwd});
// BUGFIX: was `let {address, timeout, outloc} = ['', 0, ''];`, which
// object-destructures an array and leaves all three undefined.
let [address, timeout, outloc] = ['', 0, ''];
locLoop: for (const loc of locations) {
if(loc.url) {
// dataset/URL switch entry: applies to all following BED records
address = loc.url;
timeout = loc.timeout;
outloc = loc.outloc;
// make sure we have somewhere to write to; synchronous so the folder
// exists before the first image is saved (the callback form could race)
mkdirp.sync(outloc);
continue;
}
let fullAddress = address+'&loc='+loc.urlElement;
if(program.highlight) {
// BUGFIX: --highlight was never applied here; highlight the captured region
fullAddress += '&highlight='+loc.urlElement;
}
process.stdout.write('Processing: '+fullAddress);
const started = Date.now();
let rendered = false;
let tries = 1;
while(!rendered) {
try {
// need to reset each time
await page.setViewport({width: program.width, height: 2000});
const response = await page.goto(
fullAddress, {
timeout: timeout,
waitUntil: ['load', 'domcontentloaded', 'networkidle0']
}
);
if(! response.ok()) {
throwErr("ERROR: Check your connection and if you need to provide a password (http error code: "+response.status()+')');
}
if(program.dMode !== undefined) {
// force the requested display mode onto every alignment track and redraw
const tracks = await page.$$('.track_jbrowse_view_track_alignments2');
for (let t of tracks) {
await page.evaluate((t, mode) => {
t.track.displayMode = mode;
t.track.layout = null;
t.track.redraw();
}, t, program.dMode);
}
await page.waitFor(500); // allow time for redraw
}
} catch(err) {
if(tries === 1) console.warn();
console.warn(err.message);
if(tries++ < 3) {
console.log("\tTry "+tries);
continue;
}
console.error("Image not generated for: "+fullAddress);
continue locLoop;
}
rendered=true;
}
// final height = fixed chrome + each real data track's bounding box
let trackHeight = minHeight;
const divs = await page.$$('.track');
for (const d of divs) {
const propId = await d.getProperty('id')
const id = await propId.jsonValue();
if(id == 'gridtrack' || id == 'overviewtrack_overview_loc_track' || id == 'static_track') {
continue;
}
const bb = await d.boundingBox();
trackHeight += bb.height;
}
await page.setViewport({width: program.width, height: trackHeight, deviceScaleFactor: program.zoom});
const finalPath = path.join(outloc, loc.fileElement+'.'+program.imgType);
if(program.imgType === 'pdf') {
await page.pdf({path: finalPath, scale: program.zoom, width: parseInt(program.width * program.zoom), height: parseInt(trackHeight * program.zoom)})
}
else {
let shotOpts = {
path: finalPath,
fullPage: false
}
if(program.imgType === 'jpeg' && program.quality === 3) shotOpts['quality'] = 100;
await page.screenshot(shotOpts);
}
const took = Date.now() - started;
console.log(' ('+took/1000+' sec.)')
}
await browser.close();
})();
}
// Synchronous failures (bad CLI values, unreadable files) are reported here;
// errors raised inside the async IIFE in main() are NOT caught by this block
// and surface via the 'unhandledRejection' handler at the top of the file.
try {
main();
} catch(err) {
console.error("\n"+err.message+"\n");
process.exit(1);
}
|
lint error
|
js/jbrowse_rasterize.js
|
lint error
|
<ide><path>s/jbrowse_rasterize.js
<ide> console.warn("Windows system, cannot check or correct file permissions of --passwdFile");
<ide> }
<ide> else {
<del> var mode = new Mode(fs.statSync(options.passwdFile));
<add> const mode = new Mode(fs.statSync(options.passwdFile));
<ide> if(mode.group.read || mode.others.read) {
<ide> console.warn("File provided to --passwdFile is readable by people other than you, changing permissions...");
<ide> mode.owner.execute = false;
|
|
Java
|
agpl-3.0
|
3725a88ef205251b89c71e536b28db79742233c2
| 0 |
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
5dc9c94e-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
5dc44c8a-2e60-11e5-9284-b827eb9e62be
|
5dc9c94e-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
5dc9c94e-2e60-11e5-9284-b827eb9e62be
|
<ide><path>ello.java
<del>5dc44c8a-2e60-11e5-9284-b827eb9e62be
<add>5dc9c94e-2e60-11e5-9284-b827eb9e62be
|
|
Java
|
bsd-3-clause
|
0d4308c936f4c402c6a56d9f366d881cd7d06f38
| 0 |
NCIP/rembrandt,NCIP/rembrandt,NCIP/rembrandt,NCIP/rembrandt,NCIP/rembrandt,NCIP/rembrandt
|
package gov.nih.nci.rembrandt.web.xml;
import gov.nih.nci.rembrandt.queryservice.resultset.DimensionalViewContainer;
import gov.nih.nci.rembrandt.queryservice.resultset.Resultant;
import gov.nih.nci.rembrandt.queryservice.resultset.ResultsContainer;
import gov.nih.nci.rembrandt.queryservice.resultset.sample.SampleResultset;
import gov.nih.nci.rembrandt.queryservice.resultset.sample.SampleViewResultsContainer;
import gov.nih.nci.rembrandt.util.DEUtils;
import java.text.DecimalFormat;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import org.apache.log4j.Logger;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
/**
* @author LandyR
* Feb 8, 2005
*
*/
public class ClinicalSampleReport implements ReportGenerator {
/**
 * Default constructor; the report has no state to initialise, so this
 * only delegates to the superclass constructor.
 */
public ClinicalSampleReport () {
super();
}
/* (non-Javadoc)
* @see gov.nih.nci.nautilus.ui.report.ReportGenerator#getTemplate(gov.nih.nci.nautilus.resultset.Resultant, java.lang.String)
*/
public Document getReportXML(Resultant resultant, Map filterMapParams) {
//String theColors[] = { "B6C5F2","F2E3B5","DAE1F9","C4F2B5","819BE9", "E9CF81" };
DecimalFormat resultFormat = new DecimalFormat("0.0000");
String defaultV = "-";
Document document = DocumentHelper.createDocument();
try {
Element report = document.addElement( "Report" );
Element cell = null;
Element data = null;
Element dataRow = null;
//add the atts
report.addAttribute("reportType", "Clinical");
//fudge these for now
report.addAttribute("groupBy", "none");
String queryName = resultant.getAssociatedQuery().getQueryName();
//set the queryName to be unique for session/cache access
report.addAttribute("queryName", queryName);
report.addAttribute("sessionId", "the session id");
report.addAttribute("creationTime", "right now");
boolean gLinks = false;
boolean cLinks = false;
StringBuffer sb = new StringBuffer();
ResultsContainer resultsContainer = resultant.getResultsContainer();
SampleViewResultsContainer sampleViewContainer = null;
if(resultsContainer instanceof DimensionalViewContainer) {
DimensionalViewContainer dimensionalViewContainer = (DimensionalViewContainer) resultsContainer;
// Are we making hyperlinks?
if(dimensionalViewContainer.getGeneExprSingleViewContainer() != null) {
// show the geneExprHyperlinks
gLinks = true;
}
if(dimensionalViewContainer.getCopyNumberSingleViewContainer() != null) {
// show the copyNumberHyperlinks
cLinks = true;
}
sampleViewContainer = dimensionalViewContainer.getSampleViewResultsContainer();
}
else if (resultsContainer instanceof SampleViewResultsContainer) {
sampleViewContainer = (SampleViewResultsContainer) resultsContainer;
}
Collection samples = sampleViewContainer.getBioSpecimenResultsets();
/*
sb.append("<div class=\"rowCount\">"+helpFul+samples.size()+" records returned " + links + "</div>\n");
sb.append("<table cellpadding=\"0\" cellspacing=\"0\">\n");
*/
// set up the headers for this table
Element headerRow = report.addElement("Row").addAttribute("name", "headerRow");
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Sample");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Age at Dx (years)");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Gender");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Survival (months)");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Disease");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Grade");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Race");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Karnofsky");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Lansky");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Neuro Exam");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("MRI");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Clinical Evaluation Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Followup Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Followup Month");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Neuro Evaluation Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Steroid Dose Status");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Anti-Convulsant Status");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Site");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Start Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Stop Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Dose");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Type");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent ID");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent Name");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Course Count");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Start Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Stop Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Study Source");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Protocol Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Procedure Title");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Tumor Histology");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Outcome");
data = null;
cell = null;
// starting OnStudy areas
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Site");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Start Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Stop Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Neurosis Status");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Fraction Dose");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Fraction Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Type");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent ID");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent Name");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Regimen Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Course Count");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Start Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Stop Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Study Source");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Protocol Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Procedure Title");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Indication ");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Histo Diagnosis ");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Outcome");
data = null;
cell = null;
//sb.append("<Tr><Td id=\"header\">SAMPLE</td><td id=\"header\">AGE at Dx (years)</td><td id=\"header\">GENDER</td><td id=\"header\">SURVIVAL (months)</td><td id=\"header\">DISEASE</td>");
Iterator si = samples.iterator();
if(si.hasNext()) {
SampleResultset sampleResultset = (SampleResultset)si.next();
if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) {
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("GeneExp");
data = null;
cell = null;
//sb.append("<Td id=\"header\">GeneExp</td>");
}
if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) {
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("CopyNumber");
data = null;
cell = null;
//sb.append("<td id=\"header\">CopyNumber</td>");
}
//sb.append("</tr>\n");
}
for (Iterator sampleIterator = samples.iterator(); sampleIterator.hasNext();) {
SampleResultset sampleResultset = (SampleResultset)sampleIterator.next();
dataRow = report.addElement("Row").addAttribute("name", "dataRow");
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "sample").addAttribute("group", "sample");
data = cell.addElement("Data").addAttribute("type", "data").addText(sampleResultset.getSampleIDDE().getValue().toString());
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getAgeGroup()));
data = null;
cell = null;
String theGender = defaultV;
if(!DEUtils.checkNV(sampleResultset.getGenderCode()).equalsIgnoreCase("O"))
theGender = DEUtils.checkNV(sampleResultset.getGenderCode());
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(theGender);
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getSurvivalLengthRange()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getDisease()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getWhoGrade()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getRaceDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getKarnofskyClinicalEvalDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getLanskyClinicalEvalDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getNeuroExamClinicalEvalDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getMriClinicalEvalDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupMonths()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getNeuroEvaluationDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getSteroidDoseStatuses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getAntiConvulsantStatuses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationSites()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStartDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStopDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionDoses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationTypes()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoagentIds()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoAgentNames()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoCourseCounts()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStartDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStopDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoStudySources()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoProtocolNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryProcedureTitles()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTumorHistologys()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryOutcomes()));
data = null;
cell = null;
// starting onstudy
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationSites()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStartDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStopDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionDoses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationNeurosisStatuses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationTypes()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoagentIds()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoAgentNames()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoRegimenNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoCourseCounts()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStartDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStopDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoStudySources()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoProtocolNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryProcedureTitles()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryIndications()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryHistoDiagnoses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryOutcomes()));
data = null;
cell = null;
/*
sb.append("<tr><td>"+sampleResultset.getBiospecimen().getValue().toString().substring(2)+ "</td>" +
"<Td>"+sampleResultset.getAgeGroup().getValue()+ "</td>" +
"<td>"+sampleResultset.getGenderCode().getValue()+ "</td>" +
"<td>"+sampleResultset.getSurvivalLengthRange().getValue()+ "</td>" +
"<Td>"+sampleResultset.getDisease().getValue() + "</td>");
*/
if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) {
//TODO: create the links
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText("G");
data = null;
cell = null;
//sb.append("<td><a href=\"report.do?s="+sampleName+"_gene&report=gene\">G</a></td>");
}
else if (gLinks){
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(" ");
data = null;
cell = null;
//sb.append("<td> </td>"); //empty cell
}
if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) {
// TODO: create the links
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText("C");
data = null;
cell = null;
//sb.append("<Td><a href=\"report.do?s="+sampleName +"_copy&report=copy\">C</a></td>");
}
else if (cLinks){
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(" ");
data = null;
cell = null;
//sb.append("<td> </td>"); //empty cell
}
//report.append("row", row);
//sb.append("</tr>\n");
}
//sb.append("</table>\n<br>");
//return sb.toString();
}
catch(Exception e) {
// NOTE(review): exception is swallowed here and only echoed to stdout; the partially
// built document is still returned. Consider logging via the class Logger and/or
// rethrowing so callers can distinguish a failed report from an empty one.
System.out.println(e);
}
return document;
}
}
|
src/gov/nih/nci/rembrandt/web/xml/ClinicalSampleReport.java
|
package gov.nih.nci.rembrandt.web.xml;
import gov.nih.nci.rembrandt.queryservice.resultset.DimensionalViewContainer;
import gov.nih.nci.rembrandt.queryservice.resultset.Resultant;
import gov.nih.nci.rembrandt.queryservice.resultset.ResultsContainer;
import gov.nih.nci.rembrandt.queryservice.resultset.sample.SampleResultset;
import gov.nih.nci.rembrandt.queryservice.resultset.sample.SampleViewResultsContainer;
import gov.nih.nci.rembrandt.util.DEUtils;
import java.text.DecimalFormat;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import org.apache.log4j.Logger;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
/**
 * Builds the clinical-sample report as a dom4j XML {@code Document}: one header
 * row of column labels followed by one data row per sample, with optional
 * GeneExp/CopyNumber link columns when the corresponding view containers exist.
 *
 * @author LandyR
 * Feb 8, 2005
 *
 */
public class ClinicalSampleReport implements ReportGenerator {
/**
*
*/
public ClinicalSampleReport () {
super();
}
/* (non-Javadoc)
	 * @see gov.nih.nci.rembrandt.web.xml.ReportGenerator#getReportXML(gov.nih.nci.rembrandt.queryservice.resultset.Resultant, java.util.Map)
	 */
public Document getReportXML(Resultant resultant, Map filterMapParams) {
//String theColors[] = { "B6C5F2","F2E3B5","DAE1F9","C4F2B5","819BE9", "E9CF81" };
DecimalFormat resultFormat = new DecimalFormat("0.0000");
Document document = DocumentHelper.createDocument();
try {
Element report = document.addElement( "Report" );
Element cell = null;
Element data = null;
Element dataRow = null;
//add the atts
report.addAttribute("reportType", "Clinical");
//fudge these for now
report.addAttribute("groupBy", "none");
String queryName = resultant.getAssociatedQuery().getQueryName();
//set the queryName to be unique for session/cache access
report.addAttribute("queryName", queryName);
report.addAttribute("sessionId", "the session id");
report.addAttribute("creationTime", "right now");
boolean gLinks = false;
boolean cLinks = false;
StringBuffer sb = new StringBuffer();
ResultsContainer resultsContainer = resultant.getResultsContainer();
SampleViewResultsContainer sampleViewContainer = null;
if(resultsContainer instanceof DimensionalViewContainer) {
DimensionalViewContainer dimensionalViewContainer = (DimensionalViewContainer) resultsContainer;
// Are we making hyperlinks?
if(dimensionalViewContainer.getGeneExprSingleViewContainer() != null) {
// show the geneExprHyperlinks
gLinks = true;
}
if(dimensionalViewContainer.getCopyNumberSingleViewContainer() != null) {
// show the copyNumberHyperlinks
cLinks = true;
}
sampleViewContainer = dimensionalViewContainer.getSampleViewResultsContainer();
}
else if (resultsContainer instanceof SampleViewResultsContainer) {
sampleViewContainer = (SampleViewResultsContainer) resultsContainer;
}
Collection samples = sampleViewContainer.getBioSpecimenResultsets();
/*
sb.append("<div class=\"rowCount\">"+helpFul+samples.size()+" records returned " + links + "</div>\n");
sb.append("<table cellpadding=\"0\" cellspacing=\"0\">\n");
*/
// set up the headers for this table
Element headerRow = report.addElement("Row").addAttribute("name", "headerRow");
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Sample");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Age at Dx (years)");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Gender");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Survival (months)");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Disease");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Grade");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Race");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Karnofsky");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Lansky");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Neuro Exam");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("MRI");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Clinical Evaluation Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Followup Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Followup Month");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Neuro Evaluation Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Steroid Dose Status");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Anti-Convulsant Status");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Site");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Start Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Stop Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Dose");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Type");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent ID");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent Name");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Course Count");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Start Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Stop Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Study Source");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Protocol Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Procedure Title");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Tumor Histology");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Outcome");
data = null;
cell = null;
// starting Prior areas
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Site");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Start Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Stop Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Neurosis Status");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Fraction Dose");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Fraction Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Type");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent ID");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent Name");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Regimen Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Course Count");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Start Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Stop Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Study Source");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Protocol Number");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Time Point");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Procedure Title");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Indication ");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Histo Diagnosis ");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Date");
data = null;
cell = null;
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Outcome");
data = null;
cell = null;
//sb.append("<Tr><Td id=\"header\">SAMPLE</td><td id=\"header\">AGE at Dx (years)</td><td id=\"header\">GENDER</td><td id=\"header\">SURVIVAL (months)</td><td id=\"header\">DISEASE</td>");
Iterator si = samples.iterator();
if(si.hasNext()) {
SampleResultset sampleResultset = (SampleResultset)si.next();
if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) {
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("GeneExp");
data = null;
cell = null;
//sb.append("<Td id=\"header\">GeneExp</td>");
}
if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) {
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
data = cell.addElement("Data").addAttribute("type", "header").addText("CopyNumber");
data = null;
cell = null;
//sb.append("<td id=\"header\">CopyNumber</td>");
}
//sb.append("</tr>\n");
}
for (Iterator sampleIterator = samples.iterator(); sampleIterator.hasNext();) {
SampleResultset sampleResultset = (SampleResultset)sampleIterator.next();
dataRow = report.addElement("Row").addAttribute("name", "dataRow");
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "sample").addAttribute("group", "sample");
data = cell.addElement("Data").addAttribute("type", "data").addText(sampleResultset.getSampleIDDE().getValue().toString());
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getAgeGroup()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getGenderCode()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getSurvivalLengthRange()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getDisease()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getWhoGrade()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getRaceDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getKarnofskyClinicalEvalDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getLanskyClinicalEvalDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getNeuroExamClinicalEvalDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getMriClinicalEvalDE()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupMonths()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getNeuroEvaluationDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getSteroidDoseStatuses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getAntiConvulsantStatuses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationSites()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStartDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStopDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionDoses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationTypes()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoagentIds()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoAgentNames()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoCourseCounts()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStartDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStopDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoStudySources()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoProtocolNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryProcedureTitles()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTumorHistologys()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryOutcomes()));
data = null;
cell = null;
// starting onstudy
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationSites()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStartDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStopDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionDoses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationNeurosisStatuses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationTypes()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoagentIds()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoAgentNames()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoRegimenNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoCourseCounts()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStartDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStopDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoStudySources()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoProtocolNumbers()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryTimePoints()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryProcedureTitles()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryIndications()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryHistoDiagnoses()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryDates()));
data = null;
cell = null;
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryOutcomes()));
data = null;
cell = null;
/*
sb.append("<tr><td>"+sampleResultset.getBiospecimen().getValue().toString().substring(2)+ "</td>" +
"<Td>"+sampleResultset.getAgeGroup().getValue()+ "</td>" +
"<td>"+sampleResultset.getGenderCode().getValue()+ "</td>" +
"<td>"+sampleResultset.getSurvivalLengthRange().getValue()+ "</td>" +
"<Td>"+sampleResultset.getDisease().getValue() + "</td>");
*/
if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) {
//TODO: create the links
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText("G");
data = null;
cell = null;
//sb.append("<td><a href=\"report.do?s="+sampleName+"_gene&report=gene\">G</a></td>");
}
else if (gLinks){
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(" ");
data = null;
cell = null;
//sb.append("<td> </td>"); //empty cell
}
if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) {
// TODO: create the links
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText("C");
data = null;
cell = null;
//sb.append("<Td><a href=\"report.do?s="+sampleName +"_copy&report=copy\">C</a></td>");
}
else if (cLinks){
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
data = cell.addElement("Data").addAttribute("type", "data").addText(" ");
data = null;
cell = null;
//sb.append("<td> </td>"); //empty cell
}
//report.append("row", row);
//sb.append("</tr>\n");
}
//sb.append("</table>\n<br>");
//return sb.toString();
}
catch(Exception e) {
//asdf
System.out.println(e);
}
return document;
}
}
|
gender from O to -
SVN-Revision: 2073
|
src/gov/nih/nci/rembrandt/web/xml/ClinicalSampleReport.java
|
gender from O to -
|
<ide><path>rc/gov/nih/nci/rembrandt/web/xml/ClinicalSampleReport.java
<ide>
<ide> //String theColors[] = { "B6C5F2","F2E3B5","DAE1F9","C4F2B5","819BE9", "E9CF81" };
<ide> DecimalFormat resultFormat = new DecimalFormat("0.0000");
<add> String defaultV = "-";
<ide>
<ide> Document document = DocumentHelper.createDocument();
<ide>
<ide> data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getAgeGroup()));
<ide> data = null;
<ide> cell = null;
<add> String theGender = defaultV;
<add> if(!DEUtils.checkNV(sampleResultset.getGenderCode()).equalsIgnoreCase("O"))
<add> theGender = DEUtils.checkNV(sampleResultset.getGenderCode());
<ide> cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
<del> data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getGenderCode()));
<add> data = cell.addElement("Data").addAttribute("type", "data").addText(theGender);
<ide> data = null;
<ide> cell = null;
<ide> cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
|
|
Java
|
bsd-3-clause
|
891a9f581a59827976f3d308a0ff1bd6973c9421
| 0 |
EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal,EuropeanSpallationSource/openxal
|
/*
* @(#)VADocument.java 1.5 07/15/2004
*
* Copyright (c) 2001-2004 Oak Ridge National Laboratory
* Oak Ridge, Tenessee 37831, U.S.A.
* All rights reserved.
*
*/
package xal.app.virtualaccelerator;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.net.URL;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.JToggleButton.ToggleButtonModel;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.PlainDocument;
import xal.ca.Channel;
import xal.ca.ChannelFactory;
import xal.ca.ConnectionException;
import xal.ca.GetException;
import xal.ca.PutException;
import xal.ca.PutListener;
import xal.extension.application.Application;
import xal.extension.application.Commander;
import xal.extension.application.XalWindow;
import xal.extension.application.smf.AcceleratorDocument;
import xal.extension.bricks.WindowReference;
import xal.extension.widgets.apputils.SimpleProbeEditor;
import xal.extension.widgets.plot.BasicGraphData;
import xal.extension.widgets.plot.FunctionGraphsJPanel;
import xal.extension.widgets.swing.DecimalField;
import xal.extension.widgets.swing.KeyValueFilteredTableModel;
import xal.model.IAlgorithm;
import xal.model.ModelException;
import xal.model.alg.TransferMapTracker;
import xal.model.probe.EnvelopeProbe;
import xal.model.probe.Probe; // Probe for t3d header
import xal.model.probe.TransferMapProbe;
import xal.model.probe.traj.EnvelopeProbeState;
import xal.model.probe.traj.ProbeState;
import xal.service.pvlogger.apputils.browser.PVLogSnapshotChooser;
import xal.service.pvlogger.sim.PVLoggerDataSource;
import xal.sim.scenario.AlgorithmFactory;
import xal.sim.scenario.ProbeFactory;
import xal.sim.scenario.Scenario;
import xal.smf.AcceleratorNode;
import xal.smf.AcceleratorSeq;
import xal.smf.AcceleratorSeqCombo;
import xal.smf.NoSuchChannelException;
import xal.smf.Ring;
import xal.smf.TimingCenter;
import xal.smf.attr.BPMBucket;
import xal.smf.impl.BPM;
import xal.smf.impl.Bend;
import xal.smf.impl.Electromagnet;
import xal.smf.impl.HDipoleCorr;
import xal.smf.impl.MagnetMainSupply;
import xal.smf.impl.MagnetTrimSupply;
import xal.smf.impl.ProfileMonitor;
import xal.smf.impl.Quadrupole;
import xal.smf.impl.RfCavity;
import xal.smf.impl.RingBPM;
import xal.smf.impl.SCLCavity;
import xal.smf.impl.Solenoid;
import xal.smf.impl.TrimmedQuadrupole;
import xal.smf.impl.VDipoleCorr;
import xal.smf.impl.qualify.QualifierFactory;
import xal.smf.impl.qualify.TypeQualifier;
import xal.tools.beam.PhaseVector;
import xal.tools.beam.Twiss;
import xal.tools.beam.calc.SimpleSimResultsAdaptor;
import xal.tools.data.DataAdaptor;
import xal.tools.dispatch.DispatchQueue;
import xal.tools.dispatch.DispatchTimer;
//TODO: CKA - Many unused imports
import xal.tools.xml.XmlDataAdaptor;
/**
* <p>
* <h4>CKA NOTES:</h4>
* - In method <code>{@link #createDefaultProbe()}</code> a <code>TransferMapProbe</code>
* is created in the case of a ring. The method <code>TransferMapState#setPhaseCoordinates</code>
* is called to create an initial static erorr. This does nothing because transfer map probes
* do not have phase coordinates any longer, the method is deprecated.
* <br/>
* <br/>
* - The static noise for the above call is hard coded. As are many features in this class.
* </p>
*
* VADocument is a custom AcceleratorDocument for virtual accelerator application.
* @version 1.6 13 Jul 2015
* @author Paul Chu
* @author Blaz Kranjc <[email protected]>
*/
public class VADocument extends AcceleratorDocument implements ActionListener, PutListener {
/** default BPM waveform size */
final static private int DEFAULT_BPM_WAVEFORM_SIZE = VAServer.DEFAULT_ARRAY_SIZE;
/** default BPM waveform data size (part of the waveform to populate with data) */
final static private int DEFAULT_BPM_WAVEFORM_DATA_SIZE = 250;
/** factory for server channels
* Not sure whether it is better for this to be static and shared across all documents.
* For now we will just use a common server factory across all documents (possibly prevents server conflicts).
*/
final static private ChannelFactory CHANNEL_SERVER_FACTORY = ChannelFactory.newServerFactory();
/** The document for the text pane in the main window. */
protected PlainDocument textDocument;
/** For on-line model */
protected Scenario modelScenario;
/* template probe which may be configured and then copied as the currentProbe for use in the simulation */
private Probe<?> baseProbe;
/* probe which was copied from the base probe and is being used in the simulation */
private Probe<?> currentProbe;
// model synchronization mode; defaults to synchronizing with the live machine
String dataSource = Scenario.SYNC_MODE_LIVE;
// NOTE(review): not referenced in the visible portion of this file -- looks like a leftover Trace3D status flag; confirm before removing
int runT3d_OK = 0;
/** dialog for configuring noise levels, static errors and misalignments (built in makeNoiseDialog) */
private JDialog setNoise = new JDialog();
// error-dialog input fields: noise levels per device family
private DecimalField df_quadNoise, df_bendNoise, df_dipCorrNoise, df_bpmNoise, df_solNoise, df_rfAmpNoise, df_rfPhaseNoise;
// error-dialog input fields: static (systematic) errors per device family
private DecimalField df_quadStatErr, df_bendStatErr, df_dipCorrStatErr, df_bpmStatErr, df_solStatErr, df_rfAmpStatErr, df_rfPhaseStatErr;
// error-dialog input fields: horizontal misalignments (the sol/rfCav fields are declared but never populated by makeNoiseDialog)
private DecimalField df_quadStatHorMisalign, df_bpmStatHorMisalign, df_solStatHorMisalign, df_rfCavStatHorMisalign;
// error-dialog input fields: vertical misalignments (the sol/rfCav fields are declared but never populated by makeNoiseDialog)
private DecimalField df_quadStatVerMisalign, df_bpmStatVerMisalign, df_solStatVerMisalign, df_rfCavStatVerMisalign;
// current error settings; persisted by saveDocumentAs and restored in update
private double quadNoise = 0.0;
private double dipoleNoise = 0.0;
private double correctorNoise = 0.0;
private double solNoise = 0.0;
private double bpmNoise = 0.0;
private double rfAmpNoise = 0.0;
private double rfPhaseNoise = 0.0;
private double quadStaticError = 0.0;
private double dipoleStaticError = 0.0;
private double correctorStaticError = 0.0;
private double solStaticError = 0.0;
private double bpmStaticError = 0.0;
private double rfAmpStaticError = 0.0;
private double rfPhaseStaticError = 0.0;
private double quadStatHorMisalign = 0.0;
private double quadStatVerMisalign = 0.0;
private double bpmStatHorMisalign = 0.0;
private double bpmStatVerMisalign = 0.0;
/** OK button of the error-settings dialog; fires action command "noiseSet" */
private JButton done = new JButton("OK");
// true once the virtual accelerator has been started; guards against a second start (see the run-va action)
private volatile boolean vaRunning = false;
// add by liyong
private java.util.List<AcceleratorNode> nodes; // TODO: CKA - NEVER USED
private java.util.List<RfCavity> rfCavities;
private java.util.List<Electromagnet> mags;
private java.util.List<BPM> bpms;
private java.util.List<ProfileMonitor> wss;
// served event/timing channels -- TODO(review): confirm exact semantics against VAServer
private Channel beamOnEvent;
private Channel beamOnEventCount;
private Channel slowDiagEvent;
private Channel _repRateChannel;
// timestamp of last update
private Date _lastUpdate;
private long beamOnEventCounter = 0;
// records pairing each setpoint channel with its readback channel; backs the main readback table
private List<ReadbackSetRecord> READBACK_SET_RECORDS;
// per-channel noise amplitudes and static offsets -- presumably applied when serving readbacks; TODO confirm
private LinkedHashMap<Channel, Double> ch_noiseMap;
private LinkedHashMap<Channel, Double> ch_staticErrorMap;
// the channel access server serving the virtual machine PVs
private VAServer _vaServer;
protected Commander commander;
// private RecentFileTracker _probeFileTracker;
// for on/off-line mode selection
ToggleButtonModel olmModel = new ToggleButtonModel();
ToggleButtonModel pvlogModel = new ToggleButtonModel();
ToggleButtonModel pvlogMovieModel = new ToggleButtonModel();
// mode flags toggled by the olm/pvlogger/pvlogMovie menu models (see customizeCommands)
private boolean isFromPVLogger = false;
private boolean isForOLM = false;
// PV Logger snapshot selection support
private PVLogSnapshotChooser plsc;
private JDialog pvLogSelector;
private PVLoggerDataSource plds;
/** bricks window reference */
private WindowReference _windowReference;
/** readback setpoint table model */
private KeyValueFilteredTableModel<ReadbackSetRecord> READBACK_SET_TABLE_MODEL;
/** timer to synch the readbacks with the setpoints and also sync the model */
final private DispatchTimer MODEL_SYNC_TIMER;
/** model sync period in milliseconds */
private long _modelSyncPeriod;
// diagnostics plots (beam displacement and beam size); populated in makeMainWindow
public DiagPlot _diagplot;
/** Create a new empty document with no source URL; delegates to the URL constructor. */
public VADocument() {
this( null );
}
/**
* Create a new document loaded from the URL file.
* Builds the model synchronization timer, the window reference and the
* filterable readback/setpoint table model.
* @param url The URL of the file to load into the new document.  May be null for an empty document.
*/
public VADocument( final java.net.URL url ) {
setSource( url );
// timer to synchronize readbacks with setpoints as well as the online model
MODEL_SYNC_TIMER = DispatchTimer.getCoalescingInstance( DispatchQueue.createSerialQueue( "" ), getOnlineModelSynchronizer() );
// set the default model sync period to 1 second
_modelSyncPeriod = 1000;
READBACK_SET_RECORDS = new ArrayList<ReadbackSetRecord>();
final WindowReference windowReference = getDefaultWindowReference( "MainWindow", this );
_windowReference = windowReference;
// table model keyed on node id plus readback/setpoint channel names and values
READBACK_SET_TABLE_MODEL = new KeyValueFilteredTableModel<ReadbackSetRecord>( new ArrayList<ReadbackSetRecord>(), "node.id", "readbackChannel.channelName", "lastReadback", "setpointChannel.channelName", "lastSetpoint" );
READBACK_SET_TABLE_MODEL.setColumnClass( "lastReadback", Number.class );
//READBACK_SET_TABLE_MODEL.setColumnClass( "lastSetpoint", Number.class );
READBACK_SET_TABLE_MODEL.setColumnClass( "lastSetpoint", Double.class );
READBACK_SET_TABLE_MODEL.setColumnName( "node.id", "Node" );
READBACK_SET_TABLE_MODEL.setColumnName( "readbackChannel.channelName", "Readback PV" );
READBACK_SET_TABLE_MODEL.setColumnName( "lastReadback", "Readback" );
READBACK_SET_TABLE_MODEL.setColumnName( "setpointChannel.channelName", "Setpoint PV" );
READBACK_SET_TABLE_MODEL.setColumnName( "lastSetpoint", "Setpoint" );
READBACK_SET_TABLE_MODEL.setColumnEditable("lastSetpoint", true);
// free-text filter field narrows the visible table rows
final JTextField filterField = (JTextField)windowReference.getView( "FilterField" );
READBACK_SET_TABLE_MODEL.setInputFilterComponent( filterField );
makeTextDocument();
// probe file management
//_probeFileTracker = new RecentFileTracker( 1, this.getClass(), "recent_probes" );
_lastUpdate = new Date();
if ( url == null ) return;
}
/**
 * Supply the channel factory used for this document's channels.
 * All documents share the single server channel factory so that served
 * PVs do not conflict across documents.
 * @return the shared server channel factory
 */
@Override
public ChannelFactory nextChannelFactory() {
    return CHANNEL_SERVER_FACTORY;
}
/** Make a main window by instantiating the custom window; wires up the readback table, the diagnostics plots and the noise dialog, then restores state from the source URL when present. */
@Override
public void makeMainWindow() {
mainWindow = (XalWindow)_windowReference.getWindow();
final JTable readbackTable = (JTable)_windowReference.getView( "ReadbackTable" );
readbackTable.setCellSelectionEnabled( true );
readbackTable.setModel( READBACK_SET_TABLE_MODEL );
// add the diagnostics plots (beam displacement and beam size)
final FunctionGraphsJPanel beamdispplot = (FunctionGraphsJPanel) _windowReference.getView("BeamDispPlot");
final FunctionGraphsJPanel sigamplot = (FunctionGraphsJPanel) _windowReference.getView("SigmaPlot");
_diagplot = new DiagPlot(beamdispplot, sigamplot);
makeNoiseDialog();
if (getSource() != null) {
java.net.URL url = getSource();
DataAdaptor documentAdaptor = XmlDataAdaptor.adaptorForUrl( url, false );
// NOTE(review): "MpxDocument" is requested here while update() re-reads the source URL and looks for a "VA" node -- verify the intended document tag
update( documentAdaptor.childAdaptor("MpxDocument") );
}
setHasChanges(false);
}
/**
 * Get the period between successive model synchronizations.
 * @return the model synchronization period in milliseconds
 */
public long getModelSyncPeriod() {
    return _modelSyncPeriod;
}
/**
 * Update the model synchronization period and restart the sync timer
 * immediately with the new interval; marks the document as modified.
 * @param periodMillis the new synchronization period in milliseconds
 */
public void setModelSyncPeriod( final long periodMillis ) {
    _modelSyncPeriod = periodMillis;
    // restart the timer right away so the new period takes effect immediately
    MODEL_SYNC_TIMER.startNowWithInterval( _modelSyncPeriod, 0 );
    setHasChanges( true );
}
/**
 * Build the (initially hidden) "Define errors..." dialog used to configure the
 * virtual machine's error settings.  The dialog holds four side-by-side
 * columns: relative noise levels, static errors, horizontal misalignments and
 * vertical misalignments.  Every row is created by {@link #addErrorRow}, which
 * also returns the decimal input field stored in the corresponding df_* member.
 * The dialog is confirmed via the shared "done" button (action command
 * "noiseSet"), handled by this document's actionPerformed().
 */
private void makeNoiseDialog() {
    JPanel settingPanel = new JPanel();
    JPanel noiseLevelPanel = new JPanel();
    JPanel staticErrorPanel = new JPanel();
    JPanel staticHorMisalignPanel = new JPanel();
    JPanel staticVerMisalignPanel = new JPanel();
    // shared number format for all input fields (3 fraction digits max)
    NumberFormat numberFormat = NumberFormat.getNumberInstance();
    numberFormat.setMaximumFractionDigits(3);
    // column 1: noise levels (% of setting; mm for BPMs)
    noiseLevelPanel.setLayout(new GridLayout(8, 1));
    noiseLevelPanel.add(new JLabel("Noise Level"));
    df_quadNoise = addErrorRow(noiseLevelPanel, "Quad: ", " %", 3, numberFormat);
    df_bendNoise = addErrorRow(noiseLevelPanel, "Bending Dipole: ", " %", 3, numberFormat);
    df_dipCorrNoise = addErrorRow(noiseLevelPanel, "Dipole Corr.: ", " %", 3, numberFormat);
    df_solNoise = addErrorRow(noiseLevelPanel, "Solenoid: ", " %", 3, numberFormat);
    df_rfAmpNoise = addErrorRow(noiseLevelPanel, "RF amp: ", " %", 3, numberFormat);
    df_rfPhaseNoise = addErrorRow(noiseLevelPanel, "RF phase: ", " %", 3, numberFormat);
    df_bpmNoise = addErrorRow(noiseLevelPanel, "BPM: ", " mm", 3, numberFormat);
    // column 2: static errors (in device units)
    staticErrorPanel.setLayout(new GridLayout(8, 1));
    staticErrorPanel.add(new JLabel("Static error"));
    df_quadStatErr = addErrorRow(staticErrorPanel, "Quad: ", " T/m", 2, numberFormat);
    df_bendStatErr = addErrorRow(staticErrorPanel, "Bending Dipole: ", " T", 2, numberFormat);
    df_dipCorrStatErr = addErrorRow(staticErrorPanel, "Dipole Corr.: ", " T", 2, numberFormat);
    df_solStatErr = addErrorRow(staticErrorPanel, "Solenoid: ", " T", 2, numberFormat);
    df_rfAmpStatErr = addErrorRow(staticErrorPanel, "RF amp: ", " kV", 2, numberFormat);
    df_rfPhaseStatErr = addErrorRow(staticErrorPanel, "RF phase: ", " deg", 2, numberFormat);
    df_bpmStatErr = addErrorRow(staticErrorPanel, "BPM: ", " mm", 2, numberFormat);
    // column 3: horizontal misalignments (static)
    staticHorMisalignPanel.setLayout(new GridLayout(8, 1));
    staticHorMisalignPanel.add(new JLabel("Horizontal misalignments"));
    df_quadStatHorMisalign = addErrorRow(staticHorMisalignPanel, "Quad: ", " mm", 2, numberFormat);
    df_bpmStatHorMisalign = addErrorRow(staticHorMisalignPanel, "bpm: ", " mm", 2, numberFormat);
    // column 4: vertical misalignments (static)
    staticVerMisalignPanel.setLayout(new GridLayout(8, 1));
    staticVerMisalignPanel.add(new JLabel("Vertical misalignments"));
    df_quadStatVerMisalign = addErrorRow(staticVerMisalignPanel, "Quad: ", " mm", 2, numberFormat);
    df_bpmStatVerMisalign = addErrorRow(staticVerMisalignPanel, "bpm: ", " mm", 2, numberFormat);
    // put everything together: four columns side by side plus the OK button
    setNoise.setBounds(300, 300, 900, 600);
    setNoise.setTitle("Define errors...");
    settingPanel.setLayout(new BoxLayout(settingPanel, BoxLayout.X_AXIS));
    settingPanel.add(noiseLevelPanel);
    settingPanel.add(staticErrorPanel);
    settingPanel.add(staticHorMisalignPanel);
    settingPanel.add(staticVerMisalignPanel);
    setNoise.getContentPane().setLayout(new BorderLayout());
    setNoise.getContentPane().add(settingPanel, BorderLayout.CENTER);
    setNoise.getContentPane().add(done, BorderLayout.SOUTH);
    done.setActionCommand("noiseSet");
    done.addActionListener(this);
    setNoise.pack();
}
/**
 * Append one labeled input row (label, decimal field, unit label) to an
 * error-settings column.
 * @param column the column panel the row is appended to
 * @param label text of the leading label, e.g. "Quad: "
 * @param unit text of the trailing unit label, e.g. " %" or " mm"
 * @param gridColumns column count passed to the row's GridLayout; kept from
 *        the original code (3 for the noise column, 2 elsewhere) -- with a
 *        single row, GridLayout accommodates the third component either way
 * @param format shared number format used by all input fields
 * @return the newly created input field so the caller can keep a reference
 */
private DecimalField addErrorRow(final JPanel column, final String label, final String unit, final int gridColumns, final NumberFormat format) {
    final JPanel row = new JPanel();
    row.setLayout(new GridLayout(1, gridColumns));
    final DecimalField field = new DecimalField(0., 5, format);
    row.add(new JLabel(label));
    row.add(field);
    row.add(new JLabel(unit));
    column.add(row);
    return field;
}
/**
* Save the document to the specified URL as XML.
* Persists the accelerator file path, the selected sequence(s), the error
* settings (noise, static errors, misalignments) and the model sync period.
* The element/attribute layout written here is what update() reads back.
* @param url The URL to which the document should be saved.
*/
@Override
public void saveDocumentAs(URL url) {
XmlDataAdaptor xda = XmlDataAdaptor.newEmptyDocumentAdaptor();
DataAdaptor daLevel1 = xda.createChild("VA");
//save accelerator file
DataAdaptor daXMLFile = daLevel1.createChild("accelerator");
daXMLFile.setValue("xmlFile", this.getAcceleratorFilePath());
// save selected sequences
List<String> sequenceNames;
if ( getSelectedSequence() != null ) {
DataAdaptor daSeq = daLevel1.createChild("sequences");
daSeq.setValue("name", getSelectedSequence().getId());
// a combo sequence is stored as the list of its constituent sequence names
if ( getSelectedSequence() instanceof AcceleratorSeqCombo ) {
AcceleratorSeqCombo asc = (AcceleratorSeqCombo) getSelectedSequence();
sequenceNames = asc.getConstituentNames();
}
else {
sequenceNames = new ArrayList<String>();
sequenceNames.add( getSelectedSequence().getId() );
}
for ( final String sequenceName : sequenceNames ) {
DataAdaptor daSeqComponents = daSeq.createChild( "seq" );
daSeqComponents.setValue( "name", sequenceName );
}
// note: the error settings below are only persisted when a sequence is selected
DataAdaptor daNoise = daLevel1.createChild("noiseLevels");
daNoise.setValue("quad", quadNoise);
daNoise.setValue("dipole", dipoleNoise);
daNoise.setValue("corrector", correctorNoise);
daNoise.setValue("bpm", bpmNoise);
daNoise.setValue("sol", solNoise);
daNoise.setValue("rfAmp", rfAmpNoise);
daNoise.setValue("rfPhase", rfPhaseNoise);
DataAdaptor daStaticError = daLevel1.createChild("staticErrors");
daStaticError.setValue("quad", quadStaticError);
daStaticError.setValue("dipole", dipoleStaticError);
daStaticError.setValue("corrector", correctorStaticError);
daStaticError.setValue("bpm", bpmStaticError);
daStaticError.setValue("sol", solStaticError);
daStaticError.setValue("rfAmp", rfAmpStaticError);
daStaticError.setValue("rfPhase", rfPhaseStaticError);
DataAdaptor daStaticHorMisalign = daLevel1.createChild("horizontalMisalignments");
daStaticHorMisalign.setValue("quad", quadStatHorMisalign);
daStaticHorMisalign.setValue("bpm", bpmStatHorMisalign);
DataAdaptor daStaticVerMisalign = daLevel1.createChild("verticalMisalignments");
daStaticVerMisalign.setValue("quad", quadStatVerMisalign);
daStaticVerMisalign.setValue("bpm", bpmStatVerMisalign);
}
daLevel1.setValue( "modelSyncPeriod", _modelSyncPeriod );
xda.writeToUrl(url);
setHasChanges(false);
}
/**
 * Instantiate the PlainDocument backing the main window's text pane and
 * attach a listener that marks this document as having unsaved changes
 * whenever text is inserted, removed or changed.
 */
private void makeTextDocument() {
    textDocument = new PlainDocument();
    final DocumentListener changeMarker = new DocumentListener() {
        @Override
        public void changedUpdate( final DocumentEvent evt ) {
            setHasChanges( true );
        }
        @Override
        public void removeUpdate( final DocumentEvent evt ) {
            setHasChanges( true );
        }
        @Override
        public void insertUpdate( final DocumentEvent evt ) {
            setHasChanges( true );
        }
    };
    textDocument.addDocumentListener( changeMarker );
}
/**
 * Build the template probe matching the selected sequence -- a transfer map
 * probe for a ring, an envelope probe otherwise -- then copy it into the
 * working probe and attach that copy to the model scenario.  Does nothing
 * when no sequence is selected; failures are reported via an error dialog.
 */
private void createDefaultProbe() {
    if ( selectedSequence == null ) return;
    try {
        if ( selectedSequence instanceof xal.smf.Ring ) {
            baseProbe = createRingProbe( selectedSequence );
        }
        else {
            baseProbe = createEnvelopeProbe( selectedSequence );
        }
        currentProbe = baseProbe.copy();
        currentProbe.initialize();
        modelScenario.setProbe( currentProbe );
    }
    catch ( Exception exception ) {
        displayError( "Error Creating Probe", "Probe Error", exception );
    }
}
/**
 * Construct a new transfer map probe for ring simulation.
 * @param sequence the ring sequence the probe will traverse
 * @return a transfer map probe initialized for the sequence
 * @throws InstantiationException if the tracker cannot be instantiated
 */
static private TransferMapProbe createRingProbe( final AcceleratorSeq sequence ) throws InstantiationException {
    return ProbeFactory.getTransferMapProbe( sequence, AlgorithmFactory.createTransferMapTracker( sequence ) );
}
/**
 * Construct a new envelope probe for linac-style simulation.
 * @param sequence the sequence the probe will traverse
 * @return an envelope probe initialized for the sequence
 * @throws InstantiationException if the tracker cannot be instantiated
 */
static private EnvelopeProbe createEnvelopeProbe( final AcceleratorSeq sequence ) throws InstantiationException {
    return ProbeFactory.getEnvelopeProbe( sequence, AlgorithmFactory.createEnvTrackerAdapt( sequence ) );
}
/**
* Register this document's custom commands with the commander: the probe
* editor action, the three model-source toggle models (online model,
* PV Logger snapshot, PV Logger replay) and the run/stop/set-noise/sync-config
* actions for the virtual accelerator.
* @param commander the commander to register actions and button models with
*/
@Override
public void customizeCommands(Commander commander) {
// open probe editor
// TODO: implement probe editor support
this.commander = commander;
Action probeEditorAction = new AbstractAction("probe-editor") {
static final long serialVersionUID = 0;
@Override
public void actionPerformed(ActionEvent event) {
if ( baseProbe != null ) {
// stop serving PVs while the probe is being edited
stopServer();
final SimpleProbeEditor probeEditor = new SimpleProbeEditor( getMainWindow(), baseProbe );
baseProbe = probeEditor.getProbe();
// refresh the working probe from the (possibly modified) template
currentProbe = baseProbe.copy();
currentProbe.initialize();
if ( modelScenario != null ) {
modelScenario.setProbe(currentProbe);
}
}
else {
//Sequence has not been selected
displayError("Probe Editor Error", "You must select a sequence before attempting to edit the probe.");
}
}
};
probeEditorAction.putValue(Action.NAME, "probe-editor");
commander.registerAction(probeEditorAction);
// action for using online model as engine
olmModel.setSelected(true);
olmModel.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
isForOLM = true;
isFromPVLogger = false;
}
});
commander.registerModel("olm", olmModel);
// action for using PV logger snapshot through online model
pvlogModel.setSelected(false);
pvlogModel.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
isForOLM = true;
isFromPVLogger = true;
if (pvLogSelector == null) {
// for PV Logger snapshot chooser
plsc = new PVLogSnapshotChooser();
pvLogSelector = plsc.choosePVLogId();
} else
pvLogSelector.setVisible(true);
}
});
commander.registerModel("pvlogger", pvlogModel);
// action for direct replaying of PVLogger logged data
pvlogMovieModel.setSelected(false);
pvlogMovieModel.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
isForOLM = false;
isFromPVLogger = true;
if (pvLogSelector == null) {
// for PV Logger snapshot chooser
plsc = new PVLogSnapshotChooser();
pvLogSelector = plsc.choosePVLogId();
} else
pvLogSelector.setVisible(true);
}
});
commander.registerModel("pvlogMovie", pvlogMovieModel);
// action for running model and Diagnostics acquisition
Action runAction = new AbstractAction() {
static final long serialVersionUID = 0;
@Override
public void actionPerformed(ActionEvent event) {
// refuse to start twice
if ( vaRunning ) {
JOptionPane.showMessageDialog( getMainWindow(), "Virtual Accelerator has already started.", "Warning!", JOptionPane.PLAIN_MESSAGE );
return;
}
// require RBAC authorization before starting the VA
if(!Application.getApp().authorizeWithRBAC("Start")){
JOptionPane.showMessageDialog( getMainWindow(), "You are unauthorized for this action.", "Warning!", JOptionPane.PLAIN_MESSAGE );
return;
}
if ( getSelectedSequence() == null ) {
JOptionPane.showMessageDialog( getMainWindow(), "You need to select sequence(s) first.", "Warning!", JOptionPane.PLAIN_MESSAGE );
} else {
// use PV logger
if ( isFromPVLogger ) {
long pvLoggerId = plsc.getPVLogId();
runServer();
plds = new PVLoggerDataSource(pvLoggerId);
// use PVLogger to construct the model
if (isForOLM) {
// load the settings from the PV Logger
putSetPVsFromPVLogger();
// synchronize with the online model
MODEL_SYNC_TIMER.setEventHandler( getOnlineModelSynchronizer() );
}
else { // directly use PVLogger data for replay
MODEL_SYNC_TIMER.setEventHandler( getPVLoggerSynchronizer() );
}
}
// use online model
else {
if ( currentProbe == null ) {
createDefaultProbe();
if ( currentProbe == null ) {
displayWarning( "Warning!", "You need to select probe file first." );
return;
}
// re-enter this handler now that a default probe exists
actionPerformed( event );
}
else {
runServer();
}
// put the initial B_Book PVs to the server
configFieldBookPVs();
//put "set" PVs to the server
putSetPVs();
// continuously loop through the next 3 steps
System.out.println( "Setup to synchronize the online model periodically..." );
MODEL_SYNC_TIMER.setEventHandler( getOnlineModelSynchronizer() );
}
// kick off periodic synchronization with the configured period
MODEL_SYNC_TIMER.startNowWithInterval( _modelSyncPeriod, 0 );
MODEL_SYNC_TIMER.resume();
}
}
};
runAction.putValue(Action.NAME, "run-va");
commander.registerAction(runAction);
// stop the channel access server
Action stopAction = new AbstractAction() {
static final long serialVersionUID = 0;
@Override
public void actionPerformed(ActionEvent event) {
stopServer();
}
};
stopAction.putValue(Action.NAME, "stop-va");
commander.registerAction(stopAction);
// set noise level: load the current settings into the dialog fields, then show it
Action setNoiseAction = new AbstractAction() {
static final long serialVersionUID = 0;
@Override
public void actionPerformed(ActionEvent event) {
df_quadNoise.setValue(quadNoise);
df_bendNoise.setValue(dipoleNoise);
df_dipCorrNoise.setValue(correctorNoise);
df_bpmNoise.setValue(bpmNoise);
df_solNoise.setValue(solNoise);
df_rfAmpNoise.setValue(rfAmpNoise);
df_rfPhaseNoise.setValue(rfPhaseNoise);
df_quadStatErr.setValue(quadStaticError);
df_bendStatErr.setValue(dipoleStaticError);
df_dipCorrStatErr.setValue(correctorStaticError);
df_bpmStatErr.setValue(bpmStaticError);
df_solStatErr.setValue(solStaticError);
df_rfAmpStatErr.setValue(rfAmpStaticError);
df_rfPhaseStatErr.setValue(rfPhaseStaticError);
df_quadStatHorMisalign.setValue(quadStatHorMisalign);
df_quadStatVerMisalign.setValue(quadStatVerMisalign);
df_bpmStatHorMisalign.setValue(bpmStatHorMisalign);
df_bpmStatVerMisalign.setValue(bpmStatVerMisalign);
setNoise.setVisible(true);
}
};
setNoiseAction.putValue(Action.NAME, "set-noise");
commander.registerAction(setNoiseAction);
// configure synchronization: prompt for a new model sync period in milliseconds
final Action synchConfigAction = new AbstractAction() {
static final long serialVersionUID = 0;
@Override
public void actionPerformed(ActionEvent event) {
final String result = JOptionPane.showInputDialog( getMainWindow(), "Set the Model Synchronization Period (milliseconds): ", _modelSyncPeriod );
if ( result != null ) {
try {
final long modelSyncPeriod = Long.parseLong( result );
setModelSyncPeriod( modelSyncPeriod );
}
catch( Exception exception ) {
displayError( "Error setting Model Sync Period!", exception.getMessage() );
}
}
}
};
synchConfigAction.putValue( Action.NAME, "sync-config" );
commander.registerAction( synchConfigAction );
}
/** Handle notification that this document is about to close by tearing down the VA server. */
@Override
public void willClose() {
    System.out.println( "Document will be closed" );
    destroyServer();
}
/**
 * Restore this document's state from its persisted XML source.
 * Reads the accelerator file reference, the selected sequence(s), noise levels, static errors,
 * misalignments and the model sync period from the "VA" data adaptor, then rebuilds the
 * selected sequence and the default probe.
 * @param adaptor the data adaptor (unused here; state is re-read from {@code getSource()})
 */
public void update( final DataAdaptor adaptor ) {
    if ( getSource() != null ) {
        XmlDataAdaptor xda = XmlDataAdaptor.adaptorForUrl( getSource(), false );
        DataAdaptor da1 = xda.childAdaptor( "VA" );
        //restore accelerator file
        applySelectedAcceleratorWithDefaultPath( da1.childAdaptor( "accelerator" ).stringValue( "xmlFile" ) );
        // set up the right sequence combo from selected primaries:
        List<DataAdaptor> temp = da1.childAdaptors( "sequences" );
        if ( temp.isEmpty() ) return; // bail out, nothing left to do
        ArrayList<AcceleratorSeq> seqs = new ArrayList<AcceleratorSeq>();
        DataAdaptor da2a = da1.childAdaptor( "sequences" );
        String seqName = da2a.stringValue( "name" );
        // optional saved noise levels (relative noise per device family)
        DataAdaptor daNoise = da1.childAdaptor("noiseLevels");
        if (daNoise != null) {
            quadNoise = daNoise.doubleValue("quad");
            dipoleNoise = daNoise.doubleValue("dipole");
            correctorNoise = daNoise.doubleValue("corrector");
            bpmNoise = daNoise.doubleValue("bpm");
            solNoise = daNoise.doubleValue("sol");
            rfAmpNoise = daNoise.doubleValue("rfAmp");
            rfPhaseNoise = daNoise.doubleValue("rfPhase");
        }
        // optional saved static-error half-widths
        DataAdaptor daStaticError = da1.childAdaptor("staticErrors");
        if (daStaticError != null) {
            quadStaticError = daStaticError.doubleValue("quad");
            dipoleStaticError = daStaticError.doubleValue("dipole");
            correctorStaticError = daStaticError.doubleValue("corrector");
            bpmStaticError = daStaticError.doubleValue("bpm");
            solStaticError = daStaticError.doubleValue("sol");
            rfAmpStaticError = daStaticError.doubleValue("rfAmp");
            rfPhaseStaticError = daStaticError.doubleValue("rfPhase");
        }
        // optional saved horizontal misalignment half-widths (quads and BPMs only)
        DataAdaptor daStaticHorMisalign = da1.childAdaptor("horizontalMisalignments");
        if (daStaticHorMisalign != null) {
            quadStatHorMisalign = daStaticHorMisalign.doubleValue("quad");
            bpmStatHorMisalign = daStaticHorMisalign.doubleValue("bpm");
        }
        // optional saved vertical misalignment half-widths (quads and BPMs only)
        DataAdaptor daStaticVerMisalign = da1.childAdaptor("verticalMisalignments");
        if (daStaticVerMisalign != null) {
            quadStatVerMisalign = daStaticVerMisalign.doubleValue("quad");
            bpmStatVerMisalign = daStaticVerMisalign.doubleValue("bpm");
        }
        // resolve each saved sequence name against the loaded accelerator
        temp = da2a.childAdaptors("seq");
        for ( final DataAdaptor da : temp ) {
            seqs.add( getAccelerator().getSequence( da.stringValue("name") ) );
        }
        // a combo named "Ring" gets the dedicated Ring type; anything else a generic combo
        if (seqName.equals("Ring"))
            setSelectedSequence(new Ring(seqName, seqs));
        else
            setSelectedSequence(new AcceleratorSeqCombo(seqName, seqs));
        setSelectedSequenceList(seqs.subList(0, seqs.size()));
        createDefaultProbe();
        modelScenario.setProbe(currentProbe);
        if ( da1.hasAttribute( "modelSyncPeriod" ) ) {
            _modelSyncPeriod = da1.longValue( "modelSyncPeriod" );
        }
    }
}
/** @return the on-line model scenario for the currently selected sequence (may be null before a sequence is selected) */
protected Scenario getScenario() {
    return modelScenario;
}
/** @return true if the virtual accelerator server is currently running */
protected boolean isVARunning() {
    return vaRunning;
}
/**
 * Update the warning/alarm limit channels based on changes to the Field Book channels.
 * For each electromagnet with a connected field-book channel, the warning limits are set to
 * +/-5% of the booked field and the alarm limits to +/-10%, posted asynchronously via put callbacks.
 */
private void updateLimitChannels() {
    for ( final Electromagnet magnet : mags ) {
        try {
            final Channel bookChannel = magnet.getMainSupply().findChannel( MagnetMainSupply.FIELD_BOOK_HANDLE );
            final Channel fieldChannel = magnet.getMainSupply().findChannel( MagnetMainSupply.FIELD_SET_HANDLE );
            if ( bookChannel != null ) {
                if ( bookChannel.isConnected() ) {
                    final double bookField = bookChannel.getValDbl();
                    // 5% band for warnings, 10% band for alarms, centered on the booked field
                    final double warningStaticError = 0.05 * Math.abs( bookField );
                    final double alarmStaticError = 0.1 * Math.abs( bookField );
                    final String[] warningPVs = fieldChannel.getWarningLimitPVs();
                    final Channel lowerWarningChannel = CHANNEL_SERVER_FACTORY.getChannel( warningPVs[0], fieldChannel.getValueTransform() );
                    // System.out.println( "Lower Limit PV: " + lowerWarningChannel.channelName() );
                    if ( lowerWarningChannel.connectAndWait() ) {
                        lowerWarningChannel.putValCallback( bookField - warningStaticError, this );
                    }
                    final Channel upperWarningChannel = CHANNEL_SERVER_FACTORY.getChannel( warningPVs[1], fieldChannel.getValueTransform() );
                    if ( upperWarningChannel.connectAndWait() ) {
                        upperWarningChannel.putValCallback( bookField + warningStaticError, this );
                    }
                    final String[] alarmPVs = fieldChannel.getAlarmLimitPVs();
                    final Channel lowerAlarmChannel = CHANNEL_SERVER_FACTORY.getChannel( alarmPVs[0], fieldChannel.getValueTransform() );
                    if ( lowerAlarmChannel.connectAndWait() ) {
                        lowerAlarmChannel.putValCallback( bookField - alarmStaticError, this );
                    }
                    final Channel upperAlarmChannel = CHANNEL_SERVER_FACTORY.getChannel( alarmPVs[1], fieldChannel.getValueTransform() );
                    if ( upperAlarmChannel.connectAndWait() ) {
                        upperAlarmChannel.putValCallback( bookField + alarmStaticError, this );
                    }
                }
            }
        }
        catch ( NoSuchChannelException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( ConnectionException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( GetException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( PutException exception ) {
            System.err.println( exception.getMessage() );
        }
    }
    // flush all the queued asynchronous puts to channel access
    Channel.flushIO();
}
/**
 * Populate the readback PVs from the current setpoints.
 * Posts the timing/trigger events, copies each "set" PV value (plus configured noise and static
 * error) to its "readback" PV, refreshes the readback/set table, and updates the limit channels.
 */
private void putReadbackPVs() {
    // set beam trigger PV to "on"
    try {
        final Date now = new Date();
        if ( _repRateChannel != null ) {
            // report the measured rep rate as the inverse of the elapsed time since the last update
            final double updatePeriod = 0.001 * ( now.getTime() - _lastUpdate.getTime() );		// period of update in seconds
            _repRateChannel.putValCallback( 1.0 / updatePeriod , this );
        }
        _lastUpdate = now;
        if ( beamOnEvent != null )  beamOnEvent.putValCallback( 0, this );
        beamOnEventCounter++;
        if ( beamOnEventCount != null )  beamOnEventCount.putValCallback( beamOnEventCounter, this );
        if ( slowDiagEvent != null )  slowDiagEvent.putValCallback( 0, this );
    } catch (ConnectionException e) {
        System.err.println(e);
    } catch (PutException e) {
        System.err.println(e);
    }
    // get the "set" PV value, add noise, and then put to the corresponding readback PV.
    for ( final ReadbackSetRecord record : READBACK_SET_RECORDS ) {
        try {
            record.updateReadback( ch_noiseMap, ch_staticErrorMap, this );
        }
        catch (Exception e) {
            System.err.println( e.getMessage() );
        }
    }
    Channel.flushIO();
    // notify the table model so the UI reflects the new readback values
    final int rowCount = READBACK_SET_TABLE_MODEL.getRowCount();
    if ( rowCount > 0 ) {
        READBACK_SET_TABLE_MODEL.fireTableRowsUpdated( 0, rowCount - 1 );
    }
    updateLimitChannels();
}
/**
 * Populate the readback PVs from a PV Logger snapshot instead of the live setpoints.
 * Uses the logged magnet values as the basis for each readback, still applying the configured
 * noise and static error.
 */
private void putReadbackPVsFromPVLogger() {
    final Map<String,Double> qPVMap = plds.getMagnetMap();
    // set beam trigger PV to "on"
    try {
        if ( beamOnEvent != null )  beamOnEvent.putVal(0);
        beamOnEventCounter++;
        if ( beamOnEventCount != null )  beamOnEventCount.putVal(beamOnEventCounter);
        if ( slowDiagEvent != null )  slowDiagEvent.putVal( 0 );
    } catch (ConnectionException e) {
        System.err.println(e);
    } catch (PutException e) {
        System.err.println(e);
    }
    // get the "set" PV value, add noise, and then put to the corresponding readback PV.
    for ( final ReadbackSetRecord record : READBACK_SET_RECORDS ) {
        try {
            final String readbackPV = record.getReadbackChannel().channelName();
            if ( qPVMap.containsKey( readbackPV ) ) {
                final double basisValue = qPVMap.get( readbackPV ).doubleValue();
                record.updateReadback( basisValue, ch_noiseMap, ch_staticErrorMap, this );
            }
        }
        catch ( Exception e ) {
            System.err.println( e.getMessage() );
        }
    }
    READBACK_SET_TABLE_MODEL.fireTableDataChanged();
}
/**
 * Initialize the field book PVs from the magnets' default (design) field values.
 * Connection and put failures are reported to stderr but do not abort the loop.
 */
private void configFieldBookPVs() {
    for ( final Electromagnet magnet : mags ) {
        try {
            final Channel bookChannel = magnet.getMainSupply().findChannel( MagnetMainSupply.FIELD_BOOK_HANDLE );
            if ( bookChannel != null ) {
                if ( bookChannel.connectAndWait() ) {
                    // convert the design field to channel-access units before posting
                    final double bookField = magnet.toCAFromField( magnet.getDfltField() );
                    bookChannel.putValCallback( bookField, this );
                }
            }
        }
        catch ( NoSuchChannelException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( ConnectionException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( PutException exception ) {
            System.err.println( exception.getMessage() );
        }
    }
}
/**
 * Populate all the "set" PV values from design values.
 * Magnets get their default field (trim supplies are zeroed); RF cavities get their default
 * amplitude and phase (SCL cavity amplitudes are scaled by the structure TTF).
 */
private void putSetPVs() {
    // for all magnets
    for ( final Electromagnet em : mags ) {
        try {
            Channel ch = em.getMainSupply().getAndConnectChannel( MagnetMainSupply.FIELD_SET_HANDLE );
            final double setting = em.toCAFromField( em.getDfltField() );
            //System.out.println("Ready to put " + setting + " to " + ch.getId());
            ch.putValCallback( setting, this);
            // trimmed quads also carry a trim supply, which starts at zero
            if ( em instanceof TrimmedQuadrupole ) {
                Channel trimChannel = ((TrimmedQuadrupole)em).getTrimSupply().getAndConnectChannel( MagnetTrimSupply.FIELD_SET_HANDLE );
                //System.out.println("Ready to put " + 0.0 + " to " + trimChannel.getId());
                trimChannel.putValCallback( 0.0, this);
            }
        }
        catch (NoSuchChannelException e) {
            System.err.println(e.getMessage());
        }
        catch (ConnectionException e) {
            System.err.println(e.getMessage());
        }
        catch (PutException e) {
            System.err.println(e.getMessage());
        }
    }
    // for all rf cavities
    for ( final RfCavity rfCavity : rfCavities ) {
        try {
            final Channel ampSetCh = rfCavity.findChannel( RfCavity.CAV_AMP_SET_HANDLE );
            if ( ampSetCh.isValid() ) {
                ampSetCh.connectAndWait();
                //System.out.println("Ready to put " + rfCavity.getDfltCavAmp() + " to " + ampSetCh.getId());
                // SCL cavities scale the design amplitude by the structure transit-time factor
                if (rfCavity instanceof xal.smf.impl.SCLCavity) {
                    ampSetCh.putValCallback( rfCavity.getDfltCavAmp()*((SCLCavity)rfCavity).getStructureTTF(), this );
                }
                else {
                    ampSetCh.putValCallback( rfCavity.getDfltCavAmp(), this );
                }
            }
            final Channel phaseSetCh = rfCavity.findChannel( RfCavity.CAV_PHASE_SET_HANDLE );
            if ( phaseSetCh.isValid() ) {
                phaseSetCh.connectAndWait();
                //System.out.println("Ready to put " + rfCavity.getDfltCavPhase() + " to " + phaseSetCh.getId());
                phaseSetCh.putValCallback( rfCavity.getDfltCavPhase(), this );
            }
        } catch (NoSuchChannelException e) {
            System.err.println(e.getMessage());
        } catch (ConnectionException e) {
            System.err.println(e.getMessage());
        } catch (PutException e) {
            System.err.println(e.getMessage());
        }
    }
    // flush all the queued asynchronous puts to channel access
    Channel.flushIO();
}
/**
 * Populate the magnet "set" PVs from a PV Logger snapshot.
 * Only channels whose IDs appear in the logged power-supply map are written.
 */
private void putSetPVsFromPVLogger() {
    final Map<String,Double> qPSPVMap = plds.getMagnetPSMap();
    for ( final Electromagnet em : mags ) {
        try {
            Channel ch = em.getMainSupply().getAndConnectChannel( MagnetMainSupply.FIELD_SET_HANDLE );
            //System.out.println("Ready to put " + Math.abs(em.getDfltField()) + " to " + ch.getId());
            final String channelID = ch.getId();
            if ( qPSPVMap.containsKey( channelID ) )
                ch.putValCallback( qPSPVMap.get( channelID ).doubleValue(), this );
        }
        catch (NoSuchChannelException e) {
            System.err.println(e.getMessage());
        }
        catch (ConnectionException e) {
            System.err.println(e.getMessage());
        }
        catch (PutException e) {
            System.err.println(e.getMessage());
        }
    }
}
/**
 * Populate the diagnostic PVs (only BPMs + wire scanners for now) from the last model run,
 * and refresh the diagnostics plots.
 * Orbit positions are taken from the model's computed fixed orbit; values are scaled by 1000
 * (presumably m to mm, matching the plot axis labels — TODO confirm units).
 */
protected void putDiagPVs() {
    // CKA Nov 25, 2013: adaptor that exposes fixed orbit / Twiss uniformly across probe types
    SimpleSimResultsAdaptor cmpCalcEngine = new SimpleSimResultsAdaptor( modelScenario.getTrajectory() );
    /**temporary list data for getting the array bpm and ws datas*/
    int i = 0;
    List<Double> tempBPMx = new ArrayList<Double>();
    List<Double> tempBPMy = new ArrayList<Double>();
    List<Double> tempBPMp = new ArrayList<Double>();
    List<Double> tempWSx = new ArrayList<Double>();
    List<Double> tempWSy = new ArrayList<Double>();
    List<Double> tempWSp = new ArrayList<Double>();
    List<Double> tempbeampos = new ArrayList<Double>();
    List<Double> tempbeamx = new ArrayList<Double>();
    List<Double> tempbeamy = new ArrayList<Double>();
    List<Double> tempsigmaz = new ArrayList<Double>();
    // walk every probe state along the trajectory collecting orbit and longitudinal envelope data
    final Iterator<? extends ProbeState<?>> stateIter =modelScenario.getTrajectory().stateIterator();
    while ( stateIter.hasNext() ) {
        final ProbeState<?> state = stateIter.next();
        //   EnvelopeProbeState state = (EnvelopeProbeState) stateIter.next();
        double position = state.getPosition();
        final PhaseVector coordinateVector = cmpCalcEngine.computeFixedOrbit( state );
        double x = coordinateVector.getx() * 1000;
        double y = coordinateVector.gety()* 1000;
        final Twiss[] twiss = cmpCalcEngine.computeTwissParameters( state );
        // index 2 is the longitudinal plane
        double sigmaz=twiss[2].getEnvelopeRadius() * 1000;
        tempbeampos.add(position);
        tempbeamx.add(x);
        tempbeamy.add(y);
        tempsigmaz.add(sigmaz);
    }
    // unbox the collected lists into plain arrays for the plot API
    double beamp[] = new double[tempbeampos.size()];
    double beamx[] = new double[tempbeampos.size()];
    double beamy[] = new double[tempbeampos.size()];
    double beamsigmaz[]=new double[tempbeampos.size()];
    for (i = 0; i < tempbeampos.size(); i++) {
        beamp[i] = tempbeampos.get(i);
        beamx[i] = tempbeamx.get(i);
        beamy[i] = tempbeamy.get(i);
        beamsigmaz[i]=tempsigmaz.get(i);
    }
    try {
        _diagplot.showbeampositionplot(beamp, beamx, beamy);
        _diagplot.showsigmazplot(beamp, beamsigmaz);
    } catch (ConnectionException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (GetException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    // for BPMs
    for ( final BPM bpm : bpms ) {
        final Channel bpmXAvgChannel = bpm.getChannel( BPM.X_AVG_HANDLE );
        final Channel bpmXTBTChannel = bpm.getChannel( BPM.X_TBT_HANDLE );     // TODO: CKA - NEVER USED
        final Channel bpmYAvgChannel = bpm.getChannel( BPM.Y_AVG_HANDLE );
        final Channel bpmYTBTChannel = bpm.getChannel( BPM.Y_TBT_HANDLE );     // TODO: CKA - NEVER USED
        final Channel bpmAmpAvgChannel = bpm.getChannel( BPM.AMP_AVG_HANDLE );
        try {
            ProbeState<?> probeState = modelScenario.getTrajectory().stateForElement( bpm.getId() );
            //System.out.println("Now updating " + bpm.getId());
            // CKA - Transfer map probes and Envelope probes both exposed ICoordinateState
            //       so we should be able to compute a "fixed orbit" in any context
            //
            // CKA Nov 25, 2013
            //            if ( probeState instanceof ICoordinateState ) {
            //                final PhaseVector coordinates = ((ICoordinateState)probeState).getFixedOrbit();
            final PhaseVector coordinates = cmpCalcEngine.computeFixedOrbit(probeState);
            //            final PhaseVector coordinates = cmpCalcEngine.computeCoordinatePosition(probeState);
            // For SNS Ring BPM system, we only measure the signal with respect to the center of the beam pipe.
            // TO-DO: the turn by turn arrays should really be generated from betatron motion rather than random data about the nominal
            final double[] xTBT = NoiseGenerator.noisyArrayForNominal( coordinates.getx() * 1000.0 - bpm.getXOffset(), DEFAULT_BPM_WAVEFORM_SIZE, DEFAULT_BPM_WAVEFORM_DATA_SIZE, bpmNoise, bpmStaticError );
            final double xAvg = NoiseGenerator.getAverage( xTBT, DEFAULT_BPM_WAVEFORM_DATA_SIZE );
            final double[] yTBT = NoiseGenerator.noisyArrayForNominal( coordinates.gety() * 1000.0 - bpm.getYOffset(), DEFAULT_BPM_WAVEFORM_SIZE, DEFAULT_BPM_WAVEFORM_DATA_SIZE, bpmNoise, bpmStaticError );
            final double yAvg = NoiseGenerator.getAverage( yTBT, DEFAULT_BPM_WAVEFORM_DATA_SIZE );
            bpmXAvgChannel.putValCallback( xAvg, this );
            //                bpmXTBTChannel.putValCallback( xTBT, this );  // don't post to channel access until the turn by turn data is generated correctly
            bpmYAvgChannel.putValCallback( yAvg, this );
            //                bpmYTBTChannel.putValCallback( yTBT, this );  // don't post to channel access until the turn by turn data is generated correctly
            final double position = getSelectedSequence().getPosition(bpm);
            tempBPMp.add(position);
            tempBPMx.add(xAvg);
            tempBPMy.add(yAvg);
            // hardwired BPM amplitude noise and static error to 5% and 0.1mm (randomly) respectively
            bpmAmpAvgChannel.putVal( NoiseGenerator.setValForPV( 20., 5., 0.1, false) );
            // calculate the BPM phase (for linac only)
            if ( !( currentProbe instanceof TransferMapProbe ) && !( bpm instanceof RingBPM ) ) {
                final Channel bpmPhaseAvgChannel = bpm.getChannel( BPM.PHASE_AVG_HANDLE );
                bpmPhaseAvgChannel.putValCallback( probeState.getTime() * 360. * ( ( (BPMBucket)bpm.getBucket("bpm") ).getFrequency() * 1.e6 ) % 360.0, this );
            }
        } catch (ConnectionException e) {
            System.err.println( e.getMessage() );
        } catch (PutException e) {
            System.err.println( e.getMessage() );
        }
    }
    /**the array of bpm data*/
    double bpmp[] = new double[tempBPMp.size()];
    double bpmx[] = new double[tempBPMp.size()];
    double bpmy[] = new double[tempBPMp.size()];
    /**get the bpmdata[] from the list*/
    for (i = 0; i < tempBPMp.size(); i++) {
        bpmp[i] = tempBPMp.get(i);
        bpmx[i] = tempBPMx.get(i);
        bpmy[i] = tempBPMy.get(i);
    }
    /**showBPMplot*/
    try {
        _diagplot.showbpmplot(bpmp, bpmx,bpmy);
    } catch (ConnectionException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (GetException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    // for WSs: post the transverse envelope radii (envelope probes only)
    for ( final ProfileMonitor ws : wss ) {
        Channel wsX = ws.getChannel(ProfileMonitor.H_SIGMA_M_HANDLE);
        Channel wsY = ws.getChannel(ProfileMonitor.V_SIGMA_M_HANDLE);
        try {
            ProbeState<?> probeState = modelScenario.getTrajectory().stateForElement( ws.getId() );
            if (modelScenario.getProbe() instanceof EnvelopeProbe) {
                final Twiss[] twiss = ( (EnvelopeProbeState)probeState ).getCovarianceMatrix().computeTwiss();
                wsX.putValCallback( twiss[0].getEnvelopeRadius() * 1000., this );
                wsY.putValCallback( twiss[1].getEnvelopeRadius() * 1000., this );
                tempWSp.add(ws.getPosition());
                tempWSx.add(twiss[0].getEnvelopeRadius() * 1000);
                tempWSy.add(twiss[1].getEnvelopeRadius() * 1000);
            }
        } catch (ConnectionException e) {
            System.err.println( e.getMessage() );
        } catch (PutException e) {
            System.err.println( e.getMessage() );
        }
    }
    /**the array of ws data*/
    double wsp[] = new double[tempWSp.size()];
    double wsx[] = new double[tempWSp.size()];
    double wsy[] = new double[tempWSp.size()];
    /**get the wsdata[] from the list*/
    for (i = 0; i < tempWSp.size(); i++) {
        wsp[i] = tempWSp.get(i);
        wsx[i] = tempWSx.get(i);
        wsy[i] = tempWSy.get(i);
    }
    /**showWSplot*/
    try {
        _diagplot.showsigmaplot(wsp, wsx, wsy);
    } catch (ConnectionException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (GetException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    // flush all the queued asynchronous puts to channel access
    Channel.flushIO();
}
/**
 * Populate the BPM diagnostic PVs from a PV Logger snapshot.
 * Logged x/y averages get the configured BPM noise/static error applied; amplitudes use
 * hardwired 5% noise and 0.1 (mm, presumably) static error; phases (linac only) are copied verbatim.
 */
private void putDiagPVsFromPVLogger() {
    // for BPMs
    final Map<String,Double> bpmXMap = plds.getBPMXMap();
    final Map<String,Double> bpmYMap = plds.getBPMYMap();
    final Map<String,Double> bpmAmpMap = plds.getBPMAmpMap();
    final Map<String,Double> bpmPhaseMap = plds.getBPMPhaseMap();
    for ( final BPM bpm : bpms ) {
        Channel bpmX = bpm.getChannel(BPM.X_AVG_HANDLE);
        Channel bpmY = bpm.getChannel(BPM.Y_AVG_HANDLE);
        Channel bpmAmp = bpm.getChannel(BPM.AMP_AVG_HANDLE);
        try {
            // NOTE(review): informational trace written to stderr — consider stdout or a logger
            System.err.println("Now updating " + bpm.getId());
            if ( bpmXMap.containsKey( bpmX.getId() ) ) {
                bpmX.putVal( NoiseGenerator.setValForPV( bpmXMap.get( bpmX.getId() ).doubleValue(), bpmNoise, getStaticError(bpmStaticError), false ) );
            }
            if ( bpmYMap.containsKey( bpmY.getId() ) ) {
                bpmY.putVal( NoiseGenerator.setValForPV( bpmYMap.get( bpmY.getId() ).doubleValue(), bpmNoise, getStaticError(bpmStaticError), false ) );
            }
            // BPM amplitude
            if (bpmAmpMap.containsKey(bpmAmp.getId()))
                bpmAmp.putVal( NoiseGenerator.setValForPV( bpmAmpMap.get( bpmAmp.getId() ).doubleValue(), 5., 0.1, false) );
            // BPM phase (for linac only)
            if ( !( currentProbe instanceof TransferMapProbe ) ) {
                Channel bpmPhase = bpm.getChannel( BPM.PHASE_AVG_HANDLE );
                if ( bpmPhaseMap.containsKey( bpmPhase.getId() ) ) {
                    bpmPhase.putVal( bpmPhaseMap.get( bpmPhase.getId() ).doubleValue() );
                }
            }
        } catch ( ConnectionException e ) {
            System.err.println( e.getMessage() );
        } catch ( PutException e ) {
            System.err.println( e.getMessage() );
        }
    }
}
/**
 * Handle the channel-access put callback.
 * Intentionally a no-op: this document registers itself as the callback listener for the many
 * asynchronous puts above but has nothing to do on completion.
 * @param chan the channel whose put completed
 */
@Override
public void putCompleted( final Channel chan ) {}
/**
 * Returns a random static error uniformly distributed within +/- the given half-width.
 * @param staticErrorSigma half-width of the uniform distribution
 * @return a value uniformly drawn from [-staticErrorSigma, +staticErrorSigma)
 */
private double getStaticError(double staticErrorSigma) {
    // map Math.random()'s [0,1) onto a signed unit interval, then scale
    final double signedUnit = (Math.random() - 0.5) * 2;
    return staticErrorSigma * signedUnit;
}
/**
 * Look up the configured noise level for an accelerator node based on its hardware family.
 * @param element the node whose noise level is requested
 * @return the matching noise level, or 0.0 for families without configured noise
 */
private double getNoiseForElement(AcceleratorNode element) {
    if ( element.isKindOf( Quadrupole.s_strType ) ) {
        return quadNoise;
    }
    else if ( element.isKindOf( Bend.s_strType ) ) {
        return dipoleNoise;
    }
    else if ( element.isKindOf( HDipoleCorr.s_strType ) || element.isKindOf( VDipoleCorr.s_strType ) ) {
        // both corrector planes share a single noise setting
        return correctorNoise;
    }
    else if ( element.isKindOf( Solenoid.s_strType ) ) {
        return solNoise;
    }
    return 0.0;
}
/**
 * Draw a random static error for an accelerator node using the half-width configured for its
 * hardware family (0.0 for unconfigured families).
 * The original checked families in sequence with plain ifs, so a later match overrode an earlier
 * one; testing in reverse order with else-if preserves that precedence exactly.
 * @param element the node whose static error is requested
 * @return a uniformly distributed error within the family's configured half-width
 */
private double getStaticErrorForElement(AcceleratorNode element) {
    double sigma = 0.0;
    if ( element.isKindOf( Solenoid.s_strType ) ) sigma = solStaticError;
    else if ( element.isKindOf( VDipoleCorr.s_strType ) ) sigma = correctorStaticError;
    else if ( element.isKindOf( HDipoleCorr.s_strType ) ) sigma = correctorStaticError;
    else if ( element.isKindOf( Bend.s_strType ) ) sigma = dipoleStaticError;
    else if ( element.isKindOf( Quadrupole.s_strType ) ) sigma = quadStaticError;
    return getStaticError( sigma );
}
/**
 * Draw a random horizontal misalignment for an element; used by misalignElements().
 * Only quadrupoles and BPMs have configured misalignment half-widths; every other family gets 0.
 * Checked in reverse of the original plain-if order to preserve its last-match-wins precedence.
 * @param element the node to misalign
 * @return a uniformly distributed horizontal offset within the configured half-width
 */
private double getStaticHorizontalMisalignmentForElement(AcceleratorNode element) {
    double halfWidth = 0.0;
    if ( element.isKindOf( BPM.s_strType ) ) halfWidth = bpmStatHorMisalign;
    else if ( element.isKindOf( Quadrupole.s_strType ) ) halfWidth = quadStatHorMisalign;
    return getStaticError( halfWidth );
}
/**
 * Draw a random vertical misalignment for an element; used by misalignElements().
 * Only quadrupoles and BPMs have configured misalignment half-widths; every other family gets 0.
 * Checked in reverse of the original plain-if order to preserve its last-match-wins precedence.
 * @param element the node to misalign
 * @return a uniformly distributed vertical offset within the configured half-width
 */
private double getStaticVerticalMisalignmentForElement(AcceleratorNode element) {
    double halfWidth = 0.0;
    if ( element.isKindOf( BPM.s_strType ) ) halfWidth = bpmStatVerMisalign;
    else if ( element.isKindOf( Quadrupole.s_strType ) ) halfWidth = quadStatVerMisalign;
    return getStaticError( halfWidth );
}
/**
 * Create the map between the "readback" and "set" PVs for the selected sequence.
 * Rebuilds READBACK_SET_RECORDS for all magnets (including trim supplies) and RF cavities,
 * and refreshes the per-channel noise and static-error maps used when posting readbacks.
 */
private void configureReadbacks() {
    READBACK_SET_RECORDS.clear();
    ch_noiseMap = new LinkedHashMap<Channel, Double>();
    ch_staticErrorMap = new LinkedHashMap<Channel, Double>();
    if ( selectedSequence != null ) {
        // for magnet PVs
        for ( final Electromagnet em : mags ) {
            READBACK_SET_RECORDS.add( new ReadbackSetRecord( em, em.getChannel( Electromagnet.FIELD_RB_HANDLE ), em.getChannel( MagnetMainSupply.FIELD_SET_HANDLE ) ) );
            // handle the trimmed magnets: trim readbacks get no noise or static error
            if ( em.isKindOf( TrimmedQuadrupole.s_strType ) ) {
                READBACK_SET_RECORDS.add( new ReadbackSetRecord( em, em.getChannel( MagnetTrimSupply.FIELD_RB_HANDLE ), em.getChannel( MagnetTrimSupply.FIELD_SET_HANDLE ) ) );
                ch_noiseMap.put( em.getChannel( MagnetTrimSupply.FIELD_RB_HANDLE ), 0.0 );
                ch_staticErrorMap.put( em.getChannel( MagnetTrimSupply.FIELD_RB_HANDLE ), 0.0 );
            }
            // set up the map between the magnet readback PV and its noise level
            ch_noiseMap.put( em.getChannel( Electromagnet.FIELD_RB_HANDLE), getNoiseForElement(em) );
            ch_staticErrorMap.put( em.getChannel( Electromagnet.FIELD_RB_HANDLE), getStaticErrorForElement(em) );
        }
        // for RF PVs: amplitude and phase channels are registered only when valid
        for ( final RfCavity rfCav : rfCavities ) {
            final Channel ampSetChannel = rfCav.findChannel( RfCavity.CAV_AMP_SET_HANDLE );
            final Channel ampReadChannel = rfCav.findChannel( RfCavity.CAV_AMP_AVG_HANDLE );
            if ( ampReadChannel != null && ampReadChannel.isValid() ) {
                if ( ampSetChannel != null && ampSetChannel.isValid() ) {
                    READBACK_SET_RECORDS.add( new ReadbackSetRecord( rfCav, ampReadChannel, ampSetChannel ) );
                }
                ch_noiseMap.put( ampReadChannel, rfAmpNoise );
                ch_staticErrorMap.put( ampReadChannel, getStaticError(rfAmpStaticError) );
            }
            final Channel phaseSetChannel = rfCav.findChannel( RfCavity.CAV_PHASE_SET_HANDLE );
            final Channel phaseReadChannel = rfCav.findChannel( RfCavity.CAV_PHASE_AVG_HANDLE );
            if ( phaseReadChannel != null && phaseReadChannel.isValid() ) {
                if ( phaseSetChannel != null && phaseSetChannel.isValid() ) {
                    READBACK_SET_RECORDS.add( new ReadbackSetRecord( rfCav, phaseReadChannel, phaseSetChannel ) );
                }
                ch_noiseMap.put( phaseReadChannel, rfPhaseNoise );
                ch_staticErrorMap.put( phaseReadChannel, getStaticError(rfPhaseStaticError) );
            }
        }
        // order the table rows by beamline position and push them to the UI model
        Collections.sort( READBACK_SET_RECORDS, new ReadbackSetRecordPositionComparator( selectedSequence ) );
        READBACK_SET_TABLE_MODEL.setRecords( new ArrayList<ReadbackSetRecord>( READBACK_SET_RECORDS ) );
    }
}
/** Run the VA server: marks it running so the periodic synchronizers start doing work. */
private void runServer() {
    vaRunning = true;
}
/** Stop the VA Server: suspends the model-sync timer and clears the running flag. */
private void stopServer() {
    MODEL_SYNC_TIMER.suspend();
    vaRunning = false;
}
/** Destroy the VA Server: stops it, then disposes of the server instance and nulls the reference. */
void destroyServer() {
    try {
        stopServer();
        if ( _vaServer != null ) {
            _vaServer.destroy();
            _vaServer = null;
        }
    }
    catch( Exception exception ) {
        exception.printStackTrace();
    }
}
/**
 * React to a change of accelerator: stop the server, invalidate the probes, and re-resolve the
 * timing-center channels (rep rate, beam-on event/count, slow diagnostic event) from the new machine.
 */
@Override
public void acceleratorChanged() {
    if (accelerator != null) {
        stopServer();
        // probes belong to the old machine; force them to be rebuilt
        baseProbe = null;
        currentProbe = null;
        _repRateChannel = accelerator.getTimingCenter().findChannel( TimingCenter.REP_RATE_HANDLE );
        beamOnEvent = accelerator.getTimingCenter().findChannel( TimingCenter.BEAM_ON_EVENT_HANDLE );
        beamOnEventCount = accelerator.getTimingCenter().findChannel( TimingCenter.BEAM_ON_EVENT_COUNT_HANDLE );
        slowDiagEvent = accelerator.getTimingCenter().findChannel( TimingCenter.SLOW_DIAGNOSTIC_EVENT_HANDLE );
        setHasChanges( true );
    }
}
/**
 * React to a change of the selected sequence: rebuild the VA server, re-collect the device lists
 * (nodes, electromagnets, RF cavities, BPMs, wire scanners), reconfigure the readback/set mapping,
 * recreate the on-line model scenario, and build the default probe.
 */
@Override
public void selectedSequenceChanged() {
    destroyServer();
    if (selectedSequence != null) {
        try {
            _vaServer = new VAServer( selectedSequence );
        }
        catch( Exception exception ) {
            exception.printStackTrace();
        }
        // get all nodes (added by liyong)
        nodes = getSelectedSequence().getAllNodes();
        // get electro magnets
        TypeQualifier typeQualifier = QualifierFactory.qualifierWithStatusAndTypes( true, Electromagnet.s_strType );
        mags = getSelectedSequence().<Electromagnet>getAllNodesWithQualifier( typeQualifier );
        // get all the rf cavities
        // (fix: the original line was "typeQualifier = typeQualifier = ..." — a redundant
        //  double assignment already flagged "No Effect" in review; collapsed to one assignment)
        typeQualifier = QualifierFactory.qualifierWithStatusAndTypes( true, RfCavity.s_strType );
        rfCavities = getSelectedSequence().getAllInclusiveNodesWithQualifier( typeQualifier );
        // get all the BPMs
        bpms = getSelectedSequence().<BPM>getAllNodesWithQualifier( QualifierFactory.qualifierWithStatusAndType( true, "BPM" ) );
        // get all the wire scanners
        wss = getSelectedSequence().getAllNodesWithQualifier( QualifierFactory.qualifierWithStatusAndType( true, ProfileMonitor.PROFILE_MONITOR_TYPE ) );
        System.out.println( wss );    // debug: list the wire scanners found
        // should create a new map for "set" <-> "readback" PV mapping
        configureReadbacks();
        // for on-line model
        try {
            modelScenario = Scenario.newScenarioFor( getSelectedSequence() );
        }
        catch ( ModelException exception ) {
            System.err.println( exception.getMessage() );
        }
        // setting up the default probe
        createDefaultProbe();
        setHasChanges(true);
    }
    else {
        // no sequence selected: drop the model and probes
        modelScenario = null;
        baseProbe = null;
        currentProbe = null;
    }
}
/**
 * Re-synchronize the on-line model with the live (virtual) machine settings.
 * Failures (including a null scenario) are caught broadly and reported to stderr.
 */
public void buildOnlineModel() {
    try {
        //  model.resetProbe();
        modelScenario.setSynchronizationMode(Scenario.SYNC_MODE_LIVE);
        modelScenario.resync();
    } catch (Exception e) {
        System.err.println(e.getMessage());
    }
}
/**
 * Handle UI actions. The "noiseSet" command copies every noise and static-error field from the
 * dialog into the document, reconfigures the readbacks, applies misalignments, and hides the dialog.
 * @param ev the triggering action event
 */
@Override
public void actionPerformed(ActionEvent ev) {
    if (ev.getActionCommand().equals("noiseSet")) {
        quadNoise = df_quadNoise.getDoubleValue();
        dipoleNoise = df_bendNoise.getDoubleValue();
        correctorNoise = df_dipCorrNoise.getDoubleValue();
        bpmNoise = df_bpmNoise.getDoubleValue();
        solNoise = df_solNoise.getDoubleValue();
        rfAmpNoise = df_rfAmpNoise.getDoubleValue();
        rfPhaseNoise = df_rfPhaseNoise.getDoubleValue();
        quadStaticError = df_quadStatErr.getDoubleValue();
        dipoleStaticError = df_bendStatErr.getDoubleValue();
        correctorStaticError = df_dipCorrStatErr.getDoubleValue();
        bpmStaticError = df_bpmStatErr.getDoubleValue();
        solStaticError = df_solStatErr.getDoubleValue();
        rfAmpStaticError = df_rfAmpStatErr.getDoubleValue();
        rfPhaseStaticError = df_rfPhaseStatErr.getDoubleValue();
        setHasChanges(true);
        // re-derive per-channel noise/static-error maps and physically offset the elements
        configureReadbacks();
        misalignElements();
        setNoise.setVisible(false);
    }
}
/**
 * Apply random static misalignments to every node of the selected sequence.
 * Reads the misalignment half-widths from the dialog fields, then assigns each node a fresh
 * uniformly-distributed x/y offset (non-quad, non-BPM nodes get 0).
 */
private void misalignElements() {
    quadStatHorMisalign = df_quadStatHorMisalign.getDoubleValue();
    quadStatVerMisalign = df_quadStatVerMisalign.getDoubleValue();
    bpmStatHorMisalign = df_bpmStatHorMisalign.getDoubleValue();
    bpmStatVerMisalign = df_bpmStatVerMisalign.getDoubleValue();
    for ( final AcceleratorNode node : getSelectedSequence().getAllNodes() ) {
        node.setXOffset(getStaticHorizontalMisalignmentForElement(node));
        node.setYOffset(getStaticVerticalMisalignmentForElement(node));
    }
}
/**
 * Synchronize the readbacks with the setpoints and run the on-line model.
 * Does nothing unless the VA server is running; model failures are reported to stderr.
 */
private void syncOnlineModel() {
    if ( vaRunning ) {
        // add noise, populate "read-back" PVs
        putReadbackPVs();
        // re-sync lattice and run model
        buildOnlineModel();
        try {
            // reset the probe so each sync starts from the same initial state
            modelScenario.getProbe().reset();
            modelScenario.run();
            // put diagnostic node PVs
            putDiagPVs();
        }
        catch ( ModelException exception ) {
            System.err.println( exception.getMessage() );
        }
    }
}
/**
 * Get a runnable that synchronizes the on-line model; intended for the periodic sync timer.
 * @return a Runnable whose run() delegates to syncOnlineModel()
 */
private Runnable getOnlineModelSynchronizer() {
    final Runnable modelSynchronizer = new Runnable() {
        @Override
        public void run() {
            syncOnlineModel();
        }
    };
    return modelSynchronizer;
}
/**
 * Synchronize the machine state from a PV Logger snapshot: setpoints, readbacks, then diagnostics.
 * Does nothing unless the VA server is running.
 */
private void syncPVLogger() {
    if ( vaRunning ) {
        putSetPVsFromPVLogger();
        putReadbackPVsFromPVLogger();
        putDiagPVsFromPVLogger();
    }
}
/**
 * Get a runnable that synchronizes the machine from the PV Logger; intended for the sync timer.
 * @return a Runnable whose run() delegates to syncPVLogger()
 */
private Runnable getPVLoggerSynchronizer() {
    final Runnable loggerSynchronizer = new Runnable() {
        @Override
        public void run() {
            syncPVLogger();
        }
    };
    return loggerSynchronizer;
}
}
/** Compare readback set records by their node's position within a sequence. Nulls sort first. */
class ReadbackSetRecordPositionComparator implements Comparator<ReadbackSetRecord> {
    /** sequence within which the nodes are ordered */
    final AcceleratorSeq SEQUENCE;

    /**
     * Constructor
     * @param sequence the sequence that defines the node positions
     */
    public ReadbackSetRecordPositionComparator( final AcceleratorSeq sequence ) {
        SEQUENCE = sequence;
    }

    /**
     * Compare the records based on location relative to the start of the sequence.
     * Uses Double.compare rather than hand-rolled comparisons so the Comparator contract holds
     * even for NaN positions (the original returned 0 for any NaN, breaking transitivity).
     * @param record1 first record (null sorts before non-null)
     * @param record2 second record
     * @return negative/zero/positive per the Comparator contract
     */
    @Override
    public int compare( final ReadbackSetRecord record1, final ReadbackSetRecord record2 ) {
        if ( record1 == null && record2 == null ) {
            return 0;
        }
        else if ( record1 == null ) {
            return -1;
        }
        else if ( record2 == null ) {
            return 1;
        }
        else {
            final double position1 = SEQUENCE.getPosition( record1.getNode() );
            final double position2 = SEQUENCE.getPosition( record2.getNode() );
            return Double.compare( position1, position2 );
        }
    }

    /** all comparators of this class are the same */
    @Override
    public boolean equals( final Object object ) {
        return object instanceof ReadbackSetRecordPositionComparator;
    }

    /** override hashCode() as required for consistency with equals() */
    @Override
    public int hashCode() {
        return 1;	// constant since all comparators of this class are equivalent
    }
}
/**
 * Diagnostics plotting helper: shows beam-position and envelope (sigma) plots for model output,
 * BPM averages and wire-scanner sigmas on two FunctionGraphsJPanel panels.
 */
class DiagPlot {
    // the two plot panels this helper draws on
    protected FunctionGraphsJPanel _beampositionplot;
    protected FunctionGraphsJPanel _sigamplot;
    // data series: model orbit (x/y), BPM averages (x/y), longitudinal sigma, wire-scanner sigmas (x/y)
    protected BasicGraphData DataBeamx;
    protected BasicGraphData DataBeamy;
    protected BasicGraphData DataBPMx;
    protected BasicGraphData DataBPMy;
    protected BasicGraphData Datasigmaz;
    protected BasicGraphData DataWSx;
    protected BasicGraphData DataWSy;

    /**
     * Constructor: binds the two panels and configures axes, legends and series.
     * @param beampositionplot panel for beam displacement vs. position
     * @param sigamplot panel for beam envelope vs. position
     */
    public DiagPlot(FunctionGraphsJPanel beampositionplot, FunctionGraphsJPanel sigamplot) {
        _beampositionplot=beampositionplot;
        _sigamplot=sigamplot;
        setupPlot(beampositionplot,sigamplot);
    }

    /**
     * Update the model beam-position series.
     * @param p positions along the beamline  @param x horizontal displacements  @param y vertical displacements
     */
    public void showbeampositionplot(double[] p,double[] x, double[] y) throws ConnectionException, GetException {
        DataBeamx.updateValues(p, x);
        DataBeamy.updateValues(p, y);
    }

    /** Update the BPM average-position series (same axes as the beam-position plot). */
    public void showbpmplot(double[] p,double[] x, double[] y) throws ConnectionException, GetException {
        DataBPMx.updateValues(p, x);
        DataBPMy.updateValues(p, y);
    }

    /** Update the longitudinal envelope (sigma-z) series on the envelope plot. */
    public void showsigmazplot(double[] p,double[] sigmaz) throws ConnectionException, GetException {
        Datasigmaz.updateValues(p, sigmaz);
    }

    /** Update the wire-scanner transverse sigma series on the envelope plot. */
    public void showsigmaplot(double[] wsp,double[] wsx, double[] wsy) throws ConnectionException, GetException {
        DataWSx.updateValues(wsp, wsx);
        DataWSy.updateValues(wsp, wsy);
    }

    /**
     * One-time plot configuration: axis labels and number formats, legends, series creation,
     * legend keys, colors, and attaching each series to its panel.
     */
    public void setupPlot(FunctionGraphsJPanel beampositionplot,FunctionGraphsJPanel sigamplot) {
        // setup beamdispplot labels
        beampositionplot.setName( "BeamDisp_PLOT" );
        beampositionplot.setAxisNameX("Position(m)");
        beampositionplot.setAxisNameY("Beam displacement (mm)");
        beampositionplot.setNumberFormatX( new DecimalFormat( "0.00E0" ) );
        beampositionplot.setNumberFormatY( new DecimalFormat( "0.00E0" ) );
        // add legend support
        beampositionplot.setLegendPosition( FunctionGraphsJPanel.LEGEND_POSITION_ARBITRARY );
        beampositionplot.setLegendKeyString( "Legend" );
        beampositionplot.setLegendBackground( Color.lightGray );
        beampositionplot.setLegendColor( Color.black );
        beampositionplot.setLegendVisible( true );
        // setup sigamplot labels
        sigamplot.setName( "Sigma_PLOT" );
        sigamplot.setAxisNameX("Position(m)");
        sigamplot.setAxisNameY("Beam Envelope(mm)");
        sigamplot.setNumberFormatX( new DecimalFormat( "0.00E0" ) );
        sigamplot.setNumberFormatY( new DecimalFormat( "0.00E0" ) );
        // add legend support
        sigamplot.setLegendPosition( FunctionGraphsJPanel.LEGEND_POSITION_ARBITRARY );
        sigamplot.setLegendKeyString( "Legend" );
        sigamplot.setLegendBackground( Color.lightGray );
        sigamplot.setLegendColor( Color.black );
        sigamplot.setLegendVisible( true );
        // create the data series
        DataBeamx=new BasicGraphData();
        DataBeamy=new BasicGraphData();
        DataBPMx=new BasicGraphData();
        DataBPMy=new BasicGraphData();
        DataWSx=new BasicGraphData();
        DataWSy=new BasicGraphData();
        Datasigmaz=new BasicGraphData();
        // legend labels for each series
        DataBeamx.setGraphProperty(_beampositionplot.getLegendKeyString(), "BeamxAvg");
        DataBeamy.setGraphProperty(_beampositionplot.getLegendKeyString(), "BeamyAvg");
        DataBPMx.setGraphProperty(_beampositionplot.getLegendKeyString(), "BPMxAvg");
        DataBPMy.setGraphProperty(_beampositionplot.getLegendKeyString(), "BPMyAvg");
        DataWSx.setGraphProperty(_sigamplot.getLegendKeyString(), "sigmax");
        DataWSy.setGraphProperty(_sigamplot.getLegendKeyString(), "sigmay");
        Datasigmaz.setGraphProperty(_sigamplot.getLegendKeyString(), "sigmaz");
        // series colors
        DataBeamx.setGraphColor(Color.blue);
        DataBeamy.setGraphColor(Color.orange);
        DataBPMx.setGraphColor(Color.RED);
        DataBPMy.setGraphColor(Color.BLACK);
        DataWSx.setGraphColor(Color.RED);
        DataWSy.setGraphColor(Color.BLACK);
        Datasigmaz.setGraphColor(Color.blue);
        // attach the series to their panels
        _beampositionplot.addGraphData(DataBeamx);
        _beampositionplot.addGraphData(DataBeamy);
        _beampositionplot.addGraphData(DataBPMx);
        _beampositionplot.addGraphData(DataBPMy);
        _sigamplot.addGraphData(DataWSx);
        _sigamplot.addGraphData(DataWSy);
        _sigamplot.addGraphData(Datasigmaz);
    }
}
|
apps/virtualaccelerator/src/xal/app/virtualaccelerator/VADocument.java
|
/*
* @(#)VADocument.java 1.5 07/15/2004
*
* Copyright (c) 2001-2004 Oak Ridge National Laboratory
* Oak Ridge, Tennessee 37831, U.S.A.
* All rights reserved.
*
*/
package xal.app.virtualaccelerator;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.net.URL;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.JToggleButton.ToggleButtonModel;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.PlainDocument;
import xal.ca.Channel;
import xal.ca.ChannelFactory;
import xal.ca.ConnectionException;
import xal.ca.GetException;
import xal.ca.PutException;
import xal.ca.PutListener;
import xal.extension.application.Application;
import xal.extension.application.Commander;
import xal.extension.application.XalWindow;
import xal.extension.application.smf.AcceleratorDocument;
import xal.extension.bricks.WindowReference;
import xal.extension.widgets.apputils.SimpleProbeEditor;
import xal.extension.widgets.plot.BasicGraphData;
import xal.extension.widgets.plot.FunctionGraphsJPanel;
import xal.extension.widgets.swing.DecimalField;
import xal.extension.widgets.swing.KeyValueFilteredTableModel;
import xal.model.IAlgorithm;
import xal.model.ModelException;
import xal.model.alg.TransferMapTracker;
import xal.model.probe.EnvelopeProbe;
import xal.model.probe.Probe; // Probe for t3d header
import xal.model.probe.TransferMapProbe;
import xal.model.probe.traj.EnvelopeProbeState;
import xal.model.probe.traj.ProbeState;
import xal.service.pvlogger.apputils.browser.PVLogSnapshotChooser;
import xal.service.pvlogger.sim.PVLoggerDataSource;
import xal.sim.scenario.AlgorithmFactory;
import xal.sim.scenario.ProbeFactory;
import xal.sim.scenario.Scenario;
import xal.smf.AcceleratorNode;
import xal.smf.AcceleratorSeq;
import xal.smf.AcceleratorSeqCombo;
import xal.smf.NoSuchChannelException;
import xal.smf.Ring;
import xal.smf.TimingCenter;
import xal.smf.attr.BPMBucket;
import xal.smf.impl.BPM;
import xal.smf.impl.Bend;
import xal.smf.impl.Electromagnet;
import xal.smf.impl.HDipoleCorr;
import xal.smf.impl.MagnetMainSupply;
import xal.smf.impl.MagnetTrimSupply;
import xal.smf.impl.ProfileMonitor;
import xal.smf.impl.Quadrupole;
import xal.smf.impl.RfCavity;
import xal.smf.impl.RingBPM;
import xal.smf.impl.SCLCavity;
import xal.smf.impl.Solenoid;
import xal.smf.impl.TrimmedQuadrupole;
import xal.smf.impl.VDipoleCorr;
import xal.smf.impl.qualify.QualifierFactory;
import xal.smf.impl.qualify.TypeQualifier;
import xal.tools.beam.PhaseVector;
import xal.tools.beam.Twiss;
import xal.tools.beam.calc.SimpleSimResultsAdaptor;
import xal.tools.data.DataAdaptor;
import xal.tools.dispatch.DispatchQueue;
import xal.tools.dispatch.DispatchTimer;
//TODO: CKA - Many unused imports
import xal.tools.xml.XmlDataAdaptor;
/**
* <p>
* <h4>CKA NOTES:</h4>
* - In method <code>{@link #createDefaultProbe()}</code> a <code>TransferMapProbe</code>
* is created in the case of a ring. The method <code>TransferMapState#setPhaseCoordinates</code>
* is called to create an initial static error. This does nothing because transfer map probes
* do not have phase coordinates any longer, the method is deprecated.
* <br/>
* <br/>
* - The static noise for the above call is hard coded. As are many features in this class.
* </p>
*
* VADocument is a custom AcceleratorDocument for virtual accelerator application.
* @version 1.6 13 Jul 2015
* @author Paul Chu
* @author Blaz Kranjc <[email protected]>
*/
public class VADocument extends AcceleratorDocument implements ActionListener, PutListener {
/** default BPM waveform size */
final static private int DEFAULT_BPM_WAVEFORM_SIZE = VAServer.DEFAULT_ARRAY_SIZE;
/** default BPM waveform data size (part of the waveform to populate with data) */
final static private int DEFAULT_BPM_WAVEFORM_DATA_SIZE = 250;
/** factory for server channels
 * Not sure whether it is better for this to be static and shared across all documents.
 * For now we will just use a common server factory across all documents (possibly prevents server conflicts).
 */
final static private ChannelFactory CHANNEL_SERVER_FACTORY = ChannelFactory.newServerFactory();
/** The document for the text pane in the main window. */
protected PlainDocument textDocument;
/** For on-line model */
protected Scenario modelScenario;
/* template probe which may be configured and then copied as the currentProbe for use in the simulation */
private Probe<?> baseProbe;
/* probe which was copied from the base probe and is being used in the simulation */
private Probe<?> currentProbe;
// model synchronization mode; defaults to live sync
String dataSource = Scenario.SYNC_MODE_LIVE;
// NOTE(review): runT3d_OK is assigned here but not referenced in the visible code — confirm usage before removing
int runT3d_OK = 0;
/** dialog built by makeNoiseDialog() for entering noise levels and static errors */
private JDialog setNoise = new JDialog();
// input fields for relative noise levels (percent; BPM in mm), one per device type
private DecimalField df_quadNoise, df_bendNoise, df_dipCorrNoise, df_bpmNoise, df_solNoise, df_rfAmpNoise, df_rfPhaseNoise;
// input fields for static (systematic) errors in device units, one per device type
private DecimalField df_quadStatErr, df_bendStatErr, df_dipCorrStatErr, df_bpmStatErr, df_solStatErr, df_rfAmpStatErr, df_rfPhaseStatErr;
// random noise levels applied to readbacks (percent, except bpmNoise in mm per the dialog labels)
private double quadNoise = 0.0;
private double dipoleNoise = 0.0;
private double correctorNoise = 0.0;
private double solNoise = 0.0;
private double bpmNoise = 0.0;
private double rfAmpNoise = 0.0;
private double rfPhaseNoise = 0.0;
// static (systematic) error offsets in device units (T/m, T, kV, deg, mm per the dialog labels)
private double quadStaticError = 0.0;
private double dipoleStaticError = 0.0;
private double correctorStaticError = 0.0;
private double solStaticError = 0.0;
private double bpmStaticError = 0.0;
private double rfAmpStaticError = 0.0;
private double rfPhaseStaticError = 0.0;
/** OK button of the noise dialog; action command "noiseSet" handled in actionPerformed */
private JButton done = new JButton("OK");
/** true while the virtual accelerator server is running */
private volatile boolean vaRunning = false;
// add by liyong
private java.util.List<AcceleratorNode> nodes; // TODO: CKA - NEVER USED
private java.util.List<RfCavity> rfCavities;
private java.util.List<Electromagnet> mags;
private java.util.List<BPM> bpms;
private java.util.List<ProfileMonitor> wss;
// timing channels served by the VA: beam trigger, trigger counter, slow-diagnostics event, rep rate
private Channel beamOnEvent;
private Channel beamOnEventCount;
private Channel slowDiagEvent;
private Channel _repRateChannel;
// timestamp of last update
private Date _lastUpdate;
// number of beam-on events posted so far
private long beamOnEventCounter = 0;
/** readback/setpoint channel pairs displayed in the main table */
private List<ReadbackSetRecord> READBACK_SET_RECORDS;
// per-channel noise level — presumably keyed by readback channel; verify against updateReadback usage
private LinkedHashMap<Channel, Double> ch_noiseMap;
// per-channel static error — same keying as ch_noiseMap
private LinkedHashMap<Channel, Double> ch_staticErrorMap;
/** the channel-access server instance */
private VAServer _vaServer;
protected Commander commander;
// private RecentFileTracker _probeFileTracker;
// for on/off-line mode selection
ToggleButtonModel olmModel = new ToggleButtonModel();
ToggleButtonModel pvlogModel = new ToggleButtonModel();
ToggleButtonModel pvlogMovieModel = new ToggleButtonModel();
// true when data comes from a PV Logger snapshot rather than the online model
private boolean isFromPVLogger = false;
// true when the online model is the engine (possibly seeded from a PV Logger snapshot)
private boolean isForOLM = false;
private PVLogSnapshotChooser plsc;
private JDialog pvLogSelector;
private PVLoggerDataSource plds;
/** bricks window reference */
private WindowReference _windowReference;
/** readback setpoint table model */
private KeyValueFilteredTableModel<ReadbackSetRecord> READBACK_SET_TABLE_MODEL;
/** timer to synch the readbacks with the setpoints and also sync the model */
final private DispatchTimer MODEL_SYNC_TIMER;
/** model sync period in milliseconds */
private long _modelSyncPeriod;
/** diagnostics plot pair (orbit + envelope) shown in the main window */
public DiagPlot _diagplot;
/** Create a new empty document (no source URL); delegates to the URL constructor. */
public VADocument() {
    this( null );
}
/**
 * Create a new document loaded from the URL file.
 * Builds the model-sync timer, the readback/setpoint table model and the text document.
 * @param url The URL of the file to load into the new document; may be null for an empty document.
 */
public VADocument( final java.net.URL url ) {
    setSource( url );
    // timer to synchronize readbacks with setpoints as well as the online model
    MODEL_SYNC_TIMER = DispatchTimer.getCoalescingInstance( DispatchQueue.createSerialQueue( "" ), getOnlineModelSynchronizer() );
    // set the default model sync period to 1 second
    _modelSyncPeriod = 1000;
    READBACK_SET_RECORDS = new ArrayList<ReadbackSetRecord>();
    final WindowReference windowReference = getDefaultWindowReference( "MainWindow", this );
    _windowReference = windowReference;
    // key paths name the record properties backing each column
    READBACK_SET_TABLE_MODEL = new KeyValueFilteredTableModel<ReadbackSetRecord>( new ArrayList<ReadbackSetRecord>(), "node.id", "readbackChannel.channelName", "lastReadback", "setpointChannel.channelName", "lastSetpoint" );
    READBACK_SET_TABLE_MODEL.setColumnClass( "lastReadback", Number.class );
    // setpoint column is Double (not Number) so the editable cell parses as a double
    READBACK_SET_TABLE_MODEL.setColumnClass( "lastSetpoint", Double.class );
    READBACK_SET_TABLE_MODEL.setColumnName( "node.id", "Node" );
    READBACK_SET_TABLE_MODEL.setColumnName( "readbackChannel.channelName", "Readback PV" );
    READBACK_SET_TABLE_MODEL.setColumnName( "lastReadback", "Readback" );
    READBACK_SET_TABLE_MODEL.setColumnName( "setpointChannel.channelName", "Setpoint PV" );
    READBACK_SET_TABLE_MODEL.setColumnName( "lastSetpoint", "Setpoint" );
    READBACK_SET_TABLE_MODEL.setColumnEditable( "lastSetpoint", true );
    final JTextField filterField = (JTextField)windowReference.getView( "FilterField" );
    READBACK_SET_TABLE_MODEL.setInputFilterComponent( filterField );
    makeTextDocument();
    _lastUpdate = new Date();
    // Removed the original trailing "if ( url == null ) return;" — it was the final
    // statement of the constructor and therefore had no effect.
}
/**
 * Override the nextChannelFactory() method to return this document's channel server factory.
 * The factory is shared (static) across all documents — see the CHANNEL_SERVER_FACTORY declaration.
 * @return this document's channel server factory
 */
@Override
public ChannelFactory nextChannelFactory() {
    return CHANNEL_SERVER_FACTORY;
}
/**
 * Make a main window by instantiating the custom window from the bricks window reference.
 * Wires the readback table to READBACK_SET_TABLE_MODEL, builds the diagnostics plots and
 * the noise dialog, and — when a source URL is present — restores saved state from it.
 */
@Override
public void makeMainWindow() {
    mainWindow = (XalWindow)_windowReference.getWindow();
    final JTable readbackTable = (JTable)_windowReference.getView( "ReadbackTable" );
    readbackTable.setCellSelectionEnabled( true );
    readbackTable.setModel( READBACK_SET_TABLE_MODEL );
    /** add digaplot */
    final FunctionGraphsJPanel beamdispplot = (FunctionGraphsJPanel) _windowReference.getView("BeamDispPlot");
    final FunctionGraphsJPanel sigamplot = (FunctionGraphsJPanel) _windowReference.getView("SigmaPlot");
    _diagplot = new DiagPlot(beamdispplot, sigamplot);
    makeNoiseDialog();
    if (getSource() != null) {
        java.net.URL url = getSource();
        DataAdaptor documentAdaptor = XmlDataAdaptor.adaptorForUrl( url, false );
        // NOTE(review): child adaptor tag here is "MpxDocument" while update() itself re-reads
        // the source and looks for "VA" — confirm which tag the saved documents actually use
        update( documentAdaptor.childAdaptor("MpxDocument") );
    }
    setHasChanges(false);
}
/** get the model sync period in milliseconds */
public long getModelSyncPeriod() {
    return _modelSyncPeriod;
}
/**
 * Update the model sync period in milliseconds, restart the sync timer with the
 * new interval immediately, and mark the document dirty (the period is persisted).
 */
public void setModelSyncPeriod( final long period ) {
    _modelSyncPeriod = period;
    // restart the timer so the new interval takes effect now rather than after the next tick
    MODEL_SYNC_TIMER.startNowWithInterval( _modelSyncPeriod, 0 );
    setHasChanges( true );
}
/**
 * Make the noise dialog box: two side-by-side columns of labeled numeric fields —
 * relative noise levels and static errors per device type — with an OK button
 * (action command "noiseSet", handled in actionPerformed). The 14 near-identical
 * row-building sections of the original are factored into {@link #createErrorRow};
 * field creation order, labels and units are unchanged. The original mixed
 * GridLayout(1,2) and GridLayout(1,3) for 3-component rows; with a single row
 * GridLayout derives the column count from the component count, so normalizing
 * to (1,3) is visually identical.
 */
private void makeNoiseDialog() {
    NumberFormat numberFormat = NumberFormat.getNumberInstance();
    numberFormat.setMaximumFractionDigits(3);

    // column of relative noise levels (percent; BPM in mm)
    JPanel noiseLevelPanel = new JPanel(new GridLayout(8, 1));
    noiseLevelPanel.add(new JLabel("Noise Level for Device Type:"));
    df_quadNoise = new DecimalField( 0., 5, numberFormat );
    noiseLevelPanel.add(createErrorRow("Quad: ", df_quadNoise, " % "));
    df_bendNoise = new DecimalField( 0., 5, numberFormat );
    noiseLevelPanel.add(createErrorRow("Bending Dipole: ", df_bendNoise, " % "));
    df_dipCorrNoise = new DecimalField( 0., 5, numberFormat );
    noiseLevelPanel.add(createErrorRow("Dipole Corr.: ", df_dipCorrNoise, " % "));
    df_solNoise = new DecimalField( 0., 5, numberFormat );
    noiseLevelPanel.add(createErrorRow("Solenoid: ", df_solNoise, " % "));
    df_rfAmpNoise = new DecimalField( 0., 5, numberFormat );
    noiseLevelPanel.add(createErrorRow("RF amp: ", df_rfAmpNoise, " % "));
    df_rfPhaseNoise = new DecimalField( 0., 5, numberFormat );
    noiseLevelPanel.add(createErrorRow("RF phase: ", df_rfPhaseNoise, " % "));
    df_bpmNoise = new DecimalField( 0., 5, numberFormat );
    noiseLevelPanel.add(createErrorRow("BPM: ", df_bpmNoise, "mm"));

    // column of static (systematic) errors in device units
    JPanel staticErrorPanel = new JPanel(new GridLayout(8, 1));
    staticErrorPanel.add(new JLabel("Static error for Device Type:"));
    df_quadStatErr = new DecimalField( 0., 5, numberFormat );
    staticErrorPanel.add(createErrorRow("Quad: ", df_quadStatErr, " T/m "));
    df_bendStatErr = new DecimalField( 0., 5, numberFormat );
    staticErrorPanel.add(createErrorRow("Bending Dipole: ", df_bendStatErr, " T "));
    df_dipCorrStatErr = new DecimalField( 0., 5, numberFormat );
    staticErrorPanel.add(createErrorRow("Dipole Corr.: ", df_dipCorrStatErr, " T "));
    df_solStatErr = new DecimalField( 0., 5, numberFormat );
    staticErrorPanel.add(createErrorRow("Solenoid: ", df_solStatErr, " T "));
    df_rfAmpStatErr = new DecimalField( 0., 5, numberFormat );
    staticErrorPanel.add(createErrorRow("RF amp: ", df_rfAmpStatErr, " kV "));
    df_rfPhaseStatErr = new DecimalField( 0., 5, numberFormat );
    staticErrorPanel.add(createErrorRow("RF phase: ", df_rfPhaseStatErr, " deg "));
    df_bpmStatErr = new DecimalField( 0., 5, numberFormat );
    staticErrorPanel.add(createErrorRow("BPM: ", df_bpmStatErr, " mm "));

    // put everything together
    JPanel settingPanel = new JPanel();
    settingPanel.setLayout(new BoxLayout(settingPanel, BoxLayout.X_AXIS));
    settingPanel.add(noiseLevelPanel);
    settingPanel.add(staticErrorPanel);
    setNoise.setBounds(300, 300, 900, 600);
    setNoise.setTitle("Define errors...");
    setNoise.getContentPane().setLayout(new BorderLayout());
    setNoise.getContentPane().add(settingPanel, BorderLayout.CENTER);
    setNoise.getContentPane().add(done, BorderLayout.SOUTH);
    done.setActionCommand("noiseSet");
    done.addActionListener(this);
    setNoise.pack();
}

/** Lay out one dialog row: description label, numeric input field, and unit label. */
private JPanel createErrorRow( final String label, final DecimalField field, final String units ) {
    final JPanel row = new JPanel(new GridLayout(1, 3));
    row.add(new JLabel(label));
    row.add(field);
    row.add(new JLabel(units));
    return row;
}
/**
 * Save the document to the specified URL as XML under a root "VA" element:
 * accelerator file path, selected sequence(s), noise levels, static errors
 * and the model sync period.
 * NOTE(review): the noiseLevels and staticErrors children are only written when a
 * sequence is selected — error settings entered before selecting a sequence are
 * not persisted; confirm this is intended.
 * @param url The URL to which the document should be saved.
 */
@Override
public void saveDocumentAs(URL url) {
    XmlDataAdaptor xda = XmlDataAdaptor.newEmptyDocumentAdaptor();
    DataAdaptor daLevel1 = xda.createChild("VA");
    //save accelerator file
    DataAdaptor daXMLFile = daLevel1.createChild("accelerator");
    daXMLFile.setValue("xmlFile", this.getAcceleratorFilePath());
    // save selected sequences
    List<String> sequenceNames;
    if ( getSelectedSequence() != null ) {
        DataAdaptor daSeq = daLevel1.createChild("sequences");
        daSeq.setValue("name", getSelectedSequence().getId());
        // a combo sequence is flattened into its constituent sequence names
        if ( getSelectedSequence() instanceof AcceleratorSeqCombo ) {
            AcceleratorSeqCombo asc = (AcceleratorSeqCombo) getSelectedSequence();
            sequenceNames = asc.getConstituentNames();
        }
        else {
            sequenceNames = new ArrayList<String>();
            sequenceNames.add( getSelectedSequence().getId() );
        }
        for ( final String sequenceName : sequenceNames ) {
            DataAdaptor daSeqComponents = daSeq.createChild( "seq" );
            daSeqComponents.setValue( "name", sequenceName );
        }
        DataAdaptor daNoise = daLevel1.createChild("noiseLevels");
        daNoise.setValue("quad", quadNoise);
        daNoise.setValue("dipole", dipoleNoise);
        daNoise.setValue("corrector", correctorNoise);
        daNoise.setValue("bpm", bpmNoise);
        daNoise.setValue("sol", solNoise);
        daNoise.setValue("rfAmp", rfAmpNoise);
        daNoise.setValue("rfPhase", rfPhaseNoise);
        DataAdaptor daStaticError = daLevel1.createChild("staticErrors");
        daStaticError.setValue("quad", quadStaticError);
        daStaticError.setValue("dipole", dipoleStaticError);
        daStaticError.setValue("corrector", correctorStaticError);
        daStaticError.setValue("bpm", bpmStaticError);
        daStaticError.setValue("sol", solStaticError);
        daStaticError.setValue("rfAmp", rfAmpStaticError);
        daStaticError.setValue("rfPhase", rfPhaseStaticError);
    }
    daLevel1.setValue( "modelSyncPeriod", _modelSyncPeriod );
    xda.writeToUrl(url);
    setHasChanges(false);
}
/**
 * Install a fresh PlainDocument for the main-window text pane and attach a
 * listener so that any edit (insert, removal, or attribute change) marks this
 * document as having unsaved changes.
 */
private void makeTextDocument() {
    textDocument = new PlainDocument();
    // every kind of document mutation flags the document dirty
    textDocument.addDocumentListener( new DocumentListener() {
        @Override
        public void insertUpdate( final DocumentEvent event ) {
            setHasChanges( true );
        }
        @Override
        public void removeUpdate( final DocumentEvent event ) {
            setHasChanges( true );
        }
        @Override
        public void changedUpdate( final DocumentEvent event ) {
            setHasChanges( true );
        }
    } );
}
/**
 * Create the default probe from the edit context: a transfer-map probe for a ring,
 * otherwise an envelope probe. The new probe becomes both the template (baseProbe)
 * and — after copying and initializing — the working probe installed into the
 * model scenario. Does nothing when no sequence is selected; any failure is
 * reported through displayError rather than propagated.
 */
private void createDefaultProbe() {
    if ( selectedSequence != null ) {
        try {
            baseProbe = ( selectedSequence instanceof xal.smf.Ring ) ? createRingProbe( selectedSequence ) : createEnvelopeProbe( selectedSequence );
            currentProbe = baseProbe.copy();
            currentProbe.initialize();
            // NOTE(review): assumes modelScenario is non-null here — verify initialization order
            modelScenario.setProbe( currentProbe );
        }
        catch ( Exception exception ) {
            displayError( "Error Creating Probe", "Probe Error", exception );
        }
    }
}
/**
 * Create a new transfer-map probe for tracking through a ring.
 * @param sequence the ring sequence the probe will track
 * @throws InstantiationException if the tracker or probe cannot be instantiated
 */
static private TransferMapProbe createRingProbe( final AcceleratorSeq sequence ) throws InstantiationException {
    // build the sequence-specific tracker and hand it straight to the probe factory
    return ProbeFactory.getTransferMapProbe( sequence, AlgorithmFactory.createTransferMapTracker( sequence ) );
}
/**
 * Create a new envelope probe for tracking through a linear sequence.
 * @param sequence the sequence the probe will track
 * @throws InstantiationException if the tracker or probe cannot be instantiated
 */
static private EnvelopeProbe createEnvelopeProbe( final AcceleratorSeq sequence ) throws InstantiationException {
    // build the adaptive envelope tracker and hand it straight to the probe factory
    return ProbeFactory.getEnvelopeProbe( sequence, AlgorithmFactory.createEnvTrackerAdapt( sequence ) );
}
/**
 * Register all document-level commands and mode models with the commander:
 * the probe editor, the three data-source modes (online model, PV Logger
 * snapshot, PV Logger movie), run/stop of the virtual accelerator, the noise
 * dialog, and the sync-period dialog.
 */
@Override
public void customizeCommands(Commander commander) {
    // open probe editor
    // TODO: implement probe editor support
    this.commander = commander;
    Action probeEditorAction = new AbstractAction("probe-editor") {
        static final long serialVersionUID = 0;
        @Override
        public void actionPerformed(ActionEvent event) {
            if ( baseProbe != null ) {
                // the server must be stopped while the template probe is edited
                stopServer();
                final SimpleProbeEditor probeEditor = new SimpleProbeEditor( getMainWindow(), baseProbe );
                baseProbe = probeEditor.getProbe();
                // working probe is a fresh copy of the edited template
                currentProbe = baseProbe.copy();
                currentProbe.initialize();
                if ( modelScenario != null ) {
                    modelScenario.setProbe(currentProbe);
                }
            }
            else {
                //Sequence has not been selected
                displayError("Probe Editor Error", "You must select a sequence before attempting to edit the probe.");
            }
        }
    };
    probeEditorAction.putValue(Action.NAME, "probe-editor");
    commander.registerAction(probeEditorAction);
    // action for using online model as engine
    olmModel.setSelected(true);
    olmModel.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent event) {
            isForOLM = true;
            isFromPVLogger = false;
        }
    });
    commander.registerModel("olm", olmModel);
    // action for using PV logger snapshot through online model
    pvlogModel.setSelected(false);
    pvlogModel.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent event) {
            isForOLM = true;
            isFromPVLogger = true;
            if (pvLogSelector == null) {
                // for PV Logger snapshot chooser
                plsc = new PVLogSnapshotChooser();
                pvLogSelector = plsc.choosePVLogId();
            } else
                pvLogSelector.setVisible(true);
        }
    });
    commander.registerModel("pvlogger", pvlogModel);
    // action for direct replaying of PVLogger logged data
    pvlogMovieModel.setSelected(false);
    pvlogMovieModel.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent event) {
            isForOLM = false;
            isFromPVLogger = true;
            if (pvLogSelector == null) {
                // for PV Logger snapshot chooser
                plsc = new PVLogSnapshotChooser();
                pvLogSelector = plsc.choosePVLogId();
            } else
                pvLogSelector.setVisible(true);
        }
    });
    commander.registerModel("pvlogMovie", pvlogMovieModel);
    // action for running model and Diagnostics acquisition
    Action runAction = new AbstractAction() {
        static final long serialVersionUID = 0;
        @Override
        public void actionPerformed(ActionEvent event) {
            // guard: refuse to start twice, without authorization, or without a sequence
            if ( vaRunning ) {
                JOptionPane.showMessageDialog( getMainWindow(), "Virtual Accelerator has already started.", "Warning!", JOptionPane.PLAIN_MESSAGE );
                return;
            }
            if(!Application.getApp().authorizeWithRBAC("Start")){
                JOptionPane.showMessageDialog( getMainWindow(), "You are unauthorized for this action.", "Warning!", JOptionPane.PLAIN_MESSAGE );
                return;
            }
            if ( getSelectedSequence() == null ) {
                JOptionPane.showMessageDialog( getMainWindow(), "You need to select sequence(s) first.", "Warning!", JOptionPane.PLAIN_MESSAGE );
            } else {
                // use PV logger
                if ( isFromPVLogger ) {
                    long pvLoggerId = plsc.getPVLogId();
                    runServer();
                    plds = new PVLoggerDataSource(pvLoggerId);
                    // use PVLogger to construct the model
                    if (isForOLM) {
                        // load the settings from the PV Logger
                        putSetPVsFromPVLogger();
                        // synchronize with the online model
                        MODEL_SYNC_TIMER.setEventHandler( getOnlineModelSynchronizer() );
                    }
                    else { // directly use PVLogger data for replay
                        MODEL_SYNC_TIMER.setEventHandler( getPVLoggerSynchronizer() );
                    }
                }
                // use online model
                else {
                    if ( currentProbe == null ) {
                        createDefaultProbe();
                        if ( currentProbe == null ) {
                            displayWarning( "Warning!", "You need to select probe file first." );
                            return;
                        }
                        // re-enter this handler now that a default probe exists (falls into the else branch below)
                        actionPerformed( event );
                    }
                    else {
                        runServer();
                    }
                    // put the initial B_Book PVs to the server
                    configFieldBookPVs();
                    //put "set" PVs to the server
                    putSetPVs();
                    // continuously loop through the next 3 steps
                    System.out.println( "Setup to synchronize the online model periodically..." );
                    MODEL_SYNC_TIMER.setEventHandler( getOnlineModelSynchronizer() );
                }
                MODEL_SYNC_TIMER.startNowWithInterval( _modelSyncPeriod, 0 );
                MODEL_SYNC_TIMER.resume();
            }
        }
    };
    runAction.putValue(Action.NAME, "run-va");
    commander.registerAction(runAction);
    // stop the channel access server
    Action stopAction = new AbstractAction() {
        static final long serialVersionUID = 0;
        @Override
        public void actionPerformed(ActionEvent event) {
            stopServer();
        }
    };
    stopAction.putValue(Action.NAME, "stop-va");
    commander.registerAction(stopAction);
    // set noise level: seed the dialog fields with the current values, then show it
    Action setNoiseAction = new AbstractAction() {
        static final long serialVersionUID = 0;
        @Override
        public void actionPerformed(ActionEvent event) {
            df_quadNoise.setValue(quadNoise);
            df_bendNoise.setValue(dipoleNoise);
            df_dipCorrNoise.setValue(correctorNoise);
            df_bpmNoise.setValue(bpmNoise);
            df_solNoise.setValue(solNoise);
            df_rfAmpNoise.setValue(rfAmpNoise);
            df_rfPhaseNoise.setValue(rfPhaseNoise);
            df_quadStatErr.setValue(quadStaticError);
            df_bendStatErr.setValue(dipoleStaticError);
            df_dipCorrStatErr.setValue(correctorStaticError);
            df_bpmStatErr.setValue(bpmStaticError);
            df_solStatErr.setValue(solStaticError);
            df_rfAmpStatErr.setValue(rfAmpStaticError);
            df_rfPhaseStatErr.setValue(rfPhaseStaticError);
            setNoise.setVisible(true);
        }
    };
    setNoiseAction.putValue(Action.NAME, "set-noise");
    commander.registerAction(setNoiseAction);
    // configure synchronization period via a simple input dialog
    final Action synchConfigAction = new AbstractAction() {
        static final long serialVersionUID = 0;
        @Override
        public void actionPerformed(ActionEvent event) {
            final String result = JOptionPane.showInputDialog( getMainWindow(), "Set the Model Synchronization Period (milliseconds): ", _modelSyncPeriod );
            if ( result != null ) {
                try {
                    final long modelSyncPeriod = Long.parseLong( result );
                    setModelSyncPeriod( modelSyncPeriod );
                }
                catch( Exception exception ) {
                    displayError( "Error setting Model Sync Period!", exception.getMessage() );
                }
            }
        }
    };
    synchConfigAction.putValue( Action.NAME, "sync-config" );
    commander.registerAction( synchConfigAction );
}
/** handle this document being closed: tear down the virtual accelerator server */
@Override
public void willClose() {
    System.out.println( "Document will be closed" );
    destroyServer();
}
/**
 * Restore the document state from its source URL: accelerator file, selected
 * sequence(s), noise levels, static errors and model sync period. The passed
 * adaptor is ignored — the source URL is re-read and the "VA" root is used instead.
 * @param adaptor unused; state is read from getSource()
 */
public void update( final DataAdaptor adaptor ) {
    if ( getSource() != null ) {
        XmlDataAdaptor xda = XmlDataAdaptor.adaptorForUrl( getSource(), false );
        DataAdaptor da1 = xda.childAdaptor( "VA" );
        //restore accelerator file
        applySelectedAcceleratorWithDefaultPath( da1.childAdaptor( "accelerator" ).stringValue( "xmlFile" ) );
        // set up the right sequence combo from selected primaries:
        List<DataAdaptor> temp = da1.childAdaptors( "sequences" );
        if ( temp.isEmpty() ) return; // bail out, nothing left to do
        ArrayList<AcceleratorSeq> seqs = new ArrayList<AcceleratorSeq>();
        DataAdaptor da2a = da1.childAdaptor( "sequences" );
        String seqName = da2a.stringValue( "name" );
        // error settings are optional in older documents — restore only when present
        DataAdaptor daNoise = da1.childAdaptor("noiseLevels");
        if (daNoise != null) {
            quadNoise = daNoise.doubleValue("quad");
            dipoleNoise = daNoise.doubleValue("dipole");
            correctorNoise = daNoise.doubleValue("corrector");
            bpmNoise = daNoise.doubleValue("bpm");
            solNoise = daNoise.doubleValue("sol");
            rfAmpNoise = daNoise.doubleValue("rfAmp");
            rfPhaseNoise = daNoise.doubleValue("rfPhase");
        }
        DataAdaptor daStaticError = da1.childAdaptor("staticErrors");
        if (daStaticError != null) {
            quadStaticError = daStaticError.doubleValue("quad");
            dipoleStaticError = daStaticError.doubleValue("dipole");
            correctorStaticError = daStaticError.doubleValue("corrector");
            bpmStaticError = daStaticError.doubleValue("bpm");
            solStaticError = daStaticError.doubleValue("sol");
            rfAmpStaticError = daStaticError.doubleValue("rfAmp");
            rfPhaseStaticError = daStaticError.doubleValue("rfPhase");
        }
        // rebuild the selected sequence from its saved constituent names
        temp = da2a.childAdaptors("seq");
        for ( final DataAdaptor da : temp ) {
            seqs.add( getAccelerator().getSequence( da.stringValue("name") ) );
        }
        // a sequence saved under the name "Ring" is reconstructed as a Ring, not a plain combo
        if (seqName.equals("Ring"))
            setSelectedSequence(new Ring(seqName, seqs));
        else
            setSelectedSequence(new AcceleratorSeqCombo(seqName, seqs));
        setSelectedSequenceList(seqs.subList(0, seqs.size()));
        createDefaultProbe();
        // NOTE(review): assumes modelScenario is non-null here (createDefaultProbe also
        // sets the probe on it) — verify initialization order to rule out an NPE
        modelScenario.setProbe(currentProbe);
        if ( da1.hasAttribute( "modelSyncPeriod" ) ) {
            _modelSyncPeriod = da1.longValue( "modelSyncPeriod" );
        }
    }
}
/** @return the online-model scenario used for the simulation (may be null before setup) */
protected Scenario getScenario() {
    return modelScenario;
}
/** @return true while the virtual accelerator server is running */
protected boolean isVARunning() {
    return vaRunning;
}
/**
 * Update the warning/alarm limit channels for every magnet based on the current
 * Field Book value: warning limits at book-field ± 5% of |book field|, alarm
 * limits at ± 10%. The duplicated lower/upper channel handling of the original
 * is factored into {@link #putLimitPair}; per-magnet exceptions are logged and
 * do not abort the loop, and all puts are flushed at the end.
 */
private void updateLimitChannels() {
    for ( final Electromagnet magnet : mags ) {
        try {
            final Channel bookChannel = magnet.getMainSupply().findChannel( MagnetMainSupply.FIELD_BOOK_HANDLE );
            final Channel fieldChannel = magnet.getMainSupply().findChannel( MagnetMainSupply.FIELD_SET_HANDLE );
            if ( bookChannel != null && bookChannel.isConnected() ) {
                final double bookField = bookChannel.getValDbl();
                // warning band is +/-5% of the book field magnitude, alarm band +/-10%
                putLimitPair( fieldChannel.getWarningLimitPVs(), fieldChannel, bookField, 0.05 * Math.abs( bookField ) );
                putLimitPair( fieldChannel.getAlarmLimitPVs(), fieldChannel, bookField, 0.1 * Math.abs( bookField ) );
            }
        }
        catch ( NoSuchChannelException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( ConnectionException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( GetException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( PutException exception ) {
            System.err.println( exception.getMessage() );
        }
    }
    Channel.flushIO();
}

/**
 * Post (center - tolerance) to the lower limit PV and (center + tolerance) to the upper one.
 * @param limitPVs two-element array of lower/upper limit PV names
 * @param fieldChannel the field channel whose value transform the limit channels share
 * @param center the nominal (book) field value
 * @param tolerance the half-width of the limit band
 */
private void putLimitPair( final String[] limitPVs, final Channel fieldChannel, final double center, final double tolerance ) throws ConnectionException, PutException {
    final Channel lowerChannel = CHANNEL_SERVER_FACTORY.getChannel( limitPVs[0], fieldChannel.getValueTransform() );
    if ( lowerChannel.connectAndWait() ) {
        lowerChannel.putValCallback( center - tolerance, this );
    }
    final Channel upperChannel = CHANNEL_SERVER_FACTORY.getChannel( limitPVs[1], fieldChannel.getValueTransform() );
    if ( upperChannel.connectAndWait() ) {
        upperChannel.putValCallback( center + tolerance, this );
    }
}
/**
 * Populate the readback PVs for one update cycle: post the measured repetition
 * rate and the beam-trigger/diagnostics events, update every readback from its
 * setpoint (applying the configured noise and static errors), flush channel
 * access, refresh the table view and update the limit channels.
 */
private void putReadbackPVs() {
    // set beam trigger PV to "on"
    try {
        final Date now = new Date();
        if ( _repRateChannel != null ) {
            final double updatePeriod = 0.001 * ( now.getTime() - _lastUpdate.getTime() ); // period of update in seconds
            // rep rate = inverse of the actual elapsed time between updates
            _repRateChannel.putValCallback( 1.0 / updatePeriod , this );
        }
        _lastUpdate = now;
        if ( beamOnEvent != null ) beamOnEvent.putValCallback( 0, this );
        beamOnEventCounter++;
        if ( beamOnEventCount != null ) beamOnEventCount.putValCallback( beamOnEventCounter, this );
        if ( slowDiagEvent != null ) slowDiagEvent.putValCallback( 0, this );
    } catch (ConnectionException e) {
        System.err.println(e);
    } catch (PutException e) {
        System.err.println(e);
    }
    // get the "set" PV value, add noise, and then put to the corresponding readback PV.
    for ( final ReadbackSetRecord record : READBACK_SET_RECORDS ) {
        try {
            record.updateReadback( ch_noiseMap, ch_staticErrorMap, this );
        }
        catch (Exception e) {
            // a single failing record must not abort the remaining updates
            System.err.println( e.getMessage() );
        }
    }
    Channel.flushIO();
    // refresh the whole visible table so the new readbacks are displayed
    final int rowCount = READBACK_SET_TABLE_MODEL.getRowCount();
    if ( rowCount > 0 ) {
        READBACK_SET_TABLE_MODEL.fireTableRowsUpdated( 0, rowCount - 1 );
    }
    updateLimitChannels();
}
/**
 * Populate the readback PVs from a PV Logger snapshot: fire the event PVs, then use each
 * logged magnet-field value as the basis for a noisy readback posting.
 */
private void putReadbackPVsFromPVLogger() {
    final Map<String,Double> loggedFieldMap = plds.getMagnetMap();
    // set beam trigger PV to "on"
    try {
        if ( beamOnEvent != null ) {
            beamOnEvent.putVal(0);
        }
        beamOnEventCounter++;
        if ( beamOnEventCount != null ) {
            beamOnEventCount.putVal(beamOnEventCounter);
        }
        if ( slowDiagEvent != null ) {
            slowDiagEvent.putVal( 0 );
        }
    }
    catch (ConnectionException e) {
        System.err.println(e);
    }
    catch (PutException e) {
        System.err.println(e);
    }
    // use the logged value as the basis, add noise, and put to the corresponding readback PV
    for ( final ReadbackSetRecord record : READBACK_SET_RECORDS ) {
        try {
            final String readbackName = record.getReadbackChannel().channelName();
            if ( loggedFieldMap.containsKey( readbackName ) ) {
                final double loggedValue = loggedFieldMap.get( readbackName ).doubleValue();
                record.updateReadback( loggedValue, ch_noiseMap, ch_staticErrorMap, this );
            }
        }
        catch ( Exception e ) {
            System.err.println( e.getMessage() );
        }
    }
    READBACK_SET_TABLE_MODEL.fireTableDataChanged();
}
/** Seed each magnet's field book PV with the magnet's default field converted to CA units. */
private void configFieldBookPVs() {
    for ( final Electromagnet magnet : mags ) {
        try {
            final Channel bookChannel = magnet.getMainSupply().findChannel( MagnetMainSupply.FIELD_BOOK_HANDLE );
            // only post when the channel exists and connects
            if ( bookChannel != null && bookChannel.connectAndWait() ) {
                bookChannel.putValCallback( magnet.toCAFromField( magnet.getDfltField() ), this );
            }
        }
        catch ( NoSuchChannelException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( ConnectionException exception ) {
            System.err.println( exception.getMessage() );
        }
        catch ( PutException exception ) {
            System.err.println( exception.getMessage() );
        }
    }
}
/**
 * Populate all the "set" PVs from design values: magnet field setpoints (trim supplies
 * zeroed), then RF cavity amplitude and phase setpoints.
 */
private void putSetPVs() {
    // for all magnets
    for ( final Electromagnet magnet : mags ) {
        try {
            final Channel fieldSetChannel = magnet.getMainSupply().getAndConnectChannel( MagnetMainSupply.FIELD_SET_HANDLE );
            final double caField = magnet.toCAFromField( magnet.getDfltField() );
            fieldSetChannel.putValCallback( caField, this );
            // trimmed quadrupoles also get their trim supply zeroed
            if ( magnet instanceof TrimmedQuadrupole ) {
                final Channel trimSetChannel = ((TrimmedQuadrupole)magnet).getTrimSupply().getAndConnectChannel( MagnetTrimSupply.FIELD_SET_HANDLE );
                trimSetChannel.putValCallback( 0.0, this );
            }
        }
        catch (NoSuchChannelException e) {
            System.err.println(e.getMessage());
        }
        catch (ConnectionException e) {
            System.err.println(e.getMessage());
        }
        catch (PutException e) {
            System.err.println(e.getMessage());
        }
    }
    // for all rf cavities
    for ( final RfCavity cavity : rfCavities ) {
        try {
            final Channel ampSetChannel = cavity.findChannel( RfCavity.CAV_AMP_SET_HANDLE );
            if ( ampSetChannel.isValid() ) {
                ampSetChannel.connectAndWait();
                // SCL cavities scale the default amplitude by the structure transit-time factor
                if (cavity instanceof xal.smf.impl.SCLCavity) {
                    ampSetChannel.putValCallback( cavity.getDfltCavAmp()*((SCLCavity)cavity).getStructureTTF(), this );
                }
                else {
                    ampSetChannel.putValCallback( cavity.getDfltCavAmp(), this );
                }
            }
            final Channel phaseSetChannel = cavity.findChannel( RfCavity.CAV_PHASE_SET_HANDLE );
            if ( phaseSetChannel.isValid() ) {
                phaseSetChannel.connectAndWait();
                phaseSetChannel.putValCallback( cavity.getDfltCavPhase(), this );
            }
        }
        catch (NoSuchChannelException e) {
            System.err.println(e.getMessage());
        }
        catch (ConnectionException e) {
            System.err.println(e.getMessage());
        }
        catch (PutException e) {
            System.err.println(e.getMessage());
        }
    }
    Channel.flushIO();
}
/** Restore magnet field setpoints from the power-supply values recorded by the PV Logger. */
private void putSetPVsFromPVLogger() {
    final Map<String,Double> loggedSupplyMap = plds.getMagnetPSMap();
    for ( final Electromagnet magnet : mags ) {
        try {
            final Channel fieldSetChannel = magnet.getMainSupply().getAndConnectChannel( MagnetMainSupply.FIELD_SET_HANDLE );
            final String channelID = fieldSetChannel.getId();
            // only channels present in the snapshot get restored
            if ( loggedSupplyMap.containsKey( channelID ) ) {
                fieldSetChannel.putValCallback( loggedSupplyMap.get( channelID ).doubleValue(), this );
            }
        }
        catch (NoSuchChannelException e) {
            System.err.println(e.getMessage());
        }
        catch (ConnectionException e) {
            System.err.println(e.getMessage());
        }
        catch (PutException e) {
            System.err.println(e.getMessage());
        }
    }
}
/**
 * This method is for populating the diagnostic PVs (only BPMs + WSs for now).
 * It samples the freshly-run model trajectory, posts noisy BPM position/amplitude/phase
 * values and wire-scanner sigmas to channel access, and updates the diagnostic plots.
 */
protected void putDiagPVs() {
    // CKA Nov 25, 2013
    // adaptor exposing fixed-orbit / Twiss computations uniformly for the current trajectory type
    SimpleSimResultsAdaptor cmpCalcEngine = new SimpleSimResultsAdaptor( modelScenario.getTrajectory() );
    // temporary lists for accumulating model-orbit, BPM and wire-scanner samples before conversion to arrays
    int i = 0;
    List<Double> tempBPMx = new ArrayList<Double>();
    List<Double> tempBPMy = new ArrayList<Double>();
    List<Double> tempBPMp = new ArrayList<Double>();
    List<Double> tempWSx = new ArrayList<Double>();
    List<Double> tempWSy = new ArrayList<Double>();
    List<Double> tempWSp = new ArrayList<Double>();
    List<Double> tempbeampos = new ArrayList<Double>();
    List<Double> tempbeamx = new ArrayList<Double>();
    List<Double> tempbeamy = new ArrayList<Double>();
    List<Double> tempsigmaz = new ArrayList<Double>();
    // walk the model trajectory collecting position, transverse displacement and longitudinal envelope radius
    final Iterator<? extends ProbeState<?>> stateIter = modelScenario.getTrajectory().stateIterator();
    while ( stateIter.hasNext() ) {
        final ProbeState<?> state = stateIter.next();
        // EnvelopeProbeState state = (EnvelopeProbeState) stateIter.next();
        double position = state.getPosition();
        final PhaseVector coordinateVector = cmpCalcEngine.computeFixedOrbit( state );
        double x = coordinateVector.getx() * 1000;    // m -> mm
        double y = coordinateVector.gety()* 1000;     // m -> mm
        final Twiss[] twiss = cmpCalcEngine.computeTwissParameters( state );
        double sigmaz = twiss[2].getEnvelopeRadius() * 1000;    // longitudinal envelope radius in mm
        tempbeampos.add(position);
        tempbeamx.add(x);
        tempbeamy.add(y);
        tempsigmaz.add(sigmaz);
    }
    // convert the accumulated model samples into plain arrays for plotting
    double beamp[] = new double[tempbeampos.size()];
    double beamx[] = new double[tempbeampos.size()];
    double beamy[] = new double[tempbeampos.size()];
    double beamsigmaz[] = new double[tempbeampos.size()];
    for (i = 0; i < tempbeampos.size(); i++) {
        beamp[i] = tempbeampos.get(i);
        beamx[i] = tempbeamx.get(i);
        beamy[i] = tempbeamy.get(i);
        beamsigmaz[i] = tempsigmaz.get(i);
    }
    try {
        _diagplot.showbeampositionplot(beamp, beamx, beamy);
        _diagplot.showsigmazplot(beamp, beamsigmaz);
    } catch (ConnectionException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (GetException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    // for BPMs
    for ( final BPM bpm : bpms ) {
        final Channel bpmXAvgChannel = bpm.getChannel( BPM.X_AVG_HANDLE );
        final Channel bpmXTBTChannel = bpm.getChannel( BPM.X_TBT_HANDLE );    // TODO: CKA - NEVER USED
        final Channel bpmYAvgChannel = bpm.getChannel( BPM.Y_AVG_HANDLE );
        final Channel bpmYTBTChannel = bpm.getChannel( BPM.Y_TBT_HANDLE );    // TODO: CKA - NEVER USED
        final Channel bpmAmpAvgChannel = bpm.getChannel( BPM.AMP_AVG_HANDLE );
        try {
            ProbeState<?> probeState = modelScenario.getTrajectory().stateForElement( bpm.getId() );
            //System.out.println("Now updating " + bpm.getId());
            // CKA - Transfer map probes and Envelope probes both exposed ICoordinateState
            // so we should be able to compute a "fixed orbit" in any context
            //
            // CKA Nov 25, 2013
            // if ( probeState instanceof ICoordinateState ) {
            //     final PhaseVector coordinates = ((ICoordinateState)probeState).getFixedOrbit();
            final PhaseVector coordinates = cmpCalcEngine.computeFixedOrbit(probeState);
            // final PhaseVector coordinates = cmpCalcEngine.computeCoordinatePosition(probeState);
            // For SNS Ring BPM system, we only measure the signal with respect to the center of the beam pipe.
            // TO-DO: the turn by turn arrays should really be generated from betatron motion rather than random data about the nominal
            final double[] xTBT = NoiseGenerator.noisyArrayForNominal( coordinates.getx() * 1000.0, DEFAULT_BPM_WAVEFORM_SIZE, DEFAULT_BPM_WAVEFORM_DATA_SIZE, bpmNoise, bpmStaticError );
            final double xAvg = NoiseGenerator.getAverage( xTBT, DEFAULT_BPM_WAVEFORM_DATA_SIZE );
            final double[] yTBT = NoiseGenerator.noisyArrayForNominal( coordinates.gety() * 1000.0, DEFAULT_BPM_WAVEFORM_SIZE, DEFAULT_BPM_WAVEFORM_DATA_SIZE, bpmNoise, bpmStaticError );
            final double yAvg = NoiseGenerator.getAverage( yTBT, DEFAULT_BPM_WAVEFORM_DATA_SIZE );
            bpmXAvgChannel.putValCallback( xAvg, this );
            // bpmXTBTChannel.putValCallback( xTBT, this );    // don't post to channel access until the turn by turn data is generated correctly
            bpmYAvgChannel.putValCallback( yAvg, this );
            // bpmYTBTChannel.putValCallback( yTBT, this );    // don't post to channel access until the turn by turn data is generated correctly
            final double position = getSelectedSequence().getPosition(bpm);
            tempBPMp.add(position);
            tempBPMx.add(xAvg);
            tempBPMy.add(yAvg);
            // hardwired BPM amplitude noise and static error to 5% and 0.1mm (randomly) respectively
            bpmAmpAvgChannel.putVal( NoiseGenerator.setValForPV( 20., 5., 0.1, false) );
            // calculate the BPM phase (for linac only)
            if ( !( currentProbe instanceof TransferMapProbe ) && !( bpm instanceof RingBPM ) ) {
                final Channel bpmPhaseAvgChannel = bpm.getChannel( BPM.PHASE_AVG_HANDLE );
                // phase derived from arrival time and the BPM bucket frequency (MHz), folded into [0, 360)
                bpmPhaseAvgChannel.putValCallback( probeState.getTime() * 360. * ( ( (BPMBucket)bpm.getBucket("bpm") ).getFrequency() * 1.e6 ) % 360.0, this );
            }
        } catch (ConnectionException e) {
            System.err.println( e.getMessage() );
        } catch (PutException e) {
            System.err.println( e.getMessage() );
        }
    }
    // the array of bpm data
    double bpmp[] = new double[tempBPMp.size()];
    double bpmx[] = new double[tempBPMp.size()];
    double bpmy[] = new double[tempBPMp.size()];
    // get the bpmdata[] from the list
    for (i = 0; i < tempBPMp.size(); i++) {
        bpmp[i] = tempBPMp.get(i);
        bpmx[i] = tempBPMx.get(i);
        bpmy[i] = tempBPMy.get(i);
    }
    // show the BPM plot
    try {
        _diagplot.showbpmplot(bpmp, bpmx,bpmy);
    } catch (ConnectionException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (GetException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    // for WSs
    for ( final ProfileMonitor ws : wss ) {
        Channel wsX = ws.getChannel(ProfileMonitor.H_SIGMA_M_HANDLE);
        Channel wsY = ws.getChannel(ProfileMonitor.V_SIGMA_M_HANDLE);
        try {
            ProbeState<?> probeState = modelScenario.getTrajectory().stateForElement( ws.getId() );
            // wire-scanner sigmas are only meaningful for envelope probes
            if (modelScenario.getProbe() instanceof EnvelopeProbe) {
                final Twiss[] twiss = ( (EnvelopeProbeState)probeState ).getCovarianceMatrix().computeTwiss();
                wsX.putValCallback( twiss[0].getEnvelopeRadius() * 1000., this );
                wsY.putValCallback( twiss[1].getEnvelopeRadius() * 1000., this );
                tempWSp.add(ws.getPosition());
                tempWSx.add(twiss[0].getEnvelopeRadius() * 1000);
                tempWSy.add(twiss[1].getEnvelopeRadius() * 1000);
            }
        } catch (ConnectionException e) {
            System.err.println( e.getMessage() );
        } catch (PutException e) {
            System.err.println( e.getMessage() );
        }
    }
    // the array of ws data
    double wsp[] = new double[tempWSp.size()];
    double wsx[] = new double[tempWSp.size()];
    double wsy[] = new double[tempWSp.size()];
    // get the wsdata[] from the list
    for (i = 0; i < tempWSp.size(); i++) {
        wsp[i] = tempWSp.get(i);
        wsx[i] = tempWSx.get(i);
        wsy[i] = tempWSy.get(i);
    }
    // show the wire-scanner sigma plot
    try {
        _diagplot.showsigmaplot(wsp, wsx, wsy);
    } catch (ConnectionException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    } catch (GetException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    Channel.flushIO();
}
/** Populate the diagnostic PVs (BPM position, amplitude and phase) from a PV Logger snapshot. */
private void putDiagPVsFromPVLogger() {
    // for BPMs
    final Map<String,Double> bpmXMap = plds.getBPMXMap();
    final Map<String,Double> bpmYMap = plds.getBPMYMap();
    final Map<String,Double> bpmAmpMap = plds.getBPMAmpMap();
    final Map<String,Double> bpmPhaseMap = plds.getBPMPhaseMap();
    for ( final BPM bpm : bpms ) {
        Channel bpmX = bpm.getChannel(BPM.X_AVG_HANDLE);
        Channel bpmY = bpm.getChannel(BPM.Y_AVG_HANDLE);
        Channel bpmAmp = bpm.getChannel(BPM.AMP_AVG_HANDLE);
        try {
            // leftover debug output disabled: it spammed stderr once per BPM per sync
            // (the equivalent line in putDiagPVs() is also commented out)
            //System.err.println("Now updating " + bpm.getId());
            if ( bpmXMap.containsKey( bpmX.getId() ) ) {
                bpmX.putVal( NoiseGenerator.setValForPV( bpmXMap.get( bpmX.getId() ).doubleValue(), bpmNoise, getStaticError(bpmStaticError), false ) );
            }
            if ( bpmYMap.containsKey( bpmY.getId() ) ) {
                bpmY.putVal( NoiseGenerator.setValForPV( bpmYMap.get( bpmY.getId() ).doubleValue(), bpmNoise, getStaticError(bpmStaticError), false ) );
            }
            // BPM amplitude
            if (bpmAmpMap.containsKey(bpmAmp.getId()))
                bpmAmp.putVal( NoiseGenerator.setValForPV( bpmAmpMap.get( bpmAmp.getId() ).doubleValue(), 5., 0.1, false) );
            // BPM phase (for linac only)
            if ( !( currentProbe instanceof TransferMapProbe ) ) {
                Channel bpmPhase = bpm.getChannel( BPM.PHASE_AVG_HANDLE );
                if ( bpmPhaseMap.containsKey( bpmPhase.getId() ) ) {
                    bpmPhase.putVal( bpmPhaseMap.get( bpmPhase.getId() ).doubleValue() );
                }
            }
        } catch ( ConnectionException e ) {
            System.err.println( e.getMessage() );
        } catch ( PutException e ) {
            System.err.println( e.getMessage() );
        }
    }
}
/**
 * Handle the CA put callback. Intentionally a no-op: puts issued by this document are
 * fire-and-forget, so completions require no follow-up action.
 */
@Override
public void putCompleted( final Channel chan ) {}
/**
 * Returns a static error uniformly distributed within +/- the given half-width.
 * @param staticErrorSigma half-width of the uniform distribution
 * @return a random offset in the range [-staticErrorSigma, +staticErrorSigma)
 */
private double getStaticError(double staticErrorSigma) {
    // map Math.random()'s [0, 1) range onto a symmetric interval about zero
    final double unitOffset = Math.random() - 0.5;
    return staticErrorSigma * unitOffset * 2;
}
/**
 * Look up the configured noise level for the given element by its device type.
 * @param element the accelerator node whose noise level is requested
 * @return the noise level for the matching device family, or 0.0 for unrecognized types
 */
private double getNoiseForElement(AcceleratorNode element) {
    if ( element.isKindOf( Quadrupole.s_strType ) ) {
        return quadNoise;
    }
    else if ( element.isKindOf( Bend.s_strType ) ) {
        return dipoleNoise;
    }
    else if ( element.isKindOf( HDipoleCorr.s_strType ) ) {
        return correctorNoise;
    }
    else if ( element.isKindOf( VDipoleCorr.s_strType ) ) {
        return correctorNoise;
    }
    else if ( element.isKindOf( Solenoid.s_strType ) ) {
        return solNoise;
    }
    return 0.0;
}
/**
 * Draw a random static error for the given element, scaled by the configured
 * half-width for its device type.
 * @param element the accelerator node whose static error is requested
 * @return a uniformly distributed offset within the type's configured half-width
 */
private double getStaticErrorForElement(AcceleratorNode element) {
    // checks are applied in sequence, so a node matching several types takes the LAST matching width
    double errorHalfWidth = 0.0;
    if ( element.isKindOf( Quadrupole.s_strType ) ) errorHalfWidth = quadStaticError;
    if ( element.isKindOf( Bend.s_strType ) ) errorHalfWidth = dipoleStaticError;
    if ( element.isKindOf( HDipoleCorr.s_strType ) ) errorHalfWidth = correctorStaticError;
    if ( element.isKindOf( VDipoleCorr.s_strType ) ) errorHalfWidth = correctorStaticError;
    if ( element.isKindOf( Solenoid.s_strType ) ) errorHalfWidth = solStaticError;
    return getStaticError( errorHalfWidth );
}
/**
 * Create the map between the "readback" and "set" PVs. Rebuilds READBACK_SET_RECORDS
 * for all magnets and RF cavities of the selected sequence, and populates the
 * per-readback-channel noise and static-error maps used when posting readback values.
 */
private void configureReadbacks() {
    READBACK_SET_RECORDS.clear();
    // noise amplitude and fixed static error keyed by readback channel
    ch_noiseMap = new LinkedHashMap<Channel, Double>();
    ch_staticErrorMap = new LinkedHashMap<Channel, Double>();
    if ( selectedSequence != null ) {
        // for magnet PVs
        for ( final Electromagnet em : mags ) {
            READBACK_SET_RECORDS.add( new ReadbackSetRecord( em, em.getChannel( Electromagnet.FIELD_RB_HANDLE ), em.getChannel( MagnetMainSupply.FIELD_SET_HANDLE ) ) );
            // handle the trimmed magnets
            if ( em.isKindOf( TrimmedQuadrupole.s_strType ) ) {
                READBACK_SET_RECORDS.add( new ReadbackSetRecord( em, em.getChannel( MagnetTrimSupply.FIELD_RB_HANDLE ), em.getChannel( MagnetTrimSupply.FIELD_SET_HANDLE ) ) );
                // trim readbacks get no added noise or static error
                ch_noiseMap.put( em.getChannel( MagnetTrimSupply.FIELD_RB_HANDLE ), 0.0 );
                ch_staticErrorMap.put( em.getChannel( MagnetTrimSupply.FIELD_RB_HANDLE ), 0.0 );
            }
            // set up the map between the magnet readback PV and its noise level
            ch_noiseMap.put( em.getChannel( Electromagnet.FIELD_RB_HANDLE), getNoiseForElement(em) );
            ch_staticErrorMap.put( em.getChannel( Electromagnet.FIELD_RB_HANDLE), getStaticErrorForElement(em) );
        }
        // for RF PVs
        for ( final RfCavity rfCav : rfCavities ) {
            final Channel ampSetChannel = rfCav.findChannel( RfCavity.CAV_AMP_SET_HANDLE );
            final Channel ampReadChannel = rfCav.findChannel( RfCavity.CAV_AMP_AVG_HANDLE );
            if ( ampReadChannel != null && ampReadChannel.isValid() ) {
                if ( ampSetChannel != null && ampSetChannel.isValid() ) {
                    READBACK_SET_RECORDS.add( new ReadbackSetRecord( rfCav, ampReadChannel, ampSetChannel ) );
                }
                // the readback still gets noise even when there is no corresponding set channel
                ch_noiseMap.put( ampReadChannel, rfAmpNoise );
                ch_staticErrorMap.put( ampReadChannel, getStaticError(rfAmpStaticError) );
            }
            final Channel phaseSetChannel = rfCav.findChannel( RfCavity.CAV_PHASE_SET_HANDLE );
            final Channel phaseReadChannel = rfCav.findChannel( RfCavity.CAV_PHASE_AVG_HANDLE );
            if ( phaseReadChannel != null && phaseReadChannel.isValid() ) {
                if ( phaseSetChannel != null && phaseSetChannel.isValid() ) {
                    READBACK_SET_RECORDS.add( new ReadbackSetRecord( rfCav, phaseReadChannel, phaseSetChannel ) );
                }
                ch_noiseMap.put( phaseReadChannel, rfPhaseNoise );
                ch_staticErrorMap.put( phaseReadChannel, getStaticError(rfPhaseStaticError) );
            }
        }
        // order records by position along the sequence and publish them to the table model
        Collections.sort( READBACK_SET_RECORDS, new ReadbackSetRecordPositionComparator( selectedSequence ) );
        READBACK_SET_TABLE_MODEL.setRecords( new ArrayList<ReadbackSetRecord>( READBACK_SET_RECORDS ) );
    }
}
/** run the VA server */
private void runServer() {
    // NOTE(review): only flips the running flag; MODEL_SYNC_TIMER is suspended by stopServer()
    // but not resumed here — presumably the timer is (re)started elsewhere; confirm before relying on it
    vaRunning = true;
}
/** stop the VA Server */
private void stopServer() {
    // halt periodic model synchronization before marking the server stopped
    MODEL_SYNC_TIMER.suspend();
    vaRunning = false;
}
/** Destroy the VA Server: stop synchronization, tear down the server instance and clear its reference. */
void destroyServer() {
    try {
        stopServer();
        if ( _vaServer == null ) return;    // nothing to tear down
        _vaServer.destroy();
        _vaServer = null;
    }
    catch( Exception exception ) {
        exception.printStackTrace();
    }
}
/**
 * Handle a change of the accelerator: stop the server, clear the probes and
 * re-fetch the timing-center channels from the new accelerator.
 */
@Override
public void acceleratorChanged() {
    if (accelerator != null) {
        stopServer();
        // probes belong to the previous accelerator; they are rebuilt when a sequence is selected
        baseProbe = null;
        currentProbe = null;
        _repRateChannel = accelerator.getTimingCenter().findChannel( TimingCenter.REP_RATE_HANDLE );
        beamOnEvent = accelerator.getTimingCenter().findChannel( TimingCenter.BEAM_ON_EVENT_HANDLE );
        beamOnEventCount = accelerator.getTimingCenter().findChannel( TimingCenter.BEAM_ON_EVENT_COUNT_HANDLE );
        slowDiagEvent = accelerator.getTimingCenter().findChannel( TimingCenter.SLOW_DIAGNOSTIC_EVENT_HANDLE );
        setHasChanges( true );
    }
}
/**
 * Handle a change of the selected sequence: rebuild the VA server, collect the relevant
 * nodes (magnets, RF cavities, BPMs, wire scanners), reconfigure the readback maps,
 * rebuild the on-line model scenario and create a fresh default probe.
 */
@Override
public void selectedSequenceChanged() {
    destroyServer();
    if (selectedSequence != null) {
        try {
            _vaServer = new VAServer( selectedSequence );
        }
        catch( Exception exception ) {
            exception.printStackTrace();
        }
        // get all nodes (added by liyong)
        nodes = getSelectedSequence().getAllNodes();
        // get electro magnets
        TypeQualifier typeQualifier = QualifierFactory.qualifierWithStatusAndTypes( true, Electromagnet.s_strType );
        mags = getSelectedSequence().<Electromagnet>getAllNodesWithQualifier( typeQualifier );
        // get all the rf cavities (fixed: was a redundant double assignment "typeQualifier = typeQualifier = ...")
        typeQualifier = QualifierFactory.qualifierWithStatusAndTypes( true, RfCavity.s_strType );
        rfCavities = getSelectedSequence().getAllInclusiveNodesWithQualifier( typeQualifier );
        // get all the BPMs
        bpms = getSelectedSequence().<BPM>getAllNodesWithQualifier( QualifierFactory.qualifierWithStatusAndType( true, "BPM" ) );
        // get all the wire scanners
        wss = getSelectedSequence().getAllNodesWithQualifier( QualifierFactory.qualifierWithStatusAndType( true, ProfileMonitor.PROFILE_MONITOR_TYPE ) );
        //System.out.println( wss );    // leftover debug output disabled
        // should create a new map for "set" <-> "readback" PV mapping
        configureReadbacks();
        // for on-line model
        try {
            modelScenario = Scenario.newScenarioFor( getSelectedSequence() );
        }
        catch ( ModelException exception ) {
            System.err.println( exception.getMessage() );
        }
        // setting up the default probe
        createDefaultProbe();
        setHasChanges(true);
    }
    else {
        // no sequence selected: drop the model and probes
        modelScenario = null;
        baseProbe = null;
        currentProbe = null;
    }
}
/** Resynchronize the on-line model scenario with the current live machine settings. */
public void buildOnlineModel() {
    try {
        // model.resetProbe();
        modelScenario.setSynchronizationMode( Scenario.SYNC_MODE_LIVE );
        modelScenario.resync();
    }
    catch ( Exception exception ) {
        System.err.println( exception.getMessage() );
    }
}
/**
 * Handle UI actions. For the "noiseSet" command, read the noise and static-error levels
 * from the dialog fields, rebuild the readback maps and hide the dialog.
 * @param ev the action event whose command determines the response
 */
@Override
public void actionPerformed(ActionEvent ev) {
    // constant-first equals avoids a NullPointerException when the event carries no command string
    if ("noiseSet".equals(ev.getActionCommand())) {
        quadNoise = df_quadNoise.getDoubleValue();
        dipoleNoise = df_bendNoise.getDoubleValue();
        correctorNoise = df_dipCorrNoise.getDoubleValue();
        bpmNoise = df_bpmNoise.getDoubleValue();
        solNoise = df_solNoise.getDoubleValue();
        rfAmpNoise = df_rfAmpNoise.getDoubleValue();
        rfPhaseNoise = df_rfPhaseNoise.getDoubleValue();
        quadStaticError = df_quadStatErr.getDoubleValue();
        dipoleStaticError = df_bendStatErr.getDoubleValue();
        correctorStaticError = df_dipCorrStatErr.getDoubleValue();
        bpmStaticError = df_bpmStatErr.getDoubleValue();
        solStaticError = df_solStatErr.getDoubleValue();
        rfAmpStaticError = df_rfAmpStatErr.getDoubleValue();
        rfPhaseStaticError = df_rfPhaseStatErr.getDoubleValue();
        setHasChanges(true);
        // rebuild the readback maps so the new noise/static-error levels take effect
        configureReadbacks();
        setNoise.setVisible(false);
    }
}
/** synchronize the readbacks with setpoints and synchronize with the online model */
private void syncOnlineModel() {
    if ( vaRunning ) {
        // add noise, populate "read-back" PVs
        putReadbackPVs();
        // re-sync lattice and run model
        buildOnlineModel();
        try {
            // reset the probe to its initial state before each run so successive runs are independent
            modelScenario.getProbe().reset();
            modelScenario.run();
            // put diagnostic node PVs
            putDiagPVs();
        }
        catch ( ModelException exception ) {
            System.err.println( exception.getMessage() );
        }
    }
}
/** Get a runnable that syncs the online model (wrapped so it can be scheduled on the sync timer) */
private Runnable getOnlineModelSynchronizer() {
    return new Runnable() {
        @Override
        public void run() {
            syncOnlineModel();
        }
    };
}
/** synchronize the readbacks with setpoints and synchronize with the online model */
private void syncPVLogger() {
    // push PV Logger snapshot values out only while the VA server is running
    if ( vaRunning ) {
        putSetPVsFromPVLogger();       // restore magnet setpoints from the logged machine state
        putReadbackPVsFromPVLogger();  // derive noisy readbacks from the logged values
        putDiagPVsFromPVLogger();      // restore BPM/diagnostic readings from the snapshot
    }
}
/** Get a runnable that syncs with the PV Logger (wrapped so it can be scheduled on a timer) */
private Runnable getPVLoggerSynchronizer() {
    return new Runnable() {
        @Override
        public void run() {
            syncPVLogger();
        }
    };
}
}
/** compare readback set records by their position within a sequence */
class ReadbackSetRecordPositionComparator implements Comparator<ReadbackSetRecord> {
    /** sequence within which the nodes are ordered */
    final AcceleratorSeq SEQUENCE;

    /**
     * Constructor
     * @param sequence the sequence relative to which node positions are measured
     */
    public ReadbackSetRecordPositionComparator( final AcceleratorSeq sequence ) {
        SEQUENCE = sequence;
    }

    /**
     * Compare the records based on location relative to the start of the sequence.
     * Null records sort before non-null records; two nulls compare equal.
     */
    @Override
    public int compare( final ReadbackSetRecord record1, final ReadbackSetRecord record2 ) {
        if ( record1 == null && record2 == null ) {
            return 0;
        }
        else if ( record1 == null ) {
            return -1;
        }
        else if ( record2 == null ) {
            return 1;
        }
        else {
            final double position1 = SEQUENCE.getPosition( record1.getNode() );
            final double position2 = SEQUENCE.getPosition( record2.getNode() );
            // Double.compare imposes a consistent total order (unlike the hand-rolled
            // ternary it replaces, which violated the Comparator contract for NaN)
            return Double.compare( position1, position2 );
        }
    }

    /** all comparators of this class are the same */
    @Override
    public boolean equals( final Object object ) {
        return object instanceof ReadbackSetRecordPositionComparator;
    }

    /** override hashCode() as required for consistency with equals() */
    @Override
    public int hashCode() {
        return 1; // constant since all comparators of this class are equivalent
    }
}
/** Displays the diagnostic plots: model orbit / BPM readings and beam envelope (sigma) versus position. */
class DiagPlot {
    /** panel showing beam displacement (model orbit and BPM averages) versus position */
    protected FunctionGraphsJPanel _beampositionplot;
    /** panel showing beam envelope (wire-scanner sigmas and model sigma-z) versus position */
    protected FunctionGraphsJPanel _sigamplot;
    protected BasicGraphData DataBeamx;
    protected BasicGraphData DataBeamy;
    protected BasicGraphData DataBPMx;
    protected BasicGraphData DataBPMy;
    protected BasicGraphData Datasigmaz;
    protected BasicGraphData DataWSx;
    protected BasicGraphData DataWSy;

    /**
     * Constructor: keeps references to the two panels and configures their axes,
     * legends and data series.
     */
    public DiagPlot(FunctionGraphsJPanel beampositionplot, FunctionGraphsJPanel sigamplot) {
        _beampositionplot=beampositionplot;
        _sigamplot=sigamplot;
        setupPlot(beampositionplot,sigamplot);
    }

    /** Update the model beam-displacement series with new position/x/y samples.
     *  NOTE(review): the declared exceptions are never thrown here — kept for caller compatibility. */
    public void showbeampositionplot(double[] p,double[] x, double[] y) throws ConnectionException, GetException {
        DataBeamx.updateValues(p, x);
        DataBeamy.updateValues(p, y);
    }

    /** Update the BPM average-position series with new position/x/y samples. */
    public void showbpmplot(double[] p,double[] x, double[] y) throws ConnectionException, GetException {
        DataBPMx.updateValues(p, x);
        DataBPMy.updateValues(p, y);
    }

    /** Update the longitudinal envelope (sigma-z) series. */
    public void showsigmazplot(double[] p,double[] sigmaz) throws ConnectionException, GetException {
        Datasigmaz.updateValues(p, sigmaz);
    }

    /** Update the wire-scanner sigma series with new position/x/y samples. */
    public void showsigmaplot(double[] wsp,double[] wsx, double[] wsy) throws ConnectionException, GetException {
        DataWSx.updateValues(wsp, wsx);
        DataWSy.updateValues(wsp, wsy);
    }

    /** Configure both panels and register all data series with them. */
    public void setupPlot(FunctionGraphsJPanel beampositionplot,FunctionGraphsJPanel sigamplot) {
        // the axis/format/legend setup is identical for both panels, so it is factored into configurePanel()
        configurePanel( beampositionplot, "BeamDisp_PLOT", "Beam displacement (mm)" );
        configurePanel( sigamplot, "Sigma_PLOT", "Beam Envelope(mm)" );
        DataBeamx=new BasicGraphData();
        DataBeamy=new BasicGraphData();
        DataBPMx=new BasicGraphData();
        DataBPMy=new BasicGraphData();
        DataWSx=new BasicGraphData();
        DataWSy=new BasicGraphData();
        Datasigmaz=new BasicGraphData();
        DataBeamx.setGraphProperty(_beampositionplot.getLegendKeyString(), "BeamxAvg");
        DataBeamy.setGraphProperty(_beampositionplot.getLegendKeyString(), "BeamyAvg");
        DataBPMx.setGraphProperty(_beampositionplot.getLegendKeyString(), "BPMxAvg");
        DataBPMy.setGraphProperty(_beampositionplot.getLegendKeyString(), "BPMyAvg");
        DataWSx.setGraphProperty(_sigamplot.getLegendKeyString(), "sigmax");
        DataWSy.setGraphProperty(_sigamplot.getLegendKeyString(), "sigmay");
        Datasigmaz.setGraphProperty(_sigamplot.getLegendKeyString(), "sigmaz");
        DataBeamx.setGraphColor(Color.blue);
        DataBeamy.setGraphColor(Color.orange);
        DataBPMx.setGraphColor(Color.RED);
        DataBPMy.setGraphColor(Color.BLACK);
        DataWSx.setGraphColor(Color.RED);
        DataWSy.setGraphColor(Color.BLACK);
        Datasigmaz.setGraphColor(Color.blue);
        _beampositionplot.addGraphData(DataBeamx);
        _beampositionplot.addGraphData(DataBeamy);
        _beampositionplot.addGraphData(DataBPMx);
        _beampositionplot.addGraphData(DataBPMy);
        _sigamplot.addGraphData(DataWSx);
        _sigamplot.addGraphData(DataWSy);
        _sigamplot.addGraphData(Datasigmaz);
    }

    /** Apply the shared name, axis-label, number-format and legend settings to a plot panel. */
    private static void configurePanel( final FunctionGraphsJPanel panel, final String name, final String yAxisLabel ) {
        panel.setName( name );
        panel.setAxisNameX("Position(m)");
        panel.setAxisNameY( yAxisLabel );
        panel.setNumberFormatX( new DecimalFormat( "0.00E0" ) );
        panel.setNumberFormatY( new DecimalFormat( "0.00E0" ) );
        // add legend support
        panel.setLegendPosition( FunctionGraphsJPanel.LEGEND_POSITION_ARBITRARY );
        panel.setLegendKeyString( "Legend" );
        panel.setLegendBackground( Color.lightGray );
        panel.setLegendColor( Color.black );
        panel.setLegendVisible( true );
    }
}
|
[app.virtualaccelerator] quad and bpm misalignments implemented
basic structure for misalignments implemented
shorter titles in error table window
OXAL-481
|
apps/virtualaccelerator/src/xal/app/virtualaccelerator/VADocument.java
|
[app.virtualaccelerator] quad and bpm misalignments implemented basic structure for misalignments implemented shorter titles in error table window OXAL-481
|
<ide><path>pps/virtualaccelerator/src/xal/app/virtualaccelerator/VADocument.java
<ide>
<ide> private DecimalField df_quadStatErr, df_bendStatErr, df_dipCorrStatErr, df_bpmStatErr, df_solStatErr, df_rfAmpStatErr, df_rfPhaseStatErr;
<ide>
<add> private DecimalField df_quadStatHorMisalign, df_bpmStatHorMisalign, df_solStatHorMisalign, df_rfCavStatHorMisalign;
<add> private DecimalField df_quadStatVerMisalign, df_bpmStatVerMisalign, df_solStatVerMisalign, df_rfCavStatVerMisalign;
<ide>
<ide> private double quadNoise = 0.0;
<ide>
<ide> private double rfAmpStaticError = 0.0;
<ide>
<ide> private double rfPhaseStaticError = 0.0;
<add>
<add> private double quadStatHorMisalign = 0.0;
<add> private double quadStatVerMisalign = 0.0;
<add>
<add> private double bpmStatHorMisalign = 0.0;
<add> private double bpmStatVerMisalign = 0.0;
<ide>
<ide> private JButton done = new JButton("OK");
<ide>
<ide> JPanel settingPanel = new JPanel();
<ide> JPanel noiseLevelPanel = new JPanel();
<ide> JPanel staticErrorPanel = new JPanel();
<add> JPanel staticHorMisalignPanel = new JPanel();
<add> JPanel staticVerMisalignPanel = new JPanel();
<ide>
<ide> // for noise %
<ide> noiseLevelPanel.setLayout(new GridLayout(8, 1));
<del> noiseLevelPanel.add(new JLabel("Noise Level for Device Type:"));
<add> noiseLevelPanel.add(new JLabel("Noise Level"));
<ide>
<ide> NumberFormat numberFormat;
<ide> numberFormat = NumberFormat.getNumberInstance();
<ide> df_quadNoise = new DecimalField( 0., 5, numberFormat );
<ide> quadNoisePanel.add(label1);
<ide> quadNoisePanel.add(df_quadNoise);
<del> quadNoisePanel.add(new JLabel(" % "));
<add> quadNoisePanel.add(new JLabel(" %"));
<ide> noiseLevelPanel.add(quadNoisePanel);
<ide>
<ide> JPanel bendNoisePanel = new JPanel();
<ide> df_bendNoise = new DecimalField( 0., 5, numberFormat );
<ide> bendNoisePanel.add(label2);
<ide> bendNoisePanel.add(df_bendNoise);
<del> bendNoisePanel.add(new JLabel(" % "));
<add> bendNoisePanel.add(new JLabel(" %"));
<ide> noiseLevelPanel.add(bendNoisePanel);
<ide>
<ide> JPanel dipCorrNoisePanel = new JPanel();
<ide> df_dipCorrNoise = new DecimalField( 0., 5, numberFormat );
<ide> dipCorrNoisePanel.add(new JLabel("Dipole Corr.: "));
<ide> dipCorrNoisePanel.add(df_dipCorrNoise);
<del> dipCorrNoisePanel.add(new JLabel(" % "));
<add> dipCorrNoisePanel.add(new JLabel(" %"));
<ide> noiseLevelPanel.add(dipCorrNoisePanel);
<ide>
<ide> JPanel solNoisePanel = new JPanel();
<ide> df_solNoise = new DecimalField( 0., 5, numberFormat );
<ide> solNoisePanel.add(new JLabel("Solenoid: "));
<ide> solNoisePanel.add(df_solNoise);
<del> solNoisePanel.add(new JLabel(" % "));
<add> solNoisePanel.add(new JLabel(" %"));
<ide> noiseLevelPanel.add(solNoisePanel);
<ide>
<ide> JPanel rfAmpNoisePanel = new JPanel();
<ide> df_rfAmpNoise = new DecimalField( 0., 5, numberFormat );
<ide> rfAmpNoisePanel.add(new JLabel("RF amp: "));
<ide> rfAmpNoisePanel.add(df_rfAmpNoise);
<del> rfAmpNoisePanel.add(new JLabel(" % "));
<add> rfAmpNoisePanel.add(new JLabel(" %"));
<ide> noiseLevelPanel.add(rfAmpNoisePanel);
<ide>
<ide> JPanel rfPhaseNoisePanel = new JPanel();
<ide> df_rfPhaseNoise = new DecimalField( 0., 5, numberFormat );
<ide> rfPhaseNoisePanel.add(new JLabel("RF phase: "));
<ide> rfPhaseNoisePanel.add(df_rfPhaseNoise);
<del> rfPhaseNoisePanel.add(new JLabel(" % "));
<add> rfPhaseNoisePanel.add(new JLabel(" %"));
<ide> noiseLevelPanel.add(rfPhaseNoisePanel);
<ide>
<ide> JPanel bpmNoisePanel = new JPanel();
<ide> df_bpmNoise = new DecimalField( 0., 5, numberFormat );
<ide> bpmNoisePanel.add(new JLabel("BPM: "));
<ide> bpmNoisePanel.add(df_bpmNoise);
<del> bpmNoisePanel.add(new JLabel("mm"));
<add> bpmNoisePanel.add(new JLabel(" mm"));
<ide> noiseLevelPanel.add(bpmNoisePanel);
<ide>
<ide> // for static errors
<ide> staticErrorPanel.setLayout(new GridLayout(8, 1));
<del> staticErrorPanel.add(new JLabel("Static error for Device Type:"));
<add> staticErrorPanel.add(new JLabel("Static error"));
<ide>
<ide> JPanel quadStatErrPanel = new JPanel();
<ide> quadStatErrPanel.setLayout(new GridLayout(1, 2));
<ide> df_quadStatErr = new DecimalField( 0., 5, numberFormat );
<ide> quadStatErrPanel.add(new JLabel("Quad: "));
<ide> quadStatErrPanel.add(df_quadStatErr);
<del> quadStatErrPanel.add(new JLabel(" T/m "));
<add> quadStatErrPanel.add(new JLabel(" T/m"));
<ide> staticErrorPanel.add(quadStatErrPanel);
<ide>
<ide> JPanel bendStatErrPanel = new JPanel();
<ide> df_bendStatErr = new DecimalField( 0., 5, numberFormat );
<ide> bendStatErrPanel.add(new JLabel("Bending Dipole: "));
<ide> bendStatErrPanel.add(df_bendStatErr);
<del> bendStatErrPanel.add(new JLabel(" T "));
<add> bendStatErrPanel.add(new JLabel(" T"));
<ide> staticErrorPanel.add(bendStatErrPanel);
<ide>
<ide> JPanel dipCorrStatErrPanel = new JPanel();
<ide> df_dipCorrStatErr = new DecimalField( 0., 5, numberFormat );
<ide> dipCorrStatErrPanel.add(new JLabel("Dipole Corr.: "));
<ide> dipCorrStatErrPanel.add(df_dipCorrStatErr);
<del> dipCorrStatErrPanel.add(new JLabel(" T "));
<add> dipCorrStatErrPanel.add(new JLabel(" T"));
<ide> staticErrorPanel.add(dipCorrStatErrPanel);
<ide>
<ide> JPanel solStatErrPanel = new JPanel();
<ide> df_solStatErr = new DecimalField( 0., 5, numberFormat );
<ide> solStatErrPanel.add(new JLabel("Solenoid: "));
<ide> solStatErrPanel.add(df_solStatErr);
<del> solStatErrPanel.add(new JLabel(" T "));
<add> solStatErrPanel.add(new JLabel(" T"));
<ide> staticErrorPanel.add(solStatErrPanel);
<ide>
<ide> JPanel rfAmpStatErrPanel = new JPanel();
<ide> df_rfAmpStatErr = new DecimalField( 0., 5, numberFormat );
<ide> rfAmpStatErrPanel.add(new JLabel("RF amp: "));
<ide> rfAmpStatErrPanel.add(df_rfAmpStatErr);
<del> rfAmpStatErrPanel.add(new JLabel(" kV "));
<add> rfAmpStatErrPanel.add(new JLabel(" kV"));
<ide> staticErrorPanel.add(rfAmpStatErrPanel);
<ide>
<ide> JPanel rfPhaseStatErrPanel = new JPanel();
<ide> df_rfPhaseStatErr = new DecimalField( 0., 5, numberFormat );
<ide> rfPhaseStatErrPanel.add(new JLabel("RF phase: "));
<ide> rfPhaseStatErrPanel.add(df_rfPhaseStatErr);
<del> rfPhaseStatErrPanel.add(new JLabel(" deg "));
<add> rfPhaseStatErrPanel.add(new JLabel(" deg"));
<ide> staticErrorPanel.add(rfPhaseStatErrPanel);
<ide>
<ide> JPanel bpmStatErrPanel = new JPanel();
<ide> df_bpmStatErr = new DecimalField( 0., 5, numberFormat );
<ide> bpmStatErrPanel.add(new JLabel("BPM: "));
<ide> bpmStatErrPanel.add(df_bpmStatErr);
<del> bpmStatErrPanel.add(new JLabel(" mm "));
<add> bpmStatErrPanel.add(new JLabel(" mm"));
<ide> staticErrorPanel.add(bpmStatErrPanel);
<add>
<add> // for horizontal misalignments (static)
<add> staticHorMisalignPanel.setLayout(new GridLayout(8, 1));
<add> staticHorMisalignPanel.add(new JLabel("Horizontal misalignments"));
<add>
<add> JPanel quadStatHorMisalignPanel = new JPanel();
<add> quadStatHorMisalignPanel.setLayout(new GridLayout(1, 2));
<add> df_quadStatHorMisalign = new DecimalField( 0., 5, numberFormat );
<add> quadStatHorMisalignPanel.add(new JLabel("Quad: "));
<add> quadStatHorMisalignPanel.add(df_quadStatHorMisalign);
<add> quadStatHorMisalignPanel.add(new JLabel(" mm"));
<add> staticHorMisalignPanel.add(quadStatHorMisalignPanel);
<add>
<add> JPanel bpmStatHorMisalignPanel = new JPanel();
<add> bpmStatHorMisalignPanel.setLayout(new GridLayout(1, 2));
<add> df_bpmStatHorMisalign = new DecimalField( 0., 5, numberFormat );
<add> bpmStatHorMisalignPanel.add(new JLabel("bpm: "));
<add> bpmStatHorMisalignPanel.add(df_bpmStatHorMisalign);
<add> bpmStatHorMisalignPanel.add(new JLabel(" mm"));
<add> staticHorMisalignPanel.add(bpmStatHorMisalignPanel);
<add>
<add> // for vertical misalignments (static)
<add> staticVerMisalignPanel.setLayout(new GridLayout(8, 1));
<add> staticVerMisalignPanel.add(new JLabel("Vertical misalignments"));
<add>
<add> JPanel quadStatVerMisalignPanel = new JPanel();
<add> quadStatVerMisalignPanel.setLayout(new GridLayout(1, 2));
<add> df_quadStatVerMisalign = new DecimalField( 0., 5, numberFormat );
<add> quadStatVerMisalignPanel.add(new JLabel("Quad: "));
<add> quadStatVerMisalignPanel.add(df_quadStatVerMisalign);
<add> quadStatVerMisalignPanel.add(new JLabel(" mm"));
<add> staticVerMisalignPanel.add(quadStatVerMisalignPanel);
<add>
<add> JPanel bpmStatVerMisalignPanel = new JPanel();
<add> bpmStatVerMisalignPanel.setLayout(new GridLayout(1, 2));
<add> df_bpmStatVerMisalign = new DecimalField( 0., 5, numberFormat );
<add> bpmStatVerMisalignPanel.add(new JLabel("bpm: "));
<add> bpmStatVerMisalignPanel.add(df_bpmStatVerMisalign);
<add> bpmStatVerMisalignPanel.add(new JLabel(" mm"));
<add> staticVerMisalignPanel.add(bpmStatVerMisalignPanel);
<ide>
<ide> // put everything together
<ide> setNoise.setBounds(300, 300, 900, 600);
<ide> settingPanel.setLayout(new BoxLayout(settingPanel, BoxLayout.X_AXIS));
<ide> settingPanel.add(noiseLevelPanel);
<ide> settingPanel.add(staticErrorPanel);
<add> settingPanel.add(staticHorMisalignPanel);
<add> settingPanel.add(staticVerMisalignPanel);
<ide> setNoise.getContentPane().setLayout(new BorderLayout());
<ide> setNoise.getContentPane().add(settingPanel, BorderLayout.CENTER);
<ide> setNoise.getContentPane().add(done, BorderLayout.SOUTH);
<ide> daStaticError.setValue("sol", solStaticError);
<ide> daStaticError.setValue("rfAmp", rfAmpStaticError);
<ide> daStaticError.setValue("rfPhase", rfPhaseStaticError);
<add>
<add> DataAdaptor daStaticHorMisalign = daLevel1.createChild("horizontalMisalignments");
<add> daStaticHorMisalign.setValue("quad", quadStatHorMisalign);
<add> daStaticHorMisalign.setValue("bpm", bpmStatHorMisalign);
<add>
<add> DataAdaptor daStaticVerMisalign = daLevel1.createChild("verticalMisalignments");
<add> daStaticVerMisalign.setValue("quad", quadStatVerMisalign);
<add> daStaticVerMisalign.setValue("bpm", bpmStatVerMisalign);
<ide> }
<ide>
<ide> daLevel1.setValue( "modelSyncPeriod", _modelSyncPeriod );
<ide> static final long serialVersionUID = 0;
<ide> @Override
<ide> public void actionPerformed(ActionEvent event) {
<add>
<ide> df_quadNoise.setValue(quadNoise);
<ide> df_bendNoise.setValue(dipoleNoise);
<ide> df_dipCorrNoise.setValue(correctorNoise);
<ide> df_solNoise.setValue(solNoise);
<ide> df_rfAmpNoise.setValue(rfAmpNoise);
<ide> df_rfPhaseNoise.setValue(rfPhaseNoise);
<add>
<ide> df_quadStatErr.setValue(quadStaticError);
<ide> df_bendStatErr.setValue(dipoleStaticError);
<ide> df_dipCorrStatErr.setValue(correctorStaticError);
<ide> df_solStatErr.setValue(solStaticError);
<ide> df_rfAmpStatErr.setValue(rfAmpStaticError);
<ide> df_rfPhaseStatErr.setValue(rfPhaseStaticError);
<add>
<add> df_quadStatHorMisalign.setValue(quadStatHorMisalign);
<add> df_quadStatVerMisalign.setValue(quadStatVerMisalign);
<add>
<add> df_bpmStatHorMisalign.setValue(bpmStatHorMisalign);
<add> df_bpmStatVerMisalign.setValue(bpmStatVerMisalign);
<ide> setNoise.setVisible(true);
<ide> }
<ide> };
<ide> rfPhaseStaticError = daStaticError.doubleValue("rfPhase");
<ide> }
<ide>
<add> DataAdaptor daStaticHorMisalign = da1.childAdaptor("horizontalMisalignments");
<add> if (daStaticHorMisalign != null) {
<add> quadStatHorMisalign = daStaticHorMisalign.doubleValue("quad");
<add> bpmStatHorMisalign = daStaticHorMisalign.doubleValue("bpm");
<add> }
<add>
<add> DataAdaptor daStaticVerMisalign = da1.childAdaptor("verticalMisalignments");
<add> if (daStaticVerMisalign != null) {
<add> quadStatVerMisalign = daStaticVerMisalign.doubleValue("quad");
<add> bpmStatVerMisalign = daStaticVerMisalign.doubleValue("bpm");
<add> }
<add>
<ide> temp = da2a.childAdaptors("seq");
<ide> for ( final DataAdaptor da : temp ) {
<ide> seqs.add( getAccelerator().getSequence( da.stringValue("name") ) );
<ide> // so we should be able to compute a "fixed orbit" in any context
<ide> //
<ide> // CKA Nov 25, 2013
<del>// if ( probeState instanceof ICoordinateState ) {
<del>// final PhaseVector coordinates = ((ICoordinateState)probeState).getFixedOrbit();
<add> // if ( probeState instanceof ICoordinateState ) {
<add> // final PhaseVector coordinates = ((ICoordinateState)probeState).getFixedOrbit();
<ide> final PhaseVector coordinates = cmpCalcEngine.computeFixedOrbit(probeState);
<del>// final PhaseVector coordinates = cmpCalcEngine.computeCoordinatePosition(probeState);
<add> // final PhaseVector coordinates = cmpCalcEngine.computeCoordinatePosition(probeState);
<ide>
<ide> // For SNS Ring BPM system, we only measure the signal with respect to the center of the beam pipe.
<ide>
<ide> // TO-DO: the turn by turn arrays should really be generated from betatron motion rather than random data about the nominal
<del> final double[] xTBT = NoiseGenerator.noisyArrayForNominal( coordinates.getx() * 1000.0, DEFAULT_BPM_WAVEFORM_SIZE, DEFAULT_BPM_WAVEFORM_DATA_SIZE, bpmNoise, bpmStaticError );
<add> final double[] xTBT = NoiseGenerator.noisyArrayForNominal( coordinates.getx() * 1000.0 - bpm.getXOffset(), DEFAULT_BPM_WAVEFORM_SIZE, DEFAULT_BPM_WAVEFORM_DATA_SIZE, bpmNoise, bpmStaticError );
<ide> final double xAvg = NoiseGenerator.getAverage( xTBT, DEFAULT_BPM_WAVEFORM_DATA_SIZE );
<ide>
<del> final double[] yTBT = NoiseGenerator.noisyArrayForNominal( coordinates.gety() * 1000.0, DEFAULT_BPM_WAVEFORM_SIZE, DEFAULT_BPM_WAVEFORM_DATA_SIZE, bpmNoise, bpmStaticError );
<add> final double[] yTBT = NoiseGenerator.noisyArrayForNominal( coordinates.gety() * 1000.0 - bpm.getYOffset(), DEFAULT_BPM_WAVEFORM_SIZE, DEFAULT_BPM_WAVEFORM_DATA_SIZE, bpmNoise, bpmStaticError );
<ide> final double yAvg = NoiseGenerator.getAverage( yTBT, DEFAULT_BPM_WAVEFORM_DATA_SIZE );
<ide>
<ide> bpmXAvgChannel.putValCallback( xAvg, this );
<ide> if ( element.isKindOf( VDipoleCorr.s_strType ) ) staticError=correctorStaticError;
<ide> if ( element.isKindOf( Solenoid.s_strType ) ) staticError=solStaticError;
<ide> return getStaticError(staticError);
<add> }
<add> // Used to set horizontal misalignments in misalignElements()
<add> private double getStaticHorizontalMisalignmentForElement(AcceleratorNode element) {
<add> double horizontalMisalignment=0.0;
<add> if ( element.isKindOf( Quadrupole.s_strType ) ) horizontalMisalignment = quadStatHorMisalign;
<add> if ( element.isKindOf( BPM.s_strType ) ) horizontalMisalignment = bpmStatHorMisalign;
<add> return getStaticError(horizontalMisalignment);
<add> }
<add> // Used to set vertical misalignments in misalignElements()
<add> private double getStaticVerticalMisalignmentForElement(AcceleratorNode element) {
<add> double verticalMisalignment=0.0;
<add> if ( element.isKindOf( Quadrupole.s_strType ) ) verticalMisalignment = quadStatVerMisalign;
<add> if ( element.isKindOf( BPM.s_strType ) ) verticalMisalignment = bpmStatVerMisalign;
<add> return getStaticError(verticalMisalignment);
<ide> }
<ide>
<ide> /** create the map between the "readback" and "set" PVs */
<ide> solStaticError = df_solStatErr.getDoubleValue();
<ide> rfAmpStaticError = df_rfAmpStatErr.getDoubleValue();
<ide> rfPhaseStaticError = df_rfPhaseStatErr.getDoubleValue();
<add>
<ide> setHasChanges(true);
<ide>
<ide> /**add below*/
<ide> configureReadbacks();
<add> misalignElements();
<ide> setNoise.setVisible(false);
<add> }
<add> }
<add>
<add> private void misalignElements() {
<add>
<add> quadStatHorMisalign = df_quadStatHorMisalign.getDoubleValue();
<add> quadStatVerMisalign = df_quadStatVerMisalign.getDoubleValue();
<add>
<add> bpmStatHorMisalign = df_bpmStatHorMisalign.getDoubleValue();
<add> bpmStatVerMisalign = df_bpmStatVerMisalign.getDoubleValue();
<add>
<add> for ( final AcceleratorNode node : getSelectedSequence().getAllNodes() ) {
<add> node.setXOffset(getStaticHorizontalMisalignmentForElement(node));
<add> node.setYOffset(getStaticVerticalMisalignmentForElement(node));
<ide> }
<ide> }
<ide>
|
|
Java
|
apache-2.0
|
7727a65bda048aa4180f9c84f10e0c9a2c6fd925
| 0 |
eFaps/eFaps-Kernel-Install,eFaps/eFaps-Kernel-Install
|
/*
* Copyright 2003 - 2020 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.efaps.esjp.common.history.status.listener;
import org.efaps.admin.datamodel.attributetype.IStatusChangeListener;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.db.Delete;
import org.efaps.db.Insert;
import org.efaps.db.Instance;
import org.efaps.eql.EQL;
import org.efaps.eql2.StmtFlag;
import org.efaps.esjp.ci.CICommon;
import org.efaps.util.EFapsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@EFapsUUID("b6ce7392-4771-4ba1-8859-cc299a0051e2")
@EFapsApplication("eFaps-Kernel")
public abstract class StatusUpdateListener_Base
    implements IStatusChangeListener
{

    /** Logger for this class (registered under the concrete subclass). */
    private static final Logger LOG = LoggerFactory.getLogger(StatusUpdateListener.class);

    /** {@inheritDoc} */
    @Override
    public int getWeight()
    {
        return 0;
    }

    /**
     * Records a history entry when a status is set on insert.
     *
     * @param _instance instance whose status was set
     * @param _statusId id of the new status
     * @throws EFapsException on error
     */
    @Override
    public void onInsert(final Instance _instance, final Long _statusId)
        throws EFapsException
    {
        registerUpdate(_instance, _statusId);
    }

    /**
     * Records a history entry when a status is changed on update.
     *
     * @param _instance instance whose status was changed
     * @param _statusId id of the new status
     * @throws EFapsException on error
     */
    @Override
    public void onUpdate(final Instance _instance, final Long _statusId)
        throws EFapsException
    {
        registerUpdate(_instance, _statusId);
    }

    /**
     * Inserts a HistoryStatus record linking the instance's general id to the
     * new status. Instances without a valid general id are only logged.
     *
     * @param _instance instance whose status changed
     * @param _statusId id of the new status
     * @throws EFapsException on error
     */
    protected void registerUpdate(final Instance _instance, final Long _statusId)
        throws EFapsException
    {
        // Guard clause: only general instances with a positive general id can be linked.
        if (!_instance.getType().isGeneralInstance() || _instance.getGeneralId() <= 0) {
            LOG.warn("Could not register Status update due to not being a General Instance: {}, StatusID: {}",
                            _instance, _statusId);
            return;
        }
        final Insert historyInsert = new Insert(CICommon.HistoryStatus);
        historyInsert.add(CICommon.HistoryStatus.GeneralInstanceLink, _instance.getGeneralId());
        historyInsert.add(CICommon.HistoryStatus.StatusLink, _statusId);
        historyInsert.executeWithoutTrigger();
    }

    /**
     * Removes all HistoryStatus records belonging to the deleted instance.
     *
     * @param _instance instance being deleted
     * @throws EFapsException on error
     */
    @Override
    public void onDelete(final Instance _instance)
        throws EFapsException
    {
        if (!_instance.getType().isGeneralInstance()) {
            return;
        }
        // Query with triggers off to avoid cascading listener activity.
        final var evaluator = EQL.builder()
                        .with(StmtFlag.TRIGGEROFF)
                        .print()
                        .query(CICommon.HistoryStatus)
                        .where()
                        .attribute(CICommon.HistoryStatus.GeneralInstanceLink).eq(_instance.getGeneralId())
                        .select().instance()
                        .evaluate();
        while (evaluator.next()) {
            new Delete((Instance) evaluator.get(1)).executeWithoutTrigger();
        }
    }
}
|
src/main/efaps/ESJP/org/efaps/esjp/common/history/status/listener/StatusUpdateListener_Base.java
|
/*
* Copyright 2003 - 2020 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.efaps.esjp.common.history.status.listener;
import org.efaps.admin.datamodel.attributetype.IStatusChangeListener;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.db.Delete;
import org.efaps.db.Insert;
import org.efaps.db.Instance;
import org.efaps.eql.EQL;
import org.efaps.eql2.StmtFlag;
import org.efaps.esjp.ci.CICommon;
import org.efaps.util.EFapsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@EFapsUUID("b6ce7392-4771-4ba1-8859-cc299a0051e2")
@EFapsApplication("eFaps-Kernel")
public abstract class StatusUpdateListener_Base
    implements IStatusChangeListener
{

    /** Logger for this class (registered under the concrete subclass). */
    private static final Logger LOG = LoggerFactory.getLogger(StatusUpdateListener.class);

    /** {@inheritDoc} */
    @Override
    public int getWeight()
    {
        return 0;
    }

    /**
     * Records a history entry when a status is set on insert.
     *
     * @param _instance instance whose status was set
     * @param _statusId id of the new status
     * @throws EFapsException on error
     */
    @Override
    public void onInsert(final Instance _instance, final Long _statusId)
        throws EFapsException
    {
        registerUpdate(_instance, _statusId);
    }

    /**
     * Records a history entry when a status is changed on update.
     *
     * @param _instance instance whose status was changed
     * @param _statusId id of the new status
     * @throws EFapsException on error
     */
    @Override
    public void onUpdate(final Instance _instance, final Long _statusId)
        throws EFapsException
    {
        registerUpdate(_instance, _statusId);
    }

    /**
     * Inserts a HistoryStatus record linking the instance's general id to the
     * new status.
     *
     * @param _instance instance whose status changed
     * @param _statusId id of the new status
     * @throws EFapsException on error
     */
    protected void registerUpdate(final Instance _instance, final Long _statusId)
        throws EFapsException
    {
        // Fix: also require a positive general id, otherwise a HistoryStatus row
        // would be inserted with GeneralInstanceLink 0.
        if (_instance.getType().isGeneralInstance() && _instance.getGeneralId() > 0) {
            final Insert insert = new Insert(CICommon.HistoryStatus);
            insert.add(CICommon.HistoryStatus.GeneralInstanceLink, _instance.getGeneralId());
            insert.add(CICommon.HistoryStatus.StatusLink, _statusId);
            insert.executeWithoutTrigger();
        } else {
            LOG.warn("Could not register Status update due to not being a General Instance: {}, StatusID: {}",
                            _instance, _statusId);
        }
    }

    /**
     * Removes all HistoryStatus records belonging to the deleted instance.
     *
     * @param _instance instance being deleted
     * @throws EFapsException on error
     */
    @Override
    public void onDelete(final Instance _instance)
        throws EFapsException
    {
        if (_instance.getType().isGeneralInstance()) {
            // Query with triggers off to avoid cascading listener activity.
            final var eval = EQL.builder()
                            .with(StmtFlag.TRIGGEROFF)
                            .print()
                            .query(CICommon.HistoryStatus)
                            .where()
                            .attribute(CICommon.HistoryStatus.GeneralInstanceLink).eq(_instance.getGeneralId())
                            .select().instance()
                            .evaluate();
            while (eval.next()) {
                new Delete((Instance) eval.get(1)).executeWithoutTrigger();
            }
        }
    }
}
|
Add check for greater than 0
|
src/main/efaps/ESJP/org/efaps/esjp/common/history/status/listener/StatusUpdateListener_Base.java
|
Add check for greater than 0
|
<ide><path>rc/main/efaps/ESJP/org/efaps/esjp/common/history/status/listener/StatusUpdateListener_Base.java
<ide> protected void registerUpdate(final Instance _instance, final Long _statusId)
<ide> throws EFapsException
<ide> {
<del> if (_instance.getType().isGeneralInstance()) {
<add> if (_instance.getType().isGeneralInstance() && _instance.getGeneralId() > 0) {
<ide> final Insert insert = new Insert(CICommon.HistoryStatus);
<ide> insert.add(CICommon.HistoryStatus.GeneralInstanceLink, _instance.getGeneralId());
<ide> insert.add(CICommon.HistoryStatus.StatusLink, _statusId);
|
|
Java
|
apache-2.0
|
1b4426d251f88e489bfcc3279330bb8d6f3ac285
| 0 |
mgodave/barge,mgodave/barge,mgodave/barge
|
/**
* Copyright 2013 David Rusek <dave dot rusek at gmail dot com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotninjas.barge.log;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.Iterables;
import com.google.common.eventbus.EventBus;
import com.google.inject.Inject;
import com.google.protobuf.ByteString;
import journal.io.api.Journal;
import journal.io.api.Location;
import org.robotninjas.barge.Replica;
import org.robotninjas.barge.annotations.ClusterMembers;
import org.robotninjas.barge.annotations.LocalReplicaInfo;
import org.robotninjas.barge.proto.ClientProto;
import org.robotninjas.barge.proto.LogProto;
import org.robotninjas.barge.proto.RaftEntry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.NotThreadSafe;
import java.nio.ByteBuffer;
import java.util.*;
import static com.google.common.base.Preconditions.*;
import static com.google.common.base.Throwables.propagate;
import static com.google.common.collect.Lists.newArrayList;
import static journal.io.api.Journal.WriteType;
import static org.robotninjas.barge.log.DefaultRaftLog.LoadFunction.loadFromCache;
import static org.robotninjas.barge.proto.RaftEntry.Entry;
@NotThreadSafe
class DefaultRaftLog implements RaftLog {
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultRaftLog.class);
private final Journal journal;
private final LoadingCache<Long, Entry> entryCache;
private final SortedMap<Long, EntryMeta> entryIndex = new TreeMap<Long, EntryMeta>();
private final Replica local;
private final List<Replica> members;
private final EventBus eventBus;
private volatile long lastLogIndex = -1;
private volatile long term = 0;
private volatile Optional<Replica> votedFor = Optional.absent();
private volatile long commitIndex = 0;
private volatile long lastApplied = 0;
@Inject
DefaultRaftLog(@Nonnull Journal journal,
@LocalReplicaInfo @Nonnull Replica local,
@ClusterMembers @Nonnull List<Replica> members,
@Nonnull EventBus eventBus) {
this.local = checkNotNull(local);
this.journal = checkNotNull(journal);
this.members = members;
this.eventBus = eventBus;
EntryCacheLoader loader = new EntryCacheLoader(entryIndex, journal);
this.entryCache = CacheBuilder.newBuilder()
.recordStats()
.maximumSize(100000)
.build(loader);
}
public void init() {
Entry entry = Entry.newBuilder()
.setTerm(-1L)
.setCommand(ByteString.EMPTY)
.build();
storeEntry(-1L, entry);
}
private void storeEntry(long index, @Nonnull Entry entry) {
try {
LogProto.JournalEntry journalEntry =
LogProto.JournalEntry.newBuilder()
.setAppend(LogProto.Append.newBuilder()
.setIndex(index)
.setEntry(entry))
.build();
Location loc = journal.write(journalEntry.toByteArray(), WriteType.SYNC);
EntryMeta meta = new EntryMeta(index, entry.getTerm(), loc);
this.entryIndex.put(index, meta);
this.entryCache.put(index, entry);
} catch (Exception e) {
throw propagate(e);
}
}
public long append(@Nonnull ClientProto.CommitOperation operation, long term) {
checkState(entryIndex.containsKey(lastLogIndex));
checkState(!entryIndex.containsKey(lastLogIndex + 1));
long index = ++lastLogIndex;
LOGGER.debug("leader append: index {}, term {}", index, term);
Entry entry =
Entry.newBuilder()
.setCommand(operation.getOp())
.setTerm(term)
.build();
storeEntry(index, entry);
return index;
}
public boolean append(long prevLogIndex, long prevLogTerm, @Nonnull List<Entry> entries) {
EntryMeta previousEntry = entryIndex.get(prevLogIndex);
if ((previousEntry == null) || (previousEntry.term != prevLogTerm)) {
return false;
}
this.entryIndex.tailMap(prevLogIndex + 1).clear();
lastLogIndex = prevLogIndex;
for (Entry entry : entries) {
storeEntry(++lastLogIndex, entry);
}
return true;
}
@Nonnull
public GetEntriesResult getEntry(@Nonnegative final long index) {
checkArgument(index >= 0);
EntryMeta previousEntry = this.entryIndex.get(index - 1);
Entry entry = entryCache.getIfPresent(index);
List<Entry> list = entry == null ? Collections.<Entry>emptyList() : newArrayList(entry);
return new GetEntriesResult(previousEntry.term, index - 1, list);
}
@Nonnull
public GetEntriesResult getEntriesFrom(@Nonnegative long beginningIndex, @Nonnegative int max) {
checkArgument(beginningIndex >= 0);
Set<Long> indices = entryIndex.tailMap(beginningIndex).keySet();
Iterable<Entry> values = Iterables.transform(Iterables.limit(indices, max), loadFromCache(entryCache));
Entry previousEntry = entryCache.getIfPresent(beginningIndex - 1);
return new GetEntriesResult(previousEntry.getTerm(), beginningIndex - 1, newArrayList(values));
}
@Nonnull
public GetEntriesResult getEntriesFrom(@Nonnegative final long beginningIndex) {
checkArgument(beginningIndex >= 0);
Set<Long> indices = entryIndex.tailMap(beginningIndex).keySet();
Iterable<Entry> values = Iterables.transform(indices, loadFromCache(entryCache));
Entry previousEntry = entryCache.getIfPresent(beginningIndex - 1);
return new GetEntriesResult(previousEntry.getTerm(), beginningIndex - 1, newArrayList(values));
}
private void fireComitted() {
try {
for (long i = lastApplied; i <= commitIndex; ++i, ++lastApplied) {
byte[] rawCommand = entryCache.get(i).getCommand().toByteArray();
ByteBuffer operation = ByteBuffer.wrap(rawCommand).asReadOnlyBuffer();
eventBus.post(new ComittedEvent(operation));
}
} catch (Exception e) {
throw propagate(e);
}
}
public long lastLogIndex() {
return lastLogIndex;
}
public long lastLogTerm() {
return entryIndex.get(lastLogIndex()).term;
}
public long commitIndex() {
return commitIndex;
}
public void commitIndex(long index) {
this.commitIndex = index;
fireComitted();
}
@Nonnull
@Override
public List<Replica> members() {
return Collections.unmodifiableList(members);
}
public long term() {
return term;
}
public void term(@Nonnegative long term) {
checkArgument(term >= 0);
MDC.put("term", Long.toString(term));
LOGGER.debug("New term {}", term);
this.term = term;
}
@Nonnull
public Optional<Replica> votedFor() {
return votedFor;
}
public void votedFor(@Nonnull Optional<Replica> vote) {
LOGGER.debug("Voting for {}", vote.orNull());
this.votedFor = checkNotNull(vote);
}
@Nonnull
public Replica self() {
return local;
}
@Immutable
static final class EntryMeta {
private final long index;
private final long term;
private final Location location;
EntryMeta(long index, long term, @Nonnull Location location) {
this.index = index;
this.term = term;
this.location = location;
}
}
@Immutable
static final class EntryCacheLoader extends CacheLoader<Long, Entry> {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final Map<Long, EntryMeta> index;
private final Journal journal;
EntryCacheLoader(@Nonnull Map<Long, EntryMeta> index, @Nonnull Journal journal) {
this.index = checkNotNull(index);
this.journal = checkNotNull(journal);
}
@Override
public Entry load(@Nonnull Long key) throws Exception {
checkNotNull(key);
try {
logger.debug("Loading {}", key);
EntryMeta meta = index.get(key);
Location loc = meta.location;
byte[] data = journal.read(loc, Journal.ReadType.ASYNC);
LogProto.JournalEntry journalEntry = LogProto.JournalEntry.parseFrom(data);
if (!journalEntry.hasAppend()) {
throw new IllegalStateException("Journal entry does not contain Append");
}
return journalEntry.getAppend().getEntry();
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
}
/**
 * Adapter exposing the entry cache as a Guava {@link Function} from log index
 * to {@link RaftEntry.Entry}, for use with Iterables.transform.
 */
@Immutable
static final class LoadFunction implements Function<Long, RaftEntry.Entry> {
private final LoadingCache<Long, Entry> cache;
private LoadFunction(LoadingCache<Long, Entry> cache) {
this.cache = cache;
}
// getUnchecked wraps any loader exception in an UncheckedExecutionException.
@Nullable
@Override
public Entry apply(@Nullable Long input) {
checkNotNull(input);
return cache.getUnchecked(input);
}
/** Factory: returns a function that resolves indices through {@code cache}. */
@Nonnull
public static Function<Long, Entry> loadFromCache(@Nonnull LoadingCache<Long, Entry> cache) {
return new LoadFunction(cache);
}
}
}
|
barge-core/src/main/java/org/robotninjas/barge/log/DefaultRaftLog.java
|
/**
* Copyright 2013 David Rusek <dave dot rusek at gmail dot com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robotninjas.barge.log;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.Iterables;
import com.google.common.eventbus.EventBus;
import com.google.inject.Inject;
import com.google.protobuf.ByteString;
import journal.io.api.Journal;
import journal.io.api.Location;
import org.robotninjas.barge.Replica;
import org.robotninjas.barge.annotations.ClusterMembers;
import org.robotninjas.barge.annotations.LocalReplicaInfo;
import org.robotninjas.barge.proto.ClientProto;
import org.robotninjas.barge.proto.LogProto;
import org.robotninjas.barge.proto.RaftEntry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.NotThreadSafe;
import java.nio.ByteBuffer;
import java.util.*;
import static com.google.common.base.Preconditions.*;
import static com.google.common.base.Throwables.propagate;
import static com.google.common.collect.Lists.newArrayList;
import static journal.io.api.Journal.WriteType;
import static org.robotninjas.barge.log.DefaultRaftLog.LoadFunction.loadFromCache;
import static org.robotninjas.barge.proto.RaftEntry.Entry;
/**
 * Journal-backed implementation of {@link RaftLog}. Entries are written
 * synchronously to a journal.io {@link Journal}; an in-memory index maps log
 * index -> (term, journal location) and a Guava cache avoids re-reading hot
 * entries from disk. Not thread-safe: callers must serialize access.
 */
@NotThreadSafe
class DefaultRaftLog implements RaftLog {

  private static final Logger LOGGER = LoggerFactory.getLogger(DefaultRaftLog.class);

  private final Journal journal;
  private final LoadingCache<Long, Entry> entryCache;
  private final SortedMap<Long, EntryMeta> entryIndex = new TreeMap<Long, EntryMeta>();
  private final Replica local;
  private final List<Replica> members;
  private final EventBus eventBus;
  // Raft volatile/persistent state. Index 0 is a sentinel entry written by init().
  private volatile long lastLogIndex = 0;
  private volatile long term = 1;
  private volatile Optional<Replica> votedFor = Optional.absent();
  private volatile long commitIndex = 0;
  private volatile long lastApplied = 0;

  @Inject
  DefaultRaftLog(@Nonnull Journal journal,
                 @LocalReplicaInfo @Nonnull Replica local,
                 @ClusterMembers @Nonnull List<Replica> members,
                 @Nonnull EventBus eventBus) {
    this.local = checkNotNull(local);
    this.journal = checkNotNull(journal);
    this.members = members;
    this.eventBus = eventBus;
    EntryCacheLoader loader = new EntryCacheLoader(entryIndex, journal);
    this.entryCache = CacheBuilder.newBuilder()
      .recordStats()
      .maximumSize(100000)
      .build(loader);
  }

  /** Seeds the log with a sentinel entry at index 0, term 0. */
  public void init() {
    Entry entry = Entry.newBuilder()
      .setTerm(0)
      .setCommand(ByteString.EMPTY)
      .build();
    storeEntry(0, entry);
  }

  /**
   * Writes one entry synchronously to the journal, then records it in the
   * in-memory index and primes the cache. Any failure is rethrown unchecked.
   */
  private void storeEntry(long index, @Nonnull Entry entry) {
    try {
      LogProto.JournalEntry journalEntry =
        LogProto.JournalEntry.newBuilder()
          .setAppend(LogProto.Append.newBuilder()
            .setIndex(index)
            .setEntry(entry))
          .build();
      Location loc = journal.write(journalEntry.toByteArray(), WriteType.SYNC);
      EntryMeta meta = new EntryMeta(index, entry.getTerm(), loc);
      this.entryIndex.put(index, meta);
      this.entryCache.put(index, entry);
    } catch (Exception e) {
      throw propagate(e);
    }
  }

  /**
   * Leader-side append: stores the client operation as the next log entry.
   *
   * @return the index assigned to the new entry
   */
  public long append(@Nonnull ClientProto.CommitOperation operation, long term) {
    checkState(entryIndex.containsKey(lastLogIndex));
    checkState(!entryIndex.containsKey(lastLogIndex + 1));
    long index = ++lastLogIndex;
    LOGGER.debug("leader append: index {}, term {}", index, term);
    Entry entry =
      Entry.newBuilder()
        .setCommand(operation.getOp())
        .setTerm(term)
        .build();
    storeEntry(index, entry);
    return index;
  }

  /**
   * Follower-side append (AppendEntries): rejects the batch if the previous
   * entry is missing or its term conflicts; otherwise truncates any conflicting
   * suffix and appends the new entries.
   *
   * @return false on a consistency-check failure, true on success
   */
  public boolean append(long prevLogIndex, long prevLogTerm, @Nonnull List<Entry> entries) {
    EntryMeta previousEntry = entryIndex.get(prevLogIndex);
    if ((previousEntry == null) || (previousEntry.term != prevLogTerm)) {
      return false;
    }
    // Drop any entries after prevLogIndex — they conflict with the leader's log.
    this.entryIndex.tailMap(prevLogIndex + 1).clear();
    lastLogIndex = prevLogIndex;
    for (Entry entry : entries) {
      storeEntry(++lastLogIndex, entry);
    }
    return true;
  }

  /**
   * Returns the entry at {@code index} (if cached) plus the preceding entry's
   * term/index for the AppendEntries consistency check.
   * NOTE(review): entryIndex.get(index - 1) is dereferenced without a null
   * check — presumably index-1 always exists because of the init() sentinel;
   * confirm.
   */
  @Nonnull
  public GetEntriesResult getEntry(@Nonnegative final long index) {
    checkArgument(index > 0);
    EntryMeta previousEntry = this.entryIndex.get(index - 1);
    Entry entry = entryCache.getIfPresent(index);
    List<Entry> list = entry == null ? Collections.<Entry>emptyList() : newArrayList(entry);
    return new GetEntriesResult(previousEntry.term, index - 1, list);
  }

  /**
   * Returns at most {@code max} entries starting at {@code beginningIndex}.
   * NOTE(review): getIfPresent(beginningIndex - 1) may return null if the
   * previous entry was evicted from the cache, which would NPE below — confirm.
   */
  @Nonnull
  public GetEntriesResult getEntriesFrom(@Nonnegative long beginningIndex, @Nonnegative int max) {
    checkArgument(beginningIndex > 0);
    Set<Long> indices = entryIndex.tailMap(beginningIndex).keySet();
    Iterable<Entry> values = Iterables.transform(Iterables.limit(indices, max), loadFromCache(entryCache));
    Entry previousEntry = entryCache.getIfPresent(beginningIndex - 1);
    return new GetEntriesResult(previousEntry.getTerm(), beginningIndex - 1, newArrayList(values));
  }

  /** Returns all entries from {@code beginningIndex} to the end of the log. */
  @Nonnull
  public GetEntriesResult getEntriesFrom(@Nonnegative final long beginningIndex) {
    checkArgument(beginningIndex > 0);
    Set<Long> indices = entryIndex.tailMap(beginningIndex).keySet();
    Iterable<Entry> values = Iterables.transform(indices, loadFromCache(entryCache));
    Entry previousEntry = entryCache.getIfPresent(beginningIndex - 1);
    return new GetEntriesResult(previousEntry.getTerm(), beginningIndex - 1, newArrayList(values));
  }

  /**
   * Posts a ComittedEvent for every entry between lastApplied and commitIndex,
   * advancing lastApplied as it goes. Commands are exposed as read-only buffers.
   */
  private void fireComitted() {
    try {
      for (long i = lastApplied; i <= commitIndex; ++i, ++lastApplied) {
        byte[] rawCommand = entryCache.get(i).getCommand().toByteArray();
        ByteBuffer operation = ByteBuffer.wrap(rawCommand).asReadOnlyBuffer();
        eventBus.post(new ComittedEvent(operation));
      }
    } catch (Exception e) {
      throw propagate(e);
    }
  }

  /** Returns the index of the most recently appended entry. */
  public long lastLogIndex() {
    return lastLogIndex;
  }

  /** Returns the term of the last entry (relies on the index being populated). */
  public long lastLogTerm() {
    return entryIndex.get(lastLogIndex()).term;
  }

  /** Returns the highest index known to be committed. */
  public long commitIndex() {
    return commitIndex;
  }

  /** Advances the commit index and fires committed events for new entries. */
  public void commitIndex(long index) {
    this.commitIndex = index;
    fireComitted();
  }

  /** Returns the cluster membership as an unmodifiable view. */
  @Nonnull
  @Override
  public List<Replica> members() {
    return Collections.unmodifiableList(members);
  }

  /** Returns the current Raft term. */
  public long term() {
    return term;
  }

  /** Updates the current term and records it in the logging MDC. */
  public void term(@Nonnegative long term) {
    checkArgument(term > 0);
    MDC.put("term", Long.toString(term));
    LOGGER.debug("New term {}", term);
    this.term = term;
  }

  /** Returns the replica voted for in the current term, if any. */
  @Nonnull
  public Optional<Replica> votedFor() {
    return votedFor;
  }

  /** Records the vote for the current term. */
  public void votedFor(@Nonnull Optional<Replica> vote) {
    LOGGER.debug("Voting for {}", vote.orNull());
    this.votedFor = checkNotNull(vote);
  }

  /** Returns the replica representing this local node. */
  @Nonnull
  public Replica self() {
    return local;
  }

  /** Immutable index record: log index, term, and journal location of an entry. */
  @Immutable
  static final class EntryMeta {
    private final long index;
    private final long term;
    private final Location location;
    EntryMeta(long index, long term, @Nonnull Location location) {
      this.index = index;
      this.term = term;
      this.location = location;
    }
  }

  /**
   * Cache loader that re-reads an entry from the journal using the location
   * recorded in the index map.
   */
  @Immutable
  static final class EntryCacheLoader extends CacheLoader<Long, Entry> {
    private final Logger logger = LoggerFactory.getLogger(getClass());
    private final Map<Long, EntryMeta> index;
    private final Journal journal;
    EntryCacheLoader(@Nonnull Map<Long, EntryMeta> index, @Nonnull Journal journal) {
      this.index = checkNotNull(index);
      this.journal = checkNotNull(journal);
    }
    @Override
    public Entry load(@Nonnull Long key) throws Exception {
      checkNotNull(key);
      try {
        logger.debug("Loading {}", key);
        EntryMeta meta = index.get(key);
        Location loc = meta.location;
        byte[] data = journal.read(loc, Journal.ReadType.ASYNC);
        LogProto.JournalEntry journalEntry = LogProto.JournalEntry.parseFrom(data);
        if (!journalEntry.hasAppend()) {
          throw new IllegalStateException("Journal entry does not contain Append");
        }
        return journalEntry.getAppend().getEntry();
      } catch (Exception e) {
        // Log with stack trace via SLF4J instead of printStackTrace(), then rethrow.
        logger.error("Failed to load log entry {}", key, e);
        throw e;
      }
    }
  }

  /** Adapter exposing the entry cache as a Function for Iterables.transform. */
  @Immutable
  static final class LoadFunction implements Function<Long, RaftEntry.Entry> {
    private final LoadingCache<Long, Entry> cache;
    private LoadFunction(LoadingCache<Long, Entry> cache) {
      this.cache = cache;
    }
    @Nullable
    @Override
    public Entry apply(@Nullable Long input) {
      checkNotNull(input);
      return cache.getUnchecked(input);
    }
    @Nonnull
    public static Function<Long, Entry> loadFromCache(@Nonnull LoadingCache<Long, Entry> cache) {
      return new LoadFunction(cache);
    }
  }
}
|
Revert "Term starts at 1 instead of 0 and index starts at 1 instead of 0"
This reverts commit 558a866b0e06963725d01bcc4237713b3aa19059.
|
barge-core/src/main/java/org/robotninjas/barge/log/DefaultRaftLog.java
|
Revert "Term starts at 1 instead of 0 and index starts at 1 instead of 0"
|
<ide><path>arge-core/src/main/java/org/robotninjas/barge/log/DefaultRaftLog.java
<ide> private final Replica local;
<ide> private final List<Replica> members;
<ide> private final EventBus eventBus;
<del> private volatile long lastLogIndex = 0;
<del> private volatile long term = 1;
<add> private volatile long lastLogIndex = -1;
<add> private volatile long term = 0;
<ide> private volatile Optional<Replica> votedFor = Optional.absent();
<ide> private volatile long commitIndex = 0;
<ide> private volatile long lastApplied = 0;
<ide>
<ide> public void init() {
<ide> Entry entry = Entry.newBuilder()
<del> .setTerm(0)
<add> .setTerm(-1L)
<ide> .setCommand(ByteString.EMPTY)
<ide> .build();
<del> storeEntry(0, entry);
<add> storeEntry(-1L, entry);
<ide> }
<ide>
<ide> private void storeEntry(long index, @Nonnull Entry entry) {
<ide>
<ide> @Nonnull
<ide> public GetEntriesResult getEntry(@Nonnegative final long index) {
<del> checkArgument(index > 0);
<add> checkArgument(index >= 0);
<ide> EntryMeta previousEntry = this.entryIndex.get(index - 1);
<ide> Entry entry = entryCache.getIfPresent(index);
<ide> List<Entry> list = entry == null ? Collections.<Entry>emptyList() : newArrayList(entry);
<ide>
<ide> @Nonnull
<ide> public GetEntriesResult getEntriesFrom(@Nonnegative long beginningIndex, @Nonnegative int max) {
<del> checkArgument(beginningIndex > 0);
<add> checkArgument(beginningIndex >= 0);
<ide> Set<Long> indices = entryIndex.tailMap(beginningIndex).keySet();
<ide> Iterable<Entry> values = Iterables.transform(Iterables.limit(indices, max), loadFromCache(entryCache));
<ide> Entry previousEntry = entryCache.getIfPresent(beginningIndex - 1);
<ide>
<ide> @Nonnull
<ide> public GetEntriesResult getEntriesFrom(@Nonnegative final long beginningIndex) {
<del> checkArgument(beginningIndex > 0);
<add> checkArgument(beginningIndex >= 0);
<ide> Set<Long> indices = entryIndex.tailMap(beginningIndex).keySet();
<ide> Iterable<Entry> values = Iterables.transform(indices, loadFromCache(entryCache));
<ide> Entry previousEntry = entryCache.getIfPresent(beginningIndex - 1);
<ide> }
<ide>
<ide> public void term(@Nonnegative long term) {
<del> checkArgument(term > 0);
<add> checkArgument(term >= 0);
<ide> MDC.put("term", Long.toString(term));
<ide> LOGGER.debug("New term {}", term);
<ide> this.term = term;
|
|
Java
|
apache-2.0
|
235e626194370c58ab08ed2acfcd5b5437188b6e
| 0 |
maobaolong/alluxio,wwjiang007/alluxio,maobaolong/alluxio,Alluxio/alluxio,maobaolong/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,Alluxio/alluxio,wwjiang007/alluxio,maobaolong/alluxio,wwjiang007/alluxio,Alluxio/alluxio,Alluxio/alluxio,wwjiang007/alluxio,maobaolong/alluxio,Alluxio/alluxio,maobaolong/alluxio,wwjiang007/alluxio,Alluxio/alluxio,wwjiang007/alluxio,maobaolong/alluxio,Alluxio/alluxio,Alluxio/alluxio,maobaolong/alluxio,maobaolong/alluxio,Alluxio/alluxio,Alluxio/alluxio,maobaolong/alluxio
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master.block;
import alluxio.Constants;
import alluxio.MasterStorageTierAssoc;
import alluxio.Server;
import alluxio.StorageTierAssoc;
import alluxio.client.block.options.GetWorkerReportOptions;
import alluxio.client.block.options.GetWorkerReportOptions.WorkerRange;
import alluxio.clock.SystemClock;
import alluxio.collections.ConcurrentHashSet;
import alluxio.collections.IndexDefinition;
import alluxio.collections.IndexedSet;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.exception.BlockInfoException;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.status.InvalidArgumentException;
import alluxio.exception.status.NotFoundException;
import alluxio.exception.status.UnavailableException;
import alluxio.grpc.Command;
import alluxio.grpc.CommandType;
import alluxio.grpc.ConfigProperty;
import alluxio.grpc.GrpcService;
import alluxio.grpc.GrpcUtils;
import alluxio.grpc.RegisterWorkerPOptions;
import alluxio.grpc.ServiceType;
import alluxio.grpc.StorageList;
import alluxio.grpc.WorkerLostStorageInfo;
import alluxio.heartbeat.HeartbeatContext;
import alluxio.heartbeat.HeartbeatExecutor;
import alluxio.heartbeat.HeartbeatThread;
import alluxio.master.CoreMaster;
import alluxio.master.CoreMasterContext;
import alluxio.master.block.meta.MasterWorkerInfo;
import alluxio.master.block.meta.WorkerMetaLockSection;
import alluxio.master.journal.JournalContext;
import alluxio.master.journal.checkpoint.CheckpointName;
import alluxio.master.metastore.BlockStore;
import alluxio.master.metastore.BlockStore.Block;
import alluxio.master.metrics.MetricsMaster;
import alluxio.metrics.Metric;
import alluxio.metrics.MetricInfo;
import alluxio.metrics.MetricKey;
import alluxio.metrics.MetricsSystem;
import alluxio.proto.journal.Block.BlockContainerIdGeneratorEntry;
import alluxio.proto.journal.Block.BlockInfoEntry;
import alluxio.proto.journal.Block.DeleteBlockEntry;
import alluxio.proto.journal.Journal.JournalEntry;
import alluxio.proto.meta.Block.BlockLocation;
import alluxio.proto.meta.Block.BlockMeta;
import alluxio.resource.CloseableIterator;
import alluxio.resource.LockResource;
import alluxio.util.CommonUtils;
import alluxio.util.IdUtils;
import alluxio.util.executor.ExecutorServiceFactories;
import alluxio.util.executor.ExecutorServiceFactory;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.wire.Address;
import alluxio.wire.BlockInfo;
import alluxio.wire.WorkerInfo;
import alluxio.wire.WorkerNetAddress;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.util.concurrent.Striped;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.UnknownHostException;
import java.time.Clock;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.NotThreadSafe;
/**
* This block master manages the metadata for all the blocks and block workers in Alluxio.
*/
@NotThreadSafe // TODO(jiri): make thread-safe (c.f. ALLUXIO-1664)
public class DefaultBlockMaster extends CoreMaster implements BlockMaster {
private static final Logger LOG = LoggerFactory.getLogger(DefaultBlockMaster.class);
// This master depends on the metrics master being available.
private static final Set<Class<? extends Server>> DEPS =
ImmutableSet.<Class<? extends Server>>of(MetricsMaster.class);
/**
* The number of container ids to 'reserve' before having to journal container id state. This
* allows the master to return container ids within the reservation, without having to write to
* the journal.
*/
private static final long CONTAINER_ID_RESERVATION_SIZE = 1000;
/** The only valid key for {@link #mWorkerInfoCache}. */
private static final String WORKER_INFO_CACHE_KEY = "WorkerInfoKey";
// Worker metadata management.
/** Unique index over worker id, used to look workers up by id. */
private static final IndexDefinition<MasterWorkerInfo, Long> ID_INDEX =
new IndexDefinition<MasterWorkerInfo, Long>(true) {
@Override
public Long getFieldValue(MasterWorkerInfo o) {
return o.getId();
}
};
/** Unique index over worker network address, used to look workers up by address. */
private static final IndexDefinition<MasterWorkerInfo, WorkerNetAddress> ADDRESS_INDEX =
new IndexDefinition<MasterWorkerInfo, WorkerNetAddress>(true) {
@Override
public WorkerNetAddress getFieldValue(MasterWorkerInfo o) {
return o.getWorkerAddress();
}
};
/**
* Concurrency and locking in the BlockMaster
*
* The block master uses concurrent data structures to allow non-conflicting concurrent access.
* This means each piece of metadata should be locked individually. There are two types of
* metadata in the {@link DefaultBlockMaster}: block metadata and worker metadata.
*
* The worker metadata is represented by the {@link MasterWorkerInfo} object.
* See javadoc of {@link MasterWorkerInfo} for details.
*
* To modify or read a modifiable piece of worker metadata, the {@link MasterWorkerInfo} for the
* worker must be locked following the instructions in {@link MasterWorkerInfo}.
* For block metadata, the id of the block must be locked.
* This will protect the internal integrity of the block and worker metadata.
*
* A worker's relevant locks must be held to
* - Check/Update the worker register status
* - Read/Update the worker usage
* - Read/Update the worker present/to-be-removed blocks
* - Any combinations of the above
*
* A block's lock must be held to
* - Perform any BlockStore operations on the block
* - Add or remove the block from mLostBlocks
*
* Lock ordering must be preserved in order to prevent deadlock. If both worker and block
* metadata must be locked at the same time, the worker metadata must be locked before the block
* metadata. When the locks are released, they must be released in the opposite order.
*
* Locking on the worker metadata are managed by
* {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}.
* This guarantees when multiple parts of the worker metadata are accessed/updated,
* the locks are acquired and released in order.
* See javadoc of {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)} for
* example usages.
*
* It should not be the case that multiple worker metadata must be locked at the same time, or
* multiple block metadata must be locked at the same time. Operations involving different workers
* or different blocks should be able to be performed independently.
*/
/**
* 10k locks balances between keeping a small memory footprint and avoiding unnecessary lock
* contention. Each stripe is around 100 bytes, so this takes about 1MB. Block locking critical
* sections are short, so it is acceptable to occasionally have conflicts where two different
* blocks want to lock the same stripe.
*/
private final Striped<Lock> mBlockLocks = Striped.lock(10_000);
/** Manages block metadata and block locations. */
private final BlockStore mBlockStore;
/** Keeps track of blocks which are no longer in Alluxio storage. */
private final ConcurrentHashSet<Long> mLostBlocks = new ConcurrentHashSet<>(64, 0.90f, 64);
/** This state must be journaled. */
@GuardedBy("itself")
private final BlockContainerIdGenerator mBlockContainerIdGenerator =
new BlockContainerIdGenerator();
/**
* Mapping between all possible storage level aliases and their ordinal position. This mapping
* forms a total ordering on all storage level aliases in the system, and must be consistent
* across masters.
*/
private final StorageTierAssoc mGlobalStorageTierAssoc;
/** Keeps track of workers which are in communication with the master. */
private final IndexedSet<MasterWorkerInfo> mWorkers =
new IndexedSet<>(ID_INDEX, ADDRESS_INDEX);
/** Keeps track of workers which are no longer in communication with the master. */
private final IndexedSet<MasterWorkerInfo> mLostWorkers =
new IndexedSet<>(ID_INDEX, ADDRESS_INDEX);
/** A worker is not visible until its registration completes. */
private final IndexedSet<MasterWorkerInfo> mTempWorkers =
new IndexedSet<>(ID_INDEX, ADDRESS_INDEX);
/** Listeners to call when lost workers are found. */
private final List<Consumer<Address>> mLostWorkerFoundListeners
= new ArrayList<>();
/** Listeners to call when workers are lost. */
private final List<Consumer<Address>> mWorkerLostListeners = new ArrayList<>();
/** Listeners to call when a new worker registers. */
private final List<BiConsumer<Address, List<ConfigProperty>>> mWorkerRegisteredListeners
= new ArrayList<>();
/** Handle to the metrics master. */
private final MetricsMaster mMetricsMaster;
/**
* The service that detects lost worker nodes, and tries to restart the failed workers.
* We store it here so that it can be accessed from tests.
*/
@SuppressFBWarnings("URF_UNREAD_FIELD")
private Future<?> mLostWorkerDetectionService;
/** The value of the 'next container id' last journaled. */
@GuardedBy("mBlockContainerIdGenerator")
private long mJournaledNextContainerId = 0;
/**
* A loading cache for worker info list, refresh periodically.
* This cache only has a single key {@link #WORKER_INFO_CACHE_KEY}.
*/
private LoadingCache<String, List<WorkerInfo>> mWorkerInfoCache;
/**
* Creates a new instance of {@link DefaultBlockMaster} with a system clock and a
* cached thread pool for maintenance threads.
*
* @param metricsMaster the metrics master
* @param masterContext the context for Alluxio master
*/
DefaultBlockMaster(MetricsMaster metricsMaster, CoreMasterContext masterContext) {
this(metricsMaster, masterContext, new SystemClock(),
ExecutorServiceFactories.cachedThreadPool(Constants.BLOCK_MASTER_NAME));
}
/**
* Creates a new instance of {@link DefaultBlockMaster}.
*
* @param metricsMaster the metrics master
* @param masterContext the context for Alluxio master
* @param clock the clock to use for determining the time
* @param executorServiceFactory a factory for creating the executor service to use for running
*        maintenance threads
*/
DefaultBlockMaster(MetricsMaster metricsMaster, CoreMasterContext masterContext, Clock clock,
ExecutorServiceFactory executorServiceFactory) {
super(masterContext, clock, executorServiceFactory);
Preconditions.checkNotNull(metricsMaster, "metricsMaster");
mBlockStore = masterContext.getBlockStoreFactory().get();
mGlobalStorageTierAssoc = new MasterStorageTierAssoc();
mMetricsMaster = metricsMaster;
Metrics.registerGauges(this);
// The worker-info list is expensive to build, so it is cached and refreshed
// in the background after the configured interval.
mWorkerInfoCache = CacheBuilder.newBuilder()
.refreshAfterWrite(ServerConfiguration
.getMs(PropertyKey.MASTER_WORKER_INFO_CACHE_REFRESH_TIME), TimeUnit.MILLISECONDS)
.build(new CacheLoader<String, List<WorkerInfo>>() {
@Override
public List<WorkerInfo> load(String key) {
return constructWorkerInfoList();
}
});
}
/** Returns the canonical name of this master service. */
@Override
public String getName() {
return Constants.BLOCK_MASTER_NAME;
}
/**
 * Exposes the gRPC services this master provides: one handler for clients and
 * one handler for workers.
 */
@Override
public Map<ServiceType, GrpcService> getServices() {
  Map<ServiceType, GrpcService> handlers = new HashMap<>();
  handlers.put(ServiceType.BLOCK_MASTER_WORKER_SERVICE,
      new GrpcService(new BlockMasterWorkerServiceHandler(this)));
  handlers.put(ServiceType.BLOCK_MASTER_CLIENT_SERVICE,
      new GrpcService(new BlockMasterClientServiceHandler(this)));
  return handlers;
}
/**
* Replays a single journal entry into this master's state.
* Handles container-id generator updates, block deletions, and block-info
* (length) records; returns false for entry types this master does not own.
*/
@Override
public boolean processJournalEntry(JournalEntry entry) {
// TODO(gene): A better way to process entries besides a huge switch?
if (entry.hasBlockContainerIdGenerator()) {
mJournaledNextContainerId = (entry.getBlockContainerIdGenerator()).getNextContainerId();
mBlockContainerIdGenerator.setNextContainerId((mJournaledNextContainerId));
} else if (entry.hasDeleteBlock()) {
mBlockStore.removeBlock(entry.getDeleteBlock().getBlockId());
} else if (entry.hasBlockInfo()) {
BlockInfoEntry blockInfoEntry = entry.getBlockInfo();
long length = blockInfoEntry.getLength();
Optional<BlockMeta> block = mBlockStore.getBlock(blockInfoEntry.getBlockId());
if (block.isPresent()) {
long oldLen = block.get().getLength();
// A block's length may only transition from UNKNOWN_SIZE to a known value;
// any other change is ignored with a warning.
if (oldLen != Constants.UNKNOWN_SIZE) {
LOG.warn("Attempting to update block length ({}) to a different length ({}).", oldLen,
length);
return true;
}
}
mBlockStore.putBlock(blockInfoEntry.getBlockId(),
BlockMeta.newBuilder().setLength(blockInfoEntry.getLength()).build());
} else {
return false;
}
return true;
}
/** Clears all block metadata and resets the container id generator to zero. */
@Override
public void resetState() {
mBlockStore.clear();
mJournaledNextContainerId = 0;
mBlockContainerIdGenerator.setNextContainerId(0);
}
/** Returns the checkpoint name under which this master's state is stored. */
@Override
public CheckpointName getCheckpointName() {
return CheckpointName.BLOCK_MASTER;
}
/**
* Returns an iterator over this master's journal entries: first the container-id
* generator entry, then one BlockInfo entry per block in the block store.
* The block entries are produced lazily from the store's iterator.
*/
@Override
public CloseableIterator<JournalEntry> getJournalEntryIterator() {
Iterator<Block> it = mBlockStore.iterator();
Iterator<JournalEntry> blockIterator = new Iterator<JournalEntry>() {
@Override
public boolean hasNext() {
return it.hasNext();
}
@Override
public JournalEntry next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
Block block = it.next();
BlockInfoEntry blockInfoEntry =
BlockInfoEntry.newBuilder().setBlockId(block.getId())
.setLength(block.getMeta().getLength()).build();
return JournalEntry.newBuilder().setBlockInfo(blockInfoEntry).build();
}
@Override
public void remove() {
throw new UnsupportedOperationException("BlockMaster#Iterator#remove is not supported.");
}
};
return CloseableIterator.noopCloseable(Iterators
.concat(CommonUtils.singleElementIterator(getContainerIdJournalEntry()), blockIterator));
}
/**
* Starts the master. On the leader only, additionally starts the periodic
* lost-worker detection heartbeat.
*/
@Override
public void start(Boolean isLeader) throws IOException {
super.start(isLeader);
if (isLeader) {
mLostWorkerDetectionService = getExecutorService().submit(new HeartbeatThread(
HeartbeatContext.MASTER_LOST_WORKER_DETECTION, new LostWorkerDetectionHeartbeatExecutor(),
(int) ServerConfiguration.getMs(PropertyKey.MASTER_LOST_WORKER_DETECTION_INTERVAL),
ServerConfiguration.global(), mMasterContext.getUserState()));
}
}
/** Stops the master; delegates entirely to the superclass. */
@Override
public void stop() throws IOException {
super.stop();
}
/** Closes the master and releases the block store's resources. */
@Override
public void close() throws IOException {
super.close();
mBlockStore.close();
}
/** Returns the number of live (registered) workers. */
@Override
public int getWorkerCount() {
return mWorkers.size();
}
/** Returns the number of workers currently considered lost. */
@Override
public int getLostWorkerCount() {
return mLostWorkers.size();
}
/**
 * Sums the storage capacity (in bytes) of all live workers, taking each
 * worker's USAGE read lock while its value is read.
 */
@Override
public long getCapacityBytes() {
  long total = 0;
  for (MasterWorkerInfo workerInfo : mWorkers) {
    try (LockResource lock = workerInfo.lockWorkerMeta(
        EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
      total += workerInfo.getCapacityBytes();
    }
  }
  return total;
}
/** Returns the cluster-wide storage tier ordering. */
@Override
public StorageTierAssoc getGlobalStorageTierAssoc() {
return mGlobalStorageTierAssoc;
}
/**
 * Sums the used storage (in bytes) across all live workers, taking each
 * worker's USAGE read lock while its value is read.
 */
@Override
public long getUsedBytes() {
  long total = 0;
  for (MasterWorkerInfo workerInfo : mWorkers) {
    try (LockResource lock = workerInfo.lockWorkerMeta(
        EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
      total += workerInfo.getUsedBytes();
    }
  }
  return total;
}
/**
* Returns the (cached) list of live worker info. Rejected while the master is
* in safe mode; cache failures surface as UnavailableException.
*/
@Override
public List<WorkerInfo> getWorkerInfoList() throws UnavailableException {
if (mSafeModeManager.isInSafeMode()) {
throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
}
try {
return mWorkerInfoCache.get(WORKER_INFO_CACHE_KEY);
} catch (ExecutionException e) {
throw new UnavailableException("Unable to get worker info list from cache", e);
}
}
/**
 * Builds a fresh snapshot of all live workers as {@link WorkerInfo}s.
 * Per-worker locking is handled inside {@link #extractWorkerInfo}.
 */
private List<WorkerInfo> constructWorkerInfoList() {
  List<WorkerInfo> snapshot = new ArrayList<>(mWorkers.size());
  for (MasterWorkerInfo workerInfo : mWorkers) {
    snapshot.add(extractWorkerInfo(workerInfo, null, true));
  }
  return snapshot;
}
/**
 * Returns info for all lost workers, sorted by last contact time.
 * Rejected while the master is in safe mode.
 */
@Override
public List<WorkerInfo> getLostWorkersInfoList() throws UnavailableException {
  if (mSafeModeManager.isInSafeMode()) {
    throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
  }
  List<WorkerInfo> workerInfoList = new ArrayList<>(mLostWorkers.size());
  for (MasterWorkerInfo worker : mLostWorkers) {
    // extractWorkerInfo handles the locking internally
    workerInfoList.add(extractWorkerInfo(worker, null, false));
  }
  // List.sort is the direct Java 8+ idiom; Collections.sort just delegates to it.
  workerInfoList.sort(new WorkerInfo.LastContactSecComparator());
  return workerInfoList;
}
/**
 * Returns the network addresses of all live workers. A worker's address is
 * immutable after initialization, so no per-worker locking is required here.
 * Rejected while the master is in safe mode.
 */
@Override
public Set<WorkerNetAddress> getWorkerAddresses() throws UnavailableException {
  if (mSafeModeManager.isInSafeMode()) {
    throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
  }
  Set<WorkerNetAddress> addresses = new HashSet<>(mWorkers.size());
  for (MasterWorkerInfo workerInfo : mWorkers) {
    addresses.add(workerInfo.getWorkerAddress());
  }
  return addresses;
}
/**
* Returns a worker report for the requested worker range (ALL, LIVE, LOST, or an
* explicit address list). For SPECIFIED, any address left unmatched is reported
* back to the caller as invalid.
*/
@Override
public List<WorkerInfo> getWorkerReport(GetWorkerReportOptions options)
throws UnavailableException, InvalidArgumentException {
if (mSafeModeManager.isInSafeMode()) {
throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
}
Set<MasterWorkerInfo> selectedLiveWorkers = new HashSet<>();
Set<MasterWorkerInfo> selectedLostWorkers = new HashSet<>();
WorkerRange workerRange = options.getWorkerRange();
switch (workerRange) {
case ALL:
selectedLiveWorkers.addAll(mWorkers);
selectedLostWorkers.addAll(mLostWorkers);
break;
case LIVE:
selectedLiveWorkers.addAll(mWorkers);
break;
case LOST:
selectedLostWorkers.addAll(mLostWorkers);
break;
case SPECIFIED:
Set<String> addresses = options.getAddresses();
Set<String> workerNames = new HashSet<>();
// NOTE(review): selectInfoByAddress presumably removes matched addresses
// from the set, so anything left over is unrecognized — confirm.
selectedLiveWorkers = selectInfoByAddress(addresses, mWorkers, workerNames);
selectedLostWorkers = selectInfoByAddress(addresses, mLostWorkers, workerNames);
if (!addresses.isEmpty()) {
String info = String.format("Unrecognized worker names: %s%n"
+ "Supported worker names: %s%n",
addresses.toString(), workerNames.toString());
throw new InvalidArgumentException(info);
}
break;
default:
throw new InvalidArgumentException("Unrecognized worker range: " + workerRange);
}
List<WorkerInfo> workerInfoList = new ArrayList<>();
for (MasterWorkerInfo worker : selectedLiveWorkers) {
// extractWorkerInfo handles the locking internally
workerInfoList.add(extractWorkerInfo(worker, options.getFieldRange(), true));
}
for (MasterWorkerInfo worker : selectedLostWorkers) {
// extractWorkerInfo handles the locking internally
workerInfoList.add(extractWorkerInfo(worker, options.getFieldRange(), false));
}
return workerInfoList;
}
/**
* Locks the {@link MasterWorkerInfo} properly and converts it to a {@link WorkerInfo}.
* Takes the worker's USAGE read lock for the duration of the conversion.
*
* @param worker the worker metadata to convert
* @param fieldRange the fields to include, or null for all fields
* @param isLiveWorker whether the worker is currently live
*/
private WorkerInfo extractWorkerInfo(MasterWorkerInfo worker,
Set<GetWorkerReportOptions.WorkerInfoField> fieldRange, boolean isLiveWorker) {
try (LockResource r = worker.lockWorkerMeta(
EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
return worker.generateWorkerInfo(fieldRange, isLiveWorker);
}
}
@Override
public List<WorkerLostStorageInfo> getWorkerLostStorage() {
List<WorkerLostStorageInfo> workerLostStorageList = new ArrayList<>();
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
if (worker.hasLostStorage()) {
Map<String, StorageList> lostStorage = worker.getLostStorage().entrySet()
.stream().collect(Collectors.toMap(Map.Entry::getKey,
e -> StorageList.newBuilder().addAllStorage(e.getValue()).build()));
workerLostStorageList.add(WorkerLostStorageInfo.newBuilder()
.setAddress(GrpcUtils.toProto(worker.getWorkerAddress()))
.putAllLostStorage(lostStorage).build());
}
}
}
return workerLostStorageList;
}
  /**
   * Removes the given blocks: for delete, the block metadata is removed and journaled; for
   * free, metadata is kept and only the workers are told to drop their copies.
   *
   * @param blockIds the ids of the blocks to remove
   * @param delete true to delete the block metadata, false to only free the block
   * @throws UnavailableException if the journal cannot be written
   */
  @Override
  public void removeBlocks(List<Long> blockIds, boolean delete) throws UnavailableException {
    try (JournalContext journalContext = createJournalContext()) {
      for (long blockId : blockIds) {
        // Workers currently holding this block; they are notified below, outside the block lock.
        HashSet<Long> workerIds = new HashSet<>();
        try (LockResource r = lockBlock(blockId)) {
          Optional<BlockMeta> block = mBlockStore.getBlock(blockId);
          if (!block.isPresent()) {
            continue;
          }
          for (BlockLocation loc : mBlockStore.getLocations(blockId)) {
            workerIds.add(loc.getWorkerId());
          }
          // Two cases here:
          // 1) For delete: delete the block metadata.
          // 2) For free: keep the block metadata. mLostBlocks will be changed in
          //    processWorkerRemovedBlocks
          if (delete) {
            // Make sure blockId is removed from mLostBlocks when the block metadata is deleted.
            // Otherwise blockId in mLostBlock can be dangling index if the metadata is gone.
            mLostBlocks.remove(blockId);
            mBlockStore.removeBlock(blockId);
            JournalEntry entry = JournalEntry.newBuilder()
                .setDeleteBlock(DeleteBlockEntry.newBuilder().setBlockId(blockId)).build();
            journalContext.append(entry);
          }
        }
        // Outside of locking the block. This does not have to be synchronized with the block
        // metadata, since it is essentially an asynchronous signal to the worker to remove the
        // block.
        // TODO(jiacheng): if the block locations are changed (like a new worker is registered
        // with the block), the block will not be freed ever. The locking logic in
        // workerRegister should be changed to address this race condition.
        for (long workerId : workerIds) {
          MasterWorkerInfo worker = mWorkers.getFirstByField(ID_INDEX, workerId);
          if (worker != null) {
            // Exclusive lock on the worker's block list while marking the block for removal.
            try (LockResource r = worker.lockWorkerMeta(
                EnumSet.of(WorkerMetaLockSection.BLOCKS), false)) {
              worker.updateToRemovedBlock(true, blockId);
            }
          }
        }
      }
    }
  }
@Override
public void validateBlocks(Function<Long, Boolean> validator, boolean repair)
throws UnavailableException {
List<Long> invalidBlocks = new ArrayList<>();
for (Iterator<Block> iter = mBlockStore.iterator(); iter.hasNext(); ) {
long id = iter.next().getId();
if (!validator.apply(id)) {
invalidBlocks.add(id);
}
}
if (!invalidBlocks.isEmpty()) {
long limit = 100;
List<Long> loggedBlocks = invalidBlocks.stream().limit(limit).collect(Collectors.toList());
LOG.warn("Found {} orphan blocks without corresponding file metadata.", invalidBlocks.size());
if (invalidBlocks.size() > limit) {
LOG.warn("The first {} orphan blocks include {}.", limit, loggedBlocks);
} else {
LOG.warn("The orphan blocks include {}.", loggedBlocks);
}
if (repair) {
LOG.warn("Deleting {} orphan blocks.", invalidBlocks.size());
removeBlocks(invalidBlocks, true);
} else {
LOG.warn("Restart Alluxio master with {}=true to delete the blocks and repair the system.",
PropertyKey.Name.MASTER_STARTUP_BLOCK_INTEGRITY_CHECK_ENABLED);
}
}
}
  /**
   * Returns a new block container id, journaling a new reservation of
   * {@code CONTAINER_ID_RESERVATION_SIZE} ids whenever the generator runs past the last
   * journaled reservation.
   *
   * @return a new block container id
   * @throws UnavailableException if the journal cannot be written
   */
  @Override
  public long getNewContainerId() throws UnavailableException {
    synchronized (mBlockContainerIdGenerator) {
      long containerId = mBlockContainerIdGenerator.getNewContainerId();
      if (containerId < mJournaledNextContainerId) {
        // This container id is within the reserved container ids, so it is safe to return the id
        // without having to write anything to the journal.
        return containerId;
      }
      // This container id is not safe with respect to the last journaled container id.
      // Therefore, journal the new state of the container id. This implies that when a master
      // crashes, the container ids within the reservation which have not been used yet will
      // never be used. This is a tradeoff between fully utilizing the container id space, vs.
      // improving master scalability.
      // TODO(gpang): investigate if dynamic reservation sizes could be effective

      // Set the next id to journal with a reservation of container ids, to avoid having to write
      // to the journal for ids within the reservation.
      mJournaledNextContainerId = containerId + CONTAINER_ID_RESERVATION_SIZE;
      try (JournalContext journalContext = createJournalContext()) {
        // This must be flushed while holding the lock on mBlockContainerIdGenerator, in order to
        // prevent subsequent calls to return ids that have not been journaled and flushed.
        journalContext.append(getContainerIdJournalEntry());
      }
      return containerId;
    }
  }
/**
* @return a {@link JournalEntry} representing the state of the container id generator
*/
private JournalEntry getContainerIdJournalEntry() {
synchronized (mBlockContainerIdGenerator) {
BlockContainerIdGeneratorEntry blockContainerIdGenerator =
BlockContainerIdGeneratorEntry.newBuilder().setNextContainerId(mJournaledNextContainerId)
.build();
return JournalEntry.newBuilder().setBlockContainerIdGenerator(blockContainerIdGenerator)
.build();
}
}
  // TODO(binfan): check the logic is correct or not when commitBlock is a retry
  /**
   * Marks a block as committed on a specific worker: journals the block length on first
   * commit, records the worker as a location of the block, and updates the worker's usage.
   *
   * @param workerId the id of the worker committing the block
   * @param usedBytesOnTier the updated used bytes on the worker's tier
   * @param tierAlias the alias of the storage tier holding the block
   * @param mediumType the medium type of the storage holding the block
   * @param blockId the id of the committed block
   * @param length the length of the committed block
   * @throws NotFoundException if no registered worker has the given id
   * @throws UnavailableException if the journal cannot be written
   */
  @Override
  public void commitBlock(long workerId, long usedBytesOnTier, String tierAlias,
      String mediumType, long blockId, long length)
      throws NotFoundException, UnavailableException {
    LOG.debug("Commit block from workerId: {}, usedBytesOnTier: {}, blockId: {}, length: {}",
        workerId, usedBytesOnTier, blockId, length);
    MasterWorkerInfo worker = mWorkers.getFirstByField(ID_INDEX, workerId);
    // TODO(peis): Check lost workers as well.
    if (worker == null) {
      throw new NotFoundException(ExceptionMessage.NO_WORKER_FOUND.getMessage(workerId));
    }
    try (JournalContext journalContext = createJournalContext()) {
      // Lock the worker metadata here to preserve the lock order
      // The worker metadata must be locked before the blocks
      try (LockResource lr = worker.lockWorkerMeta(
          EnumSet.of(WorkerMetaLockSection.USAGE, WorkerMetaLockSection.BLOCKS), false)) {
        try (LockResource r = lockBlock(blockId)) {
          Optional<BlockMeta> block = mBlockStore.getBlock(blockId);
          if (!block.isPresent() || block.get().getLength() != length) {
            // A block with a known (non-UNKNOWN) length cannot be re-committed with a
            // different length; the attempt is rejected with a warning.
            if (block.isPresent() && block.get().getLength() != Constants.UNKNOWN_SIZE) {
              LOG.warn("Rejecting attempt to change block length from {} to {}",
                  block.get().getLength(), length);
            } else {
              mBlockStore.putBlock(blockId, BlockMeta.newBuilder().setLength(length).build());
              BlockInfoEntry blockInfo =
                  BlockInfoEntry.newBuilder().setBlockId(blockId).setLength(length).build();
              journalContext.append(JournalEntry.newBuilder().setBlockInfo(blockInfo).build());
            }
          }
          // Update the block metadata with the new worker location.
          mBlockStore.addLocation(blockId, BlockLocation.newBuilder()
              .setWorkerId(workerId)
              .setTier(tierAlias)
              .setMediumType(mediumType)
              .build());
          // This worker has this block, so it is no longer lost.
          mLostBlocks.remove(blockId);
          // Update the worker information for this new block.
          // TODO(binfan): when retry commitBlock on master is expected, make sure metrics are not
          // double counted.
          worker.addBlock(blockId);
          worker.updateUsedBytes(tierAlias, usedBytesOnTier);
        }
      }
      worker.updateLastUpdatedTimeMs();
    }
  }
@Override
public void commitBlockInUFS(long blockId, long length) throws UnavailableException {
LOG.debug("Commit block in ufs. blockId: {}, length: {}", blockId, length);
try (JournalContext journalContext = createJournalContext();
LockResource r = lockBlock(blockId)) {
if (mBlockStore.getBlock(blockId).isPresent()) {
// Block metadata already exists, so do not need to create a new one.
return;
}
mBlockStore.putBlock(blockId, BlockMeta.newBuilder().setLength(length).build());
BlockInfoEntry blockInfo =
BlockInfoEntry.newBuilder().setBlockId(blockId).setLength(length).build();
journalContext.append(JournalEntry.newBuilder().setBlockInfo(blockInfo).build());
}
}
@Override
public BlockInfo getBlockInfo(long blockId) throws BlockInfoException, UnavailableException {
return generateBlockInfo(blockId)
.orElseThrow(() -> new BlockInfoException(ExceptionMessage.BLOCK_META_NOT_FOUND, blockId));
}
@Override
public List<BlockInfo> getBlockInfoList(List<Long> blockIds) throws UnavailableException {
List<BlockInfo> ret = new ArrayList<>(blockIds.size());
for (long blockId : blockIds) {
generateBlockInfo(blockId).ifPresent(info -> ret.add(info));
}
return ret;
}
@Override
public Map<String, Long> getTotalBytesOnTiers() {
Map<String, Long> ret = new HashMap<>();
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
for (Map.Entry<String, Long> entry : worker.getTotalBytesOnTiers().entrySet()) {
Long total = ret.get(entry.getKey());
ret.put(entry.getKey(), (total == null ? 0L : total) + entry.getValue());
}
}
}
return ret;
}
@Override
public Map<String, Long> getUsedBytesOnTiers() {
Map<String, Long> ret = new HashMap<>();
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(
EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
for (Map.Entry<String, Long> entry : worker.getUsedBytesOnTiers().entrySet()) {
Long used = ret.get(entry.getKey());
ret.put(entry.getKey(), (used == null ? 0L : used) + entry.getValue());
}
}
}
return ret;
}
/**
* Find a worker which is considered lost or just gets its id.
* @param workerNetAddress the address used to find a worker
* @return a {@link MasterWorkerInfo} which is presented in master but not registered,
* or null if not worker is found.
*/
@Nullable
private MasterWorkerInfo findUnregisteredWorker(WorkerNetAddress workerNetAddress) {
for (IndexedSet<MasterWorkerInfo> workers: Arrays.asList(mTempWorkers, mLostWorkers)) {
MasterWorkerInfo worker = workers.getFirstByField(ADDRESS_INDEX, workerNetAddress);
if (worker != null) {
return worker;
}
}
return null;
}
/**
* Find a worker which is considered lost or just gets its id.
* @param workerId the id used to find a worker
* @return a {@link MasterWorkerInfo} which is presented in master but not registered,
* or null if not worker is found.
*/
@Nullable
private MasterWorkerInfo findUnregisteredWorker(long workerId) {
for (IndexedSet<MasterWorkerInfo> workers: Arrays.asList(mTempWorkers, mLostWorkers)) {
MasterWorkerInfo worker = workers.getFirstByField(ID_INDEX, workerId);
if (worker != null) {
return worker;
}
}
return null;
}
  /**
   * Re-register a lost worker or complete registration after getting a worker id.
   * This method requires no locking on {@link MasterWorkerInfo} because it is only
   * reading final fields.
   *
   * @param workerId the worker id to register
   * @return the worker that was moved into the registered set, or null if the id was not
   *         found in either the temporary or the lost set
   */
  @Nullable
  private MasterWorkerInfo recordWorkerRegistration(long workerId) {
    for (IndexedSet<MasterWorkerInfo> workers: Arrays.asList(mTempWorkers, mLostWorkers)) {
      MasterWorkerInfo worker = workers.getFirstByField(ID_INDEX, workerId);
      if (worker == null) {
        continue;
      }

      // Add to the registered set before removing from the source set.
      mWorkers.add(worker);
      workers.remove(worker);
      if (workers == mLostWorkers) {
        // A previously-lost worker came back: notify the lost-worker-found listeners.
        for (Consumer<Address> function : mLostWorkerFoundListeners) {
          // The worker address is final, no need for locking here
          function.accept(new Address(worker.getWorkerAddress().getHost(),
              worker.getWorkerAddress().getRpcPort()));
        }
        LOG.warn("A lost worker {} has requested its old id {}.",
            worker.getWorkerAddress(), worker.getId());
      }

      return worker;
    }
    return null;
  }
@Override
public long getWorkerId(WorkerNetAddress workerNetAddress) {
MasterWorkerInfo existingWorker = mWorkers.getFirstByField(ADDRESS_INDEX, workerNetAddress);
if (existingWorker != null) {
// This worker address is already mapped to a worker id.
long oldWorkerId = existingWorker.getId();
LOG.warn("The worker {} already exists as id {}.", workerNetAddress, oldWorkerId);
return oldWorkerId;
}
existingWorker = findUnregisteredWorker(workerNetAddress);
if (existingWorker != null) {
return existingWorker.getId();
}
// Generate a new worker id.
long workerId = IdUtils.getRandomNonNegativeLong();
while (!mTempWorkers.add(new MasterWorkerInfo(workerId, workerNetAddress))) {
workerId = IdUtils.getRandomNonNegativeLong();
}
LOG.info("getWorkerId(): WorkerNetAddress: {} id: {}", workerNetAddress, workerId);
return workerId;
}
  /**
   * Registers (or re-registers) a worker with its full storage and block report: records the
   * worker's tiers and usage, reconciles its block list against the master's metadata, and
   * moves the worker into the registered set.
   *
   * @param workerId the id of the registering worker
   * @param storageTiers the ordered storage tier aliases on the worker
   * @param totalBytesOnTiers total capacity per tier
   * @param usedBytesOnTiers used capacity per tier
   * @param currentBlocksOnLocation all blocks on the worker, keyed by location
   * @param lostStorage the lost storage paths per tier
   * @param options registration options, possibly carrying worker configuration
   * @throws NotFoundException if the worker id is unknown to the master
   */
  @Override
  public void workerRegister(long workerId, List<String> storageTiers,
      Map<String, Long> totalBytesOnTiers, Map<String, Long> usedBytesOnTiers,
      Map<BlockLocation, List<Long>> currentBlocksOnLocation,
      Map<String, StorageList> lostStorage, RegisterWorkerPOptions options)
      throws NotFoundException {
    MasterWorkerInfo worker = mWorkers.getFirstByField(ID_INDEX, workerId);

    if (worker == null) {
      // Not registered yet — look in the temporary and lost sets.
      worker = findUnregisteredWorker(workerId);
    }

    if (worker == null) {
      throw new NotFoundException(ExceptionMessage.NO_WORKER_FOUND.getMessage(workerId));
    }

    // Gather all blocks on this worker.
    HashSet<Long> blocks = new HashSet<>();
    for (List<Long> blockIds : currentBlocksOnLocation.values()) {
      blocks.addAll(blockIds);
    }

    // Lock all the locks
    try (LockResource r = worker.lockWorkerMeta(EnumSet.of(
        WorkerMetaLockSection.STATUS,
        WorkerMetaLockSection.USAGE,
        WorkerMetaLockSection.BLOCKS), false)) {
      // Detect any lost blocks on this worker.
      Set<Long> removedBlocks = worker.register(mGlobalStorageTierAssoc, storageTiers,
          totalBytesOnTiers, usedBytesOnTiers, blocks);
      processWorkerRemovedBlocks(worker, removedBlocks);
      processWorkerAddedBlocks(worker, currentBlocksOnLocation);
      processWorkerOrphanedBlocks(worker);
      worker.addLostStorage(lostStorage);
    }

    if (options.getConfigsCount() > 0) {
      // Propagate the worker's reported configuration to the registered listeners.
      for (BiConsumer<Address, List<ConfigProperty>> function : mWorkerRegisteredListeners) {
        WorkerNetAddress workerAddress = worker.getWorkerAddress();
        function.accept(new Address(workerAddress.getHost(), workerAddress.getRpcPort()),
            options.getConfigsList());
      }
    }

    recordWorkerRegistration(workerId);

    // Update the TS at the end of the process
    worker.updateLastUpdatedTimeMs();
    // Invalidate cache to trigger new build of worker info list
    mWorkerInfoCache.invalidate(WORKER_INFO_CACHE_KEY);

    // NOTE(review): currentBlocksOnLocation.size() counts BlockLocation keys, not block ids;
    // when a location maps to multiple blocks this undercounts — confirm whether the counter
    // should use blocks.size() instead.
    Metrics.TOTAL_BLOCKS.inc(currentBlocksOnLocation.size());

    LOG.info("registerWorker(): {}", worker);
  }
  /**
   * Processes a periodic worker heartbeat: refreshes the worker's liveness timestamp, applies
   * the reported usage/block deltas, and returns the command the worker should execute next
   * (Register if unknown, Free if blocks are pending removal, Nothing otherwise).
   *
   * @param workerId the id of the heartbeating worker
   * @param capacityBytesOnTiers updated capacity per tier, or null to leave unchanged
   * @param usedBytesOnTiers updated usage per tier
   * @param removedBlockIds ids of blocks removed from the worker since the last heartbeat
   * @param addedBlocks blocks added to the worker since the last heartbeat, keyed by location
   * @param lostStorage lost storage paths per tier
   * @param metrics worker-side metrics to forward to the metrics master
   * @return the command for the worker to execute
   */
  @Override
  public Command workerHeartbeat(long workerId, Map<String, Long> capacityBytesOnTiers,
      Map<String, Long> usedBytesOnTiers, List<Long> removedBlockIds,
      Map<BlockLocation, List<Long>> addedBlocks,
      Map<String, StorageList> lostStorage,
      List<Metric> metrics) {
    MasterWorkerInfo worker = mWorkers.getFirstByField(ID_INDEX, workerId);
    if (worker == null) {
      LOG.warn("Could not find worker id: {} for heartbeat.", workerId);
      // Unknown worker: tell it to (re-)register.
      return Command.newBuilder().setCommandType(CommandType.Register).build();
    }

    // Update the TS before the heartbeat so even if the worker heartbeat processing
    // is time-consuming or triggers GC, the worker does not get marked as lost
    // by the LostWorkerDetectionHeartbeatExecutor
    worker.updateLastUpdatedTimeMs();

    // The address is final, no need for locking
    processWorkerMetrics(worker.getWorkerAddress().getHost(), metrics);

    Command workerCommand = null;
    try (LockResource r = worker.lockWorkerMeta(
        EnumSet.of(WorkerMetaLockSection.USAGE, WorkerMetaLockSection.BLOCKS), false)) {
      worker.addLostStorage(lostStorage);

      if (capacityBytesOnTiers != null) {
        worker.updateCapacityBytes(capacityBytesOnTiers);
      }
      worker.updateUsedBytes(usedBytesOnTiers);

      // Technically, 'worker' should be confirmed to still be in the data structure. Lost worker
      // detection can remove it. However, we are intentionally ignoring this race, since the worker
      // will just re-register regardless.
      processWorkerRemovedBlocks(worker, removedBlockIds);
      processWorkerAddedBlocks(worker, addedBlocks);

      List<Long> toRemoveBlocks = worker.getToRemoveBlocks();

      // NOTE(review): addedBlocks.size() counts BlockLocation keys, not the number of added
      // block ids — confirm whether this should sum the value lists instead.
      Metrics.TOTAL_BLOCKS.inc(addedBlocks.size() - removedBlockIds.size());

      if (toRemoveBlocks.isEmpty()) {
        workerCommand = Command.newBuilder().setCommandType(CommandType.Nothing).build();
      } else {
        workerCommand = Command.newBuilder().setCommandType(CommandType.Free)
            .addAllData(toRemoveBlocks).build();
      }
    }

    // Update the TS again
    worker.updateLastUpdatedTimeMs();

    // workerCommand is always assigned inside the locked block above; this is a safety net.
    Preconditions.checkNotNull(workerCommand, "Worker heartbeat response command is null!");

    return workerCommand;
  }
private void processWorkerMetrics(String hostname, List<Metric> metrics) {
if (metrics.isEmpty()) {
return;
}
mMetricsMaster.workerHeartbeat(hostname, metrics);
}
  /**
   * Updates the worker and block metadata for blocks removed from a worker.
   *
   * You should lock externally with {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}
   * with {@link WorkerMetaLockSection#BLOCKS} specified.
   * An exclusive lock is required.
   *
   * @param workerInfo The worker metadata object
   * @param removedBlockIds A list of block ids removed from the worker
   */
  private void processWorkerRemovedBlocks(MasterWorkerInfo workerInfo,
      Collection<Long> removedBlockIds) {
    for (long removedBlockId : removedBlockIds) {
      try (LockResource r = lockBlock(removedBlockId)) {
        Optional<BlockMeta> block = mBlockStore.getBlock(removedBlockId);
        if (block.isPresent()) {
          LOG.debug("Block {} is removed on worker {}.", removedBlockId, workerInfo.getId());
          mBlockStore.removeLocation(removedBlockId, workerInfo.getId());
          // A block with no remaining locations is considered lost.
          if (mBlockStore.getLocations(removedBlockId).size() == 0) {
            mLostBlocks.add(removedBlockId);
          }
        }
        // Remove the block even if its metadata has been deleted already.
        workerInfo.removeBlock(removedBlockId);
      }
    }
    // NOTE(review): this decrements by the full list size even for ids whose metadata was
    // already gone — confirm that is the intended accounting.
    Metrics.TOTAL_BLOCKS.dec(removedBlockIds.size());
  }
/**
* Updates the worker and block metadata for blocks added to a worker.
*
* You should lock externally with {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}
* with {@link WorkerMetaLockSection#BLOCKS} specified.
* An exclusive lock is required.
*
* @param workerInfo The worker metadata object
* @param addedBlockIds A mapping from storage tier alias to a list of block ids added
*/
private void processWorkerAddedBlocks(MasterWorkerInfo workerInfo,
Map<BlockLocation, List<Long>> addedBlockIds) {
long invalidBlockCount = 0;
for (Map.Entry<BlockLocation, List<Long>> entry : addedBlockIds.entrySet()) {
for (long blockId : entry.getValue()) {
try (LockResource r = lockBlock(blockId)) {
Optional<BlockMeta> block = mBlockStore.getBlock(blockId);
if (block.isPresent()) {
workerInfo.addBlock(blockId);
BlockLocation location = entry.getKey();
Preconditions.checkState(location.getWorkerId() == workerInfo.getId(),
String.format("BlockLocation has a different workerId %s from "
+ "the request sender's workerId %s!",
location.getWorkerId(), workerInfo.getId()));
mBlockStore.addLocation(blockId, location);
mLostBlocks.remove(blockId);
} else {
invalidBlockCount++;
LOG.debug("Invalid block: {} from worker {}.", blockId,
workerInfo.getWorkerAddress().getHost());
}
}
}
}
LOG.warn("{} invalid blocks found on worker {} in total", invalidBlockCount,
workerInfo.getWorkerAddress().getHost());
}
/**
* Checks the blocks on the worker. For blocks not present in Alluxio anymore,
* they will be marked to-be-removed from the worker.
*
* You should lock externally with {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}
* with {@link WorkerMetaLockSection#USAGE} specified.
* A shared lock is required.
*
* @param workerInfo The worker metadata object
*/
private void processWorkerOrphanedBlocks(MasterWorkerInfo workerInfo) {
long orphanedBlockCount = 0;
for (long block : workerInfo.getBlocks()) {
if (!mBlockStore.getBlock(block).isPresent()) {
orphanedBlockCount++;
LOG.debug("Requesting delete for orphaned block: {} from worker {}.", block,
workerInfo.getWorkerAddress().getHost());
workerInfo.updateToRemovedBlock(true, block);
}
}
LOG.warn("{} blocks marked as orphaned from worker {}", orphanedBlockCount,
workerInfo.getWorkerAddress().getHost());
}
  /**
   * @param blockId the block id to check
   * @return whether the given block is currently tracked as lost
   */
  @Override
  public boolean isBlockLost(long blockId) {
    return mLostBlocks.contains(blockId);
  }
  /**
   * @return an iterator over the ids of all blocks currently tracked as lost
   */
  @Override
  public Iterator<Long> getLostBlocksIterator() {
    return mLostBlocks.iterator();
  }
  /**
   * @return the number of blocks currently tracked as lost
   */
  @Override
  public int getLostBlocksCount() {
    return mLostBlocks.size();
  }
  /**
   * Generates block info, including worker locations, for a block id.
   * This requires no locks on the {@link MasterWorkerInfo} because it is only reading
   * final fields.
   *
   * @param blockId a block id
   * @return optional block info, empty if the block does not exist
   * @throws UnavailableException if the master is still in safe mode
   */
  private Optional<BlockInfo> generateBlockInfo(long blockId) throws UnavailableException {
    if (mSafeModeManager.isInSafeMode()) {
      throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
    }

    BlockMeta block;
    List<BlockLocation> blockLocations;
    try (LockResource r = lockBlock(blockId)) {
      Optional<BlockMeta> blockOpt = mBlockStore.getBlock(blockId);
      if (!blockOpt.isPresent()) {
        return Optional.empty();
      }
      block = blockOpt.get();
      // Snapshot the locations while holding the block lock, then release it before
      // resolving the owning workers below.
      blockLocations = new ArrayList<>(mBlockStore.getLocations(blockId));
    }

    // Sort the block locations by their alias ordinal in the master storage tier mapping
    Collections.sort(blockLocations,
        Comparator.comparingInt(o -> mGlobalStorageTierAssoc.getOrdinal(o.getTier())));

    List<alluxio.wire.BlockLocation> locations = new ArrayList<>();
    for (BlockLocation location : blockLocations) {
      // Locations on workers that are no longer registered are silently dropped.
      MasterWorkerInfo workerInfo =
          mWorkers.getFirstByField(ID_INDEX, location.getWorkerId());
      if (workerInfo != null) {
        // worker metadata is intentionally not locked here because:
        // - it would be an incorrect order (correct order is lock worker first, then block)
        // - only uses getters of final variables
        locations.add(new alluxio.wire.BlockLocation().setWorkerId(location.getWorkerId())
            .setWorkerAddress(workerInfo.getWorkerAddress())
            .setTierAlias(location.getTier()).setMediumType(location.getMediumType()));
      }
    }
    return Optional.of(
        new BlockInfo().setBlockId(blockId).setLength(block.getLength()).setLocations(locations));
  }
  /**
   * Marks the given blocks as lost.
   *
   * @param blockIds the ids of the blocks to report as lost
   */
  @Override
  public void reportLostBlocks(List<Long> blockIds) {
    mLostBlocks.addAll(blockIds);
  }
  /**
   * @return the set of master services this block master depends on
   */
  @Override
  public Set<Class<? extends Server>> getDependencies() {
    return DEPS;
  }
/**
* Lost worker periodic check.
*/
private final class LostWorkerDetectionHeartbeatExecutor implements HeartbeatExecutor {
/**
* Constructs a new {@link LostWorkerDetectionHeartbeatExecutor}.
*/
public LostWorkerDetectionHeartbeatExecutor() {}
@Override
public void heartbeat() {
long masterWorkerTimeoutMs = ServerConfiguration.getMs(PropertyKey.MASTER_WORKER_TIMEOUT_MS);
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(
EnumSet.of(WorkerMetaLockSection.BLOCKS), false)) {
// This is not locking because the field is atomic
final long lastUpdate = mClock.millis() - worker.getLastUpdatedTimeMs();
if (lastUpdate > masterWorkerTimeoutMs) {
LOG.error("The worker {}({}) timed out after {}ms without a heartbeat!", worker.getId(),
worker.getWorkerAddress(), lastUpdate);
processLostWorker(worker);
}
}
}
}
@Override
public void close() {
// Nothing to clean up
}
}
/**
* Forces all workers to be lost. This should only be used for testing.
*/
@VisibleForTesting
public void forgetAllWorkers() {
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(
EnumSet.of(WorkerMetaLockSection.BLOCKS), false)) {
processLostWorker(worker);
}
}
}
/**
* Updates the metadata for the specified lost worker.
*
* You should lock externally with {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}
* with {@link WorkerMetaLockSection#BLOCKS} specified.
* An exclusive lock is required.
*
* @param worker the worker metadata
*/
private void processLostWorker(MasterWorkerInfo worker) {
mLostWorkers.add(worker);
mWorkers.remove(worker);
WorkerNetAddress workerAddress = worker.getWorkerAddress();
for (Consumer<Address> function : mWorkerLostListeners) {
function.accept(new Address(workerAddress.getHost(), workerAddress.getRpcPort()));
}
processWorkerRemovedBlocks(worker, worker.getBlocks());
}
  /**
   * Acquires the per-block lock for the given block id.
   *
   * @param blockId the id of the block to lock
   * @return a {@link LockResource} that releases the lock when closed
   */
  LockResource lockBlock(long blockId) {
    return new LockResource(mBlockLocks.get(blockId));
  }
  /**
   * Selects the MasterWorkerInfo from workerInfoSet whose host or related IP address
   * exists in addresses.
   *
   * NOTE: matched host names / IPs are removed from {@code addresses} as a side effect, so
   * that the caller can detect any addresses that matched no worker (see the
   * "Unrecognized worker names" check in the caller).
   *
   * @param addresses the address set that user passed in
   * @param workerInfoSet the MasterWorkerInfo set to select info from
   * @param workerNames the supported worker names, populated as a side effect
   * @return the subset of {@code workerInfoSet} whose host or IP matched an address
   */
  private Set<MasterWorkerInfo> selectInfoByAddress(Set<String> addresses,
      Set<MasterWorkerInfo> workerInfoSet, Set<String> workerNames) {
    return workerInfoSet.stream().filter(info -> {
      String host = info.getWorkerAddress().getHost();
      workerNames.add(host);

      String ip = null;
      try {
        ip = NetworkAddressUtils.resolveIpAddress(host);
        workerNames.add(ip);
      } catch (UnknownHostException e) {
        // The host may already be an IP address
      }

      if (addresses.contains(host)) {
        addresses.remove(host);
        return true;
      }

      if (ip != null) {
        if (addresses.contains(ip)) {
          addresses.remove(ip);
          return true;
        }
      }
      return false;
    }).collect(Collectors.toSet());
  }
  /**
   * Registers a callback invoked when a previously-lost worker re-requests its id.
   *
   * @param function the listener to invoke with the worker's address
   */
  @Override
  public void registerLostWorkerFoundListener(Consumer<Address> function) {
    mLostWorkerFoundListeners.add(function);
  }
  /**
   * Registers a callback invoked when a worker is detected as lost.
   *
   * @param function the listener to invoke with the worker's address
   */
  @Override
  public void registerWorkerLostListener(Consumer<Address> function) {
    mWorkerLostListeners.add(function);
  }
  /**
   * Registers a callback invoked when a worker registers with configuration properties.
   *
   * @param function the listener to invoke with the worker's address and configuration
   */
  @Override
  public void registerNewWorkerConfListener(BiConsumer<Address, List<ConfigProperty>> function) {
    mWorkerRegisteredListeners.add(function);
  }
/**
* Class that contains metrics related to BlockMaster.
*/
public static final class Metrics {
private static final Counter TOTAL_BLOCKS =
MetricsSystem.counter(MetricKey.MASTER_TOTAL_BLOCKS.getName());
/**
* Registers metric gauges.
*
* @param master the block master handle
*/
@VisibleForTesting
public static void registerGauges(final BlockMaster master) {
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_CAPACITY_TOTAL.getName(),
master::getCapacityBytes);
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_CAPACITY_USED.getName(),
master::getUsedBytes);
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_CAPACITY_FREE.getName(),
() -> master.getCapacityBytes() - master.getUsedBytes());
for (int i = 0; i < master.getGlobalStorageTierAssoc().size(); i++) {
String alias = master.getGlobalStorageTierAssoc().getAlias(i);
// TODO(lu) Add template to dynamically construct metric key
MetricsSystem.registerGaugeIfAbsent(
MetricKey.CLUSTER_CAPACITY_TOTAL.getName() + MetricInfo.TIER + alias,
new Gauge<Long>() {
@Override
public Long getValue() {
return master.getTotalBytesOnTiers().getOrDefault(alias, 0L);
}
});
MetricsSystem.registerGaugeIfAbsent(
MetricKey.CLUSTER_CAPACITY_USED.getName() + MetricInfo.TIER + alias, new Gauge<Long>() {
@Override
public Long getValue() {
return master.getUsedBytesOnTiers().getOrDefault(alias, 0L);
}
});
MetricsSystem.registerGaugeIfAbsent(
MetricKey.CLUSTER_CAPACITY_FREE.getName() + MetricInfo.TIER + alias, new Gauge<Long>() {
@Override
public Long getValue() {
return master.getTotalBytesOnTiers().getOrDefault(alias, 0L)
- master.getUsedBytesOnTiers().getOrDefault(alias, 0L);
}
});
}
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_WORKERS.getName(),
new Gauge<Integer>() {
@Override
public Integer getValue() {
return master.getWorkerCount();
}
});
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_LOST_WORKERS.getName(),
new Gauge<Integer>() {
@Override
public Integer getValue() {
return master.getLostWorkerCount();
}
});
}
private Metrics() {} // prevent instantiation
}
}
|
core/server/master/src/main/java/alluxio/master/block/DefaultBlockMaster.java
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master.block;
import alluxio.Constants;
import alluxio.MasterStorageTierAssoc;
import alluxio.Server;
import alluxio.StorageTierAssoc;
import alluxio.client.block.options.GetWorkerReportOptions;
import alluxio.client.block.options.GetWorkerReportOptions.WorkerRange;
import alluxio.clock.SystemClock;
import alluxio.collections.ConcurrentHashSet;
import alluxio.collections.IndexDefinition;
import alluxio.collections.IndexedSet;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.exception.BlockInfoException;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.status.InvalidArgumentException;
import alluxio.exception.status.NotFoundException;
import alluxio.exception.status.UnavailableException;
import alluxio.grpc.Command;
import alluxio.grpc.CommandType;
import alluxio.grpc.ConfigProperty;
import alluxio.grpc.GrpcService;
import alluxio.grpc.GrpcUtils;
import alluxio.grpc.RegisterWorkerPOptions;
import alluxio.grpc.ServiceType;
import alluxio.grpc.StorageList;
import alluxio.grpc.WorkerLostStorageInfo;
import alluxio.heartbeat.HeartbeatContext;
import alluxio.heartbeat.HeartbeatExecutor;
import alluxio.heartbeat.HeartbeatThread;
import alluxio.master.CoreMaster;
import alluxio.master.CoreMasterContext;
import alluxio.master.block.meta.MasterWorkerInfo;
import alluxio.master.block.meta.WorkerMetaLockSection;
import alluxio.master.journal.JournalContext;
import alluxio.master.journal.checkpoint.CheckpointName;
import alluxio.master.metastore.BlockStore;
import alluxio.master.metastore.BlockStore.Block;
import alluxio.master.metrics.MetricsMaster;
import alluxio.metrics.Metric;
import alluxio.metrics.MetricInfo;
import alluxio.metrics.MetricKey;
import alluxio.metrics.MetricsSystem;
import alluxio.proto.journal.Block.BlockContainerIdGeneratorEntry;
import alluxio.proto.journal.Block.BlockInfoEntry;
import alluxio.proto.journal.Block.DeleteBlockEntry;
import alluxio.proto.journal.Journal.JournalEntry;
import alluxio.proto.meta.Block.BlockLocation;
import alluxio.proto.meta.Block.BlockMeta;
import alluxio.resource.CloseableIterator;
import alluxio.resource.LockResource;
import alluxio.util.CommonUtils;
import alluxio.util.IdUtils;
import alluxio.util.executor.ExecutorServiceFactories;
import alluxio.util.executor.ExecutorServiceFactory;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.wire.Address;
import alluxio.wire.BlockInfo;
import alluxio.wire.WorkerInfo;
import alluxio.wire.WorkerNetAddress;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.util.concurrent.Striped;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.UnknownHostException;
import java.time.Clock;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.NotThreadSafe;
/**
* This block master manages the metadata for all the blocks and block workers in Alluxio.
*/
@NotThreadSafe // TODO(jiri): make thread-safe (c.f. ALLUXIO-1664)
public class DefaultBlockMaster extends CoreMaster implements BlockMaster {
private static final Logger LOG = LoggerFactory.getLogger(DefaultBlockMaster.class);
private static final Set<Class<? extends Server>> DEPS =
ImmutableSet.<Class<? extends Server>>of(MetricsMaster.class);
/**
* The number of container ids to 'reserve' before having to journal container id state. This
* allows the master to return container ids within the reservation, without having to write to
* the journal.
*/
private static final long CONTAINER_ID_RESERVATION_SIZE = 1000;
/** The only valid key for {@link #mWorkerInfoCache}. */
private static final String WORKER_INFO_CACHE_KEY = "WorkerInfoKey";
// Worker metadata management.
private static final IndexDefinition<MasterWorkerInfo, Long> ID_INDEX =
new IndexDefinition<MasterWorkerInfo, Long>(true) {
@Override
public Long getFieldValue(MasterWorkerInfo o) {
return o.getId();
}
};
private static final IndexDefinition<MasterWorkerInfo, WorkerNetAddress> ADDRESS_INDEX =
new IndexDefinition<MasterWorkerInfo, WorkerNetAddress>(true) {
@Override
public WorkerNetAddress getFieldValue(MasterWorkerInfo o) {
return o.getWorkerAddress();
}
};
/**
* Concurrency and locking in the BlockMaster
*
* The block master uses concurrent data structures to allow non-conflicting concurrent access.
* This means each piece of metadata should be locked individually. There are two types of
* metadata in the {@link DefaultBlockMaster}: block metadata and worker metadata.
*
* The worker metadata is represented by the {@link MasterWorkerInfo} object.
* See javadoc of {@link MasterWorkerInfo} for details.
*
* To modify or read a modifiable piece of worker metadata, the {@link MasterWorkerInfo} for the
* worker must be locked following the instructions in {@link MasterWorkerInfo}.
* For block metadata, the id of the block must be locked.
* This will protect the internal integrity of the block and worker metadata.
*
* A worker's relevant locks must be held to
* - Check/Update the worker register status
* - Read/Update the worker usage
* - Read/Update the worker present/to-be-removed blocks
* - Any combinations of the above
*
* A block's lock must be held to
* - Perform any BlockStore operations on the block
* - Add or remove the block from mLostBlocks
*
* Lock ordering must be preserved in order to prevent deadlock. If both worker and block
* metadata must be locked at the same time, the worker metadata must be locked before the block
* metadata. When the locks are released, they must be released in the opposite order.
*
* Locking on the worker metadata are managed by
* {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}.
* This guarantees when multiple parts of the worker metadata are accessed/updated,
* the locks are acquired and released in order.
* See javadoc of {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)} for
* example usages.
*
* It should not be the case that multiple worker metadata must be locked at the same time, or
* multiple block metadata must be locked at the same time. Operations involving different workers
* or different blocks should be able to be performed independently.
*/
/**
* 10k locks balances between keeping a small memory footprint and avoiding unnecessary lock
* contention. Each stripe is around 100 bytes, so this takes about 1MB. Block locking critical
* sections are short, so it is acceptable to occasionally have conflicts where two different
* blocks want to lock the same stripe.
*/
private final Striped<Lock> mBlockLocks = Striped.lock(10_000);
/** Manages block metadata and block locations. */
private final BlockStore mBlockStore;
/** Keeps track of blocks which are no longer in Alluxio storage. */
private final ConcurrentHashSet<Long> mLostBlocks = new ConcurrentHashSet<>(64, 0.90f, 64);
/** This state must be journaled. */
@GuardedBy("itself")
private final BlockContainerIdGenerator mBlockContainerIdGenerator =
new BlockContainerIdGenerator();
/**
* Mapping between all possible storage level aliases and their ordinal position. This mapping
* forms a total ordering on all storage level aliases in the system, and must be consistent
* across masters.
*/
private final StorageTierAssoc mGlobalStorageTierAssoc;
/** Keeps track of workers which are in communication with the master. */
private final IndexedSet<MasterWorkerInfo> mWorkers =
new IndexedSet<>(ID_INDEX, ADDRESS_INDEX);
/** Keeps track of workers which are no longer in communication with the master. */
private final IndexedSet<MasterWorkerInfo> mLostWorkers =
new IndexedSet<>(ID_INDEX, ADDRESS_INDEX);
  /** Worker is not visible until registration completes. */
private final IndexedSet<MasterWorkerInfo> mTempWorkers =
new IndexedSet<>(ID_INDEX, ADDRESS_INDEX);
/** Listeners to call when lost workers are found. */
private final List<Consumer<Address>> mLostWorkerFoundListeners
= new ArrayList<>();
/** Listeners to call when workers are lost. */
private final List<Consumer<Address>> mWorkerLostListeners = new ArrayList<>();
/** Listeners to call when a new worker registers. */
private final List<BiConsumer<Address, List<ConfigProperty>>> mWorkerRegisteredListeners
= new ArrayList<>();
/** Handle to the metrics master. */
private final MetricsMaster mMetricsMaster;
/**
* The service that detects lost worker nodes, and tries to restart the failed workers.
* We store it here so that it can be accessed from tests.
*/
@SuppressFBWarnings("URF_UNREAD_FIELD")
private Future<?> mLostWorkerDetectionService;
/** The value of the 'next container id' last journaled. */
@GuardedBy("mBlockContainerIdGenerator")
private long mJournaledNextContainerId = 0;
/**
* A loading cache for worker info list, refresh periodically.
* This cache only has a single key {@link #WORKER_INFO_CACHE_KEY}.
*/
private LoadingCache<String, List<WorkerInfo>> mWorkerInfoCache;
  /**
   * Creates a new instance of {@link DefaultBlockMaster}.
   *
   * @param metricsMaster the metrics master
   * @param masterContext the context for Alluxio master
   */
  DefaultBlockMaster(MetricsMaster metricsMaster, CoreMasterContext masterContext) {
    // Delegate to the full constructor using the wall clock and a cached thread
    // pool named after the block master for maintenance threads.
    this(metricsMaster, masterContext, new SystemClock(),
        ExecutorServiceFactories.cachedThreadPool(Constants.BLOCK_MASTER_NAME));
  }
  /**
   * Creates a new instance of {@link DefaultBlockMaster}.
   *
   * @param metricsMaster the metrics master
   * @param masterContext the context for Alluxio master
   * @param clock the clock to use for determining the time
   * @param executorServiceFactory a factory for creating the executor service to use for running
   *        maintenance threads
   */
  DefaultBlockMaster(MetricsMaster metricsMaster, CoreMasterContext masterContext, Clock clock,
      ExecutorServiceFactory executorServiceFactory) {
    super(masterContext, clock, executorServiceFactory);
    Preconditions.checkNotNull(metricsMaster, "metricsMaster");
    mBlockStore = masterContext.getBlockStoreFactory().get();
    mGlobalStorageTierAssoc = new MasterStorageTierAssoc();
    mMetricsMaster = metricsMaster;
    Metrics.registerGauges(this);
    // The worker info list is expensive to build (locks every worker), so it is
    // served from a cache refreshed at the configured interval.
    mWorkerInfoCache = CacheBuilder.newBuilder()
        .refreshAfterWrite(ServerConfiguration
            .getMs(PropertyKey.MASTER_WORKER_INFO_CACHE_REFRESH_TIME), TimeUnit.MILLISECONDS)
        .build(new CacheLoader<String, List<WorkerInfo>>() {
          @Override
          public List<WorkerInfo> load(String key) {
            return constructWorkerInfoList();
          }
        });
  }
  @Override
  public String getName() {
    // Fixed service name identifying this master in registries and journals.
    return Constants.BLOCK_MASTER_NAME;
  }
@Override
public Map<ServiceType, GrpcService> getServices() {
Map<ServiceType, GrpcService> services = new HashMap<>();
services.put(ServiceType.BLOCK_MASTER_CLIENT_SERVICE,
new GrpcService(new BlockMasterClientServiceHandler(this)));
services.put(ServiceType.BLOCK_MASTER_WORKER_SERVICE,
new GrpcService(new BlockMasterWorkerServiceHandler(this)));
return services;
}
  @Override
  public boolean processJournalEntry(JournalEntry entry) {
    // Replays one journal entry during recovery. Returns false for entry types
    // this master does not own so another master can process them.
    // TODO(gene): A better way to process entries besides a huge switch?
    if (entry.hasBlockContainerIdGenerator()) {
      // Restore the reserved container-id watermark and the generator state.
      mJournaledNextContainerId = (entry.getBlockContainerIdGenerator()).getNextContainerId();
      mBlockContainerIdGenerator.setNextContainerId((mJournaledNextContainerId));
    } else if (entry.hasDeleteBlock()) {
      mBlockStore.removeBlock(entry.getDeleteBlock().getBlockId());
    } else if (entry.hasBlockInfo()) {
      BlockInfoEntry blockInfoEntry = entry.getBlockInfo();
      long length = blockInfoEntry.getLength();
      Optional<BlockMeta> block = mBlockStore.getBlock(blockInfoEntry.getBlockId());
      if (block.isPresent()) {
        long oldLen = block.get().getLength();
        if (oldLen != Constants.UNKNOWN_SIZE) {
          // A block whose length is already known must not be resized; keep the
          // existing metadata and consume the entry.
          LOG.warn("Attempting to update block length ({}) to a different length ({}).", oldLen,
              length);
          return true;
        }
      }
      mBlockStore.putBlock(blockInfoEntry.getBlockId(),
          BlockMeta.newBuilder().setLength(blockInfoEntry.getLength()).build());
    } else {
      return false;
    }
    return true;
  }
  @Override
  public void resetState() {
    // Drops all block metadata and rewinds the container-id generator; used
    // before the journal is replayed from scratch.
    mBlockStore.clear();
    mJournaledNextContainerId = 0;
    mBlockContainerIdGenerator.setNextContainerId(0);
  }
  @Override
  public CheckpointName getCheckpointName() {
    // Identifies this master's section within a journal checkpoint.
    return CheckpointName.BLOCK_MASTER;
  }
@Override
public CloseableIterator<JournalEntry> getJournalEntryIterator() {
Iterator<Block> it = mBlockStore.iterator();
Iterator<JournalEntry> blockIterator = new Iterator<JournalEntry>() {
@Override
public boolean hasNext() {
return it.hasNext();
}
@Override
public JournalEntry next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
Block block = it.next();
BlockInfoEntry blockInfoEntry =
BlockInfoEntry.newBuilder().setBlockId(block.getId())
.setLength(block.getMeta().getLength()).build();
return JournalEntry.newBuilder().setBlockInfo(blockInfoEntry).build();
}
@Override
public void remove() {
throw new UnsupportedOperationException("BlockMaster#Iterator#remove is not supported.");
}
};
return CloseableIterator.noopCloseable(Iterators
.concat(CommonUtils.singleElementIterator(getContainerIdJournalEntry()), blockIterator));
}
  @Override
  public void start(Boolean isLeader) throws IOException {
    // Starts the master; only the leader runs the lost-worker detection heartbeat.
    super.start(isLeader);
    if (isLeader) {
      mLostWorkerDetectionService = getExecutorService().submit(new HeartbeatThread(
          HeartbeatContext.MASTER_LOST_WORKER_DETECTION, new LostWorkerDetectionHeartbeatExecutor(),
          (int) ServerConfiguration.getMs(PropertyKey.MASTER_LOST_WORKER_DETECTION_INTERVAL),
          ServerConfiguration.global(), mMasterContext.getUserState()));
    }
  }
  @Override
  public void stop() throws IOException {
    // No block-master-specific shutdown work; the superclass stops the executor,
    // which cancels the lost-worker detection heartbeat submitted in start().
    super.stop();
  }
  @Override
  public void close() throws IOException {
    // Release superclass resources first, then the block metadata store.
    super.close();
    mBlockStore.close();
  }
  @Override
  public int getWorkerCount() {
    // Number of workers currently registered and live.
    return mWorkers.size();
  }
  @Override
  public int getLostWorkerCount() {
    // Number of workers considered lost (no recent heartbeat).
    return mLostWorkers.size();
  }
@Override
public long getCapacityBytes() {
long ret = 0;
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(
EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
ret += worker.getCapacityBytes();
}
}
return ret;
}
  @Override
  public StorageTierAssoc getGlobalStorageTierAssoc() {
    // The global tier ordering is built once in the constructor and never changes.
    return mGlobalStorageTierAssoc;
  }
@Override
public long getUsedBytes() {
long ret = 0;
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(
EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
ret += worker.getUsedBytes();
}
}
return ret;
}
  @Override
  public List<WorkerInfo> getWorkerInfoList() throws UnavailableException {
    // Serve worker info from the periodically refreshed cache so each RPC does
    // not rebuild the list (which locks every worker).
    if (mSafeModeManager.isInSafeMode()) {
      throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
    }
    try {
      return mWorkerInfoCache.get(WORKER_INFO_CACHE_KEY);
    } catch (ExecutionException e) {
      throw new UnavailableException("Unable to get worker info list from cache", e);
    }
  }
private List<WorkerInfo> constructWorkerInfoList() {
List<WorkerInfo> workerInfoList = new ArrayList<>(mWorkers.size());
for (MasterWorkerInfo worker : mWorkers) {
// extractWorkerInfo handles the locking internally
workerInfoList.add(extractWorkerInfo(worker, null, true));
}
return workerInfoList;
}
@Override
public List<WorkerInfo> getLostWorkersInfoList() throws UnavailableException {
if (mSafeModeManager.isInSafeMode()) {
throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
}
List<WorkerInfo> workerInfoList = new ArrayList<>(mLostWorkers.size());
for (MasterWorkerInfo worker : mLostWorkers) {
// extractWorkerInfo handles the locking internally
workerInfoList.add(extractWorkerInfo(worker, null, false));
}
Collections.sort(workerInfoList, new WorkerInfo.LastContactSecComparator());
return workerInfoList;
}
@Override
public Set<WorkerNetAddress> getWorkerAddresses() throws UnavailableException {
if (mSafeModeManager.isInSafeMode()) {
throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
}
Set<WorkerNetAddress> workerAddresses = new HashSet<>(mWorkers.size());
for (MasterWorkerInfo worker : mWorkers) {
// worker net address is unmodifiable after initialization, no locking is needed
workerAddresses.add(worker.getWorkerAddress());
}
return workerAddresses;
}
  @Override
  public List<WorkerInfo> getWorkerReport(GetWorkerReportOptions options)
      throws UnavailableException, InvalidArgumentException {
    // Builds a worker report filtered by the requested worker range (ALL, LIVE,
    // LOST, or an explicit address list) and field range.
    if (mSafeModeManager.isInSafeMode()) {
      throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
    }
    Set<MasterWorkerInfo> selectedLiveWorkers = new HashSet<>();
    Set<MasterWorkerInfo> selectedLostWorkers = new HashSet<>();
    WorkerRange workerRange = options.getWorkerRange();
    switch (workerRange) {
      case ALL:
        selectedLiveWorkers.addAll(mWorkers);
        selectedLostWorkers.addAll(mLostWorkers);
        break;
      case LIVE:
        selectedLiveWorkers.addAll(mWorkers);
        break;
      case LOST:
        selectedLostWorkers.addAll(mLostWorkers);
        break;
      case SPECIFIED:
        Set<String> addresses = options.getAddresses();
        Set<String> workerNames = new HashSet<>();
        // NOTE(review): selectInfoByAddress appears to consume matched entries
        // from 'addresses' — inferred from the leftover check below; confirm.
        selectedLiveWorkers = selectInfoByAddress(addresses, mWorkers, workerNames);
        selectedLostWorkers = selectInfoByAddress(addresses, mLostWorkers, workerNames);
        if (!addresses.isEmpty()) {
          String info = String.format("Unrecognized worker names: %s%n"
              + "Supported worker names: %s%n",
              addresses.toString(), workerNames.toString());
          throw new InvalidArgumentException(info);
        }
        break;
      default:
        throw new InvalidArgumentException("Unrecognized worker range: " + workerRange);
    }
    List<WorkerInfo> workerInfoList = new ArrayList<>();
    for (MasterWorkerInfo worker : selectedLiveWorkers) {
      // extractWorkerInfo handles the locking internally
      workerInfoList.add(extractWorkerInfo(worker, options.getFieldRange(), true));
    }
    for (MasterWorkerInfo worker : selectedLostWorkers) {
      // extractWorkerInfo handles the locking internally
      workerInfoList.add(extractWorkerInfo(worker, options.getFieldRange(), false));
    }
    return workerInfoList;
  }
  /**
   * Locks the {@link MasterWorkerInfo} properly and converts it to a {@link WorkerInfo}.
   *
   * @param worker the worker metadata to snapshot
   * @param fieldRange the fields to include, or null for the default set
   * @param isLiveWorker whether the worker is in the live set
   */
  private WorkerInfo extractWorkerInfo(MasterWorkerInfo worker,
      Set<GetWorkerReportOptions.WorkerInfoField> fieldRange, boolean isLiveWorker) {
    // A shared USAGE read lock suffices because the snapshot only reads state.
    try (LockResource r = worker.lockWorkerMeta(
        EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
      return worker.generateWorkerInfo(fieldRange, isLiveWorker);
    }
  }
  @Override
  public List<WorkerLostStorageInfo> getWorkerLostStorage() {
    // Collects, per live worker, the storage paths the worker reported as lost.
    List<WorkerLostStorageInfo> workerLostStorageList = new ArrayList<>();
    for (MasterWorkerInfo worker : mWorkers) {
      try (LockResource r = worker.lockWorkerMeta(EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
        if (worker.hasLostStorage()) {
          // Convert each tier's lost-path list into its proto StorageList form.
          Map<String, StorageList> lostStorage = worker.getLostStorage().entrySet()
              .stream().collect(Collectors.toMap(Map.Entry::getKey,
                  e -> StorageList.newBuilder().addAllStorage(e.getValue()).build()));
          workerLostStorageList.add(WorkerLostStorageInfo.newBuilder()
              .setAddress(GrpcUtils.toProto(worker.getWorkerAddress()))
              .putAllLostStorage(lostStorage).build());
        }
      }
    }
    return workerLostStorageList;
  }
  @Override
  public void removeBlocks(List<Long> blockIds, boolean delete) throws UnavailableException {
    // For each block: delete its metadata (delete == true, journaled) or keep
    // the metadata and only ask owning workers to free their copies.
    try (JournalContext journalContext = createJournalContext()) {
      for (long blockId : blockIds) {
        HashSet<Long> workerIds = new HashSet<>();
        try (LockResource r = lockBlock(blockId)) {
          Optional<BlockMeta> block = mBlockStore.getBlock(blockId);
          if (!block.isPresent()) {
            continue;
          }
          // Snapshot the owning workers while the block lock is held.
          for (BlockLocation loc : mBlockStore.getLocations(blockId)) {
            workerIds.add(loc.getWorkerId());
          }
          // Two cases here:
          // 1) For delete: delete the block metadata.
          // 2) For free: keep the block metadata. mLostBlocks will be changed in
          //    processWorkerRemovedBlocks
          if (delete) {
            // Make sure blockId is removed from mLostBlocks when the block metadata is deleted.
            // Otherwise blockId in mLostBlock can be dangling index if the metadata is gone.
            mLostBlocks.remove(blockId);
            mBlockStore.removeBlock(blockId);
            JournalEntry entry = JournalEntry.newBuilder()
                .setDeleteBlock(DeleteBlockEntry.newBuilder().setBlockId(blockId)).build();
            journalContext.append(entry);
          }
        }
        // Outside of locking the block. This does not have to be synchronized with the block
        // metadata, since it is essentially an asynchronous signal to the worker to remove the
        // block.
        // TODO(jiacheng): if the block locations are changed (like a new worker is registered
        // with the block), the block will not be freed ever. The locking logic in
        // workerRegister should be changed to address this race condition.
        for (long workerId : workerIds) {
          MasterWorkerInfo worker = mWorkers.getFirstByField(ID_INDEX, workerId);
          if (worker != null) {
            try (LockResource r = worker.lockWorkerMeta(
                EnumSet.of(WorkerMetaLockSection.BLOCKS), false)) {
              worker.updateToRemovedBlock(true, blockId);
            }
          }
        }
      }
    }
  }
@Override
public void validateBlocks(Function<Long, Boolean> validator, boolean repair)
throws UnavailableException {
List<Long> invalidBlocks = new ArrayList<>();
for (Iterator<Block> iter = mBlockStore.iterator(); iter.hasNext(); ) {
long id = iter.next().getId();
if (!validator.apply(id)) {
invalidBlocks.add(id);
}
}
if (!invalidBlocks.isEmpty()) {
long limit = 100;
List<Long> loggedBlocks = invalidBlocks.stream().limit(limit).collect(Collectors.toList());
LOG.warn("Found {} orphan blocks without corresponding file metadata.", invalidBlocks.size());
if (invalidBlocks.size() > limit) {
LOG.warn("The first {} orphan blocks include {}.", limit, loggedBlocks);
} else {
LOG.warn("The orphan blocks include {}.", loggedBlocks);
}
if (repair) {
LOG.warn("Deleting {} orphan blocks.", invalidBlocks.size());
removeBlocks(invalidBlocks, true);
} else {
LOG.warn("Restart Alluxio master with {}=true to delete the blocks and repair the system.",
PropertyKey.Name.MASTER_STARTUP_BLOCK_INTEGRITY_CHECK_ENABLED);
}
}
}
  /**
   * Returns a new block container id, journaling a fresh reservation window
   * whenever the previously journaled one is exhausted.
   *
   * @return a new block container id
   */
  @Override
  public long getNewContainerId() throws UnavailableException {
    synchronized (mBlockContainerIdGenerator) {
      long containerId = mBlockContainerIdGenerator.getNewContainerId();
      if (containerId < mJournaledNextContainerId) {
        // This container id is within the reserved container ids, so it is safe to return the id
        // without having to write anything to the journal.
        return containerId;
      }
      // This container id is not safe with respect to the last journaled container id.
      // Therefore, journal the new state of the container id. This implies that when a master
      // crashes, the container ids within the reservation which have not been used yet will
      // never be used. This is a tradeoff between fully utilizing the container id space, vs.
      // improving master scalability.
      // TODO(gpang): investigate if dynamic reservation sizes could be effective
      // Set the next id to journal with a reservation of container ids, to avoid having to write
      // to the journal for ids within the reservation.
      mJournaledNextContainerId = containerId + CONTAINER_ID_RESERVATION_SIZE;
      try (JournalContext journalContext = createJournalContext()) {
        // This must be flushed while holding the lock on mBlockContainerIdGenerator, in order to
        // prevent subsequent calls to return ids that have not been journaled and flushed.
        journalContext.append(getContainerIdJournalEntry());
      }
      return containerId;
    }
  }
/**
* @return a {@link JournalEntry} representing the state of the container id generator
*/
private JournalEntry getContainerIdJournalEntry() {
synchronized (mBlockContainerIdGenerator) {
BlockContainerIdGeneratorEntry blockContainerIdGenerator =
BlockContainerIdGeneratorEntry.newBuilder().setNextContainerId(mJournaledNextContainerId)
.build();
return JournalEntry.newBuilder().setBlockContainerIdGenerator(blockContainerIdGenerator)
.build();
}
}
  // TODO(binfan): check the logic is correct or not when commitBlock is a retry
  @Override
  public void commitBlock(long workerId, long usedBytesOnTier, String tierAlias,
      String mediumType, long blockId, long length)
      throws NotFoundException, UnavailableException {
    // Records that a worker now holds a committed copy of the block, creating
    // and journaling the block metadata on first commit.
    LOG.debug("Commit block from workerId: {}, usedBytesOnTier: {}, blockId: {}, length: {}",
        workerId, usedBytesOnTier, blockId, length);
    MasterWorkerInfo worker = mWorkers.getFirstByField(ID_INDEX, workerId);
    // TODO(peis): Check lost workers as well.
    if (worker == null) {
      throw new NotFoundException(ExceptionMessage.NO_WORKER_FOUND.getMessage(workerId));
    }
    try (JournalContext journalContext = createJournalContext()) {
      // Lock the worker metadata here to preserve the lock order
      // The worker metadata must be locked before the blocks
      try (LockResource lr = worker.lockWorkerMeta(
          EnumSet.of(WorkerMetaLockSection.USAGE, WorkerMetaLockSection.BLOCKS), false)) {
        try (LockResource r = lockBlock(blockId)) {
          Optional<BlockMeta> block = mBlockStore.getBlock(blockId);
          if (!block.isPresent() || block.get().getLength() != length) {
            if (block.isPresent() && block.get().getLength() != Constants.UNKNOWN_SIZE) {
              // A block with a known length must never change size.
              LOG.warn("Rejecting attempt to change block length from {} to {}",
                  block.get().getLength(), length);
            } else {
              mBlockStore.putBlock(blockId, BlockMeta.newBuilder().setLength(length).build());
              BlockInfoEntry blockInfo =
                  BlockInfoEntry.newBuilder().setBlockId(blockId).setLength(length).build();
              journalContext.append(JournalEntry.newBuilder().setBlockInfo(blockInfo).build());
            }
          }
          // Update the block metadata with the new worker location.
          mBlockStore.addLocation(blockId, BlockLocation.newBuilder()
              .setWorkerId(workerId)
              .setTier(tierAlias)
              .setMediumType(mediumType)
              .build());
          // This worker has this block, so it is no longer lost.
          mLostBlocks.remove(blockId);
          // Update the worker information for this new block.
          // TODO(binfan): when retry commitBlock on master is expected, make sure metrics are not
          // double counted.
          worker.addBlock(blockId);
          worker.updateUsedBytes(tierAlias, usedBytesOnTier);
        }
      }
      worker.updateLastUpdatedTimeMs();
    }
  }
  @Override
  public void commitBlockInUFS(long blockId, long length) throws UnavailableException {
    // Creates metadata for a block that exists only in the UFS; no worker
    // location is recorded. Idempotent: existing metadata is left untouched.
    LOG.debug("Commit block in ufs. blockId: {}, length: {}", blockId, length);
    try (JournalContext journalContext = createJournalContext();
        LockResource r = lockBlock(blockId)) {
      if (mBlockStore.getBlock(blockId).isPresent()) {
        // Block metadata already exists, so do not need to create a new one.
        return;
      }
      mBlockStore.putBlock(blockId, BlockMeta.newBuilder().setLength(length).build());
      BlockInfoEntry blockInfo =
          BlockInfoEntry.newBuilder().setBlockId(blockId).setLength(length).build();
      journalContext.append(JournalEntry.newBuilder().setBlockInfo(blockInfo).build());
    }
  }
  @Override
  public BlockInfo getBlockInfo(long blockId) throws BlockInfoException, UnavailableException {
    // Translate the "no metadata" case into a BlockInfoException for callers.
    return generateBlockInfo(blockId)
        .orElseThrow(() -> new BlockInfoException(ExceptionMessage.BLOCK_META_NOT_FOUND, blockId));
  }
@Override
public List<BlockInfo> getBlockInfoList(List<Long> blockIds) throws UnavailableException {
List<BlockInfo> ret = new ArrayList<>(blockIds.size());
for (long blockId : blockIds) {
generateBlockInfo(blockId).ifPresent(info -> ret.add(info));
}
return ret;
}
@Override
public Map<String, Long> getTotalBytesOnTiers() {
Map<String, Long> ret = new HashMap<>();
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
for (Map.Entry<String, Long> entry : worker.getTotalBytesOnTiers().entrySet()) {
Long total = ret.get(entry.getKey());
ret.put(entry.getKey(), (total == null ? 0L : total) + entry.getValue());
}
}
}
return ret;
}
@Override
public Map<String, Long> getUsedBytesOnTiers() {
Map<String, Long> ret = new HashMap<>();
for (MasterWorkerInfo worker : mWorkers) {
try (LockResource r = worker.lockWorkerMeta(
EnumSet.of(WorkerMetaLockSection.USAGE), true)) {
for (Map.Entry<String, Long> entry : worker.getUsedBytesOnTiers().entrySet()) {
Long used = ret.get(entry.getKey());
ret.put(entry.getKey(), (used == null ? 0L : used) + entry.getValue());
}
}
}
return ret;
}
/**
* Find a worker which is considered lost or just gets its id.
* @param workerNetAddress the address used to find a worker
* @return a {@link MasterWorkerInfo} which is presented in master but not registered,
* or null if not worker is found.
*/
@Nullable
private MasterWorkerInfo findUnregisteredWorker(WorkerNetAddress workerNetAddress) {
for (IndexedSet<MasterWorkerInfo> workers: Arrays.asList(mTempWorkers, mLostWorkers)) {
MasterWorkerInfo worker = workers.getFirstByField(ADDRESS_INDEX, workerNetAddress);
if (worker != null) {
return worker;
}
}
return null;
}
/**
* Find a worker which is considered lost or just gets its id.
* @param workerId the id used to find a worker
* @return a {@link MasterWorkerInfo} which is presented in master but not registered,
* or null if not worker is found.
*/
@Nullable
private MasterWorkerInfo findUnregisteredWorker(long workerId) {
for (IndexedSet<MasterWorkerInfo> workers: Arrays.asList(mTempWorkers, mLostWorkers)) {
MasterWorkerInfo worker = workers.getFirstByField(ID_INDEX, workerId);
if (worker != null) {
return worker;
}
}
return null;
}
  /**
   * Re-register a lost worker or complete registration after getting a worker id.
   * This method requires no locking on {@link MasterWorkerInfo} because it is only
   * reading final fields.
   *
   * @param workerId the worker id to register
   * @return the worker moved into the live set, or null if the id was not found
   *         in the temp or lost sets
   */
  @Nullable
  private MasterWorkerInfo recordWorkerRegistration(long workerId) {
    for (IndexedSet<MasterWorkerInfo> workers: Arrays.asList(mTempWorkers, mLostWorkers)) {
      MasterWorkerInfo worker = workers.getFirstByField(ID_INDEX, workerId);
      if (worker == null) {
        continue;
      }
      // Add to the live set before removing from the old set.
      mWorkers.add(worker);
      workers.remove(worker);
      if (workers == mLostWorkers) {
        // A previously lost worker came back: notify the listeners.
        for (Consumer<Address> function : mLostWorkerFoundListeners) {
          // The worker address is final, no need for locking here
          function.accept(new Address(worker.getWorkerAddress().getHost(),
              worker.getWorkerAddress().getRpcPort()));
        }
        LOG.warn("A lost worker {} has requested its old id {}.",
            worker.getWorkerAddress(), worker.getId());
      }
      return worker;
    }
    return null;
  }
  @Override
  public long getWorkerId(WorkerNetAddress workerNetAddress) {
    // Returns the existing id for a known address, or mints a fresh random id
    // and parks the worker in mTempWorkers until registration completes.
    MasterWorkerInfo existingWorker = mWorkers.getFirstByField(ADDRESS_INDEX, workerNetAddress);
    if (existingWorker != null) {
      // This worker address is already mapped to a worker id.
      long oldWorkerId = existingWorker.getId();
      LOG.warn("The worker {} already exists as id {}.", workerNetAddress, oldWorkerId);
      return oldWorkerId;
    }
    existingWorker = findUnregisteredWorker(workerNetAddress);
    if (existingWorker != null) {
      return existingWorker.getId();
    }
    // Generate a new worker id.
    // Retry on the (unlikely) collision with an id already in mTempWorkers.
    long workerId = IdUtils.getRandomNonNegativeLong();
    while (!mTempWorkers.add(new MasterWorkerInfo(workerId, workerNetAddress))) {
      workerId = IdUtils.getRandomNonNegativeLong();
    }
    LOG.info("getWorkerId(): WorkerNetAddress: {} id: {}", workerNetAddress, workerId);
    return workerId;
  }
  @Override
  public void workerRegister(long workerId, List<String> storageTiers,
      Map<String, Long> totalBytesOnTiers, Map<String, Long> usedBytesOnTiers,
      Map<BlockLocation, List<Long>> currentBlocksOnLocation,
      Map<String, StorageList> lostStorage, RegisterWorkerPOptions options)
      throws NotFoundException {
    // Completes registration for a worker that already obtained an id via
    // getWorkerId(), reconciling the master's block metadata with the
    // worker-reported block list.
    MasterWorkerInfo worker = mWorkers.getFirstByField(ID_INDEX, workerId);
    if (worker == null) {
      // The worker may still be in the temp or lost set (first or re-registration).
      worker = findUnregisteredWorker(workerId);
    }
    if (worker == null) {
      throw new NotFoundException(ExceptionMessage.NO_WORKER_FOUND.getMessage(workerId));
    }
    // Gather all blocks on this worker.
    HashSet<Long> blocks = new HashSet<>();
    for (List<Long> blockIds : currentBlocksOnLocation.values()) {
      blocks.addAll(blockIds);
    }
    // Lock all the locks
    try (LockResource r = worker.lockWorkerMeta(EnumSet.of(
        WorkerMetaLockSection.STATUS,
        WorkerMetaLockSection.USAGE,
        WorkerMetaLockSection.BLOCKS), false)) {
      // Detect any lost blocks on this worker.
      Set<Long> removedBlocks = worker.register(mGlobalStorageTierAssoc, storageTiers,
          totalBytesOnTiers, usedBytesOnTiers, blocks);
      processWorkerRemovedBlocks(worker, removedBlocks);
      processWorkerAddedBlocks(worker, currentBlocksOnLocation);
      processWorkerOrphanedBlocks(worker);
      worker.addLostStorage(lostStorage);
    }
    if (options.getConfigsCount() > 0) {
      // Notify registered listeners of the worker's reported configuration.
      for (BiConsumer<Address, List<ConfigProperty>> function : mWorkerRegisteredListeners) {
        WorkerNetAddress workerAddress = worker.getWorkerAddress();
        function.accept(new Address(workerAddress.getHost(), workerAddress.getRpcPort()),
            options.getConfigsList());
      }
    }
    recordWorkerRegistration(workerId);
    // Update the TS at the end of the process
    worker.updateLastUpdatedTimeMs();
    // Invalidate cache to trigger new build of worker info list
    mWorkerInfoCache.invalidate(WORKER_INFO_CACHE_KEY);
    Metrics.TOTAL_BLOCKS.inc(currentBlocksOnLocation.size());
    LOG.info("registerWorker(): {}", worker);
  }
  @Override
  public Command workerHeartbeat(long workerId, Map<String, Long> capacityBytesOnTiers,
      Map<String, Long> usedBytesOnTiers, List<Long> removedBlockIds,
      Map<BlockLocation, List<Long>> addedBlocks,
      Map<String, StorageList> lostStorage,
      List<Metric> metrics) {
    // Applies a periodic worker heartbeat and returns the next command the
    // worker should execute (Register, Free, or Nothing).
    MasterWorkerInfo worker = mWorkers.getFirstByField(ID_INDEX, workerId);
    if (worker == null) {
      // Unknown worker id: tell the worker to re-register.
      LOG.warn("Could not find worker id: {} for heartbeat.", workerId);
      return Command.newBuilder().setCommandType(CommandType.Register).build();
    }
    // Update the TS before the heartbeat so even if the worker heartbeat processing
    // is time-consuming or triggers GC, the worker does not get marked as lost
    // by the LostWorkerDetectionHeartbeatExecutor
    worker.updateLastUpdatedTimeMs();
    // The address is final, no need for locking
    processWorkerMetrics(worker.getWorkerAddress().getHost(), metrics);
    Command workerCommand = null;
    try (LockResource r = worker.lockWorkerMeta(
        EnumSet.of(WorkerMetaLockSection.USAGE, WorkerMetaLockSection.BLOCKS), false)) {
      worker.addLostStorage(lostStorage);
      if (capacityBytesOnTiers != null) {
        worker.updateCapacityBytes(capacityBytesOnTiers);
      }
      worker.updateUsedBytes(usedBytesOnTiers);
      // Technically, 'worker' should be confirmed to still be in the data structure. Lost worker
      // detection can remove it. However, we are intentionally ignoring this race, since the worker
      // will just re-register regardless.
      processWorkerRemovedBlocks(worker, removedBlockIds);
      processWorkerAddedBlocks(worker, addedBlocks);
      List<Long> toRemoveBlocks = worker.getToRemoveBlocks();
      Metrics.TOTAL_BLOCKS.inc(addedBlocks.size() - removedBlockIds.size());
      if (toRemoveBlocks.isEmpty()) {
        workerCommand = Command.newBuilder().setCommandType(CommandType.Nothing).build();
      } else {
        // Ask the worker to free the blocks previously marked for removal.
        workerCommand = Command.newBuilder().setCommandType(CommandType.Free)
            .addAllData(toRemoveBlocks).build();
      }
    }
    // Update the TS again
    worker.updateLastUpdatedTimeMs();
    // workerCommand is always assigned in the locked section above.
    Preconditions.checkNotNull(workerCommand, "Worker heartbeat response command is null!");
    return workerCommand;
  }
private void processWorkerMetrics(String hostname, List<Metric> metrics) {
if (metrics.isEmpty()) {
return;
}
mMetricsMaster.workerHeartbeat(hostname, metrics);
}
  /**
   * Updates the worker and block metadata for blocks removed from a worker.
   *
   * You should lock externally with {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}
   * with {@link WorkerMetaLockSection#BLOCKS} specified.
   * An exclusive lock is required.
   *
   * @param workerInfo The worker metadata object
   * @param removedBlockIds A list of block ids removed from the worker
   */
  private void processWorkerRemovedBlocks(MasterWorkerInfo workerInfo,
      Collection<Long> removedBlockIds) {
    for (long removedBlockId : removedBlockIds) {
      // Per-block locking; the caller already holds the worker meta lock, preserving the
      // worker-then-block lock order used elsewhere in this class.
      try (LockResource r = lockBlock(removedBlockId)) {
        Optional<BlockMeta> block = mBlockStore.getBlock(removedBlockId);
        if (block.isPresent()) {
          LOG.debug("Block {} is removed on worker {}.", removedBlockId, workerInfo.getId());
          mBlockStore.removeLocation(removedBlockId, workerInfo.getId());
          // If this was the last known replica, the block is now lost.
          if (mBlockStore.getLocations(removedBlockId).size() == 0) {
            mLostBlocks.add(removedBlockId);
          }
        }
        // Remove the block even if its metadata has been deleted already.
        workerInfo.removeBlock(removedBlockId);
      }
    }
    // Keep the cluster-wide block counter in sync with the removals processed above.
    Metrics.TOTAL_BLOCKS.dec(removedBlockIds.size());
  }
/**
* Updates the worker and block metadata for blocks added to a worker.
*
* You should lock externally with {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}
* with {@link WorkerMetaLockSection#BLOCKS} specified.
* An exclusive lock is required.
*
* @param workerInfo The worker metadata object
* @param addedBlockIds A mapping from storage tier alias to a list of block ids added
*/
private void processWorkerAddedBlocks(MasterWorkerInfo workerInfo,
Map<BlockLocation, List<Long>> addedBlockIds) {
for (Map.Entry<BlockLocation, List<Long>> entry : addedBlockIds.entrySet()) {
for (long blockId : entry.getValue()) {
try (LockResource r = lockBlock(blockId)) {
Optional<BlockMeta> block = mBlockStore.getBlock(blockId);
if (block.isPresent()) {
workerInfo.addBlock(blockId);
BlockLocation location = entry.getKey();
Preconditions.checkState(location.getWorkerId() == workerInfo.getId(),
String.format("BlockLocation has a different workerId %s from "
+ "the request sender's workerId %s!",
location.getWorkerId(), workerInfo.getId()));
mBlockStore.addLocation(blockId, location);
mLostBlocks.remove(blockId);
} else {
LOG.warn("Invalid block: {} from worker {}.", blockId,
workerInfo.getWorkerAddress().getHost());
}
}
}
}
}
/**
* Checks the blocks on the worker. For blocks not present in Alluxio anymore,
* they will be marked to-be-removed from the worker.
*
* You should lock externally with {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}
* with {@link WorkerMetaLockSection#USAGE} specified.
* A shared lock is required.
*
* @param workerInfo The worker metadata object
*/
private void processWorkerOrphanedBlocks(MasterWorkerInfo workerInfo) {
for (long block : workerInfo.getBlocks()) {
if (!mBlockStore.getBlock(block).isPresent()) {
LOG.info("Requesting delete for orphaned block: {} from worker {}.", block,
workerInfo.getWorkerAddress().getHost());
workerInfo.updateToRemovedBlock(true, block);
}
}
}
  @Override
  public boolean isBlockLost(long blockId) {
    // A block is "lost" when it is tracked in mLostBlocks, i.e. no live worker
    // currently reports a replica of it.
    return mLostBlocks.contains(blockId);
  }
  @Override
  public Iterator<Long> getLostBlocksIterator() {
    // Exposes the lost-block ids directly; iteration semantics under concurrent
    // modification depend on mLostBlocks' concrete type (declared elsewhere).
    return mLostBlocks.iterator();
  }
  @Override
  public int getLostBlocksCount() {
    // Current number of blocks with no known live replica.
    return mLostBlocks.size();
  }
  /**
   * Generates block info, including worker locations, for a block id.
   * This requires no locks on the {@link MasterWorkerInfo} because it is only reading
   * final fields.
   *
   * @param blockId a block id
   * @return optional block info, empty if the block does not exist
   * @throws UnavailableException if the master is still in safe mode
   */
  private Optional<BlockInfo> generateBlockInfo(long blockId) throws UnavailableException {
    if (mSafeModeManager.isInSafeMode()) {
      throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
    }
    BlockMeta block;
    List<BlockLocation> blockLocations;
    // Snapshot the block metadata and its locations while holding the block lock, then
    // release it before touching worker metadata below.
    try (LockResource r = lockBlock(blockId)) {
      Optional<BlockMeta> blockOpt = mBlockStore.getBlock(blockId);
      if (!blockOpt.isPresent()) {
        return Optional.empty();
      }
      block = blockOpt.get();
      blockLocations = new ArrayList<>(mBlockStore.getLocations(blockId));
    }
    // Sort the block locations by their alias ordinal in the master storage tier mapping
    Collections.sort(blockLocations,
        Comparator.comparingInt(o -> mGlobalStorageTierAssoc.getOrdinal(o.getTier())));
    List<alluxio.wire.BlockLocation> locations = new ArrayList<>();
    for (BlockLocation location : blockLocations) {
      MasterWorkerInfo workerInfo =
          mWorkers.getFirstByField(ID_INDEX, location.getWorkerId());
      if (workerInfo != null) {
        // worker metadata is intentionally not locked here because:
        // - it would be an incorrect order (correct order is lock worker first, then block)
        // - only uses getters of final variables
        locations.add(new alluxio.wire.BlockLocation().setWorkerId(location.getWorkerId())
            .setWorkerAddress(workerInfo.getWorkerAddress())
            .setTierAlias(location.getTier()).setMediumType(location.getMediumType()));
      }
    }
    return Optional.of(
        new BlockInfo().setBlockId(blockId).setLength(block.getLength()).setLocations(locations));
  }
  @Override
  public void reportLostBlocks(List<Long> blockIds) {
    // Marks the given blocks as lost; no validation of the ids is performed here.
    mLostBlocks.addAll(blockIds);
  }
  @Override
  public Set<Class<? extends Server>> getDependencies() {
    // Returns the DEPS constant declared elsewhere in this class; callers should treat
    // the returned set as read-only.
    return DEPS;
  }
  /**
   * Lost worker periodic check. On each heartbeat, any worker whose last update is older
   * than the configured worker timeout is processed as lost.
   */
  private final class LostWorkerDetectionHeartbeatExecutor implements HeartbeatExecutor {
    /**
     * Constructs a new {@link LostWorkerDetectionHeartbeatExecutor}.
     */
    public LostWorkerDetectionHeartbeatExecutor() {}
    @Override
    public void heartbeat() {
      long masterWorkerTimeoutMs = ServerConfiguration.getMs(PropertyKey.MASTER_WORKER_TIMEOUT_MS);
      for (MasterWorkerInfo worker : mWorkers) {
        // Exclusive BLOCKS lock: processLostWorker (called below) mutates block metadata
        // and documents this lock as a precondition.
        try (LockResource r = worker.lockWorkerMeta(
            EnumSet.of(WorkerMetaLockSection.BLOCKS), false)) {
          // Reading the last-updated timestamp needs no additional locking because the
          // field is atomic.
          final long lastUpdate = mClock.millis() - worker.getLastUpdatedTimeMs();
          if (lastUpdate > masterWorkerTimeoutMs) {
            LOG.error("The worker {}({}) timed out after {}ms without a heartbeat!", worker.getId(),
                worker.getWorkerAddress(), lastUpdate);
            processLostWorker(worker);
          }
        }
      }
    }
    @Override
    public void close() {
      // Nothing to clean up
    }
  }
  /**
   * Forces all workers to be lost. This should only be used for testing.
   */
  @VisibleForTesting
  public void forgetAllWorkers() {
    for (MasterWorkerInfo worker : mWorkers) {
      // Exclusive BLOCKS lock, as required by processLostWorker's documented contract.
      try (LockResource r = worker.lockWorkerMeta(
          EnumSet.of(WorkerMetaLockSection.BLOCKS), false)) {
        processLostWorker(worker);
      }
    }
  }
  /**
   * Updates the metadata for the specified lost worker.
   *
   * You should lock externally with {@link MasterWorkerInfo#lockWorkerMeta(EnumSet, boolean)}
   * with {@link WorkerMetaLockSection#BLOCKS} specified.
   * An exclusive lock is required.
   *
   * @param worker the worker metadata
   */
  private void processLostWorker(MasterWorkerInfo worker) {
    // Move the worker from the live registry to the lost registry.
    mLostWorkers.add(worker);
    mWorkers.remove(worker);
    // Notify registered listeners of the loss, identifying the worker by host and RPC port.
    WorkerNetAddress workerAddress = worker.getWorkerAddress();
    for (Consumer<Address> function : mWorkerLostListeners) {
      function.accept(new Address(workerAddress.getHost(), workerAddress.getRpcPort()));
    }
    // Drop every block location this worker was holding; blocks with no remaining
    // replica become lost (see processWorkerRemovedBlocks).
    processWorkerRemovedBlocks(worker, worker.getBlocks());
  }
  /**
   * Acquires the lock guarding the given block id from {@code mBlockLocks}.
   *
   * @param blockId the block id to lock
   * @return a {@link LockResource} that releases the lock on close
   */
  LockResource lockBlock(long blockId) {
    return new LockResource(mBlockLocks.get(blockId));
  }
/**
* Selects the MasterWorkerInfo from workerInfoSet whose host or related IP address
* exists in addresses.
*
* @param addresses the address set that user passed in
* @param workerInfoSet the MasterWorkerInfo set to select info from
* @param workerNames the supported worker names
*/
private Set<MasterWorkerInfo> selectInfoByAddress(Set<String> addresses,
Set<MasterWorkerInfo> workerInfoSet, Set<String> workerNames) {
return workerInfoSet.stream().filter(info -> {
String host = info.getWorkerAddress().getHost();
workerNames.add(host);
String ip = null;
try {
ip = NetworkAddressUtils.resolveIpAddress(host);
workerNames.add(ip);
} catch (UnknownHostException e) {
// The host may already be an IP address
}
if (addresses.contains(host)) {
addresses.remove(host);
return true;
}
if (ip != null) {
if (addresses.contains(ip)) {
addresses.remove(ip);
return true;
}
}
return false;
}).collect(Collectors.toSet());
}
  @Override
  public void registerLostWorkerFoundListener(Consumer<Address> function) {
    // Callback invoked when a previously-lost worker re-registers.
    mLostWorkerFoundListeners.add(function);
  }
  @Override
  public void registerWorkerLostListener(Consumer<Address> function) {
    // Callback invoked when a worker is declared lost (see processLostWorker).
    mWorkerLostListeners.add(function);
  }
  @Override
  public void registerNewWorkerConfListener(BiConsumer<Address, List<ConfigProperty>> function) {
    // Callback invoked with a newly registered worker's address and configuration.
    mWorkerRegisteredListeners.add(function);
  }
/**
* Class that contains metrics related to BlockMaster.
*/
public static final class Metrics {
private static final Counter TOTAL_BLOCKS =
MetricsSystem.counter(MetricKey.MASTER_TOTAL_BLOCKS.getName());
/**
* Registers metric gauges.
*
* @param master the block master handle
*/
@VisibleForTesting
public static void registerGauges(final BlockMaster master) {
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_CAPACITY_TOTAL.getName(),
master::getCapacityBytes);
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_CAPACITY_USED.getName(),
master::getUsedBytes);
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_CAPACITY_FREE.getName(),
() -> master.getCapacityBytes() - master.getUsedBytes());
for (int i = 0; i < master.getGlobalStorageTierAssoc().size(); i++) {
String alias = master.getGlobalStorageTierAssoc().getAlias(i);
// TODO(lu) Add template to dynamically construct metric key
MetricsSystem.registerGaugeIfAbsent(
MetricKey.CLUSTER_CAPACITY_TOTAL.getName() + MetricInfo.TIER + alias,
new Gauge<Long>() {
@Override
public Long getValue() {
return master.getTotalBytesOnTiers().getOrDefault(alias, 0L);
}
});
MetricsSystem.registerGaugeIfAbsent(
MetricKey.CLUSTER_CAPACITY_USED.getName() + MetricInfo.TIER + alias, new Gauge<Long>() {
@Override
public Long getValue() {
return master.getUsedBytesOnTiers().getOrDefault(alias, 0L);
}
});
MetricsSystem.registerGaugeIfAbsent(
MetricKey.CLUSTER_CAPACITY_FREE.getName() + MetricInfo.TIER + alias, new Gauge<Long>() {
@Override
public Long getValue() {
return master.getTotalBytesOnTiers().getOrDefault(alias, 0L)
- master.getUsedBytesOnTiers().getOrDefault(alias, 0L);
}
});
}
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_WORKERS.getName(),
new Gauge<Integer>() {
@Override
public Integer getValue() {
return master.getWorkerCount();
}
});
MetricsSystem.registerGaugeIfAbsent(MetricKey.CLUSTER_LOST_WORKERS.getName(),
new Gauge<Integer>() {
@Override
public Integer getValue() {
return master.getLostWorkerCount();
}
});
}
private Metrics() {} // prevent instantiation
}
}
|
Change logging on unrecognized blocks to DEBUG
I've monitored excessive memory usage incurred by logging these two
messages. Below is a test in a test cluster with master heap of 12G
(-Xmx=12g -Xms=12g).
I simulated registerWorker RPCs where all the blocks are not present on
the master. This can happen when a worker registers with blocks that
have been removed while the worker lost heartbeat. Each batch of test
invokes 20 concurrent RPCs and records the average time taken.
Before the change - logging this message per block. RPCs took >150s to
finish on average.
```
$ jstat -gcutil 12182 10000
S0 S1 E O M CCS YGC YGCT FGC FGCT GCT
52.54 0.00 72.40 1.19 93.46 90.07 4 0.960 0 0.000 0.960
100.00 100.00 100.00 9.09 94.80 92.34 14 3.725 0 0.000 3.725
...
0.00 75.80 35.72 40.30 94.70 92.20 1345 111.688 4 0.194 111.882
0.00 75.56 92.48 40.51 94.70 92.20 1351 112.070 4 0.194 112.265
```
After the change - not logging this message. RPCs took 57s to finish on
average.
```
$ jstat -gcutil 25566 10000
S0 S1 E O M CCS YGC YGCT FGC FGCT GCT
0.00 66.07 28.13 0.20 94.78 91.53 3 0.290 0 0.000 0.290
...
100.00 0.00 70.44 79.94 94.97 93.37 205 57.866 3 30.672 88.538
```
This also addresses https://github.com/Alluxio/alluxio/issues/13143
pr-link: Alluxio/alluxio#13520
change-id: cid-9936c90eeb64ac02d346e54d831f0110b2aeb4f9
|
core/server/master/src/main/java/alluxio/master/block/DefaultBlockMaster.java
|
Change logging on unrecognized blocks to DEBUG
|
<ide><path>ore/server/master/src/main/java/alluxio/master/block/DefaultBlockMaster.java
<ide> */
<ide> private void processWorkerAddedBlocks(MasterWorkerInfo workerInfo,
<ide> Map<BlockLocation, List<Long>> addedBlockIds) {
<add> long invalidBlockCount = 0;
<ide> for (Map.Entry<BlockLocation, List<Long>> entry : addedBlockIds.entrySet()) {
<ide> for (long blockId : entry.getValue()) {
<ide> try (LockResource r = lockBlock(blockId)) {
<ide> mBlockStore.addLocation(blockId, location);
<ide> mLostBlocks.remove(blockId);
<ide> } else {
<del> LOG.warn("Invalid block: {} from worker {}.", blockId,
<add> invalidBlockCount++;
<add> LOG.debug("Invalid block: {} from worker {}.", blockId,
<ide> workerInfo.getWorkerAddress().getHost());
<ide> }
<ide> }
<ide> }
<ide> }
<add> LOG.warn("{} invalid blocks found on worker {} in total", invalidBlockCount,
<add> workerInfo.getWorkerAddress().getHost());
<ide> }
<ide>
<ide> /**
<ide> * @param workerInfo The worker metadata object
<ide> */
<ide> private void processWorkerOrphanedBlocks(MasterWorkerInfo workerInfo) {
<add> long orphanedBlockCount = 0;
<ide> for (long block : workerInfo.getBlocks()) {
<ide> if (!mBlockStore.getBlock(block).isPresent()) {
<del> LOG.info("Requesting delete for orphaned block: {} from worker {}.", block,
<add> orphanedBlockCount++;
<add> LOG.debug("Requesting delete for orphaned block: {} from worker {}.", block,
<ide> workerInfo.getWorkerAddress().getHost());
<ide> workerInfo.updateToRemovedBlock(true, block);
<ide> }
<ide> }
<add> LOG.warn("{} blocks marked as orphaned from worker {}", orphanedBlockCount,
<add> workerInfo.getWorkerAddress().getHost());
<ide> }
<ide>
<ide> @Override
|
|
Java
|
lgpl-2.1
|
f6d9439b8d29a4d6b9cf1b8d9920bfb5b13671eb
| 0 |
bitrepository/reference,bitrepository/reference,bitrepository/reference,bitrepository/reference
|
package org.bitrepository.integrityservice.utils;
/**
 * Util class for handling formatting of datasizes.
 *
 * Units up to exabytes are supported. A {@code long} cannot represent one zettabyte
 * (2^70 bytes), so larger unit constants would overflow (2^70 wraps to 0 in a long,
 * making any comparison against it meaningless and the division by it throw).
 */
public class FileSizeUtils {
    private static final int unitSize = 1024;
    private static final long kiloSize = unitSize;
    private static final long megaSize = kiloSize * unitSize;
    private static final long gigaSize = megaSize * unitSize;
    private static final long teraSize = gigaSize * unitSize;
    private static final long petaSize = teraSize * unitSize;
    private static final long exaSize = petaSize * unitSize;

    private static final String bytePostfix = "B";
    private static final String kiloPostfix = "KB";
    private static final String megaPostfix = "MB";
    private static final String gigaPostfix = "GB";
    private static final String teraPostfix = "TB";
    private static final String petaPostfix = "PB";
    private static final String exaPostfix = "EB";

    /**
     * Formats a byte count as a whole number of the largest unit that fits,
     * e.g. {@code 2048 -> "2KB"}. Fractions are truncated, not rounded.
     *
     * @param size the size in bytes
     * @return the human readable representation of the size
     */
    public static String toHumanShort(long size) {
        // Branch on exaSize here (the original compared against petaSize twice, so the
        // exabyte formatter was applied to petabyte-sized values).
        if (size >= exaSize) {
            return formatShortExa(size);
        } else if (size >= petaSize) {
            return formatShortPeta(size);
        } else if (size >= teraSize) {
            return formatShortTera(size);
        } else if (size >= gigaSize) {
            return formatShortGiga(size);
        } else if (size >= megaSize) {
            return formatShortMega(size);
        } else if (size >= kiloSize) {
            return formatShortKilo(size);
        } else {
            return formatShortByte(size);
        }
    }

    private static String formatShortExa(long size) {
        int wholeEB = (int) (size / exaSize);
        return wholeEB + exaPostfix;
    }

    private static String formatShortPeta(long size) {
        int wholePB = (int) (size / petaSize);
        return wholePB + petaPostfix;
    }

    private static String formatShortTera(long size) {
        int wholeTB = (int) (size / teraSize);
        return wholeTB + teraPostfix;
    }

    private static String formatShortGiga(long size) {
        int wholeGB = (int) (size / gigaSize);
        return wholeGB + gigaPostfix;
    }

    private static String formatShortMega(long size) {
        int wholeMB = (int) (size / megaSize);
        return wholeMB + megaPostfix;
    }

    private static String formatShortKilo(long size) {
        int wholeKB = (int) (size / kiloSize);
        return wholeKB + kiloPostfix;
    }

    private static String formatShortByte(long size) {
        return size + bytePostfix;
    }
}
|
bitrepository-integrity-service/src/main/java/org/bitrepository/integrityservice/utils/FileSizeUtils.java
|
package org.bitrepository.integrityservice.utils;
/**
 * Util class for handling formatting of datasizes.
 */
public class FileSizeUtils {
    private static final int unitSize = 1024;
    private static final long kiloSize = unitSize;
    private static final long megaSize = kiloSize * unitSize;
    private static final long gigaSize = megaSize * unitSize;
    private static final long teraSize = gigaSize * unitSize;
    private static final long petaSize = teraSize * unitSize;
    private static final long exaSize = petaSize * unitSize;
    // NOTE(review): exaSize is 2^60; multiplying by 1024 again overflows long. 2^70 wraps
    // to 0, so zettaSize == 0 and yottaSize == 0. Consequently `size >= yottaSize` below
    // is true for every non-negative size and formatShortYotta divides by zero.
    private static final long zettaSize = exaSize * unitSize;
    private static final long yottaSize = zettaSize * unitSize;
    private static final String bytePostfix = "B";
    private static final String kiloPostfix = "KB";
    private static final String megaPostfix = "MB";
    private static final String gigaPostfix = "GB";
    private static final String teraPostfix = "TB";
    private static final String petaPostfix = "PB";
    private static final String exaPostfix = "EB";
    private static final String zettaPostfix = "ZB";
    private static final String yottaPostfix = "YB";
    public static String toHumanShort(long size) {
        if(size >= yottaSize) {
            return formatShortYotta(size);
        } else if(size >= zettaSize) {
            return formatShortZetta(size);
        // NOTE(review): this branch compares against petaSize but formats as exabytes;
        // it should compare against exaSize (and it also shadows the next branch, which
        // repeats the same petaSize test).
        } else if(size >= petaSize) {
            return formatShortExa(size);
        } else if(size >= petaSize) {
            return formatShortPeta(size);
        } else if(size >= teraSize) {
            return formatShortTera(size);
        } else if(size >= gigaSize) {
            return formatShortGiga(size);
        } else if(size >= megaSize) {
            return formatShortMega(size);
        } else if(size >= kiloSize) {
            return formatShortKilo(size);
        } else {
            return formatShortByte(size);
        }
    }
    // NOTE(review): yottaSize is 0 due to the overflow above, so this division throws
    // ArithmeticException on every call path that reaches it.
    private static String formatShortYotta(long size) {
        int wholeZB = (int) (size / yottaSize);
        return wholeZB + yottaPostfix;
    }
    private static String formatShortZetta(long size) {
        int wholeZB = (int) (size / zettaSize);
        return wholeZB + zettaPostfix;
    }
    private static String formatShortExa(long size) {
        int wholeEB = (int) (size / exaSize);
        return wholeEB + exaPostfix;
    }
    private static String formatShortPeta(long size) {
        int wholePB = (int) (size / petaSize);
        return wholePB + petaPostfix;
    }
    private static String formatShortTera(long size) {
        int wholeTB = (int) (size / teraSize);
        return wholeTB + teraPostfix;
    }
    private static String formatShortGiga(long size) {
        int wholeGB = (int) (size / gigaSize);
        return wholeGB + gigaPostfix;
    }
    private static String formatShortMega(long size) {
        int wholeMB = (int) (size / megaSize);
        return wholeMB + megaPostfix;
    }
    private static String formatShortKilo(long size) {
        int wholeKB = (int) (size / kiloSize);
        return wholeKB + kiloPostfix;
    }
    private static String formatShortByte(long size) {
        return size + bytePostfix;
    }
}
|
remove yotta scale from FileSizeUtils
|
bitrepository-integrity-service/src/main/java/org/bitrepository/integrityservice/utils/FileSizeUtils.java
|
remove yotta scale from FileSizeUtils
|
<ide><path>itrepository-integrity-service/src/main/java/org/bitrepository/integrityservice/utils/FileSizeUtils.java
<ide> private static final long petaSize = teraSize * unitSize;
<ide> private static final long exaSize = petaSize * unitSize;
<ide> private static final long zettaSize = exaSize * unitSize;
<del> private static final long yottaSize = zettaSize * unitSize;
<del>
<add>
<ide> private static final String bytePostfix = "B";
<ide> private static final String kiloPostfix = "KB";
<ide> private static final String megaPostfix = "MB";
<ide> private static final String petaPostfix = "PB";
<ide> private static final String exaPostfix = "EB";
<ide> private static final String zettaPostfix = "ZB";
<del> private static final String yottaPostfix = "YB";
<del>
<add>
<ide>
<ide> public static String toHumanShort(long size) {
<del> if(size >= yottaSize) {
<del> return formatShortYotta(size);
<del> } else if(size >= zettaSize) {
<add> if(size >= zettaSize) {
<ide> return formatShortZetta(size);
<ide> } else if(size >= petaSize) {
<ide> return formatShortExa(size);
<ide> } else {
<ide> return formatShortByte(size);
<ide> }
<del> }
<del>
<del> private static String formatShortYotta(long size) {
<del> int wholeZB = (int) (size / yottaSize);
<del> return wholeZB + yottaPostfix;
<ide> }
<ide>
<ide> private static String formatShortZetta(long size) {
|
|
Java
|
apache-2.0
|
42cdb1cbc00aa7a8e43b4a3764a3ea09cf864a88
| 0 |
eclub-sense/iot-cloud,eclub-sense/iot-cloud,eclub-sense/iot-cloud,eclub-sense/iot-cloud,eclub-sense/iot-cloud
|
package cz.esc.iot.cloudservice.oauth2;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Random;
import org.apache.commons.codec.binary.Hex;
import org.json.JSONException;
import org.restlet.data.Reference;
import org.restlet.ext.oauth.AccessTokenClientResource;
import org.restlet.ext.oauth.GrantType;
import org.restlet.ext.oauth.OAuthException;
import org.restlet.ext.oauth.OAuthParameters;
import org.restlet.ext.oauth.internal.Token;
import org.restlet.representation.Representation;
import org.restlet.resource.ClientResource;
import com.google.gson.Gson;
import cz.esc.iot.cloudservice.persistance.dao.MorfiaSetUp;
import cz.esc.iot.cloudservice.persistance.model.AccessToken;
import cz.esc.iot.cloudservice.persistance.model.UserEntity;
/**
 * Class for communication with authorisation and token servers.
 */
public class OAuth2 {
    public static String clientID;
    public static String clientSecret;

    /**
     * Sets Google's clientId and clientSecret.
     *
     * Best-effort: when the credentials file cannot be read, the stack trace is printed
     * and {@link #clientID}/{@link #clientSecret} remain null.
     */
    public static void setClientCredentials() {
        // try-with-resources: the original leaked the reader when readLine() threw.
        // TODO: the credentials path is hard-coded; make it configurable.
        try (BufferedReader br = new BufferedReader(
                new FileReader(new File("/home/z3tt0r/google_client_credentials2")))) {
            clientID = br.readLine();
            clientSecret = br.readLine();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Find, whether user obtained from Google is registered in
     * Zettor's database.
     * @return Returns verified user.
     */
    public static UserEntity findUserInDatabase(String access_token) {
        AccessToken token = MorfiaSetUp.getDatastore().createQuery(AccessToken.class).field("access_token").equal(access_token).get();
        return token.getUser();
    }

    /**
     * Ask for information about user. Uses received access token for it.
     * @return Returns information from Google.
     */
    public static GoogleUserInfo getGoogleUserFromAccessToken(String accessToken) throws IOException {
        String uri = "https://www.googleapis.com/oauth2/v1/userinfo?access_token=" + accessToken;
        ClientResource getter = new ClientResource(uri);
        Representation response = getter.get();
        Gson gson = new Gson();
        return gson.fromJson(response.getText(), GoogleUserInfo.class);
    }

    /**
     * Asks Google for access token. Uses code, received as parameter, for it.
     * @return Returns valid access token.
     * @throws JSONException
     * @throws OAuthException
     */
    public static Token exchangeCodeForAccessToken(String code) throws IOException, OAuthException, JSONException {
        AccessTokenClientResource client = new AccessTokenClientResource(new Reference("https://accounts.google.com/o/oauth2/token"));
        client.setClientCredentials(OAuth2.clientID, OAuth2.clientSecret);
        OAuthParameters params = new OAuthParameters();
        params.code(code);
        params.grantType(GrantType.authorization_code);
        //params.redirectURI("http://localhost:3000/callback");
        Token token = client.requestToken(params);
        return token;
    }

    /**
     * Exchanges an authorization code for an access token and fetches the user's
     * Google profile with it.
     *
     * @param code the OAuth2 authorization code
     * @return the user information returned by Google
     */
    public static GoogleUserInfo getGoogleUserInfoFromCode(String code) throws IOException, OAuthException, JSONException {
        // exchange code for access token
        Token token = OAuth2.exchangeCodeForAccessToken(code);
        String accessToken = token.getAccessToken();
        // get info about user from IDP
        GoogleUserInfo googleUser = OAuth2.getGoogleUserFromAccessToken(accessToken);
        return googleUser;
    }

    /**
     * Generates a fresh access/refresh token pair as hex strings.
     *
     * @return a new {@code CloudToken}
     */
    public static CloudToken generateToken() {
        // SecureRandom instead of java.util.Random: tokens from a non-cryptographic RNG
        // are predictable and therefore guessable by an attacker.
        java.security.SecureRandom random = new java.security.SecureRandom();
        byte[] accessToken = new byte[40];
        byte[] refreshToken = new byte[40];
        random.nextBytes(accessToken);
        random.nextBytes(refreshToken);
        return new CloudToken(String.valueOf(Hex.encodeHex(accessToken)), String.valueOf(Hex.encodeHex(refreshToken)));
    }
}
|
src/main/java/cz/esc/iot/cloudservice/oauth2/OAuth2.java
|
package cz.esc.iot.cloudservice.oauth2;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Random;
import org.apache.commons.codec.binary.Hex;
import org.json.JSONException;
import org.restlet.data.Reference;
import org.restlet.ext.oauth.AccessTokenClientResource;
import org.restlet.ext.oauth.GrantType;
import org.restlet.ext.oauth.OAuthException;
import org.restlet.ext.oauth.OAuthParameters;
import org.restlet.ext.oauth.internal.Token;
import org.restlet.representation.Representation;
import org.restlet.resource.ClientResource;
import com.google.gson.Gson;
import cz.esc.iot.cloudservice.persistance.dao.MorfiaSetUp;
import cz.esc.iot.cloudservice.persistance.model.AccessToken;
import cz.esc.iot.cloudservice.persistance.model.UserEntity;
/**
 * Class for communication with authorisation and token servers.
 */
public class OAuth2 {
    public static String clientID;
    public static String clientSecret;
    /**
     * Sets Google's clientId and clientSecret.
     */
    public static void setClientCredentials() {
        // NOTE(review): the reader is not closed when readLine() throws — use
        // try-with-resources. The credentials path is also hard-coded.
        try {
            BufferedReader br = new BufferedReader(new FileReader(new File("/home/z3tt0r/google_client_credentials2")));
            clientID = br.readLine();
            clientSecret = br.readLine();
            br.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    /**
     * Find, whether user obtained from Google is registered in
     * Zettor's database.
     * @return Returns verified user.
     */
    public static UserEntity findUserInDatabase(String access_token) {
        AccessToken token = MorfiaSetUp.getDatastore().createQuery(AccessToken.class).field("access_token").equal(access_token).get();
        return token.getUser();
    }
    /**
     * Ask for information about user. Uses received access token for it.
     * @return Returns information from Google.
     */
    public static GoogleUserInfo getGoogleUserFromAccessToken(String accessToken) throws IOException {
        String uri = "https://www.googleapis.com/oauth2/v1/userinfo?access_token=" + accessToken;
        ClientResource getter = new ClientResource(uri);
        Representation response = getter.get();
        Gson gson = new Gson();
        GoogleUserInfo user = null;
        user = gson.fromJson(response.getText(), GoogleUserInfo.class);
        return user;
    }
    /**
     * Asks Google for access token. Uses code, received as parameter, for it.
     * @return Returns valid access token.
     * @throws JSONException
     * @throws OAuthException
     */
    public static Token exchangeCodeForAccessToken(String code) throws IOException, OAuthException, JSONException {
        AccessTokenClientResource client = new AccessTokenClientResource(new Reference("https://accounts.google.com/o/oauth2/token"));
        client.setClientCredentials(OAuth2.clientID, OAuth2.clientSecret);
        OAuthParameters params = new OAuthParameters();
        params.code(code);
        params.grantType(GrantType.authorization_code);
        params.redirectURI("http://localhost:3000/callback");
        Token token = client.requestToken(params);
        return token;
    }
    /**
     * Exchanges an authorization code for an access token, then fetches the user's
     * Google profile with it.
     *
     * @param code the OAuth2 authorization code
     * @return the user information returned by Google
     */
    public static GoogleUserInfo getGoogleUserInfoFromCode(String code) throws IOException, OAuthException, JSONException {
        // exchange code for access token
        Token token = OAuth2.exchangeCodeForAccessToken(code);
        String accessToken = token.getAccessToken();
        // get info about user from IDP
        GoogleUserInfo googleUser = OAuth2.getGoogleUserFromAccessToken(accessToken);
        return googleUser;
    }
    /**
     * Generates a fresh access/refresh token pair as hex strings.
     *
     * @return a new {@code CloudToken}
     */
    public static CloudToken generateToken() {
        // NOTE(review): java.util.Random is not cryptographically strong — tokens minted
        // here are predictable; SecureRandom should be used for auth material.
        Random random = new Random();
        byte[] accessToken = new byte[40];
        byte[] refreshToken = new byte[40];
        random.nextBytes(accessToken);
        random.nextBytes(refreshToken);
        return new CloudToken(String.valueOf(Hex.encodeHex(accessToken)), String.valueOf(Hex.encodeHex(refreshToken)));
    }
}
|
debugging
|
src/main/java/cz/esc/iot/cloudservice/oauth2/OAuth2.java
|
debugging
|
<ide><path>rc/main/java/cz/esc/iot/cloudservice/oauth2/OAuth2.java
<ide> OAuthParameters params = new OAuthParameters();
<ide> params.code(code);
<ide> params.grantType(GrantType.authorization_code);
<del> params.redirectURI("http://localhost:3000/callback");
<add> //params.redirectURI("http://localhost:3000/callback");
<ide> Token token = client.requestToken(params);
<ide>
<ide> return token;
|
|
Java
|
lgpl-2.1
|
1f46b5c0e9048723edb5a8cc5eaf6f9dd6d6dded
| 0 |
jamezp/wildfly-core,ivassile/wildfly-core,yersan/wildfly-core,jfdenise/wildfly-core,JiriOndrusek/wildfly-core,jamezp/wildfly-core,yersan/wildfly-core,darranl/wildfly-core,aloubyansky/wildfly-core,jfdenise/wildfly-core,jfdenise/wildfly-core,luck3y/wildfly-core,JiriOndrusek/wildfly-core,bstansberry/wildfly-core,aloubyansky/wildfly-core,aloubyansky/wildfly-core,soul2zimate/wildfly-core,jamezp/wildfly-core,ivassile/wildfly-core,yersan/wildfly-core,soul2zimate/wildfly-core,bstansberry/wildfly-core,darranl/wildfly-core,luck3y/wildfly-core,bstansberry/wildfly-core,ivassile/wildfly-core,soul2zimate/wildfly-core,luck3y/wildfly-core,darranl/wildfly-core,JiriOndrusek/wildfly-core
|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.server.deployment;
/**
* An enumeration of the phases of a deployment unit's processing cycle.
*
* @author <a href="mailto:[email protected]">David M. Lloyd</a>
*/
public enum Phase {
/* == TEMPLATE ==
* Upon entry, this phase performs the following actions:
* <ul>
* <li></li>
* </ul>
* <p>
* Processors in this phase have access to the following phase attachments:
* <ul>
* <li>{@link Attachments#BLAH} - description here</li>
* </ul>
* <p>
* Processors in this phase have access to the following deployment unit attachments, in addition to those defined
* for the previous phase:
* <ul>
* <li>{@link Attachments#BLAH} - description here</li>
* </ul>
* <p>
* In this phase, these phase attachments may be modified:
* <ul>
* <li>{@link Attachments#BLAH} - description here</li>
* </ul>
* <p>
*/
/**
* This phase creates the initial root structure. Depending on the service for this phase will ensure that the
* deployment unit's initial root structure is available and accessible.
* <p>
* Upon entry, this phase performs the following actions:
* <ul>
* <li>The primary deployment root is mounted (during {@link #STRUCTURE_MOUNT})</li>
* <li>Other internal deployment roots are mounted (during {@link #STRUCTURE_NESTED_JAR})</li>
* </ul>
* <p>
* Processors in this phase have access to the following phase attachments:
* <ul>
* <li><i>N/A</i></li>
* </ul>
* <p>
* Processors in this phase have access to the following deployment unit attachments:
* <ul>
* <li>{@link Attachments#DEPLOYMENT_ROOT} - the mounted deployment root for this deployment unit</li>
* </ul>
* <p>
* In this phase, these phase attachments may be modified:
* <ul>
* </ul>
* <p>
*/
STRUCTURE(null),
/**
* This phase assembles information from the root structure to prepare for adding and processing additional external
* structure, such as from class path entries and other similar mechanisms.
* <p>
* Upon entry, this phase performs the following actions:
* <ul>
* <li>The root content's MANIFEST is read and made available during {@link #PARSE_MANIFEST}.</li>
* <li>The annotation index for the root structure is calculated during {@link #STRUCTURE_ANNOTATION_INDEX}.</li>
* </ul>
* <p>
* Processors in this phase have access to the following phase attachments:
* <ul>
* <li>{@link Attachments#MANIFEST} - the parsed manifest of the root structure</li>
* </ul>
* <p>
* Processors in this phase have access to the following deployment unit attachments, in addition to those defined
* for the previous phase:
* <ul>
* <li><i>N/A</i></li>
* </ul>
* <p>
* In this phase, these phase attachments may be modified:
* <ul>
* <li>{@link Attachments#CLASS_PATH_ENTRIES} - class path entries found in the manifest and elsewhere.</li>
* <li>{@link Attachments#EXTENSION_LIST_ENTRIES} - extension-list entries found in the manifest and elsewhere.</li>
* </ul>
* <p>
*/
PARSE(null),
/**
* In this phase, the full structure of the deployment unit is made available and module dependencies may be assembled.
* <p>
* Upon entry, this phase performs the following actions:
* <ul>
* <li>Any additional external structure is mounted during {@link #XXX}</li>
* <li></li>
* </ul>
* <p>
* Processors in this phase have access to the following phase attachments:
* <ul>
* <li>{@link Attachments#BLAH} - description here</li>
* </ul>
* <p>
* Processors in this phase have access to the following deployment unit attachments, in addition to those defined
* for the previous phase:
* <ul>
* <li>{@link Attachments#BLAH} - description here</li>
* </ul>
* <p>
* In this phase, these phase attachments may be modified:
* <ul>
* <li>{@link Attachments#BLAH} - description here</li>
* </ul>
* <p>
*/
DEPENDENCIES(null),
CONFIGURE_MODULE(null),
POST_MODULE(null),
INSTALL(null),
CLEANUP(null),
;
    /**
     * This is the key for the attachment to use as the phase's "value". The attachment is taken from
     * the deployment unit. If a phase doesn't have a single defining "value", {@code null} is specified.
     */
    private final AttachmentKey<?> phaseKey;

    /**
     * Enum constructor binding an optional attachment key to the phase.
     *
     * @param key the attachment key representing this phase's result value, or {@code null}
     */
    private Phase(final AttachmentKey<?> key) {
        phaseKey = key;
    }
/**
* Get the next phase, or {@code null} if none.
*
* @return the next phase, or {@code null} if there is none
*/
public Phase next() {
final int ord = ordinal() + 1;
final Phase[] phases = Phase.values();
return ord == phases.length ? null : phases[ord];
}
/**
* Get the attachment key of the {@code DeploymentUnit} attachment that represents the result value
* of this phase.
*
* @return the key
*/
public AttachmentKey<?> getPhaseKey() {
return phaseKey;
}
// STRUCTURE
public static final int STRUCTURE_WAR_DEPLOYMENT_INIT = 0x0000;
public static final int STRUCTURE_MOUNT = 0x0001;
public static final int STRUCTURE_MANIFEST = 0x0100;
// must be before osgi
public static final int STRUCTURE_JDBC_DRIVER = 0x0150;
public static final int STRUCTURE_OSGI_MANIFEST = 0x0200;
public static final int STRUCTURE_RAR = 0x0300;
public static final int STRUCTURE_WAR = 0x0500;
public static final int STRUCTURE_EAR_DEPLOYMENT_INIT = 0x0600;
public static final int STRUCTURE_EAR_APP_XML_PARSE = 0x0700;
public static final int STRUCTURE_EAR_JBOSS_APP_XML_PARSE = 0x0800;
public static final int STRUCTURE_EAR = 0x0900;
public static final int STRUCTURE_SERVICE_MODULE_LOADER = 0x0A00;
public static final int STRUCTURE_ANNOTATION_INDEX = 0x0B00;
public static final int STRUCTURE_EJB_JAR_IN_EAR = 0x0C00;
public static final int STRUCTURE_MANAGED_BEAN_JAR_IN_EAR = 0x0C01;
public static final int STRUCTURE_SAR_SUB_DEPLOY_CHECK = 0x0D00;
public static final int STRUCTURE_ADDITIONAL_MANIFEST = 0x0E00;
public static final int STRUCTURE_SUB_DEPLOYMENT = 0x0F00;
public static final int STRUCTURE_MODULE_IDENTIFIERS = 0x1000;
public static final int STRUCTURE_EE_MODULE_INIT = 0x1100;
// PARSE
public static final int PARSE_EE_MODULE_NAME = 0x0100;
public static final int PARSE_EAR_SUBDEPLOYMENTS_ISOLATION_DEFAULT = 0x0200;
public static final int PARSE_STRUCTURE_DESCRIPTOR = 0x0201;
public static final int PARSE_DEPENDENCIES_MANIFEST = 0x0300;
public static final int PARSE_COMPOSITE_ANNOTATION_INDEX = 0x0301;
public static final int PARSE_EAR_LIB_CLASS_PATH = 0x0400;
public static final int PARSE_ADDITIONAL_MODULES = 0x0500;
public static final int PARSE_CLASS_PATH = 0x0600;
public static final int PARSE_EXTENSION_LIST = 0x0700;
public static final int PARSE_EXTENSION_NAME = 0x0800;
public static final int PARSE_OSGI_BUNDLE_INFO = 0x0900;
public static final int PARSE_OSGI_XSERVICE_PROPERTIES = 0x0A00;
public static final int PARSE_OSGI_DEPLOYMENT = 0x0A80;
public static final int PARSE_WEB_DEPLOYMENT = 0x0B00;
public static final int PARSE_WEB_DEPLOYMENT_FRAGMENT = 0x0C00;
public static final int PARSE_ANNOTATION_WAR = 0x0D00;
public static final int PARSE_JBOSS_WEB_DEPLOYMENT = 0x0E00;
public static final int PARSE_TLD_DEPLOYMENT = 0x0F00;
public static final int PARSE_EAR_CONTEXT_ROOT = 0x1000;
// create and attach EJB metadata for EJB deployments
public static final int PARSE_EJB_DEPLOYMENT = 0x1100;
public static final int PARSE_EJB_CREATE_COMPONENT_DESCRIPTIONS = 0x1150;
public static final int PARSE_EJB_SESSION_BEAN_DD = 0x1200;
public static final int PARSE_EJB_MDB_DD = 0x1300;
// create and attach the component description out of EJB annotations
public static final int PARSE_EJB_ANNOTATION = 0x1400;
public static final int PARSE_MESSAGE_DRIVEN_ANNOTATION = 0x1500;
public static final int PARSE_EJB_TRANSACTION_MANAGEMENT = 0x1600;
public static final int PARSE_EJB_BUSINESS_VIEW_ANNOTATION = 0x1700;
public static final int PARSE_EJB_STARTUP_ANNOTATION = 0x1800;
public static final int PARSE_EJB_SECURITY_DOMAIN_ANNOTATION = 0x1801;
public static final int PARSE_EJB_CONCURRENCY_MANAGEMENT_ANNOTATION = 0x1900;
public static final int PARSE_EJB_APPLICATION_EXCEPTION_ANNOTATION = 0x1901;
public static final int PARSE_REMOVE_METHOD_ANNOTAION = 0x1902;
public static final int PARSE_EJB_DECLARE_ROLES_ANNOTATION = 0x1903;
public static final int PARSE_EJB_RUN_AS_ANNOTATION = 0x1904;
public static final int PARSE_EJB_DENY_ALL_ANNOTATION = 0x1905;
public static final int PARSE_EJB_ROLES_ALLOWED_ANNOTATION = 0x1906;
public static final int PARSE_EJB_PERMIT_ALL_ANNOTATION = 0x1907;
// should be after ConcurrencyManagement annotation processor
public static final int PARSE_EJB_LOCK_ANNOTATION = 0x1A00;
public static final int PARSE_EJB_STATEFUL_TIMEOUT_ANNOTATION = 0x1A01;
// should be after ConcurrencyManagement annotation processor
public static final int PARSE_EJB_ACCESS_TIMEOUT_ANNOTATION = 0x1B00;
// should be after all views are known
public static final int PARSE_EJB_TRANSACTION_ATTR_ANNOTATION = 0x1C00;
public static final int PARSE_EJB_SESSION_SYNCHRONIZATION = 0x1C50;
public static final int PARSE_EJB_RESOURCE_ADAPTER_ANNOTATION = 0x1D00;
public static final int PARSE_EJB_ASYNCHRONOUS_ANNOTATION = 0x1E00;
public static final int PARSE_WEB_COMPONENTS = 0x1F00;
public static final int PARSE_WEB_MERGE_METADATA = 0x2000;
public static final int PARSE_RA_DEPLOYMENT = 0x2100;
public static final int PARSE_SERVICE_LOADER_DEPLOYMENT = 0x2200;
public static final int PARSE_SERVICE_DEPLOYMENT = 0x2300;
public static final int PARSE_MC_BEAN_DEPLOYMENT = 0x2400;
public static final int PARSE_IRON_JACAMAR_DEPLOYMENT = 0x2500;
public static final int PARSE_RESOURCE_ADAPTERS = 0x2600;
public static final int PARSE_DATA_SOURCES = 0x2700;
public static final int PARSE_ARQUILLIAN_RUNWITH = 0x2800;
public static final int PARSE_MANAGED_BEAN_ANNOTATION = 0x2900;
public static final int PARSE_JAXRS_ANNOTATIONS = 0x2A00;
public static final int PARSE_WELD_DEPLOYMENT = 0x2B00;
public static final int PARSE_WELD_WEB_INTEGRATION = 0x2B10;
public static final int PARSE_WEBSERVICES_XML = 0x2C00;
public static final int PARSE_DATA_SOURCE_DEFINITION_ANNOTATION = 0x2D00;
public static final int PARSE_EJB_CONTEXT_BINDING = 0x2E00;
public static final int PARSE_EJB_TIMERSERVICE_BINDING = 0x2E01;
public static final int PARSE_PERSISTENCE_UNIT = 0x2F00;
public static final int PARSE_PERSISTENCE_ANNOTATION = 0x3000;
public static final int PARSE_INTERCEPTORS_ANNOTATION = 0x3100;
public static final int PARSE_LIEFCYCLE_ANNOTATION = 0x3200;
public static final int PARSE_AROUNDINVOKE_ANNOTATION = 0x3300;
public static final int PARSE_RESOURCE_INJECTION_WEBSERVICE_CONTEXT_ANNOTATION = 0x3401;
public static final int PARSE_EJB_DD_INTERCEPTORS = 0x3500;
public static final int PARSE_EJB_SECURITY_ROLE_REF_DD = 0x3501;
public static final int PARSE_EJB_SECURITY_IDENTITY_DD = 0x3502;
public static final int PARSE_EJB_ASSEMBLY_DESC_DD = 0x3600;
// should be after all components are known
public static final int PARSE_EJB_INJECTION_ANNOTATION = 0x3700;
public static final int PARSE_WEB_SERVICE_INJECTION_ANNOTATION = 0x3800;
// DEPENDENCIES
public static final int DEPENDENCIES_EJB = 0x0000;
public static final int DEPENDENCIES_MODULE = 0x0100;
public static final int DEPENDENCIES_DS = 0x0200;
public static final int DEPENDENCIES_RAR_CONFIG = 0x0300;
public static final int DEPENDENCIES_MANAGED_BEAN = 0x0400;
public static final int DEPENDENCIES_SAR_MODULE = 0x0500;
public static final int DEPENDENCIES_WAR_MODULE = 0x0600;
public static final int DEPENDENCIES_ARQUILLIAN = 0x0700;
public static final int DEPENDENCIES_CLASS_PATH = 0x0800;
public static final int DEPENDENCIES_EXTENSION_LIST = 0x0900;
public static final int DEPENDENCIES_WELD = 0x0A00;
public static final int DEPENDENCIES_SEAM = 0x0A01;
public static final int DEPENDENCIES_NAMING = 0x0B00;
public static final int DEPENDENCIES_WS = 0x0C00;
public static final int DEPENDENCIES_JAXRS = 0x0D00;
public static final int DEPENDENCIES_SUB_DEPLOYMENTS = 0x0E00;
// Sets up appropriate module dependencies for EJB deployments
public static final int DEPENDENCIES_JPA = 0x1000;
public static final int DEPENDENCIES_GLOBAL_MODULES = 0x1100;
public static final int DEPENDENCIES_JDK = 0x1200;
//must be last
public static final int DEPENDENCIES_MODULE_INFO_SERVICE = 0x1300;
// CONFIGURE_MODULE
public static final int CONFIGURE_MODULE_SPEC = 0x0100;
// POST_MODULE
public static final int POST_MODULE_INJECTION_ANNOTATION = 0x0100;
public static final int POST_MODULE_REFLECTION_INDEX = 0x0200;
public static final int POST_MODULE_JSF_MANAGED_BEANS = 0x0300;
public static final int POST_MODULE_EJB_DD_METHOD_RESOLUTION = 0x0400;
public static final int POST_MODULE_EJB_DD_REMOVE_METHOD = 0x0500;
public static final int POST_MODULE_EJB_EXCLUDE_LIST_DD = 0x0501;
public static final int POST_MODULE_EJB_METHOD_PERMISSION_DD = 0x0502;
public static final int POST_MODULE_EJB_DD_INTERCEPTORS = 0x0600;
public static final int POST_MODULE_EJB_DD_CONCURRENCY = 0x0601;
public static final int POST_MODULE_WELD_EJB_INTERCEPTORS_INTEGRATION = 0x0700;
public static final int POST_MODULE_WELD_COMPONENT_INTEGRATION = 0x0800;
public static final int POST_MODULE_AGGREGATE_COMPONENT_INDEX = 0x0900;
public static final int POST_MODULE_INSTALL_EXTENSION = 0x0A00;
public static final int POST_MODULE_VALIDATOR_FACTORY = 0x0B00;
public static final int POST_MODULE_EAR_DEPENDENCY = 0x0C00;
public static final int POST_MODULE_WELD_BEAN_ARCHIVE = 0x0D00;
public static final int POST_MODULE_WELD_PORTABLE_EXTENSIONS = 0x0E00;
public static final int POST_MODULE_WS_EJB_INTEGRATION = 0x0F00;
// should come before ejb jndi bindings processor
public static final int POST_MODULE_EJB_IMPLICIT_NO_INTERFACE_VIEW = 0x1000;
public static final int POST_MODULE_EJB_JNDI_BINDINGS = 0x1100;
public static final int POST_MODULE_EJB_MODULE_CONFIGURATION = 0x1200;
public static final int POST_INITIALIZE_IN_ORDER = 0x1300;
public static final int POST_MODULE_ENV_ENTRY = 0x1400;
public static final int POST_MODULE_EJB_REF = 0x1500;
public static final int POST_MODULE_PERSISTENCE_REF = 0x1600;
public static final int POST_MODULE_DATASOURCE_REF = 0x1700;
public static final int POST_MODULE_WS_JMS_INTEGRATION = 0x1800;
// INSTALL
public static final int INSTALL_JAXRS_SCANNING = 0x0200;
public static final int INSTALL_APP_CONTEXT = 0x0300;
public static final int INSTALL_MODULE_CONTEXT = 0x0400;
public static final int INSTALL_SERVICE_ACTIVATOR = 0x0500;
public static final int INSTALL_OSGI_DEPLOYMENT = 0x0600;
public static final int INSTALL_OSGI_MODULE = 0x0650;
public static final int INSTALL_WS_DEPLOYMENT_TYPE_DETECTOR = 0x0700;
public static final int INSTALL_WS_UNIVERSAL_META_DATA_MODEL = 0x0701;
public static final int INSTALL_WS_DEPLOYMENT_ASPECTS = 0x0710;
// IMPORTANT: WS integration installs deployment aspects dynamically
// so consider INSTALL 0x0710 - 0x07FF reserved for WS subsystem!
public static final int INSTALL_RA_DEPLOYMENT = 0x0800;
public static final int INSTALL_SERVICE_DEPLOYMENT = 0x0900;
public static final int INSTALL_MC_BEAN_DEPLOYMENT = 0x0A00;
public static final int INSTALL_RA_XML_DEPLOYMENT = 0x0B00;
public static final int INSTALL_EE_COMP_LAZY_BINDING_SOURCE_HANDLER = 0x0C00;
public static final int INSTALL_WS_LAZY_BINDING_SOURCE_HANDLER = 0x0D00;
public static final int INSTALL_EE_CLASS_CONFIG = 0x1100;
public static final int INSTALL_EE_MODULE_CONFIG = 0x1101;
public static final int INSTALL_MODULE_JNDI_BINDINGS = 0x1200;
public static final int INSTALL_DEPENDS_ON_ANNOTATION = 0x1210;
public static final int INSTALL_EE_COMPONENT = 0x1230;
public static final int INSTALL_SERVLET_INIT_DEPLOYMENT = 0x1300;
public static final int INSTALL_JAXRS_COMPONENT = 0x1400;
public static final int INSTALL_JAXRS_DEPLOYMENT = 0x1500;
public static final int INSTALL_JSF_ANNOTATIONS = 0x1600;
public static final int INSTALL_ARQUILLIAN_DEPLOYMENT = 0x1700;
public static final int INSTALL_JDBC_DRIVER = 0x1800;
public static final int INSTALL_TRANSACTION_BINDINGS = 0x1900;
public static final int INSTALL_PERSISTENCE_PROVIDER = 0x1A00;
public static final int INSTALL_PERSISTENTUNIT = 0x1A50;
public static final int INSTALL_WELD_DEPLOYMENT = 0x1B00;
public static final int INSTALL_WELD_BEAN_MANAGER = 0x1C00;
public static final int INSTALL_WAR_DEPLOYMENT = 0x1D00;
// CLEANUP
public static final int CLEANUP_REFLECTION_INDEX = 0x0100;
}
|
server/src/main/java/org/jboss/as/server/deployment/Phase.java
|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.server.deployment;
/**
 * An enumeration of the phases of a deployment unit's processing cycle.
 * <p>
 * The {@code int} constants declared below give the relative ordering of deployment unit
 * processors within each phase; a processor registered with a lower value runs earlier.
 *
 * @author <a href="mailto:[email protected]">David M. Lloyd</a>
 */
public enum Phase {

    /* == TEMPLATE ==
     * Upon entry, this phase performs the following actions:
     * <ul>
     * <li></li>
     * </ul>
     * <p>
     * Processors in this phase have access to the following phase attachments:
     * <ul>
     * <li>{@link Attachments#BLAH} - description here</li>
     * </ul>
     * <p>
     * Processors in this phase have access to the following deployment unit attachments, in addition to those defined
     * for the previous phase:
     * <ul>
     * <li>{@link Attachments#BLAH} - description here</li>
     * </ul>
     * <p>
     * In this phase, these phase attachments may be modified:
     * <ul>
     * <li>{@link Attachments#BLAH} - description here</li>
     * </ul>
     * <p>
     */

    /**
     * This phase creates the initial root structure. Depending on the service for this phase will ensure that the
     * deployment unit's initial root structure is available and accessible.
     * <p>
     * Upon entry, this phase performs the following actions:
     * <ul>
     * <li>The primary deployment root is mounted (during {@link #STRUCTURE_MOUNT})</li>
     * <li>Other internal deployment roots are mounted (during {@link #STRUCTURE_NESTED_JAR})</li>
     * </ul>
     * <p>
     * Processors in this phase have access to the following phase attachments:
     * <ul>
     * <li><i>N/A</i></li>
     * </ul>
     * <p>
     * Processors in this phase have access to the following deployment unit attachments:
     * <ul>
     * <li>{@link Attachments#DEPLOYMENT_ROOT} - the mounted deployment root for this deployment unit</li>
     * </ul>
     * <p>
     * In this phase, these phase attachments may be modified:
     * <ul>
     * </ul>
     * <p>
     */
    STRUCTURE(null),
    /**
     * This phase assembles information from the root structure to prepare for adding and processing additional external
     * structure, such as from class path entries and other similar mechanisms.
     * <p>
     * Upon entry, this phase performs the following actions:
     * <ul>
     * <li>The root content's MANIFEST is read and made available during {@link #PARSE_MANIFEST}.</li>
     * <li>The annotation index for the root structure is calculated during {@link #STRUCTURE_ANNOTATION_INDEX}.</li>
     * </ul>
     * <p>
     * Processors in this phase have access to the following phase attachments:
     * <ul>
     * <li>{@link Attachments#MANIFEST} - the parsed manifest of the root structure</li>
     * </ul>
     * <p>
     * Processors in this phase have access to the following deployment unit attachments, in addition to those defined
     * for the previous phase:
     * <ul>
     * <li><i>N/A</i></li>
     * </ul>
     * <p>
     * In this phase, these phase attachments may be modified:
     * <ul>
     * <li>{@link Attachments#CLASS_PATH_ENTRIES} - class path entries found in the manifest and elsewhere.</li>
     * <li>{@link Attachments#EXTENSION_LIST_ENTRIES} - extension-list entries found in the manifest and elsewhere.</li>
     * </ul>
     * <p>
     */
    PARSE(null),
    /**
     * In this phase, the full structure of the deployment unit is made available and module dependencies may be assembled.
     * <p>
     * Upon entry, this phase performs the following actions:
     * <ul>
     * <li>Any additional external structure is mounted during {@link #XXX}</li>
     * <li></li>
     * </ul>
     * <p>
     * Processors in this phase have access to the following phase attachments:
     * <ul>
     * <li>{@link Attachments#BLAH} - description here</li>
     * </ul>
     * <p>
     * Processors in this phase have access to the following deployment unit attachments, in addition to those defined
     * for the previous phase:
     * <ul>
     * <li>{@link Attachments#BLAH} - description here</li>
     * </ul>
     * <p>
     * In this phase, these phase attachments may be modified:
     * <ul>
     * <li>{@link Attachments#BLAH} - description here</li>
     * </ul>
     * <p>
     */
    DEPENDENCIES(null),
    CONFIGURE_MODULE(null),
    POST_MODULE(null),
    INSTALL(null),
    CLEANUP(null),
    ;

    /**
     * This is the key for the attachment to use as the phase's "value". The attachment is taken from
     * the deployment unit. If a phase doesn't have a single defining "value", {@code null} is specified.
     */
    private final AttachmentKey<?> phaseKey;

    private Phase(final AttachmentKey<?> key) {
        phaseKey = key;
    }

    /**
     * Get the next phase, or {@code null} if none.
     *
     * @return the next phase, or {@code null} if there is none
     */
    public Phase next() {
        final int ord = ordinal() + 1;
        final Phase[] phases = Phase.values();
        return ord == phases.length ? null : phases[ord];
    }

    /**
     * Get the attachment key of the {@code DeploymentUnit} attachment that represents the result value
     * of this phase.
     *
     * @return the key
     */
    public AttachmentKey<?> getPhaseKey() {
        return phaseKey;
    }

    // STRUCTURE
    public static final int STRUCTURE_WAR_DEPLOYMENT_INIT = 0x0000;
    public static final int STRUCTURE_MOUNT = 0x0001;
    public static final int STRUCTURE_MANIFEST = 0x0100;
    // must be before osgi
    public static final int STRUCTURE_JDBC_DRIVER = 0x0150;
    public static final int STRUCTURE_OSGI_MANIFEST = 0x0200;
    public static final int STRUCTURE_RAR = 0x0300;
    public static final int STRUCTURE_WAR = 0x0500;
    public static final int STRUCTURE_EAR_DEPLOYMENT_INIT = 0x0600;
    public static final int STRUCTURE_EAR_APP_XML_PARSE = 0x0700;
    public static final int STRUCTURE_EAR_JBOSS_APP_XML_PARSE = 0x0800;
    public static final int STRUCTURE_EAR = 0x0900;
    public static final int STRUCTURE_SERVICE_MODULE_LOADER = 0x0A00;
    public static final int STRUCTURE_ANNOTATION_INDEX = 0x0B00;
    public static final int STRUCTURE_EJB_JAR_IN_EAR = 0x0C00;
    public static final int STRUCTURE_MANAGED_BEAN_JAR_IN_EAR = 0x0C01;
    public static final int STRUCTURE_SAR_SUB_DEPLOY_CHECK = 0x0D00;
    public static final int STRUCTURE_ADDITIONAL_MANIFEST = 0x0E00;
    public static final int STRUCTURE_SUB_DEPLOYMENT = 0x0F00;
    public static final int STRUCTURE_MODULE_IDENTIFIERS = 0x1000;
    public static final int STRUCTURE_EE_MODULE_INIT = 0x1100;

    // PARSE
    public static final int PARSE_EE_MODULE_NAME = 0x0100;
    public static final int PARSE_EAR_SUBDEPLOYMENTS_ISOLATION_DEFAULT = 0x0200;
    public static final int PARSE_STRUCTURE_DESCRIPTOR = 0x0201;
    public static final int PARSE_DEPENDENCIES_MANIFEST = 0x0300;
    public static final int PARSE_COMPOSITE_ANNOTATION_INDEX = 0x0301;
    public static final int PARSE_EAR_LIB_CLASS_PATH = 0x0400;
    public static final int PARSE_ADDITIONAL_MODULES = 0x0500;
    public static final int PARSE_CLASS_PATH = 0x0600;
    public static final int PARSE_EXTENSION_LIST = 0x0700;
    public static final int PARSE_EXTENSION_NAME = 0x0800;
    public static final int PARSE_OSGI_BUNDLE_INFO = 0x0900;
    public static final int PARSE_OSGI_XSERVICE_PROPERTIES = 0x0A00;
    public static final int PARSE_OSGI_DEPLOYMENT = 0x0A80;
    public static final int PARSE_WEB_DEPLOYMENT = 0x0B00;
    public static final int PARSE_WEB_DEPLOYMENT_FRAGMENT = 0x0C00;
    public static final int PARSE_ANNOTATION_WAR = 0x0D00;
    public static final int PARSE_JBOSS_WEB_DEPLOYMENT = 0x0E00;
    public static final int PARSE_TLD_DEPLOYMENT = 0x0F00;
    public static final int PARSE_EAR_CONTEXT_ROOT = 0x1000;
    // create and attach EJB metadata for EJB deployments
    public static final int PARSE_EJB_DEPLOYMENT = 0x1100;
    public static final int PARSE_EJB_CREATE_COMPONENT_DESCRIPTIONS = 0x1150;
    public static final int PARSE_EJB_SESSION_BEAN_DD = 0x1200;
    public static final int PARSE_EJB_MDB_DD = 0x1300;
    // create and attach the component description out of EJB annotations
    public static final int PARSE_EJB_ANNOTATION = 0x1400;
    public static final int PARSE_MESSAGE_DRIVEN_ANNOTATION = 0x1500;
    public static final int PARSE_EJB_TRANSACTION_MANAGEMENT = 0x1600;
    public static final int PARSE_EJB_BUSINESS_VIEW_ANNOTATION = 0x1700;
    public static final int PARSE_EJB_STARTUP_ANNOTATION = 0x1800;
    public static final int PARSE_EJB_SECURITY_DOMAIN_ANNOTATION = 0x1801;
    public static final int PARSE_EJB_CONCURRENCY_MANAGEMENT_ANNOTATION = 0x1900;
    public static final int PARSE_EJB_APPLICATION_EXCEPTION_ANNOTATION = 0x1901;
    public static final int PARSE_REMOVE_METHOD_ANNOTAION = 0x1902;
    public static final int PARSE_EJB_DECLARE_ROLES_ANNOTATION = 0x1903;
    public static final int PARSE_EJB_RUN_AS_ANNOTATION = 0x1904;
    public static final int PARSE_EJB_DENY_ALL_ANNOTATION = 0x1905;
    public static final int PARSE_EJB_ROLES_ALLOWED_ANNOTATION = 0x1906;
    // processes @PermitAll; grouped with the other EJB security annotation processors (AS7-965)
    public static final int PARSE_EJB_PERMIT_ALL_ANNOTATION = 0x1907;
    // should be after ConcurrencyManagement annotation processor
    public static final int PARSE_EJB_LOCK_ANNOTATION = 0x1A00;
    public static final int PARSE_EJB_STATEFUL_TIMEOUT_ANNOTATION = 0x1A01;
    // should be after ConcurrencyManagement annotation processor
    public static final int PARSE_EJB_ACCESS_TIMEOUT_ANNOTATION = 0x1B00;
    // should be after all views are known
    public static final int PARSE_EJB_TRANSACTION_ATTR_ANNOTATION = 0x1C00;
    public static final int PARSE_EJB_SESSION_SYNCHRONIZATION = 0x1C50;
    public static final int PARSE_EJB_RESOURCE_ADAPTER_ANNOTATION = 0x1D00;
    public static final int PARSE_EJB_ASYNCHRONOUS_ANNOTATION = 0x1E00;
    public static final int PARSE_WEB_COMPONENTS = 0x1F00;
    public static final int PARSE_WEB_MERGE_METADATA = 0x2000;
    public static final int PARSE_RA_DEPLOYMENT = 0x2100;
    public static final int PARSE_SERVICE_LOADER_DEPLOYMENT = 0x2200;
    public static final int PARSE_SERVICE_DEPLOYMENT = 0x2300;
    public static final int PARSE_MC_BEAN_DEPLOYMENT = 0x2400;
    public static final int PARSE_IRON_JACAMAR_DEPLOYMENT = 0x2500;
    public static final int PARSE_RESOURCE_ADAPTERS = 0x2600;
    public static final int PARSE_DATA_SOURCES = 0x2700;
    public static final int PARSE_ARQUILLIAN_RUNWITH = 0x2800;
    public static final int PARSE_MANAGED_BEAN_ANNOTATION = 0x2900;
    public static final int PARSE_JAXRS_ANNOTATIONS = 0x2A00;
    public static final int PARSE_WELD_DEPLOYMENT = 0x2B00;
    public static final int PARSE_WELD_WEB_INTEGRATION = 0x2B10;
    public static final int PARSE_WEBSERVICES_XML = 0x2C00;
    public static final int PARSE_DATA_SOURCE_DEFINITION_ANNOTATION = 0x2D00;
    public static final int PARSE_EJB_CONTEXT_BINDING = 0x2E00;
    public static final int PARSE_EJB_TIMERSERVICE_BINDING = 0x2E01;
    public static final int PARSE_PERSISTENCE_UNIT = 0x2F00;
    public static final int PARSE_PERSISTENCE_ANNOTATION = 0x3000;
    public static final int PARSE_INTERCEPTORS_ANNOTATION = 0x3100;
    public static final int PARSE_LIEFCYCLE_ANNOTATION = 0x3200;
    public static final int PARSE_AROUNDINVOKE_ANNOTATION = 0x3300;
    public static final int PARSE_RESOURCE_INJECTION_WEBSERVICE_CONTEXT_ANNOTATION = 0x3401;
    public static final int PARSE_EJB_DD_INTERCEPTORS = 0x3500;
    public static final int PARSE_EJB_SECURITY_ROLE_REF_DD = 0x3501;
    public static final int PARSE_EJB_SECURITY_IDENTITY_DD = 0x3502;
    public static final int PARSE_EJB_ASSEMBLY_DESC_DD = 0x3600;
    // should be after all components are known
    public static final int PARSE_EJB_INJECTION_ANNOTATION = 0x3700;
    public static final int PARSE_WEB_SERVICE_INJECTION_ANNOTATION = 0x3800;

    // DEPENDENCIES
    public static final int DEPENDENCIES_EJB = 0x0000;
    public static final int DEPENDENCIES_MODULE = 0x0100;
    public static final int DEPENDENCIES_DS = 0x0200;
    public static final int DEPENDENCIES_RAR_CONFIG = 0x0300;
    public static final int DEPENDENCIES_MANAGED_BEAN = 0x0400;
    public static final int DEPENDENCIES_SAR_MODULE = 0x0500;
    public static final int DEPENDENCIES_WAR_MODULE = 0x0600;
    public static final int DEPENDENCIES_ARQUILLIAN = 0x0700;
    public static final int DEPENDENCIES_CLASS_PATH = 0x0800;
    public static final int DEPENDENCIES_EXTENSION_LIST = 0x0900;
    public static final int DEPENDENCIES_WELD = 0x0A00;
    public static final int DEPENDENCIES_SEAM = 0x0A01;
    public static final int DEPENDENCIES_NAMING = 0x0B00;
    public static final int DEPENDENCIES_WS = 0x0C00;
    public static final int DEPENDENCIES_JAXRS = 0x0D00;
    public static final int DEPENDENCIES_SUB_DEPLOYMENTS = 0x0E00;
    // Sets up appropriate module dependencies for EJB deployments
    public static final int DEPENDENCIES_JPA = 0x1000;
    public static final int DEPENDENCIES_GLOBAL_MODULES = 0x1100;
    public static final int DEPENDENCIES_JDK = 0x1200;
    //must be last
    public static final int DEPENDENCIES_MODULE_INFO_SERVICE = 0x1300;

    // CONFIGURE_MODULE
    public static final int CONFIGURE_MODULE_SPEC = 0x0100;

    // POST_MODULE
    public static final int POST_MODULE_INJECTION_ANNOTATION = 0x0100;
    public static final int POST_MODULE_REFLECTION_INDEX = 0x0200;
    public static final int POST_MODULE_JSF_MANAGED_BEANS = 0x0300;
    public static final int POST_MODULE_EJB_DD_METHOD_RESOLUTION = 0x0400;
    public static final int POST_MODULE_EJB_DD_REMOVE_METHOD = 0x0500;
    public static final int POST_MODULE_EJB_EXCLUDE_LIST_DD = 0x0501;
    public static final int POST_MODULE_EJB_METHOD_PERMISSION_DD = 0x0502;
    public static final int POST_MODULE_EJB_DD_INTERCEPTORS = 0x0600;
    public static final int POST_MODULE_EJB_DD_CONCURRENCY = 0x0601;
    public static final int POST_MODULE_WELD_EJB_INTERCEPTORS_INTEGRATION = 0x0700;
    public static final int POST_MODULE_WELD_COMPONENT_INTEGRATION = 0x0800;
    public static final int POST_MODULE_AGGREGATE_COMPONENT_INDEX = 0x0900;
    public static final int POST_MODULE_INSTALL_EXTENSION = 0x0A00;
    public static final int POST_MODULE_VALIDATOR_FACTORY = 0x0B00;
    public static final int POST_MODULE_EAR_DEPENDENCY = 0x0C00;
    public static final int POST_MODULE_WELD_BEAN_ARCHIVE = 0x0D00;
    public static final int POST_MODULE_WELD_PORTABLE_EXTENSIONS = 0x0E00;
    public static final int POST_MODULE_WS_EJB_INTEGRATION = 0x0F00;
    // should come before ejb jndi bindings processor
    public static final int POST_MODULE_EJB_IMPLICIT_NO_INTERFACE_VIEW = 0x1000;
    public static final int POST_MODULE_EJB_JNDI_BINDINGS = 0x1100;
    public static final int POST_MODULE_EJB_MODULE_CONFIGURATION = 0x1200;
    public static final int POST_INITIALIZE_IN_ORDER = 0x1300;
    public static final int POST_MODULE_ENV_ENTRY = 0x1400;
    public static final int POST_MODULE_EJB_REF = 0x1500;
    public static final int POST_MODULE_PERSISTENCE_REF = 0x1600;
    public static final int POST_MODULE_DATASOURCE_REF = 0x1700;
    public static final int POST_MODULE_WS_JMS_INTEGRATION = 0x1800;

    // INSTALL
    public static final int INSTALL_JAXRS_SCANNING = 0x0200;
    public static final int INSTALL_APP_CONTEXT = 0x0300;
    public static final int INSTALL_MODULE_CONTEXT = 0x0400;
    public static final int INSTALL_SERVICE_ACTIVATOR = 0x0500;
    public static final int INSTALL_OSGI_DEPLOYMENT = 0x0600;
    public static final int INSTALL_OSGI_MODULE = 0x0650;
    public static final int INSTALL_WS_DEPLOYMENT_TYPE_DETECTOR = 0x0700;
    public static final int INSTALL_WS_UNIVERSAL_META_DATA_MODEL = 0x0701;
    public static final int INSTALL_WS_DEPLOYMENT_ASPECTS = 0x0710;
    // IMPORTANT: WS integration installs deployment aspects dynamically
    // so consider INSTALL 0x0710 - 0x07FF reserved for WS subsystem!
    public static final int INSTALL_RA_DEPLOYMENT = 0x0800;
    public static final int INSTALL_SERVICE_DEPLOYMENT = 0x0900;
    public static final int INSTALL_MC_BEAN_DEPLOYMENT = 0x0A00;
    public static final int INSTALL_RA_XML_DEPLOYMENT = 0x0B00;
    public static final int INSTALL_EE_COMP_LAZY_BINDING_SOURCE_HANDLER = 0x0C00;
    public static final int INSTALL_WS_LAZY_BINDING_SOURCE_HANDLER = 0x0D00;
    public static final int INSTALL_EE_CLASS_CONFIG = 0x1100;
    public static final int INSTALL_EE_MODULE_CONFIG = 0x1101;
    public static final int INSTALL_MODULE_JNDI_BINDINGS = 0x1200;
    public static final int INSTALL_DEPENDS_ON_ANNOTATION = 0x1210;
    public static final int INSTALL_EE_COMPONENT = 0x1230;
    public static final int INSTALL_SERVLET_INIT_DEPLOYMENT = 0x1300;
    public static final int INSTALL_JAXRS_COMPONENT = 0x1400;
    public static final int INSTALL_JAXRS_DEPLOYMENT = 0x1500;
    public static final int INSTALL_JSF_ANNOTATIONS = 0x1600;
    public static final int INSTALL_ARQUILLIAN_DEPLOYMENT = 0x1700;
    public static final int INSTALL_JDBC_DRIVER = 0x1800;
    public static final int INSTALL_TRANSACTION_BINDINGS = 0x1900;
    public static final int INSTALL_PERSISTENCE_PROVIDER = 0x1A00;
    public static final int INSTALL_PERSISTENTUNIT = 0x1A50;
    public static final int INSTALL_WELD_DEPLOYMENT = 0x1B00;
    public static final int INSTALL_WELD_BEAN_MANAGER = 0x1C00;
    public static final int INSTALL_WAR_DEPLOYMENT = 0x1D00;

    // CLEANUP
    public static final int CLEANUP_REFLECTION_INDEX = 0x0100;
}
|
AS7-965 Fix @PermitAll processing on EJB
was: a5bd9681a066767a20da90f2765a380cd88f6528
|
server/src/main/java/org/jboss/as/server/deployment/Phase.java
|
AS7-965 Fix @PermitAll processing on EJB
|
<ide><path>server/src/main/java/org/jboss/as/server/deployment/Phase.java
<ide> public static final int PARSE_EJB_RUN_AS_ANNOTATION = 0x1904;
<ide> public static final int PARSE_EJB_DENY_ALL_ANNOTATION = 0x1905;
<ide> public static final int PARSE_EJB_ROLES_ALLOWED_ANNOTATION = 0x1906;
<add> public static final int PARSE_EJB_PERMIT_ALL_ANNOTATION = 0x1907;
<ide> // should be after ConcurrencyManagement annotation processor
<ide> public static final int PARSE_EJB_LOCK_ANNOTATION = 0x1A00;
<ide> public static final int PARSE_EJB_STATEFUL_TIMEOUT_ANNOTATION = 0x1A01;
|
|
Java
|
agpl-3.0
|
0bc318809fa7b7d4ee27962ba940b8294f138674
| 0 |
geothomasp/kcmit,jwillia/kc-old1,iu-uits-es/kc,kuali/kc,UniversityOfHawaiiORS/kc,geothomasp/kcmit,ColostateResearchServices/kc,jwillia/kc-old1,jwillia/kc-old1,geothomasp/kcmit,kuali/kc,geothomasp/kcmit,iu-uits-es/kc,UniversityOfHawaiiORS/kc,ColostateResearchServices/kc,mukadder/kc,kuali/kc,mukadder/kc,geothomasp/kcmit,mukadder/kc,iu-uits-es/kc,jwillia/kc-old1,ColostateResearchServices/kc,UniversityOfHawaiiORS/kc
|
/*
* Copyright 2007 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.web.struts.action;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import static org.apache.commons.lang.StringUtils.replace;
import static org.kuali.RiceConstants.CONFIRMATION_QUESTION;
import static org.kuali.RiceConstants.EMPTY_STRING;
import static org.kuali.RiceConstants.QUESTION_CLICKED_BUTTON;
import static org.kuali.kra.infrastructure.KraServiceLocator.getService;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.kuali.RiceConstants;
import org.kuali.core.question.ConfirmationQuestion;
import org.kuali.core.service.KualiConfigurationService;
import org.kuali.core.web.struts.action.KualiTransactionalDocumentActionBase;
import org.kuali.core.web.struts.form.KualiForm;
import org.kuali.kra.budget.bo.BudgetVersionOverview;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.KraServiceLocator;
import org.kuali.kra.service.ResearchDocumentService;
import org.kuali.kra.web.struts.form.KraTransactionalDocumentFormBase;
import org.kuali.notification.util.NotificationConstants;
import edu.iu.uis.eden.clientapp.IDocHandler;
// TODO : should move this class to org.kuali.kra.web.struts.action
public class KraTransactionalDocumentActionBase extends KualiTransactionalDocumentActionBase {
private static final Log LOG = LogFactory.getLog(KraTransactionalDocumentActionBase.class);
@Override
/**
* Overriding headerTab to customize how clearing tab state works on PDForm.
*/
public ActionForward headerTab(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
((KualiForm) form).setTabStates(new HashMap());
return super.headerTab(mapping, form, request, response);
}
public ActionForward updateTextArea(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) {
// parse out the important strings from our methodToCall parameter
String fullParameter = (String) request.getAttribute(RiceConstants.METHOD_TO_CALL_ATTRIBUTE);
// parse textfieldname:htmlformaction
String parameterFields = StringUtils.substringBetween(fullParameter, RiceConstants.METHOD_TO_CALL_PARM2_LEFT_DEL, RiceConstants.METHOD_TO_CALL_PARM2_RIGHT_DEL);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "fullParameter: " + fullParameter );
LOG.debug( "parameterFields: " + parameterFields );
}
String[] keyValue = null;
if (StringUtils.isNotBlank(parameterFields)) {
String[] textAreaParams = parameterFields.split(RiceConstants.FIELD_CONVERSIONS_SEPERATOR);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "lookupParams: " + textAreaParams );
}
for (int i = 0; i < textAreaParams.length; i++) {
keyValue = textAreaParams[i].split(RiceConstants.FIELD_CONVERSION_PAIR_SEPERATOR);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "keyValue[0]: " + keyValue[0] );
LOG.debug( "keyValue[1]: " + keyValue[1] );
}
}
}
request.setAttribute(org.kuali.kra.infrastructure.Constants.TEXT_AREA_FIELD_NAME, keyValue[0]);
request.setAttribute(org.kuali.kra.infrastructure.Constants.HTML_FORM_ACTION,keyValue[1]);
request.setAttribute(org.kuali.kra.infrastructure.Constants.TEXT_AREA_FIELD_LABEL,keyValue[2]);
if (form instanceof KualiForm && StringUtils.isNotEmpty(((KualiForm) form).getAnchor())) {
request.setAttribute(org.kuali.kra.infrastructure.Constants.TEXT_AREA_FIELD_ANCHOR,((KualiForm) form).getAnchor());
}
return mapping.findForward("updateTextArea");
}
public ActionForward postTextAreaToParent(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) {
return mapping.findForward("basic");
}
/**
*
* This method...
* @param question
* @param yesMethodName
* @param noMethodName
* @return
* @throws Exception
*/
public ActionForward confirm(StrutsConfirmation question, String yesMethodName, String noMethodName) throws Exception {
// Figure out what the caller is. We want the direct caller of confirm()
question.setCaller(((KualiForm) question.getForm()).getMethodToCall());
LOG.info("Caller is " + question.getCaller());
LOG.info("Setting caller from stacktrace " + Arrays.asList(new Throwable().getStackTrace()));
LOG.info("Current action is " + getClass());
if (question.hasQuestionInstAttributeName()) {
Object buttonClicked = question.getRequest().getParameter(QUESTION_CLICKED_BUTTON);
if (ConfirmationQuestion.YES.equals(buttonClicked) && isNotBlank(yesMethodName)) {
return dispatchMethod(question.getMapping(), question.getForm(), question.getRequest(), question.getResponse(), yesMethodName);
}
else if (isNotBlank(noMethodName)) {
return dispatchMethod(question.getMapping(), question.getForm(), question.getRequest(), question.getResponse(), noMethodName);
}
}
else {
return this.performQuestionWithoutInput(question, EMPTY_STRING);
}
return question.getMapping().findForward(Constants.MAPPING_BASIC);
}
/**
* Generically creates a <code>{@link StrutsConfirmation}</code> instance while deriving the question from a resource bundle.<br/>
* <br/>
* In this case, the question in the resource bundle is expected to be parameterized. This method takes this into account, and passes
* parameters and replaces tokens in the question with the parameters.
*
* @param mapping The mapping associated with this action.
* @param form The Proposal Development form.
* @param request the HTTP request
* @param response the HTTP response
* @return the confirmation question
* @throws Exception
*/
protected StrutsConfirmation buildParameterizedConfirmationQuestion(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, String questionId, String configurationId, String ... params) throws Exception {
StrutsConfirmation retval = new StrutsConfirmation();
retval.setMapping(mapping);
retval.setForm(form);
retval.setRequest(request);
retval.setResponse(response);
retval.setQuestionId(questionId);
retval.setQuestionType(CONFIRMATION_QUESTION);
KualiConfigurationService kualiConfiguration = getService(KualiConfigurationService.class);
String questionText = kualiConfiguration.getPropertyString(configurationId);
for (int i = 0; i < params.length; i++) {
questionText = replace(questionText, "{" + i + "}", params[i]);
}
retval.setQuestionText(questionText);
return retval;
}
/**
* Wrapper around <code>{@link performQuestionWithoutInput(ActionMapping, ActionForm, HttpServletRequest, HttpServletResponse)}</code> using
* <code>{@link StrutsConfirmation}</code>
*
* @param question StrutsConfirmation
* @param context
* @return ActionForward
* @throws Exception
*/
protected ActionForward performQuestionWithoutInput(StrutsConfirmation question, String context) throws Exception {
return this.performQuestionWithoutInput(question.getMapping(), question.getForm(), question.getRequest(), question.getResponse(),
question.getQuestionId(), question.getQuestionText(), question.getQuestionType(),
question.getCaller(), context);
}
protected String buildForwardUrl(Long routeHeaderId) {
ResearchDocumentService researchDocumentService = KraServiceLocator.getService(ResearchDocumentService.class);
String forward = researchDocumentService.getDocHandlerUrl(routeHeaderId);
if (forward.indexOf("?") == -1) {
forward += "?";
} else {
forward += "&";
}
forward += IDocHandler.ROUTEHEADER_ID_PARAMETER + "=" + routeHeaderId;
forward += "&" + IDocHandler.COMMAND_PARAMETER + "=" + NotificationConstants.NOTIFICATION_DETAIL_VIEWS.DOC_SEARCH_VIEW;
// if (getUserSession(request).isBackdoorInUse()) {
// forward += "&" + IDocHandler.BACKDOOR_ID_PARAMETER + "=" + getUserSession(request).getNetworkId();
// }
return forward;
}
protected void setFinalBudgetVersion(Integer finalBudgetVersion, List<BudgetVersionOverview> budgetVersions) {
for (BudgetVersionOverview budgetVersion: budgetVersions) {
if (budgetVersion.getBudgetVersionNumber().equals(finalBudgetVersion)) {
budgetVersion.setFinalVersionFlag(true);
} else {
budgetVersion.setFinalVersionFlag(false);
}
}
}
}
|
src/main/java/org/kuali/kra/web/struts/action/KraTransactionalDocumentActionBase.java
|
/*
* Copyright 2007 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.web.struts.action;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import static org.apache.commons.lang.StringUtils.replace;
import static org.kuali.RiceConstants.CONFIRMATION_QUESTION;
import static org.kuali.RiceConstants.EMPTY_STRING;
import static org.kuali.RiceConstants.QUESTION_CLICKED_BUTTON;
import static org.kuali.kra.infrastructure.KraServiceLocator.getService;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.kuali.RiceConstants;
import org.kuali.core.question.ConfirmationQuestion;
import org.kuali.core.service.KualiConfigurationService;
import org.kuali.core.web.struts.action.KualiTransactionalDocumentActionBase;
import org.kuali.core.web.struts.form.KualiForm;
import org.kuali.kra.budget.bo.BudgetVersionOverview;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.KraServiceLocator;
import org.kuali.kra.service.ResearchDocumentService;
import org.kuali.kra.web.struts.form.KraTransactionalDocumentFormBase;
import org.kuali.notification.util.NotificationConstants;
import edu.iu.uis.eden.clientapp.IDocHandler;
// TODO : should move this class to org.kuali.kra.web.struts.action
public class KraTransactionalDocumentActionBase extends KualiTransactionalDocumentActionBase {
private static final Log LOG = LogFactory.getLog(KraTransactionalDocumentActionBase.class);
@Override
/**
* Overriding headerTab to customize how clearing tab state works on PDForm.
*/
public ActionForward headerTab(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
((KualiForm) form).setTabStates(new HashMap());
return super.headerTab(mapping, form, request, response);
}
public ActionForward updateTextArea(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) {
// parse out the important strings from our methodToCall parameter
String fullParameter = (String) request.getAttribute(RiceConstants.METHOD_TO_CALL_ATTRIBUTE);
// parse textfieldname:htmlformaction
String parameterFields = StringUtils.substringBetween(fullParameter, RiceConstants.METHOD_TO_CALL_PARM2_LEFT_DEL, RiceConstants.METHOD_TO_CALL_PARM2_RIGHT_DEL);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "fullParameter: " + fullParameter );
LOG.debug( "parameterFields: " + parameterFields );
}
String[] keyValue = null;
if (StringUtils.isNotBlank(parameterFields)) {
String[] textAreaParams = parameterFields.split(RiceConstants.FIELD_CONVERSIONS_SEPERATOR);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "lookupParams: " + textAreaParams );
}
for (int i = 0; i < textAreaParams.length; i++) {
keyValue = textAreaParams[i].split(RiceConstants.FIELD_CONVERSION_PAIR_SEPERATOR);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "keyValue[0]: " + keyValue[0] );
LOG.debug( "keyValue[1]: " + keyValue[1] );
}
}
}
request.setAttribute(org.kuali.kra.infrastructure.Constants.TEXT_AREA_FIELD_NAME, keyValue[0]);
request.setAttribute(org.kuali.kra.infrastructure.Constants.HTML_FORM_ACTION,keyValue[1]);
request.setAttribute(org.kuali.kra.infrastructure.Constants.TEXT_AREA_FIELD_LABEL,keyValue[2]);
if (form instanceof KualiForm && StringUtils.isNotEmpty(((KualiForm) form).getAnchor())) {
request.setAttribute(org.kuali.kra.infrastructure.Constants.TEXT_AREA_FIELD_ANCHOR,((KualiForm) form).getAnchor());
}
return mapping.findForward("updateTextArea");
}
public ActionForward postTextAreaToParent(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) {
return mapping.findForward("basic");
}
/**
*
* This method...
* @param question
* @param yesMethodName
* @param noMethodName
* @return
* @throws Exception
*/
public ActionForward confirm(StrutsConfirmation question, String yesMethodName, String noMethodName) throws Exception {
// Figure out what the caller is. We want the direct caller of confirm()
question.setCaller(new Throwable().getStackTrace()[1].getMethodName());
LOG.info("Caller is " + question.getCaller());
LOG.info("Setting caller from stacktrace " + Arrays.asList(new Throwable().getStackTrace()));
LOG.info("Current action is " + getClass());
if (question.hasQuestionInstAttributeName()) {
Object buttonClicked = question.getRequest().getParameter(QUESTION_CLICKED_BUTTON);
if (ConfirmationQuestion.YES.equals(buttonClicked) && isNotBlank(yesMethodName)) {
return dispatchMethod(question.getMapping(), question.getForm(), question.getRequest(), question.getResponse(), yesMethodName);
}
else if (isNotBlank(noMethodName)) {
return dispatchMethod(question.getMapping(), question.getForm(), question.getRequest(), question.getResponse(), noMethodName);
}
}
else {
return this.performQuestionWithoutInput(question, EMPTY_STRING);
}
return question.getMapping().findForward(Constants.MAPPING_BASIC);
}
/**
* Generically creates a <code>{@link StrutsConfirmation}</code> instance while deriving the question from a resource bundle.<br/>
* <br/>
* In this case, the question in the resource bundle is expected to be parameterized. This method takes this into account, and passes
* parameters and replaces tokens in the question with the parameters.
*
* @param mapping The mapping associated with this action.
* @param form The Proposal Development form.
* @param request the HTTP request
* @param response the HTTP response
* @return the confirmation question
* @throws Exception
*/
protected StrutsConfirmation buildParameterizedConfirmationQuestion(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, String questionId, String configurationId, String ... params) throws Exception {
StrutsConfirmation retval = new StrutsConfirmation();
retval.setMapping(mapping);
retval.setForm(form);
retval.setRequest(request);
retval.setResponse(response);
retval.setQuestionId(questionId);
retval.setQuestionType(CONFIRMATION_QUESTION);
KualiConfigurationService kualiConfiguration = getService(KualiConfigurationService.class);
String questionText = kualiConfiguration.getPropertyString(configurationId);
for (int i = 0; i < params.length; i++) {
questionText = replace(questionText, "{" + i + "}", params[i]);
}
retval.setQuestionText(questionText);
return retval;
}
/**
* Wrapper around <code>{@link performQuestionWithoutInput(ActionMapping, ActionForm, HttpServletRequest, HttpServletResponse)}</code> using
* <code>{@link StrutsConfirmation}</code>
*
* @param question StrutsConfirmation
* @param context
* @return ActionForward
* @throws Exception
*/
protected ActionForward performQuestionWithoutInput(StrutsConfirmation question, String context) throws Exception {
return this.performQuestionWithoutInput(question.getMapping(), question.getForm(), question.getRequest(), question.getResponse(),
question.getQuestionId(), question.getQuestionText(), question.getQuestionType(),
question.getCaller(), context);
}
protected String buildForwardUrl(Long routeHeaderId) {
ResearchDocumentService researchDocumentService = KraServiceLocator.getService(ResearchDocumentService.class);
String forward = researchDocumentService.getDocHandlerUrl(routeHeaderId);
if (forward.indexOf("?") == -1) {
forward += "?";
} else {
forward += "&";
}
forward += IDocHandler.ROUTEHEADER_ID_PARAMETER + "=" + routeHeaderId;
forward += "&" + IDocHandler.COMMAND_PARAMETER + "=" + NotificationConstants.NOTIFICATION_DETAIL_VIEWS.DOC_SEARCH_VIEW;
// if (getUserSession(request).isBackdoorInUse()) {
// forward += "&" + IDocHandler.BACKDOOR_ID_PARAMETER + "=" + getUserSession(request).getNetworkId();
// }
return forward;
}
protected void setFinalBudgetVersion(Integer finalBudgetVersion, List<BudgetVersionOverview> budgetVersions) {
for (BudgetVersionOverview budgetVersion: budgetVersions) {
if (budgetVersion.getBudgetVersionNumber().equals(finalBudgetVersion)) {
budgetVersion.setFinalVersionFlag(true);
} else {
budgetVersion.setFinalVersionFlag(false);
}
}
}
}
|
KRACOEUS-445 - using methodToCall instead of grabbing method from the throwable
|
src/main/java/org/kuali/kra/web/struts/action/KraTransactionalDocumentActionBase.java
|
KRACOEUS-445 - using methodToCall instead of grabbing method from the throwable
|
<ide><path>rc/main/java/org/kuali/kra/web/struts/action/KraTransactionalDocumentActionBase.java
<ide> */
<ide> public ActionForward confirm(StrutsConfirmation question, String yesMethodName, String noMethodName) throws Exception {
<ide> // Figure out what the caller is. We want the direct caller of confirm()
<del> question.setCaller(new Throwable().getStackTrace()[1].getMethodName());
<add> question.setCaller(((KualiForm) question.getForm()).getMethodToCall());
<ide> LOG.info("Caller is " + question.getCaller());
<ide> LOG.info("Setting caller from stacktrace " + Arrays.asList(new Throwable().getStackTrace()));
<ide> LOG.info("Current action is " + getClass());
|
|
JavaScript
|
mit
|
4e5a367a44ce6b209a419f6b04d3f64069177e5d
| 0 |
yetu/controlcenter,yetu/controlcenter,yetu/controlcenter
|
/* eslint-env node */
var webpack = require('webpack');
var path = require('path');
module.exports = {
context: __dirname,
entry: [
'./app/app.jsx'
// 'webpack-dev-server/client?http://localhost:8899',
// 'webpack/hot/only-dev-server'
],
output: {
filename: 'bundle.js',
path: path.join(__dirname, '/dist'),
publicPath: '/assets/dist/',
chunkFilename: '[id].chunk.js'
},
resolve: {
modulesDirectories: [ 'node_modules', 'bower_components' ],
alias: {
'setup': path.join(__dirname, 'app/project-setup'),
'actions': path.join(__dirname, 'app/actions'),
'screens': path.join(__dirname, 'app/screens'),
'stores': path.join(__dirname, 'app/stores'),
'common': path.join(__dirname, 'app/common'),
'mixins': path.join(__dirname, 'app/common/mixins'),
'models': path.join(__dirname, 'app/common/models'),
'helpers': path.join(__dirname, 'app/common/helpers'),
'services': path.join(__dirname, 'app/common/services')
},
extensions: [ '', '.coffee', '.cjsx', '.js', '.jsx' ]
},
plugins: [
new webpack.HotModuleReplacementPlugin(),
new webpack.NoErrorsPlugin()
],
module: {
preLoaders: [
{
test: /\.coffee?$/,
loader: 'coffee-lint-loader',
exclude: /node_modules/
}
],
loaders: [
{
test: /\.coffee?$/,
loader: 'coffee-loader'
},
{
test: /\.cjsx$/,
loaders: ['coffee', 'cjsx']
},
{
test: /\.jsx?$/,
// TODO: Add 'react-hot' only for local dev deployment. Create a separate config.
// TODO: See react-hot-loader/docs/Troubleshooting.md for details.
// loaders: [ 'react-hot', 'babel' ],
loaders: [ 'babel' ],
exclude: /(node_modules|bower_components)/
},
// style! attaches the css to the DOM automatically,
// which is not optimal for components
{
test: /project-setup\/.*\.scss$/,
loader: 'css!autoprefixer!sass?' +
'includePaths[]=' + path.join(__dirname, 'app/project-setup') +
'&includePaths[]=' + path.join(__dirname, 'bower_components/foundation/scss')
},
{
test: /\/(screens|common)\/.*\.scss$/,
loader: 'style/useable!css!autoprefixer!sass?' +
'includePaths[]=' + path.join(__dirname, 'app/project-setup') +
'&includePaths[]=' + path.join(__dirname, 'bower_components/foundation/scss')
},
{ test: /\.(png|jpg)$/, loader: 'url?limit=32768' },
{ test: /\.jade$/, loader: 'jade' },
{
test: /\.woff(\?v=[0-9]\.[0-9]\.[0-9])?$/,
loader: 'url?limit=10000&mimetype=application/font-woff'
},
{ test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: 'url?limit=10000' }
],
noParse: []
},
coffeelint: {
configFile: path.join(__dirname, '../coffee-lint.json')
},
externals: {},
devtool: 'eval'
};
|
public/webpack.config.js
|
/* eslint-env node */
var webpack = require('webpack');
var path = require('path');
module.exports = {
context: __dirname,
entry: [
'./app/app.jsx'
// 'webpack-dev-server/client?http://localhost:8899',
// 'webpack/hot/only-dev-server'
],
output: {
filename: 'bundle.js',
path: path.join(__dirname, '/dist'),
publicPath: '/assets/dist/',
chunkFilename: '[id].chunk.js'
},
resolve: {
modulesDirectories: [ 'node_modules', 'bower_components' ],
alias: {
'setup': path.join(__dirname, 'app/project-setup'),
'actions': path.join(__dirname, 'app/actions'),
'screens': path.join(__dirname, 'app/screens'),
'stores': path.join(__dirname, 'app/stores'),
'common': path.join(__dirname, 'app/common'),
'mixins': path.join(__dirname, 'app/common/mixins'),
'models': path.join(__dirname, 'app/common/models'),
'helpers': path.join(__dirname, 'app/common/helpers'),
'services': path.join(__dirname, 'app/common/services')
},
extensions: [ '', '.coffee', '.cjsx', '.js', '.jsx' ]
},
plugins: [
new webpack.HotModuleReplacementPlugin(),
new webpack.NoErrorsPlugin()
],
module: {
preLoaders: [
{
test: /\.coffee?$/,
loader: 'coffee-lint-loader',
exclude: /node_modules/
}
],
loaders: [
{
test: /\.coffee?$/,
loader: 'coffee-loader'
},
{
test: /\.cjsx$/,
loaders: ['coffee', 'cjsx']
},
{
test: /\.jsx?$/,
// TODO: Add 'react-hot' only for local dev deployment. Create a separate config.
// TODO: See react-hot-loader/docs/Troubleshooting.md for details.
// loaders: [ 'react-hot', 'babel' ],
loaders: [ 'babel' ],
exclude: /(node_modules|bower_components)/
},
// style! attaches the css to the DOM automatically,
// which is not optimal for components
{
test: /project-setup\/.*\.scss$/,
loader: 'css!autoprefixer!sass?' +
'includePaths[]=' + path.join(__dirname, 'app/project-setup') +
'&includePaths[]=' + path.join(__dirname, 'bower_components/foundation/scss')
},
{
test: /\/(screens|common)\/.*\.scss$/,
loader: 'style/useable!css!autoprefixer!sass?' +
'includePaths[]=' + path.join(__dirname, 'app/project-setup') +
'&includePaths[]=' + path.join(__dirname, 'bower_components/foundation/scss')
},
{ test: /\.(png|jpg)$/, loader: 'url?limit=32768' },
{ test: /\.jade$/, loader: 'jade' },
{
test: /\.woff(\?v=[0-9]\.[0-9]\.[0-9])?$/,
loader: 'url?limit=10000&mimetype=application/font-woff'
},
{ test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: 'url?limit=10000' }
],
noParse: []
},
coffeelint: {
configFile: path.join(__dirname, '../coffee-lint.json')
},
externals: {},
devtool: 'eval'
};
console.log(path.join(__dirname, '../coffee-lint.json'));
|
Remove console.log from webpack.config
|
public/webpack.config.js
|
Remove console.log from webpack.config
|
<ide><path>ublic/webpack.config.js
<ide> externals: {},
<ide> devtool: 'eval'
<ide> };
<del>
<del>console.log(path.join(__dirname, '../coffee-lint.json'));
|
|
JavaScript
|
agpl-3.0
|
5766898ffb7789a7fd69e8d7d9c98e2b3b85fa26
| 0 |
ONLYOFFICE/sdkjs,ONLYOFFICE/sdkjs,ONLYOFFICE/sdkjs,ONLYOFFICE/sdkjs,ONLYOFFICE/sdkjs
|
/*
* (c) Copyright Ascensio System SIA 2010-2017
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia,
* EU, LV-1021.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
"use strict";
(function (window, undefined){
/**
* CDrawingDocContent
* @constructor
* @extends {CDocumentContent}
*/
function CDrawingDocContent(Parent, DrawingDocument, X, Y, XLimit, YLimit) {
CDocumentContent.call(this, Parent, DrawingDocument, X, Y, XLimit, YLimit, false, false, true);
this.FullRecalc = new CDocumentRecalculateState();
}
CDrawingDocContent.prototype = Object.create(CDocumentContent.prototype);
CDrawingDocContent.prototype.constructor = CDrawingDocContent;
CDrawingDocContent.prototype.Get_SummaryHeight = function(){
var fSummHeight = 0;
var nColumnsCount = this.Get_ColumnsCount();
for(var i = 0; i < this.Pages.length; ++i){
var oPage = this.Pages[i];
var fPageHeight = 0;
if(oPage.Sections.length > 0){
var aColumns = oPage.Sections[0].Columns;
for(var j = 0; j < aColumns.length; ++j){
var oColumn = aColumns[j];
for(var k = oColumn.Pos; k <= oColumn.EndPos; ++k){
var nElementPageIndex = this.private_GetElementPageIndex(k, i, j, nColumnsCount);
var fParagraphPageBottom = this.Content[k].Get_PageBounds(nElementPageIndex).Bottom;
if(fPageHeight < fParagraphPageBottom){
fPageHeight = fParagraphPageBottom;
}
}
}
}
else{
var Bounds = this.Get_PageBounds(i);
fPageHeight = Bounds.Bottom - Bounds.Top;
}
fSummHeight += fPageHeight;
}
return fSummHeight;
};
CDrawingDocContent.prototype.Get_ColumnsCount = function(){
var nColumnCount = 1;
if(this.Parent.getBodyPr){
var oBodyPr = this.Parent.getBodyPr();
nColumnCount = AscFormat.isRealNumber(oBodyPr.numCol) ? oBodyPr.numCol : 1;
}
return nColumnCount;
};
CDrawingDocContent.prototype.Get_PageContentStartPos2 = function(StartPageIndex, StartColumnIndex, ElementPageIndex, ElementIndex){
var ColumnsCount = this.Get_ColumnsCount();
var nColumnIndex = (StartColumnIndex + ElementPageIndex) - ((StartColumnIndex + ElementPageIndex) / ColumnsCount | 0) * ColumnsCount;
return this.Get_PageContentStartPos3(nColumnIndex);
};
CDrawingDocContent.prototype.Get_PageContentStartPos3 = function(nColumnIndex){
var X = this.X;
var Y = this.Y;
var XLimit = this.XLimit;
var YLimit = this.YLimit;
var ColumnSpaceBefore = 0;
var ColumnSpaceAfter = 0;
var nNumCol = this.Get_ColumnsCount();
var oBodyPr = this.Parent.getBodyPr && this.Parent.getBodyPr();
if(nNumCol > 1 && oBodyPr)
{
var fSpace = AscFormat.isRealNumber(oBodyPr.spcCol) ? oBodyPr.spcCol : 0;
var fColumnWidth = Math.max((this.XLimit - this.X - (nNumCol - 1)*fSpace)/nNumCol, 0);
X += nColumnIndex*(fColumnWidth + fSpace);
XLimit = X + fColumnWidth;
if(nColumnIndex > 0)
{
ColumnSpaceBefore = fSpace;
}
if(nColumnIndex < nNumCol - 1)
{
ColumnSpaceAfter = fSpace;
}
}
return {
X : X,
Y : Y,
XLimit : XLimit,
YLimit : YLimit,
ColumnSpaceBefore : ColumnSpaceBefore,
ColumnSpaceAfter : ColumnSpaceAfter
};
};
CDrawingDocContent.prototype.RecalculateContent = function(fWidth, fHeight, nStartPage){
if(this.Get_ColumnsCount() === 1){
CDocumentContent.prototype.RecalculateContent.call(this, fWidth, fHeight, nStartPage);
}
else{
this.Start_Recalculate(fWidth, fHeight);
if(this.Pages.length > 1){
var fSummaryHeight = this.Get_SummaryHeight();
var fNeedHeight = fSummaryHeight;
if(this.Get_ColumnsCount() > 1){
var fLow = fHeight, fHigh = fSummaryHeight;
while((fHigh - fLow) > 0.1){
var fCheckHeight = fLow + (fHigh - fLow)/2;
this.Start_Recalculate(fWidth, fCheckHeight);
if(this.Pages.length > 1){
fLow = fCheckHeight;
}
else{
fHigh = fCheckHeight;
fNeedHeight = fCheckHeight;
}
}
}
this.Start_Recalculate(fWidth, fNeedHeight + 0.01);
}
}
};
CDrawingDocContent.prototype.Start_Recalculate = function(fWidth, fHeight){
this.FullRecalc.PageIndex = 0;
this.FullRecalc.SectionIndex = 0;
this.FullRecalc.ColumnIndex = 0;
this.FullRecalc.StartIndex = 0;
this.FullRecalc.Start = true;
this.FullRecalc.StartPage = 0;
this.Reset(0, 0, fWidth, fHeight);
this.Recalculate_PageDrawing();
};
CDrawingDocContent.prototype.Recalculate_PageDrawing = function()
{
var nColumnsCount = this.Get_ColumnsCount();
var nPageIndex = this.FullRecalc.PageIndex;
this.Pages.length = nPageIndex + 1;
if(0 === this.FullRecalc.ColumnIndex && true === this.FullRecalc.Start)
{
var oPage = new CDocumentPage();
oPage.Pos = this.FullRecalc.StartIndex;
oPage.Sections[0] = new CDocumentPageSection();
for (var i = 0; i < nColumnsCount; ++i)
{
oPage.Sections[0].Columns[i] = new CDocumentPageColumn();
}
this.Pages[nPageIndex] = oPage;
}
this.Recalculate_PageColumn();
};
CDrawingDocContent.prototype.Recalculate_PageColumn = function()
{
var nPageIndex = this.FullRecalc.PageIndex;
var nColumnIndex = this.FullRecalc.ColumnIndex;
var nStartIndex = this.FullRecalc.StartIndex;
var oStartPos = this.Get_PageContentStartPos3(nColumnIndex);
var X = oStartPos.X;
var Y = oStartPos.Y;
var XLimit = oStartPos.XLimit;
var YLimit = oStartPos.YLimit;
var nColumnsCount = this.Get_ColumnsCount();
var aContent = this.Content;
var nCount = aContent.length;
var nRecalcResult = recalcresult_NextPage;
var oParagraph;
var bContinue = false;
var oPage = this.Pages[nPageIndex];
var oSection = oPage.Sections[0];
var oColumn = oSection.Columns[nColumnIndex];
oColumn.X = X;
oColumn.XLimit = XLimit;
oColumn.Y = Y;
oColumn.YLimit = YLimit;
oColumn.Pos = nStartIndex;
oColumn.Empty = false;
oColumn.SpaceBefore = oStartPos.ColumnSpaceBefore;
oColumn.SpaceAfter = oStartPos.ColumnSpaceAfter;
for(var i = nStartIndex; i < nCount; ++i)
{
oParagraph = this.Content[i];
if((0 === i && 0 === nPageIndex && 0 === nColumnIndex) || i != nStartIndex || (i === nStartIndex && true === this.FullRecalc.ResetStartElement))
{
oParagraph.Set_DocumentIndex(i);
oParagraph.Reset(X, Y, XLimit, YLimit, nPageIndex, nColumnIndex, nColumnsCount);
}
var nElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, nColumnIndex, nColumnsCount);
nRecalcResult = oParagraph.Recalculate_Page(nElementPageIndex);
if(nRecalcResult & recalcresult_NextElement)
{
Y = oParagraph.Get_PageBounds(nElementPageIndex).Bottom;
}
oColumn.Bounds.Bottom = Y;
if (nRecalcResult & recalcresult_CurPage)
{
if (nRecalcResult & recalcresultflags_Column)
{
this.FullRecalc.ColumnIndex = nColumnIndex;
}
else
{
this.FullRecalc.ColumnIndex = 0;
}
bContinue = true;
break;
}
else if(nRecalcResult & recalcresult_NextPage)
{
if (nRecalcResult & recalcresultflags_LastFromNewColumn)
{
oColumn.EndPos = i - 1;
oSection.EndPos = i - 1;
oPage.EndPos = i - 1;
bContinue = true;
this.FullRecalc.ColumnIndex = nColumnIndex + 1;
this.FullRecalc.PageIndex = nPageIndex;
this.FullRecalc.StartIndex = i;
this.FullRecalc.Start = true;
if (this.FullRecalc.ColumnIndex >= nColumnsCount)
{
this.FullRecalc.ColumnIndex = 0;
this.FullRecalc.PageIndex = nPageIndex + 1;
}
break;
}
else if (nRecalcResult & recalcresultflags_LastFromNewPage)
{
oColumn.EndPos = i - 1;
oSection.EndPos = i - 1;
oPage.EndPos = i - 1;
bContinue = true;
this.FullRecalc.SectionIndex = 0;
this.FullRecalc.ColumnIndex = 0;
this.FullRecalc.PageIndex = nPageIndex + 1;
this.FullRecalc.StartIndex = i;
this.FullRecalc.Start = true;
if (oColumn.EndPos === oColumn.Pos)
{
var Element = this.Content[oColumn.Pos];
var ElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, nColumnIndex, nColumnsCount);
if (true === Element.Is_EmptyPage(ElementPageIndex))
oColumn.Empty = true;
}
for (var TempColumnIndex = ColumnIndex + 1; TempColumnIndex < ColumnsCount; ++TempColumnIndex)
{
oSection.Columns[TempColumnIndex].Empty = true;
oSection.Columns[TempColumnIndex].Pos = i;
oSection.Columns[TempColumnIndex].EndPos = i - 1;
}
break;
}
else if (nRecalcResult & recalcresultflags_Page)
{
oColumn.EndPos = i;
oSection.EndPos = i;
oPage.EndPos = i;
bContinue = true;
this.FullRecalc.SectionIndex = 0;
this.FullRecalc.ColumnIndex = 0;
this.FullRecalc.PageIndex = nPageIndex + 1;
this.FullRecalc.StartIndex = i;
this.FullRecalc.Start = true;
if (oColumn.EndPos === oColumn.Pos)
{
var Element = this.Content[oColumn.Pos];
var ElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, nColumnIndex, nColumnsCount);
if (true === Element.Is_EmptyPage(nElementPageIndex))
oColumn.Empty = true;
}
for (var TempColumnIndex = nColumnIndex + 1; TempColumnIndex < nColumnsCount; ++TempColumnIndex)
{
var ElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, TempColumnIndex, nColumnsCount);
this.Content[Index].Recalculate_SkipPage(ElementPageIndex);
oSection.Columns[TempColumnIndex].Empty = true;
oSection.Columns[TempColumnIndex].Pos = i;
oSection.Columns[TempColumnIndex].EndPos = i - 1;
}
break;
}
else
{
oColumn.EndPos = i;
oSection.EndPos = i;
oPage.EndPos = i;
bContinue = true;
this.FullRecalc.ColumnIndex = nColumnIndex + 1;
if (this.FullRecalc.ColumnIndex >= nColumnsCount)
{
this.FullRecalc.SectionIndex = 0;
this.FullRecalc.ColumnIndex = 0;
this.FullRecalc.PageIndex = nPageIndex + 1;
}
this.FullRecalc.StartIndex = i;
this.FullRecalc.Start = true;
if (oColumn.EndPos === oColumn.Pos)
{
var Element = this.Content[oColumn.Pos];
var ElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, nColumnIndex, nColumnsCount);
if (true === Element.Is_EmptyPage(ElementPageIndex))
oColumn.Empty = true;
}
break;
}
}
}
if (i === nCount)
{
oPage.EndPos = nCount - 1;
oSection.EndPos = nCount - 1;
oColumn.EndPos = nCount - 1;
}
if(bContinue)
{
this.Recalculate_PageDrawing();
}
};
CDrawingDocContent.prototype.Draw = function(nPageIndex, pGraphics){
    // Renders the content. When the first page carries a section (column
    // layout), elements are drawn column by column; otherwise the base
    // CDocumentContent implementation is used.
    if(this.Pages.length > 0){
        var oSection = this.Pages[0].Sections[0];
        if(oSection){
            if (pGraphics.Start_Command)
            {
                pGraphics.Start_Command(AscFormat.DRAW_COMMAND_CONTENT);
            }
            for (var ColumnIndex = 0, ColumnsCount = oSection.Columns.length; ColumnIndex < ColumnsCount; ++ColumnIndex)
            {
                var Column = oSection.Columns[ColumnIndex];
                var ColumnStartPos = Column.Pos;
                var ColumnEndPos = Column.EndPos;
                // Floating objects must not fall into the column clip area.
                var FlowElements = [];
                if (ColumnsCount > 1)
                {
                    // Column clipping is currently disabled (kept for reference):
                    // pGraphics.SaveGrState();
                    var X = ColumnIndex === 0 ? 0 : Column.X - Column.SpaceBefore / 2;
                    // var XEnd = (ColumnIndex >= ColumnsCount - 1 ? Page.Width : Column.XLimit + Column.SpaceAfter / 2);
                    // pGraphics.AddClipRect(X, 0, XEnd - X, Page.Height);
                }
                // Draw every element assigned to this column at its
                // element-relative page index.
                for (var ContentPos = ColumnStartPos; ContentPos <= ColumnEndPos; ++ContentPos)
                {
                    var ElementPageIndex = this.private_GetElementPageIndex(ContentPos, 0, ColumnIndex, ColumnsCount);
                    this.Content[ContentPos].Draw(ElementPageIndex, pGraphics);
                }
                /*if (ColumnsCount > 1)
                {
                    pGraphics.RestoreGrState();
                }*/
            }
            if (pGraphics.End_Command)
            {
                pGraphics.End_Command();
            }
        }
        else{
            CDocumentContent.prototype.Draw.call(this, nPageIndex, pGraphics);
        }
    }
};
CDrawingDocContent.prototype.Write_ToBinary2 = function(writer){
    // Emit this class's type tag first, then let the base class serialize the body.
    writer.WriteLong(AscDFH.historyitem_type_DrawingContent);
    CDocumentContent.prototype.Write_ToBinary2.call(this, writer);
};
CDrawingDocContent.prototype.Read_FromBinary2 = function(reader){
    // Consume the leading type tag written by Write_ToBinary2, then let the
    // base class deserialize the body.
    reader.GetLong();
    CDocumentContent.prototype.Read_FromBinary2.call(this, reader);
};
CDrawingDocContent.prototype.Is_TableCellContent = function(){
    // Drawing-hosted content is never the content of a table cell.
    return false;
};
CDrawingDocContent.prototype.Is_ChartTitleContent = function(){
    // True only when this content is the text body of a chart title.
    var oTextBody = this.Parent;
    return (oTextBody instanceof AscFormat.CTextBody && oTextBody.parent instanceof AscFormat.CTitle) ? true : false;
};
CDrawingDocContent.prototype.Selection_Draw_Page = function(PageIndex){
    // Draws the selection highlight on the given page, handling both the
    // plain (no-section) layout and the multi-column layout.
    var CurPage = PageIndex;
    if (CurPage < 0 || CurPage >= this.Pages.length)
        return;
    var Pos_start = this.Pages[CurPage].Pos;
    var Pos_end = this.Pages[CurPage].EndPos;
    if (true === this.Selection.Use)
    {
        if(this.Selection.Flag === selectionflag_Common)
        {
            // Normalize the selection so that Start <= End.
            var Start = this.Selection.StartPos;
            var End = this.Selection.EndPos;
            if (Start > End)
            {
                Start = this.Selection.EndPos;
                End = this.Selection.StartPos;
            }
            // Clamp the selection to this page's element range.
            // NOTE(review): the repeated `var` declarations here re-declare
            // hoisted function-scope variables — legal JS, kept as-is.
            var Start = Math.max(Start, Pos_start);
            var End = Math.min(End, Pos_end);
            var Page = this.Pages[PageIndex];
            if(this.Pages[PageIndex].Sections.length === 0){
                for (var Index = Start; Index <= End; Index++)
                {
                    var ElementPageIndex = this.private_GetElementPageIndex(Index, CurPage, 0, 1);
                    this.Content[Index].Selection_Draw_Page(ElementPageIndex);
                }
            }
            else{
                var PageSection = Page.Sections[0];
                for (var ColumnIndex = 0, ColumnsCount = PageSection.Columns.length; ColumnIndex < ColumnsCount; ++ColumnIndex)
                {
                    // Re-normalize and re-clamp per column.
                    // NOTE(review): clamps against Page.Pos/Page.EndPos rather
                    // than the column's own Pos/EndPos — confirm intended.
                    var Pos_start = Page.Pos;
                    var Pos_end = Page.EndPos;
                    var Start = this.Selection.StartPos;
                    var End = this.Selection.EndPos;
                    if (Start > End)
                    {
                        Start = this.Selection.EndPos;
                        End = this.Selection.StartPos;
                    }
                    var Start = Math.max(Start, Pos_start);
                    var End = Math.min(End, Pos_end);
                    for (var Index = Start; Index <= End; ++Index)
                    {
                        var ElementPage = this.private_GetElementPageIndex(Index, 0, ColumnIndex, ColumnsCount);
                        this.Content[Index].Selection_Draw_Page(ElementPage);
                    }
                }
            }
        }
    }
};
CDrawingDocContent.prototype.Internal_GetContentPosByXY = function(X, Y, PageNum, ColumnsInfo)
{
    // Returns the index of the content element hit by point (X, Y) on page
    // PageNum, filling ColumnsInfo with the hit column and the column count.
    if (!ColumnsInfo)
        ColumnsInfo = {Column : 0, ColumnsCount : 1};
    if (undefined === PageNum || null === PageNum)
        PageNum = this.CurPage;
    // Check empty paragraphs that carry a section end first.
    var SectCount = this.Pages[PageNum].EndSectionParas.length;
    if(this.Pages[PageNum].Sections.length === 0){
        // No column layout on this page: defer to the base implementation.
        return CDocumentContent.prototype.Internal_GetContentPosByXY.call(this, X, Y, PageNum, ColumnsInfo);
    }
    for (var Index = 0; Index < SectCount; ++Index)
    {
        var Item = this.Pages[PageNum].EndSectionParas[Index];
        var Bounds = Item.Pages[0].Bounds;
        if (Y < Bounds.Bottom && Y > Bounds.Top && X > Bounds.Left && X < Bounds.Right)
        {
            var Element = this.Content[Item.Index];
            ColumnsInfo.Column = Element.Get_StartColumn();
            ColumnsInfo.ColumnsCount = Element.Get_ColumnsCount();
            return Item.Index;
        }
    }
    // Determine which section and column the point falls into.
    var Page = this.Pages[PageNum];
    var SectionIndex = 0;
    for (var SectionsCount = Page.Sections.length; SectionIndex < SectionsCount - 1; ++SectionIndex)
    {
        if (Y < Page.Sections[SectionIndex + 1].Y)
            break;
    }
    var PageSection = this.Pages[PageNum].Sections[SectionIndex];
    var ColumnsCount = PageSection.Columns.length;
    var ColumnIndex = 0;
    for (; ColumnIndex < ColumnsCount - 1; ++ColumnIndex)
    {
        // The boundary between adjacent columns is the midpoint of the gap.
        if (X < (PageSection.Columns[ColumnIndex].XLimit + PageSection.Columns[ColumnIndex + 1].X) / 2)
            break;
    }
    // TODO: handle the case where empty columns are not only at the end.
    while (ColumnIndex > 0 && true === PageSection.Columns[ColumnIndex].Empty)
        ColumnIndex--;
    ColumnsInfo.Column = ColumnIndex;
    ColumnsInfo.ColumnsCount = ColumnsCount;
    var Column = PageSection.Columns[ColumnIndex];
    var StartPos = Column.Pos;
    var EndPos = Column.EndPos;
    // Return the first element whose successor starts below Y.
    for (var Pos = StartPos; Pos < EndPos; ++Pos)
    {
        var Item = this.Content[Pos + 1];
        var PageBounds = Item.Get_PageBounds(0);
        if (Y < PageBounds.Top)
            return Pos;
    }
    // `Pos` is hoisted: it equals EndPos when the loop ran to completion.
    if (Pos === EndPos)
    {
        return EndPos;
    }
    return 0;
};
CDrawingDocContent.prototype.private_GetElementPageIndexByXY = function(ElementPos, X, Y, PageIndex){
    // Use the column-aware (document-style) hit test when a section layout
    // exists; otherwise defer to the plain document-content implementation.
    var bHasSections = this.Pages.length > 0 && this.Pages[0].Sections.length > 0;
    if (bHasSections)
    {
        return CDocument_prototype_private_GetElementPageIndexByXY.call(this, ElementPos, X, Y, PageIndex);
    }
    return CDocumentContent.prototype.private_GetElementPageIndexByXY.call(this, ElementPos, X, Y, PageIndex);
};
CDrawingDocContent.prototype.Copy = function(Parent, DrawingDocument)
{
    // Deep-copy this content into a fresh CDrawingDocContent owned by Parent.
    // NOTE(review): the trailing constructor arguments (Split, TurnOffInnerWrap,
    // bPresentation) exceed the declared 6-parameter constructor — confirm intent.
    var oDrawingDocument = DrawingDocument ? DrawingDocument : this.DrawingDocument;
    var oCopy = new CDrawingDocContent(Parent, oDrawingDocument, 0, 0, 0, 0, this.Split, this.TurnOffInnerWrap, this.bPresentation);
    oCopy.Internal_Content_RemoveAll();
    for (var nIndex = 0, nCount = this.Content.length; nIndex < nCount; ++nIndex)
    {
        oCopy.Internal_Content_Add(nIndex, this.Content[nIndex].Copy(oCopy, DrawingDocument), false);
    }
    return oCopy;
};
CDrawingDocContent.prototype.Copy3 = function(Parent)
{
    // Copy used for chart titles: forces presentation mode (last ctor argument
    // true) and clones elements with their Copy2 variant.
    var oCopy = new CDrawingDocContent(Parent, this.DrawingDocument, 0, 0, 0, 0, this.Split, this.TurnOffInnerWrap, true);
    oCopy.Internal_Content_RemoveAll();
    for (var nIndex = 0, nCount = this.Content.length; nIndex < nCount; ++nIndex)
    {
        oCopy.Internal_Content_Add(nIndex, this.Content[nIndex].Copy2(oCopy), false);
    }
    return oCopy;
};
CDrawingDocContent.prototype.Recalculate = function()
{
    // Delegate recalculation to the owning shape when this content belongs to
    // one (directly, or via its text body); otherwise lay out pages directly.
    var oParent = this.Parent;
    if (oParent)
    {
        if (oParent instanceof AscFormat.CShape)
        {
            oParent.recalculateContent();
            return;
        }
        var oGrandParent = oParent.parent;
        if (oGrandParent instanceof AscFormat.CShape)
        {
            oGrandParent.recalculateContent();
            return;
        }
    }
    if (this.XLimit > 0)
    {
        this.Recalculate_PageDrawing();
    }
};
// TODO: do this properly!!!
function CDocument_prototype_private_GetElementPageIndexByXY(ElementPos, X, Y, PageIndex)
{
    // Column-aware hit test borrowed from CDocument: finds the column of point X
    // on the given page for element ElementPos and maps it to the
    // element-relative page index. Invoked with `this` bound to the content.
    var Element = this.Content[ElementPos];
    if (!Element)
        return 0;
    var Page = this.Pages[PageIndex];
    if (!Page)
        return 0;
    // Locate the page section whose element range contains ElementPos.
    var PageSection = null;
    for (var SectionIndex = 0, SectionsCount = Page.Sections.length; SectionIndex < SectionsCount; ++SectionIndex)
    {
        if (Page.Sections[SectionIndex].Pos <= ElementPos && ElementPos <= Page.Sections[SectionIndex].EndPos)
        {
            PageSection = Page.Sections[SectionIndex];
            break;
        }
    }
    if (!PageSection)
        return 0;
    var ElementStartPage = Element.Get_StartPage_Relative();
    var ElementStartColumn = Element.Get_StartColumn();
    var ElementPagesCount = Element.Get_PagesCount();
    var ColumnsCount = PageSection.Columns.length;
    // Range of columns the element can occupy on this page.
    var StartColumn = 0;
    var EndColumn = ColumnsCount - 1;
    if (PageIndex === ElementStartPage)
    {
        StartColumn = Element.Get_StartColumn();
        EndColumn = Math.min(ElementStartColumn + ElementPagesCount - 1, ColumnsCount - 1);
    }
    else
    {
        StartColumn = 0;
        EndColumn = Math.min(ElementPagesCount - ElementStartColumn + (PageIndex - ElementStartPage) * ColumnsCount, ColumnsCount - 1);
    }
    // TODO: handle the case where empty columns are not only at the end.
    while (true === PageSection.Columns[EndColumn].Empty && EndColumn > StartColumn)
        EndColumn--;
    // Pick the first candidate column whose right boundary (midpoint of the gap
    // to the next column) lies to the right of X; default to the last candidate.
    var ResultColumn = EndColumn;
    for (var ColumnIndex = StartColumn; ColumnIndex < EndColumn; ++ColumnIndex)
    {
        if (X < (PageSection.Columns[ColumnIndex].XLimit + PageSection.Columns[ColumnIndex + 1].X) / 2)
        {
            ResultColumn = ColumnIndex;
            break;
        }
    }
    return this.private_GetElementPageIndex(ElementPos, PageIndex, ResultColumn, ColumnsCount);
}
// Export to the AscFormat namespace and close the module IIFE.
AscFormat.CDrawingDocContent = CDrawingDocContent;
})(window);
|
common/Drawings/Format/DrawingContent.js
|
/*
* (c) Copyright Ascensio System SIA 2010-2017
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at Lubanas st. 125a-25, Riga, Latvia,
* EU, LV-1021.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
"use strict";
(function (window, undefined){
/**
 * CDrawingDocContent
 * Document content hosted inside a drawing object (shape / chart title).
 * @constructor
 * @extends {CDocumentContent}
 */
function CDrawingDocContent(Parent, DrawingDocument, X, Y, XLimit, YLimit) {
    CDocumentContent.call(this, Parent, DrawingDocument, X, Y, XLimit, YLimit, false, false, true);
    // State of the incremental page/column recalculation loop.
    this.FullRecalc = new CDocumentRecalculateState();
}
CDrawingDocContent.prototype = Object.create(CDocumentContent.prototype);
CDrawingDocContent.prototype.constructor = CDrawingDocContent;
CDrawingDocContent.prototype.Get_SummaryHeight = function(){
    // Total height over all pages. For a column layout the page height is the
    // lowest element bottom among all columns; otherwise it is the page-bounds
    // extent.
    var fTotal = 0;
    var nColumnsCount = this.Get_ColumnsCount();
    for (var nPage = 0; nPage < this.Pages.length; ++nPage)
    {
        var oPage = this.Pages[nPage];
        var fPageHeight = 0;
        if (oPage.Sections.length > 0)
        {
            var aColumns = oPage.Sections[0].Columns;
            for (var nColumn = 0; nColumn < aColumns.length; ++nColumn)
            {
                var oColumn = aColumns[nColumn];
                for (var nPos = oColumn.Pos; nPos <= oColumn.EndPos; ++nPos)
                {
                    var nElementPage = this.private_GetElementPageIndex(nPos, nPage, nColumn, nColumnsCount);
                    var fBottom = this.Content[nPos].Get_PageBounds(nElementPage).Bottom;
                    if (fBottom > fPageHeight)
                        fPageHeight = fBottom;
                }
            }
        }
        else
        {
            var oBounds = this.Get_PageBounds(nPage);
            fPageHeight = oBounds.Bottom - oBounds.Top;
        }
        fTotal += fPageHeight;
    }
    return fTotal;
};
CDrawingDocContent.prototype.Get_ColumnsCount = function(){
    // The text-column count comes from the parent's body properties (numCol);
    // default to a single column when unavailable.
    if (this.Parent.getBodyPr)
    {
        var oBodyPr = this.Parent.getBodyPr();
        if (AscFormat.isRealNumber(oBodyPr.numCol))
        {
            return oBodyPr.numCol;
        }
    }
    return 1;
};
CDrawingDocContent.prototype.Get_PageContentStartPos2 = function(StartPageIndex, StartColumnIndex, ElementPageIndex, ElementIndex){
    // Map an element-relative page index to a column index (wrapping over the
    // column count via truncating division) and return that column's layout box.
    var nColumnsCount = this.Get_ColumnsCount();
    var nAbsColumn = StartColumnIndex + ElementPageIndex;
    var nColumnIndex = nAbsColumn - ((nAbsColumn / nColumnsCount) | 0) * nColumnsCount;
    return this.Get_PageContentStartPos3(nColumnIndex);
};
CDrawingDocContent.prototype.Get_PageContentStartPos3 = function(nColumnIndex){
    // Computes the layout rectangle and the inter-column spacing for the given
    // column index within this content's overall box.
    var oBodyPr = this.Parent.getBodyPr && this.Parent.getBodyPr();
    var nNumCol = this.Get_ColumnsCount();
    var X = this.X;
    var Y = this.Y;
    var XLimit = this.XLimit;
    var YLimit = this.YLimit;
    var fSpaceBefore = 0;
    var fSpaceAfter = 0;
    if (nNumCol > 1 && oBodyPr)
    {
        var fSpace = AscFormat.isRealNumber(oBodyPr.spcCol) ? oBodyPr.spcCol : 0;
        // Split the available width evenly between the columns, clamped at zero.
        var fColumnWidth = Math.max((this.XLimit - this.X - (nNumCol - 1) * fSpace) / nNumCol, 0);
        X += nColumnIndex * (fColumnWidth + fSpace);
        XLimit = X + fColumnWidth;
        // Half of the gap belongs to each neighboring column.
        if (nColumnIndex > 0)
            fSpaceBefore = fSpace;
        if (nColumnIndex < nNumCol - 1)
            fSpaceAfter = fSpace;
    }
    return {
        X                 : X,
        Y                 : Y,
        XLimit            : XLimit,
        YLimit            : YLimit,
        ColumnSpaceBefore : fSpaceBefore,
        ColumnSpaceAfter  : fSpaceAfter
    };
};
CDrawingDocContent.prototype.RecalculateContent = function(fWidth, fHeight, nStartPage){
    // Single-column content is laid out by the base class. Multi-column content
    // is laid out repeatedly, binary-searching the smallest height at which the
    // whole content fits onto a single page.
    if(this.Get_ColumnsCount() === 1){
        CDocumentContent.prototype.RecalculateContent.call(this, fWidth, fHeight, nStartPage);
    }
    else{
        this.Start_Recalculate(fWidth, fHeight);
        if(this.Pages.length > 1){
            // Overflowed the requested height: the summary height is an upper
            // bound for the height needed to fit everything on one page.
            var fSummaryHeight = this.Get_SummaryHeight();
            var fNeedHeight = fSummaryHeight;
            if(this.Get_ColumnsCount() > 1){
                // Binary search in (fHeight, fSummaryHeight] for the minimal
                // height (tolerance 0.1) producing a single-page layout.
                var fLow = fHeight, fHigh = fSummaryHeight;
                while((fHigh - fLow) > 0.1){
                    var fCheckHeight = fLow + (fHigh - fLow)/2;
                    this.Start_Recalculate(fWidth, fCheckHeight);
                    if(this.Pages.length > 1){
                        fLow = fCheckHeight;
                    }
                    else{
                        fHigh = fCheckHeight;
                        fNeedHeight = fCheckHeight;
                    }
                }
            }
            // Final pass with a small epsilon so rounding cannot reintroduce a
            // page break.
            this.Start_Recalculate(fWidth, fNeedHeight + 0.01);
        }
    }
};
CDrawingDocContent.prototype.Start_Recalculate = function(fWidth, fHeight){
    // Reset the incremental-recalculation state and lay everything out from
    // scratch inside a fWidth x fHeight box.
    var oState = this.FullRecalc;
    oState.PageIndex    = 0;
    oState.SectionIndex = 0;
    oState.ColumnIndex  = 0;
    oState.StartIndex   = 0;
    oState.Start        = true;
    oState.StartPage    = 0;
    this.Reset(0, 0, fWidth, fHeight);
    this.Recalculate_PageDrawing();
};
CDrawingDocContent.prototype.Recalculate_PageDrawing = function()
{
    // Ensure a fresh page object (one section with one column object per text
    // column) exists for the page being recalculated, then recalculate the
    // current column.
    var nColumnsCount = this.Get_ColumnsCount();
    var nPageIndex = this.FullRecalc.PageIndex;
    this.Pages.length = nPageIndex + 1;
    if (true === this.FullRecalc.Start && 0 === this.FullRecalc.ColumnIndex)
    {
        var oPage = new CDocumentPage();
        oPage.Pos = this.FullRecalc.StartIndex;
        var oPageSection = new CDocumentPageSection();
        for (var nColumn = 0; nColumn < nColumnsCount; ++nColumn)
        {
            oPageSection.Columns[nColumn] = new CDocumentPageColumn();
        }
        oPage.Sections[0] = oPageSection;
        this.Pages[nPageIndex] = oPage;
    }
    this.Recalculate_PageColumn();
};
CDrawingDocContent.prototype.Recalculate_PageColumn = function()
{
    // Lays out content elements into the current column (driven by the
    // this.FullRecalc state), then chains into the next column/page via
    // Recalculate_PageDrawing when a per-element recalc result requires it.
    var nPageIndex   = this.FullRecalc.PageIndex;
    var nColumnIndex = this.FullRecalc.ColumnIndex;
    var nStartIndex  = this.FullRecalc.StartIndex;
    var oStartPos = this.Get_PageContentStartPos3(nColumnIndex);
    var X      = oStartPos.X;
    var Y      = oStartPos.Y;
    var XLimit = oStartPos.XLimit;
    var YLimit = oStartPos.YLimit;
    var nColumnsCount = this.Get_ColumnsCount();
    var aContent      = this.Content;
    var nCount        = aContent.length;
    var nRecalcResult = recalcresult_NextPage;
    var oParagraph;
    var bContinue = false;
    var oPage    = this.Pages[nPageIndex];
    var oSection = oPage.Sections[0];
    var oColumn  = oSection.Columns[nColumnIndex];
    oColumn.X           = X;
    oColumn.XLimit      = XLimit;
    oColumn.Y           = Y;
    oColumn.YLimit      = YLimit;
    oColumn.Pos         = nStartIndex;
    oColumn.Empty       = false;
    oColumn.SpaceBefore = oStartPos.ColumnSpaceBefore;
    oColumn.SpaceAfter  = oStartPos.ColumnSpaceAfter;
    for (var i = nStartIndex; i < nCount; ++i)
    {
        oParagraph = this.Content[i];
        // Reset element geometry when starting fresh: first element overall, a
        // later element within this column, or an explicit restart of the
        // column's start element.
        if ((0 === i && 0 === nPageIndex && 0 === nColumnIndex) || i != nStartIndex || (i === nStartIndex && true === this.FullRecalc.ResetStartElement))
        {
            oParagraph.Set_DocumentIndex(i);
            oParagraph.Reset(X, Y, XLimit, YLimit, nPageIndex, nColumnIndex, nColumnsCount);
        }
        var nElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, nColumnIndex, nColumnsCount);
        nRecalcResult = oParagraph.Recalculate_Page(nElementPageIndex);
        if (nRecalcResult & recalcresult_NextElement)
        {
            // Element fits: advance the flow position to its bottom bound.
            Y = oParagraph.Get_PageBounds(nElementPageIndex).Bottom;
        }
        oColumn.Bounds.Bottom = Y;
        if (nRecalcResult & recalcresult_CurPage)
        {
            // Re-run the current page, either from this column or from column 0.
            if (nRecalcResult & recalcresultflags_Column)
            {
                this.FullRecalc.ColumnIndex = nColumnIndex;
            }
            else
            {
                this.FullRecalc.ColumnIndex = 0;
            }
            bContinue = true;
            break;
        }
        else if (nRecalcResult & recalcresult_NextPage)
        {
            if (nRecalcResult & recalcresultflags_LastFromNewColumn)
            {
                // The current element must start a new column: close this
                // column before it and continue in the next column (or on the
                // next page when this was the last column).
                oColumn.EndPos  = i - 1;
                oSection.EndPos = i - 1;
                oPage.EndPos    = i - 1;
                bContinue = true;
                this.FullRecalc.ColumnIndex = nColumnIndex + 1;
                this.FullRecalc.PageIndex   = nPageIndex;
                this.FullRecalc.StartIndex  = i;
                this.FullRecalc.Start       = true;
                if (this.FullRecalc.ColumnIndex >= nColumnsCount)
                {
                    this.FullRecalc.ColumnIndex = 0;
                    this.FullRecalc.PageIndex   = nPageIndex + 1;
                }
                break;
            }
            else if (nRecalcResult & recalcresultflags_LastFromNewPage)
            {
                // The current element must start a new page: close this column
                // and mark the remaining columns of this section as empty.
                oColumn.EndPos  = i - 1;
                oSection.EndPos = i - 1;
                oPage.EndPos    = i - 1;
                bContinue = true;
                this.FullRecalc.SectionIndex = 0;
                this.FullRecalc.ColumnIndex  = 0;
                this.FullRecalc.PageIndex    = nPageIndex + 1;
                this.FullRecalc.StartIndex   = i;
                this.FullRecalc.Start        = true;
                if (oColumn.EndPos === oColumn.Pos)
                {
                    var Element = this.Content[oColumn.Pos];
                    var ElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, nColumnIndex, nColumnsCount);
                    if (true === Element.Is_EmptyPage(ElementPageIndex))
                        oColumn.Empty = true;
                }
                // Fix: the original loop used the undefined identifiers
                // ColumnIndex/ColumnsCount (a ReferenceError under "use strict");
                // the in-scope names are nColumnIndex/nColumnsCount.
                for (var TempColumnIndex = nColumnIndex + 1; TempColumnIndex < nColumnsCount; ++TempColumnIndex)
                {
                    oSection.Columns[TempColumnIndex].Empty  = true;
                    oSection.Columns[TempColumnIndex].Pos    = i;
                    oSection.Columns[TempColumnIndex].EndPos = i - 1;
                }
                break;
            }
            else if (nRecalcResult & recalcresultflags_Page)
            {
                // The element ends this page and continues on the next page;
                // skip its layout in the remaining (empty) columns.
                oColumn.EndPos  = i;
                oSection.EndPos = i;
                oPage.EndPos    = i;
                bContinue = true;
                this.FullRecalc.SectionIndex = 0;
                this.FullRecalc.ColumnIndex  = 0;
                this.FullRecalc.PageIndex    = nPageIndex + 1;
                this.FullRecalc.StartIndex   = i;
                this.FullRecalc.Start        = true;
                if (oColumn.EndPos === oColumn.Pos)
                {
                    var Element = this.Content[oColumn.Pos];
                    var ElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, nColumnIndex, nColumnsCount);
                    // Fix: test the freshly computed ElementPageIndex (the
                    // original mistakenly tested the loop-level
                    // nElementPageIndex, unlike every sibling branch).
                    if (true === Element.Is_EmptyPage(ElementPageIndex))
                        oColumn.Empty = true;
                }
                for (var TempColumnIndex = nColumnIndex + 1; TempColumnIndex < nColumnsCount; ++TempColumnIndex)
                {
                    var ElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, TempColumnIndex, nColumnsCount);
                    // Fix: the original indexed this.Content with the undefined
                    // identifier Index; the current element index is i.
                    this.Content[i].Recalculate_SkipPage(ElementPageIndex);
                    oSection.Columns[TempColumnIndex].Empty  = true;
                    oSection.Columns[TempColumnIndex].Pos    = i;
                    oSection.Columns[TempColumnIndex].EndPos = i - 1;
                }
                break;
            }
            else
            {
                // Plain "next page": the element continues in the next column,
                // or on the next page when this was the last column.
                oColumn.EndPos  = i;
                oSection.EndPos = i;
                oPage.EndPos    = i;
                bContinue = true;
                this.FullRecalc.ColumnIndex = nColumnIndex + 1;
                if (this.FullRecalc.ColumnIndex >= nColumnsCount)
                {
                    this.FullRecalc.SectionIndex = 0;
                    this.FullRecalc.ColumnIndex  = 0;
                    this.FullRecalc.PageIndex    = nPageIndex + 1;
                }
                this.FullRecalc.StartIndex = i;
                this.FullRecalc.Start      = true;
                if (oColumn.EndPos === oColumn.Pos)
                {
                    var Element = this.Content[oColumn.Pos];
                    var ElementPageIndex = this.private_GetElementPageIndex(i, nPageIndex, nColumnIndex, nColumnsCount);
                    if (true === Element.Is_EmptyPage(ElementPageIndex))
                        oColumn.Empty = true;
                }
                break;
            }
        }
    }
    // Reached the end of content: page, section, and column all end at the
    // last element.
    if (i === nCount)
    {
        oPage.EndPos    = nCount - 1;
        oSection.EndPos = nCount - 1;
        oColumn.EndPos  = nCount - 1;
    }
    if (bContinue)
    {
        this.Recalculate_PageDrawing();
    }
};
CDrawingDocContent.prototype.Draw = function(nPageIndex, pGraphics){
    // Renders the content. When the first page carries a section (column
    // layout), elements are drawn column by column; otherwise the base
    // CDocumentContent implementation is used.
    if(this.Pages.length > 0){
        var oSection = this.Pages[0].Sections[0];
        if(oSection){
            if (pGraphics.Start_Command)
            {
                pGraphics.Start_Command(AscFormat.DRAW_COMMAND_CONTENT);
            }
            for (var ColumnIndex = 0, ColumnsCount = oSection.Columns.length; ColumnIndex < ColumnsCount; ++ColumnIndex)
            {
                var Column = oSection.Columns[ColumnIndex];
                var ColumnStartPos = Column.Pos;
                var ColumnEndPos = Column.EndPos;
                // Floating objects must not fall into the column clip area.
                var FlowElements = [];
                if (ColumnsCount > 1)
                {
                    // Column clipping is currently disabled (kept for reference):
                    // pGraphics.SaveGrState();
                    var X = ColumnIndex === 0 ? 0 : Column.X - Column.SpaceBefore / 2;
                    // var XEnd = (ColumnIndex >= ColumnsCount - 1 ? Page.Width : Column.XLimit + Column.SpaceAfter / 2);
                    // pGraphics.AddClipRect(X, 0, XEnd - X, Page.Height);
                }
                // Draw every element assigned to this column at its
                // element-relative page index.
                for (var ContentPos = ColumnStartPos; ContentPos <= ColumnEndPos; ++ContentPos)
                {
                    var ElementPageIndex = this.private_GetElementPageIndex(ContentPos, 0, ColumnIndex, ColumnsCount);
                    this.Content[ContentPos].Draw(ElementPageIndex, pGraphics);
                }
                /*if (ColumnsCount > 1)
                {
                    pGraphics.RestoreGrState();
                }*/
            }
            if (pGraphics.End_Command)
            {
                pGraphics.End_Command();
            }
        }
        else{
            CDocumentContent.prototype.Draw.call(this, nPageIndex, pGraphics);
        }
    }
};
CDrawingDocContent.prototype.Write_ToBinary2 = function(writer){
    // Emit this class's type tag first, then let the base class serialize the body.
    writer.WriteLong(AscDFH.historyitem_type_DrawingContent);
    CDocumentContent.prototype.Write_ToBinary2.call(this, writer);
};
CDrawingDocContent.prototype.Read_FromBinary2 = function(reader){
    // Consume the leading type tag written by Write_ToBinary2, then let the
    // base class deserialize the body.
    reader.GetLong();
    CDocumentContent.prototype.Read_FromBinary2.call(this, reader);
};
CDrawingDocContent.prototype.Is_TableCellContent = function(){
    // Drawing-hosted content is never the content of a table cell.
    return false;
};
CDrawingDocContent.prototype.Is_ChartTitleContent = function(){
    // True only when this content is the text body of a chart title.
    var oTextBody = this.Parent;
    return (oTextBody instanceof AscFormat.CTextBody && oTextBody.parent instanceof AscFormat.CTitle) ? true : false;
};
CDrawingDocContent.prototype.Selection_Draw_Page = function(PageIndex){
    // Draws the selection highlight on the given page, handling both the
    // plain (no-section) layout and the multi-column layout.
    var CurPage = PageIndex;
    if (CurPage < 0 || CurPage >= this.Pages.length)
        return;
    var Pos_start = this.Pages[CurPage].Pos;
    var Pos_end = this.Pages[CurPage].EndPos;
    if (true === this.Selection.Use)
    {
        if(this.Selection.Flag === selectionflag_Common)
        {
            // Normalize the selection so that Start <= End.
            var Start = this.Selection.StartPos;
            var End = this.Selection.EndPos;
            if (Start > End)
            {
                Start = this.Selection.EndPos;
                End = this.Selection.StartPos;
            }
            // Clamp the selection to this page's element range.
            // NOTE(review): the repeated `var` declarations here re-declare
            // hoisted function-scope variables — legal JS, kept as-is.
            var Start = Math.max(Start, Pos_start);
            var End = Math.min(End, Pos_end);
            var Page = this.Pages[PageIndex];
            if(this.Pages[PageIndex].Sections.length === 0){
                for (var Index = Start; Index <= End; Index++)
                {
                    var ElementPageIndex = this.private_GetElementPageIndex(Index, CurPage, 0, 1);
                    this.Content[Index].Selection_Draw_Page(ElementPageIndex);
                }
            }
            else{
                var PageSection = Page.Sections[0];
                for (var ColumnIndex = 0, ColumnsCount = PageSection.Columns.length; ColumnIndex < ColumnsCount; ++ColumnIndex)
                {
                    // Re-normalize and re-clamp per column.
                    // NOTE(review): clamps against Page.Pos/Page.EndPos rather
                    // than the column's own Pos/EndPos — confirm intended.
                    var Pos_start = Page.Pos;
                    var Pos_end = Page.EndPos;
                    var Start = this.Selection.StartPos;
                    var End = this.Selection.EndPos;
                    if (Start > End)
                    {
                        Start = this.Selection.EndPos;
                        End = this.Selection.StartPos;
                    }
                    var Start = Math.max(Start, Pos_start);
                    var End = Math.min(End, Pos_end);
                    for (var Index = Start; Index <= End; ++Index)
                    {
                        var ElementPage = this.private_GetElementPageIndex(Index, 0, ColumnIndex, ColumnsCount);
                        this.Content[Index].Selection_Draw_Page(ElementPage);
                    }
                }
            }
        }
    }
};
CDrawingDocContent.prototype.Internal_GetContentPosByXY = function(X, Y, PageNum, ColumnsInfo)
{
    // Returns the index of the content element hit by point (X, Y) on page
    // PageNum, filling ColumnsInfo with the hit column and the column count.
    if (!ColumnsInfo)
        ColumnsInfo = {Column : 0, ColumnsCount : 1};
    if (undefined === PageNum || null === PageNum)
        PageNum = this.CurPage;
    // Check empty paragraphs that carry a section end first.
    var SectCount = this.Pages[PageNum].EndSectionParas.length;
    if(this.Pages[PageNum].Sections.length === 0){
        // No column layout on this page: defer to the base implementation.
        return CDocumentContent.prototype.Internal_GetContentPosByXY.call(this, X, Y, PageNum, ColumnsInfo);
    }
    for (var Index = 0; Index < SectCount; ++Index)
    {
        var Item = this.Pages[PageNum].EndSectionParas[Index];
        var Bounds = Item.Pages[0].Bounds;
        if (Y < Bounds.Bottom && Y > Bounds.Top && X > Bounds.Left && X < Bounds.Right)
        {
            var Element = this.Content[Item.Index];
            ColumnsInfo.Column = Element.Get_StartColumn();
            ColumnsInfo.ColumnsCount = Element.Get_ColumnsCount();
            return Item.Index;
        }
    }
    // Determine which section and column the point falls into.
    var Page = this.Pages[PageNum];
    var SectionIndex = 0;
    for (var SectionsCount = Page.Sections.length; SectionIndex < SectionsCount - 1; ++SectionIndex)
    {
        if (Y < Page.Sections[SectionIndex + 1].Y)
            break;
    }
    var PageSection = this.Pages[PageNum].Sections[SectionIndex];
    var ColumnsCount = PageSection.Columns.length;
    var ColumnIndex = 0;
    for (; ColumnIndex < ColumnsCount - 1; ++ColumnIndex)
    {
        // The boundary between adjacent columns is the midpoint of the gap.
        if (X < (PageSection.Columns[ColumnIndex].XLimit + PageSection.Columns[ColumnIndex + 1].X) / 2)
            break;
    }
    // TODO: handle the case where empty columns are not only at the end.
    while (ColumnIndex > 0 && true === PageSection.Columns[ColumnIndex].Empty)
        ColumnIndex--;
    ColumnsInfo.Column = ColumnIndex;
    ColumnsInfo.ColumnsCount = ColumnsCount;
    var Column = PageSection.Columns[ColumnIndex];
    var StartPos = Column.Pos;
    var EndPos = Column.EndPos;
    // Return the first element whose successor starts below Y.
    for (var Pos = StartPos; Pos < EndPos; ++Pos)
    {
        var Item = this.Content[Pos + 1];
        var PageBounds = Item.Get_PageBounds(0);
        if (Y < PageBounds.Top)
            return Pos;
    }
    // `Pos` is hoisted: it equals EndPos when the loop ran to completion.
    if (Pos === EndPos)
    {
        return EndPos;
    }
    return 0;
};
CDrawingDocContent.prototype.private_GetElementPageIndexByXY = function(ElementPos, X, Y, PageIndex){
    // Use the column-aware (document-style) hit test when a section layout
    // exists; otherwise defer to the plain document-content implementation.
    var bHasSections = this.Pages.length > 0 && this.Pages[0].Sections.length > 0;
    if (bHasSections)
    {
        return CDocument_prototype_private_GetElementPageIndexByXY.call(this, ElementPos, X, Y, PageIndex);
    }
    return CDocumentContent.prototype.private_GetElementPageIndexByXY.call(this, ElementPos, X, Y, PageIndex);
};
CDrawingDocContent.prototype.Copy = function(Parent, DrawingDocument)
{
    // Deep-copy this content into a fresh CDrawingDocContent owned by Parent.
    // NOTE(review): the trailing constructor arguments (Split, TurnOffInnerWrap,
    // bPresentation) exceed the declared 6-parameter constructor — confirm intent.
    var oDrawingDocument = DrawingDocument ? DrawingDocument : this.DrawingDocument;
    var oCopy = new CDrawingDocContent(Parent, oDrawingDocument, 0, 0, 0, 0, this.Split, this.TurnOffInnerWrap, this.bPresentation);
    oCopy.Internal_Content_RemoveAll();
    for (var nIndex = 0, nCount = this.Content.length; nIndex < nCount; ++nIndex)
    {
        oCopy.Internal_Content_Add(nIndex, this.Content[nIndex].Copy(oCopy, DrawingDocument), false);
    }
    return oCopy;
};
CDrawingDocContent.prototype.Copy3 = function(Parent)
{
    // Copy used for chart titles: forces presentation mode (last ctor argument
    // true) and clones elements with their Copy2 variant.
    var oCopy = new CDrawingDocContent(Parent, this.DrawingDocument, 0, 0, 0, 0, this.Split, this.TurnOffInnerWrap, true);
    oCopy.Internal_Content_RemoveAll();
    for (var nIndex = 0, nCount = this.Content.length; nIndex < nCount; ++nIndex)
    {
        oCopy.Internal_Content_Add(nIndex, this.Content[nIndex].Copy2(oCopy), false);
    }
    return oCopy;
};
CDrawingDocContent.prototype.Recalculate = function()
{
    // Delegate recalculation to the owning shape when this content belongs to
    // one (directly, or via its text body) — consistent with the other
    // implementation of this method in the file; otherwise lay out pages
    // directly when a layout width is available.
    if (this.Parent)
    {
        if (this.Parent instanceof AscFormat.CShape)
        {
            this.Parent.recalculateContent();
            return;
        }
        if (this.Parent.parent instanceof AscFormat.CShape)
        {
            this.Parent.parent.recalculateContent();
            return;
        }
    }
    if (this.XLimit > 0)
    {
        this.Recalculate_PageDrawing();
    }
};
// TODO: do this properly!!!
function CDocument_prototype_private_GetElementPageIndexByXY(ElementPos, X, Y, PageIndex)
{
    // Column-aware hit test borrowed from CDocument: finds the column of point X
    // on the given page for element ElementPos and maps it to the
    // element-relative page index. Invoked with `this` bound to the content.
    var Element = this.Content[ElementPos];
    if (!Element)
        return 0;
    var Page = this.Pages[PageIndex];
    if (!Page)
        return 0;
    // Locate the page section whose element range contains ElementPos.
    var PageSection = null;
    for (var SectionIndex = 0, SectionsCount = Page.Sections.length; SectionIndex < SectionsCount; ++SectionIndex)
    {
        if (Page.Sections[SectionIndex].Pos <= ElementPos && ElementPos <= Page.Sections[SectionIndex].EndPos)
        {
            PageSection = Page.Sections[SectionIndex];
            break;
        }
    }
    if (!PageSection)
        return 0;
    var ElementStartPage = Element.Get_StartPage_Relative();
    var ElementStartColumn = Element.Get_StartColumn();
    var ElementPagesCount = Element.Get_PagesCount();
    var ColumnsCount = PageSection.Columns.length;
    // Range of columns the element can occupy on this page.
    var StartColumn = 0;
    var EndColumn = ColumnsCount - 1;
    if (PageIndex === ElementStartPage)
    {
        StartColumn = Element.Get_StartColumn();
        EndColumn = Math.min(ElementStartColumn + ElementPagesCount - 1, ColumnsCount - 1);
    }
    else
    {
        StartColumn = 0;
        EndColumn = Math.min(ElementPagesCount - ElementStartColumn + (PageIndex - ElementStartPage) * ColumnsCount, ColumnsCount - 1);
    }
    // TODO: handle the case where empty columns are not only at the end.
    while (true === PageSection.Columns[EndColumn].Empty && EndColumn > StartColumn)
        EndColumn--;
    // Pick the first candidate column whose right boundary (midpoint of the gap
    // to the next column) lies to the right of X; default to the last candidate.
    var ResultColumn = EndColumn;
    for (var ColumnIndex = StartColumn; ColumnIndex < EndColumn; ++ColumnIndex)
    {
        if (X < (PageSection.Columns[ColumnIndex].XLimit + PageSection.Columns[ColumnIndex + 1].X) / 2)
        {
            ResultColumn = ColumnIndex;
            break;
        }
    }
    return this.private_GetElementPageIndex(ElementPos, PageIndex, ResultColumn, ColumnsCount);
}
// Export to the AscFormat namespace and close the module IIFE.
AscFormat.CDrawingDocContent = CDrawingDocContent;
})(window);
|
CDrawingDocContent.prototype.Recalculate
|
common/Drawings/Format/DrawingContent.js
|
CDrawingDocContent.prototype.Recalculate
|
<ide><path>ommon/Drawings/Format/DrawingContent.js
<ide> return DC;
<ide> };
<ide>
<del> CDrawingDocContent.prototype.Recalculate = function()
<add> CDrawingDocContent.prototype.Recalculate = function()
<ide> {
<add> if(this.Parent){
<add> if(this.Parent instanceof AscFormat.CShape){
<add> this.Parent.recalculateContent();
<add> return;
<add> }
<add> else if(this.Parent && this.Parent.parent){
<add> if(this.Parent.parent instanceof AscFormat.CShape){
<add> this.Parent.parent.recalculateContent();
<add> return;
<add> }
<add> }
<add> }
<ide> if(this.XLimit > 0){
<ide> this.Recalculate_PageDrawing();
<ide> }
|
|
Java
|
apache-2.0
|
b30b75cd18120d1b541f0ecf5f02271f9ede1070
| 0 |
resmo/cloudstack,jcshen007/cloudstack,argv0/cloudstack,wido/cloudstack,cinderella/incubator-cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,resmo/cloudstack,wido/cloudstack,resmo/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,jcshen007/cloudstack,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,argv0/cloudstack,argv0/cloudstack,GabrielBrascher/cloudstack,cinderella/incubator-cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,argv0/cloudstack,cinderella/incubator-cloudstack,GabrielBrascher/cloudstack,argv0/cloudstack,wido/cloudstack,cinderella/incubator-cloudstack,jcshen007/cloudstack,cinderella/incubator-cloudstack,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,argv0/cloudstack
|
/**
* Copyright (C) 2010 Cloud.com, Inc. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.server;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.net.UnknownHostException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.crypto.KeyGenerator;
import javax.crypto.Mac;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import com.cloud.agent.AgentManager;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.GetVncPortAnswer;
import com.cloud.agent.api.GetVncPortCommand;
import com.cloud.agent.api.proxy.UpdateCertificateCommand;
import com.cloud.agent.api.storage.CopyVolumeAnswer;
import com.cloud.agent.api.storage.CopyVolumeCommand;
import com.cloud.alert.Alert;
import com.cloud.alert.AlertManager;
import com.cloud.alert.AlertVO;
import com.cloud.alert.dao.AlertDao;
import com.cloud.api.ApiDBUtils;
import com.cloud.api.BaseCmd;
import com.cloud.api.ServerApiException;
import com.cloud.api.commands.CreateDomainCmd;
import com.cloud.api.commands.CreateSSHKeyPairCmd;
import com.cloud.api.commands.DeleteDomainCmd;
import com.cloud.api.commands.DeletePreallocatedLunCmd;
import com.cloud.api.commands.DeleteSSHKeyPairCmd;
import com.cloud.api.commands.ExtractVolumeCmd;
import com.cloud.api.commands.GetCloudIdentifierCmd;
import com.cloud.api.commands.GetVMPasswordCmd;
import com.cloud.api.commands.ListAccountsCmd;
import com.cloud.api.commands.ListAlertsCmd;
import com.cloud.api.commands.ListAsyncJobsCmd;
import com.cloud.api.commands.ListCapabilitiesCmd;
import com.cloud.api.commands.ListCapacityCmd;
import com.cloud.api.commands.ListCfgsByCmd;
import com.cloud.api.commands.ListClustersCmd;
import com.cloud.api.commands.ListDiskOfferingsCmd;
import com.cloud.api.commands.ListDomainChildrenCmd;
import com.cloud.api.commands.ListDomainsCmd;
import com.cloud.api.commands.ListEventsCmd;
import com.cloud.api.commands.ListGuestOsCategoriesCmd;
import com.cloud.api.commands.ListGuestOsCmd;
import com.cloud.api.commands.ListHostsCmd;
import com.cloud.api.commands.ListHypervisorsCmd;
import com.cloud.api.commands.ListIsosCmd;
import com.cloud.api.commands.ListPodsByCmd;
import com.cloud.api.commands.ListPreallocatedLunsCmd;
import com.cloud.api.commands.ListPublicIpAddressesCmd;
import com.cloud.api.commands.ListRoutersCmd;
import com.cloud.api.commands.ListSSHKeyPairsCmd;
import com.cloud.api.commands.ListServiceOfferingsCmd;
import com.cloud.api.commands.ListStoragePoolsCmd;
import com.cloud.api.commands.ListSystemVMsCmd;
import com.cloud.api.commands.ListTemplateOrIsoPermissionsCmd;
import com.cloud.api.commands.ListTemplatesCmd;
import com.cloud.api.commands.ListUsersCmd;
import com.cloud.api.commands.ListVMGroupsCmd;
import com.cloud.api.commands.ListVlanIpRangesCmd;
import com.cloud.api.commands.ListVolumesCmd;
import com.cloud.api.commands.ListZonesByCmd;
import com.cloud.api.commands.RebootSystemVmCmd;
import com.cloud.api.commands.RegisterCmd;
import com.cloud.api.commands.RegisterPreallocatedLunCmd;
import com.cloud.api.commands.RegisterSSHKeyPairCmd;
import com.cloud.api.commands.StartSystemVMCmd;
import com.cloud.api.commands.StopSystemVmCmd;
import com.cloud.api.commands.UpdateDomainCmd;
import com.cloud.api.commands.UpdateIsoCmd;
import com.cloud.api.commands.UpdateIsoPermissionsCmd;
import com.cloud.api.commands.UpdateTemplateCmd;
import com.cloud.api.commands.UpdateTemplateOrIsoCmd;
import com.cloud.api.commands.UpdateTemplateOrIsoPermissionsCmd;
import com.cloud.api.commands.UpdateTemplatePermissionsCmd;
import com.cloud.api.commands.UpdateVMGroupCmd;
import com.cloud.api.commands.UploadCustomCertificateCmd;
import com.cloud.api.response.ExtractResponse;
import com.cloud.async.AsyncJobExecutor;
import com.cloud.async.AsyncJobManager;
import com.cloud.async.AsyncJobResult;
import com.cloud.async.AsyncJobVO;
import com.cloud.async.BaseAsyncJobExecutor;
import com.cloud.async.dao.AsyncJobDao;
import com.cloud.capacity.CapacityVO;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.certificate.CertificateVO;
import com.cloud.certificate.dao.CertificateDao;
import com.cloud.configuration.Config;
import com.cloud.configuration.ConfigurationManager;
import com.cloud.configuration.ConfigurationVO;
import com.cloud.configuration.ResourceLimitVO;
import com.cloud.configuration.dao.ConfigurationDao;
import com.cloud.configuration.dao.ResourceLimitDao;
import com.cloud.consoleproxy.ConsoleProxyManager;
import com.cloud.dc.AccountVlanMapVO;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenterIpAddressVO;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.PodVlanMapVO;
import com.cloud.dc.Vlan.VlanType;
import com.cloud.dc.VlanVO;
import com.cloud.dc.dao.AccountVlanMapDao;
import com.cloud.dc.dao.ClusterDao;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.dc.dao.DataCenterIpAddressDao;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.dc.dao.PodVlanMapDao;
import com.cloud.dc.dao.VlanDao;
import com.cloud.domain.DomainVO;
import com.cloud.domain.dao.DomainDao;
import com.cloud.event.ActionEvent;
import com.cloud.event.Event;
import com.cloud.event.EventTypes;
import com.cloud.event.EventUtils;
import com.cloud.event.EventVO;
import com.cloud.event.dao.EventDao;
import com.cloud.exception.AgentUnavailableException;
import com.cloud.exception.CloudAuthenticationException;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.ManagementServerException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.exception.PermissionDeniedException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.host.Host;
import com.cloud.host.HostVO;
import com.cloud.host.Status;
import com.cloud.host.dao.HostDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.info.ConsoleProxyInfo;
import com.cloud.network.IPAddressVO;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.network.router.VirtualNetworkApplianceManager;
import com.cloud.network.security.SecurityGroupVO;
import com.cloud.network.security.dao.SecurityGroupDao;
import com.cloud.offering.ServiceOffering;
import com.cloud.server.auth.UserAuthenticator;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.DiskOfferingVO;
import com.cloud.storage.GuestOSCategoryVO;
import com.cloud.storage.GuestOSVO;
import com.cloud.storage.LaunchPermissionVO;
import com.cloud.storage.Storage;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Storage.TemplateType;
import com.cloud.storage.StorageManager;
import com.cloud.storage.StoragePoolHostVO;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.StorageStats;
import com.cloud.storage.Upload;
import com.cloud.storage.Upload.Mode;
import com.cloud.storage.UploadVO;
import com.cloud.storage.VMTemplateVO;
import com.cloud.storage.Volume;
import com.cloud.storage.VolumeStats;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.DiskOfferingDao;
import com.cloud.storage.dao.GuestOSCategoryDao;
import com.cloud.storage.dao.GuestOSDao;
import com.cloud.storage.dao.LaunchPermissionDao;
import com.cloud.storage.dao.StoragePoolDao;
import com.cloud.storage.dao.StoragePoolHostDao;
import com.cloud.storage.dao.UploadDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.preallocatedlun.PreallocatedLunVO;
import com.cloud.storage.preallocatedlun.dao.PreallocatedLunDao;
import com.cloud.storage.secondary.SecondaryStorageVmManager;
import com.cloud.storage.upload.UploadMonitor;
import com.cloud.template.TemplateManager;
import com.cloud.template.VirtualMachineTemplate.TemplateFilter;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.AccountVO;
import com.cloud.user.SSHKeyPair;
import com.cloud.user.SSHKeyPairVO;
import com.cloud.user.User;
import com.cloud.user.UserAccount;
import com.cloud.user.UserAccountVO;
import com.cloud.user.UserContext;
import com.cloud.user.UserVO;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.SSHKeyPairDao;
import com.cloud.user.dao.UserAccountDao;
import com.cloud.user.dao.UserDao;
import com.cloud.utils.EnumUtils;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.PasswordGenerator;
import com.cloud.utils.component.Adapters;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.JoinBuilder;
import com.cloud.utils.db.JoinBuilder.JoinType;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.exception.ExecutionException;
import com.cloud.utils.net.MacAddress;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.ssh.SSHKeysHelper;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.DomainRouterVO;
import com.cloud.vm.InstanceGroupVO;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.UserVmDetailVO;
import com.cloud.vm.UserVmManager;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.dao.ConsoleProxyDao;
import com.cloud.vm.dao.DomainRouterDao;
import com.cloud.vm.dao.InstanceGroupDao;
import com.cloud.vm.dao.SecondaryStorageVmDao;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.UserVmDetailsDao;
import com.cloud.vm.dao.VMInstanceDao;
/**
 * Default implementation of the {@link ManagementServer} facade. Aggregates
 * the DAOs and managers it needs (resolved via the {@code ComponentLocator}
 * in the constructor) and implements the server-side logic behind the
 * user/account, zone, event, template and system-VM API commands.
 */
public class ManagementServerImpl implements ManagementServer {
    public static final Logger s_logger = Logger.getLogger(ManagementServerImpl.class.getName());
    // --- Business-logic managers (singletons wired from the ComponentLocator) ---
    private final AccountManager _accountMgr;
    private final AgentManager _agentMgr;
    private final ConfigurationManager _configMgr;
    // --- DAOs ---
    private final SecurityGroupDao _networkSecurityGroupDao;
    private final IPAddressDao _publicIpAddressDao;
    private final DataCenterIpAddressDao _privateIpAddressDao;
    private final DomainRouterDao _routerDao;
    private final ConsoleProxyDao _consoleProxyDao;
    private final ClusterDao _clusterDao;
    private final SecondaryStorageVmDao _secStorageVmDao;
    private final EventDao _eventDao;
    private final DataCenterDao _dcDao;
    private final VlanDao _vlanDao;
    private final AccountVlanMapDao _accountVlanMapDao;
    private final PodVlanMapDao _podVlanMapDao;
    private final HostDao _hostDao;
    private final UserDao _userDao;
    private final UserVmDao _userVmDao;
    private final ConfigurationDao _configDao;
    private final UserVmManager _vmMgr;
    private final ConsoleProxyManager _consoleProxyMgr;
    private final SecondaryStorageVmManager _secStorageVmMgr;
    private final ServiceOfferingDao _offeringsDao;
    private final DiskOfferingDao _diskOfferingDao;
    private final VMTemplateDao _templateDao;
    private final LaunchPermissionDao _launchPermissionDao;
    private final DomainDao _domainDao;
    private final AccountDao _accountDao;
    private final ResourceLimitDao _resourceLimitDao;
    private final UserAccountDao _userAccountDao;
    private final AlertDao _alertDao;
    private final CapacityDao _capacityDao;
    private final GuestOSDao _guestOSDao;
    private final GuestOSCategoryDao _guestOSCategoryDao;
    private final StoragePoolDao _poolDao;
    private final StoragePoolHostDao _poolHostDao;
    private final StorageManager _storageMgr;
    // Pluggable login authenticators; only the first configured one is used.
    private final Adapters<UserAuthenticator> _userAuthenticators;
    private final HostPodDao _hostPodDao;
    private final VMInstanceDao _vmInstanceDao;
    private final VolumeDao _volumeDao;
    private final AlertManager _alertMgr;
    private final AsyncJobDao _jobDao;
    private final AsyncJobManager _asyncMgr;
    private final TemplateManager _tmpltMgr;
    // Delay (seconds) before purging old events; 0 disables the purge task.
    private final int _purgeDelay;
    private final PreallocatedLunDao _lunDao;
    private final InstanceGroupDao _vmGroupDao;
    private final UploadMonitor _uploadMonitor;
    private final UploadDao _uploadDao;
    private final CertificateDao _certDao;
    private final SSHKeyPairDao _sshKeyPairDao;
    // Single-threaded scheduler that runs the periodic event-purge task.
    private final ScheduledExecutorService _eventExecutor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("EventChecker"));
    private final StatsCollector _statsCollector;
    // Snapshot of the global configuration table, loaded once at construction.
    private final Map<String, String> _configs;
    // Guest DNS domain suffix, normalized to start with '.'.
    private String _domain;
    private final int _routerRamSize;
    private final int _proxyRamSize;
    private final int _ssRamSize;
    // Cache of valid JDK time-zone ids for O(1) validation lookups.
    private final Map<String, Boolean> _availableIdsMap;
    private boolean _isHypervisorSnapshotCapable = false;
    private String _hashKey = null;
    /**
     * Wires every DAO/manager dependency from the {@link ComponentLocator},
     * loads the global configuration table and schedules the optional
     * event-purge task. Invoked once by the component framework.
     */
    protected ManagementServerImpl() {
        ComponentLocator locator = ComponentLocator.getLocator(Name);
        // --- DAO lookups ---
        _lunDao = locator.getDao(PreallocatedLunDao.class);
        _configDao = locator.getDao(ConfigurationDao.class);
        _routerDao = locator.getDao(DomainRouterDao.class);
        _eventDao = locator.getDao(EventDao.class);
        _dcDao = locator.getDao(DataCenterDao.class);
        _vlanDao = locator.getDao(VlanDao.class);
        _accountVlanMapDao = locator.getDao(AccountVlanMapDao.class);
        _podVlanMapDao = locator.getDao(PodVlanMapDao.class);
        _hostDao = locator.getDao(HostDao.class);
        _hostPodDao = locator.getDao(HostPodDao.class);
        _jobDao = locator.getDao(AsyncJobDao.class);
        _clusterDao = locator.getDao(ClusterDao.class);
        // --- Manager lookups ---
        _accountMgr = locator.getManager(AccountManager.class);
        _agentMgr = locator.getManager(AgentManager.class);
        _configMgr = locator.getManager(ConfigurationManager.class);
        _vmMgr = locator.getManager(UserVmManager.class);
        _consoleProxyMgr = locator.getManager(ConsoleProxyManager.class);
        _secStorageVmMgr = locator.getManager(SecondaryStorageVmManager.class);
        _storageMgr = locator.getManager(StorageManager.class);
        _networkSecurityGroupDao = locator.getDao(SecurityGroupDao.class);
        _publicIpAddressDao = locator.getDao(IPAddressDao.class);
        _privateIpAddressDao = locator.getDao(DataCenterIpAddressDao.class);
        _consoleProxyDao = locator.getDao(ConsoleProxyDao.class);
        _secStorageVmDao = locator.getDao(SecondaryStorageVmDao.class);
        _userDao = locator.getDao(UserDao.class);
        _userVmDao = locator.getDao(UserVmDao.class);
        _offeringsDao = locator.getDao(ServiceOfferingDao.class);
        _diskOfferingDao = locator.getDao(DiskOfferingDao.class);
        _templateDao = locator.getDao(VMTemplateDao.class);
        _launchPermissionDao = locator.getDao(LaunchPermissionDao.class);
        _domainDao = locator.getDao(DomainDao.class);
        _accountDao = locator.getDao(AccountDao.class);
        _resourceLimitDao = locator.getDao(ResourceLimitDao.class);
        _userAccountDao = locator.getDao(UserAccountDao.class);
        _alertDao = locator.getDao(AlertDao.class);
        _capacityDao = locator.getDao(CapacityDao.class);
        _guestOSDao = locator.getDao(GuestOSDao.class);
        _guestOSCategoryDao = locator.getDao(GuestOSCategoryDao.class);
        _poolDao = locator.getDao(StoragePoolDao.class);
        _poolHostDao = locator.getDao(StoragePoolHostDao.class);
        _vmGroupDao = locator.getDao(InstanceGroupDao.class);
        _uploadDao = locator.getDao(UploadDao.class);
        _certDao = locator.getDao(CertificateDao.class);
        _configs = _configDao.getConfiguration();
        _vmInstanceDao = locator.getDao(VMInstanceDao.class);
        _volumeDao = locator.getDao(VolumeDao.class);
        _alertMgr = locator.getManager(AlertManager.class);
        _asyncMgr = locator.getManager(AsyncJobManager.class);
        _tmpltMgr = locator.getManager(TemplateManager.class);
        _uploadMonitor = locator.getManager(UploadMonitor.class);
        _sshKeyPairDao = locator.getDao(SSHKeyPairDao.class);
        _userAuthenticators = locator.getAdapters(UserAuthenticator.class);
        // Login is impossible without an authenticator, but startup deliberately
        // continues so the rest of the server can still come up.
        if (_userAuthenticators == null || !_userAuthenticators.isSet()) {
            s_logger.error("Unable to find an user authenticator.");
        }
        // Guest DNS domain suffix; normalized so it always starts with '.'.
        _domain = _configs.get("domain");
        if (_domain == null) {
            _domain = ".myvm.com";
        }
        if (!_domain.startsWith(".")) {
            _domain = "." + _domain;
        }
        String value = _configs.get("account.cleanup.interval");
        int cleanup = NumbersUtil.parseInt(value, 60 * 60 * 24); // defaults to 24 hours
        // Parse the max number of UserVMs and public IPs from server-setup.xml,
        // and set them in the right places
        _routerRamSize = NumbersUtil.parseInt(_configs.get("router.ram.size"),VirtualNetworkApplianceManager.DEFAULT_ROUTER_VM_RAMSIZE);
        _proxyRamSize = NumbersUtil.parseInt(_configs.get("consoleproxy.ram.size"), ConsoleProxyManager.DEFAULT_PROXY_VM_RAMSIZE);
        _ssRamSize = NumbersUtil.parseInt(_configs.get("secstorage.ram.size"), SecondaryStorageVmManager.DEFAULT_SS_VM_RAMSIZE);
        _statsCollector = StatsCollector.getInstance(_configs);
        // Event purge is opt-in: a delay of 0 disables the scheduled task.
        _purgeDelay = NumbersUtil.parseInt(_configs.get("event.purge.delay"), 0);
        if(_purgeDelay != 0){
            _eventExecutor.scheduleAtFixedRate(new EventPurgeTask(), cleanup, cleanup, TimeUnit.SECONDS);
        }
        // Cache the JDK's known time-zone ids for fast validation lookups.
        String[] availableIds = TimeZone.getAvailableIDs();
        _availableIdsMap = new HashMap<String, Boolean>(availableIds.length);
        for (String id: availableIds) {
            _availableIdsMap.put(id, true);
        }
    }
protected Map<String, String> getConfigs() {
return _configs;
}
@Override
public StorageStats getStorageStatistics(long hostId) {
return _statsCollector.getStorageStats(hostId);
}
@Override
public PreallocatedLunVO registerPreallocatedLun(RegisterPreallocatedLunCmd cmd) {
Long zoneId = cmd.getZoneId();
String portal = cmd.getPortal();
String targetIqn = cmd.getTargetIqn();
Integer lun = cmd.getLun();
Long size = cmd.getDiskSize();
String t = cmd.getTags();
String[] tags = null;
if (t != null) {
tags = t.split(",");
for (int i = 0; i < tags.length; i++) {
tags[i] = tags[i].trim();
}
} else {
tags = new String[0];
}
PreallocatedLunVO vo = new PreallocatedLunVO(zoneId, portal, targetIqn, lun, size);
return _lunDao.persist(vo, tags);
}
@Override
public boolean unregisterPreallocatedLun(DeletePreallocatedLunCmd cmd) throws IllegalArgumentException {
Long id = cmd.getId();
PreallocatedLunVO lun = null;
if ((lun = _lunDao.findById(id)) == null) {
throw new IllegalArgumentException("Unable to find a LUN with ID " + id);
}
if (lun.getTaken() != null) {
throw new IllegalArgumentException("The LUN is currently in use and cannot be deleted.");
}
return _lunDao.delete(id);
}
@Override
public VolumeStats[] getVolumeStatistics(long[] volIds) {
return _statsCollector.getVolumeStats(volIds);
}
    /**
     * Overwrites the given user's password with the MD5 hash of
     * {@code newPassword}.
     *
     * NOTE(review): the {@code oldPassword} argument is accepted but never
     * verified — the verification code is deliberately commented out below.
     * Confirm whether callers are expected to perform that check themselves.
     *
     * @return the new plain-text password that was set
     */
    @Override
    public String updateAdminPassword(long userId, String oldPassword, String newPassword) {
        // String old = StringToMD5(oldPassword);
        // User user = getUser(userId);
        // if (old.equals(user.getPassword())) {
        UserVO userVO = _userDao.createForUpdate(userId);
        userVO.setPassword(StringToMD5(newPassword));
        _userDao.update(userId, userVO);
        return newPassword;
        // } else {
        // return null;
        // }
    }
private String StringToMD5(String string) {
MessageDigest md5;
try {
md5 = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException e) {
throw new CloudRuntimeException("Error", e);
}
md5.reset();
BigInteger pwInt = new BigInteger(1, md5.digest(string.getBytes()));
// make sure our MD5 hash value is 32 digits long...
StringBuffer sb = new StringBuffer();
String pwStr = pwInt.toString(16);
int padding = 32 - pwStr.length();
for (int i = 0; i < padding; i++) {
sb.append('0');
}
sb.append(pwStr);
return sb.toString();
}
@Override
public User getUser(long userId) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Retrieiving user with id: " + userId);
}
UserVO user = _userDao.getUser(userId);
if (user == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Unable to find user with id " + userId);
}
return null;
}
return user;
}
@Override
public User getUser(long userId, boolean active) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Retrieiving user with id: " + userId + " and active = " + active);
}
if (active) {
return _userDao.getUser(userId);
} else {
return _userDao.findById(userId);
}
}
@Override
public UserAccount getUserAccount(String username, Long domainId) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Retrieiving user: " + username + " in domain " + domainId);
}
UserAccount userAccount = _userAccountDao.getUserAccount(username, domainId);
if (userAccount == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Unable to find user with name " + username + " in domain " + domainId);
}
return null;
}
return userAccount;
}
    /**
     * Authenticates {@code username}/{@code password} against the first
     * configured authenticator and returns the matching user account.
     *
     * @return the authenticated account, or null when the user does not exist
     *         or the credentials are rejected
     * @throws CloudAuthenticationException when the user or its owning
     *         account is disabled or locked
     */
    private UserAccount getUserAccount(String username, String password, Long domainId) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Attempting to log in user: " + username + " in domain " + domainId);
        }
        UserAccount userAccount = _userAccountDao.getUserAccount(username, domainId);
        if (userAccount == null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Unable to find user with name " + username + " in domain " + domainId);
            }
            return null;
        }
        // Domain name is resolved purely for friendlier log/exception text.
        DomainVO domain = _domainDao.findById(domainId);
        String domainName = null;
        if(domain != null) {
            domainName = domain.getName();
        }
        // Both the user state AND the owning account state must be 'enabled'.
        if (!userAccount.getState().equalsIgnoreCase(Account.State.enabled.toString()) || !userAccount.getAccountState().equalsIgnoreCase(Account.State.enabled.toString())) {
            if (s_logger.isInfoEnabled()) {
                s_logger.info("User " + username + " in domain " + domainName + " is disabled/locked (or account is disabled/locked)");
            }
            throw new CloudAuthenticationException("User " + username + " in domain " + domainName + " is disabled/locked (or account is disabled/locked)");
            //return null;
        }
        // We only use the first adapter even if multiple have been
        // configured
        // NOTE(review): nextElement() throws NoSuchElementException when no
        // authenticator is configured — the constructor only logs that case.
        Enumeration<UserAuthenticator> en = _userAuthenticators.enumeration();
        UserAuthenticator authenticator = en.nextElement();
        boolean authenticated = authenticator.authenticate(username, password, domainId);
        if (authenticated) {
            return userAccount;
        } else {
            return null;
        }
    }
@Override
public Pair<User, Account> findUserByApiKey(String apiKey) {
return _accountDao.findUserAccountByApiKey(apiKey);
}
@Override
public Account getAccount(long accountId) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Retrieiving account with id: " + accountId);
}
AccountVO account = _accountDao.findById(Long.valueOf(accountId));
if (account == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Unable to find account with id " + accountId);
}
return null;
}
return account;
}
@Override
public String[] createApiKeyAndSecretKey(RegisterCmd cmd) {
Long userId = cmd.getId();
User user = _userDao.findById(userId);
if (user == null) {
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "unable to find user for id : " + userId);
}
// generate both an api key and a secret key, update the user table with the keys, return the keys to the user
String[] keys = new String[2];
keys[0] = createApiKey(userId);
keys[1] = createSecretKey(userId);
return keys;
}
    /**
     * Generates a new, unique API key for the user and persists it on the
     * user record. Mirrors the structure of {@code createSecretKey(Long)}.
     *
     * @return the Base64-URL-safe key, or null if no unique key could be
     *         produced within the retry budget or key generation failed
     */
    private String createApiKey(Long userId) {
        User user = findUserById(userId);
        try {
            UserVO updatedUser = _userDao.createForUpdate();
            String encodedKey = null;
            Pair<User, Account> userAcct = null;
            int retryLimit = 10;
            do {
                // FIXME: what algorithm should we use for API keys?
                KeyGenerator generator = KeyGenerator.getInstance("HmacSHA1");
                SecretKey key = generator.generateKey();
                encodedKey = Base64.encodeBase64URLSafeString(key.getEncoded());
                // Regenerate on the (unlikely) event of a key collision.
                userAcct = _accountDao.findUserAccountByApiKey(encodedKey);
                retryLimit--;
            } while ((userAcct != null) && (retryLimit >= 0));
            // Still colliding after exhausting retries: give up.
            if (userAcct != null) {
                return null;
            }
            updatedUser.setApiKey(encodedKey);
            _userDao.update(user.getId(), updatedUser);
            return encodedKey;
        } catch (NoSuchAlgorithmException ex) {
            s_logger.error("error generating secret key for user: " + user.getUsername(), ex);
        }
        return null;
    }
private String createSecretKey(Long userId) {
User user = findUserById(userId);
try {
UserVO updatedUser = _userDao.createForUpdate();
String encodedKey = null;
int retryLimit = 10;
UserVO userBySecretKey = null;
do {
KeyGenerator generator = KeyGenerator.getInstance("HmacSHA1");
SecretKey key = generator.generateKey();
encodedKey = Base64.encodeBase64URLSafeString(key.getEncoded());
userBySecretKey = _userDao.findUserBySecretKey(encodedKey);
retryLimit--;
} while ((userBySecretKey != null) && (retryLimit >= 0));
if (userBySecretKey != null) {
return null;
}
updatedUser.setSecretKey(encodedKey);
_userDao.update(user.getId(), updatedUser);
return encodedKey;
} catch (NoSuchAlgorithmException ex) {
s_logger.error("error generating secret key for user: " + user.getUsername(), ex);
}
return null;
}
@Override
public List<IPAddressVO> listPublicIpAddressesBy(Long accountId, boolean allocatedOnly, Long zoneId, Long vlanDbId) {
SearchCriteria<IPAddressVO> sc = _publicIpAddressDao.createSearchCriteria();
if (accountId != null) {
sc.addAnd("accountId", SearchCriteria.Op.EQ, accountId);
}
if (zoneId != null) {
sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
}
if (vlanDbId != null) {
sc.addAnd("vlanDbId", SearchCriteria.Op.EQ, vlanDbId);
}
if (allocatedOnly) {
sc.addAnd("allocated", SearchCriteria.Op.NNULL);
}
return _publicIpAddressDao.search(sc, null);
}
@Override
public List<DataCenterIpAddressVO> listPrivateIpAddressesBy(Long podId, Long zoneId) {
if (podId != null && zoneId != null) {
return _privateIpAddressDao.listByPodIdDcId(podId.longValue(), zoneId.longValue());
} else {
return new ArrayList<DataCenterIpAddressVO>();
}
}
@Override
public String generateRandomPassword() {
return PasswordGenerator.generateRandomPassword(6);
}
@Override
public boolean attachISOToVM(long vmId, long userId, long isoId, boolean attach) {
UserVmVO vm = _userVmDao.findById(vmId);
VMTemplateVO iso = _templateDao.findById(isoId);
boolean success = _vmMgr.attachISOToVM(vmId, isoId, attach);
if (success) {
if (attach) {
vm.setIsoId(iso.getId());
} else {
vm.setIsoId(null);
}
_userVmDao.update(vmId, vm);
}
return success;
}
    /**
     * Lists the zones (data centers) visible to the caller.
     *
     * Visibility rules implemented below:
     * - explicit domainId: only zones associated with that domain;
     * - admin (or no account): all zones;
     * - normal user: zones of the user's domain and all its ancestors, plus
     *   all public zones;
     * - domain admin: zones of the admin's domain, its ancestors AND all of
     *   its child domains, plus all public zones.
     * The result can be further narrowed by the command's "available" flag
     * (zones where the account has a router) and by an explicit zone id.
     */
    @Override
    public List<DataCenterVO> listDataCenters(ListZonesByCmd cmd) {
        Account account = UserContext.current().getCaller();
        List<DataCenterVO> dcs = null;
        Long domainId = cmd.getDomainId();
        Long id = cmd.getId();
        if(domainId != null){
            //for domainId != null
            //right now, we made the decision to only list zones associated with this domain
            dcs = _dcDao.findZonesByDomainId(domainId); //private zones
        }
        else if((account == null || account.getType() == Account.ACCOUNT_TYPE_ADMIN)){
            dcs = _dcDao.listAll(); //all zones
        }else if(account.getType() == Account.ACCOUNT_TYPE_NORMAL){
            //it was decided to return all zones for the user's domain, and everything above till root
            //list all zones belonging to this domain, and all of its parents
            //check the parent, if not null, add zones for that parent to list
            dcs = new ArrayList<DataCenterVO>();
            DomainVO domainRecord = _domainDao.findById(account.getDomainId());
            if(domainRecord != null)
            {
                // Walk up the domain tree to the root, accumulating zones.
                while(true){
                    dcs.addAll(_dcDao.findZonesByDomainId(domainRecord.getId()));
                    if(domainRecord.getParent() != null) {
                        domainRecord = _domainDao.findById(domainRecord.getParent());
                    } else {
                        break;
                    }
                }
            }
            //add all public zones too
            dcs.addAll(_dcDao.listPublicZones());
        }else if(account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN){
            //it was decided to return all zones for the domain admin, and everything above till root
            dcs = new ArrayList<DataCenterVO>();
            DomainVO domainRecord = _domainDao.findById(account.getDomainId());
            //this covers path till root
            if(domainRecord != null)
            {
                DomainVO localRecord = domainRecord;
                // Walk up the domain tree to the root, accumulating zones.
                while(true){
                    dcs.addAll(_dcDao.findZonesByDomainId(localRecord.getId()));
                    if(localRecord.getParent() != null) {
                        localRecord = _domainDao.findById(localRecord.getParent());
                    } else {
                        break;
                    }
                }
            }
            //this covers till leaf
            if(domainRecord != null){
                //find all children for this domain based on a like search by path
                List<DomainVO> allChildDomains = _domainDao.findAllChildren(domainRecord.getPath(), domainRecord.getId());
                List<Long> allChildDomainIds = new ArrayList<Long>();
                //create list of domainIds for search
                for(DomainVO domain : allChildDomains){
                    allChildDomainIds.add(domain.getId());
                }
                //now make a search for zones based on this
                if(allChildDomainIds.size() > 0){
                    List<DataCenterVO> childZones = _dcDao.findChildZones((allChildDomainIds.toArray()));
                    dcs.addAll(childZones);
                }
            }
            //add all public zones too
            dcs.addAll(_dcDao.listPublicZones());
        }
        // NOTE(review): an account type outside the three handled above would
        // leave dcs null and NPE below — confirm no such type reaches here.
        Boolean available = cmd.isAvailable();
        if (account != null) {
            // available == FALSE: keep only zones where the account already
            // has a domain router deployed.
            if ((available != null) && Boolean.FALSE.equals(available)) {
                List<DomainRouterVO> routers = _routerDao.listBy(account.getId());
                for (Iterator<DataCenterVO> iter = dcs.iterator(); iter.hasNext();) {
                    DataCenterVO dc = iter.next();
                    boolean found = false;
                    for (DomainRouterVO router : routers) {
                        if (dc.getId() == router.getDataCenterId()) {
                            found = true;
                            break;
                        }
                    }
                    if (!found) {
                        iter.remove();
                    }
                }
            }
        }
        // An explicit zone id narrows the result to that single zone (if present).
        if (id != null) {
            List<DataCenterVO> singleZone = new ArrayList<DataCenterVO>();
            for (DataCenterVO zone : dcs) {
                if (zone.getId() == id) {
                    singleZone.add(zone);
                }
            }
            return singleZone;
        }
        return dcs;
    }
@Override
public HostVO getHostBy(long hostId) {
return _hostDao.findById(hostId);
}
@Override
public long getId() {
return MacAddress.getMacAddress().toLong();
}
protected void checkPortParameters(String publicPort, String privatePort, String privateIp, String proto) throws InvalidParameterValueException {
if (!NetUtils.isValidPort(publicPort)) {
throw new InvalidParameterValueException("publicPort is an invalid value");
}
if (!NetUtils.isValidPort(privatePort)) {
throw new InvalidParameterValueException("privatePort is an invalid value");
}
// s_logger.debug("Checking if " + privateIp + " is a valid private IP address. Guest IP address is: " + _configs.get("guest.ip.network"));
//
// if (!NetUtils.isValidPrivateIp(privateIp, _configs.get("guest.ip.network"))) {
// throw new InvalidParameterValueException("Invalid private ip address");
// }
if (!NetUtils.isValidProto(proto)) {
throw new InvalidParameterValueException("Invalid protocol");
}
}
@Override
public List<EventVO> getEvents(long userId, long accountId, Long domainId, String type, String level, Date startDate, Date endDate) {
SearchCriteria<EventVO> sc = _eventDao.createSearchCriteria();
if (userId > 0) {
sc.addAnd("userId", SearchCriteria.Op.EQ, userId);
}
if (accountId > 0) {
sc.addAnd("accountId", SearchCriteria.Op.EQ, accountId);
}
if (domainId != null) {
sc.addAnd("domainId", SearchCriteria.Op.EQ, domainId);
}
if (type != null) {
sc.addAnd("type", SearchCriteria.Op.EQ, type);
}
if (level != null) {
sc.addAnd("level", SearchCriteria.Op.EQ, level);
}
if (startDate != null && endDate != null) {
startDate = massageDate(startDate, 0, 0, 0);
endDate = massageDate(endDate, 23, 59, 59);
sc.addAnd("createDate", SearchCriteria.Op.BETWEEN, startDate, endDate);
} else if (startDate != null) {
startDate = massageDate(startDate, 0, 0, 0);
sc.addAnd("createDate", SearchCriteria.Op.GTEQ, startDate);
} else if (endDate != null) {
endDate = massageDate(endDate, 23, 59, 59);
sc.addAnd("createDate", SearchCriteria.Op.LTEQ, endDate);
}
return _eventDao.search(sc, null);
}
private Date massageDate(Date date, int hourOfDay, int minute, int second) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, hourOfDay);
cal.set(Calendar.MINUTE, minute);
cal.set(Calendar.SECOND, second);
return cal.getTime();
}
/**
 * Lists user accounts matching the API command's filters, constrained to the
 * caller's domain subtree. The system user (id == 1) is never returned.
 *
 * @param cmd the list-users API command (optional id, username, account type,
 *            account name, state, keyword, domain id, paging parameters)
 * @return matching user accounts, possibly empty
 * @throws PermissionDeniedException if the requested domain is outside the caller's subtree
 */
@Override
public List<UserAccountVO> searchForUsers(ListUsersCmd cmd) throws PermissionDeniedException {
Account account = UserContext.current().getCaller();
Long domainId = cmd.getDomainId();
if (domainId != null) {
// Callers may only list users inside their own domain subtree.
if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list users.");
}
} else {
// default domainId to the admin's domain
domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
}
Filter searchFilter = new Filter(UserAccountVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
Long id = cmd.getId();
Object username = cmd.getUsername();
Object type = cmd.getAccountType();
Object accountName = cmd.getAccountName();
Object state = cmd.getState();
Object keyword = cmd.getKeyword();
SearchBuilder<UserAccountVO> sb = _userAccountDao.createSearchBuilder();
sb.and("username", sb.entity().getUsername(), SearchCriteria.Op.LIKE);
if (id != null && id == 1) {
//system user should NOT be searchable
List<UserAccountVO> emptyList = new ArrayList<UserAccountVO>();
return emptyList;
} else if (id != null) {
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
} else {
//this condition is used to exclude system user from the search results
sb.and("id", sb.entity().getId(), SearchCriteria.Op.NEQ);
}
sb.and("type", sb.entity().getType(), SearchCriteria.Op.EQ);
sb.and("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
sb.and("accountName", sb.entity().getAccountName(), SearchCriteria.Op.LIKE);
sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ);
if ((accountName == null) && (domainId != null)) {
// No account filter: match the whole domain subtree by joining on the
// domain table and filtering its path with LIKE 'prefix%'.
SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
}
SearchCriteria<UserAccountVO> sc = sb.create();
if (keyword != null) {
// Keyword matches any of the listed user/account fields (OR-ed sub-criteria).
SearchCriteria<UserAccountVO> ssc = _userAccountDao.createSearchCriteria();
ssc.addOr("username", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("firstname", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("lastname", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("email", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("accountName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("accountState", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("username", SearchCriteria.Op.SC, ssc);
}
if (username != null) {
sc.setParameters("username", "%" + username + "%");
}
if (id != null) {
sc.setParameters("id", id);
} else {
//Don't return system user, search builder with NEQ
sc.setParameters("id", 1);
}
if (type != null) {
sc.setParameters("type", type);
}
if (accountName != null) {
sc.setParameters("accountName", "%" + accountName + "%");
if (domainId != null) {
sc.setParameters("domainId", domainId);
}
} else if (domainId != null) {
DomainVO domainVO = _domainDao.findById(domainId);
sc.setJoinParameters("domainSearch", "path", domainVO.getPath() + "%");
}
if (state != null) {
sc.setParameters("state", state);
}
return _userAccountDao.search(sc, searchFilter);
}
/**
 * Permission check shared by disk- and service-offering listings: returns true
 * when the account's domain equals the offering's domain or is a descendant of
 * it (walks up the domain tree from the account's domain to the root).
 *
 * @param accountDomainId  domain id of the caller's account
 * @param offeringDomainId domain id the offering is restricted to
 * @return true if the account falls inside the offering's domain hierarchy
 */
private boolean isPermissible(Long accountDomainId, Long offeringDomainId) {
    // Bug fix: the original compared the two Longs with ==, a reference
    // comparison that only happens to work for values in the Long cache
    // (-128..127). Compare by value instead (null == null still matches).
    if (accountDomainId == offeringDomainId
            || (accountDomainId != null && accountDomainId.equals(offeringDomainId))) {
        return true; // account and service offering in same domain
    }
    DomainVO domainRecord = _domainDao.findById(accountDomainId);
    if (domainRecord != null) {
        while (true) {
            // Guarded value comparison also avoids the NPE the original risked
            // by auto-unboxing a possibly-null offeringDomainId.
            if (offeringDomainId != null && domainRecord.getId() == offeringDomainId.longValue()) {
                return true;
            }
            // try and move on to the next (parent) domain; stop at the root
            if (domainRecord.getParent() != null) {
                domainRecord = _domainDao.findById(domainRecord.getParent());
            } else {
                break;
            }
        }
    }
    return false;
}
/**
 * Lists service offerings visible to the caller. Policy: root admins see all
 * non-system offerings; domain admins and normal users see offerings of their
 * own domain plus all ancestor domains (see searchServiceOfferingsInternal).
 * When an explicit domainId is given, only that domain's offerings are
 * returned, after a hierarchy permission check for non-root callers.
 *
 * @throws InvalidParameterValueException if the VM used for filtering does not exist
 * @throws PermissionDeniedException if the caller may not see that VM
 */
@Override
public List<ServiceOfferingVO> searchForServiceOfferings(ListServiceOfferingsCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
//Note
//The list method for offerings is being modified in accordance with discussion with Will/Kevin
//For now, we will be listing the following based on the usertype
//1. For root, we will list all offerings
//2. For domainAdmin and regular users, we will list everything in their domains+parent domains ... all the way till root
Filter searchFilter = new Filter(ServiceOfferingVO.class, "created", false, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<ServiceOfferingVO> sc = _offeringsDao.createSearchCriteria();
Account account = UserContext.current().getCaller();
Object name = cmd.getServiceOfferingName();
Object id = cmd.getId();
Object keyword = cmd.getKeyword();
Long vmId = cmd.getVirtualMachineId();
Long domainId = cmd.getDomainId();
//Keeping this logic consistent with domain specific zones
//if a domainId is provided, we just return the so associated with this domain
if(domainId != null){
if(account.getType() == Account.ACCOUNT_TYPE_ADMIN){
return _offeringsDao.findServiceOfferingByDomainId(domainId);//no perm check
}else{
//check if the user's domain == so's domain || user's domain is a child of so's domain
if(isPermissible(account.getDomainId(), domainId)){
//perm check succeeded
return _offeringsDao.findServiceOfferingByDomainId(domainId);
}else{
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "The account:"+account.getAccountName()+" does not fall in the same domain hierarchy as the service offering");
}
}
}
//For non-root users
if((account.getType() == Account.ACCOUNT_TYPE_NORMAL || account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)){
return searchServiceOfferingsInternal(account, name, id, vmId, keyword, searchFilter);
}
//for root users, the existing flow
if (keyword != null) {
// Keyword matches either the display text or the name (OR-ed sub-criteria).
SearchCriteria<ServiceOfferingVO> ssc = _offeringsDao.createSearchCriteria();
ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
} else if (vmId != null) {
// Filtering relative to an existing VM: exclude the VM's current offering
// and keep only offerings with the same local-storage preference.
UserVmVO vmInstance = _userVmDao.findById(vmId);
if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
throw new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
}
if ((account != null) && !isAdmin(account.getType())) {
if (account.getId() != vmInstance.getAccountId()) {
throw new PermissionDeniedException("unable to find a virtual machine with id " + vmId + " for this account");
}
}
ServiceOfferingVO offering = _offeringsDao.findById(vmInstance.getServiceOfferingId());
sc.addAnd("id", SearchCriteria.Op.NEQ, offering.getId());
// Only return offerings with the same Guest IP type and storage pool preference
//sc.addAnd("guestIpType", SearchCriteria.Op.EQ, offering.getGuestIpType());
sc.addAnd("useLocalStorage", SearchCriteria.Op.EQ, offering.getUseLocalStorage());
}
if (id != null) {
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (name != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
}
// System-use offerings are never exposed through this API.
sc.addAnd("systemUse", SearchCriteria.Op.EQ, false);
return _offeringsDao.search(sc, searchFilter);
}
/**
 * Offering search for non-root callers (normal users and domain admins):
 * collects the offerings of the caller's domain and every ancestor domain up
 * to the root, one query per domain level. Public offerings are appended only
 * when no narrowing filter (keyword, vmId, id, name) was supplied.
 *
 * @throws InvalidParameterValueException if the VM used for filtering does not exist
 * @throws PermissionDeniedException if the caller may not see that VM
 * @throws CloudAuthenticationException if the account's domain cannot be resolved
 */
private List<ServiceOfferingVO> searchServiceOfferingsInternal(Account account, Object name, Object id, Long vmId, Object keyword, Filter searchFilter){
//it was decided to return all offerings for the user's domain, and everything above till root (for normal user or domain admin)
//list all offerings belonging to this domain, and all of its parents
//check the parent, if not null, add offerings for that parent to list
List<ServiceOfferingVO> sol = new ArrayList<ServiceOfferingVO>();
DomainVO domainRecord = _domainDao.findById(account.getDomainId());
boolean includePublicOfferings = true;
if(domainRecord != null)
{
while(true){
// Fresh criteria per domain level; only the domainId condition changes.
SearchCriteria<ServiceOfferingVO> sc = _offeringsDao.createSearchCriteria();
if (keyword != null) {
includePublicOfferings = false;
SearchCriteria<ServiceOfferingVO> ssc = _offeringsDao.createSearchCriteria();
ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
} else if (vmId != null) {
includePublicOfferings = false;
UserVmVO vmInstance = _userVmDao.findById(vmId);
if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
throw new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
}
if ((account != null) && !isAdmin(account.getType())) {
if (account.getId() != vmInstance.getAccountId()) {
throw new PermissionDeniedException("unable to find a virtual machine with id " + vmId + " for this account");
}
}
// Exclude the VM's current offering; keep matching guest-IP type and
// local-storage preference.
ServiceOfferingVO offering = _offeringsDao.findById(vmInstance.getServiceOfferingId());
sc.addAnd("id", SearchCriteria.Op.NEQ, offering.getId());
// Only return offerings with the same Guest IP type and storage pool preference
sc.addAnd("guestIpType", SearchCriteria.Op.EQ, offering.getGuestIpType());
sc.addAnd("useLocalStorage", SearchCriteria.Op.EQ, offering.getUseLocalStorage());
}
if (id != null) {
includePublicOfferings = false;
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (name != null) {
includePublicOfferings = false;
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
}
sc.addAnd("systemUse", SearchCriteria.Op.EQ, false);
//for this domain
sc.addAnd("domainId", SearchCriteria.Op.EQ, domainRecord.getId());
//search and add for this domain
sol.addAll(_offeringsDao.search(sc, searchFilter));
//try and move on to the next domain
if(domainRecord.getParent() != null) {
domainRecord = _domainDao.findById(domainRecord.getParent());
}
else {
break;//now we got all the offerings for this user/dom adm
}
}
}else{
s_logger.error("Could not find the domainId for account:"+account.getAccountName());
throw new CloudAuthenticationException("Could not find the domainId for account:"+account.getAccountName());
}
//add all the public offerings to the sol list before returning
if(includePublicOfferings) {
sol.addAll(_offeringsDao.findPublicServiceOfferings());
}
return sol;
}
/**
 * Lists clusters matching the command's optional filters (id, name, pod, zone,
 * hypervisor type, cluster type); results are paged and ordered by id.
 */
@Override
public List<ClusterVO> searchForClusters(ListClustersCmd cmd) {
    Filter pageFilter = new Filter(ClusterVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchCriteria<ClusterVO> criteria = _clusterDao.createSearchCriteria();
    Object clusterId = cmd.getId();
    Object clusterName = cmd.getClusterName();
    Object podId = cmd.getPodId();
    Object zoneId = cmd.getZoneId();
    Object hypervisorType = cmd.getHypervisorType();
    Object clusterType = cmd.getClusterType();
    if (clusterId != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, clusterId);
    }
    if (clusterName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + clusterName + "%");
    }
    if (podId != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, podId);
    }
    if (zoneId != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    if (hypervisorType != null) {
        criteria.addAnd("hypervisorType", SearchCriteria.Op.EQ, hypervisorType);
    }
    if (clusterType != null) {
        criteria.addAnd("clusterType", SearchCriteria.Op.EQ, clusterType);
    }
    return _clusterDao.search(criteria, pageFilter);
}
/**
 * API entry point for host listing: unpacks the command's filters and
 * delegates to the shared internal host search.
 */
@Override
public List<HostVO> searchForServers(ListHostsCmd cmd) {
    return searchForServers(cmd.getStartIndex(), cmd.getPageSizeVal(), cmd.getHostName(), cmd.getType(),
            cmd.getState(), cmd.getZoneId(), cmd.getPodId(), cmd.getClusterId(), cmd.getId(), cmd.getKeyword());
}
/**
 * Shared host search; every filter argument is optional (null means "no
 * filter"). Results are paged and ordered by id ascending.
 */
private List<HostVO> searchForServers(Long startIndex, Long pageSize, Object name, Object type, Object state, Object zone, Object pod, Object cluster, Object id, Object keyword) {
    Filter pageFilter = new Filter(HostVO.class, "id", Boolean.TRUE, startIndex, pageSize);
    SearchCriteria<HostVO> criteria = _hostDao.createSearchCriteria();
    if (keyword != null) {
        // Keyword matches any of name/status/type (OR-ed sub-criteria).
        SearchCriteria<HostVO> keywordCriteria = _hostDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("status", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (id != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (name != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    if (type != null) {
        criteria.addAnd("type", SearchCriteria.Op.EQ, type);
    }
    if (state != null) {
        criteria.addAnd("status", SearchCriteria.Op.EQ, state);
    }
    if (zone != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
    }
    if (pod != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, pod);
    }
    if (cluster != null) {
        criteria.addAnd("clusterId", SearchCriteria.Op.EQ, cluster);
    }
    return _hostDao.search(criteria, pageFilter);
}
/**
 * Lists pods matching the command's optional id/name/zone/keyword filters;
 * results are paged and ordered by data center id.
 */
@Override
public List<HostPodVO> searchForPods(ListPodsByCmd cmd) {
    Filter pageFilter = new Filter(HostPodVO.class, "dataCenterId", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchCriteria<HostPodVO> criteria = _hostPodDao.createSearchCriteria();
    String podName = cmd.getPodName();
    Long podId = cmd.getId();
    Long zoneId = cmd.getZoneId();
    Object keyword = cmd.getKeyword();
    if (keyword != null) {
        // Keyword matches name or description (OR-ed sub-criteria).
        SearchCriteria<HostPodVO> keywordCriteria = _hostPodDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (podId != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, podId);
    }
    if (podName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + podName + "%");
    }
    if (zoneId != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    return _hostPodDao.search(criteria, pageFilter);
}
/**
 * Lists zones. An explicit zone id short-circuits the search and returns just
 * that zone; otherwise an optional name LIKE filter is applied with the
 * criteria's own paging/ordering.
 */
@Override
public List<DataCenterVO> searchForZones(Criteria c) {
    Long dataCenterId = (Long) c.getCriteria(Criteria.DATACENTERID);
    if (dataCenterId != null) {
        List<DataCenterVO> single = new ArrayList<DataCenterVO>();
        single.add(_dcDao.findById(dataCenterId));
        return single;
    }
    Filter pageFilter = new Filter(DataCenterVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    SearchCriteria<DataCenterVO> criteria = _dcDao.createSearchCriteria();
    String zoneName = (String) c.getCriteria(Criteria.ZONENAME);
    if (zoneName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + zoneName + "%");
    }
    return _dcDao.search(criteria, pageFilter);
}
/**
 * Lists VLAN IP ranges matching the command's filters. Optional account
 * (accountName + domainId) and pod filters are applied via joins on the
 * account-VLAN and pod-VLAN map tables. When a keyword is given, all other
 * non-join filters are skipped (mirrors the original branching).
 *
 * @throws InvalidParameterValueException if the named account does not exist in the domain
 */
@Override
public List<VlanVO> searchForVlans(ListVlanIpRangesCmd cmd) throws InvalidParameterValueException {
    // If an account name and domain ID are specified, look up the account
    String accountName = cmd.getAccountName();
    Long domainId = cmd.getDomainId();
    Long accountId = null;
    Long networkId = cmd.getNetworkId();
    Boolean forVirtual = cmd.getForVirtualNetwork();
    String vlanType = null;
    if (accountName != null && domainId != null) {
        Account account = _accountDao.findActiveAccount(accountName, domainId);
        if (account == null) {
            throw new InvalidParameterValueException("Unable to find account " + accountName + " in domain " + domainId);
        } else {
            accountId = account.getId();
        }
    }
    if (forVirtual != null) {
        // Map the boolean API flag onto the VlanType enum's string form.
        if (forVirtual) {
            vlanType = VlanType.VirtualNetwork.toString();
        } else {
            vlanType = VlanType.DirectAttached.toString();
        }
    }
    Filter searchFilter = new Filter(VlanVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object id = cmd.getId();
    Object vlan = cmd.getVlan();
    Object dataCenterId = cmd.getZoneId();
    Object podId = cmd.getPodId();
    Object keyword = cmd.getKeyword();
    SearchBuilder<VlanVO> sb = _vlanDao.createSearchBuilder();
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    // Bug fix: the "vlan" condition was registered twice on the builder
    // (duplicate sb.and("vlan", ...) lines); register it exactly once.
    sb.and("vlan", sb.entity().getVlanTag(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("networkId", sb.entity().getNetworkId(), SearchCriteria.Op.EQ);
    sb.and("vlanType", sb.entity().getVlanType(), SearchCriteria.Op.EQ);
    if (accountId != null) {
        // Restrict to VLANs dedicated to the resolved account.
        SearchBuilder<AccountVlanMapVO> accountVlanMapSearch = _accountVlanMapDao.createSearchBuilder();
        accountVlanMapSearch.and("accountId", accountVlanMapSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
        sb.join("accountVlanMapSearch", accountVlanMapSearch, sb.entity().getId(), accountVlanMapSearch.entity().getVlanDbId(), JoinBuilder.JoinType.INNER);
    }
    if (podId != null) {
        // Restrict to VLANs mapped to the requested pod.
        SearchBuilder<PodVlanMapVO> podVlanMapSearch = _podVlanMapDao.createSearchBuilder();
        podVlanMapSearch.and("podId", podVlanMapSearch.entity().getPodId(), SearchCriteria.Op.EQ);
        sb.join("podVlanMapSearch", podVlanMapSearch, sb.entity().getId(), podVlanMapSearch.entity().getVlanDbId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<VlanVO> sc = sb.create();
    if (keyword != null) {
        // Keyword search matches the VLAN tag or IP range (OR-ed sub-criteria)
        // and bypasses all other scalar filters, as in the original flow.
        SearchCriteria<VlanVO> ssc = _vlanDao.createSearchCriteria();
        ssc.addOr("vlanId", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("ipRange", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("vlanId", SearchCriteria.Op.SC, ssc);
    } else {
        if (id != null) {
            sc.setParameters("id", id);
        }
        if (vlan != null) {
            sc.setParameters("vlan", vlan);
        }
        if (dataCenterId != null) {
            sc.setParameters("dataCenterId", dataCenterId);
        }
        if (networkId != null) {
            sc.setParameters("networkId", networkId);
        }
        if (accountId != null) {
            sc.setJoinParameters("accountVlanMapSearch", "accountId", accountId);
        }
        if (podId != null) {
            sc.setJoinParameters("podVlanMapSearch", "podId", podId);
        }
        if (vlanType != null) {
            sc.setParameters("vlanType", vlanType);
        }
    }
    return _vlanDao.search(sc, searchFilter);
}
/**
 * Returns the pod id of the first pod-VLAN mapping for the given VLAN row,
 * or null when the VLAN is not mapped to any pod.
 */
@Override
public Long getPodIdForVlan(long vlanDbId) {
    List<PodVlanMapVO> mappings = _podVlanMapDao.listPodVlanMapsByVlan(vlanDbId);
    return mappings.isEmpty() ? null : mappings.get(0).getPodId();
}
/**
 * Lists configuration entries matching the command's optional name, category,
 * and keyword filters. Entries in the "Hidden" category are always excluded.
 */
@Override
public List<ConfigurationVO> searchForConfigurations(ListCfgsByCmd cmd) {
    Filter pageFilter = new Filter(ConfigurationVO.class, "name", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchCriteria<ConfigurationVO> criteria = _configDao.createSearchCriteria();
    Object configName = cmd.getConfigName();
    Object category = cmd.getCategory();
    Object keyword = cmd.getKeyword();
    if (keyword != null) {
        // Keyword matches any of the listed config fields (OR-ed sub-criteria).
        SearchCriteria<ConfigurationVO> keywordCriteria = _configDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("instance", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("component", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("category", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("value", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (configName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + configName + "%");
    }
    if (category != null) {
        criteria.addAnd("category", SearchCriteria.Op.EQ, category);
    }
    // hidden configurations are not displayed using the search API
    criteria.addAnd("category", SearchCriteria.Op.NEQ, "Hidden");
    return _configDao.search(criteria, pageFilter);
}
/**
 * Lists hosts whose status is in the optional set supplied via the criteria's
 * STATE entry; paging/ordering come from the criteria object.
 */
@Override
public List<HostVO> searchForAlertServers(Criteria c) {
    Filter pageFilter = new Filter(HostVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    SearchCriteria<HostVO> criteria = _hostDao.createSearchCriteria();
    Object[] states = (Object[]) c.getCriteria(Criteria.STATE);
    if (states != null) {
        criteria.addAnd("status", SearchCriteria.Op.IN, states);
    }
    return _hostDao.search(criteria, pageFilter);
}
/**
 * Lists VM templates matching the criteria's optional name, id, public flag,
 * creator, and keyword filters. ISO images are always excluded via the
 * NEQ-on-format condition set at the bottom.
 */
@Override
public List<VMTemplateVO> searchForTemplates(Criteria c) {
Filter searchFilter = new Filter(VMTemplateVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
Object name = c.getCriteria(Criteria.NAME);
Object isPublic = c.getCriteria(Criteria.ISPUBLIC);
Object id = c.getCriteria(Criteria.ID);
Object keyword = c.getCriteria(Criteria.KEYWORD);
Long creator = (Long) c.getCriteria(Criteria.CREATED_BY);
SearchBuilder<VMTemplateVO> sb = _templateDao.createSearchBuilder();
sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
sb.and("publicTemplate", sb.entity().isPublicTemplate(), SearchCriteria.Op.EQ);
// NEQ on format: the "format" parameter set below (ImageFormat.ISO) filters ISOs OUT.
sb.and("format", sb.entity().getFormat(), SearchCriteria.Op.NEQ);
sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
SearchCriteria<VMTemplateVO> sc = sb.create();
if (keyword != null) {
// Keyword matches any of the listed fields (OR-ed sub-criteria).
SearchCriteria<VMTemplateVO> ssc = _templateDao.createSearchCriteria();
ssc.addOr("displayName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("group", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("instanceName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (id != null) {
sc.setParameters("id", id);
}
if (name != null) {
sc.setParameters("name", "%" + name + "%");
}
if (isPublic != null) {
sc.setParameters("publicTemplate", isPublic);
}
if (creator != null) {
sc.setParameters("accountId", creator);
}
sc.setParameters("format", ImageFormat.ISO);
return _templateDao.search(sc, searchFilter);
}
/**
 * Lists ISO images visible to the caller. Resolves the effective accountId
 * from the caller and the optional accountName/domainId pair (admins may look
 * up other accounts within their domain subtree), then delegates to the shared
 * listTemplates helper with isIso == true.
 *
 * @throws InvalidParameterValueException on bad domain/account parameters
 */
@Override
public Set<Pair<Long, Long>> listIsos(ListIsosCmd cmd) throws IllegalArgumentException, InvalidParameterValueException {
    TemplateFilter isoFilter = TemplateFilter.valueOf(cmd.getIsoFilter());
    Long accountId = null;
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    if ((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN)) {
        // validate domainId before proceeding
        if ((domainId != null) && (accountName != null)) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                // Bug fix: the message previously said "unable to list events"
                // (copy/paste from the events API).
                throw new InvalidParameterValueException("Invalid domain id (" + domainId + ") given, unable to list ISOs.");
            }
            Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
            if (userAccount != null) {
                accountId = userAccount.getId();
            } else {
                throw new InvalidParameterValueException("Failed to list ISOs. Unable to find account " + accountName + " in domain " + domainId);
            }
        } else if (account != null) {
            accountId = account.getId();
        }
    } else {
        accountId = account.getId();
    }
    //It is account specific if account is admin type and domainId and accountName are not null
    boolean isAccountSpecific = (account == null || isAdmin(account.getType()))
        && (accountName != null)
        && (domainId != null);
    HypervisorType hypervisorType = HypervisorType.getType(cmd.getHypervisor());
    return listTemplates(cmd.getId(), cmd.getIsoName(), cmd.getKeyword(), isoFilter, true, cmd.isBootable(), accountId, cmd.getPageSizeVal(), cmd.getStartIndex(), cmd.getZoneId(), hypervisorType, isAccountSpecific, true);
}
/**
 * Lists templates visible to the caller. Resolves the effective accountId from
 * the caller and the optional accountName/domainId pair (admins may look up
 * other accounts within their domain subtree), then delegates to the shared
 * listTemplates helper with isIso == false. Domain-router templates are hidden
 * from "selfexecutable" listings.
 *
 * @throws InvalidParameterValueException on bad domain/account parameters
 */
@Override
public Set<Pair<Long, Long>> listTemplates(ListTemplatesCmd cmd) throws IllegalArgumentException, InvalidParameterValueException {
    TemplateFilter templateFilter = TemplateFilter.valueOf(cmd.getTemplateFilter());
    Long accountId = null;
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    if ((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN)) {
        // validate domainId before proceeding
        if ((domainId != null) && (accountName != null)) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                // Bug fix: the message previously said "unable to list events"
                // (copy/paste from the events API).
                throw new InvalidParameterValueException("Invalid domain id (" + domainId + ") given, unable to list templates.");
            }
            Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
            if (userAccount != null) {
                accountId = userAccount.getId();
            } else {
                // Bug fix: the message previously said "Failed to list ISOs"
                // (copy/paste from listIsos).
                throw new InvalidParameterValueException("Failed to list templates. Unable to find account " + accountName + " in domain " + domainId);
            }
        } else if (account != null) {
            accountId = account.getId();
        }
    } else {
        accountId = account.getId();
    }
    //It is account specific if account is admin type and domainId and accountName are not null
    boolean isAccountSpecific = (account == null || isAdmin(account.getType()))
        && (accountName != null)
        && (domainId != null);
    // Domain-router templates are excluded from self-executable listings.
    boolean showDomr = (templateFilter != TemplateFilter.selfexecutable);
    HypervisorType hypervisorType = HypervisorType.getType(cmd.getHypervisor());
    return listTemplates(cmd.getId(), cmd.getTemplateName(), cmd.getKeyword(), templateFilter, false, null, accountId, cmd.getPageSizeVal(), cmd.getStartIndex(), cmd.getZoneId(), hypervisorType, isAccountSpecific, showDomr);
}
/**
 * Shared template/ISO listing helper. When templateId is given, validates that
 * the template exists and that its format agrees with the isIso flag, and
 * returns just that (template, zone) pair; otherwise delegates the filtered
 * search to the template DAO. For "ready-only" filters, only fully downloaded
 * templates are returned.
 *
 * @return set of (templateId, zoneId) pairs
 * @throws InvalidParameterValueException if templateId is unknown or its format
 *         does not match the isIso flag
 */
private Set<Pair<Long, Long>> listTemplates(Long templateId, String name, String keyword, TemplateFilter templateFilter, boolean isIso, Boolean bootable, Long accountId, Long pageSize, Long startIndex, Long zoneId, HypervisorType hyperType, boolean isAccountSpecific, boolean showDomr) throws InvalidParameterValueException {
VMTemplateVO template = null;
if (templateId != null) {
template = _templateDao.findById(templateId);
if (template == null) {
throw new InvalidParameterValueException("Please specify a valid template ID.");
}// If ISO requested then it should be ISO.
if (isIso && template.getFormat() != ImageFormat.ISO){
s_logger.error("Template Id " + templateId + " is not an ISO");
throw new InvalidParameterValueException("Template Id " + templateId + " is not an ISO");
}// If ISO not requested then it shouldn't be an ISO.
if (!isIso && template.getFormat() == ImageFormat.ISO){
s_logger.error("Incorrect format of the template id " + templateId);
throw new InvalidParameterValueException("Incorrect format " + template.getFormat() + " of the template id " + templateId);
}
}
// Show only those that are downloaded.
boolean onlyReady = (templateFilter == TemplateFilter.featured) ||
(templateFilter == TemplateFilter.selfexecutable) ||
(templateFilter == TemplateFilter.sharedexecutable) ||
(templateFilter == TemplateFilter.executable && isAccountSpecific) ||
(templateFilter == TemplateFilter.community);
Account account = null;
DomainVO domain = null;
if (accountId != null) {
// Scope the search to the account's domain; fall back to root otherwise.
account = _accountDao.findById(accountId);
domain = _domainDao.findById(account.getDomainId());
} else {
domain = _domainDao.findById(DomainVO.ROOT_DOMAIN);
}
Set<Pair<Long, Long>> templateZonePairSet = new HashSet<Pair<Long,Long>>();
if (template == null) {
templateZonePairSet = _templateDao.searchTemplates(name, keyword, templateFilter, isIso, bootable, account, domain, pageSize, startIndex, zoneId, hyperType, onlyReady, showDomr);
} else {
// A specific, validated template was requested: return it directly.
templateZonePairSet.add(new Pair<Long,Long>(template.getId(), zoneId));
}
return templateZonePairSet;
}
/** Lists the templates the given account has launch permission for (DAO delegate). */
@Override
public List<VMTemplateVO> listPermittedTemplates(long accountId) {
return _launchPermissionDao.listPermittedTemplates(accountId);
}
/** Lists all pods belonging to the given data center (zone). */
@Override
public List<HostPodVO> listPods(long dataCenterId) {
return _hostPodDao.listByDataCenterId(dataCenterId);
}
/**
 * Adds or removes (per the add flag) the given private IP range on a pod;
 * delegates to the configuration manager and returns its status message.
 */
@Override
public String changePrivateIPRange(boolean add, Long podId, String startIP, String endIP) throws InvalidParameterValueException {
return _configMgr.changePrivateIPRange(add, podId, startIP, endIP);
}
/** Looks up a user by id (DAO delegate; presumably null when absent — per DAO contract). */
@Override
public User findUserById(Long userId) {
return _userDao.findById(userId);
}
/** Finds accounts whose name matches the given pattern (DAO-side LIKE search). */
@Override
public List<AccountVO> findAccountsLike(String accountName) {
return _accountDao.findAccountsLike(accountName);
}
/** Finds an active (non-removed) account by name across domains (DAO delegate). */
@Override
public Account findActiveAccountByName(String accountName) {
return _accountDao.findActiveAccountByName(accountName);
}
/**
 * Finds an active account by name within a domain; a null domainId defaults
 * to the root domain.
 */
@Override
public Account findActiveAccount(String accountName, Long domainId) {
    Long effectiveDomainId = (domainId == null) ? DomainVO.ROOT_DOMAIN : domainId;
    return _accountDao.findActiveAccount(accountName, effectiveDomainId);
}
/**
 * Finds an account (active or not) by name within a domain; a null domainId
 * defaults to the root domain.
 */
@Override
public Account findAccountByName(String accountName, Long domainId) {
    Long effectiveDomainId = (domainId == null) ? DomainVO.ROOT_DOMAIN : domainId;
    return _accountDao.findAccount(accountName, effectiveDomainId);
}
/** Looks up an account by id (DAO delegate). */
@Override
public Account findAccountById(Long accountId) {
return _accountDao.findById(accountId);
}
/**
 * Lists accounts matching the API command's filters. Admins may search by name
 * within their domain subtree; regular users are constrained to their own
 * account. The system account (id == 1) is never returned.
 *
 * @param cmd list-accounts command (optional id, search name, type, state,
 *            cleanup flag, keyword, domain id, paging)
 * @return matching accounts, possibly empty
 */
@Override
public List<AccountVO> searchForAccounts(ListAccountsCmd cmd) {
Account account = UserContext.current().getCaller();
Long domainId = cmd.getDomainId();
Long accountId = cmd.getId();
String accountName = null;
if(accountId != null && accountId == 1){
//system account should NOT be searchable
List<AccountVO> emptyList = new ArrayList<AccountVO>();
return emptyList;
}
if ((account == null) || isAdmin(account.getType())) {
accountName = cmd.getSearchName(); // admin's can specify a name to search for
if (domainId == null) {
// default domainId to the admin's domain
domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
} else if (account != null) {
// An explicit domain must lie within the caller's subtree.
if (!_domainDao.isChildDomain(account.getDomainId(), domainId)) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Invalid domain id (" + domainId + ") given, unable to list accounts");
}
}
} else {
accountId = account.getId();
accountName = account.getAccountName(); // regular users must be constrained to their own account
}
Filter searchFilter = new Filter(AccountVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
Object type = cmd.getAccountType();
Object state = cmd.getState();
Object isCleanupRequired = cmd.isCleanupRequired();
Object keyword = cmd.getKeyword();
SearchBuilder<AccountVO> sb = _accountDao.createSearchBuilder();
sb.and("accountName", sb.entity().getAccountName(), SearchCriteria.Op.LIKE);
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
// "nid" (NEQ on id) is used to exclude the system account when no id filter applies.
sb.and("nid", sb.entity().getId(), SearchCriteria.Op.NEQ);
sb.and("type", sb.entity().getType(), SearchCriteria.Op.EQ);
sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ);
sb.and("needsCleanup", sb.entity().getNeedsCleanup(), SearchCriteria.Op.EQ);
if ((accountId == null) && (domainId != null)) {
// if accountId isn't specified, we can do a domain match for the admin case
SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
}
SearchCriteria<AccountVO> sc = sb.create();
if (keyword != null) {
// Keyword matches account name or state (OR-ed sub-criteria).
SearchCriteria<AccountVO> ssc = _accountDao.createSearchCriteria();
ssc.addOr("accountName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("accountName", SearchCriteria.Op.SC, ssc);
}
if (accountName != null) {
sc.setParameters("accountName", "%" + accountName + "%");
}
if (accountId != null) {
sc.setParameters("id", accountId);
} else if (domainId != null) {
DomainVO domain = _domainDao.findById(domainId);
// I want to join on user_vm.domain_id = domain.id where domain.path like 'foo%'
sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
sc.setParameters("nid", 1L);
} else {
sc.setParameters("nid", 1L);
}
if (type != null) {
sc.setParameters("type", type);
}
if (state != null) {
sc.setParameters("state", state);
}
if (isCleanupRequired != null) {
sc.setParameters("needsCleanup", isCleanupRequired);
}
return _accountDao.search(sc, searchFilter);
}
/**
 * Deletes (expunges) a resource limit row. Returns false when no id is given,
 * otherwise the DAO's expunge result.
 */
@Override
public boolean deleteLimit(Long limitId) {
    // A limit ID must be passed in; nothing to delete otherwise.
    return (limitId != null) && _resourceLimitDao.expunge(limitId);
}
/** Looks up a resource limit by id (DAO delegate). */
@Override
public ResourceLimitVO findLimitById(long limitId) {
return _resourceLimitDao.findById(limitId);
}
/**
 * Lists ISO images (format == ISO) with optional creator, ready, public-flag,
 * and keyword filters; paging/ordering come from the criteria object.
 */
@Override
public List<VMTemplateVO> listIsos(Criteria c) {
    Filter pageFilter = new Filter(VMTemplateVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    Boolean ready = (Boolean) c.getCriteria(Criteria.READY);
    Boolean isPublic = (Boolean) c.getCriteria(Criteria.ISPUBLIC);
    Long creator = (Long) c.getCriteria(Criteria.CREATED_BY);
    Object keyword = c.getCriteria(Criteria.KEYWORD);
    SearchCriteria<VMTemplateVO> criteria = _templateDao.createSearchCriteria();
    if (keyword != null) {
        // Keyword matches display text or name (OR-ed sub-criteria).
        SearchCriteria<VMTemplateVO> keywordCriteria = _templateDao.createSearchCriteria();
        keywordCriteria.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (creator != null) {
        criteria.addAnd("accountId", SearchCriteria.Op.EQ, creator);
    }
    if (ready != null) {
        criteria.addAnd("ready", SearchCriteria.Op.EQ, ready);
    }
    if (isPublic != null) {
        criteria.addAnd("publicTemplate", SearchCriteria.Op.EQ, isPublic);
    }
    criteria.addAnd("format", SearchCriteria.Op.EQ, ImageFormat.ISO);
    return _templateDao.search(criteria, pageFilter);
}
/**
 * Finds VM instances whose name matches the given pattern.
 *
 * @param vmInstanceName name (or fragment) to match
 * @return the matching VM instances
 */
@Override
public List<VMInstanceVO> findVMInstancesLike(String vmInstanceName) {
    final List<VMInstanceVO> matches = _vmInstanceDao.findVMInstancesLike(vmInstanceName);
    return matches;
}
/**
 * Looks up a VM instance by id.
 *
 * @param vmId id of the VM
 * @return the VM instance, or null if none exists
 */
@Override
public VMInstanceVO findVMInstanceById(long vmId) {
    final VMInstanceVO vm = _vmInstanceDao.findById(vmId);
    return vm;
}
/**
 * Looks up a user VM by id.
 *
 * @param userVmId id of the user VM
 * @return the user VM, or null if none exists
 */
@Override
public UserVmVO findUserVMInstanceById(long userVmId) {
    final UserVmVO userVm = _userVmDao.findById(userVmId);
    return userVm;
}
/**
 * Looks up a service offering by id.
 *
 * @param offeringId id of the offering
 * @return the offering, or null if none exists
 */
@Override
public ServiceOfferingVO findServiceOfferingById(long offeringId) {
    final ServiceOfferingVO offering = _offeringsDao.findById(offeringId);
    return offering;
}
/**
 * Lists every service offering, including removed ones.
 *
 * @return all service offerings (removed rows included)
 */
@Override
public List<ServiceOfferingVO> listAllServiceOfferings() {
    final List<ServiceOfferingVO> offerings = _offeringsDao.listAllIncludingRemoved();
    return offerings;
}
/**
 * Lists all hosts known to the host DAO.
 *
 * @return all hosts
 */
@Override
public List<HostVO> listAllActiveHosts() {
    final List<HostVO> hosts = _hostDao.listAll();
    return hosts;
}
/**
 * Looks up a data center (zone) by id.
 *
 * @param dataCenterId id of the zone
 * @return the zone, or null if none exists
 */
@Override
public DataCenterVO findDataCenterById(long dataCenterId) {
    final DataCenterVO zone = _dcDao.findById(dataCenterId);
    return zone;
}
/**
 * Updates an ISO; delegates to the shared template/ISO update logic.
 *
 * @param cmd the update command
 * @return the updated template row
 * @throws InvalidParameterValueException if the ISO does not exist or a parameter is invalid
 * @throws PermissionDeniedException if the caller may not modify the ISO
 */
@Override
public VMTemplateVO updateTemplate(UpdateIsoCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    final VMTemplateVO updated = updateTemplateOrIso(cmd);
    return updated;
}
/**
 * Updates a template; delegates to the shared template/ISO update logic.
 *
 * @param cmd the update command
 * @return the updated template row
 * @throws InvalidParameterValueException if the template does not exist or a parameter is invalid
 * @throws PermissionDeniedException if the caller may not modify the template
 */
@Override
public VMTemplateVO updateTemplate(UpdateTemplateCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    final VMTemplateVO updated = updateTemplateOrIso(cmd);
    return updated;
}
/**
 * Shared implementation for updating a template or ISO.
 *
 * Validates existence, protects the built-in system template (id 1), performs a
 * permission check (owner, root admin, or domain admin over the owner's domain),
 * then applies only the fields that were actually supplied on the command.
 *
 * @param cmd update command carrying the optional new field values
 * @return the freshly re-read template row after the update
 * @throws InvalidParameterValueException if the template is missing, is the system
 *         template, or a supplied value (format, guest OS) is invalid
 * @throws PermissionDeniedException if the caller may not modify this template
 */
private VMTemplateVO updateTemplateOrIso(UpdateTemplateOrIsoCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Long id = cmd.getId();
    String name = cmd.getTemplateName();
    String displayText = cmd.getDisplayText();
    String format = cmd.getFormat();
    Long guestOSId = cmd.getOsTypeId();
    Boolean passwordEnabled = cmd.isPasswordEnabled();
    Boolean bootable = cmd.isBootable();
    Account account = UserContext.current().getCaller();

    // verify that template exists
    VMTemplateVO template = findTemplateById(id);
    if (template == null) {
        throw new InvalidParameterValueException("unable to find template/iso with id " + id);
    }

    // Don't allow modification of the system template (id 1).
    // BUG FIX: the original used "id == Long.valueOf(1)", a boxed reference
    // comparison that only works because of the Long cache; compare primitives.
    if (id.longValue() == 1L) {
        throw new InvalidParameterValueException("Unable to update template/iso with id " + id);
    }

    // Permission check: non-admins must own the template; domain admins must
    // administer the owner's domain; root admins pass unconditionally.
    if (account != null) {
        Long templateOwner = template.getAccountId();
        if (!BaseCmd.isAdmin(account.getType())) {
            if ((templateOwner == null) || (account.getId() != templateOwner.longValue())) {
                throw new PermissionDeniedException("Unable to modify template/iso with id " + id + ", permission denied.");
            }
        } else if (account.getType() != Account.ACCOUNT_TYPE_ADMIN) {
            Long templateOwnerDomainId = findDomainIdByAccountId(templateOwner);
            if (!isChildDomain(account.getDomainId(), templateOwnerDomainId)) {
                throw new PermissionDeniedException("Unable to modify template/iso with id " + id + ", permission denied");
            }
        }
    }

    // Nothing to change: return the existing row untouched.
    boolean updateNeeded = !(name == null && displayText == null && format == null && guestOSId == null && passwordEnabled == null && bootable == null);
    if (!updateNeeded) {
        return template;
    }

    template = _templateDao.createForUpdate(id);

    if (name != null) {
        template.setName(name);
    }
    if (displayText != null) {
        template.setDisplayText(displayText);
    }
    if (format != null) {
        ImageFormat imageFormat;
        try {
            imageFormat = ImageFormat.valueOf(format.toUpperCase());
        } catch (IllegalArgumentException e) {
            throw new InvalidParameterValueException("Image format: " + format + " is incorrect. Supported formats are " + EnumUtils.listValues(ImageFormat.values()));
        }
        template.setFormat(imageFormat);
    }
    if (guestOSId != null) {
        GuestOSVO guestOS = _guestOSDao.findById(guestOSId);
        if (guestOS == null) {
            throw new InvalidParameterValueException("Please specify a valid guest OS ID.");
        } else {
            template.setGuestOSId(guestOSId);
        }
    }
    if (passwordEnabled != null) {
        template.setEnablePassword(passwordEnabled);
    }
    if (bootable != null) {
        template.setBootable(bootable);
    }

    _templateDao.update(id, template);
    // Re-read so the caller sees the persisted state.
    return _templateDao.findById(id);
}
/**
 * Copies a template between zones, reporting failure via the return value.
 *
 * Any exception thrown by the template manager is logged and converted to
 * {@code false}; this method never propagates the failure.
 *
 * @param userId id of the acting user
 * @param templateId id of the template to copy
 * @param sourceZoneId zone to copy from
 * @param destZoneId zone to copy to
 * @return true when the copy succeeded, false otherwise
 */
@Override
public boolean copyTemplate(long userId, long templateId, long sourceZoneId, long destZoneId) {
    try {
        return _tmpltMgr.copy(userId, templateId, sourceZoneId, destZoneId);
    } catch (Exception e) {
        s_logger.warn("Unable to copy template " + templateId + " from zone " + sourceZoneId + " to " + destZoneId , e);
        return false;
    }
}
/**
 * Looks up a template by id.
 *
 * @param templateId id of the template
 * @return the template, or null if none exists
 */
@Override
public VMTemplateVO findTemplateById(long templateId) {
    final VMTemplateVO template = _templateDao.findById(templateId);
    return template;
}
/**
 * Lists events visible to the caller, with optional filtering by type, level,
 * date range, keyword, account and domain.
 *
 * Scoping rules: a null caller or an admin-type caller may filter by domain
 * (validated as a child of the caller's domain) and optionally by account name;
 * a regular user is always restricted to their own account. When entryTime and
 * duration are both supplied, the method instead returns "pending" events via
 * listPendingEvents.
 *
 * @param cmd the list command carrying all filter parameters
 * @return matching events, newest first
 * @throws PermissionDeniedException if the requested domain is not under the caller's domain
 * @throws InvalidParameterValueException if entryTime is not greater than duration
 */
@Override
public List<EventVO> searchForEvents(ListEventsCmd cmd) throws PermissionDeniedException, InvalidParameterValueException {
    Account account = UserContext.current().getCaller();
    Long accountId = null;
    boolean isAdmin = false;
    String accountName = cmd.getAccountName();
    Long domainId = cmd.getDomainId();
    if ((account == null) || isAdmin(account.getType())) {
        isAdmin = true;
        // validate domainId before proceeding
        if (domainId != null) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list events.");
            }
            if (accountName != null) {
                Account userAccount = _accountDao.findAccount(accountName, domainId);
                if (userAccount != null) {
                    accountId = userAccount.getId();
                } else {
                    throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Unable to find account " + accountName + " in domain " + domainId);
                }
            }
        } else {
            // No domain given: default to the caller's domain (ROOT for the system caller).
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
    } else {
        // Regular users only ever see their own events.
        accountId = account.getId();
    }
    // Newest events first, paged.
    Filter searchFilter = new Filter(EventVO.class, "createDate", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object type = cmd.getType();
    Object level = cmd.getLevel();
    Date startDate = cmd.getStartDate();
    Date endDate = cmd.getEndDate();
    Object keyword = cmd.getKeyword();
    Integer entryTime = cmd.getEntryTime();
    Integer duration = cmd.getDuration();
    // entryTime + duration switches to the "pending events" query entirely.
    if ((entryTime != null) && (duration != null)) {
        if (entryTime <= duration){
            throw new InvalidParameterValueException("Entry time must be greater than duration");
        }
        return listPendingEvents(entryTime, duration);
    }
    // Pre-declare every possible condition; only the ones given parameters below take effect.
    SearchBuilder<EventVO> sb = _eventDao.createSearchBuilder();
    sb.and("levelL", sb.entity().getLevel(), SearchCriteria.Op.LIKE);
    sb.and("levelEQ", sb.entity().getLevel(), SearchCriteria.Op.EQ);
    sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
    sb.and("accountName", sb.entity().getAccountName(), SearchCriteria.Op.LIKE);
    sb.and("domainIdEQ", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
    sb.and("type", sb.entity().getType(), SearchCriteria.Op.EQ);
    sb.and("createDateB", sb.entity().getCreateDate(), SearchCriteria.Op.BETWEEN);
    sb.and("createDateG", sb.entity().getCreateDate(), SearchCriteria.Op.GTEQ);
    sb.and("createDateL", sb.entity().getCreateDate(), SearchCriteria.Op.LTEQ);
    if ((accountId == null) && (accountName == null) && (domainId != null) && isAdmin) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<EventVO> sc = sb.create();
    if (keyword != null) {
        // Keyword matches type, description or level.
        SearchCriteria<EventVO> ssc = _eventDao.createSearchCriteria();
        ssc.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("level", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("level", SearchCriteria.Op.SC, ssc);
    }
    if (level != null) {
        sc.setParameters("levelEQ", level);
    }
    if (accountId != null) {
        sc.setParameters("accountId", accountId);
    } else if (domainId != null) {
        if (accountName != null) {
            // Domain + account-name match (only non-removed events).
            sc.setParameters("domainIdEQ", domainId);
            sc.setParameters("accountName", "%" + accountName + "%");
            sc.addAnd("removed", SearchCriteria.Op.NULL);
        } else if (isAdmin) {
            // Domain-subtree match via the path prefix join set up above.
            DomainVO domain = _domainDao.findById(domainId);
            sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
        }
    }
    if (type != null) {
        sc.setParameters("type", type);
    }
    // Date filters: widen to whole days (00:00:00 .. 23:59:59).
    if (startDate != null && endDate != null) {
        startDate = massageDate(startDate, 0, 0, 0);
        endDate = massageDate(endDate, 23, 59, 59);
        sc.setParameters("createDateB", startDate, endDate);
    } else if (startDate != null) {
        startDate = massageDate(startDate, 0, 0, 0);
        sc.setParameters("createDateG", startDate);
    } else if (endDate != null) {
        endDate = massageDate(endDate, 23, 59, 59);
        sc.setParameters("createDateL", endDate);
    }
    return _eventDao.searchAllEvents(sc, searchFilter);
}
/**
 * Lists the domain routers running on a given host.
 *
 * @param hostId id of the host
 * @return routers on that host
 */
@Override
public List<DomainRouterVO> listRoutersByHostId(long hostId) {
    final List<DomainRouterVO> routers = _routerDao.listByHostId(hostId);
    return routers;
}
/**
 * Lists all domain routers known to the router DAO.
 *
 * @return all routers
 */
@Override
public List<DomainRouterVO> listAllActiveRouters() {
    final List<DomainRouterVO> routers = _routerDao.listAll();
    return routers;
}
/**
 * Lists domain routers visible to the caller, with optional filtering by name,
 * state, zone, pod, host and keyword.
 *
 * When a domainId is supplied it must be a child of the caller's domain; when
 * an account name is also supplied the search is restricted to that account.
 * Without an explicit domain, the caller's own domain (ROOT for the system
 * caller) is used for the domain-subtree match.
 *
 * @param cmd the list command carrying all filter parameters
 * @return matching routers, ordered by id
 * @throws PermissionDeniedException if the requested domain is not under the caller's domain
 * @throws InvalidParameterValueException declared but not thrown directly here
 */
@Override
public List<DomainRouterVO> searchForRouters(ListRoutersCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    Long accountId = null;
    Account account = UserContext.current().getCaller();
    // validate domainId before proceeding
    if (domainId != null) {
        if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
            throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list routers");
        }
        if (accountName != null) {
            Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
            if (userAccount != null) {
                accountId = userAccount.getId();
            } else {
                throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Unable to find account " + accountName + " in domain " + domainId);
            }
        }
    } else {
        // Default to the caller's own domain (ROOT for the system caller).
        domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
    }
    Filter searchFilter = new Filter(DomainRouterVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object name = cmd.getRouterName();
    Object state = cmd.getState();
    Object zone = cmd.getZoneId();
    Object pod = cmd.getPodId();
    Object hostId = cmd.getHostId();
    Object keyword = cmd.getKeyword();
    // Pre-declare conditions; only the ones parameterized below take effect.
    SearchBuilder<DomainRouterVO> sb = _routerDao.createSearchBuilder();
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.IN);
    sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("podId", sb.entity().getPodId(), SearchCriteria.Op.EQ);
    sb.and("hostId", sb.entity().getHostId(), SearchCriteria.Op.EQ);
    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<DomainRouterVO> sc = sb.create();
    if (keyword != null) {
        // Keyword matches name, instance name or state.
        SearchCriteria<DomainRouterVO> ssc = _routerDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("instanceName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (accountId != null) {
        sc.setParameters("accountId", accountId);
    } else if (domainId != null) {
        // Domain-subtree match via the path prefix join set up above.
        DomainVO domain = _domainDao.findById(domainId);
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
    }
    if (state != null) {
        sc.setParameters("state", state);
    }
    if (zone != null) {
        sc.setParameters("dataCenterId", zone);
    }
    if (pod != null) {
        sc.setParameters("podId", pod);
    }
    if (hostId != null) {
        sc.setParameters("hostId", hostId);
    }
    return _routerDao.search(sc, searchFilter);
}
/**
 * Lists console proxy VMs matching the supplied criteria.
 *
 * Supported criteria: ID, NAME, STATE, DATACENTERID, PODID, HOSTID and
 * KEYWORD (matched against name and state).
 *
 * @param c search criteria and paging/ordering information
 * @return the matching console proxies
 */
@Override
public List<ConsoleProxyVO> searchForConsoleProxy(Criteria c) {
    final Filter pageFilter = new Filter(ConsoleProxyVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    final SearchCriteria<ConsoleProxyVO> criteria = _consoleProxyDao.createSearchCriteria();

    final Object id = c.getCriteria(Criteria.ID);
    final Object name = c.getCriteria(Criteria.NAME);
    final Object state = c.getCriteria(Criteria.STATE);
    final Object zone = c.getCriteria(Criteria.DATACENTERID);
    final Object pod = c.getCriteria(Criteria.PODID);
    final Object hostId = c.getCriteria(Criteria.HOSTID);
    final Object keyword = c.getCriteria(Criteria.KEYWORD);

    if (keyword != null) {
        // Keyword matches either the proxy name or its state.
        final SearchCriteria<ConsoleProxyVO> keywordCriteria = _consoleProxyDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordCriteria.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (id != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    if (name != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    if (state != null) {
        criteria.addAnd("state", SearchCriteria.Op.EQ, state);
    }
    if (zone != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
    }
    if (pod != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, pod);
    }
    if (hostId != null) {
        criteria.addAnd("hostId", SearchCriteria.Op.EQ, hostId);
    }
    return _consoleProxyDao.search(criteria, pageFilter);
}
/**
 * Lists volumes visible to the caller, with optional filtering by id, name,
 * type, VM instance, zone and pod.
 *
 * Scoping rules: a null caller or an admin-type caller may filter by domain
 * (validated as a child of the caller's domain) and optionally by account name;
 * a regular user is always restricted to their own account and may not filter
 * by zone/pod. Volumes backed by SYSTEM-type templates are never returned, and
 * for normal users volumes attached to destroyed user VMs are hidden.
 *
 * @param cmd the list command carrying all filter parameters
 * @return matching, non-destroyed, non-system volumes (newest first)
 * @throws PermissionDeniedException if the requested domain is not under the caller's domain
 * @throws InvalidParameterValueException if the named account cannot be found in the domain
 */
@Override
public List<VolumeVO> searchForVolumes(ListVolumesCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    Long accountId = null;
    boolean isAdmin = false;

    if ((account == null) || isAdmin(account.getType())) {
        isAdmin = true;
        if (domainId != null) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list volumes.");
            }
            if (accountName != null) {
                Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
                if (userAccount != null) {
                    accountId = userAccount.getId();
                } else {
                    throw new InvalidParameterValueException("could not find account " + accountName + " in domain " + domainId);
                }
            }
        } else {
            // Default to the caller's own domain (ROOT for the system caller).
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
    } else {
        // Regular users only ever see their own volumes.
        accountId = account.getId();
    }

    // Newest volumes first, paged.
    Filter searchFilter = new Filter(VolumeVO.class, "created", false, cmd.getStartIndex(), cmd.getPageSizeVal());

    Object id = cmd.getId();
    Long vmInstanceId = cmd.getVirtualMachineId();
    Object name = cmd.getVolumeName();
    Object keyword = cmd.getKeyword();
    Object type = cmd.getType();
    Object zone = null;
    Object pod = null;
    //Object host = null; TODO
    if (isAdmin) {
        zone = cmd.getZoneId();
        pod = cmd.getPodId();
        // host = cmd.getHostId(); TODO
    } else {
        domainId = null;
    }

    // hack for now, this should be done better but due to needing a join I opted to
    // do this quickly and worry about making it pretty later
    SearchBuilder<VolumeVO> sb = _volumeDao.createSearchBuilder();
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("accountIdEQ", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
    sb.and("accountIdIN", sb.entity().getAccountId(), SearchCriteria.Op.IN);
    sb.and("volumeType", sb.entity().getVolumeType(), SearchCriteria.Op.LIKE);
    sb.and("instanceId", sb.entity().getInstanceId(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("podId", sb.entity().getPodId(), SearchCriteria.Op.EQ);
    // Don't return DomR and ConsoleProxy volumes
    sb.and("domRNameLabel", sb.entity().getName(), SearchCriteria.Op.NLIKE);
    sb.and("domPNameLabel", sb.entity().getName(), SearchCriteria.Op.NLIKE);
    sb.and("domSNameLabel", sb.entity().getName(), SearchCriteria.Op.NLIKE);
    // Only return Volumes that are in the "Created" state
    sb.and("status", sb.entity().getStatus(), SearchCriteria.Op.EQ);
    // Only return volumes that are not destroyed
    sb.and("destroyed", sb.entity().getDestroyed(), SearchCriteria.Op.EQ);

    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }

    // now set the SC criteria...
    SearchCriteria<VolumeVO> sc = sb.create();
    if (keyword != null) {
        // Keyword matches either the name or the volume type.
        SearchCriteria<VolumeVO> ssc = _volumeDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("volumeType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (id != null) {
        sc.setParameters("id", id);
    }
    if (accountId != null) {
        sc.setParameters("accountIdEQ", accountId);
    } else if (domainId != null) {
        // Domain-subtree match via the path prefix join set up above.
        DomainVO domain = _domainDao.findById(domainId);
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
    }
    if (type != null) {
        sc.setParameters("volumeType", "%" + type + "%");
    }
    if (vmInstanceId != null) {
        sc.setParameters("instanceId", vmInstanceId);
    }
    if (zone != null) {
        sc.setParameters("dataCenterId", zone);
    }
    if (pod != null) {
        sc.setParameters("podId", pod);
    }

    // Don't return DomR and ConsoleProxy volumes
    /*
    sc.setParameters("domRNameLabel", "r-%");
    sc.setParameters("domPNameLabel", "v-%");
    sc.setParameters("domSNameLabel", "s-%");
    */

    // Only return volumes that are not destroyed
    sc.setParameters("destroyed", false);

    List<VolumeVO> allVolumes = _volumeDao.search(sc, searchFilter);
    List<VolumeVO> returnableVolumes = new ArrayList<VolumeVO>(); // these are ones without domr and console proxy
    for (VolumeVO v : allVolumes) {
        VMTemplateVO template = _templateDao.findById(v.getTemplateId());
        if (template != null && (template.getTemplateType() == TemplateType.SYSTEM)) {
            // system-template-backed volumes are never shown
            continue;
        }
        if (v.getInstanceId() == null) {
            returnableVolumes.add(v);
            continue;
        }
        // BUG FIX: the original dereferenced account.getType() without a null
        // check; account is null for the system caller (admin path above),
        // which caused an NPE for any attached volume.
        if (account != null && account.getType() == Account.ACCOUNT_TYPE_NORMAL) {
            // Do not show volumes attached to a destroyed user VM to normal users.
            VMInstanceVO owningVm = _vmInstanceDao.findById(v.getInstanceId());
            if (owningVm != null && owningVm.getType().equals(VirtualMachine.Type.User) && owningVm.getState().equals(VirtualMachine.State.Destroyed)) {
                // hidden: belongs to a destroyed user VM
            } else {
                returnableVolumes.add(v);
            }
        } else {
            returnableVolumes.add(v);
        }
    }
    return returnableVolumes;
}
/**
 * Finds the live (not destroyed, not removed) volume attached to a VM at a
 * given device slot.
 *
 * @param instanceId id of the VM instance
 * @param deviceId device slot of the volume on that VM
 * @return the matching live volume, or null when none exists or it is destroyed/removed
 */
@Override
public VolumeVO findVolumeByInstanceAndDeviceId(long instanceId, long deviceId) {
    // BUG FIX: the original called .get(0) unconditionally, which throws
    // IndexOutOfBoundsException when no volume matches; its null check after
    // the call could never cover that case.
    List<VolumeVO> volumes = _volumeDao.findByInstanceAndDeviceId(instanceId, deviceId);
    if (volumes == null || volumes.isEmpty()) {
        return null;
    }
    VolumeVO volume = volumes.get(0);
    if (volume != null && !volume.getDestroyed() && volume.getRemoved() == null) {
        return volume;
    }
    return null;
}
/**
 * Looks up a pod by id.
 *
 * @param podId id of the pod
 * @return the pod, or null if none exists
 */
@Override
public HostPodVO findHostPodById(long podId) {
    final HostPodVO pod = _hostPodDao.findById(podId);
    return pod;
}
/**
 * Returns the secondary storage host for a zone.
 *
 * NOTE(review): the method name's trailing capital "T" is a typo, but it is
 * part of the implemented interface and cannot be renamed here.
 *
 * @param zoneId id of the zone
 * @return the zone's secondary storage host
 */
@Override
public HostVO findSecondaryStorageHosT(long zoneId) {
    final HostVO secondaryStorageHost = _storageMgr.getSecondaryStorageHost(zoneId);
    return secondaryStorageHost;
}
/**
 * Lists public IP addresses visible to the caller, with optional filtering by
 * zone, address, VLAN, keyword and virtual-network type.
 *
 * Scoping rules: a null caller or an admin-type caller may filter by domain
 * (validated as a child of the caller's domain) and optionally by account name;
 * a regular user is always restricted to their own account. Unless
 * isAllocatedOnly is explicitly false, only allocated addresses are returned.
 *
 * @param cmd the list command carrying all filter parameters
 * @return matching public IP addresses, ordered by address descending
 * @throws PermissionDeniedException if the requested domain is not under the caller's domain
 * @throws InvalidParameterValueException if the named account cannot be found in the domain
 */
@Override
public List<IPAddressVO> searchForIPAddresses(ListPublicIpAddressesCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    Long accountId = null;
    if ((account == null) || isAdmin(account.getType())) {
        // validate domainId before proceeding
        if (domainId != null) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Unable to list IP addresses for domain " + domainId + ", permission denied.");
            }
            if (accountName != null) {
                Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
                if (userAccount != null) {
                    accountId = userAccount.getId();
                } else {
                    throw new InvalidParameterValueException("Unable to find account " + accountName + " in domain " + domainId);
                }
            }
        } else {
            // Default to the caller's own domain (ROOT for the system caller).
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
    } else {
        // Regular users only ever see their own addresses.
        accountId = account.getId();
    }
    // Default to listing allocated addresses only.
    Boolean isAllocated = cmd.isAllocatedOnly();
    if (isAllocated == null) {
        isAllocated = Boolean.TRUE;
    }
    Filter searchFilter = new Filter(IPAddressVO.class, "address", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object zone = cmd.getZoneId();
    Object address = cmd.getIpAddress();
    Object vlan = cmd.getVlanId();
    Object keyword = cmd.getKeyword();
    Object forVirtualNetwork = cmd.isForVirtualNetwork();
    // Pre-declare conditions; only the ones parameterized below take effect.
    SearchBuilder<IPAddressVO> sb = _publicIpAddressDao.createSearchBuilder();
    sb.and("accountIdEQ", sb.entity().getAllocatedToAccountId(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("address", sb.entity().getAddress(), SearchCriteria.Op.EQ);
    sb.and("vlanDbId", sb.entity().getVlanId(), SearchCriteria.Op.EQ);
    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getAllocatedInDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    if (forVirtualNetwork != null) {
        // Join to the VLAN table so we can filter by VLAN type below.
        SearchBuilder<VlanVO> vlanSearch = _vlanDao.createSearchBuilder();
        vlanSearch.and("vlanType", vlanSearch.entity().getVlanType(), SearchCriteria.Op.EQ);
        sb.join("vlanSearch", vlanSearch, sb.entity().getVlanId(), vlanSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    if ((isAllocated != null) && (isAllocated == true)) {
        sb.and("allocated", sb.entity().getAllocatedTime(), SearchCriteria.Op.NNULL);
    }
    SearchCriteria<IPAddressVO> sc = sb.create();
    if (accountId != null) {
        sc.setParameters("accountIdEQ", accountId);
    } else if (domainId != null) {
        // Domain-subtree match via the path prefix join set up above.
        DomainVO domain = _domainDao.findById(domainId);
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
    }
    if (forVirtualNetwork != null) {
        VlanType vlanType = (Boolean) forVirtualNetwork ? VlanType.VirtualNetwork : VlanType.DirectAttached;
        sc.setJoinParameters("vlanSearch", "vlanType", vlanType);
    }
    if (zone != null) {
        sc.setParameters("dataCenterId", zone);
    }
    // A bare keyword doubles as an exact-address filter.
    if ((address == null) && (keyword != null)) {
        address = keyword;
    }
    if (address != null) {
        sc.setParameters("address", address);
    }
    if (vlan != null) {
        sc.setParameters("vlanDbId", vlan);
    }
    return _publicIpAddressDao.search(sc, searchFilter);
}
/**
 * Authenticates a user either by password or by single-sign-on (SSO) request
 * signature.
 *
 * With a non-null password, delegates straight to the password check. Without
 * one, the request is validated as an SSO login: the "timestamp" parameter must
 * be within the configured tolerance of the server clock, and the
 * HMAC-SHA1 signature of the sorted, lower-cased, URL-encoded parameter string
 * (excluding "signature" itself) must match the "signature" parameter, keyed by
 * the configured security.singlesignon.key.
 *
 * @param username login name
 * @param password plaintext password, or null to use SSO signature authentication
 * @param domainId domain to authenticate within
 * @param requestParameters raw request parameters as name -> String[] values
 * @return the authenticated user account, or null on any authentication failure
 */
@Override
public UserAccount authenticateUser(String username, String password, Long domainId, Map<String, Object[]> requestParameters) {
    UserAccount user = null;
    if (password != null) {
        user = getUserAccount(username, password, domainId);
    } else {
        String key = getConfigurationValue("security.singlesignon.key");
        if (key == null) {
            // the SSO key is gone, don't authenticate
            return null;
        }
        String singleSignOnTolerance = getConfigurationValue("security.singlesignon.tolerance.millis");
        if (singleSignOnTolerance == null) {
            // the SSO tolerance is gone (how much time before/after system time we'll allow the login request to be valid), don't authenticate
            return null;
        }
        long tolerance = Long.parseLong(singleSignOnTolerance);
        String signature = null;
        long timestamp = 0L;
        String unsignedRequest = null;
        // - build a request string with sorted params, make sure it's all lowercase
        // - sign the request, verify the signature is the same
        List<String> parameterNames = new ArrayList<String>();
        for (Object paramNameObj : requestParameters.keySet()) {
            parameterNames.add((String)paramNameObj); // put the name in a list that we'll sort later
        }
        Collections.sort(parameterNames);
        try {
            for (String paramName : parameterNames) {
                // parameters come as name/value pairs in the form String/String[]
                String paramValue = ((String[])requestParameters.get(paramName))[0];
                if ("signature".equalsIgnoreCase(paramName)) {
                    // The signature itself is never part of the signed string.
                    signature = paramValue;
                } else {
                    if ("timestamp".equalsIgnoreCase(paramName)) {
                        String timestampStr = paramValue;
                        try {
                            // If the timestamp is in a valid range according to our tolerance, verify the request signature, otherwise return null to indicate authentication failure
                            timestamp = Long.parseLong(timestampStr);
                            long currentTime = System.currentTimeMillis();
                            if (Math.abs(currentTime - timestamp) > tolerance) {
                                if (s_logger.isDebugEnabled()) {
                                    s_logger.debug("Expired timestamp passed in to login, current time = " + currentTime + ", timestamp = " + timestamp);
                                }
                                return null;
                            }
                        } catch (NumberFormatException nfe) {
                            if (s_logger.isDebugEnabled()) {
                                s_logger.debug("Invalid timestamp passed in to login: " + timestampStr);
                            }
                            return null;
                        }
                    }
                    // Append "name=value" (URL-encoded, '+' normalized to "%20") to the string to be signed.
                    if (unsignedRequest == null) {
                        unsignedRequest = paramName + "=" + URLEncoder.encode(paramValue, "UTF-8").replaceAll("\\+", "%20");
                    } else {
                        unsignedRequest = unsignedRequest + "&" + paramName + "=" + URLEncoder.encode(paramValue, "UTF-8").replaceAll("\\+", "%20");
                    }
                }
            }
            if ((signature == null) || (timestamp == 0L)) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Missing parameters in login request, signature = " + signature + ", timestamp = " + timestamp);
                }
                return null;
            }
            // NOTE(review): toLowerCase() and getBytes() use the default locale/charset;
            // clients must sign with the same conventions. Changing to Locale.ROOT /
            // an explicit charset would break existing signers — confirm before touching.
            unsignedRequest = unsignedRequest.toLowerCase();
            Mac mac = Mac.getInstance("HmacSHA1");
            SecretKeySpec keySpec = new SecretKeySpec(key.getBytes(), "HmacSHA1");
            mac.init(keySpec);
            mac.update(unsignedRequest.getBytes());
            byte[] encryptedBytes = mac.doFinal();
            String computedSignature = new String(Base64.encodeBase64(encryptedBytes));
            // NOTE(review): String.equals is not constant-time; a timing-safe compare
            // (e.g. MessageDigest.isEqual) would be preferable — confirm and fix separately.
            boolean equalSig = signature.equals(computedSignature);
            if (!equalSig) {
                s_logger.info("User signature: " + signature + " is not equaled to computed signature: " + computedSignature);
            } else {
                user = getUserAccount(username, domainId);
            }
        } catch (Exception ex) {
            s_logger.error("Exception authenticating user", ex);
            return null;
        }
    }
    if (user != null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("User: " + username + " in domain " + domainId + " has successfully logged in");
        }
        return user;
    } else {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("User: " + username + " in domain " + domainId + " has failed to log in");
        }
        return null;
    }
}
/**
 * Records a logout event for the given user.
 *
 * If the user no longer exists, no event is written (this likely means the
 * user was deleted while logged in).
 *
 * @param userId id of the user logging out
 */
@Override
public void logoutUser(Long userId) {
    final UserAccount userAcct = _userAccountDao.findById(userId);
    if (userAcct == null) {
        // else log some kind of error event? This likely means the user doesn't exist, or has been deleted...
        return;
    }
    EventUtils.saveEvent(userId, userAcct.getAccountId(), EventTypes.EVENT_USER_LOGOUT, "user has logged out");
}
/**
 * Lists every template, including removed ones.
 *
 * @return all templates (removed rows included)
 */
@Override
public List<VMTemplateVO> listAllTemplates() {
    final List<VMTemplateVO> templates = _templateDao.listAllIncludingRemoved();
    return templates;
}
/**
 * Lists guest OS entries, optionally filtered by id and/or OS category.
 *
 * @param cmd the list command (id and osCategoryId filters, paging)
 * @return matching guest OS rows, ordered by display name
 */
@Override
public List<GuestOSVO> listGuestOSByCriteria(ListGuestOsCmd cmd) {
    final Filter pageFilter = new Filter(GuestOSVO.class, "displayName", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    final Long guestOsId = cmd.getId();
    final Long categoryId = cmd.getOsCategoryId();

    final SearchBuilder<GuestOSVO> builder = _guestOSDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);
    builder.and("categoryId", builder.entity().getCategoryId(), SearchCriteria.Op.EQ);

    final SearchCriteria<GuestOSVO> criteria = builder.create();
    if (guestOsId != null) {
        criteria.setParameters("id", guestOsId);
    }
    if (categoryId != null) {
        criteria.setParameters("categoryId", categoryId);
    }
    return _guestOSDao.search(criteria, pageFilter);
}
/**
 * Lists guest OS categories, optionally filtered by id.
 *
 * @param cmd the list command (id filter, paging)
 * @return matching guest OS categories, ordered by id
 */
@Override
public List<GuestOSCategoryVO> listGuestOSCategoriesByCriteria(ListGuestOsCategoriesCmd cmd) {
    final Filter pageFilter = new Filter(GuestOSCategoryVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    final Long categoryId = cmd.getId();

    final SearchBuilder<GuestOSCategoryVO> builder = _guestOSCategoryDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);

    final SearchCriteria<GuestOSCategoryVO> criteria = builder.create();
    if (categoryId != null) {
        criteria.setParameters("id", categoryId);
    }
    return _guestOSCategoryDao.search(criteria, pageFilter);
}
/**
 * Reads a configuration value by name.
 *
 * @param name configuration key
 * @return the configured value, or null if the key is not set
 */
@Override
public String getConfigurationValue(String name) {
    final String value = _configDao.getValue(name);
    return value;
}
/**
 * Assigns a console proxy for a user VM in the given zone.
 *
 * @param dataCenterId id of the zone
 * @param userVmId id of the user VM needing console access
 * @return info for the assigned console proxy
 */
@Override
public ConsoleProxyInfo getConsoleProxy(long dataCenterId, long userVmId) {
    final ConsoleProxyInfo proxyInfo = _consoleProxyMgr.assignProxy(dataCenterId, userVmId);
    return proxyInfo;
}
/**
 * Starts a console proxy VM.
 *
 * @param instanceId id of the proxy instance
 * @return the started proxy
 */
@Override
public ConsoleProxyVO startConsoleProxy(long instanceId) {
    final ConsoleProxyVO proxy = _consoleProxyMgr.startProxy(instanceId);
    return proxy;
}
/**
 * Stops a console proxy VM and returns its (re-read) record.
 *
 * @param instanceId id of the proxy instance
 * @return the proxy row after the stop was issued
 */
@Override
public ConsoleProxyVO stopConsoleProxy(long instanceId) {
    _consoleProxyMgr.stopProxy(instanceId);
    final ConsoleProxyVO proxy = _consoleProxyDao.findById(instanceId);
    return proxy;
}
/**
 * Reboots a console proxy VM and returns its (re-read) record.
 *
 * @param instanceId id of the proxy instance
 * @return the proxy row after the reboot was issued
 */
@Override
public ConsoleProxyVO rebootConsoleProxy(long instanceId) {
    _consoleProxyMgr.rebootProxy(instanceId);
    final ConsoleProxyVO proxy = _consoleProxyDao.findById(instanceId);
    return proxy;
}
/**
 * Returns the console-access image URL root for a VM.
 *
 * @param vmId id of the VM
 * @return the proxy image URL, or null when the VM or its proxy cannot be found
 */
@Override
public String getConsoleAccessUrlRoot(long vmId) {
    final VMInstanceVO vm = this.findVMInstanceById(vmId);
    if (vm == null) {
        return null;
    }
    final ConsoleProxyInfo proxy = getConsoleProxy(vm.getDataCenterId(), vmId);
    return (proxy == null) ? null : proxy.getProxyImageUrl();
}
/**
 * Retrieves the VNC address/port for a VM by asking the agent on its host.
 *
 * @param vm the VM to query
 * @return (address, port), or (null, -1) when the VM has no host or the agent
 *         call fails or reports failure
 */
@Override
public Pair<String, Integer> getVncPort(VirtualMachine vm) {
    if (vm.getHostId() == null) {
        s_logger.warn("VM " + vm.getName() + " does not have host, return -1 for its VNC port");
        return new Pair<String, Integer>(null, -1);
    }
    if (s_logger.isTraceEnabled()) {
        s_logger.trace("Trying to retrieve VNC port from agent about VM " + vm.getName());
    }
    final GetVncPortCommand cmd = new GetVncPortCommand(vm.getId(), vm.getInstanceName());
    final GetVncPortAnswer answer = (GetVncPortAnswer) _agentMgr.easySend(vm.getHostId(), cmd);
    if (answer == null || !answer.getResult()) {
        return new Pair<String, Integer>(null, -1);
    }
    return new Pair<String, Integer>(answer.getAddress(), answer.getPort());
}
/**
 * Looks up a console proxy by id.
 *
 * @param instanceId id of the proxy instance
 * @return the proxy, or null if none exists
 */
@Override
public ConsoleProxyVO findConsoleProxyById(long instanceId) {
    final ConsoleProxyVO proxy = _consoleProxyDao.findById(instanceId);
    return proxy;
}
/**
 * Lists domains matching the command's filters (id, name, level, keyword).
 *
 * A domain-admin caller is additionally restricted to domains whose path
 * contains their own domain's path; other caller types are not path-restricted
 * by this method.
 *
 * @param cmd the list command carrying all filter parameters
 * @return matching domains, ordered by id
 * @throws PermissionDeniedException declared but not thrown directly here
 */
@Override
public List<DomainVO> searchForDomains(ListDomainsCmd cmd) throws PermissionDeniedException {
    Long domainId = cmd.getId();
    Account account = UserContext.current().getCaller();
    String path = null;
    // Domain admins only see domains under (whose path contains) their own path.
    if (account != null && account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) {
        DomainVO domain = _domainDao.findById(account.getDomainId());
        if (domain != null) {
            path = domain.getPath();
        }
    }
    Filter searchFilter = new Filter(DomainVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    String domainName = cmd.getDomainName();
    Integer level = cmd.getLevel();
    Object keyword = cmd.getKeyword();
    SearchBuilder<DomainVO> sb = _domainDao.createSearchBuilder();
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("level", sb.entity().getLevel(), SearchCriteria.Op.EQ);
    sb.and("path", sb.entity().getPath(), SearchCriteria.Op.LIKE);
    SearchCriteria<DomainVO> sc = sb.create();
    if (keyword != null) {
        SearchCriteria<DomainVO> ssc = _domainDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (domainName != null) {
        sc.setParameters("name", "%" + domainName + "%");
    }
    if (level != null) {
        sc.setParameters("level", level);
    }
    if (domainId != null) {
        sc.setParameters("id", domainId);
    }
    if (path != null) {
        sc.setParameters("path", "%" +path+"%");
    }
    return _domainDao.search(sc, searchFilter);
}
/**
 * Lists the children of a domain, optionally recursively.
 *
 * When the caller supplies a domainId it must be a child of the caller's
 * domain; otherwise the caller's own domain is used. In recursive mode the
 * search switches from a parent-id match to a path-prefix match.
 *
 * @param cmd the list command (id, name, recursive flag, keyword, paging)
 * @return matching child domains, ordered by id
 * @throws PermissionDeniedException if the requested domain is not under the caller's domain
 */
@Override
public List<DomainVO> searchForDomainChildren(ListDomainChildrenCmd cmd) throws PermissionDeniedException {
    Filter searchFilter = new Filter(DomainVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Long domainId = cmd.getId();
    String domainName = cmd.getDomainName();
    Boolean isRecursive = cmd.isRecursive();
    Object keyword = cmd.getKeyword();
    String path = null;
    if (isRecursive == null) {
        isRecursive = false;
    }
    Account account = UserContext.current().getCaller();
    if (account != null) {
        if (domainId != null) {
            if (!_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Unable to list domains children for domain id " + domainId + ", permission denied.");
            }
        } else {
            domainId = account.getDomainId();
        }
    }
    // NOTE(review): when account is null and the command carries no id, domainId
    // stays null here and is passed to _domainDao.findById — confirm the DAO
    // tolerates a null id in that (system-caller) case.
    DomainVO domain = _domainDao.findById(domainId);
    if (domain != null && isRecursive) {
        // Recursive: match by path prefix instead of direct parent id.
        path = domain.getPath();
        domainId = null;
    }
    List<DomainVO> domainList = searchForDomainChildren(searchFilter, domainId, domainName,
        keyword, path);
    return domainList;
}
/**
 * Builds and runs the child-domain query. Exactly one of {@code domainId}
 * (direct children) or {@code path} (whole subtree by path prefix, excluding
 * the domain itself) is expected to be non-null by the public caller.
 */
private List<DomainVO> searchForDomainChildren(Filter searchFilter,
    Long domainId, String domainName, Object keyword, String path) {
    final SearchCriteria<DomainVO> criteria = _domainDao.createSearchCriteria();
    if (keyword != null) {
        final SearchCriteria<DomainVO> keywordCriteria = _domainDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (domainId != null) {
        criteria.addAnd("parent", SearchCriteria.Op.EQ, domainId);
    }
    if (domainName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + domainName + "%");
    }
    if (path != null) {
        // Descendants share the path prefix; the NEQ clause drops the root of the subtree.
        criteria.addAnd("path", SearchCriteria.Op.NEQ, path);
        criteria.addAnd("path", SearchCriteria.Op.LIKE, path + "%");
    }
    return _domainDao.search(criteria, searchFilter);
}
/**
 * Creates a new domain under the given parent (defaulting to ROOT), after
 * verifying the parent exists, the caller may create under it, and no sibling
 * with the same name already exists.
 *
 * @param cmd carries the domain name and optional parent domain id
 * @return the persisted domain
 * @throws InvalidParameterValueException if the parent is missing or the name
 *         is already taken under that parent
 * @throws PermissionDeniedException if the parent is outside the caller's subtree
 */
@Override
public DomainVO createDomain(CreateDomainCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
String name = cmd.getDomainName();
Long parentId = cmd.getParentDomainId();
Long ownerId = UserContext.current().getCaller().getId();
Account account = UserContext.current().getCaller();
// Fall back to the system owner (id 1) when no caller id is available.
if (ownerId == null) {
ownerId = Long.valueOf(1);
}
// Default parent is the ROOT domain.
if (parentId == null) {
parentId = Long.valueOf(DomainVO.ROOT_DOMAIN);
}
DomainVO parentDomain = _domainDao.findById(parentId);
if (parentDomain == null) {
throw new InvalidParameterValueException("Unable to create domain " + name + ", parent domain " + parentId + " not found.");
}
// The caller may only create domains inside its own subtree.
if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), parentId)) {
throw new PermissionDeniedException("Unable to create domain " + name + ", permission denied.");
}
// Enforce name uniqueness among siblings of the same parent.
SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
sc.addAnd("name", SearchCriteria.Op.EQ, name);
sc.addAnd("parent", SearchCriteria.Op.EQ, parentId);
List<DomainVO> domains = _domainDao.search(sc, null);
if ((domains == null) || domains.isEmpty()) {
DomainVO domain = new DomainVO(name, ownerId, parentId);
try {
return _domainDao.create(domain);
} catch (IllegalArgumentException ex) {
s_logger.warn("Failed to create domain ", ex);
throw ex;
}
} else {
throw new InvalidParameterValueException("Domain with name " + name + " already exists for the parent id=" + parentId);
}
}
/**
 * Deletes a domain. With the cleanup flag set, sub-domains and accounts are
 * removed recursively first; otherwise the domain must already be empty.
 * The ROOT domain can never be deleted, and callers may only delete domains
 * inside their own domain subtree.
 *
 * @param cmd carries the domain id and the optional cleanup flag
 * @return true on success, false when deletion (or cleanup) failed
 * @throws InvalidParameterValueException if the domain does not exist
 * @throws PermissionDeniedException for ROOT or out-of-subtree targets
 */
@Override
public boolean deleteDomain(DeleteDomainCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getId();
    Boolean cleanup = cmd.getCleanup();
    // ROOT is protected; everything else must lie inside the caller's subtree.
    if ((domainId == DomainVO.ROOT_DOMAIN) || ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId))) {
        throw new PermissionDeniedException("Unable to delete domain " + domainId + ", permission denied.");
    }
    try {
        DomainVO domain = _domainDao.findById(domainId);
        if (domain != null) {
            long ownerId = domain.getAccountId();
            if ((cleanup != null) && cleanup.booleanValue()) {
                // Recursively tear down sub-domains and accounts before removing this domain.
                boolean success = cleanupDomain(domainId, ownerId);
                if (!success) {
                    s_logger.error("Failed to clean up domain resources and sub domains, delete failed on domain " + domain.getName() + " (id: " + domainId + ").");
                    return false;
                }
            } else {
                // Without cleanup the DAO refuses to remove a non-empty domain.
                if (!_domainDao.remove(domainId)) {
                    s_logger.error("Delete failed on domain " + domain.getName() + " (id: " + domainId + "); please make sure all users and sub domains have been removed from the domain before deleting");
                    return false;
                }
            }
        } else {
            // Fixed previously garbled message ("Failed to delete domain nable ...").
            throw new InvalidParameterValueException("Unable to delete domain " + domainId + ", domain not found");
        }
        return true;
    } catch (InvalidParameterValueException ex) {
        throw ex;
    } catch (Exception ex) {
        s_logger.error("Exception deleting domain with id " + domainId, ex);
        return false;
    }
}
/**
 * Recursively removes a domain's sub-domains, then its accounts (which in turn
 * releases those accounts' resources), and finally the domain row itself.
 * Failures are accumulated: the method keeps going and reports overall success.
 *
 * @param domainId the domain being cleaned up
 * @param ownerId the domain owner's account id (passed along recursively; not
 *                used directly in this body)
 * @return true only if every sub-domain, account cleanup, and the final
 *         domain removal all succeeded
 */
private boolean cleanupDomain(Long domainId, Long ownerId) throws ConcurrentOperationException, ResourceUnavailableException{
boolean success = true;
{
SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
sc.addAnd("parent", SearchCriteria.Op.EQ, domainId);
List<DomainVO> domains = _domainDao.search(sc, null);
// cleanup sub-domains first
for (DomainVO domain : domains) {
success = (success && cleanupDomain(domain.getId(), domain.getAccountId()));
}
}
{
// delete users which will also delete accounts and release resources for those accounts
SearchCriteria<AccountVO> sc = _accountDao.createSearchCriteria();
sc.addAnd("domainId", SearchCriteria.Op.EQ, domainId);
List<AccountVO> accounts = _accountDao.search(sc, null);
for (AccountVO account : accounts) {
success = (success && _accountMgr.cleanupAccount(account, UserContext.current().getCallerUserId(), UserContext.current().getCaller()));
}
}
// delete the domain itself
boolean deleteDomainSuccess = _domainDao.remove(domainId);
return success && deleteDomainSuccess;
}
/**
 * Renames a domain. Editing ROOT is forbidden, the caller must own the
 * domain's subtree, the new name must be globally unused, and on success the
 * domain's path plus all descendant paths are rewritten to match.
 *
 * @param cmd carries the domain id and the new name (a null or unchanged name
 *            is a no-op returning the current row)
 * @return the (possibly updated) domain
 * @throws InvalidParameterValueException for a missing domain or a ROOT target
 * @throws PermissionDeniedException for out-of-subtree targets
 */
@Override
public DomainVO updateDomain(UpdateDomainCmd cmd) throws InvalidParameterValueException, PermissionDeniedException{
Long domainId = cmd.getId();
String domainName = cmd.getDomainName();
//check if domain exists in the system
DomainVO domain = _domainDao.findById(domainId);
if (domain == null) {
throw new InvalidParameterValueException("Unable to find domain " + domainId);
} else if (domain.getParent() == null) {
//check if domain is ROOT domain - and deny to edit it
throw new InvalidParameterValueException("ROOT domain can not be edited");
}
// check permissions
Account account = UserContext.current().getCaller();
if ((account != null) && !isChildDomain(account.getDomainId(), domain.getId())) {
throw new PermissionDeniedException("Unable to update domain " + domainId + ", permission denied");
}
// Unchanged (or absent) name: nothing to do.
if (domainName == null || domainName.equals(domain.getName())) {
return _domainDao.findById(domainId);
}
// NOTE(review): this uniqueness check is global (by name only, not per-parent),
// unlike createDomain's per-parent check — confirm this asymmetry is intended.
SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
sc.addAnd("name", SearchCriteria.Op.EQ, domainName);
List<DomainVO> domains = _domainDao.search(sc, null);
if ((domains == null) || domains.isEmpty()) {
//whilst updating a domain name, update its path and update all its children's path
domain = _domainDao.findById(domainId);
String updatedDomainPath = getUpdatedDomainPath(domain.getPath(),domainName);
updateDomainChildren(domain,updatedDomainPath);
_domainDao.update(domainId, domainName, updatedDomainPath);
return _domainDao.findById(domainId);
} else {
domain = _domainDao.findById(domainId);
s_logger.error("Domain with name " + domainName + " already exists in the system");
throw new CloudRuntimeException("Failed to update domain " + domainId);
}
}
/**
 * Returns {@code oldPath} with its last "/"-separated component replaced by
 * {@code newName}, re-joined with a trailing slash (matching the stored
 * domain-path format, e.g. "/ROOT/child/").
 */
private String getUpdatedDomainPath(String oldPath, String newName){
    final String[] components = oldPath.split("/");
    components[components.length - 1] = newName;
    final StringBuilder rebuilt = new StringBuilder();
    for (int i = 0; i < components.length; i++) {
        rebuilt.append(components[i]).append("/");
    }
    return rebuilt.toString();
}
/**
 * Rewrites the path of every descendant of {@code domain}, replacing the old
 * path prefix with {@code updatedDomainPrefix}, and persists each child row.
 */
private void updateDomainChildren(DomainVO domain, String updatedDomainPrefix){
    final String oldPrefix = domain.getPath();
    final List<DomainVO> children = _domainDao.findAllChildren(oldPrefix, domain.getId());
    for (DomainVO child : children) {
        // Only the leading occurrence is the parent prefix; replace just that.
        child.setPath(child.getPath().replaceFirst(oldPrefix, updatedDomainPrefix));
        _domainDao.update(child.getId(), child);
    }
}
@Override
public Long findDomainIdByAccountId(Long accountId) {
    // Resolve the account, then report its domain; null in -> null out.
    if (accountId == null) {
        return null;
    }
    final AccountVO account = _accountDao.findById(accountId);
    return (account == null) ? null : account.getDomainId();
}
@Override
public DomainVO findDomainByPath(String domainPath) {
    // Path-based lookup handled entirely by the domain DAO.
    final DomainVO match = _domainDao.findDomainByPath(domainPath);
    return match;
}
/**
 * Lists alerts, newest first (ordered by lastSent descending), optionally
 * filtered by alert type and by a keyword substring match on the subject.
 */
@Override
public List<? extends Alert> searchForAlerts(ListAlertsCmd cmd) {
    final Filter pageFilter = new Filter(AlertVO.class, "lastSent", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    final SearchCriteria<AlertVO> criteria = _alertDao.createSearchCriteria();

    final Object alertType = cmd.getType();
    final Object keyword = cmd.getKeyword();

    if (keyword != null) {
        final SearchCriteria<AlertVO> subjectCriteria = _alertDao.createSearchCriteria();
        subjectCriteria.addOr("subject", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        criteria.addAnd("subject", SearchCriteria.Op.SC, subjectCriteria);
    }
    if (alertType != null) {
        criteria.addAnd("type", SearchCriteria.Op.EQ, alertType);
    }
    return _alertDao.search(criteria, pageFilter);
}
/**
 * Lists capacity rows, optionally filtered by capacity type, zone, pod and
 * host/pool. Capacity is recalculated first so the returned numbers are
 * current.
 *
 * @param cmd carries the optional filters plus paging parameters
 * @return matching capacity rows ordered by capacity type
 */
@Override
public List<CapacityVO> listCapacities(ListCapacityCmd cmd) {
// make sure capacity is accurate before displaying it anywhere
// NOTE: listCapacities is currently called by the UI only, so this
// shouldn't be called much since it checks all hosts/VMs
// to figure out what has been allocated.
_alertMgr.recalculateCapacity();
Filter searchFilter = new Filter(CapacityVO.class, "capacityType", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<CapacityVO> sc = _capacityDao.createSearchCriteria();
Object type = cmd.getType();
Object zoneId = cmd.getZoneId();
Object podId = cmd.getPodId();
Object hostId = cmd.getHostId();
// Each filter is applied only when supplied by the caller.
if (type != null) {
sc.addAnd("capacityType", SearchCriteria.Op.EQ, type);
}
if (zoneId != null) {
sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
}
if (podId != null) {
sc.addAnd("podId", SearchCriteria.Op.EQ, podId);
}
if (hostId != null) {
// The column stores either a host or a pool id, depending on capacity type.
sc.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, hostId);
}
return _capacityDao.search(sc, searchFilter);
}
/**
 * Sums the RAM (in bytes) consumed by VMs running on a host: system VMs
 * (routers, secondary-storage VMs, console proxies) at their configured fixed
 * sizes, plus each user VM at its service offering's RAM size. Offerings that
 * cannot be resolved are skipped.
 */
@Override
public long getMemoryUsagebyHost(Long hostId) {
    long totalMem = 0;

    // System VMs: count per type, multiply by the configured RAM size (MB -> bytes).
    List<VMInstanceVO> vms = _vmInstanceDao.listUpByHostIdTypes(hostId, VirtualMachine.Type.DomainRouter);
    totalMem += vms.size() * _routerRamSize * 1024L * 1024L;

    vms = _vmInstanceDao.listUpByHostIdTypes(hostId, VirtualMachine.Type.SecondaryStorageVm);
    totalMem += vms.size() * _ssRamSize * 1024L * 1024L;

    vms = _vmInstanceDao.listUpByHostIdTypes(hostId, VirtualMachine.Type.ConsoleProxy);
    totalMem += vms.size() * _proxyRamSize * 1024L * 1024L;

    // User VMs: RAM comes from each VM's service offering.
    for (UserVmVO userVm : _userVmDao.listUpByHostId(hostId)) {
        final ServiceOffering offering = findServiceOfferingById(userVm.getServiceOfferingId());
        if (offering != null) {
            totalMem += offering.getRamSize() * 1024L * 1024L;
        }
    }
    return totalMem;
}
@Override
public DiskOfferingVO findDiskOfferingById(long diskOfferingId) {
    // Primary-key lookup through the disk offering DAO.
    final DiskOfferingVO offering = _diskOfferingDao.findById(diskOfferingId);
    return offering;
}
@Override
public List<DiskOfferingVO> findPrivateDiskOffering() {
    // The DAO encapsulates what "private" means for disk offerings.
    final List<DiskOfferingVO> offerings = _diskOfferingDao.findPrivateDiskOffering();
    return offerings;
}
/**
 * Extension hook: subclasses may override to restrict which template types a
 * command is allowed to act on. This base implementation accepts everything.
 */
protected boolean templateIsCorrectType(VMTemplateVO template) {
return true;
}
/**
 * Returns true for any administrator account type: root admin, domain admin,
 * or read-only admin.
 */
public static boolean isAdmin(short accountType) {
    if (accountType == Account.ACCOUNT_TYPE_ADMIN) {
        return true;
    }
    if (accountType == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) {
        return true;
    }
    return accountType == Account.ACCOUNT_TYPE_READ_ONLY_ADMIN;
}
/**
 * Updates launch permissions for a template; delegates to the shared
 * template/ISO permission handler.
 */
@Override @DB
public boolean updateTemplatePermissions(UpdateTemplatePermissionsCmd cmd) {
return updateTemplateOrIsoPermissions(cmd);
}
/**
 * Updates launch permissions for an ISO; delegates to the shared
 * template/ISO permission handler.
 */
@Override @DB
public boolean updateTemplatePermissions(UpdateIsoPermissionsCmd cmd) {
return updateTemplateOrIsoPermissions(cmd);
}
/**
 * Shared handler for updating template/ISO visibility (public/featured flags)
 * and per-account launch permissions. Supported operations on the account
 * list are "add", "remove" and "reset"; "add" runs inside a transaction so a
 * single unknown account name rolls back all grants.
 *
 * Validation performed, in order: the media exists and is of the type the
 * command expects; the caller owns it or is an admin in an ancestor domain;
 * the media is not removed and is not the reserved id 1; non-admins may only
 * make media public if the global "allow.public.user.templates" setting is on.
 *
 * @return true when the update completed
 */
@DB
protected boolean updateTemplateOrIsoPermissions(UpdateTemplateOrIsoPermissionsCmd cmd) {
Transaction txn = Transaction.currentTxn();
//Input validation
Long id = cmd.getId();
Account account = UserContext.current().getCaller();
List<String> accountNames = cmd.getAccountNames();
Long userId = UserContext.current().getCallerUserId();
Boolean isFeatured = cmd.isFeatured();
Boolean isPublic = cmd.isPublic();
String operation = cmd.getOperation();
String mediaType = "";
VMTemplateVO template = _templateDao.findById(id);
if (template == null || !templateIsCorrectType(template)) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "unable to find " + mediaType + " with id " + id);
}
// The template command must not target an ISO, and vice versa.
if(cmd instanceof UpdateTemplatePermissionsCmd)
{
mediaType = "template";
if(template.getFormat().equals(ImageFormat.ISO))
{
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Please provide a valid template");
}
}
if(cmd instanceof UpdateIsoPermissionsCmd)
{
mediaType = "iso";
if(!template.getFormat().equals(ImageFormat.ISO))
{
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Please provide a valid iso");
}
}
// Ownership / admin-domain check; a null account is treated as trusted
// (request arrived over the internal 8096 port).
if (account != null)
{
if (!isAdmin(account.getType()) && (template.getAccountId() != account.getId())) {
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "unable to update permissions for " + mediaType + " with id " + id);
} else if (account.getType() != Account.ACCOUNT_TYPE_ADMIN) {
Long templateOwnerDomainId = findDomainIdByAccountId(template.getAccountId());
if (!isChildDomain(account.getDomainId(), templateOwnerDomainId)) {
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Unable to update permissions for " + mediaType + " with id " + id);
}
}
}
// If command is executed via 8096 port, set userId to the id of System account (1)
if (userId == null) {
userId = Long.valueOf(User.UID_SYSTEM);
}
// If the template is removed throw an error.
if (template.getRemoved() != null){
s_logger.error("unable to update permissions for " + mediaType + " with id " + id + " as it is removed ");
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "unable to update permissions for " + mediaType + " with id " + id + " as it is removed ");
}
// NOTE(review): boxed Long compared with == below; works for the cached
// value 1 but equals()/longValue() would be the robust form — confirm.
if (id == Long.valueOf(1)) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "unable to update permissions for " + mediaType + " with id " + id);
}
boolean isAdmin = ((account == null) || isAdmin(account.getType()));
boolean allowPublicUserTemplates = Boolean.parseBoolean(getConfigurationValue("allow.public.user.templates"));
if (!isAdmin && !allowPublicUserTemplates && isPublic != null && isPublic) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Only private " + mediaType + "s can be created.");
}
// // package up the accountNames as a list
// List<String> accountNameList = new ArrayList<String>();
if (accountNames != null)
{
if ((operation == null) || (!operation.equalsIgnoreCase("add") && !operation.equalsIgnoreCase("remove") && !operation.equalsIgnoreCase("reset")))
{
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Invalid operation on accounts, the operation must be either 'add' or 'remove' in order to modify launch permissions." +
" Given operation is: '" + operation + "'");
}
// StringTokenizer st = new StringTokenizer(accountNames, ",");
// while (st.hasMoreTokens()) {
// accountNameList.add(st.nextToken());
// }
}
Long accountId = template.getAccountId();
if (accountId == null) {
// if there is no owner of the template then it's probably already a public template (or domain private template) so publishing to individual users is irrelevant
throw new InvalidParameterValueException("Update template permissions is an invalid operation on template " + template.getName());
}
// Apply the public/featured flag changes (only the fields that were supplied).
VMTemplateVO updatedTemplate = _templateDao.createForUpdate();
if (isPublic != null) {
updatedTemplate.setPublicTemplate(isPublic.booleanValue());
}
if (isFeatured != null) {
updatedTemplate.setFeatured(isFeatured.booleanValue());
}
_templateDao.update(template.getId(), updatedTemplate);
Long domainId;
domainId = (null == account) ? DomainVO.ROOT_DOMAIN : account.getDomainId(); // Account == null for 8096 and so its safe for domainid = ROOT
if ("add".equalsIgnoreCase(operation)) {
// Transactional: any unresolvable account name rolls back all grants.
txn.start();
for (String accountName : accountNames) {
Account permittedAccount = _accountDao.findActiveAccount(accountName, domainId);
if (permittedAccount != null) {
// NOTE(review): account may be null here (8096 path handled above) —
// account.getId() would then NPE; confirm "add" is never used over 8096.
if (permittedAccount.getId() == account.getId()) {
continue; // don't grant permission to the template owner, they implicitly have permission
}
LaunchPermissionVO existingPermission = _launchPermissionDao.findByTemplateAndAccount(id, permittedAccount.getId());
if (existingPermission == null) {
LaunchPermissionVO launchPermission = new LaunchPermissionVO(id, permittedAccount.getId());
_launchPermissionDao.persist(launchPermission);
}
} else {
txn.rollback();
throw new InvalidParameterValueException("Unable to grant a launch permission to account " + accountName + ", account not found. "
+ "No permissions updated, please verify the account names and retry.");
}
}
txn.commit();
} else if ("remove".equalsIgnoreCase(operation)) {
// Unknown names are silently skipped on removal.
List<Long> accountIds = new ArrayList<Long>();
for (String accountName : accountNames) {
Account permittedAccount = _accountDao.findActiveAccount(accountName, domainId);
if (permittedAccount != null) {
accountIds.add(permittedAccount.getId());
}
}
_launchPermissionDao.removePermissions(id, accountIds);
} else if ("reset".equalsIgnoreCase(operation)) {
// do we care whether the owning account is an admin? if the
// owner is an admin, will we still set public to false?
updatedTemplate = _templateDao.createForUpdate();
updatedTemplate.setPublicTemplate(false);
updatedTemplate.setFeatured(false);
_templateDao.update(template.getId(), updatedTemplate);
_launchPermissionDao.removeAllPermissions(id);
}
return true;
}
/**
 * Lists the account names that hold launch permissions on a template/ISO.
 * Admins may scope the query to a domain (and optionally an account in it);
 * non-admins are restricted to media they own or that lies in their subtree.
 *
 * @param cmd carries the media id, optional domain id and account name
 * @return the account names with launch permission (may be empty)
 * @throws InvalidParameterValueException if the media does not exist or is
 *         of the wrong type for the command
 * @throws PermissionDeniedException for out-of-scope callers or the reserved id 1
 */
@Override
public List<String> listTemplatePermissions(ListTemplateOrIsoPermissionsCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
Account account = UserContext.current().getCaller();
Long domainId = cmd.getDomainId();
String acctName = cmd.getAccountName();
Long id = cmd.getId();
Long accountId = null;
if ((account == null) || account.getType() == Account.ACCOUNT_TYPE_ADMIN) {
// validate domainId before proceeding
if (domainId != null) {
if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list " + cmd.getMediaType() + " permissions.");
}
if (acctName != null) {
Account userAccount = _accountDao.findActiveAccount(acctName, domainId);
if (userAccount != null) {
accountId = userAccount.getId();
} else {
throw new PermissionDeniedException("Unable to find account " + acctName + " in domain " + domainId);
}
}
}
} else {
// Non-admins always query on behalf of their own account.
accountId = account.getId();
}
VMTemplateVO template = _templateDao.findById(id.longValue());
if (template == null || !templateIsCorrectType(template)) {
throw new InvalidParameterValueException("unable to find " + cmd.getMediaType() + " with id " + id);
}
// Private media: a normal user must own it; a domain admin's subtree must
// contain the owner's domain.
if (accountId != null && !template.isPublicTemplate()) {
if (account.getType() == Account.ACCOUNT_TYPE_NORMAL && template.getAccountId() != accountId) {
throw new PermissionDeniedException("unable to list permissions for " + cmd.getMediaType() + " with id " + id);
} else if (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) {
DomainVO accountDomain = _domainDao.findById(account.getDomainId());
Account templateAccount = _accountDao.findById(template.getAccountId());
DomainVO templateDomain = _domainDao.findById(templateAccount.getDomainId());
if (!templateDomain.getPath().contains(accountDomain.getPath())) {
throw new PermissionDeniedException("unable to list permissions for " + cmd.getMediaType() + " with id " + id);
}
}
}
// NOTE(review): boxed Long compared with == below; relies on the Long cache
// for the value 1 — equals() would be the robust form. Confirm.
if (id == Long.valueOf(1)) {
throw new PermissionDeniedException("unable to list permissions for " + cmd.getMediaType() + " with id " + id);
}
List<String> accountNames = new ArrayList<String>();
List<LaunchPermissionVO> permissions = _launchPermissionDao.findByTemplate(id);
if ((permissions != null) && !permissions.isEmpty()) {
for (LaunchPermissionVO permission : permissions) {
Account acct = _accountDao.findById(permission.getAccountId());
accountNames.add(acct.getAccountName());
}
}
return accountNames;
}
/**
 * Lists disk offerings visible to a non-root caller: offerings of the
 * caller's domain and of every ancestor domain up to ROOT, plus all public
 * offerings — but public offerings are appended only when no name/id/keyword
 * filter was supplied (any filter disables the public append).
 *
 * @throws CloudAuthenticationException if the caller's domain cannot be resolved
 */
private List<DiskOfferingVO> searchDiskOfferingsInternal(Account account, Object name, Object id, Object keyword, Filter searchFilter){
//it was decided to return all offerings for the user's domain, and everything above till root (for normal user or domain admin)
//list all offerings belonging to this domain, and all of its parents
//check the parent, if not null, add offerings for that parent to list
List<DiskOfferingVO> dol = new ArrayList<DiskOfferingVO>();
DomainVO domainRecord = _domainDao.findById(account.getDomainId());
boolean includePublicOfferings = true;
if(domainRecord != null)
{
// Walk up the domain hierarchy, querying each level until ROOT is reached.
while(true){
SearchBuilder<DiskOfferingVO> sb = _diskOfferingDao.createSearchBuilder();
sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
SearchCriteria<DiskOfferingVO> sc = sb.create();
if (keyword != null) {
includePublicOfferings = false;
SearchCriteria<DiskOfferingVO> ssc = _diskOfferingDao.createSearchCriteria();
ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (name != null) {
includePublicOfferings = false;
sc.setParameters("name", "%" + name + "%");
}
if (id != null) {
includePublicOfferings = false;
sc.setParameters("id", id);
}
//for this domain
sc.addAnd("domainId", SearchCriteria.Op.EQ, domainRecord.getId());
//search and add for this domain
dol.addAll(_diskOfferingDao.search(sc, searchFilter));
//try and move on to the next domain
if(domainRecord.getParent() != null) {
domainRecord = _domainDao.findById(domainRecord.getParent());
}
else {
break;//now we got all the offerings for this user/dom adm
}
}
}else{
s_logger.error("Could not find the domainId for account:"+account.getAccountName());
throw new CloudAuthenticationException("Could not find the domainId for account:"+account.getAccountName());
}
//add all the public offerings to the sol list before returning
if(includePublicOfferings) {
dol.addAll(_diskOfferingDao.findPublicDiskOfferings());
}
return dol;
}
/**
 * Lists disk offerings. Three paths, in order: (1) an explicit domainId
 * returns that domain's offerings (root admins unchecked, others only inside
 * their own hierarchy); (2) normal users and domain admins get their domain
 * plus ancestors via {@link #searchDiskOfferingsInternal}; (3) root admins
 * get a plain filtered search over all offerings.
 *
 * @param cmd carries optional name, id, keyword, domainId and paging parameters
 * @return matching disk offerings
 */
@Override
public List<DiskOfferingVO> searchForDiskOfferings(ListDiskOfferingsCmd cmd) {
//Note
//The list method for offerings is being modified in accordance with discussion with Will/Kevin
//For now, we will be listing the following based on the usertype
//1. For root, we will list all offerings
//2. For domainAdmin and regular users, we will list everything in their domains+parent domains ... all the way till root
Filter searchFilter = new Filter(DiskOfferingVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchBuilder<DiskOfferingVO> sb = _diskOfferingDao.createSearchBuilder();
// SearchBuilder and SearchCriteria are now flexible so that the search builder can be built with all possible
// search terms and only those with criteria can be set. The proper SQL should be generated as a result.
Account account = UserContext.current().getCaller();
Object name = cmd.getDiskOfferingName();
Object id = cmd.getId();
Object keyword = cmd.getKeyword();
Long domainId = cmd.getDomainId();
//Keeping this logic consistent with domain specific zones
//if a domainId is provided, we just return the disk offering associated with this domain
if(domainId != null){
if(account.getType() == Account.ACCOUNT_TYPE_ADMIN){
return _diskOfferingDao.listByDomainId(domainId);//no perm check
}else{
//check if the user's domain == do's domain || user's domain is a child of so's domain
if(isPermissible(account.getDomainId(), domainId)){
//perm check succeeded
return _diskOfferingDao.listByDomainId(domainId);
}else{
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "The account:"+account.getAccountName()+" does not fall in the same domain hierarchy as the disk offering");
}
}
}
//For non-root users
if((account.getType() == Account.ACCOUNT_TYPE_NORMAL || account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)){
return searchDiskOfferingsInternal(account, name, id, keyword, searchFilter);
}
//For root users, preserving existing flow
sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
// FIXME: disk offerings should search back up the hierarchy for available disk offerings...
/*
sb.addAnd("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
if (domainId != null) {
SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
domainSearch.addAnd("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId());
}
*/
SearchCriteria<DiskOfferingVO> sc = sb.create();
if (keyword != null) {
SearchCriteria<DiskOfferingVO> ssc = _diskOfferingDao.createSearchCriteria();
ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (name != null) {
sc.setParameters("name", "%" + name + "%");
}
if (id != null) {
sc.setParameters("id", id);
}
// FIXME: disk offerings should search back up the hierarchy for available disk offerings...
/*
if (domainId != null) {
sc.setParameters("domainId", domainId);
//
//DomainVO domain = _domainDao.findById((Long)domainId);
//
// I want to join on user_vm.domain_id = domain.id where domain.path like 'foo%'
//sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
//
}
*/
return _diskOfferingDao.search(sc, searchFilter);
}
// @Override
// public AsyncJobResult queryAsyncJobResult(QueryAsyncJobResultCmd cmd) throws PermissionDeniedException {
// return queryAsyncJobResult(cmd.getId());
// }
/**
 * Returns the result of an async job after an ownership check. Requests from
 * the API server are trusted outright; otherwise, when the job's owner is not
 * the caller, the caller must be an admin whose domain subtree contains the
 * job owner's domain.
 *
 * @param jobId the async job to query
 * @return the job result from the async manager
 * @throws PermissionDeniedException for unknown jobs or callers without ownership
 */
@Override
public AsyncJobResult queryAsyncJobResult(long jobId) throws PermissionDeniedException {
AsyncJobVO job = _asyncMgr.getAsyncJob(jobId);
if (job == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("queryAsyncJobResult error: Permission denied, invalid job id " + jobId);
}
// Unknown job ids are reported as permission failures, not not-found.
throw new PermissionDeniedException("Permission denied, invalid job id " + jobId);
}
// treat any requests from API server as trusted requests
if (!UserContext.current().isApiServer() && job.getAccountId() != UserContext.current().getCaller().getId()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Mismatched account id in job and user context, perform further securty check. job id: "
+ jobId + ", job owner account: " + job.getAccountId() + ", accound id in current context: " + UserContext.current().getCaller().getId());
}
Account account = UserContext.current().getCaller();
if (account != null) {
if (isAdmin(account.getType())) {
// Admins may view jobs owned by accounts within their domain subtree.
Account jobAccount = _accountDao.findById(job.getAccountId());
if (jobAccount == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("queryAsyncJobResult error: Permission denied, account no long exist for account id in context, job id: " + jobId
+ ", accountId " + job.getAccountId());
}
throw new PermissionDeniedException("Permission denied, invalid job ownership, job id: " + jobId);
}
if (!_domainDao.isChildDomain(account.getDomainId(), jobAccount.getDomainId())) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("queryAsyncJobResult error: Permission denied, invalid ownership for job " + jobId + ", job account owner: "
+ job.getAccountId() + " in domain: " + jobAccount.getDomainId() + ", account id in context: " + account.getId() +
" in domain: " + account.getDomainId());
}
throw new PermissionDeniedException("Permission denied, invalid job ownership, job id: " + jobId);
}
} else {
// Non-admin callers may never read another account's job.
if (s_logger.isDebugEnabled()) {
s_logger.debug("queryAsyncJobResult error: Permission denied, invalid ownership for job " + jobId + ", job account owner: "
+ job.getAccountId() + ", account id in context: " + account.getId());
}
throw new PermissionDeniedException("Permission denied, invalid job ownership, job id: " + jobId);
}
}
}
return _asyncMgr.queryAsyncJobResult(jobId);
}
@Override
public AsyncJobVO findAsyncJobById(long jobId) {
    // Straight pass-through to the async job manager.
    final AsyncJobVO job = _asyncMgr.getAsyncJob(jobId);
    return job;
}
@Override
public String[] getApiConfig() {
    // Single properties file describing the supported API commands.
    final String[] configFiles = { "commands.properties" };
    return configFiles;
}
/**
 * Periodic task that removes accounts flagged for cleanup. A global
 * "AccountCleanup" lock ensures only one management server runs the pass at a
 * time; each pass opens its own DB transaction and processes every account
 * returned by {@code findCleanups()}, logging and continuing on per-account
 * failures. All exceptions are caught so the scheduler is never killed.
 */
protected class AccountCleanupTask implements Runnable {
@Override
public void run() {
try {
GlobalLock lock = GlobalLock.getInternLock("AccountCleanup");
if (lock == null) {
s_logger.debug("Couldn't get the global lock");
return;
}
// Give up quietly if another server holds the lock (30s timeout).
if (!lock.lock(30)) {
s_logger.debug("Couldn't lock the db");
return;
}
Transaction txn = null;
try {
txn = Transaction.open(Transaction.CLOUD_DB);
List<AccountVO> accounts = _accountDao.findCleanups();
s_logger.info("Found " + accounts.size() + " accounts to cleanup");
for (AccountVO account : accounts) {
s_logger.debug("Cleaning up " + account.getId());
try {
_accountMgr.cleanupAccount(account, _accountMgr.getSystemUser().getId(), _accountMgr.getSystemAccount());
} catch (Exception e) {
// One bad account must not abort the rest of the pass.
s_logger.error("Skipping due to error on account " + account.getId(), e);
}
}
} catch (Exception e) {
s_logger.error("Exception ", e);
} finally {
// Always release the transaction and the global lock.
if(txn != null) {
txn.close();
}
lock.unlock();
}
} catch (Exception e) {
s_logger.error("Exception ", e);
}
}
}
/**
 * Periodic task that expunges events older than {@code _purgeDelay} days.
 * Serialized across management servers via the global "EventPurge" lock;
 * all exceptions are caught so the scheduler thread survives.
 */
protected class EventPurgeTask implements Runnable {
@Override
public void run() {
try {
GlobalLock lock = GlobalLock.getInternLock("EventPurge");
if (lock == null) {
s_logger.debug("Couldn't get the global lock");
return;
}
// Skip this pass if another server is already purging (30s timeout).
if (!lock.lock(30)) {
s_logger.debug("Couldn't lock the db");
return;
}
try {
// Cutoff = now minus the configured purge delay in days.
final Calendar purgeCal = Calendar.getInstance();
purgeCal.add(Calendar.DAY_OF_YEAR, -_purgeDelay);
Date purgeTime = purgeCal.getTime();
s_logger.debug("Deleting events older than: "+purgeTime.toString());
List<EventVO> oldEvents = _eventDao.listOlderEvents(purgeTime);
s_logger.debug("Found "+oldEvents.size()+" events to be purged");
for (EventVO event : oldEvents){
_eventDao.expunge(event.getId());
}
} catch (Exception e) {
s_logger.error("Exception ", e);
} finally {
lock.unlock();
}
} catch (Exception e) {
s_logger.error("Exception ", e);
}
}
}
@Override
public StoragePoolVO findPoolById(Long id) {
    // Primary-key lookup via the storage pool DAO.
    final StoragePoolVO pool = _poolDao.findById(id);
    return pool;
}
/**
 * Translates the API command's parameters into a {@link Criteria} object
 * (ordered by id ascending, with the command's paging) and delegates to the
 * criteria-based overload.
 */
@Override
public List<? extends StoragePoolVO> searchForStoragePools(ListStoragePoolsCmd cmd) {
    final Criteria criteria = new Criteria("id", Boolean.TRUE, cmd.getStartIndex(), cmd.getPageSizeVal());
    criteria.addCriteria(Criteria.ID, cmd.getId());
    criteria.addCriteria(Criteria.NAME, cmd.getStoragePoolName());
    criteria.addCriteria(Criteria.CLUSTERID, cmd.getClusterId());
    criteria.addCriteria(Criteria.ADDRESS, cmd.getIpAddress());
    criteria.addCriteria(Criteria.KEYWORD, cmd.getKeyword());
    criteria.addCriteria(Criteria.PATH, cmd.getPath());
    criteria.addCriteria(Criteria.PODID, cmd.getPodId());
    criteria.addCriteria(Criteria.DATACENTERID, cmd.getZoneId());
    return searchForStoragePools(criteria);
}
/**
 * Searches storage pools using a generic {@link Criteria} bag. Supported
 * criteria: id, name (substring), host, path, zone, pod, cluster, host
 * address, and a keyword matched against name or pool type.
 *
 * @param c the criteria, including ordering and paging
 * @return matching storage pools
 */
@Override
public List<? extends StoragePoolVO> searchForStoragePools(Criteria c) {
Filter searchFilter = new Filter(StoragePoolVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
SearchCriteria<StoragePoolVO> sc = _poolDao.createSearchCriteria();
Object id = c.getCriteria(Criteria.ID);
Object name = c.getCriteria(Criteria.NAME);
Object host = c.getCriteria(Criteria.HOST);
Object path = c.getCriteria(Criteria.PATH);
Object zone = c.getCriteria(Criteria.DATACENTERID);
Object pod = c.getCriteria(Criteria.PODID);
Object cluster = c.getCriteria(Criteria.CLUSTERID);
Object address = c.getCriteria(Criteria.ADDRESS);
Object keyword = c.getCriteria(Criteria.KEYWORD);
if (keyword != null) {
// Keyword matches either the pool name or its type.
SearchCriteria<StoragePoolVO> ssc = _poolDao.createSearchCriteria();
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("poolType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
// Each remaining criterion is applied only when supplied.
if (id != null) {
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (name != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
}
if (host != null) {
sc.addAnd("host", SearchCriteria.Op.EQ, host);
}
if (path != null) {
sc.addAnd("path", SearchCriteria.Op.EQ, path);
}
if (zone != null) {
sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
}
if (pod != null) {
sc.addAnd("podId", SearchCriteria.Op.EQ, pod);
}
if (address != null) {
sc.addAnd("hostAddress", SearchCriteria.Op.EQ, address);
}
if (cluster != null) {
sc.addAnd("clusterId", SearchCriteria.Op.EQ, cluster);
}
return _poolDao.search(sc, searchFilter);
}
/**
 * Looks up detail rows for a storage pool; delegated entirely to the pool DAO.
 * Semantics of {@code value} (exact match vs. pattern) are defined by the DAO.
 */
@Override
public List<String> searchForStoragePoolDetails(long poolId, String value)
{
    return _poolDao.searchForStoragePoolDetails(poolId, value);
}
/**
 * Lists async jobs visible to the caller.
 *
 * Visibility rules, in order:
 *  - admin (or no caller) + accountName + domainId: jobs of that one account;
 *  - admin + domainId only: jobs of all accounts under that domain subtree
 *    (enforced via a join on the domain path);
 *  - regular user: only the caller's own jobs.
 *
 * @throws InvalidParameterValueException if the named account does not exist
 * @throws PermissionDeniedException if domainId is not within the caller's domain
 */
@Override
public List<AsyncJobVO> searchForAsyncJobs(ListAsyncJobsCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Filter searchFilter = new Filter(AsyncJobVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchBuilder<AsyncJobVO> sb = _jobDao.createSearchBuilder();
    Object accountId = null;
    Long domainId = cmd.getDomainId();
    Account account = UserContext.current().getCaller();
    if ((account == null) || isAdmin(account.getType())) {
        String accountName = cmd.getAccountName();
        if ((accountName != null) && (domainId != null)) {
            // Narrow to a single account when both name and domain are given.
            Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
            if (userAccount != null) {
                accountId = userAccount.getId();
            } else {
                throw new InvalidParameterValueException("Failed to list async jobs for account " + accountName + " in domain " + domainId + "; account not found.");
            }
        } else if (domainId != null) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Failed to list async jobs for domain " + domainId + "; permission denied.");
            }
            // we can do a domain match for the admin case
            SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
            domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
            SearchBuilder<AccountVO> accountSearch = _accountDao.createSearchBuilder();
            // job -> account -> domain join so the path LIKE below covers the subtree.
            accountSearch.join("domainSearch", domainSearch, accountSearch.entity().getDomainId(), domainSearch.entity().getId(), JoinType.INNER);
            sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinType.INNER);
        }
    } else {
        // Non-admin callers only ever see their own jobs.
        accountId = account.getId();
    }
    Object keyword = cmd.getKeyword();
    Object startDate = cmd.getStartDate();
    SearchCriteria<AsyncJobVO> sc = _jobDao.createSearchCriteria();
    if (keyword != null) {
        sc.addAnd("cmd", SearchCriteria.Op.LIKE, "%" + keyword + "%");
    }
    if (accountId != null) {
        sc.addAnd("accountId", SearchCriteria.Op.EQ, accountId);
    } else if (domainId != null) {
        // "path%" matches the domain itself plus every descendant domain.
        DomainVO domain = _domainDao.findById(domainId);
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
    }
    if (startDate != null) {
        sc.addAnd("created", SearchCriteria.Op.GTEQ, startDate);
    }
    return _jobDao.search(sc, searchFilter);
}
/** Returns true if childId is within parentId's domain subtree; delegates to the domain DAO. */
@Override
public boolean isChildDomain(Long parentId, Long childId) {
    return _domainDao.isChildDomain(parentId, childId);
}
/** Starts the secondary-storage VM with the given instance id via the SSVM manager. */
public SecondaryStorageVmVO startSecondaryStorageVm(long instanceId) {
    return _secStorageVmMgr.startSecStorageVm(instanceId);
}
/**
 * Stops the secondary-storage VM, then re-reads it from the DB so the caller
 * sees the post-stop state. The stop result itself is not checked here.
 */
public SecondaryStorageVmVO stopSecondaryStorageVm(long instanceId) {
    _secStorageVmMgr.stopSecStorageVm(instanceId);
    return _secStorageVmDao.findById(instanceId);
}
/**
 * Reboots the secondary-storage VM, then re-reads it from the DB so the caller
 * sees the refreshed record. The reboot result itself is not checked here.
 */
public SecondaryStorageVmVO rebootSecondaryStorageVm(long instanceId) {
    _secStorageVmMgr.rebootSecStorageVm(instanceId);
    return _secStorageVmDao.findById(instanceId);
}
/** Destroys the secondary-storage VM; returns the SSVM manager's success flag. */
public boolean destroySecondaryStorageVm(long instanceId) {
    return _secStorageVmMgr.destroySecStorageVm(instanceId);
}
@Override
public List<SecondaryStorageVmVO> searchForSecondaryStorageVm(Criteria c) {
    // Ordering and pagination come straight from the supplied criteria.
    Filter searchFilter = new Filter(SecondaryStorageVmVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    SearchCriteria<SecondaryStorageVmVO> sc = _secStorageVmDao.createSearchCriteria();

    Object keyword = c.getCriteria(Criteria.KEYWORD);
    if (keyword != null) {
        // A keyword matches either the VM name or its state.
        SearchCriteria<SecondaryStorageVmVO> ssc = _secStorageVmDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }

    // Remaining filters are ANDed in only when present.
    Object id = c.getCriteria(Criteria.ID);
    if (id != null) {
        sc.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    Object name = c.getCriteria(Criteria.NAME);
    if (name != null) {
        sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    Object state = c.getCriteria(Criteria.STATE);
    if (state != null) {
        sc.addAnd("state", SearchCriteria.Op.EQ, state);
    }
    Object zone = c.getCriteria(Criteria.DATACENTERID);
    if (zone != null) {
        sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
    }
    Object pod = c.getCriteria(Criteria.PODID);
    if (pod != null) {
        sc.addAnd("podId", SearchCriteria.Op.EQ, pod);
    }
    Object hostId = c.getCriteria(Criteria.HOSTID);
    if (hostId != null) {
        sc.addAnd("hostId", SearchCriteria.Op.EQ, hostId);
    }
    return _secStorageVmDao.search(sc, searchFilter);
}
/**
 * Lists system VMs (console proxies and/or secondary-storage VMs) matching the
 * command's filters. When no systemVmType is given, both kinds are returned;
 * an unrecognized type yields an empty list.
 *
 * Changes vs. original: removed the redundant {@code (type != null)} re-checks
 * (already implied by the failed {@code type == null} branch) and replaced the
 * raw List/ArrayList — which required suppressing unchecked/rawtypes warnings —
 * with a typed List&lt;VMInstanceVO&gt;, valid since both result types extend
 * VMInstanceVO.
 */
@Override
public List<? extends VMInstanceVO> searchForSystemVm(ListSystemVMsCmd cmd) {
    Criteria c = new Criteria("id", Boolean.TRUE, cmd.getStartIndex(), cmd.getPageSizeVal());
    c.addCriteria(Criteria.KEYWORD, cmd.getKeyword());
    c.addCriteria(Criteria.ID, cmd.getId());
    c.addCriteria(Criteria.DATACENTERID, cmd.getZoneId());
    c.addCriteria(Criteria.PODID, cmd.getPodId());
    c.addCriteria(Criteria.HOSTID, cmd.getHostId());
    c.addCriteria(Criteria.NAME, cmd.getSystemVmName());
    c.addCriteria(Criteria.STATE, cmd.getState());

    String type = cmd.getSystemVmType();
    List<VMInstanceVO> systemVMs = new ArrayList<VMInstanceVO>();
    if (type == null) {
        // No type filter: include every kind of system VM.
        systemVMs.addAll(searchForConsoleProxy(c));
        systemVMs.addAll(searchForSecondaryStorageVm(c));
    } else if (type.equalsIgnoreCase("secondarystoragevm")) {
        systemVMs.addAll(searchForSecondaryStorageVm(c));
    } else if (type.equalsIgnoreCase("consoleproxy")) {
        systemVMs.addAll(searchForConsoleProxy(c));
    }
    return systemVMs;
}
/**
 * Resolves a system VM by id, returning the concrete VO from the
 * type-specific DAO, or null when no such system VM exists.
 */
@Override
public VMInstanceVO findSystemVMById(long instanceId) {
    // Restrict the lookup to the two system-VM types.
    VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(instanceId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        return null;
    }
    // Re-load through the matching DAO so the caller gets the concrete subtype.
    return (systemVm.getType() == VirtualMachine.Type.ConsoleProxy)
            ? _consoleProxyDao.findById(instanceId)
            : _secStorageVmDao.findById(instanceId);
}
/** API entry point for starting a system VM; unwraps the id and delegates. */
@Override
public VirtualMachine startSystemVM(StartSystemVMCmd cmd) {
    return startSystemVm(cmd.getId());
}
/**
 * Starts the system VM with the given id, dispatching to the proxy or SSVM
 * starter by VM type.
 *
 * @throws InvalidParameterValueException if no system VM with that id exists
 */
@Override
public VirtualMachine startSystemVm(long vmId) {
    VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(vmId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new InvalidParameterValueException("unable to find a system vm with id " + vmId);
    }
    switch (systemVm.getType()) {
    case ConsoleProxy:
        return startConsoleProxy(vmId);
    case SecondaryStorageVm:
        return startSecondaryStorageVm(vmId);
    default:
        // Unreachable in practice: findByIdTypes was restricted to the two types above.
        throw new InvalidParameterValueException("Unable to find a system vm: " + vmId);
    }
}
/**
 * Stops a system VM, dispatching on its type (console proxy vs. SSVM).
 *
 * @throws ServerApiException if no system VM with the given id exists
 */
@Override
public VMInstanceVO stopSystemVM(StopSystemVmCmd cmd) {
    Long id = cmd.getId();
    // verify parameters
    VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(id, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new ServerApiException (BaseCmd.PARAM_ERROR, "unable to find a system vm with id " + id);
    }
    // FIXME: We need to return the system VM from this method, so what do we do with the boolean response from stopConsoleProxy and stopSecondaryStorageVm?
    if (systemVm.getType().equals(VirtualMachine.Type.ConsoleProxy)){
        return stopConsoleProxy(id);
    } else {
        // findByIdTypes above guarantees the only other possibility is an SSVM.
        return stopSecondaryStorageVm(id);
    }
}
/**
 * Reboots a system VM, dispatching on its type (console proxy vs. SSVM).
 *
 * @throws ServerApiException if no system VM with the given id exists
 */
@Override
public VMInstanceVO rebootSystemVM(RebootSystemVmCmd cmd) {
    Long vmId = cmd.getId();
    VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(vmId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new ServerApiException (BaseCmd.PARAM_ERROR, "unable to find a system vm with id " + vmId);
    }
    boolean isProxy = systemVm.getType().equals(VirtualMachine.Type.ConsoleProxy);
    return isProxy ? rebootConsoleProxy(vmId) : rebootSecondaryStorageVm(vmId);
}
/**
 * Computes the HMAC-SHA1 signature of {@code request} with {@code key},
 * returned Base64-encoded, or null when either argument is null or signing fails.
 *
 * Security fix vs. original: the method previously logged both the request and
 * the secret key at INFO level, leaking API credentials into the log file.
 * Those log statements have been removed.
 */
private String signRequest(String request, String key) {
    try {
        if (key != null && request != null) {
            Mac mac = Mac.getInstance("HmacSHA1");
            SecretKeySpec keySpec = new SecretKeySpec(key.getBytes(), "HmacSHA1");
            mac.init(keySpec);
            mac.update(request.getBytes());
            byte[] encryptedBytes = mac.doFinal();
            return new String((Base64.encodeBase64(encryptedBytes)));
        }
    } catch (Exception ex) {
        s_logger.error("unable to sign request", ex);
    }
    return null;
}
/**
 * Returns a two-element list: the configured cloud identifier and its
 * HMAC signature computed with the requesting user's secret key. The
 * signature is left empty ("") if computing it fails — callers get the
 * identifier either way.
 *
 * @throws InvalidParameterValueException if the user doesn't exist or is removed
 */
@Override
public ArrayList<String> getCloudIdentifierResponse(GetCloudIdentifierCmd cmd) throws InvalidParameterValueException{
    Long userId = cmd.getUserId();
    //verify that user exists
    User user = findUserById(userId);
    if ((user == null) || (user.getRemoved() != null)) {
        throw new InvalidParameterValueException("Unable to find active user by id " + userId);
    }
    // "cloud.identifier" is a global config value; fall back to empty string.
    String cloudIdentifier = _configDao.getValue("cloud.identifier");
    if (cloudIdentifier == null) {
        cloudIdentifier = "";
    }
    String signature = "";
    try {
        //get the user obj to get his secret key
        user = getUser(userId);
        String secretKey = user.getSecretKey();
        String input = cloudIdentifier;
        signature = signRequest(input, secretKey);
    } catch (Exception e) {
        // Best-effort: a signing failure still returns the identifier unsigned.
        s_logger.warn("Exception whilst creating a signature:"+e);
    }
    ArrayList<String> cloudParams = new ArrayList<String>();
    cloudParams.add(cloudIdentifier);
    cloudParams.add(signature);
    return cloudParams;
}
/** Finds a security group by owning account and group name; null when absent. */
@Override
public SecurityGroupVO findNetworkGroupByName(Long accountId, String groupName) {
    return _networkSecurityGroupDao.findByAccountAndName(accountId, groupName);
}
/** Finds a security group by its id; null when absent. */
@Override
public SecurityGroupVO findNetworkGroupById(long networkGroupId) {
    return _networkSecurityGroupDao.findById(networkGroupId);
}
/**
 * {@inheritDoc}
 */
@Override
public boolean isHypervisorSnapshotCapable() {
    // Flag is computed elsewhere (configuration/startup); this is a plain accessor.
    return _isHypervisorSnapshotCapable;
}
/**
 * Returns events that were started between (now - entryTime) and
 * (now - duration) seconds ago and have no matching completion record yet.
 */
@Override
public List<EventVO> listPendingEvents(int entryTime, int duration) {
    // Build the [minTime, maxTime] lookup window relative to now.
    Calendar calMin = Calendar.getInstance();
    calMin.add(Calendar.SECOND, -entryTime);
    Calendar calMax = Calendar.getInstance();
    calMax.add(Calendar.SECOND, -duration);

    List<EventVO> pendingEvents = new ArrayList<EventVO>();
    for (EventVO event : _eventDao.listStartedEvents(calMin.getTime(), calMax.getTime())) {
        // Started but never completed -> still pending.
        if (_eventDao.findCompletedEvent(event.getId()) == null) {
            pendingEvents.add(event);
        }
    }
    return pendingEvents;
}
/**
 * Lists pre-allocated LUNs, optionally filtered by target IQN and scope.
 * Scope values (case-insensitive): ALL (default when absent), ALLOCATED
 * (volumeId/taken set), FREE (volumeId/taken unset). Any other scope value
 * yields null, matching the historical behaviour.
 */
@Override
public List<PreallocatedLunVO> getPreAllocatedLuns(ListPreallocatedLunsCmd cmd) {
    Filter searchFilter = new Filter(PreallocatedLunVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchCriteria<PreallocatedLunVO> sc = _lunDao.createSearchCriteria();

    Object targetIqn = cmd.getTargetIqn();
    if (targetIqn != null) {
        sc.addAnd("targetIqn", SearchCriteria.Op.EQ, targetIqn);
    }

    Object scope = cmd.getScope();
    String scopeName = (scope == null) ? "ALL" : scope.toString();
    if (scopeName.equalsIgnoreCase("ALL")) {
        return _lunDao.search(sc, searchFilter);
    }
    if (scopeName.equalsIgnoreCase("ALLOCATED")) {
        sc.addAnd("volumeId", SearchCriteria.Op.NNULL);
        sc.addAnd("taken", SearchCriteria.Op.NNULL);
        return _lunDao.search(sc, searchFilter);
    }
    if (scopeName.equalsIgnoreCase("FREE")) {
        sc.addAnd("volumeId", SearchCriteria.Op.NULL);
        sc.addAnd("taken", SearchCriteria.Op.NULL);
        return _lunDao.search(sc, searchFilter);
    }
    return null;
}
/**
 * Returns true when the "use.local.storage" configuration value is set to
 * "true" (case-insensitive); false when unset or any other value.
 *
 * Idiom fix vs. original: collapsed the verbose
 * {@code if (cond) return true; else return false;} into a single boolean
 * expression with a null-safe constant-first comparison.
 */
@Override
public boolean checkLocalStorageConfigVal()
{
    return "true".equalsIgnoreCase(_configs.get("use.local.storage"));
}
/**
 * Returns true when the host can be put into maintenance, i.e. its storage
 * pool has at least one other host that could take over.
 *
 * Robustness fix vs. original: the original called {@code get(0)} on the
 * pool-host list after only a null check — an empty list (host with no pool
 * association) threw IndexOutOfBoundsException. An empty list now simply
 * yields false.
 */
@Override
public boolean checkIfMaintenable(long hostId) {
    // Get the pool<->host association for this host (at most one record expected).
    List<StoragePoolHostVO> poolHostRecordSet = _poolHostDao.listByHostId(hostId);
    if (poolHostRecordSet == null || poolHostRecordSet.isEmpty()) {
        return false;
    }
    StoragePoolHostVO poolHostRecord = poolHostRecordSet.get(0);
    // Maintenance is OK only when another host in the same pool can take over.
    List<StoragePoolHostVO> hostsInPool = _poolHostDao.listByPoolId(poolHostRecord.getPoolId());
    return hostsInPool != null && hostsInPool.size() > 1;
}
/**
 * Returns cloud-wide capability flags for API clients:
 * securityGroupsEnabled, userPublicTemplateEnabled, and cloudStackVersion.
 * Each boolean flag is false only when the config value is missing or exactly
 * "false"; any other string counts as enabled.
 */
@Override
public Map<String, Object> listCapabilities(ListCapabilitiesCmd cmd) {
    Map<String, Object> capabilities = new HashMap<String, Object>();
    String securityGroupsEnabled = _configs.get(Config.DirectAttachSecurityGroupsEnabled.key());
    String userPublicTemplateEnabled = _configs.get(Config.AllowPublicUserTemplates.key());
    capabilities.put("securityGroupsEnabled", (securityGroupsEnabled == null || securityGroupsEnabled.equals("false") ? false : true));
    capabilities.put("userPublicTemplateEnabled", (userPublicTemplateEnabled == null || userPublicTemplateEnabled.equals("false") ? false : true));
    capabilities.put("cloudStackVersion", getVersion());
    return capabilities;
}
/** Looks up a guest OS record by id; null when absent. */
@Override
public GuestOSVO getGuestOs(Long guestOsId)
{
    return _guestOSDao.findById(guestOsId);
}
/**
 * Returns the ROOT volume of the given VM instance.
 * NOTE(review): assumes every instance has at least one ROOT volume —
 * get(0) on an empty result throws IndexOutOfBoundsException; confirm callers
 * only pass valid instances.
 */
@Override
public VolumeVO getRootVolume(Long instanceId)
{
    return _volumeDao.findByInstanceAndType(instanceId, Volume.VolumeType.ROOT).get(0);
}
/**
 * Counts primary storage pools that are in any maintenance-related state
 * (Maintenance, PrepareForMaintenance, ErrorInMaintenance).
 * Note: podId is not used in the lookup — the count is cloud-wide, matching
 * the original behaviour.
 */
@Override
public long getPsMaintenanceCount(long podId){
    long count = 0;
    count += _poolDao.listPoolsByStatus(Status.Maintenance).size();
    count += _poolDao.listPoolsByStatus(Status.PrepareForMaintenance).size();
    count += _poolDao.listPoolsByStatus(Status.ErrorInMaintenance).size();
    return count;
}
/**
 * Returns true when the storage pool backing the instance's first volume is Up.
 *
 * Robustness fixes vs. original:
 *  - the original called {@code get(0)} on the volume list before any check,
 *    so an instance with no volumes threw IndexOutOfBoundsException (its null
 *    check on the element came too late to help); an empty list now returns false;
 *  - a missing pool record (findById returning null) also returns false instead
 *    of throwing NPE;
 *  - the verbose if/else boolean return is collapsed into one expression.
 */
@Override
public boolean isPoolUp(long instanceId){
    List<VolumeVO> volumes = _volumeDao.findByInstance(instanceId);
    if (volumes == null || volumes.isEmpty()) {
        return false;
    }
    VolumeVO rootVolume = volumes.get(0);
    if (rootVolume == null) {
        return false;
    }
    StoragePoolVO pool = _poolDao.findById(rootVolume.getPoolId());
    return pool != null && Status.Up.equals(pool.getStatus());
}
/**
 * Extracts (exports) a volume either as an HTTP download URL or by FTP upload.
 *
 * Flow: validate the request and the caller's permissions, copy the volume
 * from its primary storage pool to secondary storage, then either kick off
 * the FTP upload or publish a download URL. Progress is reflected both in the
 * upload table and (when running inside an async job executor) the async job.
 *
 * @return the id of the UploadVO tracking this extraction
 * @throws URISyntaxException if the supplied url is malformed
 */
@Override
public Long extractVolume(ExtractVolumeCmd cmd) throws URISyntaxException {
    Long volumeId = cmd.getId();
    String url = cmd.getUrl();
    Long zoneId = cmd.getZoneId();
    AsyncJobVO job = null; // FIXME: cmd.getJob();
    String mode = cmd.getMode();
    Account account = UserContext.current().getCaller();
    // --- validation: volume, zone, and pool must all exist ---
    VolumeVO volume = _volumeDao.findById(volumeId);
    if (volume == null) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "Unable to find volume with id " + volumeId);
    }
    if (_dcDao.findById(zoneId) == null) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "Please specify a valid zone.");
    }
    if(volume.getPoolId() == null){
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "The volume doesnt belong to a storage pool so cant extract it");
    }
    //Extract activity only for detached volumes or for volumes whose instance is stopped
    if(volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped ){
        s_logger.debug("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
        throw new PermissionDeniedException("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
    }
    // Non-admins may only extract volumes from extractable, non-system templates.
    VMTemplateVO template = ApiDBUtils.findTemplateById(volume.getTemplateId());
    boolean isExtractable = template != null && template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM;
    if( !isExtractable && account!=null && account.getType() != Account.ACCOUNT_TYPE_ADMIN){ // Global admins are allowed to extract
        throw new PermissionDeniedException("The volume:" +volumeId+ " is not allowed to be extracted");
    }
    // mode must be one of the two supported extraction modes.
    Upload.Mode extractMode;
    if( mode == null || (!mode.equals(Upload.Mode.FTP_UPLOAD.toString()) && !mode.equals(Upload.Mode.HTTP_DOWNLOAD.toString())) ){
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "Please specify a valid extract Mode ");
    }else{
        extractMode = mode.equals(Upload.Mode.FTP_UPLOAD.toString()) ? Upload.Mode.FTP_UPLOAD : Upload.Mode.HTTP_DOWNLOAD;
    }
    // --- ownership check: owner for users, child-domain check for admins ---
    if (account != null) {
        if(!isAdmin(account.getType())){
            if (volume.getAccountId() != account.getId()){
                throw new PermissionDeniedException("Unable to find volume with ID: " + volumeId + " for account: " + account.getAccountName());
            }
        } else {
            Account userAccount = _accountDao.findById(volume.getAccountId());
            if((userAccount == null) || !_domainDao.isChildDomain(account.getDomainId(), userAccount.getDomainId())) {
                throw new PermissionDeniedException("Unable to extract volume:" + volumeId + " - permission denied.");
            }
        }
    }
    // If mode is upload perform extra checks on url and also see if there is an ongoing upload on the same.
    if (extractMode == Upload.Mode.FTP_UPLOAD){
        URI uri = new URI(url);
        if ( (uri.getScheme() == null) || (!uri.getScheme().equalsIgnoreCase("ftp") )) {
            throw new IllegalArgumentException("Unsupported scheme for url: " + url);
        }
        String host = uri.getHost();
        try {
            // Reject local / link-local / multicast and IPv6 destinations.
            InetAddress hostAddr = InetAddress.getByName(host);
            if (hostAddr.isAnyLocalAddress() || hostAddr.isLinkLocalAddress() || hostAddr.isLoopbackAddress() || hostAddr.isMulticastAddress() ) {
                throw new IllegalArgumentException("Illegal host specified in url");
            }
            if (hostAddr instanceof Inet6Address) {
                throw new IllegalArgumentException("IPV6 addresses not supported (" + hostAddr.getHostAddress() + ")");
            }
        } catch (UnknownHostException uhe) {
            throw new IllegalArgumentException("Unable to resolve " + host);
        }
        if ( _uploadMonitor.isTypeUploadInProgress(volumeId, Upload.Type.VOLUME) ){
            throw new IllegalArgumentException(volume.getName() + " upload is in progress. Please wait for some time to schedule another upload for the same");
        }
    }
    long userId = UserContext.current().getCallerUserId();
    long accountId = volume.getAccountId();
    // Locate secondary storage for the zone and the host serving the source pool.
    String secondaryStorageURL = _storageMgr.getSecondaryStorageURL(zoneId);
    StoragePoolVO srcPool = _poolDao.findById(volume.getPoolId());
    Long sourceHostId = _storageMgr.findHostIdForStoragePool(srcPool);
    // NOTE(review): assumes at least one secondary storage host exists in the
    // zone — get(0) on an empty list would throw; confirm deployment invariant.
    List<HostVO> storageServers = _hostDao.listByTypeDataCenter(Host.Type.SecondaryStorage, zoneId);
    HostVO sserver = storageServers.get(0);
    List<UploadVO> extractURLList = _uploadDao.listByTypeUploadStatus(volumeId, Upload.Type.VOLUME, UploadVO.Status.DOWNLOAD_URL_CREATED);
    if (extractMode == Upload.Mode.HTTP_DOWNLOAD && extractURLList.size() > 0){
        return extractURLList.get(0).getId(); // If download url already exists then return
    }else {
        // Track the extraction via a new upload record, starting in COPY_IN_PROGRESS.
        UploadVO uploadJob = _uploadMonitor.createNewUploadEntry(sserver.getId(), volumeId, UploadVO.Status.COPY_IN_PROGRESS, Upload.Type.VOLUME, url, extractMode);
        s_logger.debug("Extract Mode - " +uploadJob.getMode());
        uploadJob = _uploadDao.createForUpdate(uploadJob.getId());
        // Update the async Job
        ExtractResponse resultObj = new ExtractResponse(volumeId, volume.getName(), accountId, UploadVO.Status.COPY_IN_PROGRESS.toString(), uploadJob.getId());
        resultObj.setResponseName(cmd.getCommandName());
        AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor.getCurrentExecutor();
        if (asyncExecutor != null) {
            job = asyncExecutor.getJob();
            _asyncMgr.updateAsyncJobAttachment(job.getId(), Upload.Type.VOLUME.toString(), volumeId);
            _asyncMgr.updateAsyncJobStatus(job.getId(), AsyncJobResult.STATUS_IN_PROGRESS, resultObj);
        }
        // Copy the volume from the source storage pool to secondary storage
        CopyVolumeCommand cvCmd = new CopyVolumeCommand(volume.getId(), volume.getPath(), srcPool, secondaryStorageURL, true);
        CopyVolumeAnswer cvAnswer = (CopyVolumeAnswer) _agentMgr.easySend(sourceHostId, cvCmd);
        // Check if you got a valid answer.
        if (cvAnswer == null || !cvAnswer.getResult()) {
            String errorString = "Failed to copy the volume from the source primary storage pool to secondary storage.";
            //Update the async job.
            resultObj.setResultString(errorString);
            resultObj.setUploadStatus(UploadVO.Status.COPY_ERROR.toString());
            if (asyncExecutor != null) {
                _asyncMgr.completeAsyncJob(job.getId(), AsyncJobResult.STATUS_FAILED, 0, resultObj);
            }
            //Update the DB that volume couldn't be copied
            uploadJob.setUploadState(UploadVO.Status.COPY_ERROR);
            uploadJob.setErrorString(errorString);
            uploadJob.setLastUpdated(new Date());
            _uploadDao.update(uploadJob.getId(), uploadJob);
            throw new CloudRuntimeException(errorString);
        }
        String volumeLocalPath = "volumes/"+volume.getId()+"/"+cvAnswer.getVolumePath()+".vhd";
        //Update the DB that volume is copied and volumePath
        uploadJob.setUploadState(UploadVO.Status.COPY_COMPLETE);
        uploadJob.setLastUpdated(new Date());
        uploadJob.setInstallPath(volumeLocalPath);
        _uploadDao.update(uploadJob.getId(), uploadJob);
        if (extractMode == Mode.FTP_UPLOAD){ // Now that the volume is copied perform the actual uploading
            _uploadMonitor.extractVolume(uploadJob, sserver, volume, url, zoneId, volumeLocalPath, cmd.getStartEventId(), job.getId(), _asyncMgr);
            return uploadJob.getId();
        }else{ // Volume is copied now make it visible under apache and create a URL.
            _uploadMonitor.createVolumeDownloadURL(volumeId, volumeLocalPath, Upload.Type.VOLUME, zoneId, uploadJob.getId());
            return uploadJob.getId();
        }
    }
}
/**
 * Renames a VM instance group after verifying the caller may modify it and
 * the new name isn't already taken by another group of the same account.
 *
 * @return the (possibly renamed) group, re-read from the DB
 * @throws ServerApiException if the group is missing, the caller lacks
 *         permission, or the new name is already in use
 */
@Override
public InstanceGroupVO updateVmGroup(UpdateVMGroupCmd cmd) {
    Account account = UserContext.current().getCaller();
    Long groupId = cmd.getId();
    String groupName = cmd.getGroupName();
    // Verify input parameters
    InstanceGroupVO group = _vmGroupDao.findById(groupId.longValue());
    if (group == null) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "unable to find a vm group with id " + groupId);
    }
    if (account != null) {
        // Non-admins must own the group; admins must have it in a child domain.
        Account tempAccount = _accountDao.findById(group.getAccountId());
        if (!isAdmin(account.getType()) && (account.getId() != group.getAccountId())) {
            throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "unable to find a group with id " + groupId + " for this account");
        } else if (!_domainDao.isChildDomain(account.getDomainId(), tempAccount.getDomainId())) {
            throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Invalid group id (" + groupId + ") given, unable to update the group.");
        }
    }
    //Check if name is already in use by this account (exclude this group)
    // NOTE(review): isNameInUse is queried before the groupName null-check below;
    // presumably the DAO tolerates a null name — confirm.
    boolean isNameInUse = _vmGroupDao.isNameInUse(group.getAccountId(), groupName);
    if (isNameInUse && !group.getName().equals(groupName)) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "Unable to update vm group, a group with name " + groupName + " already exisits for account");
    }
    if (groupName != null) {
        _vmGroupDao.updateVmGroup(groupId, groupName);
    }
    InstanceGroupVO vmGroup = _vmGroupDao.findById(groupId);
    return vmGroup;
}
/**
 * Lists VM instance groups visible to the caller, filtered by optional id,
 * name, keyword, account, and domain (domain filtering covers the whole
 * subtree via a path-prefix join for the admin case).
 *
 * Bug fix vs. original: the keyword sub-criteria {@code ssc} was built but
 * never attached to {@code sc}, so the keyword filter was silently ignored.
 * It is now ANDed in, matching the pattern used by the other search methods
 * (e.g. searchForStoragePools).
 */
@Override
public List<InstanceGroupVO> searchForVmGroups(ListVMGroupsCmd cmd) {
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    Long accountId = null;
    if ((account == null) || isAdmin(account.getType())) {
        if (domainId != null) {
            // Admins may only look into their own domain subtree.
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new ServerApiException(BaseCmd.PARAM_ERROR, "Invalid domain id (" + domainId + ") given, unable to list vm groups.");
            }
            if (accountName != null) {
                account = _accountDao.findActiveAccount(accountName, domainId);
                if (account == null) {
                    throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Unable to find account " + accountName + " in domain " + domainId);
                }
                accountId = account.getId();
            }
        } else {
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
    } else {
        // Regular users only see their own groups.
        accountName = account.getAccountName();
        accountId = account.getId();
        domainId = account.getDomainId();
    }
    Filter searchFilter = new Filter(InstanceGroupVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object id = cmd.getId();
    Object name = cmd.getGroupName();
    Object keyword = cmd.getKeyword();
    SearchBuilder<InstanceGroupVO> sb = _vmGroupDao.createSearchBuilder();
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<InstanceGroupVO> sc = sb.create();
    if (keyword != null) {
        SearchCriteria<InstanceGroupVO> ssc = _vmGroupDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        // Attach the keyword sub-criteria (missing in the original, which made
        // the keyword parameter a no-op).
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (id != null) {
        sc.setParameters("id", id);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (accountId != null) {
        sc.setParameters("accountId", accountId);
    } else if (domainId != null) {
        // "path%" matches the domain itself and all of its descendants.
        DomainVO domain = _domainDao.findById(domainId);
        if (domain != null){
            sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
        }
    }
    return _vmGroupDao.search(sc, searchFilter);
}
/** Returns the instance group the VM belongs to; delegated to the VM manager. */
@Override
public InstanceGroupVO getGroupForVm(long vmId){
    return _vmMgr.getGroupForVm(vmId);
}
/** Lists zone-wide VLANs for a data center, filtered by type and VLAN id; delegated to the VLAN DAO. */
@Override
public List<VlanVO> searchForZoneWideVlans(long dcId, String vlanType, String vlanId){
    return _vlanDao.searchForZoneWideVlans(dcId, vlanType, vlanId);
}
/**
 * Returns the management-server version from the jar manifest's
 * Implementation-Version attribute, or "unknown" when it is absent or empty.
 *
 * Bug fix vs. original: {@code Package.getImplementationVersion()} returns
 * null when the code does not run from a jar with a manifest (e.g. in a dev
 * environment); the original dereferenced it unconditionally and threw NPE.
 */
@Override
public String getVersion(){
    final Class<?> c = ManagementServer.class;
    String fullVersion = c.getPackage().getImplementationVersion();
    if (fullVersion != null && fullVersion.length() > 0) {
        return fullVersion;
    }
    return "unknown";
}
/**
 * Persists a Scheduled-state event recording that an async job was queued,
 * and returns the new event's id.
 */
private Long saveScheduledEvent(Long userId, Long accountId, String type, String description)
{
    EventVO event = new EventVO();
    event.setUserId(userId);
    event.setAccountId(accountId);
    event.setType(type);
    event.setState(Event.State.Scheduled);
    event.setDescription("Scheduled async job for "+description);
    return _eventDao.persist(event).getId();
}
/** Persists a Started-state event; delegated to the shared EventUtils helper. */
@Override
public Long saveStartedEvent(Long userId, Long accountId, String type, String description, long startEventId)
{
    return EventUtils.saveStartedEvent(userId, accountId, type, description, startEventId);
}
/** Persists a completed event with the given level; delegated to the shared EventUtils helper. */
@Override
public Long saveCompletedEvent(Long userId, Long accountId, String level, String type, String description, long startEventId)
{
    return EventUtils.saveEvent(userId, accountId, level, type, description, startEventId);
}
/**
 * Replaces the cloud's custom SSL certificate and pushes it to every console
 * proxy, rebooting each proxy so it picks up the new cert.
 *
 * Flow: lock the single certificate row, validate the uploaded text as X.509,
 * persist it, then send an UpdateCertificateCommand to each console proxy
 * host. Succeeds only when every proxy was updated.
 *
 * Bug fix vs. original: the {@code finally} block unconditionally called
 * {@code cert.getId()}; when {@code listAll()} was empty (IndexOutOfBounds)
 * or the lock acquisition returned null, {@code cert} was null and the
 * finally threw NPE, masking the real exception. The release is now guarded.
 *
 * @return a summary string when all proxies were updated
 * @throws ServerApiException wrapping any validation, persistence, or
 *         distribution failure
 */
@Override @DB
public String uploadCertificate(UploadCustomCertificateCmd cmd) throws ServerApiException{
    CertificateVO cert = null;
    Long certVOId = null;
    try
    {
        Transaction.currentTxn();
        String certificate = cmd.getCertificate();
        cert = _certDao.listAll().get(0); //always 1 record in db (from the deploydb time)
        // Serialize concurrent uploads via the DB lock table.
        cert = _certDao.acquireInLockTable(cert.getId());
        if(cert == null){
            String msg = "Unable to obtain lock on the cert from uploadCertificate()";
            s_logger.error(msg);
            throw new ConcurrentOperationException(msg);
        }else{
            if(cert.getUpdated().equalsIgnoreCase("Y")){
                if(s_logger.isDebugEnabled()) {
                    s_logger.debug("A custom certificate already exists in the DB, will replace it with the new one being uploaded");
                }
            }else{
                if(s_logger.isDebugEnabled()) {
                    s_logger.debug("No custom certificate exists in the DB, will upload a new one");
                }
            }
            //validate if the cert follows X509 format, if not, don't persist to db
            InputStream is = new ByteArrayInputStream(certificate.getBytes("UTF-8"));
            BufferedInputStream bis = new BufferedInputStream(is);
            CertificateFactory cf = CertificateFactory.getInstance("X.509");
            while (bis.available() > 1) {
                Certificate localCert = cf.generateCertificate(bis);//throws certexception if not valid cert format
                if(s_logger.isDebugEnabled()){
                    s_logger.debug("The custom certificate generated for validation is:"+localCert.toString());
                }
            }
            certVOId = _certDao.persistCustomCertToDb(certificate,cert,this.getId());//0 implies failure
            if(s_logger.isDebugEnabled()) {
                s_logger.debug("Custom certificate persisted to the DB");
            }
        }
        if (certVOId != 0)
        {
            //certficate uploaded to db successfully
            //get a list of all Console proxies from the cp table
            List<ConsoleProxyVO> cpList = _consoleProxyDao.listAll();
            if(cpList.size() == 0){
                String msg = "Unable to find any console proxies in the system for certificate update";
                s_logger.warn(msg);
                throw new ExecutionException(msg);
            }
            //get a list of all hosts in host table for type cp
            List<HostVO> cpHosts = _hostDao.listByType(com.cloud.host.Host.Type.ConsoleProxy);
            if(cpHosts.size() == 0){
                String msg = "Unable to find any console proxy hosts in the system for certificate update";
                s_logger.warn(msg);
                throw new ExecutionException(msg);
            }
            //create a hashmap for fast lookup
            Map<String,Long> hostNameToHostIdMap = new HashMap<String, Long>();
            //updated console proxies id list
            List<Long> updatedCpIdList = new ArrayList<Long>();
            for(HostVO cpHost : cpHosts){
                hostNameToHostIdMap.put(cpHost.getName(), cpHost.getId());
            }
            for(ConsoleProxyVO cp : cpList)
            {
                Long cpHostId = hostNameToHostIdMap.get(cp.getName());
                //now send a command to each console proxy host
                UpdateCertificateCommand certCmd = new UpdateCertificateCommand(_certDao.findById(certVOId).getCertificate(), false);
                try {
                    Answer updateCertAns = _agentMgr.send(cpHostId, certCmd);
                    if(updateCertAns.getResult() == true)
                    {
                        //we have the cert copied over on cpvm
                        _consoleProxyMgr.rebootProxy(cp.getId());
                        //when cp reboots, the context will be reinit with the new cert
                        if(s_logger.isDebugEnabled()) {
                            s_logger.debug("Successfully updated custom certificate on console proxy vm id:"+cp.getId()+" ,console proxy host id:"+cpHostId);
                        }
                        updatedCpIdList.add(cp.getId());
                    }
                } catch (AgentUnavailableException e) {
                    s_logger.warn("Unable to send update certificate command to the console proxy resource as agent is unavailable for console proxy vm id:"+cp.getId()+" ,console proxy host id:"+cpHostId, e);
                } catch (OperationTimedoutException e) {
                    s_logger.warn("Unable to send update certificate command to the console proxy resource as there was a timeout for console proxy vm id:"+cp.getId()+" ,console proxy host id:"+cpHostId, e);
                }
            }
            if(updatedCpIdList.size() == cpList.size()){
                //success case, all updated
                return ("Updated:"+updatedCpIdList.size()+" out of:"+cpList.size()+" console proxies");
            }else{
                //failure case, if even one update fails
                throw new ManagementServerException("Updated:"+updatedCpIdList.size()+" out of:"+cpList.size()+" console proxies with successfully updated console proxy ids being:"+(updatedCpIdList.size() > 0 ? updatedCpIdList.toString():""));
            }
        }
        else
        {
            throw new ManagementServerException("Unable to persist custom certificate to the cloud db");
        }
    }catch (Exception e) {
        s_logger.warn("Failed to successfully update the cert across console proxies on management server:"+this.getId());
        // Map each internal failure type onto the appropriate API error code.
        if(e instanceof ExecutionException) {
            throw new ServerApiException(BaseCmd.RESOURCE_UNAVAILABLE_ERROR, e.getMessage());
        } else if(e instanceof ManagementServerException) {
            throw new ServerApiException(BaseCmd.INTERNAL_ERROR, e.getMessage());
        } else if(e instanceof IndexOutOfBoundsException){
            String msg = "Custom certificate record in the db deleted; this should never happen. Please create a new record in the certificate table";
            s_logger.error(msg,e);
            throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
        }
        else if(e instanceof FileNotFoundException){
            String msg = "Invalid file path for custom cert found during cert validation";
            s_logger.error(msg,e);
            throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
        }
        else if(e instanceof CertificateException){
            String msg = "The file format for custom cert does not conform to the X.509 specification";
            s_logger.error(msg,e);
            throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
        }
        else if(e instanceof UnsupportedEncodingException){
            String msg = "Unable to encode the certificate into UTF-8 input stream for validation";
            s_logger.error(msg,e);
            throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
        }
        else if(e instanceof IOException){
            String msg = "Cannot generate input stream during custom cert validation";
            s_logger.error(msg,e);
            throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
        } else {
            String msg = "Cannot upload custom certificate, internal error.";
            s_logger.error(msg,e);
            throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
        }
    }finally{
        // Only release the lock when it was actually acquired; cert is null
        // when listAll() returned nothing or acquireInLockTable failed.
        if (cert != null) {
            _certDao.releaseFromLockTable(cert.getId());
        }
    }
}
/**
 * Returns the configured hypervisor types as an array split from the
 * comma-separated "hypervisor.list" config value, or null when unset/empty.
 *
 * Bug fix vs. original: the original compared the String with {@code == ""}
 * (reference equality), which is never true for a value loaded from the DB,
 * so an empty config yielded {@code new String[]{""}} instead of null.
 */
@Override
public String[] getHypervisors(ListHypervisorsCmd cmd) {
    String hypers = _configDao.getValue(Config.HypervisorList.key());
    if (hypers == null || hypers.length() == 0) {
        return null;
    }
    return hypers.split(",");
}
@Override
// Returns a server-wide secret used for hashing, lazily initialized from the
// HashKey configuration row; a random UUID is generated and stored on first
// use so every management server reads back the same value.
public String getHashKey() {
// although we may have race conditioning here, database transaction serialization should
// give us the same key
if(_hashKey == null) {
_hashKey = _configDao.getValueAndInitIfNotExist(Config.HashKey.key(), UUID.randomUUID().toString());
}
return _hashKey;
}
@Override
// Generates a brand-new SSH key pair for the calling account, persists the
// public half, and returns the pair. The private key is handed back on the
// returned object but is marked transient in createAndSaveSSHKeyPair, so it
// is never written to the database.
public SSHKeyPair createSSHKeyPair(CreateSSHKeyPairCmd cmd) {
Account account = UserContext.current().getCaller();
SSHKeyPairVO s = _sshKeyPairDao.findByName(account.getAccountId(), account.getDomainId(), cmd.getName());
// Key pair names must be unique per (account, domain).
if (s != null)
throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' already exists.");
SSHKeysHelper keys = new SSHKeysHelper();
String name = cmd.getName();
String publicKey = keys.getPublicKey();
String fingerprint = keys.getPublicKeyFingerPrint();
String privateKey = keys.getPrivateKey();
return createAndSaveSSHKeyPair(name, fingerprint, publicKey, privateKey);
}
@Override
// Deletes the caller's stored SSH key pair identified by name; rejects the
// request when no such pair exists for the calling account.
public boolean deleteSSHKeyPair(DeleteSSHKeyPairCmd cmd) {
    final Account caller = UserContext.current().getCaller();
    final SSHKeyPairVO existing =
            _sshKeyPairDao.findByName(caller.getAccountId(), caller.getDomainId(), cmd.getName());
    if (existing == null) {
        throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' does not exist.");
    }
    return _sshKeyPairDao.deleteByName(caller.getAccountId(), caller.getDomainId(), cmd.getName());
}
@Override
// Lists the caller's SSH key pairs. A non-empty name filter takes priority,
// then a non-empty fingerprint filter; otherwise all pairs are returned.
public List<? extends SSHKeyPair> listSSHKeyPairs(ListSSHKeyPairsCmd cmd) {
    final Account caller = UserContext.current().getCaller();
    final long accountId = caller.getAccountId();
    final long domainId = caller.getDomainId();
    final String name = cmd.getName();
    if (name != null && name.length() > 0) {
        return _sshKeyPairDao.listKeyPairsByName(accountId, domainId, name);
    }
    final String fingerprint = cmd.getFingerprint();
    if (fingerprint != null && fingerprint.length() > 0) {
        return _sshKeyPairDao.listKeyPairsByFingerprint(accountId, domainId, fingerprint);
    }
    return _sshKeyPairDao.listKeyPairs(accountId, domainId);
}
/**
 * Registers a user-supplied public key as a named key pair for the calling
 * account; no private key is stored.
 *
 * @param cmd carries the pair name and the raw public key material
 * @return the persisted key pair
 * @throws InvalidParameterValueException if the name is taken or the key
 *         material cannot be parsed
 */
@Override
public SSHKeyPair registerSSHKeyPair(RegisterSSHKeyPairCmd cmd) {
    Account account = UserContext.current().getCaller();
    SSHKeyPairVO s = _sshKeyPairDao.findByName(account.getAccountId(), account.getDomainId(), cmd.getName());
    if (s != null)
        throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' already exists.");
    String name = cmd.getName();
    String publicKey = SSHKeysHelper.getPublicKeyFromKeyMaterial(cmd.getPublicKey());
    // BUG FIX: validate the parsed key BEFORE computing its fingerprint.
    // The original called getPublicKeyFingerprint(publicKey) first, which
    // could NPE on invalid key material instead of raising the intended
    // InvalidParameterValueException.
    if (publicKey == null)
        throw new InvalidParameterValueException("Public key is invalid");
    String fingerprint = SSHKeysHelper.getPublicKeyFingerprint(publicKey);
    return createAndSaveSSHKeyPair(name, fingerprint, publicKey, null);
}
// Persists a key-pair row owned by the calling account and returns it. The
// private key is set on the returned object for the caller's benefit only;
// per the inline comment it is transient and not saved to the database.
private SSHKeyPair createAndSaveSSHKeyPair(String name, String fingerprint, String publicKey, String privateKey) {
Account account = UserContext.current().getCaller();
SSHKeyPairVO newPair = new SSHKeyPairVO();
newPair.setAccountId(account.getAccountId());
newPair.setDomainId(account.getDomainId());
newPair.setName(name);
newPair.setFingerprint(fingerprint);
newPair.setPublicKey(publicKey);
newPair.setPrivateKey(privateKey); // transient; not saved.
_sshKeyPairDao.persist(newPair);
return newPair;
}
@Override
// Returns the encrypted password stored in the VM's detail table
// ("Encrypted.Password"). Only the VM's owning account may read it.
public String getVMPassword(GetVMPasswordCmd cmd) {
Account account = UserContext.current().getCaller();
UserVmVO vm = _userVmDao.findById(cmd.getId());
// The same error is raised for "no such VM" and "VM owned by someone
// else", so callers cannot distinguish the two cases.
if (vm == null || vm.getAccountId() != account.getAccountId())
throw new InvalidParameterValueException("No VM with id '" + cmd.getId() + "' found.");
// Details are lazily loaded; fetch them before reading the password.
_userVmDao.loadDetails(vm);
String password = vm.getDetail("Encrypted.Password");
if (password == null || password.equals(""))
throw new InvalidParameterValueException("No password for VM with id '" + cmd.getId() + "' found.");
return password;
}
}
|
server/src/com/cloud/server/ManagementServerImpl.java
|
/**
* Copyright (C) 2010 Cloud.com, Inc. All rights reserved.
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.cloud.server;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.net.UnknownHostException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.crypto.KeyGenerator;
import javax.crypto.Mac;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import com.cloud.agent.AgentManager;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.GetVncPortAnswer;
import com.cloud.agent.api.GetVncPortCommand;
import com.cloud.agent.api.proxy.UpdateCertificateCommand;
import com.cloud.agent.api.storage.CopyVolumeAnswer;
import com.cloud.agent.api.storage.CopyVolumeCommand;
import com.cloud.alert.Alert;
import com.cloud.alert.AlertManager;
import com.cloud.alert.AlertVO;
import com.cloud.alert.dao.AlertDao;
import com.cloud.api.ApiDBUtils;
import com.cloud.api.BaseCmd;
import com.cloud.api.ServerApiException;
import com.cloud.api.commands.CreateDomainCmd;
import com.cloud.api.commands.CreateSSHKeyPairCmd;
import com.cloud.api.commands.DeleteDomainCmd;
import com.cloud.api.commands.DeletePreallocatedLunCmd;
import com.cloud.api.commands.DeleteSSHKeyPairCmd;
import com.cloud.api.commands.ExtractVolumeCmd;
import com.cloud.api.commands.GetCloudIdentifierCmd;
import com.cloud.api.commands.GetVMPasswordCmd;
import com.cloud.api.commands.ListAccountsCmd;
import com.cloud.api.commands.ListAlertsCmd;
import com.cloud.api.commands.ListAsyncJobsCmd;
import com.cloud.api.commands.ListCapabilitiesCmd;
import com.cloud.api.commands.ListCapacityCmd;
import com.cloud.api.commands.ListCfgsByCmd;
import com.cloud.api.commands.ListClustersCmd;
import com.cloud.api.commands.ListDiskOfferingsCmd;
import com.cloud.api.commands.ListDomainChildrenCmd;
import com.cloud.api.commands.ListDomainsCmd;
import com.cloud.api.commands.ListEventsCmd;
import com.cloud.api.commands.ListGuestOsCategoriesCmd;
import com.cloud.api.commands.ListGuestOsCmd;
import com.cloud.api.commands.ListHostsCmd;
import com.cloud.api.commands.ListHypervisorsCmd;
import com.cloud.api.commands.ListIsosCmd;
import com.cloud.api.commands.ListPodsByCmd;
import com.cloud.api.commands.ListPreallocatedLunsCmd;
import com.cloud.api.commands.ListPublicIpAddressesCmd;
import com.cloud.api.commands.ListRoutersCmd;
import com.cloud.api.commands.ListSSHKeyPairsCmd;
import com.cloud.api.commands.ListServiceOfferingsCmd;
import com.cloud.api.commands.ListStoragePoolsCmd;
import com.cloud.api.commands.ListSystemVMsCmd;
import com.cloud.api.commands.ListTemplateOrIsoPermissionsCmd;
import com.cloud.api.commands.ListTemplatesCmd;
import com.cloud.api.commands.ListUsersCmd;
import com.cloud.api.commands.ListVMGroupsCmd;
import com.cloud.api.commands.ListVlanIpRangesCmd;
import com.cloud.api.commands.ListVolumesCmd;
import com.cloud.api.commands.ListZonesByCmd;
import com.cloud.api.commands.RebootSystemVmCmd;
import com.cloud.api.commands.RegisterCmd;
import com.cloud.api.commands.RegisterPreallocatedLunCmd;
import com.cloud.api.commands.RegisterSSHKeyPairCmd;
import com.cloud.api.commands.StartSystemVMCmd;
import com.cloud.api.commands.StopSystemVmCmd;
import com.cloud.api.commands.UpdateDomainCmd;
import com.cloud.api.commands.UpdateIsoCmd;
import com.cloud.api.commands.UpdateIsoPermissionsCmd;
import com.cloud.api.commands.UpdateTemplateCmd;
import com.cloud.api.commands.UpdateTemplateOrIsoCmd;
import com.cloud.api.commands.UpdateTemplateOrIsoPermissionsCmd;
import com.cloud.api.commands.UpdateTemplatePermissionsCmd;
import com.cloud.api.commands.UpdateVMGroupCmd;
import com.cloud.api.commands.UploadCustomCertificateCmd;
import com.cloud.api.response.ExtractResponse;
import com.cloud.async.AsyncJobExecutor;
import com.cloud.async.AsyncJobManager;
import com.cloud.async.AsyncJobResult;
import com.cloud.async.AsyncJobVO;
import com.cloud.async.BaseAsyncJobExecutor;
import com.cloud.async.dao.AsyncJobDao;
import com.cloud.capacity.CapacityVO;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.certificate.CertificateVO;
import com.cloud.certificate.dao.CertificateDao;
import com.cloud.configuration.Config;
import com.cloud.configuration.ConfigurationManager;
import com.cloud.configuration.ConfigurationVO;
import com.cloud.configuration.ResourceLimitVO;
import com.cloud.configuration.dao.ConfigurationDao;
import com.cloud.configuration.dao.ResourceLimitDao;
import com.cloud.consoleproxy.ConsoleProxyManager;
import com.cloud.dc.AccountVlanMapVO;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenterIpAddressVO;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.PodVlanMapVO;
import com.cloud.dc.Vlan.VlanType;
import com.cloud.dc.VlanVO;
import com.cloud.dc.dao.AccountVlanMapDao;
import com.cloud.dc.dao.ClusterDao;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.dc.dao.DataCenterIpAddressDao;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.dc.dao.PodVlanMapDao;
import com.cloud.dc.dao.VlanDao;
import com.cloud.domain.DomainVO;
import com.cloud.domain.dao.DomainDao;
import com.cloud.event.ActionEvent;
import com.cloud.event.Event;
import com.cloud.event.EventTypes;
import com.cloud.event.EventUtils;
import com.cloud.event.EventVO;
import com.cloud.event.dao.EventDao;
import com.cloud.exception.AgentUnavailableException;
import com.cloud.exception.CloudAuthenticationException;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.ManagementServerException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.exception.PermissionDeniedException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.host.Host;
import com.cloud.host.HostVO;
import com.cloud.host.Status;
import com.cloud.host.dao.HostDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.info.ConsoleProxyInfo;
import com.cloud.network.IPAddressVO;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.network.router.VirtualNetworkApplianceManager;
import com.cloud.network.security.SecurityGroupVO;
import com.cloud.network.security.dao.SecurityGroupDao;
import com.cloud.offering.ServiceOffering;
import com.cloud.server.auth.UserAuthenticator;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.DiskOfferingVO;
import com.cloud.storage.GuestOSCategoryVO;
import com.cloud.storage.GuestOSVO;
import com.cloud.storage.LaunchPermissionVO;
import com.cloud.storage.Storage;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Storage.TemplateType;
import com.cloud.storage.StorageManager;
import com.cloud.storage.StoragePoolHostVO;
import com.cloud.storage.StoragePoolVO;
import com.cloud.storage.StorageStats;
import com.cloud.storage.Upload;
import com.cloud.storage.Upload.Mode;
import com.cloud.storage.UploadVO;
import com.cloud.storage.VMTemplateVO;
import com.cloud.storage.Volume;
import com.cloud.storage.VolumeStats;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.DiskOfferingDao;
import com.cloud.storage.dao.GuestOSCategoryDao;
import com.cloud.storage.dao.GuestOSDao;
import com.cloud.storage.dao.LaunchPermissionDao;
import com.cloud.storage.dao.StoragePoolDao;
import com.cloud.storage.dao.StoragePoolHostDao;
import com.cloud.storage.dao.UploadDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.preallocatedlun.PreallocatedLunVO;
import com.cloud.storage.preallocatedlun.dao.PreallocatedLunDao;
import com.cloud.storage.secondary.SecondaryStorageVmManager;
import com.cloud.storage.upload.UploadMonitor;
import com.cloud.template.TemplateManager;
import com.cloud.template.VirtualMachineTemplate.TemplateFilter;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.AccountVO;
import com.cloud.user.SSHKeyPair;
import com.cloud.user.SSHKeyPairVO;
import com.cloud.user.User;
import com.cloud.user.UserAccount;
import com.cloud.user.UserAccountVO;
import com.cloud.user.UserContext;
import com.cloud.user.UserVO;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.SSHKeyPairDao;
import com.cloud.user.dao.UserAccountDao;
import com.cloud.user.dao.UserDao;
import com.cloud.utils.EnumUtils;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.PasswordGenerator;
import com.cloud.utils.component.Adapters;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.JoinBuilder;
import com.cloud.utils.db.JoinBuilder.JoinType;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.exception.ExecutionException;
import com.cloud.utils.net.MacAddress;
import com.cloud.utils.net.NetUtils;
import com.cloud.utils.ssh.SSHKeysHelper;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.DomainRouterVO;
import com.cloud.vm.InstanceGroupVO;
import com.cloud.vm.SecondaryStorageVmVO;
import com.cloud.vm.UserVmDetailVO;
import com.cloud.vm.UserVmManager;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.dao.ConsoleProxyDao;
import com.cloud.vm.dao.DomainRouterDao;
import com.cloud.vm.dao.InstanceGroupDao;
import com.cloud.vm.dao.SecondaryStorageVmDao;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.UserVmDetailsDao;
import com.cloud.vm.dao.VMInstanceDao;
public class ManagementServerImpl implements ManagementServer {
public static final Logger s_logger = Logger.getLogger(ManagementServerImpl.class.getName());
// ---- DAOs and managers, all wired from the ComponentLocator in the
// ---- constructor below.
private final AccountManager _accountMgr;
private final AgentManager _agentMgr;
private final ConfigurationManager _configMgr;
private final SecurityGroupDao _networkSecurityGroupDao;
private final IPAddressDao _publicIpAddressDao;
private final DataCenterIpAddressDao _privateIpAddressDao;
private final DomainRouterDao _routerDao;
private final ConsoleProxyDao _consoleProxyDao;
private final ClusterDao _clusterDao;
private final SecondaryStorageVmDao _secStorageVmDao;
private final EventDao _eventDao;
private final DataCenterDao _dcDao;
private final VlanDao _vlanDao;
private final AccountVlanMapDao _accountVlanMapDao;
private final PodVlanMapDao _podVlanMapDao;
private final HostDao _hostDao;
private final UserDao _userDao;
private final UserVmDao _userVmDao;
private final ConfigurationDao _configDao;
private final UserVmManager _vmMgr;
private final ConsoleProxyManager _consoleProxyMgr;
private final SecondaryStorageVmManager _secStorageVmMgr;
private final ServiceOfferingDao _offeringsDao;
private final DiskOfferingDao _diskOfferingDao;
private final VMTemplateDao _templateDao;
private final LaunchPermissionDao _launchPermissionDao;
private final DomainDao _domainDao;
private final AccountDao _accountDao;
private final ResourceLimitDao _resourceLimitDao;
private final UserAccountDao _userAccountDao;
private final AlertDao _alertDao;
private final CapacityDao _capacityDao;
private final GuestOSDao _guestOSDao;
private final GuestOSCategoryDao _guestOSCategoryDao;
private final StoragePoolDao _poolDao;
private final StoragePoolHostDao _poolHostDao;
private final StorageManager _storageMgr;
private final Adapters<UserAuthenticator> _userAuthenticators;
private final HostPodDao _hostPodDao;
private final VMInstanceDao _vmInstanceDao;
private final VolumeDao _volumeDao;
private final AlertManager _alertMgr;
private final AsyncJobDao _jobDao;
private final AsyncJobManager _asyncMgr;
private final TemplateManager _tmpltMgr;
// Delay (from "event.purge.delay") gating the periodic event purge task; 0
// disables purging.
private final int _purgeDelay;
private final PreallocatedLunDao _lunDao;
private final InstanceGroupDao _vmGroupDao;
private final UploadMonitor _uploadMonitor;
private final UploadDao _uploadDao;
private final CertificateDao _certDao;
private final SSHKeyPairDao _sshKeyPairDao;
// Single-threaded scheduler used for the periodic event purge.
private final ScheduledExecutorService _eventExecutor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("EventChecker"));
private final StatsCollector _statsCollector;
// Snapshot of the configuration table, loaded once in the constructor.
private final Map<String, String> _configs;
// Guest network domain suffix; always normalized to start with ".".
private String _domain;
private final int _routerRamSize;
private final int _proxyRamSize;
private final int _ssRamSize;
// All time zone ids known to the JVM, for fast membership checks.
private final Map<String, Boolean> _availableIdsMap;
private boolean _isHypervisorSnapshotCapable = false;
// Lazily initialized server-wide secret; see getHashKey().
private String _hashKey = null;
// Wires every DAO/manager dependency from the ComponentLocator, loads the
// configuration snapshot, and schedules the event-purge task when enabled.
protected ManagementServerImpl() {
ComponentLocator locator = ComponentLocator.getLocator(Name);
_lunDao = locator.getDao(PreallocatedLunDao.class);
_configDao = locator.getDao(ConfigurationDao.class);
_routerDao = locator.getDao(DomainRouterDao.class);
_eventDao = locator.getDao(EventDao.class);
_dcDao = locator.getDao(DataCenterDao.class);
_vlanDao = locator.getDao(VlanDao.class);
_accountVlanMapDao = locator.getDao(AccountVlanMapDao.class);
_podVlanMapDao = locator.getDao(PodVlanMapDao.class);
_hostDao = locator.getDao(HostDao.class);
_hostPodDao = locator.getDao(HostPodDao.class);
_jobDao = locator.getDao(AsyncJobDao.class);
_clusterDao = locator.getDao(ClusterDao.class);
_accountMgr = locator.getManager(AccountManager.class);
_agentMgr = locator.getManager(AgentManager.class);
_configMgr = locator.getManager(ConfigurationManager.class);
_vmMgr = locator.getManager(UserVmManager.class);
_consoleProxyMgr = locator.getManager(ConsoleProxyManager.class);
_secStorageVmMgr = locator.getManager(SecondaryStorageVmManager.class);
_storageMgr = locator.getManager(StorageManager.class);
_networkSecurityGroupDao = locator.getDao(SecurityGroupDao.class);
_publicIpAddressDao = locator.getDao(IPAddressDao.class);
_privateIpAddressDao = locator.getDao(DataCenterIpAddressDao.class);
_consoleProxyDao = locator.getDao(ConsoleProxyDao.class);
_secStorageVmDao = locator.getDao(SecondaryStorageVmDao.class);
_userDao = locator.getDao(UserDao.class);
_userVmDao = locator.getDao(UserVmDao.class);
_offeringsDao = locator.getDao(ServiceOfferingDao.class);
_diskOfferingDao = locator.getDao(DiskOfferingDao.class);
_templateDao = locator.getDao(VMTemplateDao.class);
_launchPermissionDao = locator.getDao(LaunchPermissionDao.class);
_domainDao = locator.getDao(DomainDao.class);
_accountDao = locator.getDao(AccountDao.class);
_resourceLimitDao = locator.getDao(ResourceLimitDao.class);
_userAccountDao = locator.getDao(UserAccountDao.class);
_alertDao = locator.getDao(AlertDao.class);
_capacityDao = locator.getDao(CapacityDao.class);
_guestOSDao = locator.getDao(GuestOSDao.class);
_guestOSCategoryDao = locator.getDao(GuestOSCategoryDao.class);
_poolDao = locator.getDao(StoragePoolDao.class);
_poolHostDao = locator.getDao(StoragePoolHostDao.class);
_vmGroupDao = locator.getDao(InstanceGroupDao.class);
_uploadDao = locator.getDao(UploadDao.class);
_certDao = locator.getDao(CertificateDao.class);
// Snapshot the configuration table once; later reads use this map.
_configs = _configDao.getConfiguration();
_vmInstanceDao = locator.getDao(VMInstanceDao.class);
_volumeDao = locator.getDao(VolumeDao.class);
_alertMgr = locator.getManager(AlertManager.class);
_asyncMgr = locator.getManager(AsyncJobManager.class);
_tmpltMgr = locator.getManager(TemplateManager.class);
_uploadMonitor = locator.getManager(UploadMonitor.class);
_sshKeyPairDao = locator.getDao(SSHKeyPairDao.class);
_userAuthenticators = locator.getAdapters(UserAuthenticator.class);
// Logged but not fatal: logins will fail later without an authenticator.
if (_userAuthenticators == null || !_userAuthenticators.isSet()) {
s_logger.error("Unable to find an user authenticator.");
}
// Normalize the guest network domain suffix to start with ".".
_domain = _configs.get("domain");
if (_domain == null) {
_domain = ".myvm.com";
}
if (!_domain.startsWith(".")) {
_domain = "." + _domain;
}
String value = _configs.get("account.cleanup.interval");
int cleanup = NumbersUtil.parseInt(value, 60 * 60 * 24); // default: 24 hours (86400 seconds).
// Default RAM sizes for system VMs (router, console proxy, secondary
// storage VM), each overridable through configuration.
_routerRamSize = NumbersUtil.parseInt(_configs.get("router.ram.size"),VirtualNetworkApplianceManager.DEFAULT_ROUTER_VM_RAMSIZE);
_proxyRamSize = NumbersUtil.parseInt(_configs.get("consoleproxy.ram.size"), ConsoleProxyManager.DEFAULT_PROXY_VM_RAMSIZE);
_ssRamSize = NumbersUtil.parseInt(_configs.get("secstorage.ram.size"), SecondaryStorageVmManager.DEFAULT_SS_VM_RAMSIZE);
_statsCollector = StatsCollector.getInstance(_configs);
// Event purging is disabled when the delay is 0; note the schedule period
// comes from "account.cleanup.interval", not from the purge delay.
_purgeDelay = NumbersUtil.parseInt(_configs.get("event.purge.delay"), 0);
if(_purgeDelay != 0){
_eventExecutor.scheduleAtFixedRate(new EventPurgeTask(), cleanup, cleanup, TimeUnit.SECONDS);
}
// Precompute the set of valid JVM time zone ids for quick lookup.
String[] availableIds = TimeZone.getAvailableIDs();
_availableIdsMap = new HashMap<String, Boolean>(availableIds.length);
for (String id: availableIds) {
_availableIdsMap.put(id, true);
}
}
// Exposes the configuration snapshot loaded in the constructor.
protected Map<String, String> getConfigs() {
return _configs;
}
@Override
// Delegates to the stats collector for the latest storage stats of a host.
public StorageStats getStorageStatistics(long hostId) {
return _statsCollector.getStorageStats(hostId);
}
@Override
// Records a pre-allocated LUN for a zone together with any comma-separated
// tags from the command; tags are individually trimmed before persisting.
public PreallocatedLunVO registerPreallocatedLun(RegisterPreallocatedLunCmd cmd) {
    final String rawTags = cmd.getTags();
    final String[] tags;
    if (rawTags == null) {
        tags = new String[0];
    } else {
        tags = rawTags.split(",");
        for (int i = 0; i < tags.length; i++) {
            tags[i] = tags[i].trim();
        }
    }
    final PreallocatedLunVO lunVo = new PreallocatedLunVO(
            cmd.getZoneId(), cmd.getPortal(), cmd.getTargetIqn(), cmd.getLun(), cmd.getDiskSize());
    return _lunDao.persist(lunVo, tags);
}
@Override
// Deletes a registered LUN; refuses when the id is unknown or the LUN is
// currently taken (in use).
public boolean unregisterPreallocatedLun(DeletePreallocatedLunCmd cmd) throws IllegalArgumentException {
    final Long id = cmd.getId();
    final PreallocatedLunVO lun = _lunDao.findById(id);
    if (lun == null) {
        throw new IllegalArgumentException("Unable to find a LUN with ID " + id);
    }
    if (lun.getTaken() != null) {
        throw new IllegalArgumentException("The LUN is currently in use and cannot be deleted.");
    }
    return _lunDao.delete(id);
}
@Override
// Delegates to the stats collector for stats of the given volume ids.
public VolumeStats[] getVolumeStatistics(long[] volIds) {
return _statsCollector.getVolumeStats(volIds);
}
@Override
// Sets a new (MD5-hashed) password for the given user and returns the
// plaintext that was set.
// NOTE(review): the old-password verification is commented out below, so the
// oldPassword argument is never checked -- confirm this is intentional.
public String updateAdminPassword(long userId, String oldPassword, String newPassword) {
// String old = StringToMD5(oldPassword);
// User user = getUser(userId);
// if (old.equals(user.getPassword())) {
UserVO userVO = _userDao.createForUpdate(userId);
userVO.setPassword(StringToMD5(newPassword));
_userDao.update(userId, userVO);
return newPassword;
// } else {
// return null;
// }
}
/**
 * Computes the MD5 digest of the given string and returns it as a
 * 32-character, zero-padded, lowercase hex string.
 *
 * NOTE(review): MD5 is cryptographically weak for password hashing, but
 * changing the algorithm here would invalidate all stored hashes.
 * NOTE(review): string.getBytes() uses the platform charset; non-ASCII
 * passwords may hash differently across platforms -- confirm before changing.
 *
 * @param string input to hash
 * @return 32-hex-digit MD5 of the input
 * @throws CloudRuntimeException if the MD5 algorithm is unavailable
 */
private String StringToMD5(String string) {
    MessageDigest md5;
    try {
        md5 = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        throw new CloudRuntimeException("Error", e);
    }
    md5.reset();
    BigInteger pwInt = new BigInteger(1, md5.digest(string.getBytes()));
    // "%032x" zero-pads the lowercase hex representation to 32 digits,
    // replacing the original manual StringBuffer padding loop.
    return String.format("%032x", pwInt);
}
/**
 * Looks up a user by id via {@code _userDao.getUser} (the active-user
 * lookup; see {@code getUser(long, boolean)}).
 *
 * @param userId database id of the user
 * @return the user, or null when not found
 */
@Override
public User getUser(long userId) {
    if (s_logger.isDebugEnabled()) {
        // Typo fix in the debug message: "Retrieiving" -> "Retrieving".
        s_logger.debug("Retrieving user with id: " + userId);
    }
    UserVO user = _userDao.getUser(userId);
    if (user == null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to find user with id " + userId);
        }
        return null;
    }
    return user;
}
/**
 * Looks up a user by id.
 *
 * @param userId database id of the user
 * @param active when true, uses {@code _userDao.getUser}; when false, uses
 *               {@code _userDao.findById} (returns the row regardless of
 *               state)
 * @return the user, or null when not found
 */
@Override
public User getUser(long userId, boolean active) {
    if (s_logger.isDebugEnabled()) {
        // Typo fix in the debug message: "Retrieiving" -> "Retrieving".
        s_logger.debug("Retrieving user with id: " + userId + " and active = " + active);
    }
    if (active) {
        return _userDao.getUser(userId);
    }
    return _userDao.findById(userId);
}
/**
 * Finds the user account for the given username within a domain.
 *
 * @param username login name to look up
 * @param domainId domain to search in
 * @return the user account, or null if none matches
 */
@Override
public UserAccount getUserAccount(String username, Long domainId) {
    if (s_logger.isDebugEnabled()) {
        // Typo fix in the debug message: "Retrieiving" -> "Retrieving".
        s_logger.debug("Retrieving user: " + username + " in domain " + domainId);
    }
    UserAccount userAccount = _userAccountDao.getUserAccount(username, domainId);
    if (userAccount == null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to find user with name " + username + " in domain " + domainId);
        }
        return null;
    }
    return userAccount;
}
// Authenticates a login attempt: loads the user account, rejects users or
// accounts that are not in the enabled state, then delegates credential
// checking to the first configured UserAuthenticator. Returns the account on
// success, null when the user is unknown or the password is wrong, and
// throws CloudAuthenticationException for disabled/locked users.
private UserAccount getUserAccount(String username, String password, Long domainId) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Attempting to log in user: " + username + " in domain " + domainId);
}
UserAccount userAccount = _userAccountDao.getUserAccount(username, domainId);
if (userAccount == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Unable to find user with name " + username + " in domain " + domainId);
}
return null;
}
// Domain name is resolved only for friendlier messages; it may remain
// null if the domain row is missing.
DomainVO domain = _domainDao.findById(domainId);
String domainName = null;
if(domain != null) {
domainName = domain.getName();
}
// Both the user state and the owning account state must be "enabled".
if (!userAccount.getState().equalsIgnoreCase(Account.State.enabled.toString()) || !userAccount.getAccountState().equalsIgnoreCase(Account.State.enabled.toString())) {
if (s_logger.isInfoEnabled()) {
s_logger.info("User " + username + " in domain " + domainName + " is disabled/locked (or account is disabled/locked)");
}
throw new CloudAuthenticationException("User " + username + " in domain " + domainName + " is disabled/locked (or account is disabled/locked)");
//return null;
}
// We only use the first adapter even if multiple have been
// configured
Enumeration<UserAuthenticator> en = _userAuthenticators.enumeration();
UserAuthenticator authenticator = en.nextElement();
boolean authenticated = authenticator.authenticate(username, password, domainId);
if (authenticated) {
return userAccount;
} else {
return null;
}
}
@Override
// Resolves the (user, account) pair owning the given API key, or null via
// the DAO when the key is unknown.
public Pair<User, Account> findUserByApiKey(String apiKey) {
return _accountDao.findUserAccountByApiKey(apiKey);
}
/**
 * Retrieves an account by id.
 *
 * @param accountId database id of the account
 * @return the account, or null if it does not exist
 */
@Override
public Account getAccount(long accountId) {
    if (s_logger.isDebugEnabled()) {
        // Typo fix in the debug message: "Retrieiving" -> "Retrieving".
        s_logger.debug("Retrieving account with id: " + accountId);
    }
    AccountVO account = _accountDao.findById(Long.valueOf(accountId));
    if (account == null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to find account with id " + accountId);
        }
        return null;
    }
    return account;
}
@Override
// Regenerates both API credentials for a user: element 0 is the API key,
// element 1 the secret key. Either element may be null when key generation
// fails to find a unique value (see createApiKey / createSecretKey).
public String[] createApiKeyAndSecretKey(RegisterCmd cmd) {
Long userId = cmd.getId();
User user = _userDao.findById(userId);
if (user == null) {
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "unable to find user for id : " + userId);
}
// generate both an api key and a secret key, update the user table with the keys, return the keys to the user
String[] keys = new String[2];
keys[0] = createApiKey(userId);
keys[1] = createSecretKey(userId);
return keys;
}
// Generates a URL-safe, base64-encoded HmacSHA1 key to serve as the user's
// API key, retrying up to 10 times to avoid colliding with a key already
// assigned to another user. Stores the key on the user row. Returns null
// when no unique key was found or the algorithm is unavailable.
private String createApiKey(Long userId) {
User user = findUserById(userId);
try {
UserVO updatedUser = _userDao.createForUpdate();
String encodedKey = null;
Pair<User, Account> userAcct = null;
int retryLimit = 10;
do {
// FIXME: what algorithm should we use for API keys?
KeyGenerator generator = KeyGenerator.getInstance("HmacSHA1");
SecretKey key = generator.generateKey();
encodedKey = Base64.encodeBase64URLSafeString(key.getEncoded());
// Loop until the generated key is not already in use.
userAcct = _accountDao.findUserAccountByApiKey(encodedKey);
retryLimit--;
} while ((userAcct != null) && (retryLimit >= 0));
// Retries exhausted without finding an unused key: give up silently.
if (userAcct != null) {
return null;
}
updatedUser.setApiKey(encodedKey);
_userDao.update(user.getId(), updatedUser);
return encodedKey;
} catch (NoSuchAlgorithmException ex) {
s_logger.error("error generating secret key for user: " + user.getUsername(), ex);
}
return null;
}
// Generates a URL-safe, base64-encoded HmacSHA1 key to serve as the user's
// secret key, retrying up to 10 times to avoid colliding with another
// user's secret key. Stores the key on the user row. Returns null when no
// unique key was found or the algorithm is unavailable.
private String createSecretKey(Long userId) {
User user = findUserById(userId);
try {
UserVO updatedUser = _userDao.createForUpdate();
String encodedKey = null;
int retryLimit = 10;
UserVO userBySecretKey = null;
do {
KeyGenerator generator = KeyGenerator.getInstance("HmacSHA1");
SecretKey key = generator.generateKey();
encodedKey = Base64.encodeBase64URLSafeString(key.getEncoded());
// Loop until the generated key is not already in use.
userBySecretKey = _userDao.findUserBySecretKey(encodedKey);
retryLimit--;
} while ((userBySecretKey != null) && (retryLimit >= 0));
// Retries exhausted without finding an unused key: give up silently.
if (userBySecretKey != null) {
return null;
}
updatedUser.setSecretKey(encodedKey);
_userDao.update(user.getId(), updatedUser);
return encodedKey;
} catch (NoSuchAlgorithmException ex) {
s_logger.error("error generating secret key for user: " + user.getUsername(), ex);
}
return null;
}
@Override
// Builds an AND-ed search over the public IP table from the optional
// filters and returns the matching rows.
public List<IPAddressVO> listPublicIpAddressesBy(Long accountId, boolean allocatedOnly, Long zoneId, Long vlanDbId) {
    final SearchCriteria<IPAddressVO> criteria = _publicIpAddressDao.createSearchCriteria();
    if (accountId != null) {
        criteria.addAnd("accountId", SearchCriteria.Op.EQ, accountId);
    }
    if (zoneId != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    if (vlanDbId != null) {
        criteria.addAnd("vlanDbId", SearchCriteria.Op.EQ, vlanDbId);
    }
    if (allocatedOnly) {
        // Restrict to rows whose "allocated" column is non-null.
        criteria.addAnd("allocated", SearchCriteria.Op.NNULL);
    }
    return _publicIpAddressDao.search(criteria, null);
}
@Override
// Lists private IPs for a pod within a zone; both ids are required,
// otherwise an empty (mutable) list is returned.
public List<DataCenterIpAddressVO> listPrivateIpAddressesBy(Long podId, Long zoneId) {
    if (podId == null || zoneId == null) {
        return new ArrayList<DataCenterIpAddressVO>();
    }
    return _privateIpAddressDao.listByPodIdDcId(podId.longValue(), zoneId.longValue());
}
@Override
// Produces a 6-character random password via the shared generator.
public String generateRandomPassword() {
return PasswordGenerator.generateRandomPassword(6);
}
@Override
// Attaches or detaches an ISO to/from a user VM and, on success, keeps the
// VM row's isoId column in sync with the new state.
// NOTE(review): neither vm nor iso is null-checked before use -- confirm
// that callers validate both ids beforehand.
public boolean attachISOToVM(long vmId, long userId, long isoId, boolean attach) {
UserVmVO vm = _userVmDao.findById(vmId);
VMTemplateVO iso = _templateDao.findById(isoId);
boolean success = _vmMgr.attachISOToVM(vmId, isoId, attach);
if (success) {
if (attach) {
vm.setIsoId(iso.getId());
} else {
// Detach clears the column.
vm.setIsoId(null);
}
_userVmDao.update(vmId, vm);
}
return success;
}
    /**
     * Lists the data centers (zones) visible to the caller.
     *
     * Visibility rules implemented below:
     *  - domainId supplied: only zones associated with that domain (no permission check here);
     *  - no caller account, or admin caller: all zones;
     *  - normal user: zones of the user's domain and every ancestor up to root, plus all public zones;
     *  - domain admin: zones of the admin's domain, its ancestors, and all child domains, plus public zones.
     * If cmd.isAvailable() is explicitly false, the list is further restricted to
     * zones where the account already has a router. If an id is supplied, the
     * final result is filtered down to that single zone.
     *
     * NOTE(review): for a caller whose account type is none of admin / normal /
     * domain-admin and who passes no domainId, {@code dcs} stays null and the
     * code below would NPE -- confirm such account types cannot reach this API.
     */
    @Override
    public List<DataCenterVO> listDataCenters(ListZonesByCmd cmd) {
        Account account = UserContext.current().getCaller();
        List<DataCenterVO> dcs = null;
        Long domainId = cmd.getDomainId();
        Long id = cmd.getId();
        if(domainId != null){
            //for domainId != null
            //right now, we made the decision to only list zones associated with this domain
            dcs = _dcDao.findZonesByDomainId(domainId); //private zones
        }
        else if((account == null || account.getType() == Account.ACCOUNT_TYPE_ADMIN)){
            dcs = _dcDao.listAll(); //all zones
        }else if(account.getType() == Account.ACCOUNT_TYPE_NORMAL){
            //it was decided to return all zones for the user's domain, and everything above till root
            //list all zones belonging to this domain, and all of its parents
            //check the parent, if not null, add zones for that parent to list
            dcs = new ArrayList<DataCenterVO>();
            DomainVO domainRecord = _domainDao.findById(account.getDomainId());
            if(domainRecord != null)
            {
                // Walk up the domain tree, collecting zones at each level.
                while(true){
                    dcs.addAll(_dcDao.findZonesByDomainId(domainRecord.getId()));
                    if(domainRecord.getParent() != null) {
                        domainRecord = _domainDao.findById(domainRecord.getParent());
                    } else {
                        break;
                    }
                }
            }
            //add all public zones too
            dcs.addAll(_dcDao.listPublicZones());
        }else if(account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN){
            //it was decided to return all zones for the domain admin, and everything above till root
            dcs = new ArrayList<DataCenterVO>();
            DomainVO domainRecord = _domainDao.findById(account.getDomainId());
            //this covers path till root
            if(domainRecord != null)
            {
                DomainVO localRecord = domainRecord;
                // Walk up the domain tree, collecting zones at each level.
                while(true){
                    dcs.addAll(_dcDao.findZonesByDomainId(localRecord.getId()));
                    if(localRecord.getParent() != null) {
                        localRecord = _domainDao.findById(localRecord.getParent());
                    } else {
                        break;
                    }
                }
            }
            //this covers till leaf
            if(domainRecord != null){
                //find all children for this domain based on a like search by path
                List<DomainVO> allChildDomains = _domainDao.findAllChildren(domainRecord.getPath(), domainRecord.getId());
                List<Long> allChildDomainIds = new ArrayList<Long>();
                //create list of domainIds for search
                for(DomainVO domain : allChildDomains){
                    allChildDomainIds.add(domain.getId());
                }
                //now make a search for zones based on this
                if(allChildDomainIds.size() > 0){
                    List<DataCenterVO> childZones = _dcDao.findChildZones((allChildDomainIds.toArray()));
                    dcs.addAll(childZones);
                }
            }
            //add all public zones too
            dcs.addAll(_dcDao.listPublicZones());
        }
        Boolean available = cmd.isAvailable();
        if (account != null) {
            if ((available != null) && Boolean.FALSE.equals(available)) {
                // "available == false" means: only zones where this account already
                // has a domain router deployed.
                List<DomainRouterVO> routers = _routerDao.listBy(account.getId());
                for (Iterator<DataCenterVO> iter = dcs.iterator(); iter.hasNext();) {
                    DataCenterVO dc = iter.next();
                    boolean found = false;
                    for (DomainRouterVO router : routers) {
                        if (dc.getId() == router.getDataCenterId()) {
                            found = true;
                            break;
                        }
                    }
                    if (!found) {
                        iter.remove();
                    }
                }
            }
        }
        if (id != null) {
            // An explicit zone id reduces the computed list to that single zone.
            List<DataCenterVO> singleZone = new ArrayList<DataCenterVO>();
            for (DataCenterVO zone : dcs) {
                if (zone.getId() == id) {
                    singleZone.add(zone);
                }
            }
            return singleZone;
        }
        return dcs;
    }
@Override
public HostVO getHostBy(long hostId) {
return _hostDao.findById(hostId);
}
@Override
public long getId() {
return MacAddress.getMacAddress().toLong();
}
protected void checkPortParameters(String publicPort, String privatePort, String privateIp, String proto) throws InvalidParameterValueException {
if (!NetUtils.isValidPort(publicPort)) {
throw new InvalidParameterValueException("publicPort is an invalid value");
}
if (!NetUtils.isValidPort(privatePort)) {
throw new InvalidParameterValueException("privatePort is an invalid value");
}
// s_logger.debug("Checking if " + privateIp + " is a valid private IP address. Guest IP address is: " + _configs.get("guest.ip.network"));
//
// if (!NetUtils.isValidPrivateIp(privateIp, _configs.get("guest.ip.network"))) {
// throw new InvalidParameterValueException("Invalid private ip address");
// }
if (!NetUtils.isValidProto(proto)) {
throw new InvalidParameterValueException("Invalid protocol");
}
}
@Override
public List<EventVO> getEvents(long userId, long accountId, Long domainId, String type, String level, Date startDate, Date endDate) {
SearchCriteria<EventVO> sc = _eventDao.createSearchCriteria();
if (userId > 0) {
sc.addAnd("userId", SearchCriteria.Op.EQ, userId);
}
if (accountId > 0) {
sc.addAnd("accountId", SearchCriteria.Op.EQ, accountId);
}
if (domainId != null) {
sc.addAnd("domainId", SearchCriteria.Op.EQ, domainId);
}
if (type != null) {
sc.addAnd("type", SearchCriteria.Op.EQ, type);
}
if (level != null) {
sc.addAnd("level", SearchCriteria.Op.EQ, level);
}
if (startDate != null && endDate != null) {
startDate = massageDate(startDate, 0, 0, 0);
endDate = massageDate(endDate, 23, 59, 59);
sc.addAnd("createDate", SearchCriteria.Op.BETWEEN, startDate, endDate);
} else if (startDate != null) {
startDate = massageDate(startDate, 0, 0, 0);
sc.addAnd("createDate", SearchCriteria.Op.GTEQ, startDate);
} else if (endDate != null) {
endDate = massageDate(endDate, 23, 59, 59);
sc.addAnd("createDate", SearchCriteria.Op.LTEQ, endDate);
}
return _eventDao.search(sc, null);
}
private Date massageDate(Date date, int hourOfDay, int minute, int second) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, hourOfDay);
cal.set(Calendar.MINUTE, minute);
cal.set(Calendar.SECOND, second);
return cal.getTime();
}
    /**
     * Searches user accounts visible to the caller.
     *
     * Behavior:
     *  - a supplied domainId must be a child of the caller's domain, otherwise
     *    PermissionDeniedException; without one, the caller's own domain (or root)
     *    is used;
     *  - the system user (id == 1) is never returned: an explicit id of 1 yields
     *    an empty list, and otherwise id 1 is excluded via a NEQ condition;
     *  - when no accountName is given, the domain filter is applied recursively
     *    via a LIKE on the domain path (INNER join with the domain table);
     *  - keyword performs a LIKE match across several user/account columns.
     *
     * @throws PermissionDeniedException when the requested domain is outside the
     *         caller's domain subtree
     */
    @Override
    public List<UserAccountVO> searchForUsers(ListUsersCmd cmd) throws PermissionDeniedException {
        Account account = UserContext.current().getCaller();
        Long domainId = cmd.getDomainId();
        if (domainId != null) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list users.");
            }
        } else {
            // default domainId to the admin's domain
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
        Filter searchFilter = new Filter(UserAccountVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
        Long id = cmd.getId();
        Object username = cmd.getUsername();
        Object type = cmd.getAccountType();
        Object accountName = cmd.getAccountName();
        Object state = cmd.getState();
        Object keyword = cmd.getKeyword();
        SearchBuilder<UserAccountVO> sb = _userAccountDao.createSearchBuilder();
        sb.and("username", sb.entity().getUsername(), SearchCriteria.Op.LIKE);
        if (id != null && id == 1) {
            //system user should NOT be searchable
            List<UserAccountVO> emptyList = new ArrayList<UserAccountVO>();
            return emptyList;
        } else if (id != null) {
            sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
        } else {
            //this condition is used to exclude system user from the search results
            sb.and("id", sb.entity().getId(), SearchCriteria.Op.NEQ);
        }
        sb.and("type", sb.entity().getType(), SearchCriteria.Op.EQ);
        sb.and("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
        sb.and("accountName", sb.entity().getAccountName(), SearchCriteria.Op.LIKE);
        sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ);
        if ((accountName == null) && (domainId != null)) {
            // No account name: filter by domain subtree using a path LIKE join.
            SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
            domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
            sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
        }
        SearchCriteria<UserAccountVO> sc = sb.create();
        if (keyword != null) {
            // Free-text keyword: OR across the searchable columns, ANDed into the
            // main criteria under the "username" condition slot.
            SearchCriteria<UserAccountVO> ssc = _userAccountDao.createSearchCriteria();
            ssc.addOr("username", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("firstname", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("lastname", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("email", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("accountName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("accountState", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            sc.addAnd("username", SearchCriteria.Op.SC, ssc);
        }
        if (username != null) {
            sc.setParameters("username", "%" + username + "%");
        }
        if (id != null) {
            sc.setParameters("id", id);
        } else {
            //Don't return system user, search builder with NEQ
            sc.setParameters("id", 1);
        }
        if (type != null) {
            sc.setParameters("type", type);
        }
        if (accountName != null) {
            sc.setParameters("accountName", "%" + accountName + "%");
            if (domainId != null) {
                sc.setParameters("domainId", domainId);
            }
        } else if (domainId != null) {
            // Subtree match: the domain's path plus any descendants.
            DomainVO domainVO = _domainDao.findById(domainId);
            sc.setJoinParameters("domainSearch", "path", domainVO.getPath() + "%");
        }
        if (state != null) {
            sc.setParameters("state", state);
        }
        return _userAccountDao.search(sc, searchFilter);
    }
//This method is used for permissions check for both disk and service offerings
private boolean isPermissible(Long accountDomainId, Long offeringDomainId){
if(accountDomainId == offeringDomainId)
{
return true; // account and service offering in same domain
}
DomainVO domainRecord = _domainDao.findById(accountDomainId);
if(domainRecord != null){
while(true){
if(domainRecord.getId() == offeringDomainId) {
return true;
}
//try and move on to the next domain
if(domainRecord.getParent() != null) {
domainRecord = _domainDao.findById(domainRecord.getParent());
} else {
break;
}
}
}
return false;
}
    /**
     * Searches service offerings visible to the caller.
     *
     * Behavior:
     *  - domainId supplied: returns that domain's offerings (admins skip the
     *    permission check; other callers must be in the same domain hierarchy);
     *  - normal users and domain admins: delegated to
     *    searchServiceOfferingsInternal (domain + ancestors + public offerings);
     *  - root admins: plain search with the optional keyword/vmId/id/name filters.
     * System-use offerings are always excluded. The vmId filter excludes the VM's
     * current offering and restricts to offerings with the same local-storage
     * preference.
     *
     * @throws InvalidParameterValueException when the vmId does not resolve to a VM
     * @throws PermissionDeniedException when the VM belongs to another account
     */
    @Override
    public List<ServiceOfferingVO> searchForServiceOfferings(ListServiceOfferingsCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
        //Note
        //The list method for offerings is being modified in accordance with discussion with Will/Kevin
        //For now, we will be listing the following based on the usertype
        //1. For root, we will list all offerings
        //2. For domainAdmin and regular users, we will list everything in their domains+parent domains ... all the way till root
        Filter searchFilter = new Filter(ServiceOfferingVO.class, "created", false, cmd.getStartIndex(), cmd.getPageSizeVal());
        SearchCriteria<ServiceOfferingVO> sc = _offeringsDao.createSearchCriteria();
        Account account = UserContext.current().getCaller();
        Object name = cmd.getServiceOfferingName();
        Object id = cmd.getId();
        Object keyword = cmd.getKeyword();
        Long vmId = cmd.getVirtualMachineId();
        Long domainId = cmd.getDomainId();
        //Keeping this logic consistent with domain specific zones
        //if a domainId is provided, we just return the so associated with this domain
        if(domainId != null){
            if(account.getType() == Account.ACCOUNT_TYPE_ADMIN){
                return _offeringsDao.findServiceOfferingByDomainId(domainId);//no perm check
            }else{
                //check if the user's domain == so's domain || user's domain is a child of so's domain
                if(isPermissible(account.getDomainId(), domainId)){
                    //perm check succeeded
                    return _offeringsDao.findServiceOfferingByDomainId(domainId);
                }else{
                    throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "The account:"+account.getAccountName()+" does not fall in the same domain hierarchy as the service offering");
                }
            }
        }
        //For non-root users
        if((account.getType() == Account.ACCOUNT_TYPE_NORMAL || account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)){
            return searchServiceOfferingsInternal(account, name, id, vmId, keyword, searchFilter);
        }
        //for root users, the existing flow
        if (keyword != null) {
            // Keyword match against display text and name.
            SearchCriteria<ServiceOfferingVO> ssc = _offeringsDao.createSearchCriteria();
            ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            sc.addAnd("name", SearchCriteria.Op.SC, ssc);
        } else if (vmId != null) {
            UserVmVO vmInstance = _userVmDao.findById(vmId);
            if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
                throw new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
            }
            if ((account != null) && !isAdmin(account.getType())) {
                if (account.getId() != vmInstance.getAccountId()) {
                    throw new PermissionDeniedException("unable to find a virtual machine with id " + vmId + " for this account");
                }
            }
            // Exclude the VM's current offering; keep the storage preference consistent.
            ServiceOfferingVO offering = _offeringsDao.findById(vmInstance.getServiceOfferingId());
            sc.addAnd("id", SearchCriteria.Op.NEQ, offering.getId());
            // Only return offerings with the same Guest IP type and storage pool preference
            //sc.addAnd("guestIpType", SearchCriteria.Op.EQ, offering.getGuestIpType());
            sc.addAnd("useLocalStorage", SearchCriteria.Op.EQ, offering.getUseLocalStorage());
        }
        if (id != null) {
            sc.addAnd("id", SearchCriteria.Op.EQ, id);
        }
        if (name != null) {
            sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
        }
        sc.addAnd("systemUse", SearchCriteria.Op.EQ, false);
        return _offeringsDao.search(sc, searchFilter);
    }
    /**
     * Collects service offerings for a normal user or domain admin: offerings of
     * the account's domain and of every ancestor domain up to root, rebuilding
     * the criteria (keyword / vmId / id / name filters, system-use exclusion) at
     * each level. Public offerings are appended only when no explicit filter was
     * given (any filter clears {@code includePublicOfferings}).
     *
     * @throws CloudAuthenticationException when the account's domain cannot be resolved
     */
    private List<ServiceOfferingVO> searchServiceOfferingsInternal(Account account, Object name, Object id, Long vmId, Object keyword, Filter searchFilter){
        //it was decided to return all offerings for the user's domain, and everything above till root (for normal user or domain admin)
        //list all offerings belonging to this domain, and all of its parents
        //check the parent, if not null, add offerings for that parent to list
        List<ServiceOfferingVO> sol = new ArrayList<ServiceOfferingVO>();
        DomainVO domainRecord = _domainDao.findById(account.getDomainId());
        boolean includePublicOfferings = true;
        if(domainRecord != null)
        {
            // One pass per domain level, walking toward the root.
            while(true){
                SearchCriteria<ServiceOfferingVO> sc = _offeringsDao.createSearchCriteria();
                if (keyword != null) {
                    includePublicOfferings = false;
                    SearchCriteria<ServiceOfferingVO> ssc = _offeringsDao.createSearchCriteria();
                    ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                    ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                    sc.addAnd("name", SearchCriteria.Op.SC, ssc);
                } else if (vmId != null) {
                    includePublicOfferings = false;
                    UserVmVO vmInstance = _userVmDao.findById(vmId);
                    if ((vmInstance == null) || (vmInstance.getRemoved() != null)) {
                        throw new InvalidParameterValueException("unable to find a virtual machine with id " + vmId);
                    }
                    if ((account != null) && !isAdmin(account.getType())) {
                        if (account.getId() != vmInstance.getAccountId()) {
                            throw new PermissionDeniedException("unable to find a virtual machine with id " + vmId + " for this account");
                        }
                    }
                    // Exclude the VM's current offering; keep IP type and storage
                    // preference consistent with it.
                    ServiceOfferingVO offering = _offeringsDao.findById(vmInstance.getServiceOfferingId());
                    sc.addAnd("id", SearchCriteria.Op.NEQ, offering.getId());
                    // Only return offerings with the same Guest IP type and storage pool preference
                    sc.addAnd("guestIpType", SearchCriteria.Op.EQ, offering.getGuestIpType());
                    sc.addAnd("useLocalStorage", SearchCriteria.Op.EQ, offering.getUseLocalStorage());
                }
                if (id != null) {
                    includePublicOfferings = false;
                    sc.addAnd("id", SearchCriteria.Op.EQ, id);
                }
                if (name != null) {
                    includePublicOfferings = false;
                    sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
                }
                sc.addAnd("systemUse", SearchCriteria.Op.EQ, false);
                //for this domain
                sc.addAnd("domainId", SearchCriteria.Op.EQ, domainRecord.getId());
                //search and add for this domain
                sol.addAll(_offeringsDao.search(sc, searchFilter));
                //try and move on to the next domain
                if(domainRecord.getParent() != null) {
                    domainRecord = _domainDao.findById(domainRecord.getParent());
                }
                else {
                    break;//now we got all the offerings for this user/dom adm
                }
            }
        }else{
            s_logger.error("Could not find the domainId for account:"+account.getAccountName());
            throw new CloudAuthenticationException("Could not find the domainId for account:"+account.getAccountName());
        }
        //add all the public offerings to the sol list before returning
        if(includePublicOfferings) {
            sol.addAll(_offeringsDao.findPublicServiceOfferings());
        }
        return sol;
    }
@Override
public List<ClusterVO> searchForClusters(ListClustersCmd cmd) {
Filter searchFilter = new Filter(ClusterVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<ClusterVO> sc = _clusterDao.createSearchCriteria();
Object id = cmd.getId();
Object name = cmd.getClusterName();
Object podId = cmd.getPodId();
Object zoneId = cmd.getZoneId();
Object hypervisorType = cmd.getHypervisorType();
Object clusterType = cmd.getClusterType();
if (id != null) {
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (name != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
}
if (podId != null) {
sc.addAnd("podId", SearchCriteria.Op.EQ, podId);
}
if (zoneId != null) {
sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
}
if(hypervisorType != null) {
sc.addAnd("hypervisorType", SearchCriteria.Op.EQ, hypervisorType);
}
if(clusterType != null) {
sc.addAnd("clusterType", SearchCriteria.Op.EQ, clusterType);
}
return _clusterDao.search(sc, searchFilter);
}
@Override
public List<HostVO> searchForServers(ListHostsCmd cmd) {
Object name = cmd.getHostName();
Object type = cmd.getType();
Object state = cmd.getState();
Object zone = cmd.getZoneId();
Object pod = cmd.getPodId();
Object cluster = cmd.getClusterId();
Object id = cmd.getId();
Object keyword = cmd.getKeyword();
return searchForServers(cmd.getStartIndex(), cmd.getPageSizeVal(), name, type, state, zone, pod, cluster, id, keyword);
}
private List<HostVO> searchForServers(Long startIndex, Long pageSize, Object name, Object type, Object state, Object zone, Object pod, Object cluster, Object id, Object keyword) {
Filter searchFilter = new Filter(HostVO.class, "id", Boolean.TRUE, startIndex, pageSize);
SearchCriteria<HostVO> sc = _hostDao.createSearchCriteria();
if (keyword != null) {
SearchCriteria<HostVO> ssc = _hostDao.createSearchCriteria();
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("status", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (id != null) {
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (name != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
}
if (type != null) {
sc.addAnd("type", SearchCriteria.Op.EQ, type);
}
if (state != null) {
sc.addAnd("status", SearchCriteria.Op.EQ, state);
}
if (zone != null) {
sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zone);
}
if (pod != null) {
sc.addAnd("podId", SearchCriteria.Op.EQ, pod);
}
if (cluster != null) {
sc.addAnd("clusterId", SearchCriteria.Op.EQ, cluster);
}
return _hostDao.search(sc, searchFilter);
}
@Override
public List<HostPodVO> searchForPods(ListPodsByCmd cmd) {
Filter searchFilter = new Filter(HostPodVO.class, "dataCenterId", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<HostPodVO> sc = _hostPodDao.createSearchCriteria();
String podName = cmd.getPodName();
Long id = cmd.getId();
Long zoneId = cmd.getZoneId();
Object keyword = cmd.getKeyword();
if (keyword != null) {
SearchCriteria<HostPodVO> ssc = _hostPodDao.createSearchCriteria();
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (id != null) {
sc.addAnd("id", SearchCriteria.Op.EQ, id);
}
if (podName != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + podName + "%");
}
if (zoneId != null) {
sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
}
return _hostPodDao.search(sc, searchFilter);
}
@Override
public List<DataCenterVO> searchForZones(Criteria c) {
Long dataCenterId = (Long) c.getCriteria(Criteria.DATACENTERID);
if (dataCenterId != null) {
DataCenterVO dc = _dcDao.findById(dataCenterId);
List<DataCenterVO> datacenters = new ArrayList<DataCenterVO>();
datacenters.add(dc);
return datacenters;
}
Filter searchFilter = new Filter(DataCenterVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
SearchCriteria<DataCenterVO> sc = _dcDao.createSearchCriteria();
String zoneName = (String) c.getCriteria(Criteria.ZONENAME);
if (zoneName != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + zoneName + "%");
}
return _dcDao.search(sc, searchFilter);
}
@Override
public List<VlanVO> searchForVlans(ListVlanIpRangesCmd cmd) throws InvalidParameterValueException {
// If an account name and domain ID are specified, look up the account
String accountName = cmd.getAccountName();
Long domainId = cmd.getDomainId();
Long accountId = null;
Long networkId = cmd.getNetworkId();
Boolean forVirtual = cmd.getForVirtualNetwork();
String vlanType = null;
if (accountName != null && domainId != null) {
Account account = _accountDao.findActiveAccount(accountName, domainId);
if (account == null) {
throw new InvalidParameterValueException("Unable to find account " + accountName + " in domain " + domainId);
} else {
accountId = account.getId();
}
}
if (forVirtual != null) {
if (forVirtual) {
vlanType = VlanType.VirtualNetwork.toString();
} else {
vlanType = VlanType.DirectAttached.toString();
}
}
Filter searchFilter = new Filter(VlanVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
Object id = cmd.getId();
Object vlan = cmd.getVlan();
Object dataCenterId = cmd.getZoneId();
Object podId = cmd.getPodId();
Object keyword = cmd.getKeyword();
SearchBuilder<VlanVO> sb = _vlanDao.createSearchBuilder();
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
sb.and("vlan", sb.entity().getVlanTag(), SearchCriteria.Op.EQ);
sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
sb.and("vlan", sb.entity().getVlanTag(), SearchCriteria.Op.EQ);
sb.and("networkId", sb.entity().getNetworkId(), SearchCriteria.Op.EQ);
sb.and("vlanType", sb.entity().getVlanType(), SearchCriteria.Op.EQ);
if (accountId != null) {
SearchBuilder<AccountVlanMapVO> accountVlanMapSearch = _accountVlanMapDao.createSearchBuilder();
accountVlanMapSearch.and("accountId", accountVlanMapSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
sb.join("accountVlanMapSearch", accountVlanMapSearch, sb.entity().getId(), accountVlanMapSearch.entity().getVlanDbId(), JoinBuilder.JoinType.INNER);
}
if (podId != null) {
SearchBuilder<PodVlanMapVO> podVlanMapSearch = _podVlanMapDao.createSearchBuilder();
podVlanMapSearch.and("podId", podVlanMapSearch.entity().getPodId(), SearchCriteria.Op.EQ);
sb.join("podVlanMapSearch", podVlanMapSearch, sb.entity().getId(), podVlanMapSearch.entity().getVlanDbId(), JoinBuilder.JoinType.INNER);
}
SearchCriteria<VlanVO> sc = sb.create();
if (keyword != null) {
SearchCriteria<VlanVO> ssc = _vlanDao.createSearchCriteria();
ssc.addOr("vlanId", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("ipRange", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("vlanId", SearchCriteria.Op.SC, ssc);
} else {
if (id != null) {
sc.setParameters("id", id);
}
if (vlan != null) {
sc.setParameters("vlan", vlan);
}
if (dataCenterId != null) {
sc.setParameters("dataCenterId", dataCenterId);
}
if (networkId != null) {
sc.setParameters("networkId", networkId);
}
if (accountId != null) {
sc.setJoinParameters("accountVlanMapSearch", "accountId", accountId);
}
if (podId != null) {
sc.setJoinParameters("podVlanMapSearch", "podId", podId);
}
if (vlanType != null) {
sc.setParameters("vlanType", vlanType);
}
}
return _vlanDao.search(sc, searchFilter);
}
@Override
public Long getPodIdForVlan(long vlanDbId) {
List<PodVlanMapVO> podVlanMaps = _podVlanMapDao.listPodVlanMapsByVlan(vlanDbId);
if (podVlanMaps.isEmpty()) {
return null;
} else {
return podVlanMaps.get(0).getPodId();
}
}
@Override
public List<ConfigurationVO> searchForConfigurations(ListCfgsByCmd cmd) {
Filter searchFilter = new Filter(ConfigurationVO.class, "name", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<ConfigurationVO> sc = _configDao.createSearchCriteria();
Object name = cmd.getConfigName();
Object category = cmd.getCategory();
Object keyword = cmd.getKeyword();
if (keyword != null) {
SearchCriteria<ConfigurationVO> ssc = _configDao.createSearchCriteria();
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("instance", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("component", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("category", SearchCriteria.Op.LIKE, "%" + keyword + "%");
ssc.addOr("value", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (name != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
}
if (category != null) {
sc.addAnd("category", SearchCriteria.Op.EQ, category);
}
// hidden configurations are not displayed using the search API
sc.addAnd("category", SearchCriteria.Op.NEQ, "Hidden");
return _configDao.search(sc, searchFilter);
}
@Override
public List<HostVO> searchForAlertServers(Criteria c) {
Filter searchFilter = new Filter(HostVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
SearchCriteria<HostVO> sc = _hostDao.createSearchCriteria();
Object[] states = (Object[]) c.getCriteria(Criteria.STATE);
if (states != null) {
sc.addAnd("status", SearchCriteria.Op.IN, states);
}
return _hostDao.search(sc, searchFilter);
}
    /**
     * Searches VM templates by the given criteria (name, public flag, id,
     * creator, keyword). ISO-format entries are always excluded via the
     * "format NEQ ISO" condition set at the end.
     */
    @Override
    public List<VMTemplateVO> searchForTemplates(Criteria c) {
        Filter searchFilter = new Filter(VMTemplateVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
        Object name = c.getCriteria(Criteria.NAME);
        Object isPublic = c.getCriteria(Criteria.ISPUBLIC);
        Object id = c.getCriteria(Criteria.ID);
        Object keyword = c.getCriteria(Criteria.KEYWORD);
        Long creator = (Long) c.getCriteria(Criteria.CREATED_BY);
        SearchBuilder<VMTemplateVO> sb = _templateDao.createSearchBuilder();
        sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
        sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
        sb.and("publicTemplate", sb.entity().isPublicTemplate(), SearchCriteria.Op.EQ);
        sb.and("format", sb.entity().getFormat(), SearchCriteria.Op.NEQ);
        sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
        SearchCriteria<VMTemplateVO> sc = sb.create();
        if (keyword != null) {
            // Free-text keyword across several display columns, ANDed into the
            // main criteria under the "name" condition slot.
            SearchCriteria<VMTemplateVO> ssc = _templateDao.createSearchCriteria();
            ssc.addOr("displayName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("group", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("instanceName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            sc.addAnd("name", SearchCriteria.Op.SC, ssc);
        }
        if (id != null) {
            sc.setParameters("id", id);
        }
        if (name != null) {
            sc.setParameters("name", "%" + name + "%");
        }
        if (isPublic != null) {
            sc.setParameters("publicTemplate", isPublic);
        }
        if (creator != null) {
            sc.setParameters("accountId", creator);
        }
        // Exclude ISOs: this API lists only real templates.
        sc.setParameters("format", ImageFormat.ISO);
        return _templateDao.search(sc, searchFilter);
    }
@Override
public Set<Pair<Long, Long>> listIsos(ListIsosCmd cmd) throws IllegalArgumentException, InvalidParameterValueException {
TemplateFilter isoFilter = TemplateFilter.valueOf(cmd.getIsoFilter());
Long accountId = null;
Account account = UserContext.current().getCaller();
Long domainId = cmd.getDomainId();
String accountName = cmd.getAccountName();
if ((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN)) {
// validate domainId before proceeding
if ((domainId != null) && (accountName != null)) {
if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
throw new InvalidParameterValueException("Invalid domain id (" + domainId + ") given, unable to list events.");
}
Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
if (userAccount != null) {
accountId = userAccount.getId();
} else {
throw new InvalidParameterValueException("Failed to list ISOs. Unable to find account " + accountName + " in domain " + domainId);
}
} else if (account != null) {
accountId = account.getId();
}
} else {
accountId = account.getId();
}
//It is account specific if account is admin type and domainId and accountName are not null
boolean isAccountSpecific = (account == null || isAdmin(account.getType()))
&& (accountName != null)
&& (domainId != null);
HypervisorType hypervisorType = HypervisorType.getType(cmd.getHypervisor());
return listTemplates(cmd.getId(), cmd.getIsoName(), cmd.getKeyword(), isoFilter, true, cmd.isBootable(), accountId, cmd.getPageSizeVal(), cmd.getStartIndex(), cmd.getZoneId(), hypervisorType, isAccountSpecific, true);
}
@Override
public Set<Pair<Long, Long>> listTemplates(ListTemplatesCmd cmd) throws IllegalArgumentException, InvalidParameterValueException {
TemplateFilter templateFilter = TemplateFilter.valueOf(cmd.getTemplateFilter());
Long accountId = null;
Account account = UserContext.current().getCaller();
Long domainId = cmd.getDomainId();
String accountName = cmd.getAccountName();
if ((account == null) || (account.getType() == Account.ACCOUNT_TYPE_ADMIN)) {
// validate domainId before proceeding
if ((domainId != null) && (accountName != null)) {
if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
throw new InvalidParameterValueException("Invalid domain id (" + domainId + ") given, unable to list events.");
}
Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
if (userAccount != null) {
accountId = userAccount.getId();
} else {
throw new InvalidParameterValueException("Failed to list ISOs. Unable to find account " + accountName + " in domain " + domainId);
}
} else if (account != null) {
accountId = account.getId();
}
} else {
accountId = account.getId();
}
//It is account specific if account is admin type and domainId and accountName are not null
boolean isAccountSpecific = (account == null || isAdmin(account.getType()))
&& (accountName != null)
&& (domainId != null);
boolean showDomr = (templateFilter != TemplateFilter.selfexecutable);
HypervisorType hypervisorType = HypervisorType.getType(cmd.getHypervisor());
return listTemplates(cmd.getId(), cmd.getTemplateName(), cmd.getKeyword(), templateFilter, false, null, accountId, cmd.getPageSizeVal(), cmd.getStartIndex(), cmd.getZoneId(), hypervisorType, isAccountSpecific, showDomr);
}
    /**
     * Shared implementation of template/ISO listing. When an explicit templateId
     * is given, it is validated (must exist, and its format must agree with the
     * isIso flag) and returned directly as a single (id, zoneId) pair; otherwise
     * a full search is performed. "Executable" style filters only return
     * templates whose download is complete (onlyReady).
     *
     * @throws InvalidParameterValueException for a missing template or a
     *         format/isIso mismatch
     */
    private Set<Pair<Long, Long>> listTemplates(Long templateId, String name, String keyword, TemplateFilter templateFilter, boolean isIso, Boolean bootable, Long accountId, Long pageSize, Long startIndex, Long zoneId, HypervisorType hyperType, boolean isAccountSpecific, boolean showDomr) throws InvalidParameterValueException {
        VMTemplateVO template = null;
        if (templateId != null) {
            template = _templateDao.findById(templateId);
            if (template == null) {
                throw new InvalidParameterValueException("Please specify a valid template ID.");
            }// If ISO requested then it should be ISO.
            if (isIso && template.getFormat() != ImageFormat.ISO){
                s_logger.error("Template Id " + templateId + " is not an ISO");
                throw new InvalidParameterValueException("Template Id " + templateId + " is not an ISO");
            }// If ISO not requested then it shouldn't be an ISO.
            if (!isIso && template.getFormat() == ImageFormat.ISO){
                s_logger.error("Incorrect format of the template id " + templateId);
                throw new InvalidParameterValueException("Incorrect format " + template.getFormat() + " of the template id " + templateId);
            }
        }
        // Show only those that are downloaded.
        boolean onlyReady = (templateFilter == TemplateFilter.featured) ||
                            (templateFilter == TemplateFilter.selfexecutable) ||
                            (templateFilter == TemplateFilter.sharedexecutable) ||
                            (templateFilter == TemplateFilter.executable && isAccountSpecific) ||
                            (templateFilter == TemplateFilter.community);
        // Resolve the account/domain scope; fall back to the root domain when no
        // account was supplied.
        Account account = null;
        DomainVO domain = null;
        if (accountId != null) {
            account = _accountDao.findById(accountId);
            domain = _domainDao.findById(account.getDomainId());
        } else {
            domain = _domainDao.findById(DomainVO.ROOT_DOMAIN);
        }
        Set<Pair<Long, Long>> templateZonePairSet = new HashSet<Pair<Long,Long>>();
        if (template == null) {
            templateZonePairSet = _templateDao.searchTemplates(name, keyword, templateFilter, isIso, bootable, account, domain, pageSize, startIndex, zoneId, hyperType, onlyReady, showDomr);
        } else {
            // Explicit template id: return it directly without a search.
            templateZonePairSet.add(new Pair<Long,Long>(template.getId(), zoneId));
        }
        return templateZonePairSet;
    }
@Override
public List<VMTemplateVO> listPermittedTemplates(long accountId) {
return _launchPermissionDao.listPermittedTemplates(accountId);
}
@Override
public List<HostPodVO> listPods(long dataCenterId) {
return _hostPodDao.listByDataCenterId(dataCenterId);
}
@Override
public String changePrivateIPRange(boolean add, Long podId, String startIP, String endIP) throws InvalidParameterValueException {
return _configMgr.changePrivateIPRange(add, podId, startIP, endIP);
}
@Override
public User findUserById(Long userId) {
return _userDao.findById(userId);
}
@Override
public List<AccountVO> findAccountsLike(String accountName) {
return _accountDao.findAccountsLike(accountName);
}
@Override
public Account findActiveAccountByName(String accountName) {
return _accountDao.findActiveAccountByName(accountName);
}
/**
 * Looks up an active account by name within a domain, defaulting to the
 * ROOT domain when no domain id is supplied.
 *
 * @param accountName name of the account
 * @param domainId    domain to search; null means ROOT domain
 * @return the active account, or the DAO's not-found result
 */
@Override
public Account findActiveAccount(String accountName, Long domainId) {
    Long effectiveDomainId = domainId;
    if (effectiveDomainId == null) {
        // No domain specified: fall back to the ROOT domain.
        effectiveDomainId = DomainVO.ROOT_DOMAIN;
    }
    return _accountDao.findActiveAccount(accountName, effectiveDomainId);
}
/**
 * Looks up an account (active or not) by name within a domain, defaulting
 * to the ROOT domain when no domain id is supplied.
 *
 * @param accountName name of the account
 * @param domainId    domain to search; null means ROOT domain
 * @return the account, or the DAO's not-found result
 */
@Override
public Account findAccountByName(String accountName, Long domainId) {
    Long effectiveDomainId = domainId;
    if (effectiveDomainId == null) {
        // No domain specified: fall back to the ROOT domain.
        effectiveDomainId = DomainVO.ROOT_DOMAIN;
    }
    return _accountDao.findAccount(accountName, effectiveDomainId);
}
/**
 * Looks up an account by primary key.
 *
 * @param accountId id of the account
 * @return the account record, or the DAO's not-found result
 */
@Override
public Account findAccountById(Long accountId) {
    Account found = _accountDao.findById(accountId);
    return found;
}
/**
 * Searches for accounts visible to the caller.
 *
 * Admin callers (or a null caller) may search by name across a domain
 * subtree; regular users are restricted to their own account. The system
 * account (id 1) is never returned.
 *
 * @param cmd list-accounts command carrying the optional id, domain id,
 *            name, type, state, cleanup flag, keyword and paging info
 * @return matching accounts, possibly empty
 */
@Override
public List<AccountVO> searchForAccounts(ListAccountsCmd cmd) {
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    Long accountId = cmd.getId();
    String accountName = null;
    if(accountId != null && accountId == 1){
        //system account should NOT be searchable
        List<AccountVO> emptyList = new ArrayList<AccountVO>();
        return emptyList;
    }
    if ((account == null) || isAdmin(account.getType())) {
        accountName = cmd.getSearchName(); // admin's can specify a name to search for
        if (domainId == null) {
            // default domainId to the admin's domain
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        } else if (account != null) {
            // admin may only list accounts in domains under its own domain
            if (!_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new ServerApiException(BaseCmd.PARAM_ERROR, "Invalid domain id (" + domainId + ") given, unable to list accounts");
            }
        }
    } else {
        accountId = account.getId();
        accountName = account.getAccountName(); // regular users must be constrained to their own account
    }
    Filter searchFilter = new Filter(AccountVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object type = cmd.getAccountType();
    Object state = cmd.getState();
    Object isCleanupRequired = cmd.isCleanupRequired();
    Object keyword = cmd.getKeyword();
    // Build the reusable criteria skeleton; parameters are bound below.
    SearchBuilder<AccountVO> sb = _accountDao.createSearchBuilder();
    sb.and("accountName", sb.entity().getAccountName(), SearchCriteria.Op.LIKE);
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    // "nid" (id != ?) is used to exclude the system account in domain searches
    sb.and("nid", sb.entity().getId(), SearchCriteria.Op.NEQ);
    sb.and("type", sb.entity().getType(), SearchCriteria.Op.EQ);
    sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ);
    sb.and("needsCleanup", sb.entity().getNeedsCleanup(), SearchCriteria.Op.EQ);
    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<AccountVO> sc = sb.create();
    if (keyword != null) {
        // Keyword matches either the account name or its state.
        SearchCriteria<AccountVO> ssc = _accountDao.createSearchCriteria();
        ssc.addOr("accountName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("accountName", SearchCriteria.Op.SC, ssc);
    }
    if (accountName != null) {
        sc.setParameters("accountName", "%" + accountName + "%");
    }
    if (accountId != null) {
        sc.setParameters("id", accountId);
    } else if (domainId != null) {
        DomainVO domain = _domainDao.findById(domainId);
        // I want to join on user_vm.domain_id = domain.id where domain.path like 'foo%'
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
        // exclude the system account (id 1) from domain-wide results
        sc.setParameters("nid", 1L);
    } else {
        sc.setParameters("nid", 1L);
    }
    if (type != null) {
        sc.setParameters("type", type);
    }
    if (state != null) {
        sc.setParameters("state", state);
    }
    if (isCleanupRequired != null) {
        sc.setParameters("needsCleanup", isCleanupRequired);
    }
    return _accountDao.search(sc, searchFilter);
}
/**
 * Deletes (expunges) a resource limit.
 *
 * @param limitId id of the limit to remove; null is treated as "nothing to
 *                delete"
 * @return true if the limit row was expunged, false when limitId is null
 *         or the DAO reports failure
 */
@Override
public boolean deleteLimit(Long limitId) {
    // A limit ID must be passed in
    return (limitId != null) && _resourceLimitDao.expunge(limitId);
}
/**
 * Looks up a resource limit by primary key.
 *
 * @param limitId id of the limit
 * @return the limit record, or the DAO's not-found result
 */
@Override
public ResourceLimitVO findLimitById(long limitId) {
    ResourceLimitVO limit = _resourceLimitDao.findById(limitId);
    return limit;
}
/**
 * Lists ISO templates matching the given search criteria.
 *
 * Supported criteria: READY, ISPUBLIC, CREATED_BY (account id) and
 * KEYWORD (matched against display text and name). Results are always
 * restricted to the ISO image format.
 *
 * @param c criteria bundle including ordering and paging
 * @return matching ISO templates
 */
@Override
public List<VMTemplateVO> listIsos(Criteria c) {
    Filter pageFilter = new Filter(VMTemplateVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    Boolean readyOnly = (Boolean) c.getCriteria(Criteria.READY);
    Boolean publicOnly = (Boolean) c.getCriteria(Criteria.ISPUBLIC);
    Long creatorAccountId = (Long) c.getCriteria(Criteria.CREATED_BY);
    Object searchKeyword = c.getCriteria(Criteria.KEYWORD);

    SearchCriteria<VMTemplateVO> criteria = _templateDao.createSearchCriteria();
    if (searchKeyword != null) {
        // Keyword matches either the display text or the name.
        SearchCriteria<VMTemplateVO> keywordCriteria = _templateDao.createSearchCriteria();
        keywordCriteria.addOr("displayText", SearchCriteria.Op.LIKE, "%" + searchKeyword + "%");
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + searchKeyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (creatorAccountId != null) {
        criteria.addAnd("accountId", SearchCriteria.Op.EQ, creatorAccountId);
    }
    if (readyOnly != null) {
        criteria.addAnd("ready", SearchCriteria.Op.EQ, readyOnly);
    }
    if (publicOnly != null) {
        criteria.addAnd("publicTemplate", SearchCriteria.Op.EQ, publicOnly);
    }
    // Only ISOs are ever returned from this listing.
    criteria.addAnd("format", SearchCriteria.Op.EQ, ImageFormat.ISO);
    return _templateDao.search(criteria, pageFilter);
}
/**
 * Finds VM instances whose name matches the given pattern (DAO-defined
 * LIKE semantics).
 *
 * @param vmInstanceName name fragment to match
 * @return matching VM instances
 */
@Override
public List<VMInstanceVO> findVMInstancesLike(String vmInstanceName) {
    List<VMInstanceVO> instances = _vmInstanceDao.findVMInstancesLike(vmInstanceName);
    return instances;
}
/**
 * Looks up a VM instance by primary key.
 *
 * @param vmId id of the VM instance
 * @return the instance record, or the DAO's not-found result
 */
@Override
public VMInstanceVO findVMInstanceById(long vmId) {
    VMInstanceVO instance = _vmInstanceDao.findById(vmId);
    return instance;
}
/**
 * Looks up a user VM by primary key.
 *
 * @param userVmId id of the user VM
 * @return the user VM record, or the DAO's not-found result
 */
@Override
public UserVmVO findUserVMInstanceById(long userVmId) {
    UserVmVO userVm = _userVmDao.findById(userVmId);
    return userVm;
}
/**
 * Looks up a service offering by primary key.
 *
 * @param offeringId id of the offering
 * @return the offering record, or the DAO's not-found result
 */
@Override
public ServiceOfferingVO findServiceOfferingById(long offeringId) {
    ServiceOfferingVO offering = _offeringsDao.findById(offeringId);
    return offering;
}
/**
 * Lists every service offering, including removed ones.
 *
 * @return all offerings known to the DAO (removed rows included)
 */
@Override
public List<ServiceOfferingVO> listAllServiceOfferings() {
    List<ServiceOfferingVO> offerings = _offeringsDao.listAllIncludingRemoved();
    return offerings;
}
/**
 * Lists all hosts via the host DAO.
 *
 * @return all host records returned by {@code listAll}
 */
@Override
public List<HostVO> listAllActiveHosts() {
    List<HostVO> hosts = _hostDao.listAll();
    return hosts;
}
/**
 * Looks up a data center (zone) by primary key.
 *
 * @param dataCenterId id of the zone
 * @return the zone record, or the DAO's not-found result
 */
@Override
public DataCenterVO findDataCenterById(long dataCenterId) {
    DataCenterVO zone = _dcDao.findById(dataCenterId);
    return zone;
}
/**
 * Updates an ISO; shares the implementation with template updates.
 *
 * @param cmd the update-ISO command
 * @return the updated template record
 * @throws InvalidParameterValueException on invalid input
 * @throws PermissionDeniedException when the caller may not modify the ISO
 */
@Override
public VMTemplateVO updateTemplate(UpdateIsoCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    // ISO and template updates share one code path.
    VMTemplateVO updated = updateTemplateOrIso(cmd);
    return updated;
}
/**
 * Updates a template; shares the implementation with ISO updates.
 *
 * @param cmd the update-template command
 * @return the updated template record
 * @throws InvalidParameterValueException on invalid input
 * @throws PermissionDeniedException when the caller may not modify the template
 */
@Override
public VMTemplateVO updateTemplate(UpdateTemplateCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    // Template and ISO updates share one code path.
    VMTemplateVO updated = updateTemplateOrIso(cmd);
    return updated;
}
/**
 * Common implementation for updating a template or an ISO.
 *
 * Validates existence and caller permission, then applies any of the
 * optional fields (name, display text, format, guest OS, password-enabled,
 * bootable) that were supplied on the command.
 *
 * @param cmd update command carrying the template/ISO id and optional fields
 * @return the (possibly updated) template record re-read from the DAO
 * @throws InvalidParameterValueException when the template does not exist,
 *         is the system template (id 1), the format string is unknown, or
 *         the guest OS id is invalid
 * @throws PermissionDeniedException when the caller does not own the
 *         template and is not an authorized admin
 */
private VMTemplateVO updateTemplateOrIso(UpdateTemplateOrIsoCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Long id = cmd.getId();
    String name = cmd.getTemplateName();
    String displayText = cmd.getDisplayText();
    String format = cmd.getFormat();
    Long guestOSId = cmd.getOsTypeId();
    Boolean passwordEnabled = cmd.isPasswordEnabled();
    Boolean bootable = cmd.isBootable();
    Account account = UserContext.current().getCaller();
    //verify that template exists
    VMTemplateVO template = findTemplateById(id);
    if (template == null) {
        throw new InvalidParameterValueException("unable to find template/iso with id " + id);
    }
    //Don't allow to modify system template
    // BUG FIX: was "id == Long.valueOf(1)", a reference comparison of boxed
    // Longs that only worked because of the Long cache. Compare values instead.
    if (id.longValue() == 1L) {
        throw new InvalidParameterValueException("Unable to update template/iso with id " + id);
    }
    //do a permission check
    if (account != null) {
        Long templateOwner = template.getAccountId();
        if (!BaseCmd.isAdmin(account.getType())) {
            // Non-admins may only modify templates they own.
            if ((templateOwner == null) || (account.getId() != templateOwner.longValue())) {
                throw new PermissionDeniedException("Unable to modify template/iso with id " + id + ", permission denied.");
            }
        } else if (account.getType() != Account.ACCOUNT_TYPE_ADMIN) {
            // Domain admins may only modify templates within their own domain subtree.
            Long templateOwnerDomainId = findDomainIdByAccountId(templateOwner);
            if (!isChildDomain(account.getDomainId(), templateOwnerDomainId)) {
                throw new PermissionDeniedException("Unable to modify template/iso with id " + id + ", permission denied");
            }
        }
    }
    // If nothing was supplied there is nothing to persist.
    boolean updateNeeded = !(name == null && displayText == null && format == null && guestOSId == null && passwordEnabled == null && bootable == null);
    if (!updateNeeded) {
        return template;
    }
    template = _templateDao.createForUpdate(id);
    if (name != null) {
        template.setName(name);
    }
    if (displayText != null) {
        template.setDisplayText(displayText);
    }
    ImageFormat imageFormat = null;
    if (format != null) {
        try {
            imageFormat = ImageFormat.valueOf(format.toUpperCase());
        } catch (IllegalArgumentException e) {
            throw new InvalidParameterValueException("Image format: " + format + " is incorrect. Supported formats are " + EnumUtils.listValues(ImageFormat.values()));
        }
        template.setFormat(imageFormat);
    }
    if (guestOSId != null) {
        GuestOSVO guestOS = _guestOSDao.findById(guestOSId);
        if (guestOS == null) {
            throw new InvalidParameterValueException("Please specify a valid guest OS ID.");
        } else {
            template.setGuestOSId(guestOSId);
        }
    }
    if (passwordEnabled != null) {
        template.setEnablePassword(passwordEnabled);
    }
    if (bootable != null) {
        template.setBootable(bootable);
    }
    _templateDao.update(id, template);
    // Re-read so the caller sees the persisted state.
    return _templateDao.findById(id);
}
/**
 * Copies a template between zones, logging and swallowing any failure.
 *
 * @param userId       id of the user initiating the copy
 * @param templateId   template to copy
 * @param sourceZoneId zone to copy from
 * @param destZoneId   zone to copy to
 * @return true when the template manager reports success, false otherwise
 */
@Override
public boolean copyTemplate(long userId, long templateId, long sourceZoneId, long destZoneId) {
    try {
        return _tmpltMgr.copy(userId, templateId, sourceZoneId, destZoneId);
    } catch (Exception e) {
        // Best-effort: log and report failure rather than propagate.
        s_logger.warn("Unable to copy template " + templateId + " from zone " + sourceZoneId + " to " + destZoneId , e);
        return false;
    }
}
/**
 * Looks up a template by primary key.
 *
 * @param templateId id of the template
 * @return the template record, or the DAO's not-found result
 */
@Override
public VMTemplateVO findTemplateById(long templateId) {
    VMTemplateVO template = _templateDao.findById(templateId);
    return template;
}
/**
 * Searches for events visible to the caller.
 *
 * Admin callers (or a null caller) may scope the search by domain and
 * account; regular users only see their own events. When both entryTime
 * and duration are supplied, the search short-circuits to pending events.
 *
 * @param cmd list-events command with optional domain/account, type,
 *            level, date range, keyword, entry time and duration
 * @return matching events, newest first
 * @throws PermissionDeniedException when the requested domain is outside
 *         the caller's subtree
 * @throws InvalidParameterValueException when entryTime &lt;= duration
 */
@Override
public List<EventVO> searchForEvents(ListEventsCmd cmd) throws PermissionDeniedException, InvalidParameterValueException {
    Account account = UserContext.current().getCaller();
    Long accountId = null;
    boolean isAdmin = false;
    String accountName = cmd.getAccountName();
    Long domainId = cmd.getDomainId();
    if ((account == null) || isAdmin(account.getType())) {
        isAdmin = true;
        // validate domainId before proceeding
        if (domainId != null) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list events.");
            }
            if (accountName != null) {
                // resolve the named account within the requested domain
                Account userAccount = _accountDao.findAccount(accountName, domainId);
                if (userAccount != null) {
                    accountId = userAccount.getId();
                } else {
                    throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Unable to find account " + accountName + " in domain " + domainId);
                }
            }
        } else {
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
    } else {
        // regular users are limited to their own events
        accountId = account.getId();
    }
    Filter searchFilter = new Filter(EventVO.class, "createDate", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object type = cmd.getType();
    Object level = cmd.getLevel();
    Date startDate = cmd.getStartDate();
    Date endDate = cmd.getEndDate();
    Object keyword = cmd.getKeyword();
    Integer entryTime = cmd.getEntryTime();
    Integer duration = cmd.getDuration();
    if ((entryTime != null) && (duration != null)) {
        if (entryTime <= duration){
            throw new InvalidParameterValueException("Entry time must be greater than duration");
        }
        // pending-event listing bypasses the criteria search entirely
        return listPendingEvents(entryTime, duration);
    }
    // Build the criteria skeleton; parameters are bound below.
    SearchBuilder<EventVO> sb = _eventDao.createSearchBuilder();
    sb.and("levelL", sb.entity().getLevel(), SearchCriteria.Op.LIKE);
    sb.and("levelEQ", sb.entity().getLevel(), SearchCriteria.Op.EQ);
    sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
    sb.and("accountName", sb.entity().getAccountName(), SearchCriteria.Op.LIKE);
    sb.and("domainIdEQ", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
    sb.and("type", sb.entity().getType(), SearchCriteria.Op.EQ);
    sb.and("createDateB", sb.entity().getCreateDate(), SearchCriteria.Op.BETWEEN);
    sb.and("createDateG", sb.entity().getCreateDate(), SearchCriteria.Op.GTEQ);
    sb.and("createDateL", sb.entity().getCreateDate(), SearchCriteria.Op.LTEQ);
    if ((accountId == null) && (accountName == null) && (domainId != null) && isAdmin) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<EventVO> sc = sb.create();
    if (keyword != null) {
        // Keyword matches type, description or level.
        SearchCriteria<EventVO> ssc = _eventDao.createSearchCriteria();
        ssc.addOr("type", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("description", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("level", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("level", SearchCriteria.Op.SC, ssc);
    }
    if (level != null) {
        sc.setParameters("levelEQ", level);
    }
    if (accountId != null) {
        sc.setParameters("accountId", accountId);
    } else if (domainId != null) {
        if (accountName != null) {
            sc.setParameters("domainIdEQ", domainId);
            sc.setParameters("accountName", "%" + accountName + "%");
            sc.addAnd("removed", SearchCriteria.Op.NULL);
        } else if (isAdmin) {
            // subtree match on the domain path set up in the join above
            DomainVO domain = _domainDao.findById(domainId);
            sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
        }
    }
    if (type != null) {
        sc.setParameters("type", type);
    }
    // Date bounds are widened to whole days before being applied.
    if (startDate != null && endDate != null) {
        startDate = massageDate(startDate, 0, 0, 0);
        endDate = massageDate(endDate, 23, 59, 59);
        sc.setParameters("createDateB", startDate, endDate);
    } else if (startDate != null) {
        startDate = massageDate(startDate, 0, 0, 0);
        sc.setParameters("createDateG", startDate);
    } else if (endDate != null) {
        endDate = massageDate(endDate, 23, 59, 59);
        sc.setParameters("createDateL", endDate);
    }
    return _eventDao.searchAllEvents(sc, searchFilter);
}
/**
 * Lists the domain routers running on a given host.
 *
 * @param hostId id of the host
 * @return routers reported by the router DAO for that host
 */
@Override
public List<DomainRouterVO> listRoutersByHostId(long hostId) {
    List<DomainRouterVO> routers = _routerDao.listByHostId(hostId);
    return routers;
}
/**
 * Lists all domain routers via the router DAO.
 *
 * @return all router records returned by {@code listAll}
 */
@Override
public List<DomainRouterVO> listAllActiveRouters() {
    List<DomainRouterVO> routers = _routerDao.listAll();
    return routers;
}
/**
 * Searches for domain routers visible to the caller.
 *
 * Validates the optional domain against the caller's subtree, resolves an
 * optional account name, and then filters by name, state, zone, pod, host
 * and keyword.
 *
 * @param cmd list-routers command with the optional filters and paging
 * @return matching routers
 * @throws InvalidParameterValueException declared for interface compatibility
 * @throws PermissionDeniedException when the requested domain is outside
 *         the caller's subtree
 */
@Override
public List<DomainRouterVO> searchForRouters(ListRoutersCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    Long accountId = null;
    Account account = UserContext.current().getCaller();
    // validate domainId before proceeding
    if (domainId != null) {
        if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
            throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list routers");
        }
        if (accountName != null) {
            // resolve the named account within the requested domain
            Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
            if (userAccount != null) {
                accountId = userAccount.getId();
            } else {
                throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Unable to find account " + accountName + " in domain " + domainId);
            }
        }
    } else {
        domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
    }
    Filter searchFilter = new Filter(DomainRouterVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object name = cmd.getRouterName();
    Object state = cmd.getState();
    Object zone = cmd.getZoneId();
    Object pod = cmd.getPodId();
    Object hostId = cmd.getHostId();
    Object keyword = cmd.getKeyword();
    // Build the criteria skeleton; parameters are bound below.
    SearchBuilder<DomainRouterVO> sb = _routerDao.createSearchBuilder();
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.IN);
    sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("podId", sb.entity().getPodId(), SearchCriteria.Op.EQ);
    sb.and("hostId", sb.entity().getHostId(), SearchCriteria.Op.EQ);
    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<DomainRouterVO> sc = sb.create();
    if (keyword != null) {
        // Keyword matches name, instance name or state.
        SearchCriteria<DomainRouterVO> ssc = _routerDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("instanceName", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (accountId != null) {
        sc.setParameters("accountId", accountId);
    } else if (domainId != null) {
        // subtree match on the domain path set up in the join above
        DomainVO domain = _domainDao.findById(domainId);
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
    }
    if (state != null) {
        sc.setParameters("state", state);
    }
    if (zone != null) {
        sc.setParameters("dataCenterId", zone);
    }
    if (pod != null) {
        sc.setParameters("podId", pod);
    }
    if (hostId != null) {
        sc.setParameters("hostId", hostId);
    }
    return _routerDao.search(sc, searchFilter);
}
/**
 * Searches for console proxy VMs matching the given criteria.
 *
 * Supported criteria: ID, NAME, STATE, DATACENTERID, PODID, HOSTID and
 * KEYWORD (matched against name and state).
 *
 * @param c criteria bundle including ordering and paging
 * @return matching console proxies
 */
@Override
public List<ConsoleProxyVO> searchForConsoleProxy(Criteria c) {
    Filter pageFilter = new Filter(ConsoleProxyVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    SearchCriteria<ConsoleProxyVO> criteria = _consoleProxyDao.createSearchCriteria();

    Object proxyId = c.getCriteria(Criteria.ID);
    Object proxyName = c.getCriteria(Criteria.NAME);
    Object proxyState = c.getCriteria(Criteria.STATE);
    Object zoneId = c.getCriteria(Criteria.DATACENTERID);
    Object podId = c.getCriteria(Criteria.PODID);
    Object hostId = c.getCriteria(Criteria.HOSTID);
    Object searchKeyword = c.getCriteria(Criteria.KEYWORD);

    if (searchKeyword != null) {
        // Keyword matches either the proxy name or its state.
        SearchCriteria<ConsoleProxyVO> keywordCriteria = _consoleProxyDao.createSearchCriteria();
        keywordCriteria.addOr("name", SearchCriteria.Op.LIKE, "%" + searchKeyword + "%");
        keywordCriteria.addOr("state", SearchCriteria.Op.LIKE, "%" + searchKeyword + "%");
        criteria.addAnd("name", SearchCriteria.Op.SC, keywordCriteria);
    }
    if (proxyId != null) {
        criteria.addAnd("id", SearchCriteria.Op.EQ, proxyId);
    }
    if (proxyName != null) {
        criteria.addAnd("name", SearchCriteria.Op.LIKE, "%" + proxyName + "%");
    }
    if (proxyState != null) {
        criteria.addAnd("state", SearchCriteria.Op.EQ, proxyState);
    }
    if (zoneId != null) {
        criteria.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    if (podId != null) {
        criteria.addAnd("podId", SearchCriteria.Op.EQ, podId);
    }
    if (hostId != null) {
        criteria.addAnd("hostId", SearchCriteria.Op.EQ, hostId);
    }
    return _consoleProxyDao.search(criteria, pageFilter);
}
/**
 * Searches for volumes visible to the caller.
 *
 * Admins (or a null caller) may search across a domain subtree and by
 * account name; regular users see only their own volumes. Volumes on
 * SYSTEM-type templates and destroyed volumes are filtered out of the
 * result.
 *
 * @param cmd list-volumes command with the optional filters and paging
 * @return volumes matching the criteria, excluding system-template volumes
 * @throws InvalidParameterValueException when the named account cannot be
 *         found in the requested domain
 * @throws PermissionDeniedException when the requested domain is outside
 *         the caller's subtree
 */
@Override
public List<VolumeVO> searchForVolumes(ListVolumesCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    Long accountId = null;
    boolean isAdmin = false;
    if ((account == null) || isAdmin(account.getType())) {
        isAdmin = true;
        if (domainId != null) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list volumes.");
            }
            if (accountName != null) {
                // resolve the named account within the requested domain
                Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
                if (userAccount != null) {
                    accountId = userAccount.getId();
                } else {
                    throw new InvalidParameterValueException("could not find account " + accountName + " in domain " + domainId);
                }
            }
        } else {
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
    } else {
        // regular users are limited to their own volumes
        accountId = account.getId();
    }
    Filter searchFilter = new Filter(VolumeVO.class, "created", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object id = cmd.getId();
    Long vmInstanceId = cmd.getVirtualMachineId();
    Object name = cmd.getVolumeName();
    Object keyword = cmd.getKeyword();
    Object type = cmd.getType();
    Object zone = null;
    Object pod = null;
    //Object host = null; TODO
    if (isAdmin) {
        // zone/pod filters are admin-only
        zone = cmd.getZoneId();
        pod = cmd.getPodId();
        // host = cmd.getHostId(); TODO
    } else {
        domainId = null;
    }
    // hack for now, this should be done better but due to needing a join I opted to
    // do this quickly and worry about making it pretty later
    SearchBuilder<VolumeVO> sb = _volumeDao.createSearchBuilder();
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("accountIdEQ", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
    sb.and("accountIdIN", sb.entity().getAccountId(), SearchCriteria.Op.IN);
    sb.and("volumeType", sb.entity().getVolumeType(), SearchCriteria.Op.LIKE);
    sb.and("instanceId", sb.entity().getInstanceId(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("podId", sb.entity().getPodId(), SearchCriteria.Op.EQ);
    // Don't return DomR and ConsoleProxy volumes
    // NOTE(review): these NLIKE conditions are declared but their parameters
    // are commented out below; system volumes are instead filtered by
    // template type after the search — confirm the dead conditions are intended.
    sb.and("domRNameLabel", sb.entity().getName(), SearchCriteria.Op.NLIKE);
    sb.and("domPNameLabel", sb.entity().getName(), SearchCriteria.Op.NLIKE);
    sb.and("domSNameLabel", sb.entity().getName(), SearchCriteria.Op.NLIKE);
    // Only return Volumes that are in the "Created" state
    sb.and("status", sb.entity().getStatus(), SearchCriteria.Op.EQ);
    // Only return volumes that are not destroyed
    sb.and("destroyed", sb.entity().getDestroyed(), SearchCriteria.Op.EQ);
    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    // now set the SC criteria...
    SearchCriteria<VolumeVO> sc = sb.create();
    if (keyword != null) {
        // Keyword matches either the volume name or its type.
        SearchCriteria<VolumeVO> ssc = _volumeDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        ssc.addOr("volumeType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (id != null) {
        sc.setParameters("id", id);
    }
    if (accountId != null) {
        sc.setParameters("accountIdEQ", accountId);
    } else if (domainId != null) {
        // subtree match on the domain path set up in the join above
        DomainVO domain = _domainDao.findById(domainId);
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
    }
    if (type != null) {
        sc.setParameters("volumeType", "%" + type + "%");
    }
    if (vmInstanceId != null) {
        sc.setParameters("instanceId", vmInstanceId);
    }
    if (zone != null) {
        sc.setParameters("dataCenterId", zone);
    }
    if (pod != null) {
        sc.setParameters("podId", pod);
    }
    // Don't return DomR and ConsoleProxy volumes
    /*
    sc.setParameters("domRNameLabel", "r-%");
    sc.setParameters("domPNameLabel", "v-%");
    sc.setParameters("domSNameLabel", "s-%");
    */
    // Only return volumes that are not destroyed
    sc.setParameters("destroyed", false);
    List<VolumeVO> allVolumes = _volumeDao.search(sc, searchFilter);
    List<VolumeVO> returnableVolumes = new ArrayList<VolumeVO>(); //these are ones without domr and console proxy
    // Post-filter: drop volumes whose template is a SYSTEM template
    // (router/console-proxy volumes).
    for(VolumeVO v:allVolumes)
    {
        VMTemplateVO template = _templateDao.findById(v.getTemplateId());
        if(template!=null && (template.getTemplateType() == TemplateType.SYSTEM))
        {
            //do nothing
        }
        else
        {
            returnableVolumes.add(v);
        }
    }
    return returnableVolumes;
}
/**
 * Finds the live volume attached to a VM instance at a given device id.
 *
 * @param instanceId id of the VM instance
 * @param deviceId   device id of the attachment
 * @return the volume when one exists that is neither destroyed nor removed,
 *         otherwise null
 */
@Override
public VolumeVO findVolumeByInstanceAndDeviceId(long instanceId, long deviceId) {
    // BUG FIX: the previous code called get(0) before checking anything,
    // which threw IndexOutOfBoundsException when no volume matched and made
    // the subsequent null check unreachable for that case.
    List<VolumeVO> volumes = _volumeDao.findByInstanceAndDeviceId(instanceId, deviceId);
    if (volumes == null || volumes.isEmpty()) {
        return null;
    }
    VolumeVO volume = volumes.get(0);
    if (volume != null && !volume.getDestroyed() && volume.getRemoved() == null) {
        return volume;
    } else {
        return null;
    }
}
/**
 * Looks up a pod by primary key.
 *
 * @param podId id of the pod
 * @return the pod record, or the DAO's not-found result
 */
@Override
public HostPodVO findHostPodById(long podId) {
    HostPodVO pod = _hostPodDao.findById(podId);
    return pod;
}
/**
 * Returns the secondary storage host for a zone.
 *
 * NOTE: the method name's trailing capital T is a typo preserved for
 * interface compatibility — renaming it would break callers.
 *
 * @param zoneId id of the zone
 * @return the zone's secondary storage host as reported by the storage manager
 */
@Override
public HostVO findSecondaryStorageHosT(long zoneId) {
    HostVO secondaryStorageHost = _storageMgr.getSecondaryStorageHost(zoneId);
    return secondaryStorageHost;
}
/**
 * Searches for public IP addresses visible to the caller.
 *
 * Admins (or a null caller) may search across a domain subtree and by
 * account name; regular users see only their own addresses. By default
 * only allocated addresses are returned.
 *
 * @param cmd list-public-IPs command with optional zone, address, VLAN,
 *            keyword and virtual-network filters plus paging
 * @return matching IP address records
 * @throws InvalidParameterValueException when the named account cannot be
 *         found in the requested domain
 * @throws PermissionDeniedException when the requested domain is outside
 *         the caller's subtree
 */
@Override
public List<IPAddressVO> searchForIPAddresses(ListPublicIpAddressesCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    Long accountId = null;
    if ((account == null) || isAdmin(account.getType())) {
        // validate domainId before proceeding
        if (domainId != null) {
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Unable to list IP addresses for domain " + domainId + ", permission denied.");
            }
            if (accountName != null) {
                // resolve the named account within the requested domain
                Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
                if (userAccount != null) {
                    accountId = userAccount.getId();
                } else {
                    throw new InvalidParameterValueException("Unable to find account " + accountName + " in domain " + domainId);
                }
            }
        } else {
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
    } else {
        // regular users are limited to their own addresses
        accountId = account.getId();
    }
    Boolean isAllocated = cmd.isAllocatedOnly();
    if (isAllocated == null) {
        // default: only show addresses that have been allocated
        isAllocated = Boolean.TRUE;
    }
    Filter searchFilter = new Filter(IPAddressVO.class, "address", false, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object zone = cmd.getZoneId();
    Object address = cmd.getIpAddress();
    Object vlan = cmd.getVlanId();
    Object keyword = cmd.getKeyword();
    Object forVirtualNetwork = cmd.isForVirtualNetwork();
    // Build the criteria skeleton; parameters are bound below.
    SearchBuilder<IPAddressVO> sb = _publicIpAddressDao.createSearchBuilder();
    sb.and("accountIdEQ", sb.entity().getAllocatedToAccountId(), SearchCriteria.Op.EQ);
    sb.and("dataCenterId", sb.entity().getDataCenterId(), SearchCriteria.Op.EQ);
    sb.and("address", sb.entity().getAddress(), SearchCriteria.Op.EQ);
    sb.and("vlanDbId", sb.entity().getVlanId(), SearchCriteria.Op.EQ);
    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getAllocatedInDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    if (forVirtualNetwork != null) {
        // join against the VLAN table to filter by VLAN type
        SearchBuilder<VlanVO> vlanSearch = _vlanDao.createSearchBuilder();
        vlanSearch.and("vlanType", vlanSearch.entity().getVlanType(), SearchCriteria.Op.EQ);
        sb.join("vlanSearch", vlanSearch, sb.entity().getVlanId(), vlanSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    if ((isAllocated != null) && (isAllocated == true)) {
        // allocated means the allocation timestamp is set
        sb.and("allocated", sb.entity().getAllocatedTime(), SearchCriteria.Op.NNULL);
    }
    SearchCriteria<IPAddressVO> sc = sb.create();
    if (accountId != null) {
        sc.setParameters("accountIdEQ", accountId);
    } else if (domainId != null) {
        // subtree match on the domain path set up in the join above
        DomainVO domain = _domainDao.findById(domainId);
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
    }
    if (forVirtualNetwork != null) {
        VlanType vlanType = (Boolean) forVirtualNetwork ? VlanType.VirtualNetwork : VlanType.DirectAttached;
        sc.setJoinParameters("vlanSearch", "vlanType", vlanType);
    }
    if (zone != null) {
        sc.setParameters("dataCenterId", zone);
    }
    // a bare keyword is treated as an exact address to look up
    if ((address == null) && (keyword != null)) {
        address = keyword;
    }
    if (address != null) {
        sc.setParameters("address", address);
    }
    if (vlan != null) {
        sc.setParameters("vlanDbId", vlan);
    }
    return _publicIpAddressDao.search(sc, searchFilter);
}
/**
 * Authenticates a user either by password or by single-sign-on request
 * signature.
 *
 * With a password, authentication delegates to
 * {@code getUserAccount(username, password, domainId)}. Without one, the
 * request parameters are canonicalized (sorted, URL-encoded, lowercased)
 * and their HMAC-SHA1 signature is verified against the configured SSO key,
 * with the request timestamp required to be within the configured tolerance
 * of the server clock.
 *
 * @param username          login name
 * @param password          plaintext password, or null to use SSO signing
 * @param domainId          domain of the account
 * @param requestParameters raw request parameters (name -> String[] values)
 *                          used for SSO signature verification
 * @return the authenticated user account, or null on any failure
 */
@Override
public UserAccount authenticateUser(String username, String password, Long domainId, Map<String, Object[]> requestParameters) {
    UserAccount user = null;
    if (password != null) {
        user = getUserAccount(username, password, domainId);
    } else {
        String key = getConfigurationValue("security.singlesignon.key");
        if (key == null) {
            // the SSO key is gone, don't authenticate
            return null;
        }
        String singleSignOnTolerance = getConfigurationValue("security.singlesignon.tolerance.millis");
        if (singleSignOnTolerance == null) {
            // the SSO tolerance is gone (how much time before/after system time we'll allow the login request to be valid), don't authenticate
            return null;
        }
        long tolerance = Long.parseLong(singleSignOnTolerance);
        String signature = null;
        long timestamp = 0L;
        String unsignedRequest = null;
        // - build a request string with sorted params, make sure it's all lowercase
        // - sign the request, verify the signature is the same
        List<String> parameterNames = new ArrayList<String>();
        for (Object paramNameObj : requestParameters.keySet()) {
            parameterNames.add((String)paramNameObj); // put the name in a list that we'll sort later
        }
        Collections.sort(parameterNames);
        try {
            for (String paramName : parameterNames) {
                // parameters come as name/value pairs in the form String/String[]
                String paramValue = ((String[])requestParameters.get(paramName))[0];
                if ("signature".equalsIgnoreCase(paramName)) {
                    // the signature itself is excluded from the signed string
                    signature = paramValue;
                } else {
                    if ("timestamp".equalsIgnoreCase(paramName)) {
                        String timestampStr = paramValue;
                        try {
                            // If the timestamp is in a valid range according to our tolerance, verify the request signature, otherwise return null to indicate authentication failure
                            timestamp = Long.parseLong(timestampStr);
                            long currentTime = System.currentTimeMillis();
                            if (Math.abs(currentTime - timestamp) > tolerance) {
                                if (s_logger.isDebugEnabled()) {
                                    s_logger.debug("Expired timestamp passed in to login, current time = " + currentTime + ", timestamp = " + timestamp);
                                }
                                return null;
                            }
                        } catch (NumberFormatException nfe) {
                            if (s_logger.isDebugEnabled()) {
                                s_logger.debug("Invalid timestamp passed in to login: " + timestampStr);
                            }
                            return null;
                        }
                    }
                    // append "name=url-encoded-value" (spaces as %20, not +)
                    if (unsignedRequest == null) {
                        unsignedRequest = paramName + "=" + URLEncoder.encode(paramValue, "UTF-8").replaceAll("\\+", "%20");
                    } else {
                        unsignedRequest = unsignedRequest + "&" + paramName + "=" + URLEncoder.encode(paramValue, "UTF-8").replaceAll("\\+", "%20");
                    }
                }
            }
            if ((signature == null) || (timestamp == 0L)) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Missing parameters in login request, signature = " + signature + ", timestamp = " + timestamp);
                }
                return null;
            }
            // canonical form is all lowercase before signing
            unsignedRequest = unsignedRequest.toLowerCase();
            Mac mac = Mac.getInstance("HmacSHA1");
            SecretKeySpec keySpec = new SecretKeySpec(key.getBytes(), "HmacSHA1");
            mac.init(keySpec);
            mac.update(unsignedRequest.getBytes());
            byte[] encryptedBytes = mac.doFinal();
            String computedSignature = new String(Base64.encodeBase64(encryptedBytes));
            // NOTE(review): String.equals is not constant-time; a timing-safe
            // comparison (e.g. MessageDigest.isEqual) would be preferable here.
            boolean equalSig = signature.equals(computedSignature);
            if (!equalSig) {
                s_logger.info("User signature: " + signature + " is not equaled to computed signature: " + computedSignature);
            } else {
                user = getUserAccount(username, domainId);
            }
        } catch (Exception ex) {
            s_logger.error("Exception authenticating user", ex);
            return null;
        }
    }
    if (user != null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("User: " + username + " in domain " + domainId + " has successfully logged in");
        }
        return user;
    } else {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("User: " + username + " in domain " + domainId + " has failed to log in");
        }
        return null;
    }
}
/**
 * Records a logout event for the given user.
 *
 * @param userId id of the user logging out; if no such user account
 *               exists, nothing is recorded
 */
@Override
public void logoutUser(Long userId) {
    UserAccount userAcct = _userAccountDao.findById(userId);
    if (userAcct == null) {
        // else log some kind of error event? This likely means the user doesn't exist, or has been deleted...
        return;
    }
    EventUtils.saveEvent(userId, userAcct.getAccountId(), EventTypes.EVENT_USER_LOGOUT, "user has logged out");
}
/**
 * Lists every template, including removed ones.
 *
 * @return all templates known to the DAO (removed rows included)
 */
@Override
public List<VMTemplateVO> listAllTemplates() {
    List<VMTemplateVO> templates = _templateDao.listAllIncludingRemoved();
    return templates;
}
/**
 * Lists guest OS entries, optionally filtered by id and/or OS category.
 *
 * @param cmd list-guest-OS command with optional id, category id and paging
 * @return matching guest OS rows ordered by display name
 */
@Override
public List<GuestOSVO> listGuestOSByCriteria(ListGuestOsCmd cmd) {
    Filter pageFilter = new Filter(GuestOSVO.class, "displayName", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Long osId = cmd.getId();
    Long categoryId = cmd.getOsCategoryId();

    SearchBuilder<GuestOSVO> builder = _guestOSDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);
    builder.and("categoryId", builder.entity().getCategoryId(), SearchCriteria.Op.EQ);

    SearchCriteria<GuestOSVO> criteria = builder.create();
    if (osId != null) {
        criteria.setParameters("id", osId);
    }
    if (categoryId != null) {
        criteria.setParameters("categoryId", categoryId);
    }
    return _guestOSDao.search(criteria, pageFilter);
}
/**
 * Lists guest OS categories, optionally filtered by id.
 *
 * @param cmd list-guest-OS-categories command with optional id and paging
 * @return matching category rows ordered by id
 */
@Override
public List<GuestOSCategoryVO> listGuestOSCategoriesByCriteria(ListGuestOsCategoriesCmd cmd) {
    Filter pageFilter = new Filter(GuestOSCategoryVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Long categoryId = cmd.getId();

    SearchBuilder<GuestOSCategoryVO> builder = _guestOSCategoryDao.createSearchBuilder();
    builder.and("id", builder.entity().getId(), SearchCriteria.Op.EQ);

    SearchCriteria<GuestOSCategoryVO> criteria = builder.create();
    if (categoryId != null) {
        criteria.setParameters("id", categoryId);
    }
    return _guestOSCategoryDao.search(criteria, pageFilter);
}
    @Override
    public String getConfigurationValue(String name) {
        // Single configuration value lookup, keyed by name, via the config DAO.
        return _configDao.getValue(name);
    }
    @Override
    public ConsoleProxyInfo getConsoleProxy(long dataCenterId, long userVmId) {
        // Delegates to the proxy manager to assign a console proxy for the VM
        // in the given zone.
        return _consoleProxyMgr.assignProxy(dataCenterId, userVmId);
    }
    @Override
    public ConsoleProxyVO startConsoleProxy(long instanceId) {
        // Thin delegation to the console proxy manager.
        return _consoleProxyMgr.startProxy(instanceId);
    }
    @Override
    public ConsoleProxyVO stopConsoleProxy(long instanceId) {
        // Stop via the proxy manager, then re-read the row so the caller gets
        // the proxy's current persisted state.
        _consoleProxyMgr.stopProxy(instanceId);
        return _consoleProxyDao.findById(instanceId);
    }
    @Override
    public ConsoleProxyVO rebootConsoleProxy(long instanceId) {
        // Reboot via the proxy manager, then re-read the row so the caller gets
        // the proxy's current persisted state.
        _consoleProxyMgr.rebootProxy(instanceId);
        return _consoleProxyDao.findById(instanceId);
    }
@Override
public String getConsoleAccessUrlRoot(long vmId) {
VMInstanceVO vm = this.findVMInstanceById(vmId);
if (vm != null) {
ConsoleProxyInfo proxy = getConsoleProxy(vm.getDataCenterId(), vmId);
if (proxy != null) {
return proxy.getProxyImageUrl();
}
}
return null;
}
@Override
public Pair<String, Integer> getVncPort(VirtualMachine vm) {
if (vm.getHostId() == null) {
s_logger.warn("VM " + vm.getName() + " does not have host, return -1 for its VNC port");
return new Pair<String, Integer>(null, -1);
}
if(s_logger.isTraceEnabled()) {
s_logger.trace("Trying to retrieve VNC port from agent about VM " + vm.getName());
}
GetVncPortAnswer answer = (GetVncPortAnswer) _agentMgr.easySend(vm.getHostId(), new GetVncPortCommand(vm.getId(), vm.getInstanceName()));
if(answer != null && answer.getResult()) {
return new Pair<String, Integer>(answer.getAddress(), answer.getPort());
}
return new Pair<String, Integer>(null, -1);
}
    @Override
    public ConsoleProxyVO findConsoleProxyById(long instanceId) {
        // Direct DAO lookup; returns null when no such proxy exists.
        return _consoleProxyDao.findById(instanceId);
    }
@Override
public List<DomainVO> searchForDomains(ListDomainsCmd cmd) throws PermissionDeniedException {
Long domainId = cmd.getId();
Account account = UserContext.current().getCaller();
String path = null;
if (account != null && account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) {
DomainVO domain = _domainDao.findById(account.getDomainId());
if (domain != null) {
path = domain.getPath();
}
}
Filter searchFilter = new Filter(DomainVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
String domainName = cmd.getDomainName();
Integer level = cmd.getLevel();
Object keyword = cmd.getKeyword();
SearchBuilder<DomainVO> sb = _domainDao.createSearchBuilder();
sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
sb.and("level", sb.entity().getLevel(), SearchCriteria.Op.EQ);
sb.and("path", sb.entity().getPath(), SearchCriteria.Op.LIKE);
SearchCriteria<DomainVO> sc = sb.create();
if (keyword != null) {
SearchCriteria<DomainVO> ssc = _domainDao.createSearchCriteria();
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (domainName != null) {
sc.setParameters("name", "%" + domainName + "%");
}
if (level != null) {
sc.setParameters("level", level);
}
if (domainId != null) {
sc.setParameters("id", domainId);
}
if (path != null) {
sc.setParameters("path", "%" +path+"%");
}
return _domainDao.search(sc, searchFilter);
}
    @Override
    public List<DomainVO> searchForDomainChildren(ListDomainChildrenCmd cmd) throws PermissionDeniedException {
        // Lists the children of a domain, optionally recursively (via a path
        // prefix match). Callers may only inspect domains inside their subtree.
        Filter searchFilter = new Filter(DomainVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
        Long domainId = cmd.getId();
        String domainName = cmd.getDomainName();
        Boolean isRecursive = cmd.isRecursive();
        Object keyword = cmd.getKeyword();
        String path = null;
        if (isRecursive == null) {
            isRecursive = false;
        }
        Account account = UserContext.current().getCaller();
        if (account != null) {
            if (domainId != null) {
                // Permission check: the requested domain must be in the caller's subtree.
                if (!_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                    throw new PermissionDeniedException("Unable to list domains children for domain id " + domainId + ", permission denied.");
                }
            } else {
                // Default to the caller's own domain.
                domainId = account.getDomainId();
            }
        }
        DomainVO domain = _domainDao.findById(domainId);
        if (domain != null && isRecursive) {
            // Recursive listing switches from a parent-id match to a
            // path-prefix match (domainId is cleared so only 'path' filters).
            path = domain.getPath();
            domainId = null;
        }
        List<DomainVO> domainList = searchForDomainChildren(searchFilter, domainId, domainName,
                keyword, path);
        return domainList;
    }
private List<DomainVO> searchForDomainChildren(Filter searchFilter,
Long domainId, String domainName, Object keyword, String path) {
SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
if (keyword != null) {
SearchCriteria<DomainVO> ssc = _domainDao.createSearchCriteria();
ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("name", SearchCriteria.Op.SC, ssc);
}
if (domainId != null) {
sc.addAnd("parent", SearchCriteria.Op.EQ, domainId);
}
if (domainName != null) {
sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + domainName + "%");
}
if (path != null) {
sc.addAnd("path", SearchCriteria.Op.NEQ, path);
sc.addAnd("path", SearchCriteria.Op.LIKE, path + "%");
}
return _domainDao.search(sc, searchFilter);
}
    @Override
    public DomainVO createDomain(CreateDomainCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
        // Creates a new domain under the given (or ROOT) parent, after checking
        // the parent exists, the caller owns that part of the tree, and the
        // name is unique among the parent's children.
        String name = cmd.getDomainName();
        Long parentId = cmd.getParentDomainId();
        Long ownerId = UserContext.current().getCaller().getId();
        Account account = UserContext.current().getCaller();
        if (ownerId == null) {
            // Fall back to the system owner (id 1).
            ownerId = Long.valueOf(1);
        }
        if (parentId == null) {
            parentId = Long.valueOf(DomainVO.ROOT_DOMAIN);
        }
        DomainVO parentDomain = _domainDao.findById(parentId);
        if (parentDomain == null) {
            throw new InvalidParameterValueException("Unable to create domain " + name + ", parent domain " + parentId + " not found.");
        }
        // Caller must be allowed to create under this parent.
        if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), parentId)) {
            throw new PermissionDeniedException("Unable to create domain " + name + ", permission denied.");
        }
        // Enforce name uniqueness among siblings.
        SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
        sc.addAnd("name", SearchCriteria.Op.EQ, name);
        sc.addAnd("parent", SearchCriteria.Op.EQ, parentId);
        List<DomainVO> domains = _domainDao.search(sc, null);
        if ((domains == null) || domains.isEmpty()) {
            DomainVO domain = new DomainVO(name, ownerId, parentId);
            try {
                return _domainDao.create(domain);
            } catch (IllegalArgumentException ex) {
                s_logger.warn("Failed to create domain ", ex);
                throw ex;
            }
        } else {
            throw new InvalidParameterValueException("Domain with name " + name + " already exists for the parent id=" + parentId);
        }
    }
    @Override
    public boolean deleteDomain(DeleteDomainCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
        // Deletes a domain. With cleanup=true, recursively removes sub-domains
        // and their accounts first; otherwise the remove fails unless the
        // domain is already empty. ROOT can never be deleted.
        Account account = UserContext.current().getCaller();
        Long domainId = cmd.getId();
        Boolean cleanup = cmd.getCleanup();
        if ((domainId == DomainVO.ROOT_DOMAIN) || ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId))) {
            throw new PermissionDeniedException("Unable to delete domain " + domainId + ", permission denied.");
        }
        try {
            DomainVO domain = _domainDao.findById(domainId);
            if (domain != null) {
                long ownerId = domain.getAccountId();
                if ((cleanup != null) && cleanup.booleanValue()) {
                    boolean success = cleanupDomain(domainId, ownerId);
                    if (!success) {
                        s_logger.error("Failed to clean up domain resources and sub domains, delete failed on domain " + domain.getName() + " (id: " + domainId + ").");
                        return false;
                    }
                } else {
                    // Non-cleanup path: DAO remove fails if children/users remain.
                    if (!_domainDao.remove(domainId)) {
                        s_logger.error("Delete failed on domain " + domain.getName() + " (id: " + domainId + "); please make sure all users and sub domains have been removed from the domain before deleting");
                        return false;
                    }
                }
            } else {
                throw new InvalidParameterValueException("Failed to delete domain nable " + domainId + ", domain not found");
            }
            return true;
        } catch (InvalidParameterValueException ex) {
            // Re-thrown so API callers see the parameter error as-is.
            throw ex;
        } catch (Exception ex) {
            s_logger.error("Exception deleting domain with id " + domainId, ex);
            return false;
        }
    }
    /**
     * Recursively removes a domain: first cleans up all sub-domains, then all
     * accounts in this domain (which releases their resources), and finally
     * removes the domain row itself. Returns true only if every step succeeded.
     */
    private boolean cleanupDomain(Long domainId, Long ownerId) throws ConcurrentOperationException, ResourceUnavailableException{
        boolean success = true;
        {
            SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
            sc.addAnd("parent", SearchCriteria.Op.EQ, domainId);
            List<DomainVO> domains = _domainDao.search(sc, null);
            // cleanup sub-domains first
            for (DomainVO domain : domains) {
                success = (success && cleanupDomain(domain.getId(), domain.getAccountId()));
            }
        }
        {
            // delete users which will also delete accounts and release resources for those accounts
            SearchCriteria<AccountVO> sc = _accountDao.createSearchCriteria();
            sc.addAnd("domainId", SearchCriteria.Op.EQ, domainId);
            List<AccountVO> accounts = _accountDao.search(sc, null);
            for (AccountVO account : accounts) {
                success = (success && _accountMgr.cleanupAccount(account, UserContext.current().getCallerUserId(), UserContext.current().getCaller()));
            }
        }
        // delete the domain itself
        boolean deleteDomainSuccess = _domainDao.remove(domainId);
        return success && deleteDomainSuccess;
    }
    @Override
    public DomainVO updateDomain(UpdateDomainCmd cmd) throws InvalidParameterValueException, PermissionDeniedException{
        // Renames a domain. Because domain paths embed names, a rename also
        // rewrites this domain's path and the paths of all its descendants.
        Long domainId = cmd.getId();
        String domainName = cmd.getDomainName();
        //check if domain exists in the system
        DomainVO domain = _domainDao.findById(domainId);
        if (domain == null) {
            throw new InvalidParameterValueException("Unable to find domain " + domainId);
        } else if (domain.getParent() == null) {
            //check if domain is ROOT domain - and deny to edit it
            throw new InvalidParameterValueException("ROOT domain can not be edited");
        }
        // check permissions
        Account account = UserContext.current().getCaller();
        if ((account != null) && !isChildDomain(account.getDomainId(), domain.getId())) {
            throw new PermissionDeniedException("Unable to update domain " + domainId + ", permission denied");
        }
        // No-op if the name is missing or unchanged.
        if (domainName == null || domainName.equals(domain.getName())) {
            return _domainDao.findById(domainId);
        }
        // Reject the rename when another domain already uses the new name.
        SearchCriteria<DomainVO> sc = _domainDao.createSearchCriteria();
        sc.addAnd("name", SearchCriteria.Op.EQ, domainName);
        List<DomainVO> domains = _domainDao.search(sc, null);
        if ((domains == null) || domains.isEmpty()) {
            //whilst updating a domain name, update its path and update all its children's path
            domain = _domainDao.findById(domainId);
            String updatedDomainPath = getUpdatedDomainPath(domain.getPath(),domainName);
            // Children are rewritten first, then the domain itself.
            updateDomainChildren(domain,updatedDomainPath);
            _domainDao.update(domainId, domainName, updatedDomainPath);
            return _domainDao.findById(domainId);
        } else {
            domain = _domainDao.findById(domainId);
            s_logger.error("Domain with name " + domainName + " already exists in the system");
            throw new CloudRuntimeException("Failed to update domain " + domainId);
        }
    }
private String getUpdatedDomainPath(String oldPath, String newName){
String[] tokenizedPath = oldPath.split("/");
tokenizedPath[tokenizedPath.length-1] = newName;
StringBuilder finalPath = new StringBuilder();
for(String token : tokenizedPath){
finalPath.append(token);
finalPath.append("/");
}
return finalPath.toString();
}
private void updateDomainChildren(DomainVO domain, String updatedDomainPrefix){
List<DomainVO> domainChildren = _domainDao.findAllChildren(domain.getPath(), domain.getId());
//for each child, update the path
for(DomainVO dom : domainChildren){
dom.setPath(dom.getPath().replaceFirst(domain.getPath(), updatedDomainPrefix));
_domainDao.update(dom.getId(), dom);
}
}
@Override
public Long findDomainIdByAccountId(Long accountId) {
if (accountId == null) {
return null;
}
AccountVO account = _accountDao.findById(accountId);
if (account != null) {
return account.getDomainId();
}
return null;
}
    @Override
    public DomainVO findDomainByPath(String domainPath) {
        // Direct DAO lookup by the domain's full path.
        return _domainDao.findDomainByPath(domainPath);
    }
@Override
public List<? extends Alert> searchForAlerts(ListAlertsCmd cmd) {
Filter searchFilter = new Filter(AlertVO.class, "lastSent", false, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<AlertVO> sc = _alertDao.createSearchCriteria();
Object type = cmd.getType();
Object keyword = cmd.getKeyword();
if (keyword != null) {
SearchCriteria<AlertVO> ssc = _alertDao.createSearchCriteria();
ssc.addOr("subject", SearchCriteria.Op.LIKE, "%" + keyword + "%");
sc.addAnd("subject", SearchCriteria.Op.SC, ssc);
}
if (type != null) {
sc.addAnd("type", SearchCriteria.Op.EQ, type);
}
return _alertDao.search(sc, searchFilter);
}
@Override
public List<CapacityVO> listCapacities(ListCapacityCmd cmd) {
// make sure capacity is accurate before displaying it anywhere
// NOTE: listCapacities is currently called by the UI only, so this
// shouldn't be called much since it checks all hosts/VMs
// to figure out what has been allocated.
_alertMgr.recalculateCapacity();
Filter searchFilter = new Filter(CapacityVO.class, "capacityType", true, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchCriteria<CapacityVO> sc = _capacityDao.createSearchCriteria();
Object type = cmd.getType();
Object zoneId = cmd.getZoneId();
Object podId = cmd.getPodId();
Object hostId = cmd.getHostId();
if (type != null) {
sc.addAnd("capacityType", SearchCriteria.Op.EQ, type);
}
if (zoneId != null) {
sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
}
if (podId != null) {
sc.addAnd("podId", SearchCriteria.Op.EQ, podId);
}
if (hostId != null) {
sc.addAnd("hostOrPoolId", SearchCriteria.Op.EQ, hostId);
}
return _capacityDao.search(sc, searchFilter);
}
@Override
public long getMemoryUsagebyHost(Long hostId) {
long mem = 0;
List<VMInstanceVO> vms = _vmInstanceDao.listUpByHostIdTypes(hostId, VirtualMachine.Type.DomainRouter);
mem += vms.size() * _routerRamSize * 1024L * 1024L;
vms = _vmInstanceDao.listUpByHostIdTypes(hostId, VirtualMachine.Type.SecondaryStorageVm);
mem += vms.size() * _ssRamSize * 1024L * 1024L;
vms = _vmInstanceDao.listUpByHostIdTypes(hostId, VirtualMachine.Type.ConsoleProxy);
mem += vms.size() * _proxyRamSize * 1024L * 1024L;
List<UserVmVO> instances = _userVmDao.listUpByHostId(hostId);
for (UserVmVO vm : instances) {
ServiceOffering so = findServiceOfferingById(vm.getServiceOfferingId());
if (so != null) {
mem += so.getRamSize() * 1024L * 1024L;
}
}
return mem;
}
    @Override
    public DiskOfferingVO findDiskOfferingById(long diskOfferingId) {
        // Direct DAO lookup; returns null when no such offering exists.
        return _diskOfferingDao.findById(diskOfferingId);
    }
    @Override
    public List<DiskOfferingVO> findPrivateDiskOffering() {
        // Thin delegation to the disk offering DAO.
        return _diskOfferingDao.findPrivateDiskOffering();
    }
    // Hook for subclasses to restrict operations to a particular media type;
    // the base implementation accepts every template.
    protected boolean templateIsCorrectType(VMTemplateVO template) {
        return true;
    }
public static boolean isAdmin(short accountType) {
return ((accountType == Account.ACCOUNT_TYPE_ADMIN) ||
(accountType == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) ||
(accountType == Account.ACCOUNT_TYPE_READ_ONLY_ADMIN));
}
    @Override @DB
    public boolean updateTemplatePermissions(UpdateTemplatePermissionsCmd cmd) {
        // Template- and ISO-permission updates share one implementation.
        return updateTemplateOrIsoPermissions(cmd);
    }
    @Override @DB
    public boolean updateTemplatePermissions(UpdateIsoPermissionsCmd cmd) {
        // Template- and ISO-permission updates share one implementation.
        return updateTemplateOrIsoPermissions(cmd);
    }
@DB
protected boolean updateTemplateOrIsoPermissions(UpdateTemplateOrIsoPermissionsCmd cmd) {
Transaction txn = Transaction.currentTxn();
//Input validation
Long id = cmd.getId();
Account account = UserContext.current().getCaller();
List<String> accountNames = cmd.getAccountNames();
Long userId = UserContext.current().getCallerUserId();
Boolean isFeatured = cmd.isFeatured();
Boolean isPublic = cmd.isPublic();
String operation = cmd.getOperation();
String mediaType = "";
VMTemplateVO template = _templateDao.findById(id);
if (template == null || !templateIsCorrectType(template)) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "unable to find " + mediaType + " with id " + id);
}
if(cmd instanceof UpdateTemplatePermissionsCmd)
{
mediaType = "template";
if(template.getFormat().equals(ImageFormat.ISO))
{
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Please provide a valid template");
}
}
if(cmd instanceof UpdateIsoPermissionsCmd)
{
mediaType = "iso";
if(!template.getFormat().equals(ImageFormat.ISO))
{
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Please provide a valid iso");
}
}
if (account != null)
{
if (!isAdmin(account.getType()) && (template.getAccountId() != account.getId())) {
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "unable to update permissions for " + mediaType + " with id " + id);
} else if (account.getType() != Account.ACCOUNT_TYPE_ADMIN) {
Long templateOwnerDomainId = findDomainIdByAccountId(template.getAccountId());
if (!isChildDomain(account.getDomainId(), templateOwnerDomainId)) {
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Unable to update permissions for " + mediaType + " with id " + id);
}
}
}
// If command is executed via 8096 port, set userId to the id of System account (1)
if (userId == null) {
userId = Long.valueOf(User.UID_SYSTEM);
}
// If the template is removed throw an error.
if (template.getRemoved() != null){
s_logger.error("unable to update permissions for " + mediaType + " with id " + id + " as it is removed ");
throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "unable to update permissions for " + mediaType + " with id " + id + " as it is removed ");
}
if (id == Long.valueOf(1)) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "unable to update permissions for " + mediaType + " with id " + id);
}
boolean isAdmin = ((account == null) || isAdmin(account.getType()));
boolean allowPublicUserTemplates = Boolean.parseBoolean(getConfigurationValue("allow.public.user.templates"));
if (!isAdmin && !allowPublicUserTemplates && isPublic != null && isPublic) {
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Only private " + mediaType + "s can be created.");
}
// // package up the accountNames as a list
// List<String> accountNameList = new ArrayList<String>();
if (accountNames != null)
{
if ((operation == null) || (!operation.equalsIgnoreCase("add") && !operation.equalsIgnoreCase("remove") && !operation.equalsIgnoreCase("reset")))
{
throw new ServerApiException(BaseCmd.PARAM_ERROR, "Invalid operation on accounts, the operation must be either 'add' or 'remove' in order to modify launch permissions." +
" Given operation is: '" + operation + "'");
}
// StringTokenizer st = new StringTokenizer(accountNames, ",");
// while (st.hasMoreTokens()) {
// accountNameList.add(st.nextToken());
// }
}
Long accountId = template.getAccountId();
if (accountId == null) {
// if there is no owner of the template then it's probably already a public template (or domain private template) so publishing to individual users is irrelevant
throw new InvalidParameterValueException("Update template permissions is an invalid operation on template " + template.getName());
}
VMTemplateVO updatedTemplate = _templateDao.createForUpdate();
if (isPublic != null) {
updatedTemplate.setPublicTemplate(isPublic.booleanValue());
}
if (isFeatured != null) {
updatedTemplate.setFeatured(isFeatured.booleanValue());
}
_templateDao.update(template.getId(), updatedTemplate);
Long domainId;
domainId = (null == account) ? DomainVO.ROOT_DOMAIN : account.getDomainId(); // Account == null for 8096 and so its safe for domainid = ROOT
if ("add".equalsIgnoreCase(operation)) {
txn.start();
for (String accountName : accountNames) {
Account permittedAccount = _accountDao.findActiveAccount(accountName, domainId);
if (permittedAccount != null) {
if (permittedAccount.getId() == account.getId()) {
continue; // don't grant permission to the template owner, they implicitly have permission
}
LaunchPermissionVO existingPermission = _launchPermissionDao.findByTemplateAndAccount(id, permittedAccount.getId());
if (existingPermission == null) {
LaunchPermissionVO launchPermission = new LaunchPermissionVO(id, permittedAccount.getId());
_launchPermissionDao.persist(launchPermission);
}
} else {
txn.rollback();
throw new InvalidParameterValueException("Unable to grant a launch permission to account " + accountName + ", account not found. "
+ "No permissions updated, please verify the account names and retry.");
}
}
txn.commit();
} else if ("remove".equalsIgnoreCase(operation)) {
List<Long> accountIds = new ArrayList<Long>();
for (String accountName : accountNames) {
Account permittedAccount = _accountDao.findActiveAccount(accountName, domainId);
if (permittedAccount != null) {
accountIds.add(permittedAccount.getId());
}
}
_launchPermissionDao.removePermissions(id, accountIds);
} else if ("reset".equalsIgnoreCase(operation)) {
// do we care whether the owning account is an admin? if the
// owner is an admin, will we still set public to false?
updatedTemplate = _templateDao.createForUpdate();
updatedTemplate.setPublicTemplate(false);
updatedTemplate.setFeatured(false);
_templateDao.update(template.getId(), updatedTemplate);
_launchPermissionDao.removeAllPermissions(id);
}
return true;
}
@Override
public List<String> listTemplatePermissions(ListTemplateOrIsoPermissionsCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
Account account = UserContext.current().getCaller();
Long domainId = cmd.getDomainId();
String acctName = cmd.getAccountName();
Long id = cmd.getId();
Long accountId = null;
if ((account == null) || account.getType() == Account.ACCOUNT_TYPE_ADMIN) {
// validate domainId before proceeding
if (domainId != null) {
if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
throw new PermissionDeniedException("Invalid domain id (" + domainId + ") given, unable to list " + cmd.getMediaType() + " permissions.");
}
if (acctName != null) {
Account userAccount = _accountDao.findActiveAccount(acctName, domainId);
if (userAccount != null) {
accountId = userAccount.getId();
} else {
throw new PermissionDeniedException("Unable to find account " + acctName + " in domain " + domainId);
}
}
}
} else {
accountId = account.getId();
}
VMTemplateVO template = _templateDao.findById(id.longValue());
if (template == null || !templateIsCorrectType(template)) {
throw new InvalidParameterValueException("unable to find " + cmd.getMediaType() + " with id " + id);
}
if (accountId != null && !template.isPublicTemplate()) {
if (account.getType() == Account.ACCOUNT_TYPE_NORMAL && template.getAccountId() != accountId) {
throw new PermissionDeniedException("unable to list permissions for " + cmd.getMediaType() + " with id " + id);
} else if (account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN) {
DomainVO accountDomain = _domainDao.findById(account.getDomainId());
Account templateAccount = _accountDao.findById(template.getAccountId());
DomainVO templateDomain = _domainDao.findById(templateAccount.getDomainId());
if (!templateDomain.getPath().contains(accountDomain.getPath())) {
throw new PermissionDeniedException("unable to list permissions for " + cmd.getMediaType() + " with id " + id);
}
}
}
if (id == Long.valueOf(1)) {
throw new PermissionDeniedException("unable to list permissions for " + cmd.getMediaType() + " with id " + id);
}
List<String> accountNames = new ArrayList<String>();
List<LaunchPermissionVO> permissions = _launchPermissionDao.findByTemplate(id);
if ((permissions != null) && !permissions.isEmpty()) {
for (LaunchPermissionVO permission : permissions) {
Account acct = _accountDao.findById(permission.getAccountId());
accountNames.add(acct.getAccountName());
}
}
return accountNames;
}
    /**
     * Collects disk offerings visible to a non-root caller: offerings of the
     * caller's domain plus every ancestor domain up to ROOT. Public offerings
     * are appended only when no explicit name/id/keyword filter was given.
     */
    private List<DiskOfferingVO> searchDiskOfferingsInternal(Account account, Object name, Object id, Object keyword, Filter searchFilter){
        //it was decided to return all offerings for the user's domain, and everything above till root (for normal user or domain admin)
        //list all offerings belonging to this domain, and all of its parents
        //check the parent, if not null, add offerings for that parent to list
        List<DiskOfferingVO> dol = new ArrayList<DiskOfferingVO>();
        DomainVO domainRecord = _domainDao.findById(account.getDomainId());
        boolean includePublicOfferings = true;
        if(domainRecord != null)
        {
            // Walk from the caller's domain up to ROOT, querying each level.
            while(true){
                SearchBuilder<DiskOfferingVO> sb = _diskOfferingDao.createSearchBuilder();
                sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
                sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
                SearchCriteria<DiskOfferingVO> sc = sb.create();
                if (keyword != null) {
                    // Any explicit filter suppresses the public-offering append below.
                    includePublicOfferings = false;
                    SearchCriteria<DiskOfferingVO> ssc = _diskOfferingDao.createSearchCriteria();
                    ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                    ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
                    sc.addAnd("name", SearchCriteria.Op.SC, ssc);
                }
                if (name != null) {
                    includePublicOfferings = false;
                    sc.setParameters("name", "%" + name + "%");
                }
                if (id != null) {
                    includePublicOfferings = false;
                    sc.setParameters("id", id);
                }
                //for this domain
                sc.addAnd("domainId", SearchCriteria.Op.EQ, domainRecord.getId());
                //search and add for this domain
                dol.addAll(_diskOfferingDao.search(sc, searchFilter));
                //try and move on to the next domain
                if(domainRecord.getParent() != null) {
                    domainRecord = _domainDao.findById(domainRecord.getParent());
                }
                else {
                    break;//now we got all the offerings for this user/dom adm
                }
            }
        }else{
            s_logger.error("Could not find the domainId for account:"+account.getAccountName());
            throw new CloudAuthenticationException("Could not find the domainId for account:"+account.getAccountName());
        }
        //add all the public offerings to the sol list before returning
        if(includePublicOfferings) {
            dol.addAll(_diskOfferingDao.findPublicDiskOfferings());
        }
        return dol;
    }
    /**
     * Lists disk offerings according to caller type: with an explicit domainId
     * the offerings of that domain are returned (permission-checked for
     * non-root callers); non-root callers otherwise get their domain hierarchy
     * via searchDiskOfferingsInternal; root admins get the plain filtered search.
     */
    @Override
    public List<DiskOfferingVO> searchForDiskOfferings(ListDiskOfferingsCmd cmd) {
        //Note
        //The list method for offerings is being modified in accordance with discussion with Will/Kevin
        //For now, we will be listing the following based on the usertype
        //1. For root, we will list all offerings
        //2. For domainAdmin and regular users, we will list everything in their domains+parent domains ... all the way till root
        Filter searchFilter = new Filter(DiskOfferingVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
        SearchBuilder<DiskOfferingVO> sb = _diskOfferingDao.createSearchBuilder();
        // SearchBuilder and SearchCriteria are now flexible so that the search builder can be built with all possible
        // search terms and only those with criteria can be set. The proper SQL should be generated as a result.
        Account account = UserContext.current().getCaller();
        Object name = cmd.getDiskOfferingName();
        Object id = cmd.getId();
        Object keyword = cmd.getKeyword();
        Long domainId = cmd.getDomainId();
        //Keeping this logic consistent with domain specific zones
        //if a domainId is provided, we just return the disk offering associated with this domain
        if(domainId != null){
            if(account.getType() == Account.ACCOUNT_TYPE_ADMIN){
                return _diskOfferingDao.listByDomainId(domainId);//no perm check
            }else{
                //check if the user's domain == do's domain || user's domain is a child of so's domain
                if(isPermissible(account.getDomainId(), domainId)){
                    //perm check succeeded
                    return _diskOfferingDao.listByDomainId(domainId);
                }else{
                    throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "The account:"+account.getAccountName()+" does not fall in the same domain hierarchy as the disk offering");
                }
            }
        }
        //For non-root users
        if((account.getType() == Account.ACCOUNT_TYPE_NORMAL || account.getType() == Account.ACCOUNT_TYPE_DOMAIN_ADMIN)){
            return searchDiskOfferingsInternal(account, name, id, keyword, searchFilter);
        }
        //For root users, preserving existing flow
        sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
        sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
        // FIXME: disk offerings should search back up the hierarchy for available disk offerings...
        /*
        sb.addAnd("domainId", sb.entity().getDomainId(), SearchCriteria.Op.EQ);
        if (domainId != null) {
            SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
            domainSearch.addAnd("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
            sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId());
        }
        */
        SearchCriteria<DiskOfferingVO> sc = sb.create();
        if (keyword != null) {
            SearchCriteria<DiskOfferingVO> ssc = _diskOfferingDao.createSearchCriteria();
            ssc.addOr("displayText", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
            sc.addAnd("name", SearchCriteria.Op.SC, ssc);
        }
        if (name != null) {
            sc.setParameters("name", "%" + name + "%");
        }
        if (id != null) {
            sc.setParameters("id", id);
        }
        // FIXME: disk offerings should search back up the hierarchy for available disk offerings...
        /*
        if (domainId != null) {
            sc.setParameters("domainId", domainId);
            //
            //DomainVO domain = _domainDao.findById((Long)domainId);
            //
            // I want to join on user_vm.domain_id = domain.id where domain.path like 'foo%'
            //sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
            //
        }
        */
        return _diskOfferingDao.search(sc, searchFilter);
    }
// @Override
// public AsyncJobResult queryAsyncJobResult(QueryAsyncJobResultCmd cmd) throws PermissionDeniedException {
// return queryAsyncJobResult(cmd.getId());
// }
    /**
     * Returns the result of an async job after ownership checks: API-server
     * requests are trusted; otherwise the caller must own the job, or be an
     * admin whose domain contains the job owner's domain.
     */
    @Override
    public AsyncJobResult queryAsyncJobResult(long jobId) throws PermissionDeniedException {
        AsyncJobVO job = _asyncMgr.getAsyncJob(jobId);
        if (job == null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("queryAsyncJobResult error: Permission denied, invalid job id " + jobId);
            }
            throw new PermissionDeniedException("Permission denied, invalid job id " + jobId);
        }
        // treat any requests from API server as trusted requests
        if (!UserContext.current().isApiServer() && job.getAccountId() != UserContext.current().getCaller().getId()) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Mismatched account id in job and user context, perform further securty check. job id: "
                        + jobId + ", job owner account: " + job.getAccountId() + ", accound id in current context: " + UserContext.current().getCaller().getId());
            }
            Account account = UserContext.current().getCaller();
            if (account != null) {
                if (isAdmin(account.getType())) {
                    // Admins may query jobs owned by accounts in their subtree.
                    Account jobAccount = _accountDao.findById(job.getAccountId());
                    if (jobAccount == null) {
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug("queryAsyncJobResult error: Permission denied, account no long exist for account id in context, job id: " + jobId
                                    + ", accountId " + job.getAccountId());
                        }
                        throw new PermissionDeniedException("Permission denied, invalid job ownership, job id: " + jobId);
                    }
                    if (!_domainDao.isChildDomain(account.getDomainId(), jobAccount.getDomainId())) {
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug("queryAsyncJobResult error: Permission denied, invalid ownership for job " + jobId + ", job account owner: "
                                    + job.getAccountId() + " in domain: " + jobAccount.getDomainId() + ", account id in context: " + account.getId() +
                                    " in domain: " + account.getDomainId());
                        }
                        throw new PermissionDeniedException("Permission denied, invalid job ownership, job id: " + jobId);
                    }
                } else {
                    // Non-admins may only query their own jobs.
                    if (s_logger.isDebugEnabled()) {
                        s_logger.debug("queryAsyncJobResult error: Permission denied, invalid ownership for job " + jobId + ", job account owner: "
                                + job.getAccountId() + ", account id in context: " + account.getId());
                    }
                    throw new PermissionDeniedException("Permission denied, invalid job ownership, job id: " + jobId);
                }
            }
        }
        return _asyncMgr.queryAsyncJobResult(jobId);
    }
    @Override
    public AsyncJobVO findAsyncJobById(long jobId) {
        // Thin delegation to the async job manager.
        return _asyncMgr.getAsyncJob(jobId);
    }
@Override
public String[] getApiConfig() {
return new String[] { "commands.properties" };
}
    /**
     * Periodic task that cleans up accounts flagged for removal. Serialized
     * across management servers via a global lock; each run opens its own DB
     * transaction and continues past per-account failures.
     */
    protected class AccountCleanupTask implements Runnable {
        @Override
        public void run() {
            try {
                // Only one server at a time may run the cleanup.
                GlobalLock lock = GlobalLock.getInternLock("AccountCleanup");
                if (lock == null) {
                    s_logger.debug("Couldn't get the global lock");
                    return;
                }
                if (!lock.lock(30)) {
                    s_logger.debug("Couldn't lock the db");
                    return;
                }
                Transaction txn = null;
                try {
                    txn = Transaction.open(Transaction.CLOUD_DB);
                    List<AccountVO> accounts = _accountDao.findCleanups();
                    s_logger.info("Found " + accounts.size() + " accounts to cleanup");
                    for (AccountVO account : accounts) {
                        s_logger.debug("Cleaning up " + account.getId());
                        try {
                            _accountMgr.cleanupAccount(account, _accountMgr.getSystemUser().getId(), _accountMgr.getSystemAccount());
                        } catch (Exception e) {
                            // One bad account must not stop the whole sweep.
                            s_logger.error("Skipping due to error on account " + account.getId(), e);
                        }
                    }
                } catch (Exception e) {
                    s_logger.error("Exception ", e);
                } finally {
                    // Close the transaction before releasing the global lock.
                    if(txn != null) {
                        txn.close();
                    }
                    lock.unlock();
                }
            } catch (Exception e) {
                s_logger.error("Exception ", e);
            }
        }
    }
/**
 * Periodic task that deletes events older than the configured retention
 * window (_purgeDelay, in days). Serialized across management servers
 * with the "EventPurge" global lock.
 */
protected class EventPurgeTask implements Runnable {
    @Override
    public void run() {
        try {
            GlobalLock lock = GlobalLock.getInternLock("EventPurge");
            if (lock == null) {
                s_logger.debug("Couldn't get the global lock");
                return;
            }
            if (!lock.lock(30)) {
                s_logger.debug("Couldn't lock the db");
                return;
            }
            try {
                // Cutoff = now minus the purge delay in days.
                final Calendar purgeCal = Calendar.getInstance();
                purgeCal.add(Calendar.DAY_OF_YEAR, -_purgeDelay);
                Date purgeTime = purgeCal.getTime();
                s_logger.debug("Deleting events older than: "+purgeTime.toString());
                List<EventVO> oldEvents = _eventDao.listOlderEvents(purgeTime);
                s_logger.debug("Found "+oldEvents.size()+" events to be purged");
                for (EventVO event : oldEvents){
                    _eventDao.expunge(event.getId());
                }
            } catch (Exception e) {
                s_logger.error("Exception ", e);
            } finally {
                lock.unlock();
            }
        } catch (Exception e) {
            s_logger.error("Exception ", e);
        }
    }
}
/**
 * Fetches a storage pool record by id.
 *
 * @param id id of the storage pool
 * @return the pool record returned by the pool DAO
 */
@Override
public StoragePoolVO findPoolById(Long id) {
    final StoragePoolVO pool = _poolDao.findById(id);
    return pool;
}
/**
 * Builds a {@link Criteria} from the list command's filters and delegates
 * to {@code searchForStoragePools(Criteria)}.
 */
@Override
public List<? extends StoragePoolVO> searchForStoragePools(ListStoragePoolsCmd cmd) {
    final Criteria criteria = new Criteria("id", Boolean.TRUE, cmd.getStartIndex(), cmd.getPageSizeVal());
    criteria.addCriteria(Criteria.ID, cmd.getId());
    criteria.addCriteria(Criteria.NAME, cmd.getStoragePoolName());
    criteria.addCriteria(Criteria.PATH, cmd.getPath());
    criteria.addCriteria(Criteria.ADDRESS, cmd.getIpAddress());
    criteria.addCriteria(Criteria.KEYWORD, cmd.getKeyword());
    criteria.addCriteria(Criteria.DATACENTERID, cmd.getZoneId());
    criteria.addCriteria(Criteria.PODID, cmd.getPodId());
    criteria.addCriteria(Criteria.CLUSTERID, cmd.getClusterId());
    return searchForStoragePools(criteria);
}
/**
 * Searches storage pools matching the given criteria. A keyword matches
 * against either pool name or pool type; all other criteria are exact
 * (or LIKE, for name) AND-filters.
 */
@Override
public List<? extends StoragePoolVO> searchForStoragePools(Criteria c) {
    final Filter pageFilter = new Filter(StoragePoolVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    final SearchCriteria<StoragePoolVO> sc = _poolDao.createSearchCriteria();

    final Object keyword = c.getCriteria(Criteria.KEYWORD);
    if (keyword != null) {
        // Keyword matches pool name OR pool type.
        final SearchCriteria<StoragePoolVO> keywordMatch = _poolDao.createSearchCriteria();
        keywordMatch.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordMatch.addOr("poolType", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, keywordMatch);
    }
    final Object id = c.getCriteria(Criteria.ID);
    if (id != null) {
        sc.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    final Object name = c.getCriteria(Criteria.NAME);
    if (name != null) {
        sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    final Object host = c.getCriteria(Criteria.HOST);
    if (host != null) {
        sc.addAnd("host", SearchCriteria.Op.EQ, host);
    }
    final Object path = c.getCriteria(Criteria.PATH);
    if (path != null) {
        sc.addAnd("path", SearchCriteria.Op.EQ, path);
    }
    final Object zoneId = c.getCriteria(Criteria.DATACENTERID);
    if (zoneId != null) {
        sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    final Object podId = c.getCriteria(Criteria.PODID);
    if (podId != null) {
        sc.addAnd("podId", SearchCriteria.Op.EQ, podId);
    }
    final Object address = c.getCriteria(Criteria.ADDRESS);
    if (address != null) {
        sc.addAnd("hostAddress", SearchCriteria.Op.EQ, address);
    }
    final Object clusterId = c.getCriteria(Criteria.CLUSTERID);
    if (clusterId != null) {
        sc.addAnd("clusterId", SearchCriteria.Op.EQ, clusterId);
    }
    return _poolDao.search(sc, pageFilter);
}
/**
 * Finds detail entries for the given storage pool matching {@code value};
 * straight delegation to the pool DAO.
 */
@Override
public List<String> searchForStoragePoolDetails(long poolId, String value) {
    return _poolDao.searchForStoragePoolDetails(poolId, value);
}
/**
 * Lists async jobs visible to the caller, optionally filtered by
 * account/domain, keyword (matched against the job command string) and
 * start date. Admin callers may search across their child domains;
 * regular users only see their own jobs.
 *
 * @throws InvalidParameterValueException when a named account cannot be found
 * @throws PermissionDeniedException when the caller asks for a domain
 *         outside its own subtree
 */
@Override
public List<AsyncJobVO> searchForAsyncJobs(ListAsyncJobsCmd cmd) throws InvalidParameterValueException, PermissionDeniedException {
    Filter searchFilter = new Filter(AsyncJobVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    SearchBuilder<AsyncJobVO> sb = _jobDao.createSearchBuilder();
    Object accountId = null;
    Long domainId = cmd.getDomainId();
    Account account = UserContext.current().getCaller();
    if ((account == null) || isAdmin(account.getType())) {
        String accountName = cmd.getAccountName();
        if ((accountName != null) && (domainId != null)) {
            // Explicit account filter: resolve the account, fail fast if missing.
            Account userAccount = _accountDao.findActiveAccount(accountName, domainId);
            if (userAccount != null) {
                accountId = userAccount.getId();
            } else {
                throw new InvalidParameterValueException("Failed to list async jobs for account " + accountName + " in domain " + domainId + "; account not found.");
            }
        } else if (domainId != null) {
            // Admins may only descend into domains under their own.
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new PermissionDeniedException("Failed to list async jobs for domain " + domainId + "; permission denied.");
            }
            // we can do a domain match for the admin case
            SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
            domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
            SearchBuilder<AccountVO> accountSearch = _accountDao.createSearchBuilder();
            accountSearch.join("domainSearch", domainSearch, accountSearch.entity().getDomainId(), domainSearch.entity().getId(), JoinType.INNER);
            sb.join("accountSearch", accountSearch, sb.entity().getAccountId(), accountSearch.entity().getId(), JoinType.INNER);
        }
    } else {
        // Non-admin callers are restricted to their own jobs.
        accountId = account.getId();
    }
    Object keyword = cmd.getKeyword();
    Object startDate = cmd.getStartDate();
    SearchCriteria<AsyncJobVO> sc = _jobDao.createSearchCriteria();
    if (keyword != null) {
        sc.addAnd("cmd", SearchCriteria.Op.LIKE, "%" + keyword + "%");
    }
    if (accountId != null) {
        sc.addAnd("accountId", SearchCriteria.Op.EQ, accountId);
    } else if (domainId != null) {
        // Match jobs of any account whose domain path falls under this domain.
        DomainVO domain = _domainDao.findById(domainId);
        sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
    }
    if (startDate != null) {
        sc.addAnd("created", SearchCriteria.Op.GTEQ, startDate);
    }
    return _jobDao.search(sc, searchFilter);
}
/**
 * Reports whether {@code childId} lies in the domain subtree rooted at
 * {@code parentId}; delegates to the domain DAO.
 */
@Override
public boolean isChildDomain(Long parentId, Long childId) {
    final boolean isChild = _domainDao.isChildDomain(parentId, childId);
    return isChild;
}
/**
 * Starts the secondary storage VM with the given id via its manager and
 * returns the record the manager reports back.
 */
public SecondaryStorageVmVO startSecondaryStorageVm(long instanceId) {
    return _secStorageVmMgr.startSecStorageVm(instanceId);
}
/**
 * Stops the secondary storage VM with the given id, then re-reads and
 * returns its record so the caller sees the post-stop state.
 */
public SecondaryStorageVmVO stopSecondaryStorageVm(long instanceId) {
    _secStorageVmMgr.stopSecStorageVm(instanceId);
    return _secStorageVmDao.findById(instanceId);
}
/**
 * Reboots the secondary storage VM with the given id, then re-reads and
 * returns its record.
 */
public SecondaryStorageVmVO rebootSecondaryStorageVm(long instanceId) {
    _secStorageVmMgr.rebootSecStorageVm(instanceId);
    return _secStorageVmDao.findById(instanceId);
}
/**
 * Destroys the secondary storage VM with the given id; returns the
 * manager's success flag.
 */
public boolean destroySecondaryStorageVm(long instanceId) {
    return _secStorageVmMgr.destroySecStorageVm(instanceId);
}
/**
 * Searches secondary storage VMs matching the given criteria. A keyword
 * matches either name or state; the remaining criteria are AND-filters.
 */
@Override
public List<SecondaryStorageVmVO> searchForSecondaryStorageVm(Criteria c) {
    final Filter pageFilter = new Filter(SecondaryStorageVmVO.class, c.getOrderBy(), c.getAscending(), c.getOffset(), c.getLimit());
    final SearchCriteria<SecondaryStorageVmVO> sc = _secStorageVmDao.createSearchCriteria();

    final Object keyword = c.getCriteria(Criteria.KEYWORD);
    if (keyword != null) {
        // Keyword matches VM name OR state.
        final SearchCriteria<SecondaryStorageVmVO> keywordMatch = _secStorageVmDao.createSearchCriteria();
        keywordMatch.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        keywordMatch.addOr("state", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        sc.addAnd("name", SearchCriteria.Op.SC, keywordMatch);
    }
    final Object id = c.getCriteria(Criteria.ID);
    if (id != null) {
        sc.addAnd("id", SearchCriteria.Op.EQ, id);
    }
    final Object name = c.getCriteria(Criteria.NAME);
    if (name != null) {
        sc.addAnd("name", SearchCriteria.Op.LIKE, "%" + name + "%");
    }
    final Object state = c.getCriteria(Criteria.STATE);
    if (state != null) {
        sc.addAnd("state", SearchCriteria.Op.EQ, state);
    }
    final Object zoneId = c.getCriteria(Criteria.DATACENTERID);
    if (zoneId != null) {
        sc.addAnd("dataCenterId", SearchCriteria.Op.EQ, zoneId);
    }
    final Object podId = c.getCriteria(Criteria.PODID);
    if (podId != null) {
        sc.addAnd("podId", SearchCriteria.Op.EQ, podId);
    }
    final Object hostId = c.getCriteria(Criteria.HOSTID);
    if (hostId != null) {
        sc.addAnd("hostId", SearchCriteria.Op.EQ, hostId);
    }
    return _secStorageVmDao.search(sc, pageFilter);
}
/**
 * Lists system VMs (console proxies and/or secondary storage VMs) matching
 * the command's filters. With no explicit type both kinds are returned.
 */
@Override @SuppressWarnings({"unchecked", "rawtypes"})
public List<? extends VMInstanceVO> searchForSystemVm(ListSystemVMsCmd cmd) {
    Criteria c = new Criteria("id", Boolean.TRUE, cmd.getStartIndex(), cmd.getPageSizeVal());
    c.addCriteria(Criteria.KEYWORD, cmd.getKeyword());
    c.addCriteria(Criteria.ID, cmd.getId());
    c.addCriteria(Criteria.DATACENTERID, cmd.getZoneId());
    c.addCriteria(Criteria.PODID, cmd.getPodId());
    c.addCriteria(Criteria.HOSTID, cmd.getHostId());
    c.addCriteria(Criteria.NAME, cmd.getSystemVmName());
    c.addCriteria(Criteria.STATE, cmd.getState());
    String type = cmd.getSystemVmType();
    // Raw list: console proxy and secondary storage results have different
    // element types that share the VMInstanceVO base.
    List systemVMs = new ArrayList();
    if (type == null) { // no type given: search for all system vm types
        systemVMs.addAll(searchForConsoleProxy(c));
        systemVMs.addAll(searchForSecondaryStorageVm(c));
    } else if ("secondarystoragevm".equalsIgnoreCase(type)) {
        // (the redundant "type != null" re-checks from the original were dropped)
        systemVMs.addAll(searchForSecondaryStorageVm(c));
    } else if ("consoleproxy".equalsIgnoreCase(type)) {
        systemVMs.addAll(searchForConsoleProxy(c));
    }
    // Unknown type strings yield an empty list, as before.
    return systemVMs;
}
/**
 * Fetches a system VM (console proxy or secondary storage VM) by id,
 * returning the full type-specific record, or null when the id does not
 * name a system VM.
 */
@Override
public VMInstanceVO findSystemVMById(long instanceId) {
    final VMInstanceVO vm = _vmInstanceDao.findByIdTypes(instanceId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (vm == null) {
        return null;
    }
    // Re-read from the type-specific DAO so the caller gets the subtype row.
    if (vm.getType() == VirtualMachine.Type.ConsoleProxy) {
        return _consoleProxyDao.findById(instanceId);
    }
    return _secStorageVmDao.findById(instanceId);
}
/**
 * API entry point: starts the system VM named by the command's id.
 */
@Override
public VirtualMachine startSystemVM(StartSystemVMCmd cmd) {
    final long vmId = cmd.getId();
    return startSystemVm(vmId);
}
/**
 * Starts the system VM with the given id, dispatching on its type
 * (console proxy vs. secondary storage VM).
 *
 * @throws InvalidParameterValueException when no system VM has that id,
 *         or its type is not a recognized system VM type
 */
@Override
public VirtualMachine startSystemVm(long vmId) {
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(vmId, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new InvalidParameterValueException("unable to find a system vm with id " + vmId);
    }
    final VirtualMachine.Type vmType = systemVm.getType();
    if (vmType == VirtualMachine.Type.ConsoleProxy) {
        return startConsoleProxy(vmId);
    }
    if (vmType == VirtualMachine.Type.SecondaryStorageVm) {
        return startSecondaryStorageVm(vmId);
    }
    throw new InvalidParameterValueException("Unable to find a system vm: " + vmId);
}
/**
 * API entry point: stops the system VM named by the command's id,
 * dispatching on its type (console proxy vs. secondary storage VM).
 *
 * @throws ServerApiException when no system VM has that id
 */
@Override
public VMInstanceVO stopSystemVM(StopSystemVmCmd cmd) {
    Long id = cmd.getId();
    // verify parameters
    VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(id, VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new ServerApiException (BaseCmd.PARAM_ERROR, "unable to find a system vm with id " + id);
    }
    // FIXME: We need to return the system VM from this method, so what do we do with the boolean response from stopConsoleProxy and stopSecondaryStorageVm?
    if (systemVm.getType().equals(VirtualMachine.Type.ConsoleProxy)){
        return stopConsoleProxy(id);
    } else {
        return stopSecondaryStorageVm(id);
    }
}
/**
 * API entry point: reboots the system VM named by the command's id,
 * dispatching on its type (console proxy vs. secondary storage VM).
 *
 * @throws ServerApiException when no system VM has that id
 */
@Override
public VMInstanceVO rebootSystemVM(RebootSystemVmCmd cmd) {
    final VMInstanceVO systemVm = _vmInstanceDao.findByIdTypes(cmd.getId(), VirtualMachine.Type.ConsoleProxy, VirtualMachine.Type.SecondaryStorageVm);
    if (systemVm == null) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "unable to find a system vm with id " + cmd.getId());
    }
    if (VirtualMachine.Type.ConsoleProxy.equals(systemVm.getType())) {
        return rebootConsoleProxy(cmd.getId());
    }
    return rebootSecondaryStorageVm(cmd.getId());
}
/**
 * Computes the HmacSHA1 signature of {@code request} using {@code key}
 * and returns it Base64-encoded, or {@code null} when either argument is
 * null or signing fails.
 *
 * @param request the data to sign
 * @param key the user's secret API key (never logged)
 */
private String signRequest(String request, String key) {
    try {
        s_logger.info("Request: " + request);
        // SECURITY FIX: the secret key was previously written to the log at
        // INFO level; never log credentials.
        if (key != null && request != null) {
            Mac mac = Mac.getInstance("HmacSHA1");
            SecretKeySpec keySpec = new SecretKeySpec(key.getBytes(), "HmacSHA1");
            mac.init(keySpec);
            mac.update(request.getBytes());
            byte[] encryptedBytes = mac.doFinal();
            return new String(Base64.encodeBase64(encryptedBytes));
        }
    } catch (Exception ex) {
        s_logger.error("unable to sign request", ex);
    }
    return null;
}
/**
 * Returns the cloud identifier ("cloud.identifier" config value, empty
 * string if unset) together with its HmacSHA1 signature computed with the
 * requesting user's secret key. Result list: [identifier, signature].
 *
 * @throws InvalidParameterValueException when the user id does not name
 *         an active user
 */
@Override
public ArrayList<String> getCloudIdentifierResponse(GetCloudIdentifierCmd cmd) throws InvalidParameterValueException{
    Long userId = cmd.getUserId();
    //verify that user exists
    User user = findUserById(userId);
    if ((user == null) || (user.getRemoved() != null)) {
        throw new InvalidParameterValueException("Unable to find active user by id " + userId);
    }
    String cloudIdentifier = _configDao.getValue("cloud.identifier");
    if (cloudIdentifier == null) {
        cloudIdentifier = "";
    }
    // Signing failures are tolerated: the signature stays empty/null and
    // only a warning is logged.
    String signature = "";
    try {
        //get the user obj to get his secret key
        user = getUser(userId);
        String secretKey = user.getSecretKey();
        String input = cloudIdentifier;
        signature = signRequest(input, secretKey);
    } catch (Exception e) {
        s_logger.warn("Exception whilst creating a signature:"+e);
    }
    ArrayList<String> cloudParams = new ArrayList<String>();
    cloudParams.add(cloudIdentifier);
    cloudParams.add(signature);
    return cloudParams;
}
/**
 * Looks up a network security group by owning account and name.
 */
@Override
public SecurityGroupVO findNetworkGroupByName(Long accountId, String groupName) {
    return _networkSecurityGroupDao.findByAccountAndName(accountId, groupName);
}
/**
 * Looks up a network security group by its id.
 */
@Override
public SecurityGroupVO findNetworkGroupById(long networkGroupId) {
    return _networkSecurityGroupDao.findById(networkGroupId);
}
/**
 * {@inheritDoc}
 *
 * Returns the cached _isHypervisorSnapshotCapable flag; the flag's value
 * is established elsewhere (not visible in this block).
 */
@Override
public boolean isHypervisorSnapshotCapable() {
    return _isHypervisorSnapshotCapable;
}
/**
 * Returns events that were started between (now - entryTime) and
 * (now - duration) seconds ago and have no matching completion event yet.
 */
@Override
public List<EventVO> listPendingEvents(int entryTime, int duration) {
    final Calendar earliest = Calendar.getInstance();
    final Calendar latest = Calendar.getInstance();
    earliest.add(Calendar.SECOND, -entryTime);
    latest.add(Calendar.SECOND, -duration);
    final Date minTime = earliest.getTime();
    final Date maxTime = latest.getTime();

    final List<EventVO> pending = new ArrayList<EventVO>();
    for (EventVO started : _eventDao.listStartedEvents(minTime, maxTime)) {
        // A started event with no completion record is still pending.
        if (_eventDao.findCompletedEvent(started.getId()) == null) {
            pending.add(started);
        }
    }
    return pending;
}
/**
 * Lists preallocated LUNs, optionally filtered by target IQN, scoped to
 * ALL (default), ALLOCATED (volume assigned and taken) or FREE
 * (unassigned). Unrecognized scope values return null.
 */
@Override
public List<PreallocatedLunVO> getPreAllocatedLuns(ListPreallocatedLunsCmd cmd) {
    final Filter pageFilter = new Filter(PreallocatedLunVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    final SearchCriteria<PreallocatedLunVO> sc = _lunDao.createSearchCriteria();

    final Object targetIqn = cmd.getTargetIqn();
    if (targetIqn != null) {
        sc.addAnd("targetIqn", SearchCriteria.Op.EQ, targetIqn);
    }
    final Object scope = cmd.getScope();
    // A missing scope behaves exactly like "ALL".
    final String scopeName = (scope == null) ? "ALL" : scope.toString();
    if (scopeName.equalsIgnoreCase("ALL")) {
        return _lunDao.search(sc, pageFilter);
    }
    if (scopeName.equalsIgnoreCase("ALLOCATED")) {
        sc.addAnd("volumeId", SearchCriteria.Op.NNULL);
        sc.addAnd("taken", SearchCriteria.Op.NNULL);
        return _lunDao.search(sc, pageFilter);
    }
    if (scopeName.equalsIgnoreCase("FREE")) {
        sc.addAnd("volumeId", SearchCriteria.Op.NULL);
        sc.addAnd("taken", SearchCriteria.Op.NULL);
        return _lunDao.search(sc, pageFilter);
    }
    return null;
}
/**
 * Reports whether local storage use is enabled via the
 * "use.local.storage" configuration value. Any value other than
 * (case-insensitive) "true" — including an unset value — counts as
 * disabled, exactly as before.
 */
@Override
public boolean checkLocalStorageConfigVal()
{
    // Constant-first equalsIgnoreCase also handles a null config value,
    // replacing the original redundant if/else-returning-boolean.
    return "true".equalsIgnoreCase(_configs.get("use.local.storage"));
}
/**
 * Reports whether the host can enter maintenance: true when the storage
 * pool it belongs to contains at least one other host that could take
 * over as pool master.
 */
@Override
public boolean checkIfMaintenable(long hostId) {
    // Pool-host mapping for this host; at most one record is expected.
    List<StoragePoolHostVO> poolHostRecordSet = _poolHostDao.listByHostId(hostId);
    // ROBUSTNESS FIX: also guard against a non-null but EMPTY list — the
    // original unconditionally called get(0) and could throw
    // IndexOutOfBoundsException for hosts with no pool mapping.
    if (poolHostRecordSet != null && !poolHostRecordSet.isEmpty()) {
        StoragePoolHostVO poolHostRecord = poolHostRecordSet.get(0);
        // All hosts sharing the same pool.
        List<StoragePoolHostVO> hostsInPool = _poolHostDao.listByPoolId(poolHostRecord.getPoolId());
        if (hostsInPool != null && hostsInPool.size() > 1) {
            return true; // another host can take over as master in this pool
        }
    }
    return false;
}
/**
 * Assembles the public capability map: whether direct-attach security
 * groups are enabled, whether user-public templates are allowed, and the
 * management server version.
 */
@Override
public Map<String, Object> listCapabilities(ListCapabilitiesCmd cmd) {
    final String sgEnabled = _configs.get(Config.DirectAttachSecurityGroupsEnabled.key());
    final String publicTemplates = _configs.get(Config.AllowPublicUserTemplates.key());
    final Map<String, Object> capabilities = new HashMap<String, Object>();
    // Unset or "false" means disabled; any other value means enabled.
    capabilities.put("securityGroupsEnabled", !(sgEnabled == null || sgEnabled.equals("false")));
    capabilities.put("userPublicTemplateEnabled", !(publicTemplates == null || publicTemplates.equals("false")));
    capabilities.put("cloudStackVersion", getVersion());
    return capabilities;
}
/**
 * Fetches a guest OS record by id; straight delegation to the DAO.
 */
@Override
public GuestOSVO getGuestOs(Long guestOsId) {
    return _guestOSDao.findById(guestOsId);
}
/**
 * Returns the first ROOT volume of the given VM instance.
 * NOTE(review): assumes every instance has at least one ROOT volume —
 * get(0) throws IndexOutOfBoundsException otherwise; confirm callers only
 * pass ids of fully-created VMs.
 */
@Override
public VolumeVO getRootVolume(Long instanceId)
{
    return _volumeDao.findByInstanceAndType(instanceId, Volume.VolumeType.ROOT).get(0);
}
/**
 * Counts storage pools currently in any maintenance-related state
 * (Maintenance, PrepareForMaintenance, ErrorInMaintenance).
 * NOTE(review): the podId parameter is never used — the count is global
 * across all pods; confirm whether per-pod filtering was intended.
 */
@Override
public long getPsMaintenanceCount(long podId){
    List<StoragePoolVO> poolsInTransition = new ArrayList<StoragePoolVO>();
    poolsInTransition.addAll(_poolDao.listPoolsByStatus(Status.Maintenance));
    poolsInTransition.addAll(_poolDao.listPoolsByStatus(Status.PrepareForMaintenance));
    poolsInTransition.addAll(_poolDao.listPoolsByStatus(Status.ErrorInMaintenance));
    return poolsInTransition.size();
}
/**
 * Reports whether the storage pool backing the instance's first volume is
 * in the Up state. Instances without volumes, or volumes whose pool record
 * cannot be found, count as "not up".
 */
@Override
public boolean isPoolUp(long instanceId){
    List<VolumeVO> volumes = _volumeDao.findByInstance(instanceId);
    // ROBUSTNESS FIX: the original called get(0) unconditionally and could
    // throw IndexOutOfBoundsException for an instance with no volumes.
    if (volumes == null || volumes.isEmpty()) {
        return false;
    }
    VolumeVO rootVolume = volumes.get(0);
    if (rootVolume == null) {
        return false;
    }
    StoragePoolVO pool = _poolDao.findById(rootVolume.getPoolId());
    // ROBUSTNESS FIX: a missing pool record previously caused an NPE; treat
    // it as "not up" instead.
    return pool != null && Status.Up.equals(pool.getStatus());
}
/**
 * Prepares a volume for extraction: validates state and permissions,
 * copies the volume from its primary storage pool to secondary storage in
 * the given zone, then either creates an HTTP download URL or starts an
 * FTP upload to the caller-supplied URL, depending on the requested mode.
 *
 * @return the id of the UploadVO tracking the extraction
 * @throws URISyntaxException if the FTP upload URL is malformed
 */
@Override
public Long extractVolume(ExtractVolumeCmd cmd) throws URISyntaxException {
    Long volumeId = cmd.getId();
    String url = cmd.getUrl();
    Long zoneId = cmd.getZoneId();
    AsyncJobVO job = null; // FIXME: cmd.getJob();
    String mode = cmd.getMode();
    Account account = UserContext.current().getCaller();
    // --- parameter and state validation ---
    VolumeVO volume = _volumeDao.findById(volumeId);
    if (volume == null) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "Unable to find volume with id " + volumeId);
    }
    if (_dcDao.findById(zoneId) == null) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "Please specify a valid zone.");
    }
    if(volume.getPoolId() == null){
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "The volume doesnt belong to a storage pool so cant extract it");
    }
    //Extract activity only for detached volumes or for volumes whose instance is stopped
    if(volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped ){
        s_logger.debug("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
        throw new PermissionDeniedException("Invalid state of the volume with ID: " + volumeId + ". It should be either detached or the VM should be in stopped state.");
    }
    // Volumes built from non-extractable or SYSTEM templates may only be
    // extracted by global admins.
    VMTemplateVO template = ApiDBUtils.findTemplateById(volume.getTemplateId());
    boolean isExtractable = template != null && template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM;
    if( !isExtractable && account!=null && account.getType() != Account.ACCOUNT_TYPE_ADMIN){ // Global admins are allowed to extract
        throw new PermissionDeniedException("The volume:" +volumeId+ " is not allowed to be extracted");
    }
    Upload.Mode extractMode;
    if( mode == null || (!mode.equals(Upload.Mode.FTP_UPLOAD.toString()) && !mode.equals(Upload.Mode.HTTP_DOWNLOAD.toString())) ){
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "Please specify a valid extract Mode ");
    }else{
        extractMode = mode.equals(Upload.Mode.FTP_UPLOAD.toString()) ? Upload.Mode.FTP_UPLOAD : Upload.Mode.HTTP_DOWNLOAD;
    }
    // Ownership: users may only extract their own volumes; admins any
    // volume whose owner lies in their domain subtree.
    if (account != null) {
        if(!isAdmin(account.getType())){
            if (volume.getAccountId() != account.getId()){
                throw new PermissionDeniedException("Unable to find volume with ID: " + volumeId + " for account: " + account.getAccountName());
            }
        } else {
            Account userAccount = _accountDao.findById(volume.getAccountId());
            if((userAccount == null) || !_domainDao.isChildDomain(account.getDomainId(), userAccount.getDomainId())) {
                throw new PermissionDeniedException("Unable to extract volume:" + volumeId + " - permission denied.");
            }
        }
    }
    // If mode is upload perform extra checks on url and also see if there is an ongoing upload on the same.
    if (extractMode == Upload.Mode.FTP_UPLOAD){
        URI uri = new URI(url);
        if ( (uri.getScheme() == null) || (!uri.getScheme().equalsIgnoreCase("ftp") )) {
            throw new IllegalArgumentException("Unsupported scheme for url: " + url);
        }
        String host = uri.getHost();
        try {
            // Reject local, link-local, loopback, multicast and IPv6 targets.
            InetAddress hostAddr = InetAddress.getByName(host);
            if (hostAddr.isAnyLocalAddress() || hostAddr.isLinkLocalAddress() || hostAddr.isLoopbackAddress() || hostAddr.isMulticastAddress() ) {
                throw new IllegalArgumentException("Illegal host specified in url");
            }
            if (hostAddr instanceof Inet6Address) {
                throw new IllegalArgumentException("IPV6 addresses not supported (" + hostAddr.getHostAddress() + ")");
            }
        } catch (UnknownHostException uhe) {
            throw new IllegalArgumentException("Unable to resolve " + host);
        }
        if ( _uploadMonitor.isTypeUploadInProgress(volumeId, Upload.Type.VOLUME) ){
            throw new IllegalArgumentException(volume.getName() + " upload is in progress. Please wait for some time to schedule another upload for the same");
        }
    }
    long userId = UserContext.current().getCallerUserId();
    long accountId = volume.getAccountId();
    String secondaryStorageURL = _storageMgr.getSecondaryStorageURL(zoneId);
    StoragePoolVO srcPool = _poolDao.findById(volume.getPoolId());
    Long sourceHostId = _storageMgr.findHostIdForStoragePool(srcPool);
    // NOTE(review): assumes at least one secondary storage host exists in
    // the zone — get(0) throws otherwise; confirm.
    List<HostVO> storageServers = _hostDao.listByTypeDataCenter(Host.Type.SecondaryStorage, zoneId);
    HostVO sserver = storageServers.get(0);
    List<UploadVO> extractURLList = _uploadDao.listByTypeUploadStatus(volumeId, Upload.Type.VOLUME, UploadVO.Status.DOWNLOAD_URL_CREATED);
    if (extractMode == Upload.Mode.HTTP_DOWNLOAD && extractURLList.size() > 0){
        return extractURLList.get(0).getId(); // If download url already exists then return
    }else {
        UploadVO uploadJob = _uploadMonitor.createNewUploadEntry(sserver.getId(), volumeId, UploadVO.Status.COPY_IN_PROGRESS, Upload.Type.VOLUME, url, extractMode);
        s_logger.debug("Extract Mode - " +uploadJob.getMode());
        uploadJob = _uploadDao.createForUpdate(uploadJob.getId());
        // Update the async Job
        ExtractResponse resultObj = new ExtractResponse(volumeId, volume.getName(), accountId, UploadVO.Status.COPY_IN_PROGRESS.toString(), uploadJob.getId());
        resultObj.setResponseName(cmd.getCommandName());
        AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor.getCurrentExecutor();
        if (asyncExecutor != null) {
            job = asyncExecutor.getJob();
            _asyncMgr.updateAsyncJobAttachment(job.getId(), Upload.Type.VOLUME.toString(), volumeId);
            _asyncMgr.updateAsyncJobStatus(job.getId(), AsyncJobResult.STATUS_IN_PROGRESS, resultObj);
        }
        // Copy the volume from the source storage pool to secondary storage
        CopyVolumeCommand cvCmd = new CopyVolumeCommand(volume.getId(), volume.getPath(), srcPool, secondaryStorageURL, true);
        CopyVolumeAnswer cvAnswer = (CopyVolumeAnswer) _agentMgr.easySend(sourceHostId, cvCmd);
        // Check if you got a valid answer.
        if (cvAnswer == null || !cvAnswer.getResult()) {
            String errorString = "Failed to copy the volume from the source primary storage pool to secondary storage.";
            //Update the async job.
            resultObj.setResultString(errorString);
            resultObj.setUploadStatus(UploadVO.Status.COPY_ERROR.toString());
            if (asyncExecutor != null) {
                _asyncMgr.completeAsyncJob(job.getId(), AsyncJobResult.STATUS_FAILED, 0, resultObj);
            }
            //Update the DB that volume couldn't be copied
            uploadJob.setUploadState(UploadVO.Status.COPY_ERROR);
            uploadJob.setErrorString(errorString);
            uploadJob.setLastUpdated(new Date());
            _uploadDao.update(uploadJob.getId(), uploadJob);
            throw new CloudRuntimeException(errorString);
        }
        String volumeLocalPath = "volumes/"+volume.getId()+"/"+cvAnswer.getVolumePath()+".vhd";
        //Update the DB that volume is copied and volumePath
        uploadJob.setUploadState(UploadVO.Status.COPY_COMPLETE);
        uploadJob.setLastUpdated(new Date());
        uploadJob.setInstallPath(volumeLocalPath);
        _uploadDao.update(uploadJob.getId(), uploadJob);
        if (extractMode == Mode.FTP_UPLOAD){ // Now that the volume is copied perform the actual uploading
            // NOTE(review): job may still be null here when no async executor
            // is active — job.getId() would NPE; confirm FTP extraction only
            // runs inside an async job context.
            _uploadMonitor.extractVolume(uploadJob, sserver, volume, url, zoneId, volumeLocalPath, cmd.getStartEventId(), job.getId(), _asyncMgr);
            return uploadJob.getId();
        }else{ // Volume is copied now make it visible under apache and create a URL.
            _uploadMonitor.createVolumeDownloadURL(volumeId, volumeLocalPath, Upload.Type.VOLUME, zoneId, uploadJob.getId());
            return uploadJob.getId();
        }
    }
}
/**
 * Renames a VM (instance) group after verifying the caller's ownership /
 * domain visibility and that the new name is not already used by another
 * group of the owning account. Returns the re-read group record.
 * NOTE(review): tempAccount (the group's owner) is not null-checked
 * before isChildDomain — confirm a group can never outlive its account.
 */
@Override
public InstanceGroupVO updateVmGroup(UpdateVMGroupCmd cmd) {
    Account account = UserContext.current().getCaller();
    Long groupId = cmd.getId();
    String groupName = cmd.getGroupName();
    // Verify input parameters
    InstanceGroupVO group = _vmGroupDao.findById(groupId.longValue());
    if (group == null) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "unable to find a vm group with id " + groupId);
    }
    if (account != null) {
        Account tempAccount = _accountDao.findById(group.getAccountId());
        // Non-admins must own the group; admins must have it in their domain subtree.
        if (!isAdmin(account.getType()) && (account.getId() != group.getAccountId())) {
            throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "unable to find a group with id " + groupId + " for this account");
        } else if (!_domainDao.isChildDomain(account.getDomainId(), tempAccount.getDomainId())) {
            throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Invalid group id (" + groupId + ") given, unable to update the group.");
        }
    }
    //Check if name is already in use by this account (exclude this group)
    boolean isNameInUse = _vmGroupDao.isNameInUse(group.getAccountId(), groupName);
    if (isNameInUse && !group.getName().equals(groupName)) {
        throw new ServerApiException(BaseCmd.PARAM_ERROR, "Unable to update vm group, a group with name " + groupName + " already exisits for account");
    }
    if (groupName != null) {
        _vmGroupDao.updateVmGroup(groupId, groupName);
    }
    // Re-read so the returned record reflects the rename.
    InstanceGroupVO vmGroup = _vmGroupDao.findById(groupId);
    return vmGroup;
}
/**
 * Lists VM (instance) groups visible to the caller, filtered by id, name
 * and/or keyword. Admins may list across their child domains; regular
 * users only see their own groups.
 */
@Override
public List<InstanceGroupVO> searchForVmGroups(ListVMGroupsCmd cmd) {
    Account account = UserContext.current().getCaller();
    Long domainId = cmd.getDomainId();
    String accountName = cmd.getAccountName();
    Long accountId = null;
    if ((account == null) || isAdmin(account.getType())) {
        if (domainId != null) {
            // Admins may only descend into domains under their own.
            if ((account != null) && !_domainDao.isChildDomain(account.getDomainId(), domainId)) {
                throw new ServerApiException(BaseCmd.PARAM_ERROR, "Invalid domain id (" + domainId + ") given, unable to list vm groups.");
            }
            if (accountName != null) {
                account = _accountDao.findActiveAccount(accountName, domainId);
                if (account == null) {
                    throw new ServerApiException(BaseCmd.ACCOUNT_ERROR, "Unable to find account " + accountName + " in domain " + domainId);
                }
                accountId = account.getId();
            }
        } else {
            domainId = ((account == null) ? DomainVO.ROOT_DOMAIN : account.getDomainId());
        }
    } else {
        // Non-admin callers are restricted to their own groups.
        accountName = account.getAccountName();
        accountId = account.getId();
        domainId = account.getDomainId();
    }
    Filter searchFilter = new Filter(InstanceGroupVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal());
    Object id = cmd.getId();
    Object name = cmd.getGroupName();
    Object keyword = cmd.getKeyword();
    SearchBuilder<InstanceGroupVO> sb = _vmGroupDao.createSearchBuilder();
    sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ);
    sb.and("name", sb.entity().getName(), SearchCriteria.Op.LIKE);
    sb.and("accountId", sb.entity().getAccountId(), SearchCriteria.Op.EQ);
    if ((accountId == null) && (domainId != null)) {
        // if accountId isn't specified, we can do a domain match for the admin case
        SearchBuilder<DomainVO> domainSearch = _domainDao.createSearchBuilder();
        domainSearch.and("path", domainSearch.entity().getPath(), SearchCriteria.Op.LIKE);
        sb.join("domainSearch", domainSearch, sb.entity().getDomainId(), domainSearch.entity().getId(), JoinBuilder.JoinType.INNER);
    }
    SearchCriteria<InstanceGroupVO> sc = sb.create();
    if (keyword != null) {
        SearchCriteria<InstanceGroupVO> ssc = _vmGroupDao.createSearchCriteria();
        ssc.addOr("name", SearchCriteria.Op.LIKE, "%" + keyword + "%");
        // BUGFIX: the keyword sub-criteria was built but never attached to
        // sc, making the keyword filter a silent no-op. Attach it the same
        // way the other search methods in this class do.
        sc.addAnd("name", SearchCriteria.Op.SC, ssc);
    }
    if (id != null) {
        sc.setParameters("id", id);
    }
    if (name != null) {
        sc.setParameters("name", "%" + name + "%");
    }
    if (accountId != null) {
        sc.setParameters("accountId", accountId);
    } else if (domainId != null) {
        DomainVO domain = _domainDao.findById(domainId);
        if (domain != null){
            sc.setJoinParameters("domainSearch", "path", domain.getPath() + "%");
        }
    }
    return _vmGroupDao.search(sc, searchFilter);
}
/**
 * Returns the instance group the given VM belongs to, as reported by the
 * VM manager.
 */
@Override
public InstanceGroupVO getGroupForVm(long vmId) {
    return _vmMgr.getGroupForVm(vmId);
}
/**
 * Lists zone-wide VLANs matching the given type and VLAN id; straight
 * delegation to the VLAN DAO.
 */
@Override
public List<VlanVO> searchForZoneWideVlans(long dcId, String vlanType, String vlanId) {
    return _vlanDao.searchForZoneWideVlans(dcId, vlanType, vlanId);
}
/**
 * Returns the management server version from the jar manifest's
 * Implementation-Version attribute, or "unknown" when it is unavailable.
 */
@Override
public String getVersion(){
    final Class<?> c = ManagementServer.class;
    final Package pkg = c.getPackage();
    // NPE FIX: getPackage() can return null, and getImplementationVersion()
    // returns null when not running from a built jar (e.g. a development
    // classpath); the original dereferenced the result unconditionally.
    String fullVersion = (pkg == null) ? null : pkg.getImplementationVersion();
    if (fullVersion != null && fullVersion.length() > 0) {
        return fullVersion;
    }
    return "unknown";
}
/**
 * Persists a new event in the Scheduled state for an async job and
 * returns the new event's id.
 *
 * @param description appended to the fixed "Scheduled async job for " prefix
 */
private Long saveScheduledEvent(Long userId, Long accountId, String type, String description) {
    final EventVO scheduled = new EventVO();
    scheduled.setUserId(userId);
    scheduled.setAccountId(accountId);
    scheduled.setType(type);
    scheduled.setState(Event.State.Scheduled);
    scheduled.setDescription("Scheduled async job for " + description);
    return _eventDao.persist(scheduled).getId();
}
/**
 * Records a Started event linked to {@code startEventId}; delegates to
 * {@code EventUtils}.
 */
@Override
public Long saveStartedEvent(Long userId, Long accountId, String type, String description, long startEventId) {
    return EventUtils.saveStartedEvent(userId, accountId, type, description, startEventId);
}
/**
 * Records a completed event with the given level, linked to
 * {@code startEventId}; delegates to {@code EventUtils}.
 */
@Override
public Long saveCompletedEvent(Long userId, Long accountId, String level, String type, String description, long startEventId) {
    return EventUtils.saveEvent(userId, accountId, level, type, description, startEventId);
}
@Override @DB
public String uploadCertificate(UploadCustomCertificateCmd cmd) throws ServerApiException{
CertificateVO cert = null;
Long certVOId = null;
try
{
Transaction.currentTxn();
String certificate = cmd.getCertificate();
cert = _certDao.listAll().get(0); //always 1 record in db (from the deploydb time)
cert = _certDao.acquireInLockTable(cert.getId());
if(cert == null){
String msg = "Unable to obtain lock on the cert from uploadCertificate()";
s_logger.error(msg);
throw new ConcurrentOperationException(msg);
}else{
if(cert.getUpdated().equalsIgnoreCase("Y")){
if(s_logger.isDebugEnabled()) {
s_logger.debug("A custom certificate already exists in the DB, will replace it with the new one being uploaded");
}
}else{
if(s_logger.isDebugEnabled()) {
s_logger.debug("No custom certificate exists in the DB, will upload a new one");
}
}
//validate if the cert follows X509 format, if not, don't persist to db
InputStream is = new ByteArrayInputStream(certificate.getBytes("UTF-8"));
BufferedInputStream bis = new BufferedInputStream(is);
CertificateFactory cf = CertificateFactory.getInstance("X.509");
while (bis.available() > 1) {
Certificate localCert = cf.generateCertificate(bis);//throws certexception if not valid cert format
if(s_logger.isDebugEnabled()){
s_logger.debug("The custom certificate generated for validation is:"+localCert.toString());
}
}
certVOId = _certDao.persistCustomCertToDb(certificate,cert,this.getId());//0 implies failure
if(s_logger.isDebugEnabled()) {
s_logger.debug("Custom certificate persisted to the DB");
}
}
if (certVOId != 0)
{
//certficate uploaded to db successfully
//get a list of all Console proxies from the cp table
List<ConsoleProxyVO> cpList = _consoleProxyDao.listAll();
if(cpList.size() == 0){
String msg = "Unable to find any console proxies in the system for certificate update";
s_logger.warn(msg);
throw new ExecutionException(msg);
}
//get a list of all hosts in host table for type cp
List<HostVO> cpHosts = _hostDao.listByType(com.cloud.host.Host.Type.ConsoleProxy);
if(cpHosts.size() == 0){
String msg = "Unable to find any console proxy hosts in the system for certificate update";
s_logger.warn(msg);
throw new ExecutionException(msg);
}
//create a hashmap for fast lookup
Map<String,Long> hostNameToHostIdMap = new HashMap<String, Long>();
//updated console proxies id list
List<Long> updatedCpIdList = new ArrayList<Long>();
for(HostVO cpHost : cpHosts){
hostNameToHostIdMap.put(cpHost.getName(), cpHost.getId());
}
for(ConsoleProxyVO cp : cpList)
{
Long cpHostId = hostNameToHostIdMap.get(cp.getName());
//now send a command to each console proxy host
UpdateCertificateCommand certCmd = new UpdateCertificateCommand(_certDao.findById(certVOId).getCertificate(), false);
try {
Answer updateCertAns = _agentMgr.send(cpHostId, certCmd);
if(updateCertAns.getResult() == true)
{
//we have the cert copied over on cpvm
_consoleProxyMgr.rebootProxy(cp.getId());
//when cp reboots, the context will be reinit with the new cert
if(s_logger.isDebugEnabled()) {
s_logger.debug("Successfully updated custom certificate on console proxy vm id:"+cp.getId()+" ,console proxy host id:"+cpHostId);
}
updatedCpIdList.add(cp.getId());
}
} catch (AgentUnavailableException e) {
s_logger.warn("Unable to send update certificate command to the console proxy resource as agent is unavailable for console proxy vm id:"+cp.getId()+" ,console proxy host id:"+cpHostId, e);
} catch (OperationTimedoutException e) {
s_logger.warn("Unable to send update certificate command to the console proxy resource as there was a timeout for console proxy vm id:"+cp.getId()+" ,console proxy host id:"+cpHostId, e);
}
}
if(updatedCpIdList.size() == cpList.size()){
//success case, all updated
return ("Updated:"+updatedCpIdList.size()+" out of:"+cpList.size()+" console proxies");
}else{
//failure case, if even one update fails
throw new ManagementServerException("Updated:"+updatedCpIdList.size()+" out of:"+cpList.size()+" console proxies with successfully updated console proxy ids being:"+(updatedCpIdList.size() > 0 ? updatedCpIdList.toString():""));
}
}
else
{
throw new ManagementServerException("Unable to persist custom certificate to the cloud db");
}
}catch (Exception e) {
s_logger.warn("Failed to successfully update the cert across console proxies on management server:"+this.getId());
if(e instanceof ExecutionException) {
throw new ServerApiException(BaseCmd.RESOURCE_UNAVAILABLE_ERROR, e.getMessage());
} else if(e instanceof ManagementServerException) {
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, e.getMessage());
} else if(e instanceof IndexOutOfBoundsException){
String msg = "Custom certificate record in the db deleted; this should never happen. Please create a new record in the certificate table";
s_logger.error(msg,e);
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
}
else if(e instanceof FileNotFoundException){
String msg = "Invalid file path for custom cert found during cert validation";
s_logger.error(msg,e);
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
}
else if(e instanceof CertificateException){
String msg = "The file format for custom cert does not conform to the X.509 specification";
s_logger.error(msg,e);
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
}
else if(e instanceof UnsupportedEncodingException){
String msg = "Unable to encode the certificate into UTF-8 input stream for validation";
s_logger.error(msg,e);
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
}
else if(e instanceof IOException){
String msg = "Cannot generate input stream during custom cert validation";
s_logger.error(msg,e);
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
} else {
String msg = "Cannot upload custom certificate, internal error.";
s_logger.error(msg,e);
throw new ServerApiException(BaseCmd.INTERNAL_ERROR, msg);
}
}finally{
_certDao.releaseFromLockTable(cert.getId());
}
}
@Override
public String[] getHypervisors(ListHypervisorsCmd cmd) {
    // The supported hypervisor list is stored as a single comma-separated
    // configuration value.
    String hypers = _configDao.getValue(Config.HypervisorList.key());
    // BUGFIX: the original tested `hypers == ""` (reference identity, never
    // true for a freshly loaded string) and checked null second. Check null
    // first, then content emptiness.
    if (hypers == null || hypers.isEmpty()) {
        return null;
    }
    return hypers.split(",");
}
@Override
public String getHashKey() {
    // Lazily initialize the hash key. A race between callers is tolerable
    // here: database transaction serialization ensures every caller ends up
    // observing the same persisted key.
    if (_hashKey != null) {
        return _hashKey;
    }
    _hashKey = _configDao.getValueAndInitIfNotExist(Config.HashKey.key(), UUID.randomUUID().toString());
    return _hashKey;
}
@Override
public SSHKeyPair createSSHKeyPair(CreateSSHKeyPairCmd cmd) {
    // Key pair names must be unique within the caller's (account, domain).
    Account caller = UserContext.current().getCaller();
    SSHKeyPairVO existing = _sshKeyPairDao.findByName(caller.getAccountId(), caller.getDomainId(), cmd.getName());
    if (existing != null) {
        throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' already exists.");
    }
    // Generate a brand new key pair and persist it under the requested name.
    SSHKeysHelper keys = new SSHKeysHelper();
    String publicKey = keys.getPublicKey();
    String fingerprint = keys.getPublicKeyFingerPrint();
    String privateKey = keys.getPrivateKey();
    return createAndSaveSSHKeyPair(cmd.getName(), fingerprint, publicKey, privateKey);
}
@Override
public boolean deleteSSHKeyPair(DeleteSSHKeyPairCmd cmd) {
    // Deleting a key pair that does not exist is reported as a caller error
    // rather than silently returning false.
    Account caller = UserContext.current().getCaller();
    if (_sshKeyPairDao.findByName(caller.getAccountId(), caller.getDomainId(), cmd.getName()) == null) {
        throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' does not exist.");
    }
    return _sshKeyPairDao.deleteByName(caller.getAccountId(), caller.getDomainId(), cmd.getName());
}
@Override
public List<? extends SSHKeyPair> listSSHKeyPairs(ListSSHKeyPairsCmd cmd) {
    Account caller = UserContext.current().getCaller();
    String name = cmd.getName();
    String fingerprint = cmd.getFingerprint();
    // Filter precedence: a non-empty name wins over a non-empty fingerprint;
    // with neither set, all of the caller's key pairs are returned.
    if (name != null && name.length() > 0) {
        return _sshKeyPairDao.listKeyPairsByName(caller.getAccountId(), caller.getDomainId(), name);
    }
    if (fingerprint != null && fingerprint.length() > 0) {
        return _sshKeyPairDao.listKeyPairsByFingerprint(caller.getAccountId(), caller.getDomainId(), fingerprint);
    }
    return _sshKeyPairDao.listKeyPairs(caller.getAccountId(), caller.getDomainId());
}
@Override
public SSHKeyPair registerSSHKeyPair(RegisterSSHKeyPairCmd cmd) {
    // Key pair names must be unique within the caller's (account, domain).
    Account account = UserContext.current().getCaller();
    SSHKeyPairVO s = _sshKeyPairDao.findByName(account.getAccountId(), account.getDomainId(), cmd.getName());
    if (s != null) {
        throw new InvalidParameterValueException("A key pair with name '" + cmd.getName() + "' already exists.");
    }
    String name = cmd.getName();
    String publicKey = SSHKeysHelper.getPublicKeyFromKeyMaterial(cmd.getPublicKey());
    // BUGFIX: validate the parsed public key BEFORE computing its fingerprint.
    // The original derived the fingerprint first, so invalid key material
    // produced an NPE instead of this meaningful error.
    if (publicKey == null) {
        throw new InvalidParameterValueException("Public key is invalid");
    }
    String fingerprint = SSHKeysHelper.getPublicKeyFingerprint(publicKey);
    // Registered keys carry no private half; only the public key is stored.
    return createAndSaveSSHKeyPair(name, fingerprint, publicKey, null);
}
/**
 * Builds and persists a key-pair record owned by the current caller.
 * The private key is transient: it is carried on the returned object only
 * and is not written to the database.
 */
private SSHKeyPair createAndSaveSSHKeyPair(String name, String fingerprint, String publicKey, String privateKey) {
    Account owner = UserContext.current().getCaller();
    SSHKeyPairVO pair = new SSHKeyPairVO();
    pair.setAccountId(owner.getAccountId());
    pair.setDomainId(owner.getDomainId());
    pair.setName(name);
    pair.setFingerprint(fingerprint);
    pair.setPublicKey(publicKey);
    pair.setPrivateKey(privateKey); // transient; not saved.
    _sshKeyPairDao.persist(pair);
    return pair;
}
@Override
public String getVMPassword(GetVMPasswordCmd cmd) {
    Account caller = UserContext.current().getCaller();
    UserVmVO vm = _userVmDao.findById(cmd.getId());
    // The VM must exist and belong to the caller's account.
    if (vm == null || vm.getAccountId() != caller.getAccountId()) {
        throw new InvalidParameterValueException("No VM with id '" + cmd.getId() + "' found.");
    }
    // The encrypted password lives in the VM's detail map; load it on demand.
    _userVmDao.loadDetails(vm);
    String password = vm.getDetail("Encrypted.Password");
    if (password == null || password.isEmpty()) {
        throw new InvalidParameterValueException("No password for VM with id '" + cmd.getId() + "' found.");
    }
    return password;
}
}
|
bug 8048: preventing returning of volumes attached to destroyed vms, when list vols is executed by normal user
status 8048: resolved fixed
|
server/src/com/cloud/server/ManagementServerImpl.java
|
bug 8048: preventing returning of volumes attached to destroyed vms, when list vols is executed by normal user status 8048: resolved fixed
|
<ide><path>erver/src/com/cloud/server/ManagementServerImpl.java
<ide> }
<ide> else
<ide> {
<del> returnableVolumes.add(v);
<add>              //do not add to returnable list if vol belongs to a user vm that is destroyed and cmd called by user
<add> if(v.getInstanceId() == null) {
<add> returnableVolumes.add(v);
<add> }else {
<add> if (account.getType() == Account.ACCOUNT_TYPE_NORMAL){
<add> VMInstanceVO owningVm = _vmInstanceDao.findById(v.getInstanceId());
<add> if(owningVm != null && owningVm.getType().equals(VirtualMachine.Type.User) && owningVm.getState().equals(VirtualMachine.State.Destroyed)){
<add> // do not show volumes
<add> // do nothing
<add> }else {
<add> returnableVolumes.add(v);
<add> }
<add> }else {
<add> returnableVolumes.add(v);
<add> }
<add> }
<ide> }
<ide> }
<ide>
|
|
Java
|
apache-2.0
|
bb8fedd95d5851f8f886d43b674ad4bf9f7d443b
| 0 |
zwsong/wicket,zwsong/wicket,dashorst/wicket,mosoft521/wicket,apache/wicket,mosoft521/wicket,bitstorm/wicket,AlienQueen/wicket,astrapi69/wicket,astrapi69/wicket,topicusonderwijs/wicket,apache/wicket,mafulafunk/wicket,apache/wicket,martin-g/wicket-osgi,astrapi69/wicket,selckin/wicket,martin-g/wicket-osgi,mosoft521/wicket,mosoft521/wicket,bitstorm/wicket,AlienQueen/wicket,aldaris/wicket,selckin/wicket,topicusonderwijs/wicket,freiheit-com/wicket,aldaris/wicket,apache/wicket,AlienQueen/wicket,klopfdreh/wicket,selckin/wicket,martin-g/wicket-osgi,zwsong/wicket,bitstorm/wicket,topicusonderwijs/wicket,mosoft521/wicket,topicusonderwijs/wicket,topicusonderwijs/wicket,astrapi69/wicket,freiheit-com/wicket,aldaris/wicket,AlienQueen/wicket,selckin/wicket,klopfdreh/wicket,dashorst/wicket,zwsong/wicket,freiheit-com/wicket,aldaris/wicket,dashorst/wicket,aldaris/wicket,AlienQueen/wicket,klopfdreh/wicket,mafulafunk/wicket,dashorst/wicket,klopfdreh/wicket,klopfdreh/wicket,mafulafunk/wicket,dashorst/wicket,freiheit-com/wicket,selckin/wicket,bitstorm/wicket,apache/wicket,freiheit-com/wicket,bitstorm/wicket
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.request.resource;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
import javax.servlet.http.HttpServletResponse;
import org.apache.wicket.Application;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.protocol.http.RequestUtils;
import org.apache.wicket.request.Response;
import org.apache.wicket.request.http.WebRequest;
import org.apache.wicket.request.http.WebResponse;
import org.apache.wicket.settings.IResourceSettings;
import org.apache.wicket.util.io.Streams;
import org.apache.wicket.util.lang.Args;
/**
* Convenience resource implementation. The subclass must implement
* {@link #newResourceResponse(org.apache.wicket.request.resource.IResource.Attributes)} method.
*
* @author Matej Knopp
*/
public abstract class AbstractResource implements IResource
{
	private static final long serialVersionUID = 1L;

	// Header name used when emitting client-side caching directives.
	private static final String CACHE_CONTROL = "Cache-Control";

	/**
	 * Construct.
	 */
	public AbstractResource()
	{
	}

	/**
	 * Override this method to return a {@link ResourceResponse} for the request.
	 *
	 * @param attributes request attributes
	 * @return resource data instance
	 */
	protected abstract ResourceResponse newResourceResponse(Attributes attributes);

	/**
	 * Represents data used to configure response and write resource data.
	 *
	 * @author Matej Knopp
	 */
	public static class ResourceResponse
	{
		// HTTP error code to report instead of a body; null means "no error".
		private Integer errorCode;
		// Optional human-readable message accompanying errorCode.
		private String errorMessage;
		// File name used for Content-Disposition and MIME-type lookup.
		private String fileName = null;
		private ContentDisposition contentDisposition = ContentDisposition.INLINE;
		// MIME type; lazily derived from fileName when left null.
		private String contentType = null;
		// Character encoding, applied only to textual content types.
		private String textEncoding;
		// -1 means "unknown length" and suppresses the Content-Length header.
		private long contentLength = -1;
		private Date lastModified = null;
		// Callback responsible for actually writing the response body.
		private WriteCallback writeCallback;
		private boolean cacheable = true;
		// Client-side cache duration, in seconds.
		private long cacheDuration;

		/**
		 * Construct.
		 */
		public ResourceResponse()
		{
			// Default cache duration comes from the application's resource settings.
			cacheDuration = Application.get().getResourceSettings().getDefaultCacheDuration();
		}

		/**
		 * Sets the error code for resource. If there is an error code set the data will not be
		 * rendered and the code will be sent to client.
		 *
		 * @param errorCode error code
		 */
		public void setError(Integer errorCode)
		{
			setError(errorCode, null);
		}

		/**
		 * Sets the error code and message for resource. If there is an error code set the data will
		 * not be rendered and the code and message will be sent to client.
		 *
		 * @param errorCode error code
		 * @param errorMessage
		 *            error message
		 */
		public void setError(Integer errorCode, String errorMessage)
		{
			this.errorCode = errorCode;
			this.errorMessage = errorMessage;
		}

		/**
		 * @return error code or <code>null</code>
		 */
		public Integer getErrorCode()
		{
			return errorCode;
		}

		/**
		 * @return error message or <code>null</code>
		 */
		public String getErrorMessage()
		{
			return errorMessage;
		}

		/**
		 * Sets the file name of the resource.
		 *
		 * @param fileName file name
		 */
		public void setFileName(String fileName)
		{
			this.fileName = fileName;
		}

		/**
		 * @return resource file name
		 */
		public String getFileName()
		{
			return fileName;
		}

		/**
		 * Determines whether the resource will be inline or an attachment.
		 *
		 * @see ContentDisposition
		 *
		 * @param contentDisposition content disposition (attachment or inline)
		 */
		public void setContentDisposition(ContentDisposition contentDisposition)
		{
			Args.notNull(contentDisposition, "contentDisposition");
			this.contentDisposition = contentDisposition;
		}

		/**
		 * @return whether the resource is inline or attachment
		 */
		public ContentDisposition getContentDisposition()
		{
			return contentDisposition;
		}

		/**
		 * Sets the content type for the resource. If no content type is set it will be determined
		 * by the extension.
		 *
		 * @param contentType content type (also known as mime type)
		 */
		public void setContentType(String contentType)
		{
			this.contentType = contentType;
		}

		/**
		 * @return resource content type
		 */
		public String getContentType()
		{
			// Lazily derive (and cache) the MIME type from the file name.
			if (contentType == null && fileName != null)
			{
				contentType = Application.get().getMimeType(fileName);
			}
			return contentType;
		}

		/**
		 * Sets the text encoding for the resource. The encoding is only used if the content type
		 * indicates a textual resource.
		 *
		 * @param textEncoding character encoding of text body
		 */
		public void setTextEncoding(String textEncoding)
		{
			this.textEncoding = textEncoding;
		}

		/**
		 * @return text encoding for resource
		 */
		protected String getTextEncoding()
		{
			return textEncoding;
		}

		/**
		 * Sets the content length (in bytes) of the data. Content length is optional but it's
		 * recommended to set it so that the browser can show download progress.
		 *
		 * @param contentLength length of response body
		 */
		public void setContentLength(long contentLength)
		{
			this.contentLength = contentLength;
		}

		/**
		 * @return content length (in bytes)
		 */
		public long getContentLength()
		{
			return contentLength;
		}

		/**
		 * Sets the last modified data of the resource. Even though this method is optional it is
		 * recommended to set the date. If the date is set properly Wicket can check the
		 * <code>If-Modified-Since</code> to determine if the actual data really needs to be sent
		 * to client.
		 *
		 * @param lastModified last modification date
		 */
		public void setLastModified(Date lastModified)
		{
			this.lastModified = lastModified;
		}

		/**
		 * @return last modified date
		 */
		public Date getLastModified()
		{
			return lastModified;
		}

		/**
		 * Check to determine if the resource data needs to be written. This method checks the
		 * <code>If-Modified-Since</code> request header and compares it to lastModified property.
		 * In order for this method to work {@link #setLastModified(Date)} has to be called first.
		 *
		 * @param attributes request attributes
		 * @return <code>true</code> if the resource data does need to be written,
		 *         <code>false</code> otherwise.
		 */
		public boolean dataNeedsToBeWritten(Attributes attributes)
		{
			WebRequest request = (WebRequest)attributes.getRequest();
			Date ifModifiedSince = request.getIfModifiedSinceHeader();
			Date lastModified = getLastModified();
			if (ifModifiedSince != null && lastModified != null)
			{
				// [Last-Modified] headers have a maximum precision of one second,
				// so we truncate the milliseconds part for a proper compare.
				// Consequence: changes within the same second are not reliably
				// detected by the client.
				long modified = this.lastModified.getTime() / 1000 * 1000;
				return ifModifiedSince.getTime() < modified;
			}
			else
			{
				// Without both timestamps we cannot prove freshness; write the data.
				return true;
			}
		}

		/**
		 * Cacheable resources are cached on client. This flag affects the <code>Expires</code> and
		 * <code>Cache-Control</code> headers.
		 *
		 * @see #setCacheDuration(long)
		 *
		 * @param cacheable resource may be cached (true/false)
		 */
		public void setCacheable(boolean cacheable)
		{
			this.cacheable = cacheable;
		}

		/**
		 * @return returns whether this resource is cacheable
		 */
		public boolean isCacheable()
		{
			return cacheable;
		}

		/**
		 * Sets the duration for which this resource should be cached on client (in seconds).
		 *
		 * @see IResourceSettings#setDefaultCacheDuration(int)
		 *
		 * @param cacheDuration caching duration in seconds
		 */
		public void setCacheDuration(long cacheDuration)
		{
			this.cacheDuration = cacheDuration;
		}

		/**
		 * @return duration for which the resource should be cached on client (in seconds)
		 */
		public long getCacheDuration()
		{
			return cacheDuration;
		}

		/**
		 * Sets the {@link WriteCallback}. The callback is responsible for generating the response
		 * data.
		 * <p>
		 * It is necessary to set the {@link WriteCallback} if
		 * {@link #dataNeedsToBeWritten(org.apache.wicket.request.resource.IResource.Attributes)} returns
		 * <code>true</code> and {@link #setError(Integer)} has not been called.
		 *
		 * @param writeCallback write callback
		 */
		public void setWriteCallback(final WriteCallback writeCallback)
		{
			Args.notNull(writeCallback, "writeCallback");
			this.writeCallback = writeCallback;
		}

		/**
		 * @return write callback.
		 */
		public WriteCallback getWriteCallback()
		{
			return writeCallback;
		}
	}

	/**
	 * Configure the web response header for client cache control.
	 *
	 * @param request web request
	 * @param response web response
	 * @param data resource data
	 * @param attributes request attributes
	 */
	protected void configureCache(final WebRequest request, final WebResponse response,
		final ResourceResponse data, final Attributes attributes)
	{
		if (data.isCacheable())
		{
			long now = System.currentTimeMillis();
			// Time of message generation
			response.setDateHeader("Date", now);
			// Time for cache expiry
			response.setDateHeader("Expires", now + (data.getCacheDuration() * 1000L));
			// Allow caching even for public proxies or CDN providers
			response.setHeader(CACHE_CONTROL, "public, max-age=" + data.getCacheDuration());
			// Let caches distinguish between compressed and uncompressed
			// versions of the resource so they can serve them properly
			response.setHeader("Vary", "Accept-Encoding");
		}
		else
		{
			RequestUtils.disableCaching(response);
		}
	}

	/**
	 * Renders the resource: sets last-modified/caching headers, short-circuits with
	 * 304 Not Modified or an error status when appropriate, otherwise configures
	 * disposition/type/length headers and delegates body writing to the
	 * {@link WriteCallback}. Header-setting order matters: all headers are emitted
	 * before the response is flushed.
	 *
	 * @see org.apache.wicket.request.resource.IResource#respond(org.apache.wicket.request.resource.IResource.Attributes)
	 */
	public final void respond(final Attributes attributes)
	{
		// Get a "new" ResourceResponse to write a response
		ResourceResponse data = newResourceResponse(attributes);
		WebRequest request = (WebRequest)attributes.getRequest();
		WebResponse response = (WebResponse)attributes.getResponse();

		// 1. Last Modified
		Date lastModified = data.getLastModified();
		if (lastModified != null)
		{
			response.setLastModifiedTime(lastModified.getTime());
		}

		// 2. Caching
		configureCache(request, response, data, attributes);

		if (!data.dataNeedsToBeWritten(attributes))
		{
			// Client's cached copy is still fresh; no body is written.
			response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
			return;
		}

		if (data.getErrorCode() != null)
		{
			response.sendError(data.getErrorCode(), data.getErrorMessage());
			return;
		}

		if (data.getWriteCallback() == null)
		{
			throw new IllegalStateException(
				"ResourceData#setWriteCallback must be called for AbstractResource.");
		}

		String fileName = data.getFileName();
		ContentDisposition disposition = data.getContentDisposition();
		String mimeType = data.getContentType();
		String encoding = null;
		// Text encoding is only relevant for textual MIME types.
		if (mimeType != null && mimeType.indexOf("text") != -1)
		{
			encoding = data.getTextEncoding();
		}
		long contentLength = data.getContentLength();

		// 3. Content Disposition
		if (ContentDisposition.ATTACHMENT == disposition)
		{
			response.setAttachmentHeader(fileName);
		}
		else if (ContentDisposition.INLINE == disposition)
		{
			response.setInlineHeader(fileName);
		}

		// 4. Mime Type (+ encoding)
		if (mimeType != null)
		{
			if (encoding == null)
			{
				response.setContentType(mimeType);
			}
			else
			{
				response.setContentType(mimeType + "; charset=" + encoding);
			}
		}

		// 5. Content Length
		if (contentLength != -1)
		{
			response.setContentLength(contentLength);
		}

		// 6. Flush the response
		// This is necessary for firefox if this resource is an image, otherwise it messes up
		// other images on page
		response.flush();

		// 7. Write Data
		data.getWriteCallback().writeData(attributes);
	}

	/**
	 * Callback invoked when resource data needs to be written to response. Subclass needs to
	 * implement the {@link #writeData(org.apache.wicket.request.resource.IResource.Attributes)} method.
	 *
	 * @author Matej Knopp
	 */
	public static abstract class WriteCallback
	{
		/**
		 * Write the resource data to response.
		 *
		 * @param attributes request attributes
		 */
		public abstract void writeData(Attributes attributes);

		/**
		 * Convenience method to write an {@link InputStream} to response.
		 *
		 * @param attributes request attributes
		 * @param stream input stream
		 */
		protected final void writeStream(Attributes attributes, InputStream stream)
		{
			final Response response = attributes.getResponse();
			// Adapter that forwards OutputStream writes to the Wicket Response.
			OutputStream s = new OutputStream()
			{
				@Override
				public void write(int b) throws IOException
				{
					response.write(new byte[] { (byte)b });
				}

				@Override
				public void write(byte[] b) throws IOException
				{
					response.write(b);
				}

				@Override
				public void write(byte[] b, int off, int len) throws IOException
				{
					if (off == 0 && len == b.length)
					{
						write(b);
					}
					else
					{
						// Response has no (buf, off, len) overload; copy the slice.
						byte copy[] = new byte[len];
						System.arraycopy(b, off, copy, 0, len);
						write(copy);
					}
				}
			};

			try
			{
				Streams.copy(stream, s);
			}
			catch (IOException e)
			{
				throw new WicketRuntimeException(e);
			}
		}
	}
}
|
wicket/src/main/java/org/apache/wicket/request/resource/AbstractResource.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.request.resource;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
import javax.servlet.http.HttpServletResponse;
import org.apache.wicket.Application;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.protocol.http.RequestUtils;
import org.apache.wicket.request.Response;
import org.apache.wicket.request.http.WebRequest;
import org.apache.wicket.request.http.WebResponse;
import org.apache.wicket.settings.IResourceSettings;
import org.apache.wicket.util.io.Streams;
import org.apache.wicket.util.lang.Args;
/**
* Convenience resource implementation. The subclass must implement
* {@link #newResourceResponse(org.apache.wicket.request.resource.IResource.Attributes)} method.
*
* @author Matej Knopp
*/
public abstract class AbstractResource implements IResource
{
private static final long serialVersionUID = 1L;
private static final String CACHE_CONTROL = "Cache-Control";
/**
* Construct.
*/
public AbstractResource()
{
}
/**
* Override this method to return a {@link ResourceResponse} for the request.
*
* @param attributes request attributes
* @return resource data instance
*/
protected abstract ResourceResponse newResourceResponse(Attributes attributes);
/**
* Represents data used to configure response and write resource data.
*
* @author Matej Knopp
*/
public static class ResourceResponse
{
private Integer errorCode;
private String errorMessage;
private String fileName = null;
private ContentDisposition contentDisposition = ContentDisposition.INLINE;
private String contentType = null;
private String textEncoding;
private long contentLength = -1;
private Date lastModified = null;
private WriteCallback writeCallback;
private boolean cacheable = true;
private long cacheDuration;
/**
* Construct.
*/
public ResourceResponse()
{
cacheDuration = Application.get().getResourceSettings().getDefaultCacheDuration();
}
/**
* Sets the error code for resource. If there is an error code set the data will not be
* rendered and the code will be sent to client.
*
* @param errorCode error code
*/
public void setError(Integer errorCode)
{
setError(errorCode, null);
}
/**
* Sets the error code and message for resource. If there is an error code set the data will
* not be rendered and the code and message will be sent to client.
*
* @param errorCode error code
* @param errorMessage
* error message
*/
public void setError(Integer errorCode, String errorMessage)
{
this.errorCode = errorCode;
this.errorMessage = errorMessage;
}
/**
* @return error code or <code>null</code>
*/
public Integer getErrorCode()
{
return errorCode;
}
/**
* @return error message or <code>null</code>
*/
public String getErrorMessage()
{
return errorMessage;
}
/**
* Sets the file name of the resource.
*
* @param fileName file name
*/
public void setFileName(String fileName)
{
this.fileName = fileName;
}
/**
* @return resource file name
*/
public String getFileName()
{
return fileName;
}
/**
* Determines whether the resource will be inline or an attachment.
*
* @see ContentDisposition
*
* @param contentDisposition content disposition (attachment or inline)
*/
public void setContentDisposition(ContentDisposition contentDisposition)
{
Args.notNull(contentDisposition, "contentDisposition");
this.contentDisposition = contentDisposition;
}
/**
* @return whether the resource is inline or attachment
*/
public ContentDisposition getContentDisposition()
{
return contentDisposition;
}
/**
* Sets the content type for the resource. If no content type is set it will be determined
* by the extension.
*
* @param contentType content type (also known as mime type)
*/
public void setContentType(String contentType)
{
this.contentType = contentType;
}
/**
* @return resource content type
*/
public String getContentType()
{
if (contentType == null && fileName != null)
{
contentType = Application.get().getMimeType(fileName);
}
return contentType;
}
/**
* Sets the text encoding for the resource. The encoding is only used if the content type
* indicates a textual resource.
*
* @param textEncoding character encoding of text body
*/
public void setTextEncoding(String textEncoding)
{
this.textEncoding = textEncoding;
}
/**
* @return text encoding for resource
*/
protected String getTextEncoding()
{
return textEncoding;
}
/**
* Sets the content length (in bytes) of the data. Content length is optional but it's
* recommended to set it so that the browser can show download progress.
*
* @param contentLength length of response body
*/
public void setContentLength(long contentLength)
{
this.contentLength = contentLength;
}
/**
* @return content length (in bytes)
*/
public long getContentLength()
{
return contentLength;
}
/**
* Sets the last modified data of the resource. Even though this method is optional it is
* recommended to set the date. If the date is set properly Wicket can check the
* <code>If-Modified-Since</code> to determine if the actuall data really needs to be sent
* to client.
*
* @param lastModified last modification date
*/
public void setLastModified(Date lastModified)
{
this.lastModified = lastModified;
}
/**
* @return last modified date
*/
public Date getLastModified()
{
return lastModified;
}
/**
* Check to determine if the resource data needs to be written. This method checks the
* <code>If-Modified-Since</code> request header and compares it to lastModified property.
* In order for this method to work {@link #setLastModified(Date)} has to be called first.
*
* @param attributes request attributes
* @return <code>true</code> if the resource data does need to be written,
* <code>false</code> otherwise.
*/
public boolean dataNeedsToBeWritten(Attributes attributes)
{
WebRequest request = (WebRequest)attributes.getRequest();
Date ifModifiedSince = request.getIfModifiedSinceHeader();
Date lastModified = getLastModified();
if (ifModifiedSince != null && lastModified != null)
{
// Round down to the nearest second for a proper compare
long modified = this.lastModified.getTime() / 1000 * 1000;
return ifModifiedSince.getTime() < modified;
}
else
{
return true;
}
}
/**
* Cachable resources are cached on client. This flag affects the <code>Expires</code> and
* <code>Cache-Control</code> headers.
*
* @see #setCacheDuration(long)
*
* @param cacheable resource may be cached (true/false)
*/
public void setCacheable(boolean cacheable)
{
this.cacheable = cacheable;
}
/**
* @return returns whether this resource is cacheable
*/
public boolean isCacheable()
{
return cacheable;
}
/**
* Sets the duration for which this resource should be cached on client (in seconds). #see
* {@link IResourceSettings#setDefaultCacheDuration(int)}
*
* @param cacheDuration caching duration in seconds
*/
public void setCacheDuration(long cacheDuration)
{
this.cacheDuration = cacheDuration;
}
/**
* @return duration for which the resource shoudl be cached on client (in seconds)
*/
public long getCacheDuration()
{
return cacheDuration;
}
/**
* Sets the {@link WriteCallback}. The callback is responsible for generating the response
* data.
* <p>
* It is necessary to set the {@link WriteCallback} if
* {@link #dataNeedsToBeWritten(org.apache.wicket.request.resource.IResource.Attributes)} returns
* <code>true</code> and {@link #setError(Integer)} has not been called.
*
* @param writeCallback write callback
*/
public void setWriteCallback(final WriteCallback writeCallback)
{
Args.notNull(writeCallback, "writeCallback");
this.writeCallback = writeCallback;
}
/**
* @return write callback.
*/
public WriteCallback getWriteCallback()
{
return writeCallback;
}
}
/**
* Configure the web response header for client cache control.
*
* @param request web request
* @param response web response
* @param data resource data
* @param attributes request attributes
*/
protected void configureCache(final WebRequest request, final WebResponse response,
final ResourceResponse data, final Attributes attributes)
{
if (data.isCacheable())
{
long now = System.currentTimeMillis();
// Time of message generation
response.setDateHeader("Date", now);
// Time for cache expiry
response.setDateHeader("Expires", now + (data.getCacheDuration() * 1000L));
// Allow caching even for public proxies or CDN providers
response.setHeader(CACHE_CONTROL, "public, max-age=" + data.getCacheDuration());
// Let caches distinguish between compressed and uncompressed
// versions of the resource so they can serve them properly
response.setHeader("Vary", "Accept-Encoding");
}
else
{
RequestUtils.disableCaching(response);
}
}
/**
 * Handles the resource request in a fixed sequence of steps:
 * <ol>
 * <li>set the {@code Last-Modified} header,</li>
 * <li>configure cache headers and honour {@code If-Modified-Since}
 * (replying {@code 304 Not Modified} when the data is unchanged),</li>
 * <li>send a configured error code, if any,</li>
 * <li>emit content disposition, mime type (with charset for textual
 * content), and content length headers,</li>
 * <li>flush the response and stream the body through the
 * {@link WriteCallback}.</li>
 * </ol>
 *
 * @param attributes request attributes
 * @throws IllegalStateException
 *             if neither an error code nor a write callback was configured
 *
 * @see org.apache.wicket.request.resource.IResource#respond(org.apache.wicket.request.resource.IResource.Attributes)
 */
public final void respond(final Attributes attributes)
{
    // Get a "new" ResourceResponse to write a response
    ResourceResponse data = newResourceResponse(attributes);
    WebRequest request = (WebRequest)attributes.getRequest();
    WebResponse response = (WebResponse)attributes.getResponse();
    // 1. Last Modified
    Date lastModified = data.getLastModified();
    if (lastModified != null)
    {
        response.setLastModifiedTime(lastModified.getTime());
    }
    // 2. Caching
    configureCache(request, response, data, attributes);
    if (!data.dataNeedsToBeWritten(attributes))
    {
        // Client already holds a fresh copy; no body is sent.
        response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
        return;
    }
    if (data.getErrorCode() != null)
    {
        response.sendError(data.getErrorCode(), data.getErrorMessage());
        return;
    }
    if (data.getWriteCallback() == null)
    {
        throw new IllegalStateException(
            "ResourceData#setWriteCallback must be called for AbstractResource.");
    }
    String fileName = data.getFileName();
    ContentDisposition disposition = data.getContentDisposition();
    String mimeType = data.getContentType();
    String encoding = null;
    // Only textual mime types carry a charset parameter.
    if (mimeType != null && mimeType.contains("text"))
    {
        encoding = data.getTextEncoding();
    }
    long contentLength = data.getContentLength();
    // 3. Content Disposition
    if (ContentDisposition.ATTACHMENT == disposition)
    {
        response.setAttachmentHeader(fileName);
    }
    else if (ContentDisposition.INLINE == disposition)
    {
        response.setInlineHeader(fileName);
    }
    // 4. Mime Type (+ encoding)
    if (mimeType != null)
    {
        if (encoding == null)
        {
            response.setContentType(mimeType);
        }
        else
        {
            response.setContentType(mimeType + "; charset=" + encoding);
        }
    }
    // 5. Content Length (-1 means "unknown": header is omitted)
    if (contentLength != -1)
    {
        response.setContentLength(contentLength);
    }
    // 6. Flush the response
    // This is necessary for firefox if this resource is an image, otherwise it messes up
    // other images on page
    response.flush();
    // 7. Write Data
    data.getWriteCallback().writeData(attributes);
}
/**
 * Callback invoked when resource data needs to be written to response. Subclass needs to
 * implement the {@link #writeData(org.apache.wicket.request.resource.IResource.Attributes)} method.
 *
 * @author Matej Knopp
 */
public static abstract class WriteCallback
{
    /**
     * Write the resource data to response.
     *
     * @param attributes request attributes
     */
    public abstract void writeData(Attributes attributes);
    /**
     * Convenience method to write an {@link InputStream} to response.
     * The stream is copied in full; the caller remains responsible for closing it.
     *
     * @param attributes request attributes
     * @param stream input stream
     */
    protected final void writeStream(Attributes attributes, InputStream stream)
    {
        final Response response = attributes.getResponse();
        // Adapter that forwards OutputStream writes to the Wicket Response.
        OutputStream s = new OutputStream()
        {
            @Override
            public void write(int b) throws IOException
            {
                // Response only accepts byte arrays; wrap the single byte.
                response.write(new byte[] { (byte)b });
            }
            @Override
            public void write(byte[] b) throws IOException
            {
                response.write(b);
            }
            @Override
            public void write(byte[] b, int off, int len) throws IOException
            {
                if (off == 0 && len == b.length)
                {
                    // Whole-array write: forward directly, no copy needed.
                    write(b);
                }
                else
                {
                    // Partial write: Response has no (array, offset, length)
                    // overload, so copy the requested slice first.
                    byte copy[] = new byte[len];
                    System.arraycopy(b, off, copy, 0, len);
                    write(copy);
                }
            }
        };
        try
        {
            Streams.copy(stream, s);
        }
        catch (IOException e)
        {
            throw new WicketRuntimeException(e);
        }
    }
}
}
|
some clarification on truncation Last-Modified down to the nearest second for modification check
git-svn-id: 5a74b5304d8e7e474561603514f78b697e5d94c4@995862 13f79535-47bb-0310-9956-ffa450edef68
|
wicket/src/main/java/org/apache/wicket/request/resource/AbstractResource.java
|
some clarification on truncation Last-Modified down to the nearest second for modification check
|
<ide><path>icket/src/main/java/org/apache/wicket/request/resource/AbstractResource.java
<ide> /**
<ide> * Convenience resource implementation. The subclass must implement
<ide> * {@link #newResourceResponse(org.apache.wicket.request.resource.IResource.Attributes)} method.
<del> *
<add> *
<ide> * @author Matej Knopp
<ide> */
<ide> public abstract class AbstractResource implements IResource
<ide>
<ide> /**
<ide> * Override this method to return a {@link ResourceResponse} for the request.
<del> *
<add> *
<ide> * @param attributes request attributes
<ide> * @return resource data instance
<ide> */
<ide>
<ide> /**
<ide> * Represents data used to configure response and write resource data.
<del> *
<add> *
<ide> * @author Matej Knopp
<ide> */
<ide> public static class ResourceResponse
<ide> /**
<ide> * Sets the error code for resource. If there is an error code set the data will not be
<ide> * rendered and the code will be sent to client.
<del> *
<add> *
<ide> * @param errorCode error code
<ide> */
<ide> public void setError(Integer errorCode)
<ide> /**
<ide> * Sets the error code and message for resource. If there is an error code set the data will
<ide> * not be rendered and the code and message will be sent to client.
<del> *
<add> *
<ide> * @param errorCode error code
<ide> * @param errorMessage
<ide> * error message
<ide>
<ide> /**
<ide> * Sets the file name of the resource.
<del> *
<add> *
<ide> * @param fileName file name
<ide> */
<ide> public void setFileName(String fileName)
<ide>
<ide> /**
<ide> * Determines whether the resource will be inline or an attachment.
<del> *
<add> *
<ide> * @see ContentDisposition
<del> *
<add> *
<ide> * @param contentDisposition content disposition (attachment or inline)
<ide> */
<ide> public void setContentDisposition(ContentDisposition contentDisposition)
<ide> /**
<ide> * Sets the content type for the resource. If no content type is set it will be determined
<ide> * by the extension.
<del> *
<add> *
<ide> * @param contentType content type (also known as mime type)
<ide> */
<ide> public void setContentType(String contentType)
<ide> /**
<ide> * Sets the text encoding for the resource. The encoding is only used if the content type
<ide> * indicates a textual resource.
<del> *
<add> *
<ide> * @param textEncoding character encoding of text body
<ide> */
<ide> public void setTextEncoding(String textEncoding)
<ide> /**
<ide> * Sets the content length (in bytes) of the data. Content length is optional but it's
<ide> * recommended to set it so that the browser can show download progress.
<del> *
<add> *
<ide> * @param contentLength length of response body
<ide> */
<ide> public void setContentLength(long contentLength)
<ide> * recommended to set the date. If the date is set properly Wicket can check the
<ide> * <code>If-Modified-Since</code> to determine if the actuall data really needs to be sent
<ide> * to client.
<del> *
<add> *
<ide> * @param lastModified last modification date
<ide> */
<ide> public void setLastModified(Date lastModified)
<ide> * Check to determine if the resource data needs to be written. This method checks the
<ide> * <code>If-Modified-Since</code> request header and compares it to lastModified property.
<ide> * In order for this method to work {@link #setLastModified(Date)} has to be called first.
<del> *
<add> *
<ide> * @param attributes request attributes
<ide> * @return <code>true</code> if the resource data does need to be written,
<ide> * <code>false</code> otherwise.
<ide>
<ide> if (ifModifiedSince != null && lastModified != null)
<ide> {
<del> // Round down to the nearest second for a proper compare
<add> // [Last-Modified] headers have a maximum precision of one second
<add> // so we have to truncate the milliseconds part for a proper compare.
<add> // that's stupid, since changes within one second will not be reliably
<add> // detected by the client ... any hint or clarification to improve this
<add> // situation will be appreciated...
<ide> long modified = this.lastModified.getTime() / 1000 * 1000;
<ide>
<ide> return ifModifiedSince.getTime() < modified;
<ide> /**
<ide> * Cachable resources are cached on client. This flag affects the <code>Expires</code> and
<ide> * <code>Cache-Control</code> headers.
<del> *
<add> *
<ide> * @see #setCacheDuration(long)
<del> *
<add> *
<ide> * @param cacheable resource may be cached (true/false)
<ide> */
<ide> public void setCacheable(boolean cacheable)
<ide> /**
<ide> * Sets the duration for which this resource should be cached on client (in seconds). #see
<ide> * {@link IResourceSettings#setDefaultCacheDuration(int)}
<del> *
<add> *
<ide> * @param cacheDuration caching duration in seconds
<ide> */
<ide> public void setCacheDuration(long cacheDuration)
<ide> * It is necessary to set the {@link WriteCallback} if
<ide> * {@link #dataNeedsToBeWritten(org.apache.wicket.request.resource.IResource.Attributes)} returns
<ide> * <code>true</code> and {@link #setError(Integer)} has not been called.
<del> *
<add> *
<ide> * @param writeCallback write callback
<ide> */
<ide> public void setWriteCallback(final WriteCallback writeCallback)
<ide>
<ide> /**
<ide> * Configure the web response header for client cache control.
<del> *
<add> *
<ide> * @param request web request
<ide> * @param response web response
<ide> * @param data resource data
<ide> }
<ide>
<ide> /**
<del> *
<add> *
<ide> * @see org.apache.wicket.request.resource.IResource#respond(org.apache.wicket.request.resource.IResource.Attributes)
<ide> */
<ide> public final void respond(final Attributes attributes)
<ide> /**
<ide> * Callback invoked when resource data needs to be written to response. Subclass needs to
<ide> * implement the {@link #writeData(org.apache.wicket.request.resource.IResource.Attributes)} method.
<del> *
<add> *
<ide> * @author Matej Knopp
<ide> */
<ide> public static abstract class WriteCallback
<ide> {
<ide> /**
<ide> * Write the resource data to response.
<del> *
<add> *
<ide> * @param attributes request attributes
<ide> */
<ide> public abstract void writeData(Attributes attributes);
<ide>
<ide> /**
<ide> * Convenience method to write an {@link InputStream} to response.
<del> *
<add> *
<ide> * @param attributes request attributes
<ide> * @param stream input stream
<ide> */
|
|
Java
|
mpl-2.0
|
eabf06c0cca3ba3be8e471cf8babc767087fff14
| 0 |
Helioviewer-Project/JHelioviewer-SWHV,Helioviewer-Project/JHelioviewer-SWHV,Helioviewer-Project/JHelioviewer-SWHV,Helioviewer-Project/JHelioviewer-SWHV,Helioviewer-Project/JHelioviewer-SWHV
|
package org.helioviewer.jhv.export;
import java.awt.EventQueue;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Date;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.helioviewer.jhv.JHVDirectory;
import org.helioviewer.jhv.JHVGlobals;
import org.helioviewer.jhv.base.ImageUtils;
import org.helioviewer.jhv.base.time.TimeUtils;
import org.helioviewer.jhv.display.Displayer;
import org.helioviewer.jhv.gui.ImageViewerGui;
import org.helioviewer.jhv.gui.components.MoviePanel;
import org.helioviewer.jhv.gui.components.MoviePanel.RecordMode;
import org.helioviewer.jhv.layers.FrameListener;
import org.helioviewer.jhv.layers.Layers;
import org.helioviewer.jhv.opengl.GLGrab;
import org.helioviewer.jhv.threads.JHVThread;
import com.jogamp.opengl.GL2;
/**
 * Records the JHelioviewer GL canvas to an MP4 movie or a PNG screenshot.
 * Frames are grabbed on the GL thread and encoded asynchronously by a
 * single-threaded executor. Most state is static: only one export can be
 * active at a time (singleton, see {@link #getInstance()}).
 */
public class ExportMovie implements FrameListener {
    // The active exporter: PNGExporter for screenshots, JCodecExporter for movies.
    private static MovieExporter exporter;
    // Grabs rendered frames from the GL context at the export canvas size.
    private static GLGrab grabber;
    // Recording mode of the current session (SHOT / LOOP / FREE).
    private static RecordMode mode;
    // Set by stop(); checked on the GL thread to finish the export.
    private static boolean stopped = false;
    // Soft cap on queued frames; the queue is twice this size so that the
    // screenshot and the CloseWriter finalizer always find room (see
    // handleMovieExport, which stops submitting frames above NUM_FRAMES).
    private final int NUM_FRAMES = 512;
    private final ArrayBlockingQueue<Runnable> frameQueue = new ArrayBlockingQueue<Runnable>(2 * NUM_FRAMES);
    // Single worker thread; DiscardPolicy silently drops tasks if the queue is full.
    private final ThreadPoolExecutor executor = new ThreadPoolExecutor(1, 1, 10000L, TimeUnit.MILLISECONDS, frameQueue, new JHVThread.NamedThreadFactory("Export Movie"), new ThreadPoolExecutor.DiscardPolicy());
    // Optional EVE timeline image composited below the main canvas (null if absent).
    public static BufferedImage EVEImage = null;
    // X position of the movie-time marker drawn into the EVE image (-1 = none).
    public static int EVEMovieLinePosition = -1;
    /**
     * Finalizes the current exporter. When keep is true, closing happens
     * asynchronously after all pending frames; otherwise the executor is
     * stopped immediately and the partial output file is deleted.
     */
    public void disposeMovieWriter(boolean keep) {
        if (exporter != null) {
            Runnable runnable = new CloseWriter(exporter, keep);
            if (keep) {
                executor.submit(runnable);
            } else {
                executor.shutdownNow();
                runnable.run();
            }
            exporter = null;
        }
    }
    // Runs on the GL thread: detach from rendering, re-enable UI, release GL
    // resources and schedule the writer shutdown.
    private void exportMovieFinish(GL2 gl) {
        ImageViewerGui.getMainComponent().detachExport();
        MoviePanel.recordPanelSetEnabled(true);
        try {
            grabber.dispose(gl);
            disposeMovieWriter(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Called per rendered frame on the GL thread. Grabs the frame and queues
     * it for encoding; frames beyond NUM_FRAMES backlog are dropped (movie
     * modes only) to reserve queue capacity for the finalizer.
     */
    public void handleMovieExport(GL2 gl) {
        if (stopped) {
            exportMovieFinish(gl);
            return;
        }
        BufferedImage screenshot = grabber.renderFrame(gl);
        try {
            if (mode == RecordMode.SHOT || frameQueue.size() <= NUM_FRAMES)
                executor.submit(new FrameConsumer(exporter, screenshot, EVEImage, EVEMovieLinePosition));
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (mode == RecordMode.SHOT) {
            // A screenshot needs exactly one frame.
            stop();
        }
    }
    // Video codecs require dimensions divisible by the macroblock size.
    private static final int MACROBLOCK = 8;
    /**
     * Starts an export session. Computes canvas/export dimensions (rounding to
     * MACROBLOCK for video, accounting for the EVE strip height), opens the
     * appropriate exporter and, in LOOP mode, replays the movie from frame 0.
     */
    public static void start(int _w, int _h, boolean isInternal, int fps, RecordMode _mode) {
        int scrw = 1;
        int scrh = 0;
        if (EVEImage != null) {
            scrw = Math.max(1, EVEImage.getWidth());
            scrh = EVEImage.getHeight();
        }
        int canvasWidth, canvasHeight, exportHeight;
        mode = _mode;
        if (mode == RecordMode.SHOT)
            canvasWidth = _w;
        else
            canvasWidth = (_w / MACROBLOCK) * MACROBLOCK; // video formats
        // EVE strip height scaled to the chosen canvas width.
        int sh = (int) (scrh / (double) scrw * canvasWidth + .5);
        if (isInternal)
            canvasHeight = _h - sh;
        else
            canvasHeight = _h;
        if (mode == RecordMode.SHOT)
            exportHeight = canvasHeight + sh;
        else
            exportHeight = ((canvasHeight + sh) / MACROBLOCK) * MACROBLOCK; // video formats
        canvasHeight = exportHeight - sh;
        stopped = false;
        currentFrame = 0;
        MoviePanel.recordPanelSetEnabled(false);
        grabber = new GLGrab(canvasWidth, canvasHeight);
        ImageViewerGui.getMainComponent().attachExport(instance);
        String prefix = JHVDirectory.EXPORTS.getPath() + "JHV_" + TimeUtils.filenameDateFormat.format(new Date());
        if (mode == RecordMode.SHOT) {
            try {
                exporter = new PNGExporter();
                exporter.open(prefix + ".png", canvasWidth, exportHeight, fps);
            } catch (Exception e) {
                e.printStackTrace();
            }
            Displayer.display();
        } else {
            try {
                exporter = new JCodecExporter();
                exporter.open(prefix + ".mp4", canvasWidth, exportHeight, fps);
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (mode == RecordMode.LOOP) {
                Layers.addFrameListener(instance);
                Layers.setFrame(0);
                Layers.playMovie();
            }
        }
    }
    /**
     * Requests the end of the current session; the GL thread performs the
     * actual teardown on its next callback (see handleMovieExport).
     */
    public static void stop() {
        if (!stopped) {
            stopped = true;
            if (mode == RecordMode.LOOP)
                Layers.removeFrameListener(instance);
            if (mode != RecordMode.FREE)
                MoviePanel.clickRecordButton();
            Displayer.display(); // force detach
        }
    }
    private static int currentFrame = 0;
    // loop mode only
    @Override
    public void frameChanged(int frame) {
        // A frame number lower than the last one means the movie wrapped around:
        // one full loop has been recorded, so stop.
        if (frame < currentFrame)
            stop();
        else
            currentFrame = frame;
    }
    /**
     * Worker task: composites one grabbed frame with the (copied) EVE image
     * and hands the result to the exporter. Runs on the executor thread.
     */
    private static class FrameConsumer implements Runnable {
        private final MovieExporter movieExporter;
        private BufferedImage mainImage;
        private BufferedImage eveImage;
        private final int movieLinePosition;
        public FrameConsumer(MovieExporter _movieExporter, BufferedImage _mainImage, BufferedImage _eveImage, int _movieLinePosition) {
            movieExporter = _movieExporter;
            mainImage = _mainImage;
            // Deep-copy the EVE image on the GL thread: it mutates between frames.
            if (_eveImage == null)
                eveImage = null;
            else
                eveImage = ImageUtils.deepCopy(_eveImage);
            movieLinePosition = _movieLinePosition;
        }
        @Override
        public void run() {
            try {
                BufferedImage composite = ExportUtils.pasteCanvases(mainImage, eveImage, movieLinePosition, movieExporter.getHeight());
                // Release references early to help the GC while encoding.
                mainImage = null;
                eveImage = null;
                movieExporter.encode(composite);
                composite = null;
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
    /**
     * Worker task: closes the exporter. On success (and keep == true) notifies
     * the user on the EDT; otherwise deletes the incomplete output file.
     */
    private static class CloseWriter implements Runnable {
        private final MovieExporter movieExporter;
        private final boolean keep;
        public CloseWriter(MovieExporter _movieExporter, boolean _keep) {
            movieExporter = _movieExporter;
            keep = _keep;
        }
        @Override
        public void run() {
            boolean failed = false;
            try {
                if (keep) {
                    movieExporter.close();
                    EventQueue.invokeLater(new Runnable() {
                        @Override
                        public void run() {
                            JHVGlobals.displayNotification(movieExporter.getPath());
                        }
                    });
                }
            } catch (Exception e) {
                e.printStackTrace();
                failed = true;
            }
            if (!keep || failed) {
                File f = new File(movieExporter.getPath());
                f.delete();
            }
        }
    }
    // Singleton instance; registered as GL export handler and frame listener.
    private static final ExportMovie instance = new ExportMovie();
    private ExportMovie() {
    }
    public static ExportMovie getInstance() {
        return instance;
    }
}
|
src/jhv/src/org/helioviewer/jhv/export/ExportMovie.java
|
package org.helioviewer.jhv.export;
import java.awt.EventQueue;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Date;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.helioviewer.jhv.JHVDirectory;
import org.helioviewer.jhv.JHVGlobals;
import org.helioviewer.jhv.base.ImageUtils;
import org.helioviewer.jhv.base.time.TimeUtils;
import org.helioviewer.jhv.display.Displayer;
import org.helioviewer.jhv.gui.ImageViewerGui;
import org.helioviewer.jhv.gui.components.MoviePanel;
import org.helioviewer.jhv.gui.components.MoviePanel.RecordMode;
import org.helioviewer.jhv.layers.FrameListener;
import org.helioviewer.jhv.layers.Layers;
import org.helioviewer.jhv.opengl.GLGrab;
import org.helioviewer.jhv.threads.JHVThread;
import com.jogamp.opengl.GL2;
/**
 * Records the JHelioviewer GL canvas to an MP4 movie or a PNG screenshot.
 * Frames are grabbed on the GL thread and encoded asynchronously by a
 * single-threaded executor. Only one export can be active at a time
 * (singleton, see {@link #getInstance()}).
 */
public class ExportMovie implements FrameListener {
    // Active exporter: PNGExporter for screenshots, JCodecExporter for movies.
    private static MovieExporter exporter;
    // Grabs rendered frames from the GL context at the export canvas size.
    private static GLGrab grabber;
    // Recording mode of the current session (SHOT / LOOP / FREE).
    private static RecordMode mode;
    // Set by stop(); checked on the GL thread to finish the export.
    private static boolean stopped = false;
    // Bounded task queue; full queue means new frame tasks are discarded.
    private final ArrayBlockingQueue<Runnable> frameQueue = new ArrayBlockingQueue<Runnable>(512);
    // Single worker thread; DiscardPolicy silently drops tasks when the queue is full.
    private final ThreadPoolExecutor executor = new ThreadPoolExecutor(1, 1, 10000L, TimeUnit.MILLISECONDS, frameQueue, new JHVThread.NamedThreadFactory("Export Movie"), new ThreadPoolExecutor.DiscardPolicy());
    // Optional EVE timeline image composited below the main canvas (null if absent).
    public static BufferedImage EVEImage = null;
    // X position of the movie-time marker drawn into the EVE image (-1 = none).
    public static int EVEMovieLinePosition = -1;
    /**
     * Finalizes the current exporter. When keep is true the close task is
     * queued behind pending frames (evicting one frame if the queue is full
     * so the finalizer is not discarded); otherwise the executor is stopped
     * immediately and the partial output is deleted.
     */
    public void disposeMovieWriter(boolean keep) {
        if (exporter != null) {
            Runnable runnable = new CloseWriter(exporter, keep);
            if (keep) {
                // Make room so the DiscardPolicy cannot drop the finalizer.
                if (frameQueue.remainingCapacity() == 0)
                    frameQueue.poll();
                executor.submit(runnable);
            } else {
                executor.shutdownNow();
                runnable.run();
            }
            exporter = null;
        }
    }
    // Runs on the GL thread: detach from rendering, re-enable UI, release GL
    // resources and schedule the writer shutdown.
    private void exportMovieFinish(GL2 gl) {
        ImageViewerGui.getMainComponent().detachExport();
        MoviePanel.recordPanelSetEnabled(true);
        try {
            grabber.dispose(gl);
            disposeMovieWriter(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Called per rendered frame on the GL thread: grabs the frame and queues
     * it for encoding (tasks may be silently discarded if the queue is full).
     */
    public void handleMovieExport(GL2 gl) {
        if (stopped) {
            exportMovieFinish(gl);
            return;
        }
        BufferedImage screenshot = grabber.renderFrame(gl);
        try {
            executor.submit(new FrameConsumer(exporter, screenshot, EVEImage, EVEMovieLinePosition));
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (mode == RecordMode.SHOT) {
            // A screenshot needs exactly one frame.
            stop();
        }
    }
    // Video codecs require dimensions divisible by the macroblock size.
    private static final int MACROBLOCK = 8;
    /**
     * Starts an export session. Computes canvas/export dimensions (rounding to
     * MACROBLOCK for video, accounting for the EVE strip height), opens the
     * appropriate exporter and, in LOOP mode, replays the movie from frame 0.
     */
    public static void start(int _w, int _h, boolean isInternal, int fps, RecordMode _mode) {
        int scrw = 1;
        int scrh = 0;
        if (EVEImage != null) {
            scrw = Math.max(1, EVEImage.getWidth());
            scrh = EVEImage.getHeight();
        }
        int canvasWidth, canvasHeight, exportHeight;
        mode = _mode;
        if (mode == RecordMode.SHOT)
            canvasWidth = _w;
        else
            canvasWidth = (_w / MACROBLOCK) * MACROBLOCK; // video formats
        // EVE strip height scaled to the chosen canvas width.
        int sh = (int) (scrh / (double) scrw * canvasWidth + .5);
        if (isInternal)
            canvasHeight = _h - sh;
        else
            canvasHeight = _h;
        if (mode == RecordMode.SHOT)
            exportHeight = canvasHeight + sh;
        else
            exportHeight = ((canvasHeight + sh) / MACROBLOCK) * MACROBLOCK; // video formats
        canvasHeight = exportHeight - sh;
        stopped = false;
        currentFrame = 0;
        String prefix = JHVDirectory.EXPORTS.getPath() + "JHV_" + TimeUtils.filenameDateFormat.format(new Date());
        String moviePath = prefix + ".mp4";
        String imagePath = prefix + ".png";
        MoviePanel.recordPanelSetEnabled(false);
        grabber = new GLGrab(canvasWidth, canvasHeight);
        ImageViewerGui.getMainComponent().attachExport(instance);
        if (mode == RecordMode.SHOT) {
            try {
                exporter = new PNGExporter();
                exporter.open(imagePath, canvasWidth, exportHeight, fps);
            } catch (Exception e) {
                e.printStackTrace();
            }
            Displayer.display();
        } else {
            try {
                exporter = new JCodecExporter();
                exporter.open(moviePath, canvasWidth, exportHeight, fps);
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (mode == RecordMode.LOOP) {
                Layers.addFrameListener(instance);
                Layers.setFrame(0);
                Layers.playMovie();
            }
        }
    }
    /**
     * Requests the end of the current session; the GL thread performs the
     * actual teardown on its next callback (see handleMovieExport).
     */
    public static void stop() {
        if (!stopped) {
            stopped = true;
            if (mode == RecordMode.LOOP)
                Layers.removeFrameListener(instance);
            if (mode != RecordMode.FREE)
                MoviePanel.clickRecordButton();
            Displayer.display(); // force detach
        }
    }
    private static int currentFrame = 0;
    // loop mode only
    @Override
    public void frameChanged(int frame) {
        // A frame number lower than the last one means the movie wrapped around:
        // one full loop has been recorded, so stop.
        if (frame < currentFrame)
            stop();
        else
            currentFrame = frame;
    }
    /**
     * Worker task: composites one grabbed frame with the (copied) EVE image
     * and hands the result to the exporter. Runs on the executor thread.
     */
    private static class FrameConsumer implements Runnable {
        private final MovieExporter movieExporter;
        private BufferedImage mainImage;
        private BufferedImage eveImage;
        private final int movieLinePosition;
        public FrameConsumer(MovieExporter _movieExporter, BufferedImage _mainImage, BufferedImage _eveImage, int _movieLinePosition) {
            movieExporter = _movieExporter;
            mainImage = _mainImage;
            // Deep-copy the EVE image on the GL thread: it mutates between frames.
            if (_eveImage == null)
                eveImage = null;
            else
                eveImage = ImageUtils.deepCopy(_eveImage);
            movieLinePosition = _movieLinePosition;
        }
        @Override
        public void run() {
            try {
                BufferedImage composite = ExportUtils.pasteCanvases(mainImage, eveImage, movieLinePosition, movieExporter.getHeight());
                // Release references early to help the GC while encoding.
                mainImage = null;
                eveImage = null;
                movieExporter.encode(composite);
                composite = null;
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
    /**
     * Worker task: closes the exporter. On success (and keep == true) notifies
     * the user on the EDT; otherwise deletes the incomplete output file.
     */
    private static class CloseWriter implements Runnable {
        private final MovieExporter movieExporter;
        private final boolean keep;
        public CloseWriter(MovieExporter _movieExporter, boolean _keep) {
            movieExporter = _movieExporter;
            keep = _keep;
        }
        @Override
        public void run() {
            boolean failed = false;
            try {
                if (keep) {
                    movieExporter.close();
                    EventQueue.invokeLater(new Runnable() {
                        @Override
                        public void run() {
                            JHVGlobals.displayNotification(movieExporter.getPath());
                        }
                    });
                }
            } catch (Exception e) {
                e.printStackTrace();
                failed = true;
            }
            if (!keep || failed) {
                File f = new File(movieExporter.getPath());
                f.delete();
            }
        }
    }
    // Singleton instance; registered as GL export handler and frame listener.
    private static final ExportMovie instance = new ExportMovie();
    private ExportMovie() {
    }
    public static ExportMovie getInstance() {
        return instance;
    }
}
Reserve a number of tasks for screenshot and finalizer
git-svn-id: 4e353c0944fe8da334633afc35765ef362dec675@7494 b4e469a2-07ce-4b26-9273-4d7d95a670c7
|
src/jhv/src/org/helioviewer/jhv/export/ExportMovie.java
|
Reserve a number of tasks for screenshot and finalizer
|
<ide><path>rc/jhv/src/org/helioviewer/jhv/export/ExportMovie.java
<ide> private static RecordMode mode;
<ide> private static boolean stopped = false;
<ide>
<del> private final ArrayBlockingQueue<Runnable> frameQueue = new ArrayBlockingQueue<Runnable>(512);
<add> private final int NUM_FRAMES = 512;
<add> private final ArrayBlockingQueue<Runnable> frameQueue = new ArrayBlockingQueue<Runnable>(2 * NUM_FRAMES);
<ide> private final ThreadPoolExecutor executor = new ThreadPoolExecutor(1, 1, 10000L, TimeUnit.MILLISECONDS, frameQueue, new JHVThread.NamedThreadFactory("Export Movie"), new ThreadPoolExecutor.DiscardPolicy());
<ide>
<ide> public static BufferedImage EVEImage = null;
<ide> if (exporter != null) {
<ide> Runnable runnable = new CloseWriter(exporter, keep);
<ide> if (keep) {
<del> if (frameQueue.remainingCapacity() == 0)
<del> frameQueue.poll();
<ide> executor.submit(runnable);
<ide> } else {
<ide> executor.shutdownNow();
<ide>
<ide> BufferedImage screenshot = grabber.renderFrame(gl);
<ide> try {
<del> executor.submit(new FrameConsumer(exporter, screenshot, EVEImage, EVEMovieLinePosition));
<add> if (mode == RecordMode.SHOT || frameQueue.size() <= NUM_FRAMES)
<add> executor.submit(new FrameConsumer(exporter, screenshot, EVEImage, EVEMovieLinePosition));
<ide> } catch (Exception e) {
<ide> e.printStackTrace();
<ide> }
<ide> stopped = false;
<ide> currentFrame = 0;
<ide>
<del> String prefix = JHVDirectory.EXPORTS.getPath() + "JHV_" + TimeUtils.filenameDateFormat.format(new Date());
<del> String moviePath = prefix + ".mp4";
<del> String imagePath = prefix + ".png";
<del>
<ide> MoviePanel.recordPanelSetEnabled(false);
<ide>
<ide> grabber = new GLGrab(canvasWidth, canvasHeight);
<ide> ImageViewerGui.getMainComponent().attachExport(instance);
<ide>
<add> String prefix = JHVDirectory.EXPORTS.getPath() + "JHV_" + TimeUtils.filenameDateFormat.format(new Date());
<ide> if (mode == RecordMode.SHOT) {
<ide> try {
<ide> exporter = new PNGExporter();
<del> exporter.open(imagePath, canvasWidth, exportHeight, fps);
<add> exporter.open(prefix + ".png", canvasWidth, exportHeight, fps);
<ide> } catch (Exception e) {
<ide> e.printStackTrace();
<ide> }
<ide> } else {
<ide> try {
<ide> exporter = new JCodecExporter();
<del> exporter.open(moviePath, canvasWidth, exportHeight, fps);
<add> exporter.open(prefix + ".mp4", canvasWidth, exportHeight, fps);
<ide> } catch (Exception e) {
<ide> e.printStackTrace();
<ide> }
|
|
Java
|
mit
|
fe7535a6e385bb5b64befc978f614ad9c532896b
| 0 |
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
|
package com.elmakers.mine.bukkit.utility.platform.base;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.WeakHashMap;
import java.util.function.Consumer;
import java.util.logging.Level;
import javax.annotation.Nonnull;
import org.apache.commons.lang.StringUtils;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Chunk;
import org.bukkit.Effect;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.attribute.Attribute;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.block.BlockState;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.ArmorStand;
import org.bukkit.entity.Arrow;
import org.bukkit.entity.ComplexEntityPart;
import org.bukkit.entity.Damageable;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.FallingBlock;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.entity.Projectile;
import org.bukkit.entity.SpectralArrow;
import org.bukkit.entity.Tameable;
import org.bukkit.entity.ThrownPotion;
import org.bukkit.entity.TippedArrow;
import org.bukkit.event.entity.EntityDeathEvent;
import org.bukkit.inventory.FurnaceRecipe;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.Recipe;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.material.Torch;
import org.bukkit.plugin.Plugin;
import org.bukkit.potion.PotionEffect;
import org.bukkit.projectiles.ProjectileSource;
import org.bukkit.scheduler.BukkitTask;
import org.bukkit.util.Vector;
import org.spigotmc.event.entity.EntityDismountEvent;
import com.elmakers.mine.bukkit.api.magic.Messages;
import com.elmakers.mine.bukkit.utility.EnteredStateTracker;
import com.elmakers.mine.bukkit.utility.LoadingChunk;
import com.elmakers.mine.bukkit.utility.TeleportPassengerTask;
import com.elmakers.mine.bukkit.utility.platform.CompatibilityUtils;
import com.elmakers.mine.bukkit.utility.platform.PaperUtils;
import com.elmakers.mine.bukkit.utility.platform.Platform;
import com.google.gson.Gson;
import com.google.gson.stream.JsonReader;
public abstract class CompatibilityUtilsBase implements CompatibilityUtils {
// This is really here to prevent infinite loops, but sometimes these requests legitimately come in many time
// (for instance when undoing a spell in an unloaded chunk that threw a ton of different falling blocks)
// So putting some lower number on this will trigger a lot of false-positives.
protected static final int MAX_CHUNK_LOAD_TRY = 10000;
protected static final int MAX_ENTITY_RANGE = 72;
protected static boolean USE_MAGIC_DAMAGE = true;
protected static int BLOCK_BREAK_RANGE = 64;
protected final UUID emptyUUID = new UUID(0L, 0L);
protected Gson gson;
protected ItemStack dummyItem;
protected boolean hasDumpedStack = false;
protected boolean teleporting = false;
protected final Map<World.Environment, Integer> maxHeights = new HashMap<>();
protected final Map<LoadingChunk, Integer> loadingChunks = new HashMap<>();
protected final EnteredStateTracker isDamaging = new EnteredStateTracker();
protected final Map<World, WeakReference<ThrownPotion>> worldPotions = new WeakHashMap<>();
public Map<Integer, Material> materialIdMap;
protected final Platform platform;
private Messages messages;
// Stores the platform abstraction used by all subclasses for logging,
// plugin access and version-specific behavior.
protected CompatibilityUtilsBase(final Platform platform) {
    this.platform = platform;
}
/**
 * Lazily creates and caches the shared {@link Gson} instance.
 * NOTE(review): not synchronized — appears to assume single-threaded
 * (main-thread) access; confirm before calling from async tasks.
 */
protected Gson getGson() {
    Gson instance = gson;
    if (instance == null) {
        instance = new Gson();
        gson = instance;
    }
    return instance;
}
@Override
public void setMessages(Messages messages) {
    // Localization provider injected by the plugin at startup.
    this.messages = messages;
}
@Override
public boolean isDamaging() {
    // True while a damage(...) call initiated by this class is on the stack;
    // used to distinguish our own damage events from external ones.
    return isDamaging.isInside();
}
/**
 * Applies every effect in the collection via
 * {@link #applyPotionEffect(LivingEntity, PotionEffect)}, which decides
 * per-effect whether applying would nerf an existing effect.
 */
@Override
public void applyPotionEffects(LivingEntity entity, Collection<PotionEffect> effects) {
    effects.forEach(potionEffect -> applyPotionEffect(entity, potionEffect));
}
/**
 * Applies a potion effect unless doing so would weaken an effect the entity
 * already has. Returns true when the effect was applied.
 */
@Override
public boolean applyPotionEffect(LivingEntity entity, PotionEffect effect) {
    // Avoid nerfing existing effects
    boolean applyEffect = true;
    Collection<PotionEffect> currentEffects = entity.getActivePotionEffects();
    for (PotionEffect currentEffect : currentEffects) {
        if (currentEffect.getType().equals(effect.getType())) {
            if (effect.getAmplifier() < 0) {
                // Negative amplifier is a removal/weaken request; never
                // override an active effect of the same type with it.
                applyEffect = false;
                break;
            } else if (currentEffect.getAmplifier() > effect.getAmplifier() || effect.getDuration() > Integer.MAX_VALUE / 4) {
                // Existing effect is stronger, or the new duration is in the
                // "effectively infinite" range — presumably a guard against
                // re-applying near-permanent effects (TODO confirm intent).
                applyEffect = false;
                break;
            }
        }
    }
    if (applyEffect) {
        // Force-apply: overrides the existing effect of the same type.
        entity.addPotionEffect(effect, true);
    }
    return applyEffect;
}
@Override
public void setInvulnerable(Entity entity) {
    // Convenience overload: defaults to making the entity invulnerable.
    setInvulnerable(entity, true);
}
/**
 * Spawns an armor stand at the given location via the generic
 * {@link #createEntity} factory.
 */
@Override
public ArmorStand createArmorStand(Location location) {
    Entity created = createEntity(location, EntityType.ARMOR_STAND);
    return (ArmorStand) created;
}
@Override
public Runnable getTaskRunnable(BukkitTask task) {
    // Base implementation cannot extract the runnable from a BukkitTask;
    // platform-specific subclasses may override this (TODO confirm).
    return null;
}
/**
 * Damages a target entity, resolving complex-entity parts to their parent,
 * special-casing ender dragons (magic damage) and armor stands (manual
 * health bookkeeping plus a synthetic death event).
 */
@Override
public void damage(Damageable target, double amount, Entity source) {
    if (target == null || target.isDead()) return;
    // Damage must be applied to the parent of a multi-part entity.
    while (target instanceof ComplexEntityPart) {
        target = ((ComplexEntityPart) target).getParent();
    }
    if (USE_MAGIC_DAMAGE && target.getType() == EntityType.ENDER_DRAGON) {
        magicDamage(target, amount, source);
        return;
    }
    // Track that we are inside our own damage call (see isDamaging()).
    try (EnteredStateTracker.Touchable damaging = isDamaging.enter()) {
        damaging.touch();
        if (target instanceof ArmorStand) {
            // Armor stands don't take normal damage: adjust health manually
            // and fire a death event before removal so listeners still run.
            double newHealth = Math.max(0, target.getHealth() - amount);
            if (newHealth <= 0) {
                EntityDeathEvent deathEvent = new EntityDeathEvent((ArmorStand) target, new ArrayList<>());
                Bukkit.getPluginManager().callEvent(deathEvent);
                target.remove();
            } else {
                target.setHealth(newHealth);
            }
        } else {
            target.damage(amount, source);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
/**
 * Lazily creates potion entities that can be used when damaging players.
 * One potion is cached per world (weakly referenced) so that
 * {@code potion.getWorld()} reports the correct result.
 *
 * @param location The location the potion should be placed at.
 * @return A potion entity placed at the given location.
 */
protected ThrownPotion getOrCreatePotionEntity(Location location) {
    World world = location.getWorld();
    WeakReference<ThrownPotion> cached = worldPotions.get(world);
    ThrownPotion potion = (cached != null) ? cached.get() : null;
    if (potion != null) {
        // Reuse the cached entity, repositioned to the requested location.
        // TODO: verify that teleporting a removed entity behaves as expected.
        potion.teleport(location);
        return potion;
    }
    // Spawn a throwaway potion entity, immediately remove it from the world
    // and keep only a weak reference for later reuse.
    potion = (ThrownPotion) world.spawnEntity(
        location,
        EntityType.SPLASH_POTION);
    potion.remove();
    worldPotions.put(world, new WeakReference<>(potion));
    return potion;
}
/**
 * Returns the eye location for living entities, falling back to the plain
 * entity location for everything else.
 */
@Override
public Location getEyeLocation(Entity entity) {
    if (!(entity instanceof LivingEntity)) {
        return entity.getLocation();
    }
    return ((LivingEntity) entity).getEyeLocation();
}
/**
 * Loads a YAML configuration from a file path. A missing file is not an
 * error: an empty configuration is returned instead.
 */
@Override
public ConfigurationSection loadConfiguration(String fileName) throws IOException, InvalidConfigurationException {
    YamlConfiguration config = new YamlConfiguration();
    try {
        config.load(fileName);
    } catch (FileNotFoundException ignored) {
        // Missing file yields an empty configuration.
    }
    return config;
}
/**
 * Loads a YAML configuration from a file. A missing file is not an error
 * (empty configuration); any other failure is logged with the file path and
 * rethrown.
 */
@Override
public ConfigurationSection loadConfiguration(File file) throws IOException, InvalidConfigurationException {
    YamlConfiguration config = new YamlConfiguration();
    try {
        config.load(file);
    } catch (FileNotFoundException ignored) {
        // Missing file yields an empty configuration.
    } catch (Throwable ex) {
        platform.getLogger().log(Level.SEVERE, "Error reading configuration file '" + file.getAbsolutePath() + "'");
        throw ex;
    }
    return config;
}
/**
 * Loads a YAML configuration from an input stream (UTF-8). A null stream is
 * logged and yields an empty configuration. The stream is closed when
 * loading completes — fixes a resource leak: the original never closed the
 * reader/stream (e.g. the one handed over by Plugin#getResource).
 *
 * @param stream the stream to read, may be null
 * @param fileName used only for the error message when the stream is null
 */
@Override
public YamlConfiguration loadConfiguration(InputStream stream, String fileName) throws IOException, InvalidConfigurationException {
    YamlConfiguration configuration = new YamlConfiguration();
    if (stream == null) {
        platform.getLogger().log(Level.SEVERE, "Could not find builtin configuration file '" + fileName + "'");
        return configuration;
    }
    // try-with-resources closes the reader (and thus the underlying stream).
    try (InputStreamReader reader = new InputStreamReader(stream, "UTF-8")) {
        configuration.load(reader);
    } catch (FileNotFoundException ignore) {
    }
    return configuration;
}
/** Load one of the plugin's bundled resource configurations by name. */
@Override
public YamlConfiguration loadBuiltinConfiguration(String fileName) throws IOException, InvalidConfigurationException {
    InputStream resource = platform.getPlugin().getResource(fileName);
    return loadConfiguration(resource, fileName);
}
/**
 * Convert a cardinal BlockFace to its legacy integer facing value:
 * SOUTH=0, WEST=1, NORTH=2, EAST=3. Any other face maps to 0.
 */
@Override
public int getFacing(BlockFace direction) {
    if (direction == BlockFace.WEST) {
        return 1;
    }
    if (direction == BlockFace.NORTH) {
        return 2;
    }
    if (direction == BlockFace.EAST) {
        return 3;
    }
    // SOUTH and all other faces
    return 0;
}
/** Convert a ConfigurationSection to a plain Map; delegates to getTypedMap. */
@Override
public Map<String, Object> getMap(ConfigurationSection section) {
    return getTypedMap(section);
}
/**
 * Compute the axis-aligned face normal of a block at a ray intersection
 * point, by taking the dominant component of the vector from the block
 * center to the intersection.
 */
@Override
public Vector getNormal(Block block, Location intersection) {
    double dx = intersection.getX() - (block.getX() + 0.5);
    double dy = intersection.getY() - (block.getY() + 0.5);
    double dz = intersection.getZ() - (block.getZ() + 0.5);
    double absX = Math.abs(dx);
    double absY = Math.abs(dy);
    double absZ = Math.abs(dz);
    if (absX > absY && absX > absZ) {
        return new Vector(Math.signum(dx), 0, 0);
    }
    if (absY > absX && absY > absZ) {
        return new Vector(0, Math.signum(dy), 0);
    }
    // Z dominates (or there is a tie)
    return new Vector(0, 0, Math.signum(dz));
}
/**
 * Rebuild the per-environment max build height table from configuration.
 * Keys are World.Environment names (case-insensitive); invalid keys are
 * logged and skipped. A null config simply clears all overrides.
 */
@Override
public void configureMaxHeights(ConfigurationSection config) {
    maxHeights.clear();
    if (config == null) return;
    Collection<String> keys = config.getKeys(false);
    for (String key : keys) {
        try {
            World.Environment worldType = World.Environment.valueOf(key.toUpperCase());
            maxHeights.put(worldType, config.getInt(key));
        } catch (Exception ex) {
            platform.getLogger().log(Level.WARNING, "Invalid environment type: " + key, ex);
        }
    }
}
/**
 * Get the minimum build height for a world.
 * NOTE(review): assumes every "current version" world has min height -64
 * and every older version uses 0 - confirm against the versions this base
 * class actually serves; subclasses may override with a real lookup.
 */
@Override
public int getMinHeight(World world) {
    if (!platform.isCurrentVersion()) {
        return 0;
    }
    return -64;
}
/**
 * Get the max build height for a world, preferring the configured
 * per-environment override and falling back to the world's own value.
 */
@Override
public int getMaxHeight(World world) {
    Integer override = maxHeights.get(world.getEnvironment());
    return override != null ? override : world.getMaxHeight();
}
/** Maximum range, in blocks, used for nearby-entity searches. */
@Override
public int getMaxEntityRange() {
    return MAX_ENTITY_RANGE;
}
/** Load tunables from plugin configuration; currently only use_magic_damage. */
@Override
public void load(ConfigurationSection properties) {
    USE_MAGIC_DAMAGE = properties.getBoolean("use_magic_damage", USE_MAGIC_DAMAGE);
}
// Taken from CraftBukkit code.
/**
 * Convert a Bukkit Attribute enum name to the Minecraft attribute key,
 * e.g. GENERIC_ATTACK_DAMAGE -> "generic.attackDamage": the first
 * underscore becomes a dot, the second is removed and the following
 * letter is restored to upper case (camelCase).
 */
protected String toMinecraftAttribute(Attribute attribute) {
    String bukkit = attribute.name();
    int first = bukkit.indexOf('_');
    int second = bukkit.indexOf('_', first + 1);
    StringBuilder sb = new StringBuilder(bukkit.toLowerCase(java.util.Locale.ENGLISH));
    sb.setCharAt(first, '.');
    if (second != -1) {
        // Drop the second underscore, then re-capitalize the letter after it
        // (indices shift left by one after the delete)
        sb.deleteCharAt(second);
        sb.setCharAt(second, bukkit.charAt(second + 1));
    }
    return sb.toString();
}
/** Add an item attribute modifier with a freshly generated random UUID. */
@Override
public boolean setItemAttribute(ItemStack item, Attribute attribute, double value, String slot, int attributeOperation) {
    return setItemAttribute(item, attribute, value, slot, attributeOperation, UUID.randomUUID());
}
/**
 * Copy an item's BlockEntityTag NBT (if present) onto the given placed
 * block. Failures are logged and swallowed (best-effort).
 */
@Override
public void applyItemData(ItemStack item, Block block) {
    try {
        Object entityDataTag = platform.getNBTUtils().getTag(item, "BlockEntityTag");
        if (entityDataTag == null) return;
        setTileEntityData(block.getLocation(), entityDataTag);
    } catch (Exception ex) {
        // Route through the plugin logger instead of dumping to stderr,
        // consistent with the rest of this class
        platform.getLogger().log(Level.WARNING, "Failed to apply item data to block", ex);
    }
}
/**
 * Derive a pseudo-unique entity id from block coordinates, for use in
 * block-break animation packets (12 bits X, 12 bits Z, 8 bits Y).
 */
private int getBlockEntityId(Block block) {
    // There will be some overlap here, but these effects are very localized so it should be OK.
    return ((block.getX() & 0xFFF) << 20)
        | ((block.getZ() & 0xFFF) << 8)
        | (block.getY() & 0xFF);
}
/** Clear the break animation by sending a break state outside the valid 0-9 range. */
@Override
public void clearBreaking(Block block) {
    setBreaking(block, 10, BLOCK_BREAK_RANGE);
}
/** Show a break animation at the given progress percentage (0.0 - 1.0). */
@Override
public void setBreaking(Block block, double percentage) {
    // Block break states are 0 - 9
    int breakState = (int)Math.ceil(9 * percentage);
    setBreaking(block, breakState, BLOCK_BREAK_RANGE);
}
/** Send a break state to all players within the default broadcast range. */
@Override
public void setBreaking(Block block, int breakAmount) {
    setBreaking(block, breakAmount, BLOCK_BREAK_RANGE);
}
/**
 * Broadcast a block-break animation state to every online player in the
 * same world within the given range of the block.
 */
@Override
public void setBreaking(Block block, int breakAmount, int range) {
    final String worldName = block.getWorld().getName();
    final Location location = block.getLocation();
    final int rangeSquared = range * range;
    for (Player player : Bukkit.getOnlinePlayers()) {
        boolean sameWorld = player.getWorld().getName().equals(worldName);
        // Only compute the distance for players in the same world
        if (sameWorld && player.getLocation().distanceSquared(location) <= rangeSquared) {
            sendBreaking(player, getBlockEntityId(block), location, breakAmount);
        }
    }
}
/** Fast block set by Block reference; delegates to the chunk/coordinate variant. */
@Override
public boolean setBlockFast(Block block, Material material, int data) {
    return setBlockFast(block.getChunk(), block.getX(), block.getY(), block.getZ(), material, data);
}
/**
 * Resolve a legacy material id + data pair to a modern Material,
 * falling back to AIR when no mapping exists.
 */
@Override
@SuppressWarnings("deprecation")
public Material getMaterial(int id, byte data) {
    Material material = getMaterial(id);
    if (material != null) {
        // May return null if the legacy pair has no modern equivalent
        material = fromLegacy(new org.bukkit.material.MaterialData(material, data));
    }
    if (material == null) {
        material = Material.AIR;
    }
    return material;
}
/**
 * Resolve a legacy numeric material id to a Material, building the
 * id lookup table lazily on first use.
 * NOTE(review): the lazy init is not thread-safe; presumably only
 * called from the server main thread - confirm.
 */
@Override
@SuppressWarnings("deprecation")
public Material getMaterial(int id) {
    if (materialIdMap == null) {
        materialIdMap = new HashMap<>();
        Object[] allMaterials = Material.AIR.getDeclaringClass().getEnumConstants();
        for (Object o : allMaterials) {
            Material material = (Material)o;
            // Only legacy materials carry meaningful numeric ids on modern servers
            if (!hasLegacyMaterials() || isLegacy(material)) {
                materialIdMap.put(material.getId(), material);
            }
        }
    }
    return materialIdMap.get(id);
}
/**
 * Parse a block-data string such as "minecraft:stone[...]" into its
 * Material. Returns null for non-minecraft namespaces or unknown names.
 */
@Override
public Material getMaterial(String blockData) {
    // Strip any "[...]" block-state suffix first
    String[] pieces = StringUtils.split(blockData, "[", 2);
    if (pieces.length == 0) return null;
    // Then split off an optional namespace prefix
    pieces = StringUtils.split(pieces[0], ":", 2);
    if (pieces.length == 0) return null;
    String materialKey = "";
    if (pieces.length == 2) {
        if (!pieces[0].equals("minecraft")) return null;
        materialKey = pieces[1];
    } else {
        materialKey = pieces[0];
    }
    try {
        return Material.valueOf(materialKey.toUpperCase());
    } catch (Exception ignore) {
        // Unknown material name
    }
    return null;
}
/** Map a legacy Material + data value to its modern equivalent. */
@Override
@SuppressWarnings("deprecation")
public Material migrateMaterial(Material material, byte data) {
    return fromLegacy(new org.bukkit.material.MaterialData(material, data));
}
/**
 * Migrate a legacy "name[:data]" material key string to its modern
 * lower-cased material name. Non-numeric data suffixes (e.g. skull
 * owner text) are preserved on the result; numeric data is folded into
 * the legacy-to-modern conversion.
 */
@Override
@SuppressWarnings("deprecation")
public String migrateMaterial(String materialKey) {
    if (materialKey == null || materialKey.isEmpty()) return materialKey;
    byte data = 0;
    String[] pieces = StringUtils.split(materialKey, ':');
    String textData = "";
    if (pieces.length > 1) {
        textData = pieces[1];
        try {
            data = Byte.parseByte(pieces[1]);
            // Suffix was numeric legacy data, not text - don't re-append it
            textData = "";
        } catch (Exception ignore) {
        }
    }
    String materialName = pieces[0].toUpperCase();
    Material material = Material.getMaterial(materialName);
    if (material != null && data == 0) {
        // Already a valid modern material with no data to migrate
        return material.name().toLowerCase();
    }
    // Fall back to the legacy material table when unresolved or data != 0
    Material legacyMaterial = data == 0 ? getLegacyMaterial(materialName) : Material.getMaterial("LEGACY_" + materialName);
    if (legacyMaterial != null) {
        org.bukkit.material.MaterialData materialData = new org.bukkit.material.MaterialData(legacyMaterial, data);
        legacyMaterial = fromLegacy(materialData);
        if (legacyMaterial != null) {
            material = legacyMaterial;
        }
    }
    if (material != null) {
        materialKey = material.name().toLowerCase();
        // This mainly covers player skulls, but .. maybe other things? Maps?
        if (!textData.isEmpty()) {
            materialKey += ":" + textData;
        }
    }
    return materialKey;
}
/** True when the chunk containing this block is loaded. */
@Override
public boolean isChunkLoaded(Block block) {
    Location location = block.getLocation();
    return isChunkLoaded(location);
}
/** True when the chunk containing this location is loaded. */
@Override
public boolean isChunkLoaded(Location location) {
    World world = location.getWorld();
    // Block coordinate >> 4 gives the chunk coordinate
    return world.isChunkLoaded(location.getBlockX() >> 4, location.getBlockZ() >> 4);
}
/** Check/queue-load the chunk at this location, generating it if missing. */
@Override
public boolean checkChunk(Location location) {
    return checkChunk(location, true);
}
/**
 * Take care if setting generate to false, the chunk will load but not show as loaded
 *
 * @return true when the chunk is loaded and ready; false when a load was queued.
 */
@Override
public boolean checkChunk(Location location, boolean generate) {
    int chunkX = location.getBlockX() >> 4;
    int chunkZ = location.getBlockZ() >> 4;
    World world = location.getWorld();
    return checkChunk(world, chunkX, chunkZ, generate);
}
/** Check/queue-load the chunk at these chunk coordinates, generating if missing. */
@Override
public boolean checkChunk(World world, int chunkX, int chunkZ) {
    return checkChunk(world, chunkX, chunkZ, true);
}
/**
 * Take care if setting generate to false, the chunk will load but not show as loaded
 *
 * @return true when the chunk is already loaded and ready;
 *         false when a (possibly asynchronous) load was triggered instead.
 */
@Override
public boolean checkChunk(World world, int chunkX, int chunkZ, boolean generate) {
    if (!world.isChunkLoaded(chunkX, chunkZ)) {
        loadChunk(world, chunkX, chunkZ, generate);
        return false;
    }
    return isReady(world.getChunkAt(chunkX, chunkZ));
}
/** True when this block's data string marks it as the top half (e.g. slabs/doors). */
@Override
public boolean isTopBlock(Block block) {
    // Yes this is an ugly way to do it.
    String blockData = getBlockData(block);
    return blockData != null && blockData.contains("type=top");
}
/**
 * Create a knowledge book item, or return null on server versions
 * that do not have the KNOWLEDGE_BOOK material.
 */
@Override
public ItemStack getKnowledgeBook() {
    try {
        return new ItemStack(Material.valueOf("KNOWLEDGE_BOOK"));
    } catch (Exception ignore) {
        // Material missing on this version
        return null;
    }
}
/** Resolve a projectile to its shooter entity when it has one; otherwise return the input. */
@Override
public Entity getSource(Entity entity) {
    if (!(entity instanceof Projectile)) {
        return entity;
    }
    ProjectileSource shooter = ((Projectile) entity).getShooter();
    return (shooter instanceof Entity) ? (Entity) shooter : entity;
}
/**
 * Return the horizontal face 90 degrees counter-clockwise from the given
 * face; throws IllegalStateException for non-cardinal faces.
 */
@Override
public BlockFace getCCW(BlockFace face) {
    if (face == BlockFace.NORTH) return BlockFace.WEST;
    if (face == BlockFace.WEST) return BlockFace.SOUTH;
    if (face == BlockFace.SOUTH) return BlockFace.EAST;
    if (face == BlockFace.EAST) return BlockFace.NORTH;
    throw new IllegalStateException("Unable to get CCW facing of " + face);
}
/** Load the chunk containing this location, notifying the consumer when done. */
@Override
public void loadChunk(Location location, boolean generate, Consumer<Chunk> consumer) {
    loadChunk(location.getWorld(), location.getBlockX() >> 4, location.getBlockZ() >> 4, generate, consumer);
}
/** Load a chunk by coordinates without a completion callback. */
@Override
public void loadChunk(World world, int x, int z, boolean generate) {
    loadChunk(world, x, z, generate, null);
}
/**
 * This will load chunks asynchronously if possible.
 *
 * <p>But note that it will never be truly asynchronous, it is important not to call this in a tight retry loop,
 * the main server thread needs to free up to actually process the async chunk loads.
 */
@Override
public void loadChunk(World world, int x, int z, boolean generate, Consumer<Chunk> consumer) {
    PaperUtils paperUtils = platform.getPaperUtils();
    if (paperUtils == null) {
        // No Paper API available: load synchronously
        Chunk chunk = world.getChunkAt(x, z);
        chunk.load();
        if (consumer != null) {
            consumer.accept(chunk);
        }
        return;
    }
    final LoadingChunk loading = new LoadingChunk(world, x, z);
    Integer requestCount = loadingChunks.get(loading);
    if (requestCount != null) {
        // A load for this chunk is already in flight; count the retry
        requestCount++;
        if (requestCount > MAX_CHUNK_LOAD_TRY) {
            // Too many retries: give up on async and block the main thread
            platform.getLogger().warning("Exceeded retry count for asynchronous chunk load, loading synchronously");
            if (!hasDumpedStack) {
                // Only dump one stack trace per server session
                hasDumpedStack = true;
                Thread.dumpStack();
            }
            Chunk chunk = world.getChunkAt(x, z);
            chunk.load();
            if (consumer != null) {
                consumer.accept(chunk);
            }
            loadingChunks.remove(loading);
            return;
        }
        loadingChunks.put(loading, requestCount);
        return;
    }
    // First request for this chunk: start the async load
    loadingChunks.put(loading, 1);
    paperUtils.loadChunk(world, x, z, generate, chunk -> {
        loadingChunks.remove(loading);
        if (consumer != null) {
            consumer.accept(chunk);
        }
    });
}
/**
 * Walk up the vehicle chain and return the bottom-most vehicle, or the
 * entity itself when it is not riding anything. Null-safe.
 */
@Override
public Entity getRootVehicle(Entity entity) {
    if (entity == null) {
        return null;
    }
    Entity current = entity;
    for (Entity vehicle = current.getVehicle(); vehicle != null; vehicle = current.getVehicle()) {
        current = vehicle;
    }
    return current;
}
/**
 * Re-attach a vehicle's passengers after it has been teleported.
 * Player passengers are re-mounted on a short delay (2 ticks) via a
 * scheduled task; other entities are teleported and re-added immediately.
 */
protected void teleportPassengers(Entity vehicle, Location location, Collection<Entity> passengers) {
    for (Entity passenger : passengers) {
        if (passenger instanceof Player) {
            TeleportPassengerTask task = new TeleportPassengerTask(this, vehicle, passenger, location);
            Plugin plugin = platform.getPlugin();
            plugin.getServer().getScheduler().runTaskLater(plugin, task, 2);
        } else {
            // TODO: If there is a player midway in a stack of mobs do the mobs need to wait... ?
            // Might have to rig up something weird to test.
            // Otherwise this seems like too complicated of an edge case to worry about
            teleportVehicle(passenger, location);
            addPassenger(vehicle, passenger);
        }
    }
}
/**
 * Teleport a vehicle entity: eject its passengers, move it, then
 * re-attach the passengers (recursively handling their own stacks).
 * If eject silently failed, skip re-attaching to avoid duplicates.
 */
@Override
public void teleportVehicle(Entity vehicle, Location location) {
    List<Entity> passengers = getPassengers(vehicle);
    vehicle.eject();
    vehicle.teleport(location);
    // eject seems to just not work sometimes? (on chunk load, maybe)
    // So let's try to avoid exponentially adding passengers.
    List<Entity> newPassengers = getPassengers(vehicle);
    if (newPassengers.isEmpty()) {
        teleportPassengers(vehicle, location, passengers);
    } else {
        platform.getLogger().warning("Entity.eject failed!");
    }
}
/**
 * Teleport an entity along with its entire vehicle/passenger stack.
 * The teleporting flag (see isTeleporting) suppresses event handling
 * while the move is in progress.
 */
@Override
public void teleportWithVehicle(Entity entity, Location location) {
    teleporting = true;
    try {
        if (entity != null && entity.isValid()) {
            final Entity vehicle = getRootVehicle(entity);
            teleportVehicle(vehicle, location);
        }
    } finally {
        // Always clear the flag, even if the teleport throws, so the
        // isTeleporting() state can never get stuck on
        teleporting = false;
    }
}
/** True while teleportWithVehicle is in progress. */
@Override
public boolean isTeleporting() {
    return teleporting;
}
/**
 * Play a record at a location; legacy servers need the numeric id
 * while modern servers accept the Material directly.
 */
@Override
public void playRecord(Location location, Material record) {
    if (platform.isLegacy()) {
        location.getWorld().playEffect(location, Effect.RECORD_PLAY,
            platform.getDeprecatedUtils().getId(record));
    } else {
        location.getWorld().playEffect(location, Effect.RECORD_PLAY, record);
    }
}
/** No-op: dismount events cannot be cancelled on this Spigot version. */
@Override
public void cancelDismount(EntityDismountEvent event) {
    // This event can't be cancelled in this version of Spigot
}
/** Translate '&amp;'-style color codes into section-sign chat colors. */
@Override
public String translateColors(String message) {
    return ChatColor.translateAlternateColorCodes('&', message);
}
/** Key for an enchantment: its (legacy) name, lower-cased. */
@Override
public String getEnchantmentKey(Enchantment enchantment) {
    return enchantment.getName().toLowerCase();
}
/** Look up an enchantment by its lower-case key (inverse of getEnchantmentKey). */
@Override
public Enchantment getEnchantmentByKey(String key) {
    return Enchantment.getByName(key.toUpperCase());
}
/**
 * Point a (legacy material-data) torch block in the given direction.
 * @return true when the block was a torch and was updated.
 */
@Override
public boolean setTorchFacingDirection(Block block, BlockFace facing) {
    BlockState state = block.getState();
    Object data = state.getData();
    if (!(data instanceof Torch)) {
        return false;
    }
    Torch torch = (Torch) data;
    torch.setFacingDirection(facing);
    state.setData(torch);
    state.update();
    return true;
}
/**
 * Tame an entity, optionally assigning an owner.
 * @return false when the entity is not tameable or is already tamed.
 */
@Override
public boolean tame(Entity entity, Player tamer) {
    if (!(entity instanceof Tameable)) {
        return false;
    }
    Tameable pet = (Tameable) entity;
    if (pet.isTamed()) {
        return false;
    }
    pet.setTamed(true);
    if (tamer != null) {
        pet.setOwner(tamer);
    }
    return true;
}
/** True for any arrow-like projectile: regular, tipped, or spectral. */
@Override
public boolean isArrow(Entity projectile) {
    if (projectile instanceof Arrow) return true;
    if (projectile instanceof TippedArrow) return true;
    return projectile instanceof SpectralArrow;
}
/** No-op on this version; the cooldown API is unavailable here. */
@Override
public void setMaterialCooldown(Player player, Material material, int duration) {
    // Not going to mess about with packets for this.
}
/** Base stub: furnace recipe creation unsupported here, returns null. */
@Override
public FurnaceRecipe createFurnaceRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage, float experience, int cookingTime) {
    return null;
}
/** Base stub: blasting recipe creation unsupported here, returns null. */
@Override
public Recipe createBlastingRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage, float experience, int cookingTime) {
    return null;
}
/** Base stub: campfire recipe creation unsupported here, returns null. */
@Override
public Recipe createCampfireRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage, float experience, int cookingTime) {
    return null;
}
/** Base stub: smoking recipe creation unsupported here, returns null. */
@Override
public Recipe createSmokingRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage, float experience, int cookingTime) {
    return null;
}
/** Base stub: stonecutting recipe creation unsupported here, returns null. */
@Override
public Recipe createStonecuttingRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage) {
    return null;
}
/** Base stub: smithing recipe creation unsupported here, returns null. */
@Override
public Recipe createSmithingRecipe(String key, ItemStack item, ItemStack source, ItemStack addition) {
    return null;
}
/** Map newer particle names to their closest equivalent on this version. */
@Override
public String convertParticle(String particle) {
    if (particle.toLowerCase().equals("dust_color_transition")) {
        return "redstone";
    }
    return particle;
}
/** Send this block's current type/data to one player as a client-side block change. */
@Override
@SuppressWarnings("deprecation")
public void sendBlockChange(Player player, Block block) {
    player.sendBlockChange(block.getLocation(), block.getType(), block.getData());
}
/**
 * Legacy fallback: sends a client-side block change with data 0.
 * NOTE(review): the blockData string is ignored here - presumably
 * modern subclasses override this to use it; confirm.
 */
@Override
@SuppressWarnings("deprecation")
public void sendBlockChange(Player player, Location location, Material material, String blockData) {
    player.sendBlockChange(location, material, (byte)0);
}
/**
 * Legacy fallback: spawn a falling block with data 0.
 * NOTE(review): blockData is ignored here - presumably overridden on
 * modern versions; confirm.
 */
@Override
@Nonnull
@SuppressWarnings("deprecation")
public FallingBlock spawnFallingBlock(Location location, Material material, String blockData) {
    return location.getWorld().spawnFallingBlock(location, material, (byte)0);
}
/**
 * Append the plain-text rendering of one parsed JSON chat component to
 * the buffer, handling color, text, keybind and (recursively) "extra"
 * child components.
 *
 * <p>Fix: the original chained the key checks as "} if (...)" (a
 * dangling statement after the color branch) instead of "else if";
 * behavior was accidental-but-equivalent since keys are distinct - the
 * chain is now explicit.
 */
@SuppressWarnings("unchecked")
protected void getSimpleMessage(Map<String,Object> mapped, StringBuilder plainMessage) {
    for (Map.Entry<String,Object> entry : mapped.entrySet()) {
        if (entry.getKey().equals("color")) {
            String colorKey = entry.getValue().toString();
            try {
                ChatColor color = ChatColor.valueOf(colorKey.toUpperCase());
                plainMessage.append(color);
            } catch (Exception ex) {
                platform.getLogger().warning("Invalid color in json message: " + colorKey);
            }
        } else if (entry.getKey().equals("text")) {
            plainMessage.append(entry.getValue());
        } else if (entry.getKey().equals("keybind")) {
            // Translate "key.jump" style keybind ids via the messages store when available
            String key = entry.getValue().toString().replace("key.", "");
            if (messages != null) {
                key = messages.get("keybind." + key, key);
            }
            plainMessage.append(key);
        } else if (entry.getKey().equals("extra")) {
            Object rawExtra = entry.getValue();
            if (rawExtra instanceof List) {
                List<Map<String, Object>> mapList = (List<Map<String, Object>>)rawExtra;
                for (Map<String, Object> child : mapList) {
                    getSimpleMessage(child, plainMessage);
                }
            }
        }
    }
}
/**
 * Flatten a backtick-delimited mix of plain text and JSON chat
 * components into one plain string. Components that fail to parse are
 * appended verbatim.
 */
protected String getSimpleMessage(String containsJson) {
    String[] components = getComponents(containsJson);
    StringBuilder plainMessage = new StringBuilder();
    for (String component : components) {
        if (component.startsWith("{")) {
            try {
                // Lenient parsing: tolerate slightly malformed JSON
                JsonReader reader = new JsonReader(new StringReader(component));
                reader.setLenient(true);
                Map<String, Object> mapped = getGson().fromJson(reader, Map.class);
                getSimpleMessage(mapped, plainMessage);
            } catch (Exception ex) {
                // Not valid JSON after all - keep the raw text
                plainMessage.append(component);
            }
        } else {
            plainMessage.append(component);
        }
    }
    return plainMessage.toString();
}
/** Split a message on the backtick delimiter used to embed JSON components. */
protected String[] getComponents(String containsJson) {
    return StringUtils.split(containsJson, "`");
}
/** Fallback: flatten embedded chat components to plain text and send that. */
@Override
public void sendChatComponents(CommandSender sender, String containsJson) {
    sender.sendMessage(getSimpleMessage(containsJson));
}
/**
 * Write a raw display name directly into the item's NBT display tag,
 * bypassing ItemMeta. Returns false if any NBT handle is unavailable.
 */
@Override
public boolean setDisplayNameRaw(ItemStack itemStack, String displayName) {
    Object handle = platform.getItemUtils().getHandle(itemStack);
    if (handle == null) return false;
    Object tag = platform.getItemUtils().getTag(handle);
    if (tag == null) return false;
    Object displayNode = platform.getNBTUtils().createTag(tag, "display");
    if (displayNode == null) return false;
    platform.getNBTUtils().setString(displayNode, "Name", displayName);
    return true;
}
/** Set an item's display name through ItemMeta. Always returns true. */
@Override
public boolean setDisplayName(ItemStack itemStack, String displayName) {
    final ItemMeta meta = itemStack.getItemMeta();
    meta.setDisplayName(displayName);
    itemStack.setItemMeta(meta);
    return true;
}
/**
 * Set item lore, first collapsing any embedded chat-component JSON
 * (lines containing "`{") to plain text.
 *
 * <p>Fix: the original mutated the caller's list in place via
 * lore.set(); the conversion now happens on a local copy.
 */
@Override
public boolean setLore(ItemStack itemStack, List<String> lore) {
    ItemMeta meta = itemStack.getItemMeta();
    // Convert chat components without mutating the caller's list
    List<String> converted = new ArrayList<>(lore.size());
    for (String line : lore) {
        converted.add(line.contains("`{") ? getSimpleMessage(line) : line);
    }
    meta.setLore(converted);
    itemStack.setItemMeta(meta);
    return true;
}
}
|
CompatibilityLib/base/src/main/java/com/elmakers/mine/bukkit/utility/platform/base/CompatibilityUtilsBase.java
|
package com.elmakers.mine.bukkit.utility.platform.base;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.lang.ref.WeakReference;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.WeakHashMap;
import java.util.function.Consumer;
import java.util.logging.Level;
import javax.annotation.Nonnull;
import org.apache.commons.lang.StringUtils;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Chunk;
import org.bukkit.Effect;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.attribute.Attribute;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.block.BlockState;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.ArmorStand;
import org.bukkit.entity.Arrow;
import org.bukkit.entity.ComplexEntityPart;
import org.bukkit.entity.Damageable;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.FallingBlock;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.entity.Projectile;
import org.bukkit.entity.SpectralArrow;
import org.bukkit.entity.Tameable;
import org.bukkit.entity.ThrownPotion;
import org.bukkit.entity.TippedArrow;
import org.bukkit.event.entity.EntityDeathEvent;
import org.bukkit.inventory.FurnaceRecipe;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.Recipe;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.material.Torch;
import org.bukkit.plugin.Plugin;
import org.bukkit.potion.PotionEffect;
import org.bukkit.projectiles.ProjectileSource;
import org.bukkit.scheduler.BukkitTask;
import org.bukkit.util.Vector;
import org.spigotmc.event.entity.EntityDismountEvent;
import com.elmakers.mine.bukkit.api.magic.Messages;
import com.elmakers.mine.bukkit.utility.EnteredStateTracker;
import com.elmakers.mine.bukkit.utility.LoadingChunk;
import com.elmakers.mine.bukkit.utility.TeleportPassengerTask;
import com.elmakers.mine.bukkit.utility.platform.CompatibilityUtils;
import com.elmakers.mine.bukkit.utility.platform.PaperUtils;
import com.elmakers.mine.bukkit.utility.platform.Platform;
import com.google.gson.Gson;
import com.google.gson.stream.JsonReader;
public abstract class CompatibilityUtilsBase implements CompatibilityUtils {
// This is really here to prevent infinite loops, but sometimes these requests legitimately come in many time
// (for instance when undoing a spell in an unloaded chunk that threw a ton of different falling blocks)
// So putting some lower number on this will trigger a lot of false-positives.
protected static final int MAX_CHUNK_LOAD_TRY = 10000;
// Maximum range, in blocks, for nearby-entity searches (see getMaxEntityRange)
protected static final int MAX_ENTITY_RANGE = 72;
// When true, ender dragons are damaged via magicDamage (see damage); configurable via load()
protected static boolean USE_MAGIC_DAMAGE = true;
// Radius, in blocks, within which break animations are broadcast
protected static int BLOCK_BREAK_RANGE = 64;
protected final UUID emptyUUID = new UUID(0L, 0L);
// Lazily created shared JSON parser (see getGson)
protected Gson gson;
protected ItemStack dummyItem;
// Ensures only one stack trace is dumped for chunk-load retry overflows
protected boolean hasDumpedStack = false;
// True while teleportWithVehicle is in progress (see isTeleporting)
protected boolean teleporting = false;
// Per-environment max build height overrides (see configureMaxHeights)
protected final Map<World.Environment, Integer> maxHeights = new HashMap<>();
// In-flight asynchronous chunk loads and their retry counts (see loadChunk)
protected final Map<LoadingChunk, Integer> loadingChunks = new HashMap<>();
// Re-entrancy tracker used by damage()/isDamaging()
protected final EnteredStateTracker isDamaging = new EnteredStateTracker();
// One cached, already-removed potion entity per world (see getOrCreatePotionEntity)
protected final Map<World, WeakReference<ThrownPotion>> worldPotions = new WeakHashMap<>();
// Lazily built legacy-id -> Material lookup (see getMaterial(int))
public Map<Integer, Material> materialIdMap;
protected final Platform platform;
private Messages messages;

protected CompatibilityUtilsBase(final Platform platform) {
    this.platform = platform;
}
/** Lazily create and cache the shared Gson instance. */
protected Gson getGson() {
    Gson parser = gson;
    if (parser == null) {
        parser = new Gson();
        gson = parser;
    }
    return parser;
}
/** True while a damage() call is in progress (re-entrancy guard). */
@Override
public boolean isDamaging() {
    return isDamaging.isInside();
}
/** Apply each of the given potion effects to the entity in turn. */
@Override
public void applyPotionEffects(LivingEntity entity, Collection<PotionEffect> effects) {
    effects.forEach(effect -> applyPotionEffect(entity, effect));
}
/**
 * Apply a potion effect unless doing so would nerf a matching effect
 * already on the entity.
 *
 * @return true when the effect was applied.
 */
@Override
public boolean applyPotionEffect(LivingEntity entity, PotionEffect effect) {
    // Avoid nerfing existing effects
    boolean applyEffect = true;
    Collection<PotionEffect> currentEffects = entity.getActivePotionEffects();
    for (PotionEffect currentEffect : currentEffects) {
        if (currentEffect.getType().equals(effect.getType())) {
            if (effect.getAmplifier() < 0) {
                // Negative amplifier: never override an active effect
                applyEffect = false;
                break;
            } else if (currentEffect.getAmplifier() > effect.getAmplifier() || effect.getDuration() > Integer.MAX_VALUE / 4) {
                // NOTE(review): the duration clause presumably treats very long
                // durations as "infinite" and declines to stack them - confirm intent
                applyEffect = false;
                break;
            }
        }
    }
    if (applyEffect) {
        entity.addPotionEffect(effect, true);
    }
    return applyEffect;
}
/**
 * Write a raw display name directly into the item's NBT display tag,
 * bypassing ItemMeta. Returns false if any NBT handle is unavailable.
 */
@Override
public boolean setDisplayNameRaw(ItemStack itemStack, String displayName) {
    Object handle = platform.getItemUtils().getHandle(itemStack);
    if (handle == null) return false;
    Object tag = platform.getItemUtils().getTag(handle);
    if (tag == null) return false;
    Object displayNode = platform.getNBTUtils().createTag(tag, "display");
    if (displayNode == null) return false;
    platform.getNBTUtils().setString(displayNode, "Name", displayName);
    return true;
}
/** Set an item's display name through ItemMeta. Always returns true. */
@Override
public boolean setDisplayName(ItemStack itemStack, String displayName) {
    final ItemMeta meta = itemStack.getItemMeta();
    meta.setDisplayName(displayName);
    itemStack.setItemMeta(meta);
    return true;
}
/** Set an item's lore lines through ItemMeta. Always returns true. */
@Override
public boolean setLore(ItemStack itemStack, List<String> lore) {
    ItemMeta meta = itemStack.getItemMeta();
    meta.setLore(lore);
    itemStack.setItemMeta(meta);
    return true;
}
/** Convenience overload: mark an entity invulnerable. */
@Override
public void setInvulnerable(Entity entity) {
    setInvulnerable(entity, true);
}
/** Create (but don't necessarily add) an armor stand at the given location. */
@Override
public ArmorStand createArmorStand(Location location) {
    return (ArmorStand)createEntity(location, EntityType.ARMOR_STAND);
}
/** Base stub: cannot resolve a task's runnable on this version, returns null. */
@Override
public Runnable getTaskRunnable(BukkitTask task) {
    return null;
}
/**
 * Damage a target entity, with special handling:
 * complex entity parts are resolved to their parent; ender dragons
 * optionally take "magic" damage (USE_MAGIC_DAMAGE); armor stands have
 * their health adjusted directly, firing an EntityDeathEvent and
 * removing them at zero health. The isDamaging tracker marks the call
 * in progress for re-entrancy checks.
 */
@Override
public void damage(Damageable target, double amount, Entity source) {
    if (target == null || target.isDead()) return;
    while (target instanceof ComplexEntityPart) {
        target = ((ComplexEntityPart) target).getParent();
    }
    if (USE_MAGIC_DAMAGE && target.getType() == EntityType.ENDER_DRAGON) {
        magicDamage(target, amount, source);
        return;
    }
    try (EnteredStateTracker.Touchable damaging = isDamaging.enter()) {
        damaging.touch();
        if (target instanceof ArmorStand) {
            double newHealth = Math.max(0, target.getHealth() - amount);
            if (newHealth <= 0) {
                EntityDeathEvent deathEvent = new EntityDeathEvent((ArmorStand) target, new ArrayList<>());
                Bukkit.getPluginManager().callEvent(deathEvent);
                target.remove();
            } else {
                target.setHealth(newHealth);
            }
        } else {
            target.damage(amount, source);
        }
    } catch (Exception ex) {
        // Route through the plugin logger instead of dumping to stderr,
        // consistent with the rest of this class
        platform.getLogger().log(Level.WARNING, "Error damaging entity", ex);
    }
}
/**
 * Lazily creates potion entities that can be used when damaging players.
 *
 * <p>One (already-removed) potion entity is cached per world, weakly, so
 * that potion.getWorld() reports the correct result for the caller.
 *
 * @param location The location the potion should be placed at.
 * @return A potion entity placed at the given location.
 */
protected ThrownPotion getOrCreatePotionEntity(Location location) {
    World world = location.getWorld();
    // Maintain a separate potion entity for every world so that
    // potion.getWorld() reports the correct result.
    WeakReference<ThrownPotion> ref = worldPotions.get(world);
    ThrownPotion potion = ref == null ? null : ref.get();
    if (potion == null) {
        potion = (ThrownPotion) world.spawnEntity(
            location,
            EntityType.SPLASH_POTION);
        // Remove immediately so the entity never exists in the world proper
        potion.remove();
        ref = new WeakReference<>(potion);
        worldPotions.put(world, ref);
    } else {
        // TODO: Make sure this actually works?
        potion.teleport(location);
    }
    return potion;
}
/** Return the eye location for living entities, or the base location for anything else. */
@Override
public Location getEyeLocation(Entity entity) {
    return (entity instanceof LivingEntity)
            ? ((LivingEntity) entity).getEyeLocation()
            : entity.getLocation();
}
/**
 * Load a YAML configuration from the given file path.
 * A missing file yields an empty configuration rather than an error.
 */
@Override
public ConfigurationSection loadConfiguration(String fileName) throws IOException, InvalidConfigurationException {
    YamlConfiguration configuration = new YamlConfiguration();
    try {
        configuration.load(fileName);
    } catch (FileNotFoundException ignore) {
        // Missing file is treated as empty config, by design
    }
    return configuration;
}
/**
 * Load a YAML configuration from a File.
 * A missing file yields an empty configuration; any other failure is
 * logged with the file path and rethrown to the caller.
 */
@Override
public ConfigurationSection loadConfiguration(File file) throws IOException, InvalidConfigurationException {
    YamlConfiguration configuration = new YamlConfiguration();
    try {
        configuration.load(file);
    } catch (FileNotFoundException ignore) {
        // Missing file is treated as empty config, by design
    } catch (Throwable ex) {
        // Add file context before rethrowing; the exception itself propagates
        platform.getLogger().log(Level.SEVERE, "Error reading configuration file '" + file.getAbsolutePath() + "'");
        throw ex;
    }
    return configuration;
}
/**
 * Load a YAML configuration from an InputStream (UTF-8).
 * Logs an error and returns an empty configuration when the stream is
 * null (i.e. the builtin resource was not found).
 */
@Override
public YamlConfiguration loadConfiguration(InputStream stream, String fileName) throws IOException, InvalidConfigurationException {
    YamlConfiguration configuration = new YamlConfiguration();
    if (stream == null) {
        platform.getLogger().log(Level.SEVERE, "Could not find builtin configuration file '" + fileName + "'");
        return configuration;
    }
    try {
        // Charset constant avoids the checked UnsupportedEncodingException
        // path of the String-based InputStreamReader constructor
        configuration.load(new InputStreamReader(stream, StandardCharsets.UTF_8));
    } catch (FileNotFoundException ignore) {
        // Missing file is treated as empty config, by design
    }
    return configuration;
}
/** Load one of the plugin's bundled resource configurations by name. */
@Override
public YamlConfiguration loadBuiltinConfiguration(String fileName) throws IOException, InvalidConfigurationException {
    InputStream resource = platform.getPlugin().getResource(fileName);
    return loadConfiguration(resource, fileName);
}
/**
 * Convert a cardinal BlockFace to its legacy integer facing value:
 * SOUTH=0, WEST=1, NORTH=2, EAST=3. Any other face maps to 0.
 */
@Override
public int getFacing(BlockFace direction) {
    switch (direction) {
        case WEST:
            return 1;
        case NORTH:
            return 2;
        case EAST:
            return 3;
        case SOUTH:
        default:
            return 0;
    }
}
/** Convert a ConfigurationSection to a plain Map; delegates to getTypedMap. */
@Override
public Map<String, Object> getMap(ConfigurationSection section) {
    return getTypedMap(section);
}
/**
 * Compute the axis-aligned face normal of a block at a ray intersection
 * point, by taking the dominant component of the vector from the block
 * center to the intersection.
 */
@Override
public Vector getNormal(Block block, Location intersection) {
    double x = intersection.getX() - (block.getX() + 0.5);
    double y = intersection.getY() - (block.getY() + 0.5);
    double z = intersection.getZ() - (block.getZ() + 0.5);
    double ax = Math.abs(x);
    double ay = Math.abs(y);
    double az = Math.abs(z);
    if (ax > ay && ax > az) {
        return new Vector(Math.signum(x), 0, 0);
    } else if (ay > ax && ay > az) {
        return new Vector(0, Math.signum(y), 0);
    }
    // Z dominates (or there is a tie)
    return new Vector(0, 0, Math.signum(z));
}
/**
 * Rebuild the per-environment max build height table from configuration.
 * Keys are World.Environment names (case-insensitive); invalid keys are
 * logged and skipped. A null config simply clears all overrides.
 */
@Override
public void configureMaxHeights(ConfigurationSection config) {
    maxHeights.clear();
    if (config == null) return;
    Collection<String> keys = config.getKeys(false);
    for (String key : keys) {
        try {
            World.Environment worldType = World.Environment.valueOf(key.toUpperCase());
            maxHeights.put(worldType, config.getInt(key));
        } catch (Exception ex) {
            platform.getLogger().log(Level.WARNING, "Invalid environment type: " + key, ex);
        }
    }
}
/**
 * Get the minimum build height for a world.
 * NOTE(review): assumes every "current version" world has min height -64
 * and every older version uses 0 - confirm against the versions this base
 * class actually serves; subclasses may override with a real lookup.
 */
@Override
public int getMinHeight(World world) {
    if (!platform.isCurrentVersion()) {
        return 0;
    }
    return -64;
}
/**
 * Get the max build height for a world, preferring the configured
 * per-environment override and falling back to the world's own value.
 */
@Override
public int getMaxHeight(World world) {
    Integer override = maxHeights.get(world.getEnvironment());
    return override != null ? override : world.getMaxHeight();
}
/** Maximum range, in blocks, used for nearby-entity searches. */
@Override
public int getMaxEntityRange() {
    return MAX_ENTITY_RANGE;
}
/** Load tunables from plugin configuration; currently only use_magic_damage. */
@Override
public void load(ConfigurationSection properties) {
    USE_MAGIC_DAMAGE = properties.getBoolean("use_magic_damage", USE_MAGIC_DAMAGE);
}
// Taken from CraftBukkit code.
/**
 * Convert a Bukkit Attribute enum name to the Minecraft attribute key,
 * e.g. GENERIC_ATTACK_DAMAGE -> "generic.attackDamage": the first
 * underscore becomes a dot, the second is removed and the following
 * letter is restored to upper case (camelCase).
 */
protected String toMinecraftAttribute(Attribute attribute) {
    String bukkit = attribute.name();
    int first = bukkit.indexOf('_');
    int second = bukkit.indexOf('_', first + 1);
    StringBuilder sb = new StringBuilder(bukkit.toLowerCase(java.util.Locale.ENGLISH));
    sb.setCharAt(first, '.');
    if (second != -1) {
        // Drop the second underscore, then re-capitalize the letter after it
        // (indices shift left by one after the delete)
        sb.deleteCharAt(second);
        sb.setCharAt(second, bukkit.charAt(second + 1));
    }
    return sb.toString();
}
/** Add an item attribute modifier with a freshly generated random UUID. */
@Override
public boolean setItemAttribute(ItemStack item, Attribute attribute, double value, String slot, int attributeOperation) {
    return setItemAttribute(item, attribute, value, slot, attributeOperation, UUID.randomUUID());
}
/**
 * Copy an item's BlockEntityTag NBT (if present) onto the given placed
 * block. Failures are logged and swallowed (best-effort).
 */
@Override
public void applyItemData(ItemStack item, Block block) {
    try {
        Object entityDataTag = platform.getNBTUtils().getTag(item, "BlockEntityTag");
        if (entityDataTag == null) return;
        setTileEntityData(block.getLocation(), entityDataTag);
    } catch (Exception ex) {
        // Route through the plugin logger instead of dumping to stderr,
        // consistent with the rest of this class
        platform.getLogger().log(Level.WARNING, "Failed to apply item data to block", ex);
    }
}
/**
 * Derive a pseudo-unique entity id from block coordinates, for use in
 * block-break animation packets (12 bits X, 12 bits Z, 8 bits Y).
 */
private int getBlockEntityId(Block block) {
    // There will be some overlap here, but these effects are very localized so it should be OK.
    return ((block.getX() & 0xFFF) << 20)
        | ((block.getZ() & 0xFFF) << 8)
        | (block.getY() & 0xFF);
}
/** Clear the break animation by sending a break state outside the valid 0-9 range. */
@Override
public void clearBreaking(Block block) {
    setBreaking(block, 10, BLOCK_BREAK_RANGE);
}
/** Show a break animation at the given progress percentage (0.0 - 1.0). */
@Override
public void setBreaking(Block block, double percentage) {
    // Block break states are 0 - 9
    int breakState = (int)Math.ceil(9 * percentage);
    setBreaking(block, breakState, BLOCK_BREAK_RANGE);
}
/** Send a break state to all players within the default broadcast range. */
@Override
public void setBreaking(Block block, int breakAmount) {
    setBreaking(block, breakAmount, BLOCK_BREAK_RANGE);
}
/**
 * Broadcast a block-break animation state to every online player in the
 * same world within the given range of the block.
 */
@Override
public void setBreaking(Block block, int breakAmount, int range) {
    final String targetWorld = block.getWorld().getName();
    final Location blockLocation = block.getLocation();
    final int maxDistanceSquared = range * range;
    for (Player online : Bukkit.getOnlinePlayers()) {
        // Only compute the distance for players in the same world
        if (online.getWorld().getName().equals(targetWorld)
                && online.getLocation().distanceSquared(blockLocation) <= maxDistanceSquared) {
            sendBreaking(online, getBlockEntityId(block), blockLocation, breakAmount);
        }
    }
}
    @Override
    public boolean setBlockFast(Block block, Material material, int data) {
        // Convenience overload: resolves the chunk and coordinates from the block itself.
        return setBlockFast(block.getChunk(), block.getX(), block.getY(), block.getZ(), material, data);
    }
    @Override
    @SuppressWarnings("deprecation")
    public Material getMaterial(int id, byte data) {
        // Resolve the legacy numeric id, then map (material, data) through the legacy
        // conversion; falls back to AIR when nothing matches.
        Material material = getMaterial(id);
        if (material != null) {
            material = fromLegacy(new org.bukkit.material.MaterialData(material, data));
        }
        if (material == null) {
            material = Material.AIR;
        }
        return material;
    }
    @Override
    @SuppressWarnings("deprecation")
    public Material getMaterial(int id) {
        // Lazily builds a numeric-id -> Material lookup on first use. On servers with
        // legacy materials, only legacy entries are indexed (modern ones have no stable id).
        // NOTE(review): the lazy init is not thread-safe — confirm this is only called
        // from the main server thread.
        if (materialIdMap == null) {
            materialIdMap = new HashMap<>();
            Object[] allMaterials = Material.AIR.getDeclaringClass().getEnumConstants();
            for (Object o : allMaterials) {
                Material material = (Material)o;
                if (!hasLegacyMaterials() || isLegacy(material)) {
                    materialIdMap.put(material.getId(), material);
                }
            }
        }
        return materialIdMap.get(id);
    }
@Override
public Material getMaterial(String blockData) {
String[] pieces = StringUtils.split(blockData, "[", 2);
if (pieces.length == 0) return null;
pieces = StringUtils.split(pieces[0], ":", 2);
if (pieces.length == 0) return null;
String materialKey = "";
if (pieces.length == 2) {
if (!pieces[0].equals("minecraft")) return null;
materialKey = pieces[1];
} else {
materialKey = pieces[0];
}
try {
return Material.valueOf(materialKey.toUpperCase());
} catch (Exception ignore) {
}
return null;
}
    @Override
    @SuppressWarnings("deprecation")
    public Material migrateMaterial(Material material, byte data) {
        // Maps a pre-1.13 (material, data) pair to its modern flattened Material.
        return fromLegacy(new org.bukkit.material.MaterialData(material, data));
    }
    @Override
    @SuppressWarnings("deprecation")
    public String migrateMaterial(String materialKey) {
        // Migrates a "name" or "name:data" material key string to the modern key.
        // Non-numeric suffixes (e.g. skull owner names) are preserved and re-appended.
        if (materialKey == null || materialKey.isEmpty()) return materialKey;
        byte data = 0;
        String[] pieces = StringUtils.split(materialKey, ':');
        String textData = "";
        if (pieces.length > 1) {
            textData = pieces[1];
            try {
                // Numeric suffix: treat it as legacy data and drop it from the output key.
                data = Byte.parseByte(pieces[1]);
                textData = "";
            } catch (Exception ignore) {
                // Not a number — keep it as trailing text data.
            }
        }
        String materialName = pieces[0].toUpperCase();
        Material material = Material.getMaterial(materialName);
        // A direct match with no data value needs no migration.
        if (material != null && data == 0) {
            return material.name().toLowerCase();
        }
        // Otherwise resolve through the LEGACY_ namespace and flatten with the data value.
        Material legacyMaterial = data == 0 ? getLegacyMaterial(materialName) : Material.getMaterial("LEGACY_" + materialName);
        if (legacyMaterial != null) {
            org.bukkit.material.MaterialData materialData = new org.bukkit.material.MaterialData(legacyMaterial, data);
            legacyMaterial = fromLegacy(materialData);
            if (legacyMaterial != null) {
                material = legacyMaterial;
            }
        }
        if (material != null) {
            materialKey = material.name().toLowerCase();
            // This mainly covers player skulls, but .. maybe other things? Maps?
            if (!textData.isEmpty()) {
                materialKey += ":" + textData;
            }
        }
        // If nothing resolved, the original key is returned unchanged.
        return materialKey;
    }
    @Override
    public boolean isChunkLoaded(Block block) {
        // Convenience overload delegating to the Location variant.
        return isChunkLoaded(block.getLocation());
    }
    @Override
    public boolean isChunkLoaded(Location location) {
        // Chunk coordinates are block coordinates divided by 16 (arithmetic shift by 4).
        int chunkX = location.getBlockX() >> 4;
        int chunkZ = location.getBlockZ() >> 4;
        World world = location.getWorld();
        return world.isChunkLoaded(chunkX, chunkZ);
    }
    @Override
    public boolean checkChunk(Location location) {
        // Default: allow chunk generation if the chunk does not exist yet.
        return checkChunk(location, true);
    }
    /**
     * Take care if setting generate to false, the chunk will load but not show as loaded
     */
    @Override
    public boolean checkChunk(Location location, boolean generate) {
        int chunkX = location.getBlockX() >> 4;
        int chunkZ = location.getBlockZ() >> 4;
        World world = location.getWorld();
        return checkChunk(world, chunkX, chunkZ, generate);
    }
    @Override
    public boolean checkChunk(World world, int chunkX, int chunkZ) {
        return checkChunk(world, chunkX, chunkZ, true);
    }
    /**
     * Take care if setting generate to false, the chunk will load but not show as loaded
     */
    @Override
    public boolean checkChunk(World world, int chunkX, int chunkZ, boolean generate) {
        // Returns true only when the chunk is already loaded and ready; otherwise kicks
        // off a (possibly asynchronous) load and reports false so the caller can retry.
        if (!world.isChunkLoaded(chunkX, chunkZ)) {
            loadChunk(world, chunkX, chunkZ, generate);
            return false;
        }
        return isReady(world.getChunkAt(chunkX, chunkZ));
    }
@Override
public boolean isTopBlock(Block block) {
// Yes this is an ugly way to do it.
String blockData = getBlockData(block);
return blockData != null && blockData.contains("type=top");
}
@Override
public ItemStack getKnowledgeBook() {
ItemStack book = null;
try {
Material bookMaterial = Material.valueOf("KNOWLEDGE_BOOK");
book = new ItemStack(bookMaterial);
} catch (Exception ignore) {
}
return book;
}
@Override
public Entity getSource(Entity entity) {
if (entity instanceof Projectile) {
ProjectileSource source = ((Projectile)entity).getShooter();
if (source instanceof Entity) {
entity = (Entity)source;
}
}
return entity;
}
@Override
public BlockFace getCCW(BlockFace face) {
switch (face) {
case NORTH:
return BlockFace.WEST;
case SOUTH:
return BlockFace.EAST;
case WEST:
return BlockFace.SOUTH;
case EAST:
return BlockFace.NORTH;
default:
throw new IllegalStateException("Unable to get CCW facing of " + face);
}
}
    @Override
    public void loadChunk(Location location, boolean generate, Consumer<Chunk> consumer) {
        // Convenience overload: derive chunk coordinates from the location.
        loadChunk(location.getWorld(), location.getBlockX() >> 4, location.getBlockZ() >> 4, generate, consumer);
    }
    @Override
    public void loadChunk(World world, int x, int z, boolean generate) {
        loadChunk(world, x, z, generate, null);
    }
    /**
     * This will load chunks asynchronously if possible.
     *
     * <p>But note that it will never be truly asynchronous, it is important not to call this in a tight retry loop,
     * the main server thread needs to free up to actually process the async chunk loads.
     */
    @Override
    public void loadChunk(World world, int x, int z, boolean generate, Consumer<Chunk> consumer) {
        PaperUtils paperUtils = platform.getPaperUtils();
        if (paperUtils == null) {
            // No Paper API available: fall back to a synchronous load.
            Chunk chunk = world.getChunkAt(x, z);
            chunk.load();
            if (consumer != null) {
                consumer.accept(chunk);
            }
            return;
        }
        // Track repeated requests for the same chunk so a stuck async load eventually
        // degrades to a synchronous load instead of being re-queued forever.
        final LoadingChunk loading = new LoadingChunk(world, x, z);
        Integer requestCount = loadingChunks.get(loading);
        if (requestCount != null) {
            requestCount++;
            if (requestCount > MAX_CHUNK_LOAD_TRY) {
                platform.getLogger().warning("Exceeded retry count for asynchronous chunk load, loading synchronously");
                // Dump the stack once per server session to help locate the tight caller.
                if (!hasDumpedStack) {
                    hasDumpedStack = true;
                    Thread.dumpStack();
                }
                Chunk chunk = world.getChunkAt(x, z);
                chunk.load();
                if (consumer != null) {
                    consumer.accept(chunk);
                }
                loadingChunks.remove(loading);
                return;
            }
            // Async load already in flight: just bump the retry counter.
            loadingChunks.put(loading, requestCount);
            return;
        }
        // First request for this chunk: start the async load and clear the marker on completion.
        loadingChunks.put(loading, 1);
        paperUtils.loadChunk(world, x, z, generate, chunk -> {
            loadingChunks.remove(loading);
            if (consumer != null) {
                consumer.accept(chunk);
            }
        });
    }
    @Override
    public Entity getRootVehicle(Entity entity) {
        // Walks up the passenger->vehicle chain to find the bottom-most vehicle.
        if (entity == null) {
            return null;
        }
        Entity vehicle = entity.getVehicle();
        while (vehicle != null) {
            entity = vehicle;
            vehicle = entity.getVehicle();
        }
        return entity;
    }
    // Re-attaches passengers to a vehicle after it has been teleported. Players are
    // re-mounted on a short scheduler delay (their own teleport must settle first);
    // other entities are teleported and re-attached immediately.
    protected void teleportPassengers(Entity vehicle, Location location, Collection<Entity> passengers) {
        for (Entity passenger : passengers) {
            if (passenger instanceof Player) {
                TeleportPassengerTask task = new TeleportPassengerTask(this, vehicle, passenger, location);
                Plugin plugin = platform.getPlugin();
                // Delay of 2 ticks gives the player teleport time to complete before re-mounting.
                plugin.getServer().getScheduler().runTaskLater(plugin, task, 2);
            } else {
                // TODO: If there is a player midway in a stack of mobs do the mobs need to wait... ?
                // Might have to rig up something weird to test.
                // Otherwise this seems like too complicated of an edge case to worry about
                teleportVehicle(passenger, location);
                addPassenger(vehicle, passenger);
            }
        }
    }
    @Override
    public void teleportVehicle(Entity vehicle, Location location) {
        // Detach passengers, move the vehicle, then re-attach the original passengers.
        List<Entity> passengers = getPassengers(vehicle);
        vehicle.eject();
        vehicle.teleport(location);
        // eject seems to just not work sometimes? (on chunk load, maybe)
        // So let's try to avoid exponentially adding passengers.
        List<Entity> newPassengers = getPassengers(vehicle);
        if (newPassengers.isEmpty()) {
            teleportPassengers(vehicle, location, passengers);
        } else {
            platform.getLogger().warning("Entity.eject failed!");
        }
    }
@Override
public void teleportWithVehicle(Entity entity, Location location) {
teleporting = true;
if (entity != null && entity.isValid()) {
final Entity vehicle = getRootVehicle(entity);
teleportVehicle(vehicle, location);
}
teleporting = false;
}
    @Override
    public boolean isTeleporting() {
        // True while teleportWithVehicle is moving a vehicle stack.
        return teleporting;
    }
    @Override
    public void playRecord(Location location, Material record) {
        // Legacy servers expect the numeric item id for RECORD_PLAY; modern ones take the Material.
        if (platform.isLegacy()) {
            location.getWorld().playEffect(location, Effect.RECORD_PLAY,
                    platform.getDeprecatedUtils().getId(record));
        } else {
            location.getWorld().playEffect(location, Effect.RECORD_PLAY, record);
        }
    }
    @Override
    public void cancelDismount(EntityDismountEvent event) {
        // This event can't be cancelled in this version of Spigot
    }
    @Override
    public String translateColors(String message) {
        // Converts '&' color codes to the section-sign codes the client understands.
        return ChatColor.translateAlternateColorCodes('&', message);
    }
    @Override
    public String getEnchantmentKey(Enchantment enchantment) {
        // Pre-key API: the lower-cased enchantment name doubles as its key.
        return enchantment.getName().toLowerCase();
    }
    @Override
    public Enchantment getEnchantmentByKey(String key) {
        // Inverse of getEnchantmentKey; returns null for unknown names.
        return Enchantment.getByName(key.toUpperCase());
    }
    @Override
    public boolean setTorchFacingDirection(Block block, BlockFace facing) {
        // Uses the legacy material-data API; returns false when the block is not a torch.
        BlockState state = block.getState();
        Object data = state.getData();
        if (data instanceof Torch) {
            Torch torchData = (Torch)data;
            torchData.setFacingDirection(facing);
            state.setData(torchData);
            // Push the modified state back to the world.
            state.update();
            return true;
        }
        return false;
    }
    @Override
    public boolean tame(Entity entity, Player tamer) {
        // Tames a tameable entity, optionally assigning an owner.
        // Returns false when the entity is not tameable or is already tamed.
        if (!(entity instanceof Tameable)) {
            return false;
        }
        Tameable tameable = (Tameable)entity;
        if (tameable.isTamed()) {
            return false;
        }
        tameable.setTamed(true);
        if (tamer != null) {
            tameable.setOwner(tamer);
        }
        return true;
    }
    @Override
    public boolean isArrow(Entity projectile) {
        // Covers all arrow variants available in this API level.
        return (projectile instanceof Arrow) || (projectile instanceof TippedArrow) || (projectile instanceof SpectralArrow);
    }
    @Override
    public void setMaterialCooldown(Player player, Material material, int duration) {
        // Not going to mess about with packets for this.
        // (No-op in the base implementation; newer platform versions may override.)
    }
    // The recipe factory methods below return null in this base implementation:
    // these recipe types do not exist at this API level and are provided by
    // version-specific subclasses where supported.
    @Override
    public FurnaceRecipe createFurnaceRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage, float experience, int cookingTime) {
        return null;
    }
    @Override
    public Recipe createBlastingRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage, float experience, int cookingTime) {
        return null;
    }
    @Override
    public Recipe createCampfireRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage, float experience, int cookingTime) {
        return null;
    }
    @Override
    public Recipe createSmokingRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage, float experience, int cookingTime) {
        return null;
    }
    @Override
    public Recipe createStonecuttingRecipe(String key, ItemStack item, ItemStack source, boolean ignoreDamage) {
        return null;
    }
    @Override
    public Recipe createSmithingRecipe(String key, ItemStack item, ItemStack source, ItemStack addition) {
        return null;
    }
    @Override
    public String convertParticle(String particle) {
        // Maps particle names introduced in newer versions to their closest
        // equivalent on this version; unknown names pass through unchanged.
        switch (particle.toLowerCase()) {
            case "dust_color_transition":
                return "redstone";
        }
        return particle;
    }
    @Override
    @SuppressWarnings("deprecation")
    public void sendBlockChange(Player player, Block block) {
        // Sends the block's current state to one client only (no world change).
        player.sendBlockChange(block.getLocation(), block.getType(), block.getData());
    }
    @Override
    @SuppressWarnings("deprecation")
    public void sendBlockChange(Player player, Location location, Material material, String blockData) {
        // blockData strings are not supported at this API level; the data byte is
        // always sent as 0.
        player.sendBlockChange(location, material, (byte)0);
    }
    @Override
    @Nonnull
    @SuppressWarnings("deprecation")
    public FallingBlock spawnFallingBlock(Location location, Material material, String blockData) {
        // blockData strings are not supported at this API level; spawns with data byte 0.
        return location.getWorld().spawnFallingBlock(location, material, (byte)0);
    }
protected void getSimpleMessage(Map<String,Object> mapped, StringBuilder plainMessage) {
for (Map.Entry<String,Object> entry : mapped.entrySet()) {
if (entry.getKey().equals("color")) {
String colorKey = entry.getValue().toString();
try {
ChatColor color = ChatColor.valueOf(colorKey.toUpperCase());
plainMessage.append(color);
} catch (Exception ex) {
platform.getLogger().warning("Invalid color in json message: " + colorKey);
}
} if (entry.getKey().equals("text")) {
plainMessage.append(entry.getValue());
} else if (entry.getKey().equals("keybind")) {
String key = entry.getValue().toString().replace("key.", "");
if (messages != null) {
key = messages.get("keybind." + key, key);
}
plainMessage.append(key);
} else if (entry.getKey().equals("extra")) {
Object rawExtra = entry.getValue();
if (rawExtra instanceof List) {
List<Map<String, Object>> mapList = (List<Map<String, Object>>)rawExtra;
for (Map<String, Object> child : mapList) {
getSimpleMessage(child, plainMessage);
}
}
}
}
}
    // Splits a mixed plain-text / backtick-delimited-JSON message into its components.
    protected String[] getComponents(String containsJson) {
        return StringUtils.split(containsJson, "`");
    }
    @Override
    public void sendChatComponents(CommandSender sender, String containsJson) {
        // Fallback for API levels without native chat-component support: JSON
        // components are flattened to plain colored text before sending.
        String[] components = getComponents(containsJson);
        StringBuilder plainMessage = new StringBuilder();
        for (String component : components) {
            if (component.startsWith("{")) {
                try {
                    // Lenient parsing tolerates slightly malformed component JSON.
                    JsonReader reader = new JsonReader(new StringReader(component));
                    reader.setLenient(true);
                    Map<String, Object> mapped = getGson().fromJson(reader, Map.class);
                    getSimpleMessage(mapped, plainMessage);
                } catch (Exception ex) {
                    // Unparseable JSON: append the raw text rather than dropping it.
                    plainMessage.append(component);
                }
            } else {
                plainMessage.append(component);
            }
        }
        sender.sendMessage(plainMessage.toString());
    }
    @Override
    public void setMessages(Messages messages) {
        // Localization lookup used for keybind resolution in getSimpleMessage.
        this.messages = messages;
    }
}
|
Convert chat components to simple text when setting item lore
|
CompatibilityLib/base/src/main/java/com/elmakers/mine/bukkit/utility/platform/base/CompatibilityUtilsBase.java
|
Convert chat components to simple text when setting item lore
|
<ide><path>ompatibilityLib/base/src/main/java/com/elmakers/mine/bukkit/utility/platform/base/CompatibilityUtilsBase.java
<ide> }
<ide>
<ide> @Override
<add> public void setMessages(Messages messages) {
<add> this.messages = messages;
<add> }
<add>
<add> @Override
<ide> public boolean isDamaging() {
<ide> return isDamaging.isInside();
<ide> }
<ide> entity.addPotionEffect(effect, true);
<ide> }
<ide> return applyEffect;
<del> }
<del>
<del> @Override
<del> public boolean setDisplayNameRaw(ItemStack itemStack, String displayName) {
<del> Object handle = platform.getItemUtils().getHandle(itemStack);
<del> if (handle == null) return false;
<del> Object tag = platform.getItemUtils().getTag(handle);
<del> if (tag == null) return false;
<del>
<del> Object displayNode = platform.getNBTUtils().createTag(tag, "display");
<del> if (displayNode == null) return false;
<del> platform.getNBTUtils().setString(displayNode, "Name", displayName);
<del> return true;
<del> }
<del>
<del> @Override
<del> public boolean setDisplayName(ItemStack itemStack, String displayName) {
<del> ItemMeta meta = itemStack.getItemMeta();
<del> meta.setDisplayName(displayName);
<del> itemStack.setItemMeta(meta);
<del> return true;
<del> }
<del>
<del> @Override
<del> public boolean setLore(ItemStack itemStack, List<String> lore) {
<del> ItemMeta meta = itemStack.getItemMeta();
<del> meta.setLore(lore);
<del> itemStack.setItemMeta(meta);
<del> return true;
<ide> }
<ide>
<ide> @Override
<ide> }
<ide> }
<ide>
<del> protected String[] getComponents(String containsJson) {
<del> return StringUtils.split(containsJson, "`");
<del> }
<del>
<del> @Override
<del> public void sendChatComponents(CommandSender sender, String containsJson) {
<add> protected String getSimpleMessage(String containsJson) {
<ide> String[] components = getComponents(containsJson);
<ide> StringBuilder plainMessage = new StringBuilder();
<ide> for (String component : components) {
<ide> plainMessage.append(component);
<ide> }
<ide> }
<del> sender.sendMessage(plainMessage.toString());
<del> }
<del>
<del> @Override
<del> public void setMessages(Messages messages) {
<del> this.messages = messages;
<add> return plainMessage.toString();
<add> }
<add>
<add> protected String[] getComponents(String containsJson) {
<add> return StringUtils.split(containsJson, "`");
<add> }
<add>
<add> @Override
<add> public void sendChatComponents(CommandSender sender, String containsJson) {
<add> sender.sendMessage(getSimpleMessage(containsJson));
<add> }
<add>
<add> @Override
<add> public boolean setDisplayNameRaw(ItemStack itemStack, String displayName) {
<add> Object handle = platform.getItemUtils().getHandle(itemStack);
<add> if (handle == null) return false;
<add> Object tag = platform.getItemUtils().getTag(handle);
<add> if (tag == null) return false;
<add>
<add> Object displayNode = platform.getNBTUtils().createTag(tag, "display");
<add> if (displayNode == null) return false;
<add> platform.getNBTUtils().setString(displayNode, "Name", displayName);
<add> return true;
<add> }
<add>
<add> @Override
<add> public boolean setDisplayName(ItemStack itemStack, String displayName) {
<add> ItemMeta meta = itemStack.getItemMeta();
<add> meta.setDisplayName(displayName);
<add> itemStack.setItemMeta(meta);
<add> return true;
<add> }
<add>
<add> @Override
<add> public boolean setLore(ItemStack itemStack, List<String> lore) {
<add> ItemMeta meta = itemStack.getItemMeta();
<add> // Convert chat components
<add> for (int i = 0; i < lore.size(); i++) {
<add> String line = lore.get(i);
<add> if (line.contains("`{")) {
<add> lore.set(i, getSimpleMessage(line));
<add> }
<add> }
<add> meta.setLore(lore);
<add> itemStack.setItemMeta(meta);
<add> return true;
<ide> }
<ide> }
|
|
JavaScript
|
mit
|
45069b103bac6be2d12aea1fdb871f30734ed577
| 0 |
heyjiawei/Curriculum-Vitae-Analyzer,heyjiawei/Curriculum-Vitae-Analyzer
|
'use strict';
// AngularJS factory that splits raw PDF text (an array of lines) from a CV/resume
// into named sections using keyword matching plus a heading-likelihood heuristic.
angular.module('myApp.factories')
  .factory('cvTokenizer', function() {
    // Keyword lists used to recognize each section heading (matched case-insensitively).
    var summaryKeywords = ["summary", "introduction"];
    var skillKeywords = ["skills & expertise", "skill set", "skillset", "preferredSkills"];
    var experienceKeywords = ["experience", "employment", "work", "history"];
    var projectKeywords = ["projects"];
    var educationKeywords = ["education", "educational"];
    var languageKeywords = ["languages"];
    var interestKeywords = ["interests"];
    var refereeKeywords = ["referees", "references", "reference"];
    // Union of all heading keywords; used to detect where the NEXT section begins.
    var allHeadingKeywords = [].concat(summaryKeywords, skillKeywords, experienceKeywords,
      projectKeywords, educationKeywords, languageKeywords, interestKeywords, refereeKeywords);
    // Tokenizes the whole document into an object of section-name -> array of lines.
    var tokenizeCv = function(allTextFromPdf) {
      // parse name
      // name is usually either big header, or has the word "name" near it
      var nameToken = allTextFromPdf[0]; //naive, works for linkedin.
      var summaryToken = findToken(summaryKeywords, allTextFromPdf);
      var skillToken = findToken(skillKeywords, allTextFromPdf);
      var experienceToken = findToken(experienceKeywords, allTextFromPdf);
      var projectToken = findToken(projectKeywords, allTextFromPdf);
      // parse education
      var educationToken = findToken(educationKeywords, allTextFromPdf);
      var languageToken = findToken(languageKeywords, allTextFromPdf);
      var interestToken = findToken(interestKeywords, allTextFromPdf);
      var refereeToken = findToken(refereeKeywords, allTextFromPdf);
      return {
        name: nameToken,
        summary: summaryToken,
        skill: skillToken,
        experience: experienceToken,
        project: projectToken,
        education: educationToken,
        language: languageToken,
        interest: interestToken,
        referee: refereeToken
      };
    };
    // Finds the best-matching heading for `keywords` in `sourceText`, then collects
    // every following line up to (but not including) the next detected heading.
    function findToken(keywords, sourceText) {
      var potentialHeadingsIndexes = [];
      var token = [];
      for (var i = 0; i < sourceText.length; i++) {
        // look through entire text
        // save every time you see a potential header
        // decide on the best header using isHeading
        var hasKeyWord = function (keyWord) {
          return sourceText[i].toLowerCase().indexOf(keyWord) >= 0;
        };
        if(keywords.some(hasKeyWord)) {
          potentialHeadingsIndexes.push(i);
          console.log("findtoken:", sourceText[i], potentialHeadingsIndexes);
        }
      }
      var headingIndex = guessHeadingIndex(potentialHeadingsIndexes, sourceText, keywords);
      console.log("headingindex", headingIndex);
      // No plausible heading found: return the empty token list.
      if(headingIndex < 0){return token;}
      for(var j = headingIndex+1; j < sourceText.length; j++) {
        var line = sourceText[j];
        console.log("finding next heading", line, j);
        console.log("is heading?", line, isHeading(line, allHeadingKeywords));
        // if(!isHeading(line)) {
        // Lines scoring below 0.5 are treated as section content, not a new heading.
        if(isHeading(line, allHeadingKeywords) < 0.5) {
          token.push(sourceText[j]);
        } else { // found next heading, end of this token
          return token;
        }
      }
      return token;
    }
    // Of all candidate heading lines, returns the index of the one with the highest
    // isHeading score, or Number.NEGATIVE_INFINITY when no candidate scores above 0.
    function guessHeadingIndex(potentialHeadingsIndexes, sourceText, keywords) {
      var headingScores = [];
      potentialHeadingsIndexes.forEach(function(potentialHeadingIndex) {
        var potentialHeading = sourceText[potentialHeadingIndex];
        var score = isHeading(potentialHeading, keywords);
        headingScores.push({potentialHeadingIndex: potentialHeadingIndex, score: score});
      });
      var highestScore = {potentialHeadingIndex: Number.NEGATIVE_INFINITY, score: 0};
      headingScores.forEach(function(headingScore) {
        console.log("headingscore: ", headingScore);
        if(headingScore.score > highestScore.score) {
          highestScore.potentialHeadingIndex = headingScore.potentialHeadingIndex;
          highestScore.score = headingScore.score;
        }
      });
      return highestScore.potentialHeadingIndex;
    }
    // Heuristic score for how likely `potentialHeading` is a section heading:
    // exact keyword match +0.9, all-caps +0.4, trailing ':' +0.4,
    // empty/whitespace or too many words -1.
    function isHeading(potentialHeading, keywords) {
      // determine how likely it is to be a heading
      // it's likely a heading, if
      // it matches the heading keywords defined above
      // if it's all caps
      // it contains a ":" at the end
      // it's the only word in the line provided (?)
      var score = 0;
      var hasKeyWord = function (keyWord) {
        var matchExactWordRegex = new RegExp("(?:^|\\s)" + keyWord.toLowerCase() + "(?=\\s|$)", "g");
        return potentialHeading.toLowerCase().match(matchExactWordRegex);
      };
      if(keywords.some(hasKeyWord)) {
        score += 0.9;
      }
      if(isUpperCase(potentialHeading)) {
        score += 0.4;
      }
      if(potentialHeading.trim().slice(-1) == ":") {
        score += 0.4;
      }
      if(isEmptyOrWhiteSpace(potentialHeading) || hasTooManyWords(potentialHeading)) {
        score -= 1;
      }
      // console.log("isheading", potentialHeading, score);
      return score;
    }
    // True when the string contains letters and they are all upper-case.
    function isUpperCase(str) {
      return (str === str.toUpperCase() && str !== str.toLowerCase());
    }
    // Headings are assumed to be short: more than 5 space-separated words disqualifies.
    function hasTooManyWords(str) {
      return str.split(' ').length > 5;
    }
    // NOTE(review): unused within this file as far as visible. The alternation
    // `|\b[A-Za-z' &]+\b` also matches plain words, so this returns truthy for
    // almost any non-empty text — confirm intent before relying on it.
    function hasDisallowedPunctuation(str) {
      return str.match(/[,.!?;:]|\b[A-Za-z' &]+\b/);
    }
    // True for null or strings containing only whitespace.
    function isEmptyOrWhiteSpace(str) {
      return str === null || str.match(/^\s*$/) !== null;
    }
    // Public API of the factory.
    return {
      tokenizeCv: tokenizeCv
    };
  });
|
app/components/factories/cv-tokenizer.js
|
'use strict';
// AngularJS factory that splits raw PDF text (an array of lines) from a CV/resume
// into named sections using keyword matching plus a heading-likelihood heuristic.
// (Earlier revision: heading threshold < 0 and all-caps weight 0.9.)
angular.module('myApp.factories')
  .factory('cvTokenizer', function() {
    // Keyword lists used to recognize each section heading (matched case-insensitively).
    var summaryKeywords = ["summary", "introduction"];
    var skillKeywords = ["skills & expertise", "skill set", "skillset", "preferredSkills"];
    var experienceKeywords = ["experience", "employment", "work", "history"];
    var projectKeywords = ["projects"];
    var educationKeywords = ["education", "educational"];
    var languageKeywords = ["languages"];
    var interestKeywords = ["interests"];
    var refereeKeywords = ["referees", "references", "reference"];
    // Union of all heading keywords; used to detect where the NEXT section begins.
    var allHeadingKeywords = [].concat(summaryKeywords, skillKeywords, experienceKeywords,
      projectKeywords, educationKeywords, languageKeywords, interestKeywords, refereeKeywords);
    // Tokenizes the whole document into an object of section-name -> array of lines.
    var tokenizeCv = function(allTextFromPdf) {
      // parse name
      // name is usually either big header, or has the word "name" near it
      var nameToken = allTextFromPdf[0]; //naive, works for linkedin.
      var summaryToken = findToken(summaryKeywords, allTextFromPdf);
      var skillToken = findToken(skillKeywords, allTextFromPdf);
      var experienceToken = findToken(experienceKeywords, allTextFromPdf);
      var projectToken = findToken(projectKeywords, allTextFromPdf);
      // parse education
      var educationToken = findToken(educationKeywords, allTextFromPdf);
      var languageToken = findToken(languageKeywords, allTextFromPdf);
      var interestToken = findToken(interestKeywords, allTextFromPdf);
      var refereeToken = findToken(refereeKeywords, allTextFromPdf);
      return {
        name: nameToken,
        summary: summaryToken,
        skill: skillToken,
        experience: experienceToken,
        project: projectToken,
        education: educationToken,
        language: languageToken,
        interest: interestToken,
        referee: refereeToken
      };
    };
    // Finds the best-matching heading for `keywords` in `sourceText`, then collects
    // every following line up to (but not including) the next detected heading.
    function findToken(keywords, sourceText) {
      var potentialHeadingsIndexes = [];
      var token = [];
      for (var i = 0; i < sourceText.length; i++) {
        // look through entire text
        // save every time you see a potential header
        // decide on the best header using isHeading
        var hasKeyWord = function (keyWord) {
          return sourceText[i].toLowerCase().indexOf(keyWord) >= 0;
        };
        if(keywords.some(hasKeyWord)) {
          potentialHeadingsIndexes.push(i);
          console.log("findtoken:", sourceText[i], potentialHeadingsIndexes);
        }
      }
      var headingIndex = guessHeadingIndex(potentialHeadingsIndexes, sourceText, keywords);
      console.log("headingindex", headingIndex);
      // No plausible heading found: return the empty token list.
      if(headingIndex < 0){return token;}
      for(var j = headingIndex+1; j < sourceText.length; j++) {
        var line = sourceText[j];
        console.log("finding next heading", line, j);
        console.log("is heading?", line, isHeading(line, allHeadingKeywords));
        // if(!isHeading(line)) {
        // Only lines scoring below 0 are treated as section content in this revision.
        if(isHeading(line, allHeadingKeywords) < 0) {
          token.push(sourceText[j]);
        } else { // found next heading, end of this token
          return token;
        }
      }
      return token;
    }
    // Of all candidate heading lines, returns the index of the one with the highest
    // isHeading score, or Number.NEGATIVE_INFINITY when no candidate scores above 0.
    function guessHeadingIndex(potentialHeadingsIndexes, sourceText, keywords) {
      var headingScores = [];
      potentialHeadingsIndexes.forEach(function(potentialHeadingIndex) {
        var potentialHeading = sourceText[potentialHeadingIndex];
        var score = isHeading(potentialHeading, keywords);
        headingScores.push({potentialHeadingIndex: potentialHeadingIndex, score: score});
      });
      var highestScore = {potentialHeadingIndex: Number.NEGATIVE_INFINITY, score: 0};
      headingScores.forEach(function(headingScore) {
        console.log("headingscore: ", headingScore);
        if(headingScore.score > highestScore.score) {
          highestScore.potentialHeadingIndex = headingScore.potentialHeadingIndex;
          highestScore.score = headingScore.score;
        }
      });
      return highestScore.potentialHeadingIndex;
    }
    // Heuristic score for how likely `potentialHeading` is a section heading:
    // exact keyword match +0.9, all-caps +0.9, trailing ':' +0.4,
    // empty/whitespace or too many words -1.
    function isHeading(potentialHeading, keywords) {
      // determine how likely it is to be a heading
      // it's likely a heading, if
      // it matches the heading keywords defined above
      // if it's all caps
      // it contains a ":" at the end
      // it's the only word in the line provided (?)
      var score = 0;
      var hasKeyWord = function (keyWord) {
        var matchExactWordRegex = new RegExp("(?:^|\\s)" + keyWord.toLowerCase() + "(?=\\s|$)", "g");
        return potentialHeading.toLowerCase().match(matchExactWordRegex);
      };
      if(keywords.some(hasKeyWord)) {
        score += 0.9;
      }
      if(isUpperCase(potentialHeading)) {
        score += 0.9
      }
      if(potentialHeading.trim().slice(-1) == ":") {
        score += 0.4;
      }
      if(isEmptyOrWhiteSpace(potentialHeading) || hasTooManyWords(potentialHeading)) {
        score -= 1;
      }
      // console.log("isheading", potentialHeading, score);
      return score;
    }
    // True when the string contains letters and they are all upper-case.
    function isUpperCase(str) {
      return (str === str.toUpperCase() && str !== str.toLowerCase());
    }
    // Headings are assumed to be short: more than 5 space-separated words disqualifies.
    function hasTooManyWords(str) {
      return str.split(' ').length > 5;
    }
    // NOTE(review): unused within this file as far as visible. The alternation
    // `|\b[A-Za-z' &]+\b` also matches plain words, so this returns truthy for
    // almost any non-empty text — confirm intent before relying on it.
    function hasDisallowedPunctuation(str) {
      return str.match(/[,.!?;:]|\b[A-Za-z' &]+\b/);
    }
    // True for null or strings containing only whitespace.
    function isEmptyOrWhiteSpace(str) {
      return str === null || str.match(/^\s*$/) !== null;
    }
    // Public API of the factory.
    return {
      tokenizeCv: tokenizeCv
    };
  });
|
CV Tokenizer: Adjusted weight to not immediately assume that uppercase means it's guaranteed to be a heading.
|
app/components/factories/cv-tokenizer.js
|
CV Tokenizer: Adjusted weight to not immediately assume that uppercase means it's guaranteed to be a heading.
|
<ide><path>pp/components/factories/cv-tokenizer.js
<ide> console.log("finding next heading", line, j);
<ide> console.log("is heading?", line, isHeading(line, allHeadingKeywords));
<ide> // if(!isHeading(line)) {
<del> if(isHeading(line, allHeadingKeywords) < 0) {
<add> if(isHeading(line, allHeadingKeywords) < 0.5) {
<ide> token.push(sourceText[j]);
<ide> } else { // found next heading, end of this token
<ide> return token;
<ide> }
<ide>
<ide> if(isUpperCase(potentialHeading)) {
<del> score += 0.9
<add> score += 0.4;
<ide> }
<ide>
<ide> if(potentialHeading.trim().slice(-1) == ":") {
|
|
Java
|
apache-2.0
|
4fff75cffdddadeeacade83e8f004a8363200e37
| 0 |
ottogroup/flink-operator-library
|
/**
* Copyright 2016 Otto (GmbH & Co KG)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ottogroup.bi.streaming.operator.json.aggregate;
import java.io.Serializable;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.sling.commons.json.JSONArray;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import com.ottogroup.bi.streaming.operator.json.JsonContentReference;
import com.ottogroup.bi.streaming.operator.json.JsonContentType;
import com.ottogroup.bi.streaming.operator.json.JsonProcessingUtils;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.BooleanContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.DoubleContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.IntegerContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.JsonContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.StringContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.TimestampContentAggregateFunction;
/**
 * Implements an <a href="https://flink.apache.org">Apache Flink</a> operator which works on windowed {@link DataStream streams}
 * and provides content aggregation features. It supports different methods (see {@link ContentAggregator})
 * applied on values found in specified locations inside the received {@link JSONObject} instances. If more detailed
 * or ordered evaluations are requested the operator provides a group-by feature which sorts values into buckets of alike values
 * before applying selected aggregation methods.<br/><br/>
 * Implemented basically to support debugging the operator allows to add all raw input data in copies to the output document.
 * <br/><br/>
 * Optional elements (plus values) may be added to assign static content to each output document. Please use
 * {@link AggregatorConfiguration#addOptionalField(String, String)} to add these field types. Aside from static content special
 * identifier may be added to insert computed content:
 * <ul>
 *   <li>{@link WindowedJsonContentAggregator#OPTIONAL_FIELD_TYPE_TIMESTAMP} - provided as value the current time stamp is added</li>
 *   <li>{@link WindowedJsonContentAggregator#OPTIONAL_FIELD_TYPE_TOTAL_MESSAGE_COUNT} - provided as value the message count of the current window is added</li>
 * </ul>
 * @author mnxfst
 * @since Jan 13, 2016
 */
public class WindowedJsonContentAggregator implements AllWindowFunction<JSONObject, JSONObject, TimeWindow> {

    private static final long serialVersionUID = -6861608901993095853L;

    private static final Logger LOG = LogManager.getLogger(WindowedJsonContentAggregator.class);

    /** Fallback conversion pattern applied to timestamp fields when none is configured */
    private static final String TIMESTAMP_DEFAULT_PATTERN = "yyyy-MM-dd";

    public static final String OPERATOR_ELEMENT_ID = "oid";
    /** Optional-field value requesting insertion of the current time stamp */
    public static final String OPTIONAL_FIELD_TYPE_TIMESTAMP = "timestamp";
    /** Optional-field value requesting insertion of the window's total message count */
    public static final String OPTIONAL_FIELD_TYPE_TOTAL_MESSAGE_COUNT = "totalCount";

    private final JsonProcessingUtils jsonUtils = new JsonProcessingUtils();

    /** Aggregation function implementations, keyed by the content type they handle */
    @SuppressWarnings("rawtypes")
    private final Map<JsonContentType, JsonContentAggregateFunction> contentAggregatorFunctions = new HashMap<>();

    private final String operatorId;
    private final AggregatorConfiguration configuration;
    // NOTE(review): SimpleDateFormat is not thread-safe; safe only while each operator
    // instance is driven by a single task thread -- confirm with the Flink runtime model
    private final SimpleDateFormat timestampFormatter;
    /** Copy all raw window input into the output document (debugging aid) */
    private final boolean addRaw;
    /** True when at least one optional (static or computed) field is configured */
    private final boolean addOptional;

    /**
     * Initializes the aggregator using the provided input
     * @param operatorId
     *          identifier of this operator instance, used to tag log output
     * @param configuration
     *          aggregation settings: fields, methods, grouping, optional fields, raw flag
     */
    public WindowedJsonContentAggregator(final String operatorId, final AggregatorConfiguration configuration) {
        this.operatorId = operatorId;
        this.configuration = configuration;
        this.timestampFormatter = new SimpleDateFormat(configuration.getTimestampPattern());
        // register one aggregation function per supported content type
        this.contentAggregatorFunctions.put(JsonContentType.STRING, new StringContentAggregateFunction());
        this.contentAggregatorFunctions.put(JsonContentType.DOUBLE, new DoubleContentAggregateFunction());
        this.contentAggregatorFunctions.put(JsonContentType.INTEGER, new IntegerContentAggregateFunction());
        this.contentAggregatorFunctions.put(JsonContentType.TIMESTAMP, new TimestampContentAggregateFunction());
        this.contentAggregatorFunctions.put(JsonContentType.BOOLEAN, new BooleanContentAggregateFunction());
        this.addRaw = configuration.isRaw();
        this.addOptional = configuration.getOptionalFields() != null && !configuration.getOptionalFields().isEmpty();
        // TODO validate field configuration
    }

    /**
     * Aggregates the configured fields over all documents of the window and emits a single
     * result document (plus optional/raw content when configured). Nothing is emitted for
     * an empty window.
     * @see org.apache.flink.streaming.api.functions.windowing.AllWindowFunction#apply(org.apache.flink.streaming.api.windowing.windows.Window, java.lang.Iterable, org.apache.flink.util.Collector)
     */
    @Override
    public void apply(TimeWindow window, Iterable<JSONObject> windowValues, Collector<JSONObject> out) throws Exception {

        // if no values are available .... well .... nothing happens here .... for obvious reasons
        if(windowValues == null || out == null)
            return;

        JSONObject jsonDocument = new JSONObject();
        Map<String, Serializable> aggregatedResults = new HashMap<>();
        int messageCounter = 0;

        // step through events found inside the provided list
        for(final JSONObject windowJsonElement : windowValues) {
            messageCounter++;
            try {
                aggregate(windowJsonElement, configuration, aggregatedResults);
            } catch(Exception e) {
                // pass the exception to the logger so the stack trace is not lost
                LOG.error("Failed to aggregated event received from surrounding window [operator="+this.operatorId+"]. Reason: " + e.getMessage(), e);
            }
        }

        // if the message counter shows a value larger than zero, generate and export the output document
        if(messageCounter > 0) {
            addAggregatedValues(jsonDocument, configuration.getOutputElement(), aggregatedResults);
            if(addOptional)
                addOptionalFields(jsonDocument, configuration.getOptionalFields(), this.timestampFormatter, messageCounter);
            if(addRaw)
                addRawMessages(jsonDocument, windowValues);
            out.collect(jsonDocument);
        }
    }

    /**
     * Aggregates the contents of a single {@link JSONObject} received from the surrounding window
     * @param jsonDocument
     *          The {@link JSONObject} holding content to aggregate (must not be null)
     * @param cfg
     *          The {@link AggregatorConfiguration} which holds all required information to aggregate the content
     * @param aggregatedValues
     *          The {@link Map} to hold the aggregated values (created when null)
     * @throws ParseException
     *          Thrown in case any error occurs while parsing field values from {@link JSONObject}
     * @throws NoSuchElementException
     *          Thrown in case a field is requested to be parsed from a {@link JSONObject} which does not exist
     * @throws JSONException
     *          Thrown in case an error occurs while accessing the provided {@link JSONObject}
     * @return The provided {@link Map} to hold the aggregated values updated by current content
     */
    protected final Map<String, Serializable> aggregate(final JSONObject jsonDocument, final AggregatorConfiguration cfg, Map<String, Serializable> aggregatedValues) throws ParseException, JSONException {

        /////////////////////////////////////////////////////////////////////////////////////
        // validate the provided input
        if(jsonDocument == null) // as the input should already be validated by the caller, this leads to an exception
            throw new JSONException("Missing required input document");
        if(aggregatedValues == null)
            aggregatedValues = new HashMap<>();
        if(cfg == null)
            return aggregatedValues;

        /////////////////////////////////////////////////////////////////////////////////////
        // build the bucket key as a '.'-separated concatenation of the group-by field values;
        // dots inside a value are escaped to '_' so they cannot be mistaken for path
        // separators when the key is later split into an output path
        final StringBuilder groupElementKey = new StringBuilder(); // local builder: no synchronization required
        if(cfg.getGroupByFields() != null) { // guard: configuration may omit grouping entirely
            for(final Iterator<JsonContentReference> refIter = cfg.getGroupByFields().iterator(); refIter.hasNext();) {
                final JsonContentReference jsonContentRef = refIter.next();
                String str = (String)getFieldValue(jsonDocument, jsonContentRef.getPath(), JsonContentType.STRING);
                str = str.replaceAll("\\.", "_");
                groupElementKey.append(str);
                if(refIter.hasNext())
                    groupElementKey.append(".");
            }
        }

        // read out aggregate configuration for each configured field and aggregate content
        if(cfg.getFieldAggregations() != null) { // guard: nothing to do without field configurations
            for(FieldAggregationConfiguration fieldCfg : cfg.getFieldAggregations()) {
                if(fieldCfg.getAggregateField().isRequired()) {
                    aggregateField(jsonDocument, fieldCfg, groupElementKey.toString(), aggregatedValues);
                } else {
                    try {
                        aggregateField(jsonDocument, fieldCfg, groupElementKey.toString(), aggregatedValues);
                    } catch(NoSuchElementException e) {
                        // optional field: a missing value is simply skipped
                    }
                }
            }
        }
        return aggregatedValues;
    }

    /**
     * Aggregates the contents of a single field as provided through the {@link FieldAggregationConfiguration}
     * @param jsonDocument
     *          The {@link JSONObject} to read values from (must not be null)
     * @param fieldCfg
     *          The {@link FieldAggregationConfiguration} required for aggregating values of a selected field
     * @param groupByKeyPrefix
     *          The prefix used to store the aggregated content of the given field. The provided value serves as bucket reference to group results
     * @param aggregatedValues
     *          The {@link Map} holding the running aggregation state (created when null)
     * @throws JSONException
     *          Thrown in case accessing the {@link JSONObject} fails for any reason
     * @throws ParseException
     *          Thrown in case parsing the contents of a {@link JsonContentType#TIMESTAMP} field fails for any reason
     */
    protected Map<String, Serializable> aggregateField(final JSONObject jsonDocument, final FieldAggregationConfiguration fieldCfg, final String groupByKeyPrefix,
            Map<String, Serializable> aggregatedValues) throws JSONException, ParseException {

        //////////////////////////////////////////////////////////////////
        // validate the provided input
        if(jsonDocument == null)
            throw new JSONException("Missing required input document");
        if(aggregatedValues == null)
            aggregatedValues = new HashMap<>();

        //////////////////////////////////////////////////////////////////
        // fetch the current value from the referenced field, typed according to configuration
        Serializable fieldValue = null;
        switch(fieldCfg.getAggregateField().getContentType()) {
            case BOOLEAN: {
                fieldValue = this.jsonUtils.getBooleanFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath());
                break;
            }
            case DOUBLE: {
                fieldValue = this.jsonUtils.getDoubleFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath());
                break;
            }
            case INTEGER: {
                fieldValue = this.jsonUtils.getIntegerFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath());
                break;
            }
            case STRING: {
                fieldValue = this.jsonUtils.getTextFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath());
                break;
            }
            case TIMESTAMP: {
                fieldValue = this.jsonUtils.getDateTimeFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath(), fieldCfg.getAggregateField().getConversionPattern());
                break;
            }
        }

        //////////////////////////////////////////////////////////////////
        // step through configured aggregation methods for the current field; each
        // result is stored under "<groupPrefix>.<outputElement>.<METHOD>"
        for(final ContentAggregator method : fieldCfg.getMethods()) {
            final StringBuilder aggregatedValueKey = new StringBuilder();
            if(StringUtils.isNotBlank(groupByKeyPrefix))
                aggregatedValueKey.append(groupByKeyPrefix).append(".").append(fieldCfg.getOutputElement()).append(".").append(method.name());
            else
                aggregatedValueKey.append(fieldCfg.getOutputElement()).append(".").append(method.name());
            try {
                aggregatedValues.put(aggregatedValueKey.toString(), aggregateValue(fieldValue, aggregatedValues.get(aggregatedValueKey.toString()), fieldCfg.getAggregateField().getContentType(), method));
            } catch (Exception e) {
                // pass the exception to the logger so the stack trace is not lost
                LOG.error("Failed to aggregate value for field '"+fieldCfg.getOutputElement()+"' [operator="+this.operatorId+"]. Reason: " + e.getMessage(), e);
            }
        }
        //////////////////////////////////////////////////////////////////
        return aggregatedValues;
    }

    /**
     * Aggregates a new value by combining it with an existing value under a given method
     * @param newValue
     *          the value extracted from the current document
     * @param existingValue
     *          the running aggregation state (null on first use; for AVG a (sum, count) pair)
     * @param type
     *          the content type used to select the aggregation function
     * @param method
     *          the aggregation method to apply
     * @return the updated aggregation state
     * @throws NoSuchMethodException
     *          when no function is registered for the type or no method was provided
     * @throws Exception
     *          propagated from the underlying aggregation function
     */
    @SuppressWarnings("unchecked")
    protected Serializable aggregateValue(final Serializable newValue, final Serializable existingValue, final JsonContentType type, final ContentAggregator method) throws NoSuchMethodException, Exception {

        JsonContentAggregateFunction<Serializable> function = this.contentAggregatorFunctions.get(type);
        if(function == null)
            throw new NoSuchMethodException("Requested aggregation method '"+method+"' not found for type '"+type+"'");
        if(method == null)
            throw new NoSuchMethodException("Requested aggregation method 'null' not found for type '"+type+"'");

        switch(method) {
            case AVG: {
                // AVG keeps a (sum, count) pair as its running accumulator
                return function.average((MutablePair<Serializable, Integer>)existingValue, newValue);
            }
            case MIN: {
                return function.min(existingValue, newValue);
            }
            case MAX: {
                return function.max(existingValue, newValue);
            }
            case SUM: {
                return function.sum(existingValue, newValue);
            }
            default: { // COUNT
                return function.count((Integer)existingValue);
            }
        }
    }

    /**
     * Returns the value referenced by the given path from the provided {@link JSONObject}, using
     * the default timestamp pattern for date/time content. The result depends on the
     * {@link JsonContentType}. All provided input must be checked for not being null and holding
     * valid values before calling this method.
     * @param jsonObject
     *          The {@link JSONObject} to retrieve the value from
     * @param path
     *          The path which points towards the value
     * @param contentType
     *          The expected {@link JsonContentType}
     * @return The referenced value
     * @throws JSONException
     *          Thrown in case anything fails during JSON content extraction
     * @throws ParseException
     * @throws NoSuchElementException
     * @throws IllegalArgumentException
     */
    protected Serializable getFieldValue(final JSONObject jsonObject, final String[] path, final JsonContentType contentType) throws JSONException, IllegalArgumentException, NoSuchElementException, ParseException {
        return getFieldValue(jsonObject, path, contentType, TIMESTAMP_DEFAULT_PATTERN);
    }

    /**
     * Returns the value referenced by the given path from the provided {@link JSONObject}. The result depends on the {@link JsonContentType}. All
     * provided input must be checked for not being null and holding valid values before calling this method.
     * @param jsonObject
     *          The {@link JSONObject} to retrieve the value from
     * @param path
     *          The path which points towards the value
     * @param contentType
     *          The expected {@link JsonContentType}
     * @param formatString
     *          Optional format string required to parse out date / time values
     * @return The referenced value
     * @throws JSONException
     *          Thrown in case anything fails during JSON content extraction
     * @throws ParseException
     * @throws NoSuchElementException
     * @throws IllegalArgumentException
     */
    protected Serializable getFieldValue(final JSONObject jsonObject, final String[] path, final JsonContentType contentType, final String formatString) throws JSONException, IllegalArgumentException, NoSuchElementException, ParseException {

        if(contentType == null)
            throw new IllegalArgumentException("Required content type information missing");

        switch(contentType) {
            case BOOLEAN: {
                return this.jsonUtils.getBooleanFieldValue(jsonObject, path);
            }
            case DOUBLE: {
                return this.jsonUtils.getDoubleFieldValue(jsonObject, path);
            }
            case INTEGER: {
                return this.jsonUtils.getIntegerFieldValue(jsonObject, path);
            }
            case TIMESTAMP: {
                return this.jsonUtils.getDateTimeFieldValue(jsonObject, path, formatString);
            }
            default: { // STRING and any future types fall back to text extraction
                return this.jsonUtils.getTextFieldValue(jsonObject, path);
            }
        }
    }

    /**
     * Adds the requested set of optional fields to provided {@link JSONObject}
     * @param jsonObject
     *          The {@link JSONObject} to add optional fields to
     * @param optionalFields
     *          The optional fields along with the requested values to be added to the provided {@link JSONObject}
     * @param dateFormatter
     *          The format to apply when adding time stamp values
     * @param totalMessageCount
     *          The total number of messages received from the window
     * @return The provided {@link JSONObject} enhanced by the requested values
     * @throws JSONException
     *          Thrown in case anything fails during operations on the JSON object
     */
    protected JSONObject addOptionalFields(final JSONObject jsonObject, final Map<String, String> optionalFields, final SimpleDateFormat dateFormatter, final int totalMessageCount) throws JSONException {

        // step through the optional fields if any were provided
        if(jsonObject != null && optionalFields != null && !optionalFields.isEmpty()) {
            for(final Map.Entry<String, String> optionalField : optionalFields.entrySet()) {
                final String fieldName = optionalField.getKey();
                final String value = optionalField.getValue();
                // check if the value references a pre-defined type and thus requests a special value or
                // whether the field name must be added along with the value without any modifications
                if(StringUtils.equalsIgnoreCase(value, OPTIONAL_FIELD_TYPE_TIMESTAMP))
                    jsonObject.put(fieldName, dateFormatter.format(new Date()));
                else if(StringUtils.equalsIgnoreCase(value, OPTIONAL_FIELD_TYPE_TOTAL_MESSAGE_COUNT))
                    jsonObject.put(fieldName, totalMessageCount);
                else
                    jsonObject.put(fieldName, value);
            }
        }
        return jsonObject;
    }

    /**
     * Appends the raw messages received from the window to the resulting {@link JSONObject}
     * @param jsonObject
     *          The {@link JSONObject} to add window messages to
     * @param values
     *          The {@link JSONObject} values to be added
     * @return The input {@link JSONObject} extended by {@link JSONObject} received from the window
     * @throws JSONException
     *          Thrown in case moving the source events to the result fails
     */
    protected JSONObject addRawMessages(final JSONObject jsonObject, Iterable<JSONObject> values) throws JSONException {
        if(jsonObject != null && values != null) {
            JSONArray rawMessagesArray = new JSONArray();
            for(JSONObject jo : values) {
                rawMessagesArray.put(jo);
            }
            jsonObject.put("raw", rawMessagesArray);
        }
        return jsonObject;
    }

    /**
     * Adds the aggregated values to {@link JSONObject output document} below the given element name. During
     * insertion the key which references a value serves as path into the document, eg.: field1.field2.field3 = test leads
     * to <code>{"field1":{"field2":{"field3":"test"}}}</code>
     * @param jsonObject
     *          The object the element holding the aggregated values must be attached to
     * @param outputElementName
     *          The name of the element to hold the aggregated values (falls back to "aggregatedValues" when blank)
     * @param values
     *          The aggregated values (key serves as path into document structure of the result)
     * @return The input document extended by aggregated values
     * @throws JSONException
     *          Thrown in case inserting values fails for any reason
     */
    protected JSONObject addAggregatedValues(final JSONObject jsonObject, final String outputElementName, Map<String, Serializable> values) throws JSONException {

        if(jsonObject == null)
            return null;

        JSONObject outputElement = new JSONObject();
        if(values != null && !values.isEmpty()) {
            // each key is a dot-separated path into the output element structure
            for(final Map.Entry<String, Serializable> value : values.entrySet()) {
                String[] outputPath = value.getKey().split("\\.");
                this.jsonUtils.insertField(outputElement, outputPath, value.getValue());
            }
        }
        jsonObject.put(StringUtils.isNotBlank(outputElementName) ? outputElementName : "aggregatedValues", outputElement);
        return jsonObject;
    }
}
|
src/main/java/com/ottogroup/bi/streaming/operator/json/aggregate/WindowedJsonContentAggregator.java
|
/**
* Copyright 2016 Otto (GmbH & Co KG)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ottogroup.bi.streaming.operator.json.aggregate;
import java.io.Serializable;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.sling.commons.json.JSONArray;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import com.ottogroup.bi.streaming.operator.json.JsonContentReference;
import com.ottogroup.bi.streaming.operator.json.JsonContentType;
import com.ottogroup.bi.streaming.operator.json.JsonProcessingUtils;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.BooleanContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.DoubleContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.IntegerContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.JsonContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.StringContentAggregateFunction;
import com.ottogroup.bi.streaming.operator.json.aggregate.functions.TimestampContentAggregateFunction;
/**
* Implements an {@linkplain https://flink.apache.org Apache Flink} operator which works on windowed {@link DataStream streams}
* and provides content aggregation features. It supports different methods (see {@link ContentAggregator})
* applied on values found in specified locations inside the received {@link JSONObject} instances. If more detailed
* or ordered evaluations are requested the operator provides a group-by feature which sorts values into buckets of alike values
* before applying selected aggregation methods.<br/><br/>
* Implemented basically to support debugging the operator allows to add all raw input data in copies to the output document.
* <br/><br/>
* Optional elements (plus values) may be added to assign static content to each output document. Please use
* {@link AggregatorConfiguration#addOptionalField(String, String)} to add these field types. Aside from static content special
* identifier may be added to insert computed content:
* <ul>
* <li>{@link WindowedJsonContentAggregator#OPTIONAL_FIELD_TYPE_TIMESTAMP} - provided as value the current time stamp is added</li>
* <li>{@link WindowedJsonContentAggregator#OPTIONAL_FIELD_TYPE_TOTAL_MESSAGE_COUNT} - provided as value the message count of the current window is added</li>
* </ul>
* @author mnxfst
* @since Jan 13, 2016
*/
public class WindowedJsonContentAggregator implements AllWindowFunction<JSONObject, JSONObject, TimeWindow> {
private static final long serialVersionUID = -6861608901993095853L;
private static final Logger LOG = LogManager.getLogger(WindowedJsonContentAggregator.class);
private static final String TIMESTAMP_DEFAULT_PATTERN = "yyyy-MM-dd";
public static final String OPERATOR_ELEMENT_ID = "oid";
public static final String OPTIONAL_FIELD_TYPE_TIMESTAMP = "timestamp";
public static final String OPTIONAL_FIELD_TYPE_TOTAL_MESSAGE_COUNT = "totalCount";
private final JsonProcessingUtils jsonUtils = new JsonProcessingUtils();
@SuppressWarnings("rawtypes")
private final Map<JsonContentType, JsonContentAggregateFunction> contentAggregatorFunctions = new HashMap<>();
private final String operatorId;
private final AggregatorConfiguration configuration;
private final SimpleDateFormat timestampFormatter;
private final boolean addRaw;
private final boolean addOptional;
/**
 * Creates a new aggregator instance.
 * @param operatorId identifier of this operator instance, used to tag log output
 * @param configuration aggregation settings: fields, methods, grouping, optional fields, raw flag
 */
public WindowedJsonContentAggregator(final String operatorId, final AggregatorConfiguration configuration) {
    this.operatorId = operatorId;
    this.configuration = configuration;
    this.timestampFormatter = new SimpleDateFormat(configuration.getTimestampPattern());
    // register one aggregation function per supported content type
    this.contentAggregatorFunctions.put(JsonContentType.BOOLEAN, new BooleanContentAggregateFunction());
    this.contentAggregatorFunctions.put(JsonContentType.INTEGER, new IntegerContentAggregateFunction());
    this.contentAggregatorFunctions.put(JsonContentType.DOUBLE, new DoubleContentAggregateFunction());
    this.contentAggregatorFunctions.put(JsonContentType.STRING, new StringContentAggregateFunction());
    this.contentAggregatorFunctions.put(JsonContentType.TIMESTAMP, new TimestampContentAggregateFunction());
    // cache frequently consulted flags once at construction time
    this.addRaw = configuration.isRaw();
    this.addOptional = configuration.getOptionalFields() != null && !configuration.getOptionalFields().isEmpty();
    // TODO validate field configuration
}
/**
 * Aggregates the configured fields over all documents of the window and emits a single
 * result document (plus optional/raw content when configured). Nothing is emitted for
 * an empty window.
 * @see org.apache.flink.streaming.api.functions.windowing.AllWindowFunction#apply(org.apache.flink.streaming.api.windowing.windows.Window, java.lang.Iterable, org.apache.flink.util.Collector)
 */
@Override
public void apply(TimeWindow window, Iterable<JSONObject> windowValues, Collector<JSONObject> out) throws Exception {

    // if no values are available .... well .... nothing happens here .... for obvious reasons
    if(windowValues == null || out == null)
        return;

    JSONObject jsonDocument = new JSONObject();
    Map<String, Serializable> aggregatedResults = new HashMap<>();
    int messageCounter = 0;

    // step through events found inside the provided list
    for(final JSONObject windowJsonElement : windowValues) {
        messageCounter++;
        try {
            aggregate(windowJsonElement, configuration, aggregatedResults);
        } catch(Exception e) {
            // pass the exception to the logger so the stack trace is not lost
            LOG.error("Failed to aggregated event received from surrounding window [operator="+this.operatorId+"]. Reason: " + e.getMessage(), e);
        }
    }

    // if the message counter shows a value larger than zero, generate and export the output document
    if(messageCounter > 0) {
        addAggregatedValues(jsonDocument, configuration.getOutputElement(), aggregatedResults);
        if(addOptional)
            addOptionalFields(jsonDocument, configuration.getOptionalFields(), this.timestampFormatter, messageCounter);
        if(addRaw)
            addRawMessages(jsonDocument, windowValues);
        out.collect(jsonDocument);
    }
}
/**
 * Aggregates the contents of a single {@link JSONObject} received from the surrounding window
 * @param jsonDocument
 *          The {@link JSONObject} holding content to aggregate (must not be null)
 * @param cfg
 *          The {@link AggregatorConfiguration} which holds all required information to aggregate the content
 * @param aggregatedValues
 *          The {@link Map} to hold the aggregated values (created when null)
 * @throws ParseException
 *          Thrown in case any error occurs while parsing field values from {@link JSONObject}
 * @throws NoSuchElementException
 *          Thrown in case a field is requested to be parsed from a {@link JSONObject} which does not exist
 * @throws JSONException
 *          Thrown in case an error occurs while accessing the provided {@link JSONObject}
 * @return The provided {@link Map} to hold the aggregated values updated by current content
 */
protected final Map<String, Serializable> aggregate(final JSONObject jsonDocument, final AggregatorConfiguration cfg, Map<String, Serializable> aggregatedValues) throws ParseException, JSONException {

    /////////////////////////////////////////////////////////////////////////////////////
    // validate the provided input
    if(jsonDocument == null) // as the input should already be validated by the caller, this leads to an exception
        throw new JSONException("Missing required input document");
    if(aggregatedValues == null)
        aggregatedValues = new HashMap<>();
    if(cfg == null)
        return aggregatedValues;

    /////////////////////////////////////////////////////////////////////////////////////
    // build the bucket key as a '.'-separated concatenation of the group-by field values;
    // dots inside a value are escaped to '_' so they cannot be mistaken for path
    // separators when the key is later split into an output path
    final StringBuilder groupElementKey = new StringBuilder(); // local builder: no synchronization required
    if(cfg.getGroupByFields() != null) { // guard: configuration may omit grouping entirely
        for(final Iterator<JsonContentReference> refIter = cfg.getGroupByFields().iterator(); refIter.hasNext();) {
            final JsonContentReference jsonContentRef = refIter.next();
            String str = (String)getFieldValue(jsonDocument, jsonContentRef.getPath(), JsonContentType.STRING);
            str = str.replaceAll("\\.", "_");
            groupElementKey.append(str);
            if(refIter.hasNext())
                groupElementKey.append(".");
        }
    }

    // read out aggregate configuration for each configured field and aggregate content
    if(cfg.getFieldAggregations() != null) { // guard: nothing to do without field configurations
        for(FieldAggregationConfiguration fieldCfg : cfg.getFieldAggregations()) {
            if(fieldCfg.getAggregateField().isRequired()) {
                aggregateField(jsonDocument, fieldCfg, groupElementKey.toString(), aggregatedValues);
            } else {
                try {
                    aggregateField(jsonDocument, fieldCfg, groupElementKey.toString(), aggregatedValues);
                } catch(NoSuchElementException e) {
                    // optional field: a missing value is simply skipped
                }
            }
        }
    }
    return aggregatedValues;
}
/**
 * Aggregates the contents of a single field as provided through the {@link FieldAggregationConfiguration}
 * @param jsonDocument
 *          The {@link JSONObject} to read values from (must not be null)
 * @param fieldCfg
 *          The {@link FieldAggregationConfiguration} required for aggregating values of a selected field
 * @param groupByKeyPrefix
 *          The prefix used to store the aggregated content of the given field. The provided value serves as bucket reference to group results
 * @param aggregatedValues
 *          The {@link Map} holding the running aggregation state (created when null)
 * @throws JSONException
 *          Thrown in case accessing the {@link JSONObject} fails for any reason
 * @throws ParseException
 *          Thrown in case parsing the contents of a {@link JsonContentType#TIMESTAMP} field fails for any reason
 */
protected Map<String, Serializable> aggregateField(final JSONObject jsonDocument, final FieldAggregationConfiguration fieldCfg, final String groupByKeyPrefix,
        Map<String, Serializable> aggregatedValues) throws JSONException, ParseException {

    //////////////////////////////////////////////////////////////////
    // validate the provided input
    if(jsonDocument == null)
        throw new JSONException("Missing required input document");
    if(aggregatedValues == null)
        aggregatedValues = new HashMap<>();

    //////////////////////////////////////////////////////////////////
    // fetch the current value from the referenced field, typed according to configuration
    Serializable fieldValue = null;
    switch(fieldCfg.getAggregateField().getContentType()) {
        case BOOLEAN: {
            fieldValue = this.jsonUtils.getBooleanFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath());
            break;
        }
        case DOUBLE: {
            fieldValue = this.jsonUtils.getDoubleFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath());
            break;
        }
        case INTEGER: {
            fieldValue = this.jsonUtils.getIntegerFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath());
            break;
        }
        case STRING: {
            fieldValue = this.jsonUtils.getTextFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath());
            break;
        }
        case TIMESTAMP: {
            fieldValue = this.jsonUtils.getDateTimeFieldValue(jsonDocument, fieldCfg.getAggregateField().getPath(), fieldCfg.getAggregateField().getConversionPattern());
            break;
        }
    }

    //////////////////////////////////////////////////////////////////
    // step through configured aggregation methods for the current field; each
    // result is stored under "<groupPrefix>.<outputElement>.<METHOD>"
    for(final ContentAggregator method : fieldCfg.getMethods()) {
        final StringBuilder aggregatedValueKey = new StringBuilder();
        if(StringUtils.isNotBlank(groupByKeyPrefix))
            aggregatedValueKey.append(groupByKeyPrefix).append(".").append(fieldCfg.getOutputElement()).append(".").append(method.name());
        else
            aggregatedValueKey.append(fieldCfg.getOutputElement()).append(".").append(method.name());
        try {
            aggregatedValues.put(aggregatedValueKey.toString(), aggregateValue(fieldValue, aggregatedValues.get(aggregatedValueKey.toString()), fieldCfg.getAggregateField().getContentType(), method));
        } catch (Exception e) {
            // pass the exception to the logger so the stack trace is not lost
            LOG.error("Failed to aggregate value for field '"+fieldCfg.getOutputElement()+"' [operator="+this.operatorId+"]. Reason: " + e.getMessage(), e);
        }
    }
    //////////////////////////////////////////////////////////////////
    return aggregatedValues;
}
/**
 * Aggregates a new value by combining it with an existing value under a given method
 * @param newValue
 *          the value extracted from the current document
 * @param existingValue
 *          the running aggregation state (null on first use; for AVG a (sum, count) pair)
 * @param type
 *          the content type used to select the aggregation function
 * @param method
 *          the aggregation method to apply
 * @return the updated aggregation state
 * @throws NoSuchMethodException
 *          when no function is registered for the type or no method was provided
 * @throws Exception
 *          propagated from the underlying aggregation function
 */
@SuppressWarnings("unchecked")
protected Serializable aggregateValue(final Serializable newValue, final Serializable existingValue, final JsonContentType type, final ContentAggregator method) throws NoSuchMethodException, Exception {

    JsonContentAggregateFunction<Serializable> function = this.contentAggregatorFunctions.get(type);
    if(function == null)
        throw new NoSuchMethodException("Requested aggregation method '"+method+"' not found for type '"+type+"'");
    if(method == null)
        throw new NoSuchMethodException("Requested aggregation method 'null' not found for type '"+type+"'");

    switch(method) {
        case AVG: {
            // BUGFIX: newValue must NOT be cast to Integer here -- AVG is also applied to
            // DOUBLE/STRING/TIMESTAMP/BOOLEAN content, and the cast raised a
            // ClassCastException for every non-integer field type
            return function.average((MutablePair<Serializable, Integer>)existingValue, newValue);
        }
        case MIN: {
            return function.min(existingValue, newValue);
        }
        case MAX: {
            return function.max(existingValue, newValue);
        }
        case SUM: {
            return function.sum(existingValue, newValue);
        }
        default: { // COUNT
            return function.count((Integer)existingValue);
        }
    }
}
/**
 * Returns the value referenced by the given path from the provided {@link JSONObject}, using
 * the default timestamp pattern for date/time content. The result depends on the {@link JsonContentType}. All
 * provided input must be checked for not being null and holding valid values before calling this method.
 * @param jsonObject
 * 			The {@link JSONObject} to retrieve the value from
 * @param path
 * 			The path which points towards the value
 * @param contentType
 * 			The expected {@link JsonContentType}
 * @return
 * 			The referenced value
 * @throws JSONException
 * 			Thrown in case anything fails during JSON content extraction
 * @throws ParseException
 * @throws NoSuchElementException
 * @throws IllegalArgumentException
 */
protected Serializable getFieldValue(final JSONObject jsonObject, final String[] path, final JsonContentType contentType) throws JSONException, IllegalArgumentException, NoSuchElementException, ParseException {
// convenience overload: delegates with the default timestamp pattern ("yyyy-MM-dd")
return getFieldValue(jsonObject, path, contentType, TIMESTAMP_DEFAULT_PATTERN);
}
/**
 * Extracts the value addressed by {@code path} from the given {@link JSONObject} and returns
 * it typed according to the supplied {@link JsonContentType}. Callers are expected to have
 * validated all arguments for null/validity beforehand.
 * @param jsonObject
 *          The {@link JSONObject} to retrieve the value from
 * @param path
 *          The path which points towards the value
 * @param contentType
 *          The expected {@link JsonContentType} (must not be null)
 * @param formatString
 *          Optional format string required to parse out date / time values
 * @return The referenced value
 * @throws JSONException
 *          Thrown in case anything fails during JSON content extraction
 * @throws ParseException
 * @throws NoSuchElementException
 * @throws IllegalArgumentException
 */
protected Serializable getFieldValue(final JSONObject jsonObject, final String[] path, final JsonContentType contentType, final String formatString) throws JSONException, IllegalArgumentException, NoSuchElementException, ParseException {
    if(contentType == null)
        throw new IllegalArgumentException("Required content type information missing");
    // dispatch to the extraction routine matching the requested content type
    switch(contentType) {
        case BOOLEAN:
            return this.jsonUtils.getBooleanFieldValue(jsonObject, path);
        case DOUBLE:
            return this.jsonUtils.getDoubleFieldValue(jsonObject, path);
        case INTEGER:
            return this.jsonUtils.getIntegerFieldValue(jsonObject, path);
        case TIMESTAMP:
            return this.jsonUtils.getDateTimeFieldValue(jsonObject, path, formatString);
        default:
            // STRING (and any unrecognized type) falls back to plain text extraction
            return this.jsonUtils.getTextFieldValue(jsonObject, path);
    }
}
/**
* Adds the requested set of optional fields to provided {@link JSONObject}
* @param jsonObject
* The {@link JSONObject} to add optional fields to
* @param optionalFields
* The optional fields along with the requested values to be added to the provided {@link JSONObject}
* @param dateFormatter
* The format to apply when adding time stamp values
* @param totalMessageCount
* The total number of messages received from the window
* @return
* The provided {@link JSONObject} enhanced by the requested values
* @throws JSONException
* Thrown in case anything fails during operations on the JSON object
*/
protected JSONObject addOptionalFields(final JSONObject jsonObject, final Map<String, String> optionalFields, final SimpleDateFormat dateFormatter, final int totalMessageCount) throws JSONException {
// step through the optional fields if any were provided
if(jsonObject != null && optionalFields != null && !optionalFields.isEmpty()) {
for(final String fieldName : optionalFields.keySet()) {
final String value = optionalFields.get(fieldName);
// check if the value references a pre-defined type and thus requests a special value or
// whether the field name must be added along with the value without any modifications
if(StringUtils.equalsIgnoreCase(value, OPTIONAL_FIELD_TYPE_TIMESTAMP))
jsonObject.put(fieldName, dateFormatter.format(new Date()));
else if(StringUtils.equalsIgnoreCase(value, OPTIONAL_FIELD_TYPE_TOTAL_MESSAGE_COUNT))
jsonObject.put(fieldName, totalMessageCount);
else
jsonObject.put(fieldName, value);
}
}
return jsonObject;
}
/**
* Appends the raw messages received from the window to the resulting {@link JSONObject}
* @param jsonObject
* The {@link JSONObject} to add window messages to
* @param values
* The {@link JSONObject} values to be added
* @return
* The input {@link JSONObject} extended by {@link JSONObject} received from the window
* @throws JSONException
* Thrown in case moving the source events to the result
*/
protected JSONObject addRawMessages(final JSONObject jsonObject, Iterable<JSONObject> values) throws JSONException {
if(jsonObject != null && values != null) {
JSONArray rawMessagesArray = new JSONArray();
for(JSONObject jo : values) {
rawMessagesArray.put(jo);
}
jsonObject.put("raw", rawMessagesArray);
}
return jsonObject;
}
/**
* Adds the aggregated values to {@link JSONObject output document} below the given element name. During
* insertion the key which references a value serves as path into the document, eg.: field1.field2.field3 = test leads
* to <code>{"field1":{"field2":{"field3":"test"}}}</code>
* @param jsonObject
* The object the element holding the aggregated values must be attached to
* @param outputElementName
* The name of the element to hold the aggregated values
* @param values
* The aggregated values (key serves as path into document structure of the result)
* @return
* The input document extended by aggregated values
* @throws JSONException
* Thrown in case inserting values fails for any reason
*/
protected JSONObject addAggregatedValues(final JSONObject jsonObject, final String outputElementName, Map<String, Serializable> values) throws JSONException {
if(jsonObject == null)
return null;
JSONObject outputElement = new JSONObject();
if(values != null && !values.isEmpty()) {
// step through keys which represent a node inside the output element
for(final String valueElementName : values.keySet()) {
String[] outputPath = valueElementName.split("\\.");
this.jsonUtils.insertField(outputElement, outputPath, values.get(valueElementName));
}
}
jsonObject.put(StringUtils.isNotBlank(outputElementName) ? outputElementName : "aggregatedValues", outputElement);
return jsonObject;
}
}
|
removed type conversion
|
src/main/java/com/ottogroup/bi/streaming/operator/json/aggregate/WindowedJsonContentAggregator.java
|
removed type conversion
|
<ide><path>rc/main/java/com/ottogroup/bi/streaming/operator/json/aggregate/WindowedJsonContentAggregator.java
<ide>
<ide> switch(method) {
<ide> case AVG: {
<del> return function.average((MutablePair<Serializable, Integer>)existingValue, (Integer)newValue);
<add> return function.average((MutablePair<Serializable, Integer>)existingValue, newValue);
<ide> }
<ide> case MIN: {
<ide> return function.min(existingValue, newValue);
|
|
JavaScript
|
apache-2.0
|
dd67e81b940e459d9f2775dde8632b6896639adb
| 0 |
andrejb/cloudant_bigcouch,davidcoallier/bigcouch,PerilousApricot/bigcouch,benoitc/couchit_experiment,bittorrent/bigcouch,kofron/bigcouch,davidcoallier/bigcouch,breezjw/bigcouch,cloudant/bigcouch,KlausTrainer/bigcouch,georgemarshall/bigcouch,kofron/bigcouch,PerilousApricot/bigcouch,benoitc/couchit_experiment,breezjw/bigcouch,georgemarshall/bigcouch,breezjw/bigcouch,Gussy/bigcouch,KlausTrainer/bigcouch,benoitc/bigcouch,cloudant/bigcouch,PerilousApricot/bigcouch,linearregression/bigcouch,Asquera/bigcouch,linearregression/bigcouch,breezjw/bigcouch,davidcoallier/bigcouch,georgemarshall/bigcouch,benoitc/bigcouch,andrejb/cloudant_bigcouch,Asquera/bigcouch,andrejb/cloudant_bigcouch,breezjw/bigcouch,cloudant/bigcouch,breezjw/bigcouch,bittorrent/bigcouch,PerilousApricot/bigcouch,fkaempfer/couchdb,cloudant/bigcouch,fkaempfer/couchdb,linearregression/bigcouch,linearregression/bigcouch,Gussy/bigcouch,KlausTrainer/bigcouch,linearregression/bigcouch,benoitc/couchit_experiment,cloudant/bigcouch,benoitc/couchit_experiment,benoitc/bigcouch,benoitc/bigcouch,Asquera/bigcouch,bittorrent/bigcouch,Gussy/bigcouch,cloudant/bigcouch,kofron/bigcouch,Asquera/bigcouch,KlausTrainer/bigcouch,Gussy/bigcouch,KlausTrainer/bigcouch,andrejb/cloudant_bigcouch,Gussy/bigcouch,linearregression/bigcouch,bittorrent/bigcouch,Asquera/bigcouch,kofron/bigcouch,Gussy/bigcouch,kofron/bigcouch,fkaempfer/couchdb,fkaempfer/couchdb,georgemarshall/bigcouch,bittorrent/bigcouch,davidcoallier/bigcouch,kofron/bigcouch,linearregression/bigcouch,andrejb/cloudant_bigcouch,davidcoallier/bigcouch,fkaempfer/couchdb,andrejb/cloudant_bigcouch,Asquera/bigcouch,PerilousApricot/bigcouch,benoitc/bigcouch,andrejb/cloudant_bigcouch,benoitc/bigcouch,bittorrent/bigcouch,KlausTrainer/bigcouch,benoitc/couchit_experiment,georgemarshall/bigcouch,georgemarshall/bigcouch,cloudant/bigcouch,PerilousApricot/bigcouch,davidcoallier/bigcouch,fkaempfer/couchdb
|
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
(function($) {
$.futon = $.futon || {};
$.extend($.futon, {
// Page class for browse/index.html
    // Page controller for browse/index.html: paged listing of all databases.
    CouchIndexPage: function() {
      // NOTE(review): "page" is assigned without var and becomes a global;
      // other handlers in this file reference it — confirm before scoping.
      page = this;
      $.futon.storage.declare("per_page", {defaultValue: 10});
      // Opens the "create database" dialog; on success redirects to the new db.
      this.addDatabase = function() {
        $.showDialog("dialog/_create_database.html", {
          submit: function(data, callback) {
            if (!data.name || data.name.length == 0) {
              callback({name: "Please enter a name."});
              return;
            }
            $.couch.db(data.name).create({
              error: function(status, id, reason) { callback({name: reason}) },
              success: function(resp) {
                location.href = "database.html?" + encodeURIComponent(data.name);
                callback();
              }
            });
          }
        });
        return false;
      }
      // Renders one page of the database table starting at the given offset.
      // Size / doc count / update seq cells are filled asynchronously per db.
      this.updateDatabaseListing = function(offset) {
        offset |= 0;
        var maxPerPage = parseInt($("#perpage").val(), 10);
        $.couch.allDbs({
          success: function(dbs) {
            $("#paging a").unbind();
            $("#databases tbody.content").empty();
            var dbsOnPage = dbs.slice(offset, offset + maxPerPage);
            $.each(dbsOnPage, function(idx, dbName) {
              $("#databases tbody.content").append("<tr>" +
                "<th><a href='database.html?" + encodeURIComponent(dbName) + "'>" +
                dbName + "</a></th>" +
                "<td class='size'></td><td class='count'></td>" +
                "<td class='seq'></td></tr>");
              // fetch per-database details and fill the row created above
              $.couch.db(dbName).info({
                success: function(info) {
                  $("#databases tbody.content tr:eq(" + idx + ")")
                    .find("td.size").text($.futon.formatSize(info.disk_size)).end()
                    .find("td.count").text(info.doc_count).end()
                    .find("td.seq").text(info.update_seq);
                }
              });
            });
            $("#databases tbody tr:odd").addClass("odd");
            // wire up prev/next paging links only when there is somewhere to go
            if (offset > 0) {
              $("#paging a.prev").attr("href", "#" + (offset - maxPerPage)).click(function() {
                page.updateDatabaseListing(offset - maxPerPage);
              });
            } else {
              $("#paging a.prev").removeAttr("href");
            }
            if (offset + maxPerPage < dbs.length) {
              $("#paging a.next").attr("href", "#" + (offset + maxPerPage)).click(function() {
                page.updateDatabaseListing(offset + maxPerPage);
              });
            } else {
              $("#paging a.next").removeAttr("href");
            }
            var firstNum = offset + 1;
            var lastNum = firstNum + dbsOnPage.length - 1;
            $("#databases tbody.footer tr td span").text(
              "Showing " + firstNum + "-" + lastNum + " of " + dbs.length +
              " databases");
          }
        });
      }
    },
// Page class for browse/database.html
CouchDatabasePage: function() {
var urlParts = location.search.substr(1).split("/");
var dbName = decodeURIComponent(urlParts.shift());
$.futon.storage.declareWithPrefix(dbName + ".", {
desc: {},
language: {defaultValue: "javascript"},
map_fun: {defaultValue: ""},
reduce_fun: {defaultValue: ""},
reduce: {},
group_level: {defaultValue: 100},
per_page: {defaultValue: 10},
view: {defaultValue: ""}
});
var viewName = (urlParts.length > 0) ? urlParts.join("/") : null;
if (viewName) {
$.futon.storage.set("view", viewName);
} else {
viewName = $.futon.storage.get("view");
if (viewName) {
this.redirecting = true;
location.href = "database.html?" + encodeURIComponent(dbName) +
"/" + viewName;
}
}
var db = $.couch.db(dbName);
this.dbName = dbName;
this.viewName = viewName;
this.viewLanguage = "javascript";
this.db = db;
this.isDirty = false;
this.isTempView = viewName == "_temp_view";
page = this;
var templates = {
javascript: "function(doc) {\n emit(null, doc);\n}",
python: "def fun(doc):\n yield None, doc",
ruby: "lambda {|doc|\n emit(nil, doc);\n}"
}
this.newDocument = function() {
location.href = "document.html?" + encodeURIComponent(db.name);
}
this.compactAndCleanup = function() {
$.showDialog("dialog/_compact_cleanup.html", {
submit: function(data, callback) {
switch (data.action) {
case "compact_database":
db.compact({success: function(resp) { callback() }});
break;
case "compact_views":
var groupname = page.viewName.substring(8,
page.viewName.indexOf("/_view"));
db.compactView(groupname, {success: function(resp) { callback() }});
break;
case "view_cleanup":
db.viewCleanup({success: function(resp) { callback() }});
break;
}
}
});
}
this.deleteDatabase = function() {
$.showDialog("dialog/_delete_database.html", {
submit: function(data, callback) {
db.drop({
success: function(resp) {
callback();
location.href = "index.html";
if (window !== null) {
$("#dbs li").filter(function(index) {
return $("a", this).text() == dbName;
}).remove();
$.futon.navigation.removeDatabase(dbName);
}
}
});
}
});
}
this.databaseSecurity = function() {
$.showDialog("dialog/_database_security.html", {
load : function(d) {
db.getDbProperty("_security", {
success: function(r) {
["admin", "reader"].forEach(function(key) {
var names = [];
var roles = [];
if (r && typeof r[key + "s"] === "object") {
if ($.isArray(r[key + "s"]["names"])) {
names = r[key + "s"]["names"];
}
if ($.isArray(r[key + "s"]["roles"])) {
roles = r[key + "s"]["roles"];
}
}
$("input[name=" + key + "_names]", d).val(JSON.stringify(names));
$("input[name=" + key + "_roles]", d).val(JSON.stringify(roles));
});
}
});
},
// maybe this should be 2 forms
submit: function(data, callback) {
var errors = {};
var secObj = {
admins: {
names: [],
roles: []
},
readers: {
names: [],
roles: []
}
};
["admin", "reader"].forEach(function(key) {
var names, roles;
try {
names = JSON.parse(data[key + "_names"]);
} catch(e) { }
try {
roles = JSON.parse(data[key + "_roles"]);
} catch(e) { }
if ($.isArray(names)) {
secObj[key + "s"]["names"] = names;
} else {
errors[key + "_names"] = "The " + key +
" names must be an array of strings";
}
if ($.isArray(roles)) {
secObj[key + "s"]["roles"] = roles;
} else {
errors[key + "_roles"] = "The " + key +
" roles must be an array of strings";
}
});
if ($.isEmptyObject(errors)) {
db.setDbProperty("_security", secObj);
}
callback(errors);
}
});
}
this.populateViewEditor = function() {
if (viewName.match(/^_design\//)) {
page.revertViewChanges(function() {
var dirtyTimeout = null;
function updateDirtyState() {
clearTimeout(dirtyTimeout);
dirtyTimeout = setTimeout(function() {
var buttons = $("#viewcode button.save, #viewcode button.revert");
var viewCode = {
map: $("#viewcode_map").val(),
reduce: $("#viewcode_reduce").val()
};
$("#reduce, #grouplevel").toggle(!!viewCode.reduce);
page.isDirty = (viewCode.map != page.storedViewCode.map)
|| (viewCode.reduce != (page.storedViewCode.reduce || ""))
|| page.viewLanguage != page.storedViewLanguage;
if (page.isDirty) {
buttons.removeAttr("disabled");
} else {
buttons.attr("disabled", "disabled");
}
}, 100);
}
$("#viewcode textarea").enableTabInsertion()
.bind("input", updateDirtyState);
if ($.browser.msie || $.browser.safari) {
$("#viewcode textarea").bind("paste", updateDirtyState)
.bind("change", updateDirtyState)
.bind("keydown", updateDirtyState)
.bind("keypress", updateDirtyState)
.bind("keyup", updateDirtyState)
.bind("textInput", updateDirtyState);
}
$("#language").change(updateDirtyState);
page.updateDocumentListing();
});
} else if (viewName == "_temp_view") {
$("#viewcode textarea").enableTabInsertion();
page.viewLanguage = $.futon.storage.get("language");
page.updateViewEditor(
$.futon.storage.get("map_fun", templates[page.viewLanguage]),
$.futon.storage.get("reduce_fun")
);
} else {
$("#grouplevel, #reduce").hide();
page.updateDocumentListing();
}
page.populateLanguagesMenu();
if (this.isTempView) {
$("#tempwarn").show();
}
}
// Populate the languages dropdown, and listen to selection changes
this.populateLanguagesMenu = function() {
var all_langs = {};
fill_language = function() {
var select = $("#language");
for (var language in all_langs) {
var option = $(document.createElement("option"))
.attr("value", language).text(language)
.appendTo(select);
}
if (select[0].options.length == 1) {
select[0].disabled = true;
} else {
select[0].disabled = false;
select.val(page.viewLanguage);
select.change(function() {
var language = $("#language").val();
if (language != page.viewLanguage) {
var mapFun = $("#viewcode_map").val();
if (mapFun == "" || mapFun == templates[page.viewLanguage]) {
// no edits made, so change to the new default
$("#viewcode_map").val(templates[language]);
}
page.viewLanguage = language;
$("#viewcode_map")[0].focus();
}
return false;
});
}
}
$.couch.config({
success: function(resp) {
for (var language in resp) {
all_langs[language] = resp[language];
}
$.couch.config({
success: function(resp) {
for (var language in resp) {
all_langs[language] = resp[language];
}
fill_language();
}
}, "native_query_servers");
},
error : function() {}
}, "query_servers");
}
this.populateViewsMenu = function() {
var select = $("#switch select");
db.allDocs({startkey: "_design/", endkey: "_design0",
include_docs: true,
success: function(resp) {
select[0].options.length = 3;
for (var i = 0; i < resp.rows.length; i++) {
var doc = resp.rows[i].doc;
var optGroup = $(document.createElement("optgroup"))
.attr("label", doc._id.substr(8)).appendTo(select);
var viewNames = [];
for (var name in doc.views) {
viewNames.push(name);
}
viewNames.sort();
for (var j = 0; j < viewNames.length; j++) {
var path = $.couch.encodeDocId(doc._id) + "/_view/" +
encodeURIComponent(viewNames[j]);
var option = $(document.createElement("option"))
.attr("value", path).text(viewNames[j]).appendTo(optGroup);
if (path == viewName) {
option[0].selected = true;
}
}
}
}
});
if (!viewName.match(/^_design\//)) {
$.each(["_all_docs", "_design_docs", "_temp_view"], function(idx, name) {
if (viewName == name) {
select[0].options[idx].selected = true;
}
});
}
}
this.revertViewChanges = function(callback) {
if (!page.storedViewCode) {
var viewNameParts = viewName.split("/");
var designDocId = decodeURIComponent(viewNameParts[1]);
var localViewName = decodeURIComponent(viewNameParts[3]);
db.openDoc("_design/" + designDocId, {
error: function(status, error, reason) {
if (status == 404) {
$.futon.storage.del("view");
location.href = "database.html?" + encodeURIComponent(db.name);
}
},
success: function(resp) {
if(!resp.views || !resp.views[localViewName]) {
$.futon.storage.del("view");
location.href = "database.html?" + encodeURIComponent(db.name);
}
var viewCode = resp.views[localViewName];
page.viewLanguage = resp.language || "javascript";
$("#language").val(page.viewLanguage);
page.updateViewEditor(viewCode.map, viewCode.reduce || "");
$("#viewcode button.revert, #viewcode button.save").attr("disabled", "disabled");
page.storedViewCode = viewCode;
page.storedViewLanguage = page.viewLanguage;
if (callback) callback();
}
}, {async: false});
} else {
page.updateViewEditor(page.storedViewCode.map,
page.storedViewCode.reduce || "");
page.viewLanguage = page.storedViewLanguage;
$("#language").val(page.viewLanguage);
$("#viewcode button.revert, #viewcode button.save").attr("disabled", "disabled");
page.isDirty = false;
if (callback) callback();
}
}
this.updateViewEditor = function(mapFun, reduceFun) {
if (!mapFun) return;
$("#viewcode_map").val(mapFun);
$("#viewcode_reduce").val(reduceFun);
var lines = Math.max(
mapFun.split("\n").length,
reduceFun.split("\n").length
);
$("#reduce, #grouplevel").toggle(!!reduceFun);
$("#viewcode textarea").attr("rows", Math.min(15, Math.max(3, lines)));
}
this.saveViewAs = function() {
if (viewName && /^_design/.test(viewName)) {
var viewNameParts = viewName.split("/");
var designDocId = decodeURIComponent(viewNameParts[1]);
var localViewName = decodeURIComponent(viewNameParts[3]);
} else {
var designDocId = "", localViewName = "";
}
$.showDialog("dialog/_save_view_as.html", {
load: function(elem) {
$("#input_docid", elem).val(designDocId).suggest(function(text, callback) {
db.allDocs({
limit: 10, startkey: "_design/" + text, endkey: "_design0",
success: function(docs) {
var matches = [];
for (var i = 0; i < docs.rows.length; i++) {
var docName = docs.rows[i].id.substr(8);
if (docName.indexOf(text) == 0) {
matches[i] = docName;
}
}
callback(matches);
}
});
});
$("#input_name", elem).val(localViewName).suggest(function(text, callback) {
db.openDoc("_design/" + $("#input_docid").val(), {
error: function() {}, // ignore
success: function(doc) {
var matches = [];
if (!doc.views) return;
for (var viewName in doc.views) {
if (viewName.indexOf(text) == 0) {
matches.push(viewName);
}
}
callback(matches);
}
});
});
},
submit: function(data, callback) {
if (!data.docid || !data.name) {
var errors = {};
if (!data.docid) errors.docid = "Please enter a document ID";
if (!data.name) errors.name = "Please enter a view name";
callback(errors);
} else {
var viewCode = {
map: $("#viewcode_map").val(),
reduce: $("#viewcode_reduce").val() || undefined
};
var docId = ["_design", data.docid].join("/");
function save(doc) {
if (!doc) {
doc = {_id: docId, language: page.viewLanguage};
} else {
var numViews = 0;
for (var viewName in (doc.views || {})) {
if (viewName != data.name) numViews++;
}
if (numViews > 0 && page.viewLanguage != doc.language) {
callback({
docid: "Cannot save to " + data.docid +
" because its language is \"" + doc.language +
"\", not \"" + page.viewLanguage + "\"."
});
return;
}
doc.language = page.viewLanguage;
}
if (doc.views === undefined) doc.views = {};
doc.views[data.name] = viewCode;
db.saveDoc(doc, {
success: function(resp) {
callback();
page.isDirty = false;
location.href = "database.html?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(doc._id) +
"/_view/" + encodeURIComponent(data.name);
}
});
}
db.openDoc(docId, {
error: function(status, error, reason) {
if (status == 404) save(null);
else alert(reason);
},
success: function(doc) {
save(doc);
}
});
}
}
});
}
this.saveViewChanges = function() {
var viewNameParts = viewName.split("/");
var designDocId = decodeURIComponent(viewNameParts[1]);
var localViewName = decodeURIComponent(viewNameParts[3]);
db.openDoc("_design/" + designDocId, {
success: function(doc) {
var numViews = 0;
for (var viewName in (doc.views || {})) {
if (viewName != localViewName) numViews++;
}
if (numViews > 0 && page.viewLanguage != doc.language) {
alert("Cannot save view because the design document language " +
"is \"" + doc.language + "\", not \"" +
page.viewLanguage + "\".");
return;
}
doc.language = page.viewLanguage;
var viewDef = doc.views[localViewName];
viewDef.map = $("#viewcode_map").val();
viewDef.reduce = $("#viewcode_reduce").val() || undefined;
db.saveDoc(doc, {
success: function(resp) {
page.isDirty = false;
$("#viewcode button.revert, #viewcode button.save")
.attr("disabled", "disabled");
}
});
}
});
}
this.updateDesignDocLink = function() {
if (viewName && /^_design/.test(viewName)) {
var docId = "_design/" + decodeURIComponent(viewName.split("/")[1]);
$("#designdoc-link").attr("href", "document.html?" +
encodeURIComponent(dbName) + "/" + $.couch.encodeDocId(docId)).text(docId);
} else {
$("#designdoc-link").removeAttr("href").text("");
}
}
this.jumpToDocument = function(docId) {
if (docId != "") {
location.href = 'document.html?' + encodeURIComponent(db.name)
+ "/" + $.couch.encodeDocId(docId);
}
}
this.updateDocumentListing = function(options) {
if (options === undefined) options = {};
if (options.limit === undefined) {
var perPage = parseInt($("#perpage").val(), 10)
// Fetch an extra row so we know when we're on the last page for
// reduce views
options.limit = perPage + 1;
} else {
perPage = options.limit - 1;
}
if ($("#documents thead th.key").is(".desc")) {
if (typeof options.descending == 'undefined') options.descending = true;
var descend = true;
$.futon.storage.set("desc", "1");
} else {
var descend = false;
$.futon.storage.del("desc");
}
$("#paging a").unbind();
$("#documents").find("tbody.content").empty().end().show();
page.updateDesignDocLink();
options.success = function(resp) {
if (resp.offset === undefined) {
resp.offset = 0;
}
var descending_reverse = ((options.descending && !descend) || (descend && (options.descending === false)));
var has_reduce_prev = resp.total_rows === undefined && (descending_reverse ? resp.rows.length > perPage : options.startkey !== undefined);
if (descending_reverse && resp.rows) {
resp.rows = resp.rows.reverse();
if (resp.rows.length > perPage) {
resp.rows.push(resp.rows.shift());
}
}
if (resp.rows !== null && (has_reduce_prev || (descending_reverse ?
(resp.total_rows - resp.offset > perPage) :
(resp.offset > 0)))) {
$("#paging a.prev").attr("href", "#" + (resp.offset - perPage)).click(function() {
var opt = {
descending: !descend,
limit: options.limit
};
if (resp.rows.length > 0) {
var firstDoc = resp.rows[0];
opt.startkey = firstDoc.key !== undefined ? firstDoc.key : null;
if (firstDoc.id !== undefined) {
opt.startkey_docid = firstDoc.id;
}
opt.skip = 1;
}
page.updateDocumentListing(opt);
return false;
});
} else {
$("#paging a.prev").removeAttr("href");
}
var has_reduce_next = resp.total_rows === undefined && (descending_reverse ? options.startkey !== undefined : resp.rows.length > perPage);
if (resp.rows !== null && (has_reduce_next || (descending_reverse ?
(resp.offset - resp.total_rows < perPage) :
(resp.total_rows - resp.offset > perPage)))) {
$("#paging a.next").attr("href", "#" + (resp.offset + perPage)).click(function() {
var opt = {
descending: descend,
limit: options.limit
};
if (resp.rows.length > 0) {
var lastDoc = resp.rows[Math.min(perPage, resp.rows.length) - 1];
opt.startkey = lastDoc.key !== undefined ? lastDoc.key : null;
if (lastDoc.id !== undefined) {
opt.startkey_docid = lastDoc.id;
}
opt.skip = 1;
}
page.updateDocumentListing(opt);
return false;
});
} else {
$("#paging a.next").removeAttr("href");
}
for (var i = 0; i < Math.min(perPage, resp.rows.length); i++) {
var row = resp.rows[i];
var tr = $("<tr></tr>");
var key = "null";
if (row.key !== null) {
key = $.futon.formatJSON(row.key, {indent: 0, linesep: ""});
}
if (row.id) {
$("<td class='key'><a href='document.html?" + encodeURIComponent(db.name) +
"/" + $.couch.encodeDocId(row.id) + "'><strong></strong><br>" +
"<span class='docid'>ID: " + row.id + "</span></a></td>")
.find("strong").text(key).end()
.appendTo(tr);
} else {
$("<td class='key'><strong></strong></td>")
.find("strong").text(key).end()
.appendTo(tr);
}
var value = "null";
if (row.value !== null) {
value = $.futon.formatJSON(row.value, {
html: true, indent: 0, linesep: "", quoteKeys: false
});
}
$("<td class='value'><div></div></td>").find("div").html(value).end()
.appendTo(tr).dblclick(function() {
location.href = this.previousSibling.firstChild.href;
});
tr.appendTo("#documents tbody.content");
}
var firstNum = 1;
var lastNum = totalNum = Math.min(perPage, resp.rows.length);
if (resp.total_rows != null) {
if (descending_reverse) {
lastNum = Math.min(resp.total_rows, resp.total_rows - resp.offset);
firstNum = lastNum - totalNum + 1;
} else {
firstNum = Math.min(resp.total_rows, resp.offset + 1);
lastNum = firstNum + totalNum - 1;
}
totalNum = resp.total_rows;
} else {
totalNum = "unknown";
}
$("#paging").show();
$("#documents tbody.footer td span").text(
"Showing " + firstNum + "-" + lastNum + " of " + totalNum +
" row" + (firstNum != lastNum || totalNum == "unknown" ? "s" : ""));
$("#documents tbody tr:odd").addClass("odd");
}
options.error = function(status, error, reason) {
alert("Error: " + error + "\n\n" + reason);
}
if (!viewName || viewName == "_all_docs") {
$("#switch select")[0].selectedIndex = 0;
db.allDocs(options);
} else {
if (viewName == "_temp_view") {
$("#viewcode").show().removeClass("collapsed");
var mapFun = $("#viewcode_map").val();
$.futon.storage.set("map_fun", mapFun);
var reduceFun = $.trim($("#viewcode_reduce").val()) || null;
if (reduceFun) {
$.futon.storage.set("reduce_fun", reduceFun);
if ($("#reduce :checked").length) {
var level = parseInt($("#grouplevel select").val(), 10);
options.group = level > 0;
if (options.group && level < 100) {
options.group_level = level;
}
} else {
options.reduce = false;
}
}
$.futon.storage.set("language", page.viewLanguage);
db.query(mapFun, reduceFun, page.viewLanguage, options);
} else if (viewName == "_design_docs") {
options.startkey = options.descending ? "_design0" : "_design";
options.endkey = options.descending ? "_design" : "_design0";
db.allDocs(options);
} else {
$("button.compactview").show();
$("#viewcode").show();
var currentMapCode = $("#viewcode_map").val();
var currentReduceCode = $.trim($("#viewcode_reduce").val()) || null;
if (currentReduceCode) {
if ($("#reduce :checked").length) {
var level = parseInt($("#grouplevel select").val(), 10);
options.group = level > 0;
if (options.group && level < 100) {
options.group_level = level;
}
} else {
options.reduce = false;
}
}
if (page.isDirty) {
db.query(currentMapCode, currentReduceCode, page.viewLanguage, options);
} else {
var viewParts = viewName.split('/');
db.view(viewParts[1] + "/" + viewParts[3], options);
}
}
}
}
window.onbeforeunload = function() {
$("#switch select").val(viewName);
if (page.isDirty) {
return "You've made changes to the view code that have not been " +
"saved yet.";
}
}
},
// Page class for browse/document.html
CouchDocumentPage: function() {
var urlParts = location.search.substr(1).split("/");
var dbName = decodeURIComponent(urlParts.shift());
if (urlParts.length) {
var idParts = urlParts.join("/").split("@", 2);
var docId = decodeURIComponent(idParts[0]);
var docRev = (idParts.length > 1) ? idParts[1] : null;
this.isNew = false;
} else {
var docId = $.couch.newUUID();
var docRev = null;
this.isNew = true;
}
var db = $.couch.db(dbName);
$.futon.storage.declare("tab", {defaultValue: "tabular", scope: "cookie"});
this.dbName = dbName;
this.db = db;
this.docId = docId;
this.doc = null;
this.isDirty = this.isNew;
page = this;
this.activateTabularView = function() {
if ($("#fields tbody.source textarea").length > 0)
return;
$.futon.storage.set("tab", "tabular");
$("#tabs li").removeClass("active").filter(".tabular").addClass("active");
$("#fields thead th:first").text("Field").attr("colspan", 1).next().show();
$("#fields tbody.content").show();
$("#fields tbody.source").hide();
return false;
}
this.activateSourceView = function() {
$.futon.storage.set("tab", "source");
$("#tabs li").removeClass("active").filter(".source").addClass("active");
$("#fields thead th:first").text("Source").attr("colspan", 2).next().hide();
$("#fields tbody.content").hide();
$("#fields tbody.source").find("td").each(function() {
$(this).html($("<pre></pre>").html($.futon.formatJSON(page.doc, {html: true})))
.makeEditable({allowEmpty: false,
createInput: function(value) {
var rows = value.split("\n").length;
return $("<textarea rows='" + rows + "' cols='80' spellcheck='false'></textarea>").enableTabInsertion();
},
prepareInput: function(input) {
$(input).makeResizable({vertical: true});
},
end: function() {
$(this).html($("<pre></pre>").html($.futon.formatJSON(page.doc, {html: true})));
},
accept: function(newValue) {
page.doc = JSON.parse(newValue);
page.isDirty = true;
page.updateFieldListing(true);
},
populate: function(value) {
return $.futon.formatJSON(page.doc);
},
validate: function(value) {
try {
var doc = JSON.parse(value);
if (typeof doc != "object")
throw new SyntaxError("Please enter a valid JSON document (for example, {}).");
return true;
} catch (err) {
var msg = err.message;
if (msg == "parseJSON" || msg == "JSON.parse") {
msg = "There is a syntax error in the document.";
}
$("<div class='error'></div>").text(msg).appendTo(this);
return false;
}
}
});
}).end().show();
return false;
}
this.addField = function() {
if (!$("#fields tbody.content:visible").length) {
location.hash = "#tabular";
page.activateTabularView();
}
var fieldName = "unnamed";
var fieldIdx = 1;
while (page.doc.hasOwnProperty(fieldName)) {
fieldName = "unnamed " + fieldIdx++;
}
page.doc[fieldName] = null;
var row = _addRowForField(page.doc, fieldName);
page.isDirty = true;
row.find("th b").dblclick();
}
var _sortFields = function(a, b) {
var a0 = a.charAt(0), b0 = b.charAt(0);
if (a0 == "_" && b0 != "_") {
return -1;
} else if (a0 != "_" && b0 == "_") {
return 1;
} else if (a == "_attachments" || b == "_attachments") {
return a0 == "_attachments" ? 1 : -1;
} else {
return a < b ? -1 : a != b ? 1 : 0;
}
}
this.updateFieldListing = function(noReload) {
$("#fields tbody.content").empty();
function handleResult(doc, revs) {
page.doc = doc;
var propNames = [];
for (var prop in doc) {
propNames.push(prop);
}
// Order properties alphabetically, but put internal fields first
propNames.sort(_sortFields);
for (var pi = 0; pi < propNames.length; pi++) {
_addRowForField(doc, propNames[pi]);
}
if (revs.length > 1) {
var currentIndex = 0;
for (var i = 0; i < revs.length; i++) {
if (revs[i].rev == doc._rev) {
currentIndex = i;
break;
}
}
if (currentIndex < revs.length - 1) {
var prevRev = revs[currentIndex + 1].rev;
$("#paging a.prev").attr("href", "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(docId) + "@" + prevRev);
}
if (currentIndex > 0) {
var nextRev = revs[currentIndex - 1].rev;
$("#paging a.next").attr("href", "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(docId) + "@" + nextRev);
}
$("#fields tbody.footer td span").text("Showing revision " +
(revs.length - currentIndex) + " of " + revs.length);
}
if ($.futon.storage.get("tab") == "source") {
page.activateSourceView();
}
}
if (noReload) {
handleResult(page.doc, []);
return;
}
if (!page.isNew) {
db.openDoc(docId, {revs_info: true,
success: function(doc) {
var revs = doc._revs_info || [];
delete doc._revs_info;
if (docRev != null) {
db.openDoc(docId, {rev: docRev,
error: function(status, error, reason) {
alert("The requested revision was not found. You will " +
"be redirected back to the latest revision.");
location.href = "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(docId);
},
success: function(doc) {
handleResult(doc, revs);
}
});
} else {
handleResult(doc, revs);
}
}
});
} else {
handleResult({_id: docId}, []);
$("#fields tbody td").dblclick();
}
}
this.deleteDocument = function() {
$.showDialog("dialog/_delete_document.html", {
submit: function(data, callback) {
db.removeDoc(page.doc, {
success: function(resp) {
callback();
location.href = "database.html?" + encodeURIComponent(dbName);
}
});
}
});
}
this.saveDocument = function() {
db.saveDoc(page.doc, {
error: function(status, error, reason) {
alert("Error: " + error + "\n\n" + reason);
},
success: function(resp) {
page.isDirty = false;
location.href = "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(page.docId);
}
});
}
this.uploadAttachment = function() {
if (page.isDirty) {
alert("You need to save or revert any changes you have made to the " +
"document before you can attach a new file.");
return false;
}
$.showDialog("dialog/_upload_attachment.html", {
load: function(elem) {
$("input[name='_rev']", elem).val(page.doc._rev);
},
submit: function(data, callback) {
if (!data._attachments || data._attachments.length == 0) {
callback({_attachments: "Please select a file to upload."});
return;
}
var form = $("#upload-form");
form.find("#progress").css("visibility", "visible");
form.ajaxSubmit({
url: db.uri + $.couch.encodeDocId(page.docId),
success: function(resp) {
form.find("#progress").css("visibility", "hidden");
page.isDirty = false;
location.href = "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(page.docId);
}
});
}
});
}
window.onbeforeunload = function() {
if (page.isDirty) {
return "You've made changes to this document that have not been " +
"saved yet.";
}
}
function _addRowForField(doc, fieldName) {
var row = $("<tr><th></th><td></td></tr>")
.find("th").append($("<b></b>").text(fieldName)).end()
.appendTo("#fields tbody.content");
if (fieldName == "_attachments") {
row.find("td").append(_renderAttachmentList(doc[fieldName]));
} else {
row.find("td").append(_renderValue(doc[fieldName]));
_initKey(doc, row, fieldName);
_initValue(doc, row, fieldName);
}
$("#fields tbody.content tr").removeClass("odd").filter(":odd").addClass("odd");
row.data("name", fieldName);
return row;
}
function _initKey(doc, row, fieldName) {
if (fieldName == "_id" || fieldName == "_rev") {
return;
}
var cell = row.find("th");
$("<button type='button' class='delete' title='Delete field'></button>").click(function() {
delete doc[fieldName];
row.remove();
page.isDirty = true;
$("#fields tbody.content tr").removeClass("odd").filter(":odd").addClass("odd");
}).prependTo(cell);
cell.find("b").makeEditable({allowEmpty: false,
accept: function(newName, oldName) {
doc[newName] = doc[oldName];
delete doc[oldName];
row.data("name", newName);
$(this).text(newName);
page.isDirty = true;
},
begin: function() {
row.find("th button.delete").hide();
return true;
},
end: function(keyCode) {
row.find("th button.delete").show();
if (keyCode == 9) { // tab, move to editing the value
row.find("td").dblclick();
}
},
validate: function(newName, oldName) {
$("div.error", this).remove();
if (newName != oldName && doc[newName] !== undefined) {
$("<div class='error'>Already have field with that name.</div>")
.appendTo(this);
return false;
}
return true;
}
});
}
function _initValue(doc, row, fieldName) {
if ((fieldName == "_id" && !page.isNew) || fieldName == "_rev") {
return;
}
row.find("td").makeEditable({acceptOnBlur: false, allowEmpty: true,
createInput: function(value) {
value = doc[row.data("name")];
var elem = $(this);
if (elem.find("dl").length > 0 ||
elem.find("code").is(".array, .object") ||
typeof(value) == "string" && (value.length > 60 || value.match(/\n/))) {
return $("<textarea rows='1' cols='40' spellcheck='false'></textarea>");
}
return $("<input type='text' spellcheck='false'>");
},
end: function() {
$(this).children().remove();
$(this).append(_renderValue(doc[row.data("name")]));
},
prepareInput: function(input) {
if ($(input).is("textarea")) {
var height = Math.min(input.scrollHeight, document.body.clientHeight - 100);
$(input).height(height).makeResizable({vertical: true}).enableTabInsertion();
}
},
accept: function(newValue) {
var fieldName = row.data("name");
try {
doc[fieldName] = JSON.parse(newValue);
} catch (err) {
doc[fieldName] = newValue;
}
page.isDirty = true;
if (fieldName == "_id") {
page.docId = page.doc._id = doc[fieldName];
$("h1 strong").text(page.docId);
}
},
populate: function(value) {
value = doc[row.data("name")];
if (typeof(value) == "string") {
return value;
}
return $.futon.formatJSON(value);
},
validate: function(value) {
$("div.error", this).remove();
try {
var parsed = JSON.parse(value);
if (row.data("name") == "_id" && typeof(parsed) != "string") {
$("<div class='error'>The document ID must be a string.</div>")
.appendTo(this);
return false;
}
return true;
} catch (err) {
return true;
}
}
});
}
function _renderValue(value) {
function isNullOrEmpty(val) {
if (val == null) return true;
for (var i in val) return false;
return true;
}
function render(val) {
var type = typeof(val);
if (type == "object" && !isNullOrEmpty(val)) {
var list = $("<dl></dl>");
for (var i in val) {
$("<dt></dt>").text(i).appendTo(list);
$("<dd></dd>").append(render(val[i])).appendTo(list);
}
return list;
} else {
var html = $.futon.formatJSON(val, {
html: true,
escapeStrings: false
});
var n = $(html);
if (n.text().length > 140) {
// This code reduces a long string in to a summarized string with a link to expand it.
// Someone, somewhere, is doing something nasty with the event after it leaves these handlers.
// At this time I can't track down the offender, it might actually be a jQuery propogation issue.
var fulltext = n.text();
var mintext = n.text().slice(0, 140);
var e = $('<a href="#expand">...</a>');
var m = $('<a href="#min">X</a>');
var expand = function (evt) {
n.empty();
n.text(fulltext);
n.append(m);
evt.stopPropagation();
evt.stopImmediatePropagation();
evt.preventDefault();
}
var minimize = function (evt) {
n.empty();
n.text(mintext);
// For some reason the old element's handler won't fire after removed and added again.
e = $('<a href="#expand">...</a>');
e.click(expand);
n.append(e);
evt.stopPropagation();
evt.stopImmediatePropagation();
evt.preventDefault();
}
e.click(expand);
n.click(minimize);
n.text(mintext);
n.append(e)
}
return n;
}
}
var elem = render(value);
elem.find("dd:has(dl)").hide().prev("dt").addClass("collapsed");
elem.find("dd:not(:has(dl))").addClass("inline").prev().addClass("inline");
elem.find("dt.collapsed").click(function() {
$(this).toggleClass("collapsed").next().toggle();
});
return elem;
}
function _renderAttachmentList(attachments) {
var ul = $("<ul></ul>").addClass("attachments");
$.each(attachments, function(idx, attachment) {
_renderAttachmentItem(idx, attachment).appendTo(ul);
});
return ul;
}
function _renderAttachmentItem(name, attachment) {
var attachmentHref = db.uri + $.couch.encodeDocId(page.docId)
+ "/" + encodeAttachment(name);
var li = $("<li></li>");
$("<a href='' title='Download file' target='_top'></a>").text(name)
.attr("href", attachmentHref)
.wrapInner("<tt></tt>").appendTo(li);
$("<span>()</span>").text("" + $.futon.formatSize(attachment.length) +
", " + attachment.content_type).addClass("info").appendTo(li);
if (name == "tests.js") {
li.find('span.info').append(', <a href="/_utils/couch_tests.html?'
+ attachmentHref + '">open in test runner</a>');
}
_initAttachmentItem(name, attachment, li);
return li;
}
function _initAttachmentItem(name, attachment, li) {
$("<button type='button' class='delete' title='Delete attachment'></button>").click(function() {
if (!li.siblings("li").length) {
delete page.doc._attachments;
li.parents("tr").remove();
$("#fields tbody.content tr").removeClass("odd").filter(":odd").addClass("odd");
} else {
delete page.doc._attachments[name];
li.remove();
}
page.isDirty = true;
return false;
}).prependTo($("a", li));
}
},
});
function encodeAttachment(name) {
var encoded = [], parts = name.split('/');
for (var i=0; i < parts.length; i++) {
encoded.push(encodeURIComponent(parts[i]));
};
return encoded.join('%2f');
}
})(jQuery);
|
share/www/script/futon.browse.js
|
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
(function($) {
$.futon = $.futon || {};
$.extend($.futon, {
// Page class for browse/index.html
    CouchIndexPage: function() {
      // Lists all databases with client-side paging.
      page = this;  // NOTE(review): `page` appears to be a deliberate shared global — confirm
      $.futon.storage.declare("per_page", {defaultValue: 10});
      // Show the "create database" dialog; redirect to the new db on success.
      this.addDatabase = function() {
        $.showDialog("dialog/_create_database.html", {
          submit: function(data, callback) {
            if (!data.name || data.name.length == 0) {
              callback({name: "Please enter a name."});
              return;
            }
            $.couch.db(data.name).create({
              error: function(status, id, reason) { callback({name: reason}) },
              success: function(resp) {
                location.href = "database.html?" + encodeURIComponent(data.name);
                callback();
              }
            });
          }
        });
        return false;
      }
      // Render one page of the database list starting at `offset`.
      this.updateDatabaseListing = function(offset) {
        offset |= 0;  // coerce undefined/NaN to 0
        var maxPerPage = parseInt($("#perpage").val(), 10);
        $.couch.allDbs({
          success: function(dbs) {
            $("#paging a").unbind();
            $("#databases tbody.content").empty();
            var dbsOnPage = dbs.slice(offset, offset + maxPerPage);
            $.each(dbsOnPage, function(idx, dbName) {
              $("#databases tbody.content").append("<tr>" +
                "<th><a href='database.html?" + encodeURIComponent(dbName) + "'>" +
                dbName + "</a></th>" +
                "<td class='size'></td><td class='count'></td>" +
                "<td class='seq'></td></tr>");
              // Fill in size/doc-count/update-seq asynchronously per db.
              $.couch.db(dbName).info({
                success: function(info) {
                  $("#databases tbody.content tr:eq(" + idx + ")")
                    .find("td.size").text($.futon.formatSize(info.disk_size)).end()
                    .find("td.count").text(info.doc_count).end()
                    .find("td.seq").text(info.update_seq);
                }
              });
            });
            $("#databases tbody tr:odd").addClass("odd");
            if (offset > 0) {
              $("#paging a.prev").attr("href", "#" + (offset - maxPerPage)).click(function() {
                page.updateDatabaseListing(offset - maxPerPage);
              });
            } else {
              $("#paging a.prev").removeAttr("href");
            }
            if (offset + maxPerPage < dbs.length) {
              $("#paging a.next").attr("href", "#" + (offset + maxPerPage)).click(function() {
                page.updateDatabaseListing(offset + maxPerPage);
              });
            } else {
              $("#paging a.next").removeAttr("href");
            }
            var firstNum = offset + 1;
            var lastNum = firstNum + dbsOnPage.length - 1;
            $("#databases tbody.footer tr td span").text(
              "Showing " + firstNum + "-" + lastNum + " of " + dbs.length +
              " databases");
          }
        });
      }
    },
// Page class for browse/database.html
CouchDatabasePage: function() {
var urlParts = location.search.substr(1).split("/");
var dbName = decodeURIComponent(urlParts.shift());
$.futon.storage.declareWithPrefix(dbName + ".", {
desc: {},
language: {defaultValue: "javascript"},
map_fun: {defaultValue: ""},
reduce_fun: {defaultValue: ""},
reduce: {},
group_level: {defaultValue: 100},
per_page: {defaultValue: 10},
view: {defaultValue: ""}
});
var viewName = (urlParts.length > 0) ? urlParts.join("/") : null;
if (viewName) {
$.futon.storage.set("view", viewName);
} else {
viewName = $.futon.storage.get("view");
if (viewName) {
this.redirecting = true;
location.href = "database.html?" + encodeURIComponent(dbName) +
"/" + viewName;
}
}
var db = $.couch.db(dbName);
this.dbName = dbName;
this.viewName = viewName;
this.viewLanguage = "javascript";
this.db = db;
this.isDirty = false;
this.isTempView = viewName == "_temp_view";
page = this;
var templates = {
javascript: "function(doc) {\n emit(null, doc);\n}",
python: "def fun(doc):\n yield None, doc",
ruby: "lambda {|doc|\n emit(nil, doc);\n}"
}
this.newDocument = function() {
location.href = "document.html?" + encodeURIComponent(db.name);
}
this.compactAndCleanup = function() {
$.showDialog("dialog/_compact_cleanup.html", {
submit: function(data, callback) {
switch (data.action) {
case "compact_database":
db.compact({success: function(resp) { callback() }});
break;
case "compact_views":
var groupname = page.viewName.substring(8,
page.viewName.indexOf("/_view"));
db.compactView(groupname, {success: function(resp) { callback() }});
break;
case "view_cleanup":
db.viewCleanup({success: function(resp) { callback() }});
break;
}
}
});
}
this.deleteDatabase = function() {
$.showDialog("dialog/_delete_database.html", {
submit: function(data, callback) {
db.drop({
success: function(resp) {
callback();
location.href = "index.html";
if (window !== null) {
$("#dbs li").filter(function(index) {
return $("a", this).text() == dbName;
}).remove();
$.futon.navigation.removeDatabase(dbName);
}
}
});
}
});
}
this.databaseSecurity = function() {
$.showDialog("dialog/_database_security.html", {
load : function(d) {
db.getDbProperty("_security", {
success: function(r) {
["admin", "reader"].forEach(function(key) {
var names = [];
var roles = [];
if (r && typeof r[key + "s"] === "object") {
if ($.isArray(r[key + "s"]["names"])) {
names = r[key + "s"]["names"];
}
if ($.isArray(r[key + "s"]["roles"])) {
roles = r[key + "s"]["roles"];
}
}
$("input[name=" + key + "_names]", d).val(JSON.stringify(names));
$("input[name=" + key + "_roles]", d).val(JSON.stringify(roles));
});
}
});
},
// maybe this should be 2 forms
submit: function(data, callback) {
var errors = {};
var secObj = {
admins: {
names: [],
roles: []
},
readers: {
names: [],
roles: []
}
};
["admin", "reader"].forEach(function(key) {
var names, roles;
try {
names = JSON.parse(data[key + "_names"]);
} catch(e) { }
try {
roles = JSON.parse(data[key + "_roles"]);
} catch(e) { }
if ($.isArray(names)) {
secObj[key + "s"]["names"] = names;
} else {
errors[key + "_names"] = "The " + key +
" names must be an array of strings";
}
if ($.isArray(roles)) {
secObj[key + "s"]["roles"] = roles;
} else {
errors[key + "_roles"] = "The " + key +
" roles must be an array of strings";
}
});
if ($.isEmptyObject(errors)) {
db.setDbProperty("_security", secObj);
}
callback(errors);
}
});
}
this.populateViewEditor = function() {
if (viewName.match(/^_design\//)) {
page.revertViewChanges(function() {
var dirtyTimeout = null;
function updateDirtyState() {
clearTimeout(dirtyTimeout);
dirtyTimeout = setTimeout(function() {
var buttons = $("#viewcode button.save, #viewcode button.revert");
var viewCode = {
map: $("#viewcode_map").val(),
reduce: $("#viewcode_reduce").val()
};
$("#reduce, #grouplevel").toggle(!!viewCode.reduce);
page.isDirty = (viewCode.map != page.storedViewCode.map)
|| (viewCode.reduce != (page.storedViewCode.reduce || ""))
|| page.viewLanguage != page.storedViewLanguage;
if (page.isDirty) {
buttons.removeAttr("disabled");
} else {
buttons.attr("disabled", "disabled");
}
}, 100);
}
$("#viewcode textarea").enableTabInsertion()
.bind("input", updateDirtyState);
if ($.browser.msie || $.browser.safari) {
$("#viewcode textarea").bind("paste", updateDirtyState)
.bind("change", updateDirtyState)
.bind("keydown", updateDirtyState)
.bind("keypress", updateDirtyState)
.bind("keyup", updateDirtyState)
.bind("textInput", updateDirtyState);
}
$("#language").change(updateDirtyState);
page.updateDocumentListing();
});
} else if (viewName == "_temp_view") {
$("#viewcode textarea").enableTabInsertion();
page.viewLanguage = $.futon.storage.get("language");
page.updateViewEditor(
$.futon.storage.get("map_fun", templates[page.viewLanguage]),
$.futon.storage.get("reduce_fun")
);
} else {
$("#grouplevel, #reduce").hide();
page.updateDocumentListing();
}
page.populateLanguagesMenu();
if (this.isTempView) {
$("#tempwarn").show();
}
}
// Populate the languages dropdown, and listen to selection changes
this.populateLanguagesMenu = function() {
var all_langs = {};
fill_language = function() {
var select = $("#language");
for (var language in all_langs) {
var option = $(document.createElement("option"))
.attr("value", language).text(language)
.appendTo(select);
}
if (select[0].options.length == 1) {
select[0].disabled = true;
} else {
select[0].disabled = false;
select.val(page.viewLanguage);
select.change(function() {
var language = $("#language").val();
if (language != page.viewLanguage) {
var mapFun = $("#viewcode_map").val();
if (mapFun == "" || mapFun == templates[page.viewLanguage]) {
// no edits made, so change to the new default
$("#viewcode_map").val(templates[language]);
}
page.viewLanguage = language;
$("#viewcode_map")[0].focus();
}
return false;
});
}
}
$.couch.config({
success: function(resp) {
for (var language in resp) {
all_langs[language] = resp[language];
}
$.couch.config({
success: function(resp) {
for (var language in resp) {
all_langs[language] = resp[language];
}
fill_language();
}
}, "native_query_servers");
},
error : function() {}
}, "query_servers");
}
this.populateViewsMenu = function() {
var select = $("#switch select");
db.allDocs({startkey: "_design/", endkey: "_design0",
include_docs: true,
success: function(resp) {
select[0].options.length = 3;
for (var i = 0; i < resp.rows.length; i++) {
var doc = resp.rows[i].doc;
var optGroup = $(document.createElement("optgroup"))
.attr("label", doc._id.substr(8)).appendTo(select);
var viewNames = [];
for (var name in doc.views) {
viewNames.push(name);
}
viewNames.sort();
for (var j = 0; j < viewNames.length; j++) {
var path = $.couch.encodeDocId(doc._id) + "/_view/" +
encodeURIComponent(viewNames[j]);
var option = $(document.createElement("option"))
.attr("value", path).text(viewNames[j]).appendTo(optGroup);
if (path == viewName) {
option[0].selected = true;
}
}
}
}
});
if (!viewName.match(/^_design\//)) {
$.each(["_all_docs", "_design_docs", "_temp_view"], function(idx, name) {
if (viewName == name) {
select[0].options[idx].selected = true;
}
});
}
}
this.revertViewChanges = function(callback) {
if (!page.storedViewCode) {
var viewNameParts = viewName.split("/");
var designDocId = decodeURIComponent(viewNameParts[1]);
var localViewName = decodeURIComponent(viewNameParts[3]);
db.openDoc("_design/" + designDocId, {
error: function(status, error, reason) {
if (status == 404) {
$.futon.storage.del("view");
location.href = "database.html?" + encodeURIComponent(db.name);
}
},
success: function(resp) {
if(!resp.views || !resp.views[localViewName]) {
$.futon.storage.del("view");
location.href = "database.html?" + encodeURIComponent(db.name);
}
var viewCode = resp.views[localViewName];
page.viewLanguage = resp.language || "javascript";
$("#language").val(page.viewLanguage);
page.updateViewEditor(viewCode.map, viewCode.reduce || "");
$("#viewcode button.revert, #viewcode button.save").attr("disabled", "disabled");
page.storedViewCode = viewCode;
page.storedViewLanguage = page.viewLanguage;
if (callback) callback();
}
}, {async: false});
} else {
page.updateViewEditor(page.storedViewCode.map,
page.storedViewCode.reduce || "");
page.viewLanguage = page.storedViewLanguage;
$("#language").val(page.viewLanguage);
$("#viewcode button.revert, #viewcode button.save").attr("disabled", "disabled");
page.isDirty = false;
if (callback) callback();
}
}
this.updateViewEditor = function(mapFun, reduceFun) {
if (!mapFun) return;
$("#viewcode_map").val(mapFun);
$("#viewcode_reduce").val(reduceFun);
var lines = Math.max(
mapFun.split("\n").length,
reduceFun.split("\n").length
);
$("#reduce, #grouplevel").toggle(!!reduceFun);
$("#viewcode textarea").attr("rows", Math.min(15, Math.max(3, lines)));
}
this.saveViewAs = function() {
if (viewName && /^_design/.test(viewName)) {
var viewNameParts = viewName.split("/");
var designDocId = decodeURIComponent(viewNameParts[1]);
var localViewName = decodeURIComponent(viewNameParts[3]);
} else {
var designDocId = "", localViewName = "";
}
$.showDialog("dialog/_save_view_as.html", {
load: function(elem) {
$("#input_docid", elem).val(designDocId).suggest(function(text, callback) {
db.allDocs({
limit: 10, startkey: "_design/" + text, endkey: "_design0",
success: function(docs) {
var matches = [];
for (var i = 0; i < docs.rows.length; i++) {
var docName = docs.rows[i].id.substr(8);
if (docName.indexOf(text) == 0) {
matches[i] = docName;
}
}
callback(matches);
}
});
});
$("#input_name", elem).val(localViewName).suggest(function(text, callback) {
db.openDoc("_design/" + $("#input_docid").val(), {
error: function() {}, // ignore
success: function(doc) {
var matches = [];
if (!doc.views) return;
for (var viewName in doc.views) {
if (viewName.indexOf(text) == 0) {
matches.push(viewName);
}
}
callback(matches);
}
});
});
},
submit: function(data, callback) {
if (!data.docid || !data.name) {
var errors = {};
if (!data.docid) errors.docid = "Please enter a document ID";
if (!data.name) errors.name = "Please enter a view name";
callback(errors);
} else {
var viewCode = {
map: $("#viewcode_map").val(),
reduce: $("#viewcode_reduce").val() || undefined
};
var docId = ["_design", data.docid].join("/");
function save(doc) {
if (!doc) {
doc = {_id: docId, language: page.viewLanguage};
} else {
var numViews = 0;
for (var viewName in (doc.views || {})) {
if (viewName != data.name) numViews++;
}
if (numViews > 0 && page.viewLanguage != doc.language) {
callback({
docid: "Cannot save to " + data.docid +
" because its language is \"" + doc.language +
"\", not \"" + page.viewLanguage + "\"."
});
return;
}
doc.language = page.viewLanguage;
}
if (doc.views === undefined) doc.views = {};
doc.views[data.name] = viewCode;
db.saveDoc(doc, {
success: function(resp) {
callback();
page.isDirty = false;
location.href = "database.html?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(doc._id) +
"/_view/" + encodeURIComponent(data.name);
}
});
}
db.openDoc(docId, {
error: function(status, error, reason) {
if (status == 404) save(null);
else alert(reason);
},
success: function(doc) {
save(doc);
}
});
}
}
});
}
this.saveViewChanges = function() {
var viewNameParts = viewName.split("/");
var designDocId = decodeURIComponent(viewNameParts[1]);
var localViewName = decodeURIComponent(viewNameParts[3]);
db.openDoc("_design/" + designDocId, {
success: function(doc) {
var numViews = 0;
for (var viewName in (doc.views || {})) {
if (viewName != localViewName) numViews++;
}
if (numViews > 0 && page.viewLanguage != doc.language) {
alert("Cannot save view because the design document language " +
"is \"" + doc.language + "\", not \"" +
page.viewLanguage + "\".");
return;
}
doc.language = page.viewLanguage;
var viewDef = doc.views[localViewName];
viewDef.map = $("#viewcode_map").val();
viewDef.reduce = $("#viewcode_reduce").val() || undefined;
db.saveDoc(doc, {
success: function(resp) {
page.isDirty = false;
$("#viewcode button.revert, #viewcode button.save")
.attr("disabled", "disabled");
}
});
}
});
}
this.updateDesignDocLink = function() {
if (viewName && /^_design/.test(viewName)) {
var docId = "_design/" + decodeURIComponent(viewName.split("/")[1]);
$("#designdoc-link").attr("href", "document.html?" +
encodeURIComponent(dbName) + "/" + $.couch.encodeDocId(docId)).text(docId);
} else {
$("#designdoc-link").removeAttr("href").text("");
}
}
this.jumpToDocument = function(docId) {
if (docId != "") {
location.href = 'document.html?' + encodeURIComponent(db.name)
+ "/" + $.couch.encodeDocId(docId);
}
}
this.updateDocumentListing = function(options) {
if (options === undefined) options = {};
if (options.limit === undefined) {
var perPage = parseInt($("#perpage").val(), 10)
// Fetch an extra row so we know when we're on the last page for
// reduce views
options.limit = perPage + 1;
} else {
perPage = options.limit - 1;
}
if ($("#documents thead th.key").is(".desc")) {
if (typeof options.descending == 'undefined') options.descending = true;
var descend = true;
$.futon.storage.set("desc", "1");
} else {
var descend = false;
$.futon.storage.del("desc");
}
$("#paging a").unbind();
$("#documents").find("tbody.content").empty().end().show();
page.updateDesignDocLink();
options.success = function(resp) {
if (resp.offset === undefined) {
resp.offset = 0;
}
var descending_reverse = ((options.descending && !descend) || (descend && (options.descending === false)));
var has_reduce_prev = resp.total_rows === undefined && (descending_reverse ? resp.rows.length > perPage : options.startkey !== undefined);
if (descending_reverse && resp.rows) {
resp.rows = resp.rows.reverse();
if (resp.rows.length > perPage) {
resp.rows.push(resp.rows.shift());
}
}
if (resp.rows !== null && (has_reduce_prev || (descending_reverse ?
(resp.total_rows - resp.offset > perPage) :
(resp.offset > 0)))) {
$("#paging a.prev").attr("href", "#" + (resp.offset - perPage)).click(function() {
var opt = {
descending: !descend,
limit: options.limit
};
if (resp.rows.length > 0) {
var firstDoc = resp.rows[0];
opt.startkey = firstDoc.key !== undefined ? firstDoc.key : null;
if (firstDoc.id !== undefined) {
opt.startkey_docid = firstDoc.id;
}
opt.skip = 1;
}
page.updateDocumentListing(opt);
return false;
});
} else {
$("#paging a.prev").removeAttr("href");
}
var has_reduce_next = resp.total_rows === undefined && (descending_reverse ? options.startkey !== undefined : resp.rows.length > perPage);
if (resp.rows !== null && (has_reduce_next || (descending_reverse ?
(resp.offset - resp.total_rows < perPage) :
(resp.total_rows - resp.offset > perPage)))) {
$("#paging a.next").attr("href", "#" + (resp.offset + perPage)).click(function() {
var opt = {
descending: descend,
limit: options.limit
};
if (resp.rows.length > 0) {
var lastDoc = resp.rows[Math.min(perPage, resp.rows.length) - 1];
opt.startkey = lastDoc.key !== undefined ? lastDoc.key : null;
if (lastDoc.id !== undefined) {
opt.startkey_docid = lastDoc.id;
}
opt.skip = 1;
}
page.updateDocumentListing(opt);
return false;
});
} else {
$("#paging a.next").removeAttr("href");
}
for (var i = 0; i < Math.min(perPage, resp.rows.length); i++) {
var row = resp.rows[i];
var tr = $("<tr></tr>");
var key = "null";
if (row.key !== null) {
key = $.futon.formatJSON(row.key, {indent: 0, linesep: ""});
}
if (row.id) {
$("<td class='key'><a href='document.html?" + encodeURIComponent(db.name) +
"/" + $.couch.encodeDocId(row.id) + "'><strong></strong><br>" +
"<span class='docid'>ID: " + row.id + "</span></a></td>")
.find("strong").text(key).end()
.appendTo(tr);
} else {
$("<td class='key'><strong></strong></td>")
.find("strong").text(key).end()
.appendTo(tr);
}
var value = "null";
if (row.value !== null) {
value = $.futon.formatJSON(row.value, {
html: true, indent: 0, linesep: "", quoteKeys: false
});
}
$("<td class='value'><div></div></td>").find("div").html(value).end()
.appendTo(tr).dblclick(function() {
location.href = this.previousSibling.firstChild.href;
});
tr.appendTo("#documents tbody.content");
}
var firstNum = 1;
var lastNum = totalNum = Math.min(perPage, resp.rows.length);
if (resp.total_rows != null) {
if (descending_reverse) {
lastNum = Math.min(resp.total_rows, resp.total_rows - resp.offset);
firstNum = lastNum - totalNum + 1;
} else {
firstNum = Math.min(resp.total_rows, resp.offset + 1);
lastNum = firstNum + totalNum - 1;
}
totalNum = resp.total_rows;
} else {
totalNum = "unknown";
}
$("#paging").show();
$("#documents tbody.footer td span").text(
"Showing " + firstNum + "-" + lastNum + " of " + totalNum +
" row" + (firstNum != lastNum || totalNum == "unknown" ? "s" : ""));
$("#documents tbody tr:odd").addClass("odd");
}
options.error = function(status, error, reason) {
alert("Error: " + error + "\n\n" + reason);
}
if (!viewName || viewName == "_all_docs") {
$("#switch select")[0].selectedIndex = 0;
db.allDocs(options);
} else {
if (viewName == "_temp_view") {
$("#viewcode").show().removeClass("collapsed");
var mapFun = $("#viewcode_map").val();
$.futon.storage.set("map_fun", mapFun);
var reduceFun = $.trim($("#viewcode_reduce").val()) || null;
if (reduceFun) {
$.futon.storage.set("reduce_fun", reduceFun);
if ($("#reduce :checked").length) {
var level = parseInt($("#grouplevel select").val(), 10);
options.group = level > 0;
if (options.group && level < 100) {
options.group_level = level;
}
} else {
options.reduce = false;
}
}
$.futon.storage.set("language", page.viewLanguage);
db.query(mapFun, reduceFun, page.viewLanguage, options);
} else if (viewName == "_design_docs") {
options.startkey = options.descending ? "_design0" : "_design";
options.endkey = options.descending ? "_design" : "_design0";
db.allDocs(options);
} else {
$("button.compactview").show();
$("#viewcode").show();
var currentMapCode = $("#viewcode_map").val();
var currentReduceCode = $.trim($("#viewcode_reduce").val()) || null;
if (currentReduceCode) {
if ($("#reduce :checked").length) {
var level = parseInt($("#grouplevel select").val(), 10);
options.group = level > 0;
if (options.group && level < 100) {
options.group_level = level;
}
} else {
options.reduce = false;
}
}
if (page.isDirty) {
db.query(currentMapCode, currentReduceCode, page.viewLanguage, options);
} else {
var viewParts = viewName.split('/');
db.view(viewParts[1] + "/" + viewParts[3], options);
}
}
}
}
window.onbeforeunload = function() {
$("#switch select").val(viewName);
if (page.isDirty) {
return "You've made changes to the view code that have not been " +
"saved yet.";
}
}
},
// Page class for browse/document.html
CouchDocumentPage: function() {
var urlParts = location.search.substr(1).split("/");
var dbName = decodeURIComponent(urlParts.shift());
if (urlParts.length) {
var idParts = urlParts.join("/").split("@", 2);
var docId = decodeURIComponent(idParts[0]);
var docRev = (idParts.length > 1) ? idParts[1] : null;
this.isNew = false;
} else {
var docId = $.couch.newUUID();
var docRev = null;
this.isNew = true;
}
var db = $.couch.db(dbName);
$.futon.storage.declare("tab", {defaultValue: "tabular", scope: "cookie"});
this.dbName = dbName;
this.db = db;
this.docId = docId;
this.doc = null;
this.isDirty = this.isNew;
page = this;
this.activateTabularView = function() {
if ($("#fields tbody.source textarea").length > 0)
return;
$.futon.storage.set("tab", "tabular");
$("#tabs li").removeClass("active").filter(".tabular").addClass("active");
$("#fields thead th:first").text("Field").attr("colspan", 1).next().show();
$("#fields tbody.content").show();
$("#fields tbody.source").hide();
return false;
}
this.activateSourceView = function() {
$.futon.storage.set("tab", "source");
$("#tabs li").removeClass("active").filter(".source").addClass("active");
$("#fields thead th:first").text("Source").attr("colspan", 2).next().hide();
$("#fields tbody.content").hide();
$("#fields tbody.source").find("td").each(function() {
$(this).html($("<pre></pre>").html($.futon.formatJSON(page.doc, {html: true})))
.makeEditable({allowEmpty: false,
createInput: function(value) {
var rows = value.split("\n").length;
return $("<textarea rows='" + rows + "' cols='80' spellcheck='false'></textarea>").enableTabInsertion();
},
prepareInput: function(input) {
$(input).makeResizable({vertical: true});
},
end: function() {
$(this).html($("<pre></pre>").html($.futon.formatJSON(page.doc, {html: true})));
},
accept: function(newValue) {
page.doc = JSON.parse(newValue);
page.isDirty = true;
page.updateFieldListing(true);
},
populate: function(value) {
return $.futon.formatJSON(page.doc);
},
validate: function(value) {
try {
var doc = JSON.parse(value);
if (typeof doc != "object")
throw new SyntaxError("Please enter a valid JSON document (for example, {}).");
return true;
} catch (err) {
var msg = err.message;
if (msg == "parseJSON" || msg == "JSON.parse") {
msg = "There is a syntax error in the document.";
}
$("<div class='error'></div>").text(msg).appendTo(this);
return false;
}
}
});
}).end().show();
return false;
}
this.addField = function() {
if (!$("#fields tbody.content:visible").length) {
location.hash = "#tabular";
page.activateTabularView();
}
var fieldName = "unnamed";
var fieldIdx = 1;
while (page.doc.hasOwnProperty(fieldName)) {
fieldName = "unnamed " + fieldIdx++;
}
page.doc[fieldName] = null;
var row = _addRowForField(page.doc, fieldName);
page.isDirty = true;
row.find("th b").dblclick();
}
var _sortFields = function(a, b) {
var a0 = a.charAt(0), b0 = b.charAt(0);
if (a0 == "_" && b0 != "_") {
return -1;
} else if (a0 != "_" && b0 == "_") {
return 1;
} else if (a == "_attachments" || b == "_attachments") {
return a0 == "_attachments" ? 1 : -1;
} else {
return a < b ? -1 : a != b ? 1 : 0;
}
}
this.updateFieldListing = function(noReload) {
$("#fields tbody.content").empty();
function handleResult(doc, revs) {
page.doc = doc;
var propNames = [];
for (var prop in doc) {
propNames.push(prop);
}
// Order properties alphabetically, but put internal fields first
propNames.sort(_sortFields);
for (var pi = 0; pi < propNames.length; pi++) {
_addRowForField(doc, propNames[pi]);
}
if (revs.length > 1) {
var currentIndex = 0;
for (var i = 0; i < revs.length; i++) {
if (revs[i].rev == doc._rev) {
currentIndex = i;
break;
}
}
if (currentIndex < revs.length - 1) {
var prevRev = revs[currentIndex + 1].rev;
$("#paging a.prev").attr("href", "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(docId) + "@" + prevRev);
}
if (currentIndex > 0) {
var nextRev = revs[currentIndex - 1].rev;
$("#paging a.next").attr("href", "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(docId) + "@" + nextRev);
}
$("#fields tbody.footer td span").text("Showing revision " +
(revs.length - currentIndex) + " of " + revs.length);
}
if ($.futon.storage.get("tab") == "source") {
page.activateSourceView();
}
}
if (noReload) {
handleResult(page.doc, []);
return;
}
if (!page.isNew) {
db.openDoc(docId, {revs_info: true,
success: function(doc) {
var revs = doc._revs_info || [];
delete doc._revs_info;
if (docRev != null) {
db.openDoc(docId, {rev: docRev,
error: function(status, error, reason) {
alert("The requested revision was not found. You will " +
"be redirected back to the latest revision.");
location.href = "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(docId);
},
success: function(doc) {
handleResult(doc, revs);
}
});
} else {
handleResult(doc, revs);
}
}
});
} else {
handleResult({_id: docId}, []);
$("#fields tbody td").dblclick();
}
}
this.deleteDocument = function() {
$.showDialog("dialog/_delete_document.html", {
submit: function(data, callback) {
db.removeDoc(page.doc, {
success: function(resp) {
callback();
location.href = "database.html?" + encodeURIComponent(dbName);
}
});
}
});
}
this.saveDocument = function() {
db.saveDoc(page.doc, {
error: function(status, error, reason) {
alert("Error: " + error + "\n\n" + reason);
},
success: function(resp) {
page.isDirty = false;
location.href = "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(page.docId);
}
});
}
this.uploadAttachment = function() {
if (page.isDirty) {
alert("You need to save or revert any changes you have made to the " +
"document before you can attach a new file.");
return false;
}
$.showDialog("dialog/_upload_attachment.html", {
load: function(elem) {
$("input[name='_rev']", elem).val(page.doc._rev);
},
submit: function(data, callback) {
if (!data._attachments || data._attachments.length == 0) {
callback({_attachments: "Please select a file to upload."});
return;
}
var form = $("#upload-form");
form.find("#progress").css("visibility", "visible");
form.ajaxSubmit({
url: db.uri + $.couch.encodeDocId(page.docId),
success: function(resp) {
form.find("#progress").css("visibility", "hidden");
page.isDirty = false;
location.href = "?" + encodeURIComponent(dbName) +
"/" + $.couch.encodeDocId(page.docId);
}
});
}
});
}
window.onbeforeunload = function() {
if (page.isDirty) {
return "You've made changes to this document that have not been " +
"saved yet.";
}
}
function _addRowForField(doc, fieldName) {
var row = $("<tr><th></th><td></td></tr>")
.find("th").append($("<b></b>").text(fieldName)).end()
.appendTo("#fields tbody.content");
if (fieldName == "_attachments") {
row.find("td").append(_renderAttachmentList(doc[fieldName]));
} else {
row.find("td").append(_renderValue(doc[fieldName]));
_initKey(doc, row, fieldName);
_initValue(doc, row, fieldName);
}
$("#fields tbody.content tr").removeClass("odd").filter(":odd").addClass("odd");
row.data("name", fieldName);
return row;
}
function _initKey(doc, row, fieldName) {
if (fieldName == "_id" || fieldName == "_rev") {
return;
}
var cell = row.find("th");
$("<button type='button' class='delete' title='Delete field'></button>").click(function() {
delete doc[fieldName];
row.remove();
page.isDirty = true;
$("#fields tbody.content tr").removeClass("odd").filter(":odd").addClass("odd");
}).prependTo(cell);
cell.find("b").makeEditable({allowEmpty: false,
accept: function(newName, oldName) {
doc[newName] = doc[oldName];
delete doc[oldName];
row.data("name", newName);
$(this).text(newName);
page.isDirty = true;
},
begin: function() {
row.find("th button.delete").hide();
return true;
},
end: function(keyCode) {
row.find("th button.delete").show();
if (keyCode == 9) { // tab, move to editing the value
row.find("td").dblclick();
}
},
validate: function(newName, oldName) {
$("div.error", this).remove();
if (newName != oldName && doc[newName] !== undefined) {
$("<div class='error'>Already have field with that name.</div>")
.appendTo(this);
return false;
}
return true;
}
});
}
function _initValue(doc, row, fieldName) {
if ((fieldName == "_id" && !page.isNew) || fieldName == "_rev") {
return;
}
row.find("td").makeEditable({acceptOnBlur: false, allowEmpty: true,
createInput: function(value) {
value = doc[row.data("name")];
var elem = $(this);
if (elem.find("dl").length > 0 ||
elem.find("code").is(".array, .object") ||
typeof(value) == "string" && (value.length > 60 || value.match(/\n/))) {
return $("<textarea rows='1' cols='40' spellcheck='false'></textarea>");
}
return $("<input type='text' spellcheck='false'>");
},
end: function() {
$(this).children().remove();
$(this).append(_renderValue(doc[row.data("name")]));
},
prepareInput: function(input) {
if ($(input).is("textarea")) {
var height = Math.min(input.scrollHeight, document.body.clientHeight - 100);
$(input).height(height).makeResizable({vertical: true}).enableTabInsertion();
}
},
accept: function(newValue) {
var fieldName = row.data("name");
try {
doc[fieldName] = JSON.parse(newValue);
} catch (err) {
doc[fieldName] = newValue;
}
page.isDirty = true;
if (fieldName == "_id") {
page.docId = page.doc._id = doc[fieldName];
$("h1 strong").text(page.docId);
}
},
populate: function(value) {
value = doc[row.data("name")];
if (typeof(value) == "string") {
return value;
}
return $.futon.formatJSON(value);
},
validate: function(value) {
$("div.error", this).remove();
try {
var parsed = JSON.parse(value);
if (row.data("name") == "_id" && typeof(parsed) != "string") {
$("<div class='error'>The document ID must be a string.</div>")
.appendTo(this);
return false;
}
return true;
} catch (err) {
return true;
}
}
});
}
function _renderValue(value) {
function isNullOrEmpty(val) {
if (val == null) return true;
for (var i in val) return false;
return true;
}
function render(val) {
var type = typeof(val);
if (type == "object" && !isNullOrEmpty(val)) {
var list = $("<dl></dl>");
for (var i in val) {
$("<dt></dt>").text(i).appendTo(list);
$("<dd></dd>").append(render(val[i])).appendTo(list);
}
return list;
} else {
var html = $.futon.formatJSON(val, {
html: true,
escapeStrings: false
});
var n = $(html);
if (n.text().length > 140) {
// This code reduces a long string in to a summarized string with a link to expand it.
// Someone, somewhere, is doing something nasty with the event after it leaves these handlers.
// At this time I can't track down the offender, it might actually be a jQuery propogation issue.
var fulltext = n.text();
var mintext = n.text().slice(0, 140);
var e = $('<a href="#expand">...</a>');
var m = $('<a href="#min">X</a>');
var expand = function (evt) {
n.empty();
n.text(fulltext);
n.append(m);
evt.stopPropagation();
evt.stopImmediatePropagation();
evt.preventDefault();
}
var minimize = function (evt) {
n.empty();
n.text(mintext);
// For some reason the old element's handler won't fire after removed and added again.
e = $('<a href="#expand">...</a>');
e.click(expand);
n.append(e);
evt.stopPropagation();
evt.stopImmediatePropagation();
evt.preventDefault();
}
e.click(expand);
n.click(minimize);
n.text(mintext);
n.append(e)
}
return n;
}
}
var elem = render(value);
elem.find("dd:has(dl)").hide().prev("dt").addClass("collapsed");
elem.find("dd:not(:has(dl))").addClass("inline").prev().addClass("inline");
elem.find("dt.collapsed").click(function() {
$(this).toggleClass("collapsed").next().toggle();
});
return elem;
}
function _renderAttachmentList(attachments) {
var ul = $("<ul></ul>").addClass("attachments");
$.each(attachments, function(idx, attachment) {
_renderAttachmentItem(idx, attachment).appendTo(ul);
});
return ul;
}
function _renderAttachmentItem(name, attachment) {
var attachmentHref = db.uri + $.couch.encodeDocId(page.docId)
+ "/" + encodeAttachment(name);
var li = $("<li></li>");
$("<a href='' title='Download file' target='_top'></a>").text(name)
.attr("href", attachmentHref)
.wrapInner("<tt></tt>").appendTo(li);
$("<span>()</span>").text("" + $.futon.formatSize(attachment.length) +
", " + attachment.content_type).addClass("info").appendTo(li);
if (name == "tests.js") {
li.find('span.info').append(', <a href="/_utils/couch_tests.html?'
+ attachmentHref + '">open in test runner</a>');
}
_initAttachmentItem(name, attachment, li);
return li;
}
function _initAttachmentItem(name, attachment, li) {
$("<button type='button' class='delete' title='Delete attachment'></button>").click(function() {
if (!li.siblings("li").length) {
delete page.doc._attachments;
li.parents("tr").remove();
$("#fields tbody.content tr").removeClass("odd").filter(":odd").addClass("odd");
} else {
delete page.doc._attachments[name];
li.remove();
}
page.isDirty = true;
return false;
}).prependTo($("a", li));
}
},
});
function encodeAttachment(name) {
var encoded = [], parts = name.split('/');
for (var i=0; i < parts.length; i++) {
encoded.push(encodeURIComponent(parts[i]));
};
return encoded.join('/');
}
})(jQuery);
|
fix attachments with "./" in their names in Futon. Closes COUCHDB-239.
git-svn-id: c1f8a8ec0b3bb2e99ab3413722f81f687760e79c@950119 13f79535-47bb-0310-9956-ffa450edef68
|
share/www/script/futon.browse.js
|
fix attachments with "./" in their names in Futon. Closes COUCHDB-239.
|
<ide><path>share/www/script/futon.browse.js
<ide> for (var i=0; i < parts.length; i++) {
<ide> encoded.push(encodeURIComponent(parts[i]));
<ide> };
<del> return encoded.join('/');
<add> return encoded.join('%2f');
<ide> }
<ide>
<ide> })(jQuery);
|
|
JavaScript
|
apache-2.0
|
04dc75c977e457c720d28edee9be5079bc81d997
| 0 |
google/closure-library,google/closure-library,google/closure-library,google/closure-library,google/closure-library
|
/**
* @license
* Copyright The Closure Library Authors.
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Input Date Picker implementation. Pairs a
* goog.ui.PopupDatePicker with an input element and handles the input from
* either.
*
* @see ../demos/inputdatepicker.html
*/
goog.provide('goog.ui.InputDatePicker');
goog.require('goog.date.DateTime');
goog.require('goog.dom');
goog.require('goog.dom.InputType');
goog.require('goog.dom.TagName');
goog.require('goog.i18n.DateTimeParse');
goog.require('goog.string');
goog.require('goog.ui.Component');
goog.require('goog.ui.DatePicker');
/** @suppress {extraRequire} */
goog.require('goog.ui.LabelInput');
goog.require('goog.ui.PopupBase');
goog.require('goog.ui.PopupDatePicker');
goog.requireType('goog.date.Date');
goog.requireType('goog.date.DateLike');
goog.requireType('goog.events.Event');
goog.requireType('goog.ui.DatePickerEvent');
/**
 * Input date picker widget.
 *
 * Pairs a {@link goog.ui.PopupDatePicker} with a text input: selecting a
 * date in the popup fills the input, and the input's text is parsed back
 * into the picker.
 *
 * @param {!goog.ui.InputDatePicker.DateFormatter} dateTimeFormatter A formatter
 *     instance used to format the date picker's date for display in the input
 *     element.
 * @param {!goog.ui.InputDatePicker.DateParser} dateTimeParser A parser instance
 *     used to parse the input element's string as a date to set the picker.
 * @param {goog.ui.DatePicker=} opt_datePicker Optional DatePicker. This
 *     enables the use of a custom date-picker instance.
 * @param {goog.dom.DomHelper=} opt_domHelper Optional DOM helper.
 * @extends {goog.ui.Component}
 * @constructor
 */
goog.ui.InputDatePicker = function(
    dateTimeFormatter, dateTimeParser, opt_datePicker, opt_domHelper) {
  'use strict';
  goog.ui.Component.call(this, opt_domHelper);
  this.dateTimeFormatter_ = dateTimeFormatter;
  this.dateTimeParser_ = dateTimeParser;
  this.popupDatePicker_ =
      new goog.ui.PopupDatePicker(opt_datePicker, opt_domHelper);
  // Added as a child for lifecycle management even though its DOM lives
  // outside this component's element (see enterDocument).
  this.addChild(this.popupDatePicker_);
  // Keep keyboard focus in the text input rather than in the popup.
  this.popupDatePicker_.setAllowAutoFocus(false);
};
goog.inherits(goog.ui.InputDatePicker, goog.ui.Component);
/**
 * Used to format the date picker's date for display in the input element.
 * @type {?goog.ui.InputDatePicker.DateFormatter}
 * @private
 */
goog.ui.InputDatePicker.prototype.dateTimeFormatter_ = null;
/**
 * Used to parse the input element's string as a date to set the picker.
 * @type {?goog.ui.InputDatePicker.DateParser}
 * @private
 */
goog.ui.InputDatePicker.prototype.dateTimeParser_ = null;
/**
 * The instance of goog.ui.PopupDatePicker used to pop up and select the date.
 * @type {?goog.ui.PopupDatePicker}
 * @private
 */
goog.ui.InputDatePicker.prototype.popupDatePicker_ = null;
/**
 * The element that the PopupDatePicker should be parented to. Defaults to the
 * body element of the page.
 * @type {?Element}
 * @private
 */
goog.ui.InputDatePicker.prototype.popupParentElement_ = null;
/**
 * Returns the PopupDatePicker's internal DatePicker instance. This can be
 * used to customize the date picker's styling.
 *
 * @return {goog.ui.DatePicker} The internal DatePicker instance.
 */
goog.ui.InputDatePicker.prototype.getDatePicker = function() {
  'use strict';
  return this.popupDatePicker_.getDatePicker();
};
/**
 * Returns the PopupDatePicker instance.
 *
 * @return {goog.ui.PopupDatePicker} Popup instance.
 */
goog.ui.InputDatePicker.prototype.getPopupDatePicker = function() {
  'use strict';
  return this.popupDatePicker_;
};
/**
 * Returns the selected date, if any. Compares the dates from the date picker
 * and the input field, causing them to be synced if different.
 * @return {goog.date.DateTime} The selected date, if any.
 */
goog.ui.InputDatePicker.prototype.getDate = function() {
  'use strict';
  // The user expectation is that the date be whatever the input shows.
  // This method biases towards the input value to conform to that expectation.
  var inputDate = this.getInputValueAsDate_();
  var pickerDate = this.popupDatePicker_.getDate();
  if (inputDate && pickerDate) {
    if (!inputDate.equals(pickerDate)) {
      // Input and picker disagree: push the input's date into the picker.
      this.popupDatePicker_.setDate(inputDate);
    }
  } else {
    // Empty or unparseable input clears the picker's selection.
    this.popupDatePicker_.setDate(null);
  }
  return inputDate;
};
/**
 * Sets the selected date. See goog.ui.PopupDatePicker.setDate().
 * @param {goog.date.Date} date The date to set.
 */
goog.ui.InputDatePicker.prototype.setDate = function(date) {
  'use strict';
  this.popupDatePicker_.setDate(date);
};
/**
 * Sets the value of the input element. This can be overridden to support
 * alternative types of input setting.
 * @param {string} value The value to set.
 * @suppress {strictMissingProperties} Part of the go/strict_warnings_migration
 */
goog.ui.InputDatePicker.prototype.setInputValue = function(value) {
  'use strict';
  var el = this.getElement();
  // If the element was decorated as a goog.ui.LabelInput, go through it
  // so its label/placeholder handling stays consistent.
  if (el.labelInput_) {
    var labelInput = /** @type {goog.ui.LabelInput} */ (el.labelInput_);
    labelInput.setValue(value);
  } else {
    el.value = value;
  }
};
/**
 * Returns the value of the input element. This can be overridden to support
 * alternative types of input getting.
 * @return {string} The input value.
 * @suppress {strictMissingProperties} Part of the go/strict_warnings_migration
 */
goog.ui.InputDatePicker.prototype.getInputValue = function() {
  'use strict';
  var el = this.getElement();
  // Mirror setInputValue: prefer the LabelInput wrapper when present.
  if (el.labelInput_) {
    var labelInput = /** @type {goog.ui.LabelInput} */ (el.labelInput_);
    return labelInput.getValue();
  } else {
    return el.value;
  }
};
/**
 * Writes a date into the input element, formatted with the configured
 * formatter; a null/undefined date clears the input.
 *
 * @param {?goog.date.Date} date The value to set.
 * @private
 */
goog.ui.InputDatePicker.prototype.setInputValueAsDate_ = function(date) {
  'use strict';
  var text = '';
  if (date) {
    text = this.dateTimeFormatter_.format(date);
  }
  this.setInputValue(text);
};
/**
 * Gets the input element value and attempts to parse it as a date.
 *
 * @return {goog.date.DateTime} The date object is returned if the parse
 *      is successful, null is returned on failure.
 * @private
 */
goog.ui.InputDatePicker.prototype.getInputValueAsDate_ = function() {
  'use strict';
  var value = goog.string.trim(this.getInputValue());
  if (value) {
    var date = new goog.date.DateTime();
    // DateTime needed as parse assumes it can call getHours(), getMinutes(),
    // etc, on the date if hours and minutes aren't defined.
    // {validate: true} requires the whole string to form a valid date.
    if (this.dateTimeParser_.parse(value, date, {validate: true}) > 0) {
      // Parser with YYYY format string will interpret 1 as year 1 A.D.
      // However, datepicker.setDate() method will change it into 1901.
      // Same is true for any other pattern when number entered by user is
      // different from number of digits in the pattern. (YY and 1 will be 1AD).
      // See i18n/datetimeparse.js
      // Conversion happens in goog.date.Date/DateTime constructor
      // when it calls new Date(year...). See ui/datepicker.js.
      return date;
    }
  }
  // Empty or unparseable input.
  return null;
};
/**
 * Creates an input element for use with the popup date picker.
 * @override
 */
goog.ui.InputDatePicker.prototype.createDom = function() {
  'use strict';
  this.setElementInternal(this.getDomHelper().createDom(
      goog.dom.TagName.INPUT, {'type': goog.dom.InputType.TEXT}));
  // Build the popup's DOM now; it is attached to the document later, in
  // enterDocument.
  this.popupDatePicker_.createDom();
};
/**
 * Sets the element that the PopupDatePicker should be parented to. If not set,
 * defaults to the body element of the page.
 * @param {Element} el The element that the PopupDatePicker should be parented
 *     to.
 */
goog.ui.InputDatePicker.prototype.setPopupParentElement = function(el) {
  'use strict';
  this.popupParentElement_ = el;
};
/** @override */
goog.ui.InputDatePicker.prototype.enterDocument = function() {
  'use strict';
  // this.popupDatePicker_ has been added as a child even though it isn't really
  // a child (since its root element is not within InputDatePicker's DOM tree).
  // The PopupDatePicker will have its enterDocument method called as a result
  // of calling the superClass's enterDocument method. The PopupDatePicker needs
  // to be attached to the document *before* calling enterDocument so that when
  // PopupDatePicker decorates its element as a DatePicker, the element will be
  // in the document and enterDocument will be called for the DatePicker. Having
  // the PopupDatePicker's element in the document before calling enterDocument
  // will ensure that the event handlers for DatePicker are attached.
  //
  // An alternative could be to stop adding popupDatePicker_ as a child and
  // instead keep a reference to it and sync some event handlers, etc. but
  // appending the element to the document before calling enterDocument is a
  // less intrusive option.
  //
  // See cl/100837907 for more context and the discussion around this decision.
  (this.popupParentElement_ || this.getDomHelper().getDocument().body)
      .appendChild(/** @type {!Node} */ (this.popupDatePicker_.getElement()));
  goog.ui.InputDatePicker.superClass_.enterDocument.call(this);
  var el = this.getElement();
  this.popupDatePicker_.attach(el);
  // Set the date picker to have the input's initial value, if any.
  this.popupDatePicker_.setDate(this.getInputValueAsDate_());
  var handler = this.getHandler();
  // Sync input <- picker on date changes, and input <-> picker on show.
  handler.listen(
      this.popupDatePicker_, goog.ui.DatePicker.Events.CHANGE,
      this.onDateChanged_);
  handler.listen(
      this.popupDatePicker_, goog.ui.PopupBase.EventType.SHOW, this.onPopup_);
};
/** @override */
goog.ui.InputDatePicker.prototype.exitDocument = function() {
  'use strict';
  goog.ui.InputDatePicker.superClass_.exitDocument.call(this);
  var el = this.getElement();
  this.popupDatePicker_.detach(el);
  this.popupDatePicker_.exitDocument();
  // Undo the manual append done in enterDocument.
  goog.dom.removeNode(this.popupDatePicker_.getElement());
};
/** @override */
goog.ui.InputDatePicker.prototype.decorateInternal = function(element) {
  'use strict';
  goog.ui.InputDatePicker.superClass_.decorateInternal.call(this, element);
  // The popup's DOM is always created fresh, even when decorating.
  this.popupDatePicker_.createDom();
};
/** @override */
goog.ui.InputDatePicker.prototype.disposeInternal = function() {
  'use strict';
  goog.ui.InputDatePicker.superClass_.disposeInternal.call(this);
  this.popupDatePicker_.dispose();
  // Null out references to help garbage collection.
  this.popupDatePicker_ = null;
  this.popupParentElement_ = null;
};
/**
 * See goog.ui.PopupDatePicker.showPopup().
 * @param {Element} element Reference element for displaying the popup -- popup
 *     will appear at the bottom-left corner of this element.
 */
goog.ui.InputDatePicker.prototype.showForElement = function(element) {
  'use strict';
  this.popupDatePicker_.showPopup(element);
};
/**
 * See goog.ui.PopupDatePicker.hidePopup().
 */
goog.ui.InputDatePicker.prototype.hidePopup = function() {
  'use strict';
  this.popupDatePicker_.hidePopup();
};
/**
 * Event handler for popup date picker popup events. Syncs the picker to
 * the input's current (parseable) value when the popup is shown.
 *
 * @param {goog.events.Event} e popup event.
 * @private
 */
goog.ui.InputDatePicker.prototype.onPopup_ = function(e) {
  'use strict';
  var inputValueAsDate = this.getInputValueAsDate_();
  this.setDate(inputValueAsDate);
  // don't overwrite the input value with empty date if input is not valid
  if (inputValueAsDate) {
    // Re-format the input so it reflects the picker's canonical date.
    this.setInputValueAsDate_(this.getDatePicker().getDate());
  }
};
/**
 * Event handler for date change events. Called when the date changes.
 * Writes the newly picked date into the input element.
 *
 * @param {goog.ui.DatePickerEvent} e Date change event.
 * @private
 */
goog.ui.InputDatePicker.prototype.onDateChanged_ = function(e) {
  'use strict';
  this.setInputValueAsDate_(e.date);
};
/**
 * A DateFormatter implements functionality to convert a Date into
 * human-readable text. This interface is expected to accept
 * an instance of goog.i18n.DateTimeFormat directly, and as such the method
 * signatures directly match those found on that class.
 * @record
 */
goog.ui.InputDatePicker.DateFormatter = function() {};
/**
 * @param {!goog.date.DateLike} date The Date object that is being formatted.
 * @return {string} The formatted date value.
 */
goog.ui.InputDatePicker.DateFormatter.prototype.format = function(date) {};
/**
 * A DateParser implements functionality to parse text into a Date. This
 * interface is expected to accept an instance of goog.i18n.DateTimeParse
 * directly, and as such the method signatures directly match those found on
 * that class.
 * @record
 */
goog.ui.InputDatePicker.DateParser = function() {};
/**
 * @param {string} text The string being parsed.
 * @param {!goog.date.DateLike} date The Date object to hold the parsed date.
 * @param {!goog.i18n.DateTimeParse.ParseOptions=} options The options object.
 * @return {number} How many characters parser advanced.
 */
goog.ui.InputDatePicker.DateParser.prototype.parse = function(
    text, date, options) {};
|
closure/goog/ui/inputdatepicker.js
|
/**
* @license
* Copyright The Closure Library Authors.
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Input Date Picker implementation. Pairs a
* goog.ui.PopupDatePicker with an input element and handles the input from
* either.
*
* @see ../demos/inputdatepicker.html
*/
goog.provide('goog.ui.InputDatePicker');
goog.require('goog.date.DateTime');
goog.require('goog.dom');
goog.require('goog.dom.InputType');
goog.require('goog.dom.TagName');
goog.require('goog.i18n.DateTimeParse');
goog.require('goog.string');
goog.require('goog.ui.Component');
goog.require('goog.ui.DatePicker');
/** @suppress {extraRequire} */
goog.require('goog.ui.LabelInput');
goog.require('goog.ui.PopupBase');
goog.require('goog.ui.PopupDatePicker');
goog.requireType('goog.date.Date');
goog.requireType('goog.date.DateLike');
goog.requireType('goog.events.Event');
goog.requireType('goog.ui.DatePickerEvent');
/**
 * Input date picker widget.
 *
 * Pairs a {@link goog.ui.PopupDatePicker} with a text input: selecting a
 * date in the popup fills the input, and the input's text is parsed back
 * into the picker.
 *
 * @param {!goog.ui.InputDatePicker.DateFormatter} dateTimeFormatter A formatter
 *     instance used to format the date picker's date for display in the input
 *     element.
 * @param {!goog.ui.InputDatePicker.DateParser} dateTimeParser A parser instance
 *     used to parse the input element's string as a date to set the picker.
 * @param {goog.ui.DatePicker=} opt_datePicker Optional DatePicker. This
 *     enables the use of a custom date-picker instance.
 * @param {goog.dom.DomHelper=} opt_domHelper Optional DOM helper.
 * @extends {goog.ui.Component}
 * @constructor
 */
goog.ui.InputDatePicker = function(
    dateTimeFormatter, dateTimeParser, opt_datePicker, opt_domHelper) {
  'use strict';
  goog.ui.Component.call(this, opt_domHelper);
  this.dateTimeFormatter_ = dateTimeFormatter;
  this.dateTimeParser_ = dateTimeParser;
  this.popupDatePicker_ =
      new goog.ui.PopupDatePicker(opt_datePicker, opt_domHelper);
  // Added as a child for lifecycle management even though its DOM lives
  // outside this component's element (see enterDocument).
  this.addChild(this.popupDatePicker_);
  // Keep keyboard focus in the text input rather than in the popup.
  this.popupDatePicker_.setAllowAutoFocus(false);
};
goog.inherits(goog.ui.InputDatePicker, goog.ui.Component);
/**
 * Used to format the date picker's date for display in the input element.
 * @type {?goog.ui.InputDatePicker.DateFormatter}
 * @private
 */
goog.ui.InputDatePicker.prototype.dateTimeFormatter_ = null;
/**
 * Used to parse the input element's string as a date to set the picker.
 * @type {?goog.ui.InputDatePicker.DateParser}
 * @private
 */
goog.ui.InputDatePicker.prototype.dateTimeParser_ = null;
/**
 * The instance of goog.ui.PopupDatePicker used to pop up and select the date.
 * @type {?goog.ui.PopupDatePicker}
 * @private
 */
goog.ui.InputDatePicker.prototype.popupDatePicker_ = null;
/**
 * The element that the PopupDatePicker should be parented to. Defaults to the
 * body element of the page.
 * @type {?Element}
 * @private
 */
goog.ui.InputDatePicker.prototype.popupParentElement_ = null;
/**
 * Returns the PopupDatePicker's internal DatePicker instance. This can be
 * used to customize the date picker's styling.
 *
 * @return {goog.ui.DatePicker} The internal DatePicker instance.
 */
goog.ui.InputDatePicker.prototype.getDatePicker = function() {
  'use strict';
  return this.popupDatePicker_.getDatePicker();
};
/**
 * Returns the PopupDatePicker instance.
 *
 * @return {goog.ui.PopupDatePicker} Popup instance.
 */
goog.ui.InputDatePicker.prototype.getPopupDatePicker = function() {
  'use strict';
  return this.popupDatePicker_;
};
/**
 * Returns the selected date, if any. Compares the dates from the date picker
 * and the input field, causing them to be synced if different.
 * @return {goog.date.DateTime} The selected date, if any.
 */
goog.ui.InputDatePicker.prototype.getDate = function() {
  'use strict';
  // The user expectation is that the date be whatever the input shows.
  // This method biases towards the input value to conform to that expectation.
  var inputDate = this.getInputValueAsDate_();
  var pickerDate = this.popupDatePicker_.getDate();
  if (inputDate && pickerDate) {
    if (!inputDate.equals(pickerDate)) {
      // Input and picker disagree: push the input's date into the picker.
      this.popupDatePicker_.setDate(inputDate);
    }
  } else {
    // Empty or unparseable input clears the picker's selection.
    this.popupDatePicker_.setDate(null);
  }
  return inputDate;
};
/**
 * Sets the selected date. See goog.ui.PopupDatePicker.setDate().
 * @param {goog.date.Date} date The date to set.
 */
goog.ui.InputDatePicker.prototype.setDate = function(date) {
  'use strict';
  this.popupDatePicker_.setDate(date);
};
/**
 * Sets the value of the input element. This can be overridden to support
 * alternative types of input setting.
 * @param {string} value The value to set.
 * @suppress {strictMissingProperties} Part of the go/strict_warnings_migration
 */
goog.ui.InputDatePicker.prototype.setInputValue = function(value) {
  'use strict';
  var el = this.getElement();
  // If the element was decorated as a goog.ui.LabelInput, go through it
  // so its label/placeholder handling stays consistent.
  if (el.labelInput_) {
    var labelInput = /** @type {goog.ui.LabelInput} */ (el.labelInput_);
    labelInput.setValue(value);
  } else {
    el.value = value;
  }
};
/**
 * Returns the value of the input element. This can be overridden to support
 * alternative types of input getting.
 * @return {string} The input value.
 * @suppress {strictMissingProperties} Part of the go/strict_warnings_migration
 */
goog.ui.InputDatePicker.prototype.getInputValue = function() {
  'use strict';
  var el = this.getElement();
  // Mirror setInputValue: prefer the LabelInput wrapper when present.
  if (el.labelInput_) {
    var labelInput = /** @type {goog.ui.LabelInput} */ (el.labelInput_);
    return labelInput.getValue();
  } else {
    return el.value;
  }
};
/**
 * Sets the value of the input element from date object.
 *
 * @param {?goog.date.Date} date The value to set; null clears the input.
 * @private
 */
goog.ui.InputDatePicker.prototype.setInputValueAsDate_ = function(date) {
  'use strict';
  this.setInputValue(date ? this.dateTimeFormatter_.format(date) : '');
};
/**
 * Gets the input element value and attempts to parse it as a date.
 *
 * @return {goog.date.DateTime} The date object is returned if the parse
 *      is successful, null is returned on failure.
 * @private
 */
goog.ui.InputDatePicker.prototype.getInputValueAsDate_ = function() {
  'use strict';
  var value = goog.string.trim(this.getInputValue());
  if (value) {
    var date = new goog.date.DateTime();
    // DateTime needed as parse assumes it can call getHours(), getMinutes(),
    // etc, on the date if hours and minutes aren't defined.
    // Use parse() with {validate: true} instead of the deprecated
    // strictParse(): behavior is the same (the whole string must form a
    // valid date) and it matches the current goog.i18n.DateTimeParse API.
    if (this.dateTimeParser_.parse(value, date, {validate: true}) > 0) {
      // Parser with YYYY format string will interpret 1 as year 1 A.D.
      // However, datepicker.setDate() method will change it into 1901.
      // Same is true for any other pattern when number entered by user is
      // different from number of digits in the pattern. (YY and 1 will be 1AD).
      // See i18n/datetimeparse.js
      // Conversion happens in goog.date.Date/DateTime constructor
      // when it calls new Date(year...). See ui/datepicker.js.
      return date;
    }
  }
  // Empty or unparseable input.
  return null;
};
/**
 * Creates an input element for use with the popup date picker.
 * @override
 */
goog.ui.InputDatePicker.prototype.createDom = function() {
  'use strict';
  this.setElementInternal(this.getDomHelper().createDom(
      goog.dom.TagName.INPUT, {'type': goog.dom.InputType.TEXT}));
  // Build the popup's DOM now; it is attached to the document later, in
  // enterDocument.
  this.popupDatePicker_.createDom();
};
/**
 * Sets the element that the PopupDatePicker should be parented to. If not set,
 * defaults to the body element of the page.
 * @param {Element} el The element that the PopupDatePicker should be parented
 *     to.
 */
goog.ui.InputDatePicker.prototype.setPopupParentElement = function(el) {
  'use strict';
  this.popupParentElement_ = el;
};
/** @override */
goog.ui.InputDatePicker.prototype.enterDocument = function() {
  'use strict';
  // this.popupDatePicker_ has been added as a child even though it isn't really
  // a child (since its root element is not within InputDatePicker's DOM tree).
  // The PopupDatePicker will have its enterDocument method called as a result
  // of calling the superClass's enterDocument method. The PopupDatePicker needs
  // to be attached to the document *before* calling enterDocument so that when
  // PopupDatePicker decorates its element as a DatePicker, the element will be
  // in the document and enterDocument will be called for the DatePicker. Having
  // the PopupDatePicker's element in the document before calling enterDocument
  // will ensure that the event handlers for DatePicker are attached.
  //
  // An alternative could be to stop adding popupDatePicker_ as a child and
  // instead keep a reference to it and sync some event handlers, etc. but
  // appending the element to the document before calling enterDocument is a
  // less intrusive option.
  //
  // See cl/100837907 for more context and the discussion around this decision.
  (this.popupParentElement_ || this.getDomHelper().getDocument().body)
      .appendChild(/** @type {!Node} */ (this.popupDatePicker_.getElement()));
  goog.ui.InputDatePicker.superClass_.enterDocument.call(this);
  var el = this.getElement();
  this.popupDatePicker_.attach(el);
  // Set the date picker to have the input's initial value, if any.
  this.popupDatePicker_.setDate(this.getInputValueAsDate_());
  var handler = this.getHandler();
  // Sync input <- picker on date changes, and input <-> picker on show.
  handler.listen(
      this.popupDatePicker_, goog.ui.DatePicker.Events.CHANGE,
      this.onDateChanged_);
  handler.listen(
      this.popupDatePicker_, goog.ui.PopupBase.EventType.SHOW, this.onPopup_);
};
/**
 * Detaches the popup date picker from the input and removes its element
 * from the DOM (undoes enterDocument).
 * @override
 */
goog.ui.InputDatePicker.prototype.exitDocument = function() {
  'use strict';
  goog.ui.InputDatePicker.superClass_.exitDocument.call(this);
  var inputEl = this.getElement();
  var picker = this.popupDatePicker_;
  picker.detach(inputEl);
  picker.exitDocument();
  goog.dom.removeNode(picker.getElement());
};
/**
 * Decorates an existing input element and builds the popup's DOM.
 * @override
 */
goog.ui.InputDatePicker.prototype.decorateInternal = function(element) {
  'use strict';
  goog.ui.InputDatePicker.superClass_.decorateInternal.call(this, element);
  this.popupDatePicker_.createDom();
};
/**
 * Disposes of the popup date picker and clears element references.
 * @override
 */
goog.ui.InputDatePicker.prototype.disposeInternal = function() {
  'use strict';
  goog.ui.InputDatePicker.superClass_.disposeInternal.call(this);
  this.popupDatePicker_.dispose();
  this.popupDatePicker_ = null;
  this.popupParentElement_ = null;
};
/**
 * Shows the popup date picker. See goog.ui.PopupDatePicker.showPopup().
 * @param {Element} element Reference element for displaying the popup -- popup
 *     will appear at the bottom-left corner of this element.
 */
goog.ui.InputDatePicker.prototype.showForElement = function(element) {
  'use strict';
  this.popupDatePicker_.showPopup(element);
};
/**
 * Hides the popup date picker. See goog.ui.PopupDatePicker.hidePopup().
 */
goog.ui.InputDatePicker.prototype.hidePopup = function() {
  'use strict';
  this.popupDatePicker_.hidePopup();
};
/**
 * Event handler for popup date picker popup (SHOW) events: synchronizes the
 * picker with whatever date is currently typed into the input field.
 *
 * @param {goog.events.Event} e popup event.
 * @private
 */
goog.ui.InputDatePicker.prototype.onPopup_ = function(e) {
  'use strict';
  var typedDate = this.getInputValueAsDate_();
  this.setDate(typedDate);
  // Only echo the picker's (possibly normalized) date back into the input
  // when the typed text parsed successfully; otherwise leave the input as-is.
  if (typedDate) {
    this.setInputValueAsDate_(this.getDatePicker().getDate());
  }
};
/**
 * Event handler for date change events. Called when the date changes.
 * Writes the newly selected date back into the input field.
 *
 * @param {goog.ui.DatePickerEvent} e Date change event.
 * @private
 */
goog.ui.InputDatePicker.prototype.onDateChanged_ = function(e) {
  'use strict';
  this.setInputValueAsDate_(e.date);
};
/**
 * A DateFormatter implements functionality to convert a Date into
 * human-readable text. This interface is expected to accept
 * an instance of goog.i18n.DateTimeFormat directly, and as such the method
 * signatures directly match those found on that class.
 * @record
 */
goog.ui.InputDatePicker.DateFormatter = function() {};
/**
 * Formats the given date as text.
 * @param {!goog.date.DateLike} date The Date object that is being formatted.
 * @return {string} The formatted date value.
 */
goog.ui.InputDatePicker.DateFormatter.prototype.format = function(date) {};
/**
 * A DateParser implements functionality to parse text into a Date. This
 * interface is expected to accept an instance of goog.i18n.DateTimeParse
 * directly, and as such the method signatures directly match those found on
 * that class.
 * @record
 */
goog.ui.InputDatePicker.DateParser = function() {};
/**
 * Strictly parses {@code text} into {@code date}.
 * @param {string} text The string being parsed.
 * @param {!goog.date.DateLike} date The Date object to hold the parsed date.
 * @return {number} How many characters parser advanced.
 * @deprecated See the deprecation warning on
 *     goog.i18n.DateTimeParse#strictParse - this will be deprecated in favour
 *     of only using the `parse` signature with options.
 */
goog.ui.InputDatePicker.DateParser.prototype.strictParse = function(
    text, date) {};
/**
 * Parses {@code text} into {@code date}, optionally honouring parse options.
 * @param {string} text The string being parsed.
 * @param {!goog.date.DateLike} date The Date object to hold the parsed date.
 * @param {!goog.i18n.DateTimeParse.ParseOptions=} options The options object.
 * @return {number} How many characters parser advanced.
 */
goog.ui.InputDatePicker.DateParser.prototype.parse = function(
    text, date, options) {};
|
Stop using the deprecated strictParse method in InputDatePicker.
RELNOTES: n/a
PiperOrigin-RevId: 376962873
Change-Id: I87eb5b937ee4b6e6e100843a16512cfcd8964351
|
closure/goog/ui/inputdatepicker.js
|
Stop using the deprecated strictParse method in InputDatePicker. RELNOTES: n/a
|
<ide><path>closure/goog/ui/inputdatepicker.js
<ide> var date = new goog.date.DateTime();
<ide> // DateTime needed as parse assumes it can call getHours(), getMinutes(),
<ide> // etc, on the date if hours and minutes aren't defined.
<del> if (this.dateTimeParser_.strictParse(value, date) > 0) {
<add> if (this.dateTimeParser_.parse(value, date, {validate: true}) > 0) {
<ide> // Parser with YYYY format string will interpret 1 as year 1 A.D.
<ide> // However, datepicker.setDate() method will change it into 1901.
<ide> // Same is true for any other pattern when number entered by user is
<ide> /**
<ide> * @param {string} text The string being parsed.
<ide> * @param {!goog.date.DateLike} date The Date object to hold the parsed date.
<del> * @return {number} How many characters parser advanced.
<del> * @deprecated See the deprecation warning on
<del> * goog.i18n.DateTimeParse#strictParse - this will be deprecated in favour
<del> * of only using the `parse` signature with options.
<del> */
<del>goog.ui.InputDatePicker.DateParser.prototype.strictParse = function(
<del> text, date) {};
<del>
<del>/**
<del> * @param {string} text The string being parsed.
<del> * @param {!goog.date.DateLike} date The Date object to hold the parsed date.
<ide> * @param {!goog.i18n.DateTimeParse.ParseOptions=} options The options object.
<ide> * @return {number} How many characters parser advanced.
<ide> */
|
|
Java
|
lgpl-2.1
|
e5db5b01107b838698f6d8c760b1270f47f0d0e5
| 0 |
esig/dss,esig/dss
|
/**
* DSS - Digital Signature Services
* Copyright (C) 2015 European Commission, provided under the CEF programme
*
* This file is part of the "DSS - Digital Signature Services" project.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package eu.europa.esig.dss.validation;
import eu.europa.esig.dss.CertificateReorderer;
import eu.europa.esig.dss.alert.status.Status;
import eu.europa.esig.dss.enumerations.RevocationReason;
import eu.europa.esig.dss.enumerations.RevocationType;
import eu.europa.esig.dss.model.x509.CertificateToken;
import eu.europa.esig.dss.model.x509.Token;
import eu.europa.esig.dss.model.x509.X500PrincipalHelper;
import eu.europa.esig.dss.model.x509.revocation.Revocation;
import eu.europa.esig.dss.model.x509.revocation.crl.CRL;
import eu.europa.esig.dss.model.x509.revocation.ocsp.OCSP;
import eu.europa.esig.dss.spi.DSSASN1Utils;
import eu.europa.esig.dss.spi.DSSRevocationUtils;
import eu.europa.esig.dss.spi.x509.AlternateUrlsSourceAdapter;
import eu.europa.esig.dss.spi.x509.CandidatesForSigningCertificate;
import eu.europa.esig.dss.spi.x509.CertificateRef;
import eu.europa.esig.dss.spi.x509.CertificateSource;
import eu.europa.esig.dss.spi.x509.CertificateValidity;
import eu.europa.esig.dss.spi.x509.CommonTrustedCertificateSource;
import eu.europa.esig.dss.spi.x509.ListCertificateSource;
import eu.europa.esig.dss.spi.x509.ResponderId;
import eu.europa.esig.dss.spi.x509.aia.AIASource;
import eu.europa.esig.dss.spi.x509.revocation.OfflineRevocationSource;
import eu.europa.esig.dss.spi.x509.revocation.RevocationCertificateSource;
import eu.europa.esig.dss.spi.x509.revocation.RevocationSource;
import eu.europa.esig.dss.spi.x509.revocation.RevocationSourceAlternateUrlsSupport;
import eu.europa.esig.dss.spi.x509.revocation.RevocationToken;
import eu.europa.esig.dss.spi.x509.revocation.ocsp.OCSPToken;
import eu.europa.esig.dss.utils.Utils;
import eu.europa.esig.dss.validation.timestamp.TimestampToken;
import eu.europa.esig.dss.validation.timestamp.TimestampedReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
/**
* During the validation of a signature, the software retrieves different X509 artifacts like Certificate, CRL and OCSP
* Response. The SignatureValidationContext is a "cache" for
* one validation request that contains every object retrieved so far.
*
*/
public class SignatureValidationContext implements ValidationContext {
private static final Logger LOG = LoggerFactory.getLogger(SignatureValidationContext.class);
/**
* A set of certificates to process
*/
private final Set<CertificateToken> processedCertificates = new HashSet<>();
/**
* A set of revocation data to process
*/
private final Set<RevocationToken<?>> processedRevocations = new HashSet<>();
/**
* A set of timestamps to process
*/
private final Set<TimestampToken> processedTimestamps = new HashSet<>();
/**
* The CertificateVerifier to use
*/
private CertificateVerifier certificateVerifier;
/**
* Used to access certificate by AIA.
*/
private AIASource aiaSource;
/** Map of tokens defining if they have been processed yet */
private final Map<Token, Boolean> tokensToProcess = new HashMap<>();
/** The last usage of a timestamp's certificate tokens */
private final Map<CertificateToken, Date> lastTimestampCertChainDates = new HashMap<>();
/** A map of token IDs and their corresponding POE times */
private final Map<String, List<POE>> poeTimes = new HashMap<>();
/** Cached map of tokens and their {@code CertificateToken} issuers */
private final Map<Token, CertificateToken> tokenIssuerMap = new HashMap<>();
/** External OCSP source */
private RevocationSource<OCSP> remoteOCSPSource;
/** External CRL source */
private RevocationSource<CRL> remoteCRLSource;
/** This strategy defines the revocation loading logic and returns OCSP or CRL token for a provided certificate */
private RevocationDataLoadingStrategy revocationDataLoadingStrategy;
/** External trusted certificate sources */
private ListCertificateSource trustedCertSources;
/** External adjunct certificate sources */
private ListCertificateSource adjunctCertSources;
/** CRLs from the document */
private ListRevocationSource<CRL> documentCRLSource = new ListRevocationSource<>();
/** OCSP from the document */
private ListRevocationSource<OCSP> documentOCSPSource = new ListRevocationSource<>();
/** Certificates from the document */
private ListCertificateSource documentCertificateSource = new ListCertificateSource();
/** Certificates collected from AIA */
private ListCertificateSource aiaCertificateSources = new ListCertificateSource();
/** Certificates collected from revocation tokens */
private ListCertificateSource revocationCertificateSources = new ListCertificateSource();
/**
* This variable set the behavior to follow for revocation retrieving in case of
* untrusted certificate chains.
*/
private boolean checkRevocationForUntrustedChains;
/**
* This is the time at what the validation is carried out. It is used only for test purpose.
*/
protected Date currentTime = new Date();
	/**
	 * Initializes this validation context with all sources configured on the
	 * given verifier: remote CRL/OCSP sources, AIA source, revocation data
	 * loading strategy, adjunct and trusted certificate sources, and the
	 * untrusted-chain revocation checking behavior.
	 *
	 * @param certificateVerifier
	 *            The certificates verifier (eg: using the TSL as list of trusted certificates).
	 */
	@Override
	public void initialize(final CertificateVerifier certificateVerifier) {
		Objects.requireNonNull(certificateVerifier);
		this.certificateVerifier = certificateVerifier;
		this.remoteCRLSource = certificateVerifier.getCrlSource();
		this.remoteOCSPSource = certificateVerifier.getOcspSource();
		this.aiaSource = certificateVerifier.getAIASource();
		this.revocationDataLoadingStrategy = certificateVerifier.getRevocationDataLoadingStrategy();
		this.adjunctCertSources = certificateVerifier.getAdjunctCertSources();
		this.trustedCertSources = certificateVerifier.getTrustedCertSources();
		this.checkRevocationForUntrustedChains = certificateVerifier.isCheckRevocationForUntrustedChains();
	}
@Override
public void addSignatureForVerification(final AdvancedSignature signature) {
addDocumentCertificateSource(signature.getCertificateSource());
addDocumentCRLSource(signature.getCRLSource());
addDocumentOCSPSource(signature.getOCSPSource());
registerPOE(signature.getId(), currentTime);
// Add resolved certificates
CertificateToken signingCertificate = signature.getSigningCertificateToken();
if (signingCertificate != null) {
addCertificateTokenForVerification(signingCertificate);
} else {
List<CertificateValidity> certificateValidities = signature.getCandidatesForSigningCertificate().getCertificateValidityList();
if (Utils.isCollectionNotEmpty(certificateValidities)) {
for (CertificateValidity certificateValidity : certificateValidities) {
if (certificateValidity.isValid() && certificateValidity.getCertificateToken() != null) {
addCertificateTokenForVerification(certificateValidity.getCertificateToken());
}
}
}
}
prepareTimestamps(signature.getAllTimestamps());
prepareCounterSignatures(signature.getCounterSignatures());
}
	/**
	 * Merges the given certificate source into the aggregated document
	 * certificate source (see {@code addCertificateSource}).
	 *
	 * @param certificateSource a certificate source extracted from the document
	 */
	@Override
	public void addDocumentCertificateSource(CertificateSource certificateSource) {
		addCertificateSource(documentCertificateSource, certificateSource);
	}
	/**
	 * Merges every source of the given list into the aggregated document
	 * certificate source.
	 *
	 * @param listCertificateSource a list of certificate sources to merge
	 */
	@Override
	public void addDocumentCertificateSource(ListCertificateSource listCertificateSource) {
		for (CertificateSource certificateSource : listCertificateSource.getSources()) {
			addDocumentCertificateSource(certificateSource);
		}
	}
/**
* Adds {@code certificateSourceToAdd} to the given {@code listCertificateSource}
*
* @param listCertificateSource {@link ListCertificateSource} to enrich
* @param certificateSourceToAdd {@link CertificateSource} to add
*/
private void addCertificateSource(ListCertificateSource listCertificateSource, CertificateSource certificateSourceToAdd) {
listCertificateSource.add(certificateSourceToAdd);
// add all existing equivalent certificates for the validation
ListCertificateSource allCertificateSources = getAllCertificateSources();
for (CertificateToken certificateToken : certificateSourceToAdd.getCertificates()) {
final Set<CertificateToken> equivalentCertificates = allCertificateSources.getByPublicKey(certificateToken.getPublicKey());
for (CertificateToken equivalentCertificate : equivalentCertificates) {
if (!certificateToken.getDSSIdAsString().equals(equivalentCertificate.getDSSIdAsString())) {
addCertificateTokenForVerification(certificateToken);
}
}
}
}
	/**
	 * Merges an offline CRL source (e.g. extracted from a signature or a
	 * timestamp) into the aggregated document CRL source.
	 *
	 * @param crlSource the offline CRL source to merge
	 */
	@Override
	public void addDocumentCRLSource(OfflineRevocationSource<CRL> crlSource) {
		documentCRLSource.add(crlSource);
	}
	/**
	 * Merges all sources of the given list into the aggregated document CRL source.
	 *
	 * @param crlSource the list of CRL sources to merge
	 */
	@Override
	public void addDocumentCRLSource(ListRevocationSource<CRL> crlSource) {
		documentCRLSource.addAll(crlSource);
	}
	/**
	 * Merges an offline OCSP source (e.g. extracted from a signature or a
	 * timestamp) into the aggregated document OCSP source.
	 *
	 * @param ocspSource the offline OCSP source to merge
	 */
	@Override
	public void addDocumentOCSPSource(OfflineRevocationSource<OCSP> ocspSource) {
		documentOCSPSource.add(ocspSource);
	}
	/**
	 * Merges all sources of the given list into the aggregated document OCSP source.
	 *
	 * @param ocspSource the list of OCSP sources to merge
	 */
	@Override
	public void addDocumentOCSPSource(ListRevocationSource<OCSP> ocspSource) {
		documentOCSPSource.addAll(ocspSource);
	}
	/**
	 * Queues every timestamp token of the given list for verification.
	 *
	 * @param timestampTokens timestamps covering the signature
	 */
	private void prepareTimestamps(final List<TimestampToken> timestampTokens) {
		for (final TimestampToken timestampToken : timestampTokens) {
			addTimestampTokenForVerification(timestampToken);
		}
	}
	/**
	 * Recursively registers every counter-signature for verification.
	 *
	 * @param counterSignatures counter-signatures of the signature
	 */
	private void prepareCounterSignatures(final List<AdvancedSignature> counterSignatures) {
		for (AdvancedSignature counterSignature : counterSignatures) {
			addSignatureForVerification(counterSignature);
		}
	}
	/**
	 * Returns the time the validation is carried out at.
	 *
	 * @return the validation time
	 */
	@Override
	public Date getCurrentTime() {
		return currentTime;
	}
	/**
	 * Overrides the validation time (see {@code currentTime}; used for test purpose).
	 *
	 * @param currentTime the validation time to use; must not be null
	 */
	@Override
	public void setCurrentTime(final Date currentTime) {
		Objects.requireNonNull(currentTime);
		this.currentTime = currentTime;
	}
/**
* This method returns a token to verify. If there is no more tokens to verify null is returned.
*
* @return token to verify or null
*/
private Token getNotYetVerifiedToken() {
synchronized (tokensToProcess) {
for (final Entry<Token, Boolean> entry : tokensToProcess.entrySet()) {
if (entry.getValue() == null) {
entry.setValue(true);
return entry.getKey();
}
}
return null;
}
}
/**
* This method returns a timestamp token to verify. If there is no more tokens to verify null is returned.
*
* @return token to verify or null
*/
private TimestampToken getNotYetVerifiedTimestamp() {
synchronized (tokensToProcess) {
for (final Entry<Token, Boolean> entry : tokensToProcess.entrySet()) {
if (entry.getValue() == null && entry.getKey() instanceof TimestampToken) {
entry.setValue(true);
return (TimestampToken) entry.getKey();
}
}
return null;
}
}
	/**
	 * Groups all processed certificates into ordered certificate chains using
	 * {@link CertificateReorderer} (the exact key/ordering semantics are
	 * defined by that class).
	 *
	 * @return the ordered certificate chains of the processed certificates
	 */
	private final Map<CertificateToken, List<CertificateToken>> getOrderedCertificateChains() {
		final CertificateReorderer order = new CertificateReorderer(processedCertificates);
		return order.getOrderedCertificateChains();
	}
/**
* This method builds the complete certificate chain from the given token.
*
* @param token
* the token for which the certificate chain must be obtained.
* @return the built certificate chain
*/
private List<Token> getCertChain(final Token token) {
List<Token> chain = new LinkedList<>();
Token issuerCertificateToken = token;
do {
chain.add(issuerCertificateToken);
issuerCertificateToken = getIssuer(issuerCertificateToken);
} while (issuerCertificateToken != null && !chain.contains(issuerCertificateToken));
return chain;
}
	/**
	 * Determines the issuer certificate of the given token, trying in order:
	 * the cache, the token's own certificate source (for OCSP/timestamp
	 * tokens), all aggregated certificate sources, AIA retrieval (for
	 * certificates only), and OCSP/TSA specific resolution. A resolved issuer
	 * is queued for verification and cached.
	 *
	 * @param token the token to find the issuer for
	 * @return the issuer {@code CertificateToken}, or {@code null} when not resolved
	 */
	private CertificateToken getIssuer(final Token token) {
		// Return cached value
		CertificateToken issuerCertificateToken = getIssuerFromProcessedCertificates(token);
		if (issuerCertificateToken != null) {
			return issuerCertificateToken;
		}
		// Find issuer candidates from a particular certificate source
		Set<CertificateToken> candidates = Collections.emptySet();
		if ((issuerCertificateToken == null) && (token instanceof OCSPToken)) {
			candidates = getIssuersFromSource(token, ((OCSPToken) token).getCertificateSource());
		}
		if ((issuerCertificateToken == null) && (token instanceof TimestampToken)) {
			candidates = getIssuersFromSource(token, ((TimestampToken) token).getCertificateSource());
		}
		ListCertificateSource allCertificateSources = getAllCertificateSources();
		if (Utils.isCollectionEmpty(candidates)) {
			// Find issuer candidates from all sources
			candidates = getIssuersFromSources(token, allCertificateSources);
		}
		issuerCertificateToken = getTokenIssuerFromCandidates(token, candidates);
		// For certificates, fall back to the issuer published via the AIA extension
		if ((issuerCertificateToken == null) && (token instanceof CertificateToken) && aiaSource != null) {
			final AIACertificateSource aiaCertificateSource = new AIACertificateSource((CertificateToken) token, aiaSource);
			issuerCertificateToken = aiaCertificateSource.getIssuerFromAIA();
			addCertificateSource(aiaCertificateSources, aiaCertificateSource);
		}
		if ((issuerCertificateToken == null) && (token instanceof OCSPToken)) {
			issuerCertificateToken = getOCSPIssuer((OCSPToken) token, allCertificateSources);
		}
		if ((issuerCertificateToken == null) && (token instanceof TimestampToken)) {
			issuerCertificateToken = getTSACertificate((TimestampToken) token, allCertificateSources);
		}
		if (issuerCertificateToken != null) {
			addCertificateTokenForVerification(issuerCertificateToken);
			tokenIssuerMap.put(token, issuerCertificateToken);
		}
		return issuerCertificateToken;
	}
	/**
	 * Returns the cached issuer of the token. When the token does not yet
	 * expose a signer public key, the signature binding is re-checked via
	 * {@code isSignedBy} before the cached value is returned.
	 *
	 * @param token the token to find the issuer for
	 * @return the cached issuer certificate, or {@code null} when not cached/usable
	 */
	private CertificateToken getIssuerFromProcessedCertificates(Token token) {
		CertificateToken issuerCertificateToken = tokenIssuerMap.get(token);
		// isSignedBy(...) check is required when a certificates is present in different sources
		// in order to instantiate a public key of the signer
		if (issuerCertificateToken != null &&
				(token.getPublicKeyOfTheSigner() != null || token.isSignedBy(issuerCertificateToken))) {
			return issuerCertificateToken;
		}
		return null;
	}
@Override
public ListCertificateSource getAllCertificateSources() {
ListCertificateSource allCertificateSources = new ListCertificateSource();
allCertificateSources.addAll(documentCertificateSource);
allCertificateSources.addAll(revocationCertificateSources);
allCertificateSources.addAll(aiaCertificateSources);
allCertificateSources.addAll(adjunctCertSources);
allCertificateSources.addAll(trustedCertSources);
return allCertificateSources;
}
	/**
	 * Returns the aggregated certificate source extracted from the document(s).
	 *
	 * @return the document {@link ListCertificateSource}
	 */
	@Override
	public ListCertificateSource getDocumentCertificateSource() {
		return documentCertificateSource;
	}
	/**
	 * Returns the aggregated CRL source extracted from the document(s).
	 *
	 * @return the document CRL source
	 */
	@Override
	public ListRevocationSource<CRL> getDocumentCRLSource() {
		return documentCRLSource;
	}
	/**
	 * Returns the aggregated OCSP source extracted from the document(s).
	 *
	 * @return the document OCSP source
	 */
	@Override
	public ListRevocationSource<OCSP> getDocumentOCSPSource() {
		return documentOCSPSource;
	}
private Set<CertificateToken> getIssuersFromSources(Token token, ListCertificateSource allCertificateSources) {
if (token.getPublicKeyOfTheSigner() != null) {
return allCertificateSources.getByPublicKey(token.getPublicKeyOfTheSigner());
} else if (token.getIssuerX500Principal() != null) {
return allCertificateSources.getBySubject(new X500PrincipalHelper(token.getIssuerX500Principal()));
}
return Collections.emptySet();
}
private Set<CertificateToken> getIssuersFromSource(Token token, CertificateSource certificateSource) {
if (token.getPublicKeyOfTheSigner() != null) {
return certificateSource.getByPublicKey(token.getPublicKeyOfTheSigner());
} else if (token.getIssuerX500Principal() != null) {
return certificateSource.getBySubject(new X500PrincipalHelper(token.getIssuerX500Principal()));
}
return Collections.emptySet();
}
	/**
	 * Resolves the signing certificate of an OCSP response from its single
	 * embedded certificate reference, matching the responder id (by SKI and/or
	 * subject name) against all known certificate sources.
	 *
	 * @param token the OCSP token to find the signer for
	 * @param allCertificateSources the aggregated certificate sources
	 * @return the OCSP signing certificate, or {@code null} when not resolvable
	 */
	private CertificateToken getOCSPIssuer(OCSPToken token, ListCertificateSource allCertificateSources) {
		Set<CertificateRef> signingCertificateRefs = token.getCertificateSource().getAllCertificateRefs();
		// only an unambiguous (single) reference with a responder id is usable
		if (Utils.collectionSize(signingCertificateRefs) == 1) {
			CertificateRef signingCertificateRef = signingCertificateRefs.iterator().next();
			ResponderId responderId = signingCertificateRef.getResponderId();
			if (responderId != null) {
				Set<CertificateToken> issuerCandidates = new HashSet<>();
				if (responderId.getSki() != null) {
					issuerCandidates.addAll(allCertificateSources.getBySki(responderId.getSki()));
				}
				if (responderId.getX500Principal() != null) {
					issuerCandidates.addAll(allCertificateSources.getBySubject(new X500PrincipalHelper(responderId.getX500Principal())));
				}
				return getTokenIssuerFromCandidates(token, issuerCandidates);
			}
		}
		LOG.warn("Signing certificate is not found for an OCSPToken with id '{}'.", token.getDSSIdAsString());
		return null;
	}
private CertificateToken getTSACertificate(TimestampToken timestamp, ListCertificateSource allCertificateSources) {
CandidatesForSigningCertificate candidatesForSigningCertificate = timestamp.getCandidatesForSigningCertificate();
CertificateValidity theBestCandidate = candidatesForSigningCertificate.getTheBestCandidate();
if (theBestCandidate != null) {
Set<CertificateToken> issuerCandidates = new HashSet<>();
CertificateToken timestampSigner = theBestCandidate.getCertificateToken();
if (timestampSigner == null) {
issuerCandidates.addAll(allCertificateSources.getByCertificateIdentifier(theBestCandidate.getSignerInfo()));
} else {
issuerCandidates.add(timestampSigner);
}
return getTokenIssuerFromCandidates(timestamp, issuerCandidates);
}
return null;
}
private CertificateToken getTokenIssuerFromCandidates(Token token, Collection<CertificateToken> candidates) {
List<CertificateToken> issuers = new ArrayList<>();
for (CertificateToken candidate : candidates) {
if (token.isSignedBy(candidate)) {
issuers.add(candidate);
if (candidate.isValidOn(token.getCreationDate())) {
return candidate;
}
}
}
if (Utils.isCollectionNotEmpty(issuers)) {
LOG.warn("No issuer found for the token creation date. The process continues with an issuer which has the same public key.");
return issuers.iterator().next();
}
return null;
}
	/**
	 * Adds a new token to the list of tokens to verify only if it was not already
	 * verified. Also registers a POE at the current validation time for the token.
	 *
	 * @param token
	 *            token to verify
	 * @return true if the token was not yet verified, false otherwise.
	 */
	private boolean addTokenForVerification(final Token token) {
		if (token == null) {
			return false;
		}
		final boolean traceEnabled = LOG.isTraceEnabled();
		if (traceEnabled) {
			LOG.trace("addTokenForVerification: trying to acquire synchronized block");
		}
		synchronized (tokensToProcess) {
			try {
				if (tokensToProcess.containsKey(token)) {
					if (traceEnabled) {
						LOG.trace("Token was already in the list {}:{}", token.getClass().getSimpleName(), token.getAbbreviation());
					}
					return false;
				}
				// a null value marks the token as "not processed yet"
				tokensToProcess.put(token, null);
				registerPOE(token.getDSSIdAsString(), currentTime);
				if (traceEnabled) {
					LOG.trace("+ New {} to check: {}", token.getClass().getSimpleName(), token.getAbbreviation());
				}
				return true;
			} finally {
				if (traceEnabled) {
					LOG.trace("addTokenForVerification: almost left synchronized block");
				}
			}
		}
	}
	/**
	 * Queues a revocation token for verification, together with its embedded
	 * certificate source and its issuer certificate (when known).
	 *
	 * @param revocationToken the revocation token to verify
	 */
	@Override
	public void addRevocationTokenForVerification(RevocationToken revocationToken) {
		if (addTokenForVerification(revocationToken)) {
			RevocationCertificateSource revocationCertificateSource = revocationToken.getCertificateSource();
			if (revocationCertificateSource != null) {
				addCertificateSource(revocationCertificateSources, revocationCertificateSource);
			}
			CertificateToken issuerCertificateToken = revocationToken.getIssuerCertificateToken();
			if (issuerCertificateToken != null) {
				addCertificateTokenForVerification(issuerCertificateToken);
			}
			final boolean added = processedRevocations.add(revocationToken);
			if (LOG.isTraceEnabled()) {
				if (added) {
					LOG.trace("RevocationToken added to processedRevocations: {} ", revocationToken);
				} else {
					LOG.trace("RevocationToken already present processedRevocations: {} ", revocationToken);
				}
			}
		}
	}
@Override
public void addCertificateTokenForVerification(final CertificateToken certificateToken) {
if (addTokenForVerification(certificateToken)) {
final boolean added = processedCertificates.add(certificateToken);
if (LOG.isTraceEnabled()) {
if (added) {
LOG.trace("CertificateToken added to processedCertificates: {} ", certificateToken);
} else {
LOG.trace("CertificateToken already present processedCertificates: {} ", certificateToken);
}
}
}
}
@Override
public void addTimestampTokenForVerification(final TimestampToken timestampToken) {
if (addTokenForVerification(timestampToken)) {
addDocumentCertificateSource(timestampToken.getCertificateSource());
addDocumentCRLSource(timestampToken.getCRLSource());
addDocumentOCSPSource(timestampToken.getOCSPSource());
List<CertificateValidity> certificateValidities = timestampToken.getCandidatesForSigningCertificate().getCertificateValidityList();
if (Utils.isCollectionNotEmpty(certificateValidities)) {
for (CertificateValidity certificateValidity : certificateValidities) {
if (certificateValidity.isValid() && certificateValidity.getCertificateToken() != null) {
addCertificateTokenForVerification(certificateValidity.getCertificateToken());
}
}
}
final boolean added = processedTimestamps.add(timestampToken);
if (LOG.isTraceEnabled()) {
if (added) {
LOG.trace("TimestampToken added to processedTimestamps: {} ", processedTimestamps);
} else {
LOG.trace("TimestampToken already present processedTimestamps: {} ", processedTimestamps);
}
}
}
}
	/**
	 * Records the timestamp's creation date as the "last usage" date for every
	 * certificate of its TSA chain (stopping at the first self-signed or
	 * trusted certificate), and registers a POE provided by the timestamp for
	 * every reference it covers.
	 *
	 * @param timestampToken the timestamp token to process
	 */
	private void registerUsageDate(TimestampToken timestampToken) {
		CertificateToken tsaCertificate = getTSACertificate(timestampToken, getAllCertificateSources());
		if (tsaCertificate == null) {
			LOG.warn("No Timestamp Certificate found. Chain is skipped.");
			return;
		}
		List<CertificateToken> tsaCertificateChain = toCertificateTokenChain(getCertChain(tsaCertificate));
		Date usageDate = timestampToken.getCreationDate();
		for (CertificateToken cert : tsaCertificateChain) {
			if (isSelfSignedOrTrusted(cert)) {
				break;
			}
			// keep the most recent usage date per certificate
			Date lastUsage = lastTimestampCertChainDates.get(cert);
			if (lastUsage == null || lastUsage.before(usageDate)) {
				lastTimestampCertChainDates.put(cert, usageDate);
			}
		}
		for (TimestampedReference timestampedReference : timestampToken.getTimestampedReferences()) {
			registerPOE(timestampedReference.getObjectId(), timestampToken);
		}
	}
private void registerPOE(String tokenId, TimestampToken timestampToken) {
List<POE> poeTimeList = poeTimes.get(tokenId);
if (Utils.isCollectionEmpty(poeTimeList)) {
poeTimeList = new ArrayList<>();
poeTimes.put(tokenId, poeTimeList);
}
poeTimeList.add(new POE(timestampToken));
}
private void registerPOE(String tokenId, Date poeTime) {
List<POE> poeTimeList = poeTimes.get(tokenId);
if (Utils.isCollectionEmpty(poeTimeList)) {
poeTimeList = new ArrayList<>();
poeTimes.put(tokenId, poeTimeList);
}
poeTimeList.add(new POE(poeTime));
}
private List<CertificateToken> toCertificateTokenChain(List<Token> tokens) {
List<CertificateToken> chain = new LinkedList<>();
for (Token token : tokens) {
if (token instanceof CertificateToken) {
chain.add((CertificateToken) token);
}
}
return chain;
}
	/**
	 * Carries out the validation: timestamp tokens are resolved first (their
	 * certificate chains are built and {@code registerUsageDate} records the
	 * POEs they provide), then every remaining queued token is processed and
	 * revocation data is retrieved for each certificate token.
	 */
	@Override
	public void validate() {
		// process timestamps first so that their POEs are registered before
		// the remaining tokens are checked
		TimestampToken timestampToken = getNotYetVerifiedTimestamp();
		while (timestampToken != null) {
			getCertChain(timestampToken);
			registerUsageDate(timestampToken);
			timestampToken = getNotYetVerifiedTimestamp();
		}
		Token token = getNotYetVerifiedToken();
		while (token != null) {
			// extract the certificate chain and add missing tokens for verification
			List<Token> certChain = getCertChain(token);
			if (token instanceof CertificateToken) {
				getRevocationData((CertificateToken) token, certChain);
			}
			token = getNotYetVerifiedToken();
		}
	}
	/**
	 * Retrieves the revocation data from signature (if exists) or from the online
	 * sources. The issuer certificate must be provided, the underlying library
	 * (bouncy castle) needs it to build the request.
	 *
	 * @param certToken the current token
	 * @param certChain the complete chain
	 * @return a set of found {@link RevocationToken}s
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	private Set<RevocationToken> getRevocationData(final CertificateToken certToken, List<Token> certChain) {
		if (LOG.isTraceEnabled()) {
			LOG.trace("Checking revocation data for : {}", certToken.getDSSIdAsString());
		}
		if (isRevocationDataNotRequired(certToken)) {
			LOG.debug("Revocation data is not required for certificate : {}", certToken.getDSSIdAsString());
			return Collections.emptySet();
		}
		CertificateToken issuerToken = getIssuer(certToken);
		if (issuerToken == null) {
			LOG.warn("Issuer not found for certificate {}", certToken.getDSSIdAsString());
			return Collections.emptySet();
		}
		Set<RevocationToken> revocations = new HashSet<>();
		// ALL Embedded revocation data
		if (documentCRLSource != null) {
			List<RevocationToken<CRL>> revocationTokens = documentCRLSource.getRevocationTokens(certToken, issuerToken);
			for (RevocationToken revocationToken : revocationTokens) {
				revocations.add(revocationToken);
				addRevocationTokenForVerification(revocationToken);
			}
		}
		if (documentOCSPSource != null) {
			List<RevocationToken<OCSP>> revocationTokens = documentOCSPSource.getRevocationTokens(certToken, issuerToken);
			for (RevocationToken revocationToken : revocationTokens) {
				revocations.add(revocationToken);
				addRevocationTokenForVerification(revocationToken);
			}
		}
		// add processed revocation tokens
		revocations.addAll(getRelatedRevocationTokens(certToken));
		// fall back to external (online) sources when no embedded data was
		// found or the existing data needs to be refreshed
		if (Utils.isCollectionEmpty(revocations) || isRevocationDataRefreshNeeded(certToken, revocations)) {
			LOG.debug("The signature does not contain relative revocation data.");
			if (checkRevocationForUntrustedChains || containsTrustAnchor(certChain)) {
				LOG.trace("Revocation update is in progress for certificate : {}", certToken.getDSSIdAsString());
				CertificateToken trustAnchor = (CertificateToken) getFirstTrustAnchor(certChain);
				// Fetch OCSP or CRL from online sources
				final RevocationToken<Revocation> onlineRevocationToken = getRevocationToken(
						certToken, issuerToken, trustAnchor);
				// Check if the obtained revocation is not yet present
				if (onlineRevocationToken != null && !revocations.contains(onlineRevocationToken)) {
					LOG.debug("Obtained a new revocation data : {}, for certificate : {}",
							onlineRevocationToken.getDSSIdAsString(), certToken.getDSSIdAsString());
					revocations.add(onlineRevocationToken);
					addRevocationTokenForVerification(onlineRevocationToken);
				}
			} else {
				LOG.warn("External revocation check is skipped for untrusted certificate : {}", certToken.getDSSIdAsString());
			}
		}
		if (revocations.isEmpty()) {
			LOG.warn("No revocation found for the certificate {}", certToken.getDSSIdAsString());
		}
		return revocations;
	}
/**
 * Checks whether the given certificate chain contains at least one trusted token.
 *
 * @param certChain the chain to inspect
 * @param <T> sub-type of {@code Token}
 * @return true when a trust anchor is present in the chain
 */
private <T extends Token> boolean containsTrustAnchor(List<T> certChain) {
	final Token trustAnchor = getFirstTrustAnchor(certChain);
	return trustAnchor != null;
}
/**
 * Finds the first trusted token within the given certificate chain.
 *
 * @param certChain the chain to inspect (ordered from leaf to root)
 * @param <T> sub-type of {@code Token}
 * @return the first trusted {@link Token}, or null when none is trusted
 */
private <T extends Token> Token getFirstTrustAnchor(List<T> certChain) {
	Token result = null;
	for (T candidate : certChain) {
		if (isTrusted(candidate)) {
			result = candidate;
			break;
		}
	}
	return result;
}
/**
 * Fetches a fresh revocation token (OCSP or CRL) for the given certificate via the
 * configured {@code revocationDataLoadingStrategy}. When trusted certificate sources
 * exist and a trust anchor is known, revocation sources are wrapped to also query the
 * alternative URLs declared by the trust services.
 *
 * @param certificateToken the certificate to obtain revocation data for
 * @param issuerCertificate the issuer of {@code certificateToken}
 * @param trustAnchor the chain's trust anchor, or null for an untrusted chain
 * @return the obtained revocation token, or null when none could be retrieved
 */
private RevocationToken getRevocationToken(CertificateToken certificateToken, CertificateToken issuerCertificate,
		CertificateToken trustAnchor) {
	// configure the CompositeRevocationSource
	RevocationSource<OCSP> currentOCSPSource;
	RevocationSource<CRL> currentCRLSource;
	ListCertificateSource currentCertSource = null;
	if (!trustedCertSources.isEmpty() && (trustAnchor != null)) {
		LOG.trace("Initializing a revocation verifier for a trusted chain...");
		currentOCSPSource = instantiateOCSPWithTrustServices(trustAnchor);
		currentCRLSource = instantiateCRLWithTrustServices(trustAnchor);
		currentCertSource = trustedCertSources;
	} else {
		LOG.trace("Initializing a revocation verifier for not trusted chain...");
		currentOCSPSource = remoteOCSPSource;
		currentCRLSource = remoteCRLSource;
	}
	// the strategy is stateful: sources are injected before each retrieval
	revocationDataLoadingStrategy.setOcspSource(currentOCSPSource);
	revocationDataLoadingStrategy.setCrlSource(currentCRLSource);
	revocationDataLoadingStrategy.setTrustedCertificateSource(currentCertSource);
	// fetch the data
	return revocationDataLoadingStrategy.getRevocationToken(certificateToken, issuerCertificate);
}
/**
 * Returns the OCSP source to use for a trusted chain: when the trust services declare
 * alternative OCSP URLs and the remote source supports them, the source is wrapped in
 * an adapter querying those URLs; otherwise the plain remote source is returned.
 *
 * @param trustAnchor the trust anchor of the chain being validated
 * @return the {@link RevocationSource} to use for OCSP retrieval
 */
private RevocationSource<OCSP> instantiateOCSPWithTrustServices(CertificateToken trustAnchor) {
	final List<String> alternativeUrls = getAlternativeOCSPUrls(trustAnchor);
	if (Utils.isCollectionEmpty(alternativeUrls) || !(remoteOCSPSource instanceof RevocationSourceAlternateUrlsSupport)) {
		return remoteOCSPSource;
	}
	return new AlternateUrlsSourceAdapter<>((RevocationSourceAlternateUrlsSupport) remoteOCSPSource, alternativeUrls);
}
/**
 * Returns the CRL source to use for a trusted chain: when the trust services declare
 * alternative CRL URLs and the remote source supports them, the source is wrapped in
 * an adapter querying those URLs; otherwise the plain remote source is returned.
 *
 * @param trustAnchor the trust anchor of the chain being validated
 * @return the {@link RevocationSource} to use for CRL retrieval
 */
private RevocationSource<CRL> instantiateCRLWithTrustServices(CertificateToken trustAnchor) {
	final List<String> alternativeUrls = getAlternativeCRLUrls(trustAnchor);
	if (Utils.isCollectionEmpty(alternativeUrls) || !(remoteCRLSource instanceof RevocationSourceAlternateUrlsSupport)) {
		return remoteCRLSource;
	}
	return new AlternateUrlsSourceAdapter<>((RevocationSourceAlternateUrlsSupport) remoteCRLSource, alternativeUrls);
}
/**
 * Collects the alternative OCSP access URLs declared by all trusted certificate
 * sources for the given trust anchor.
 *
 * @param trustAnchor the trust anchor to collect URLs for
 * @return list of alternative OCSP URLs (possibly empty, never null)
 */
private List<String> getAlternativeOCSPUrls(CertificateToken trustAnchor) {
	final List<String> urls = new ArrayList<>();
	for (CertificateSource source : trustedCertSources.getSources()) {
		if (source instanceof CommonTrustedCertificateSource) {
			urls.addAll(((CommonTrustedCertificateSource) source).getAlternativeOCSPUrls(trustAnchor));
		}
	}
	return urls;
}
/**
 * Collects the alternative CRL access URLs declared by all trusted certificate
 * sources for the given trust anchor.
 *
 * @param trustAnchor the trust anchor to collect URLs for
 * @return list of alternative CRL URLs (possibly empty, never null)
 */
private List<String> getAlternativeCRLUrls(CertificateToken trustAnchor) {
	final List<String> urls = new ArrayList<>();
	for (CertificateSource source : trustedCertSources.getSources()) {
		if (source instanceof CommonTrustedCertificateSource) {
			urls.addAll(((CommonTrustedCertificateSource) source).getAlternativeCRLUrls(trustAnchor));
		}
	}
	return urls;
}
/**
 * Verifies that revocation data is present for every processed certificate chain
 * (up to the first trusted or self-signed certificate). When data is missing, the
 * verifier's {@code alertOnMissingRevocationData} is triggered with the collected errors.
 *
 * @return true when all required revocation data is present
 */
@Override
public boolean checkAllRequiredRevocationDataPresent() {
	List<String> errors = new ArrayList<>();
	Map<CertificateToken, List<CertificateToken>> orderedCertificateChains = getOrderedCertificateChains();
	for (List<CertificateToken> orderedCertChain : orderedCertificateChains.values()) {
		// null bestSignatureTime -> simple presence check, no freshness constraint
		checkRevocationForCertificateChainAgainstBestSignatureTime(orderedCertChain, null, errors);
	}
	if (!errors.isEmpty()) {
		Status status = new Status("Revocation data is missing for one or more certificate(s).", errors);
		certificateVerifier.getAlertOnMissingRevocationData().alert(status);
	}
	return errors.isEmpty();
}
/**
 * Checks that each certificate of the chain (up to the first trusted/self-signed one)
 * has revocation data, optionally issued after {@code bestSignatureTime}. Problems are
 * appended to {@code errors} rather than thrown.
 *
 * @param certificates the ordered certificate chain to check
 * @param bestSignatureTime the time after which revocation data must have been issued,
 *                          or null for a simple presence check
 * @param errors the list collecting human-readable error messages (modified in place)
 */
private void checkRevocationForCertificateChainAgainstBestSignatureTime(List<CertificateToken> certificates,
		Date bestSignatureTime, List<String> errors) {
	for (CertificateToken certificateToken : certificates) {
		if (isSelfSignedOrTrusted(certificateToken)) {
			// break on the first trusted entry
			break;
		} else if (isOCSPNoCheckExtension(certificateToken)) {
			// skip the revocation check for OCSP certs if no check is specified
			continue;
		}
		boolean found = false;
		Date earliestNextUpdate = null; // used for informational purpose only
		List<RevocationToken> relatedRevocationTokens = getRelatedRevocationTokens(certificateToken);
		for (RevocationToken<Revocation> revocationToken : relatedRevocationTokens) {
			// acceptable revocation data: any (when no time constraint) or issued after bestSignatureTime
			if (bestSignatureTime == null || revocationToken.getThisUpdate().after(bestSignatureTime)) {
				found = true;
				break;
			} else {
				// remember the earliest nextUpdate to enrich the error message
				if (revocationToken.getNextUpdate() != null &&
						(earliestNextUpdate == null || revocationToken.getNextUpdate().before(earliestNextUpdate))) {
					earliestNextUpdate = revocationToken.getNextUpdate();
				}
			}
		}
		if (!found) {
			if (!certificateVerifier.isCheckRevocationForUntrustedChains() && !containsTrustAnchor(certificates)) {
				errors.add(String.format("Revocation data is skipped for untrusted certificate chain for the token : '%s'", certificateToken.getDSSIdAsString()));
			} else if (bestSignatureTime == null) {
				// simple revocation presence check
				errors.add(String.format("No revocation data found for certificate : %s", certificateToken.getDSSIdAsString()));
			} else if (earliestNextUpdate != null) {
				errors.add(String.format(
						"No revocation data found after the best signature time [%s] "
								+ "for the certificate : %s. \n The nextUpdate available after : [%s]",
						bestSignatureTime, certificateToken.getDSSIdAsString(), earliestNextUpdate));
			} else {
				errors.add(String.format("No revocation data found after the best signature time [%s] for the certificate : %s", bestSignatureTime,
						certificateToken.getDSSIdAsString()));
			}
		}
	}
}
/**
 * Verifies that each certificate used to provide a POE (timestamp certificate chains)
 * is covered by revocation data produced after the certificate's last usage time.
 * When uncovered POEs are found, the verifier's {@code alertOnUncoveredPOE} is triggered.
 *
 * @return true when every POE certificate is covered by a valid revocation data
 */
@Override
public boolean checkAllPOECoveredByRevocationData() {
	List<String> errors = new ArrayList<>();
	for (Entry<CertificateToken, Date> entry : lastTimestampCertChainDates.entrySet()) {
		Date lastUsage = entry.getValue();
		CertificateToken certificateToken = entry.getKey();
		if (!isRevocationDataNotRequired(certificateToken)) {
			boolean foundValidRevocationDataAfterLastUsage = false;
			Date nextUpdate = null; // latest known nextUpdate, for the error message only
			List<RevocationToken> relatedRevocationTokens = getRelatedRevocationTokens(certificateToken);
			for (RevocationToken<Revocation> revocationToken : relatedRevocationTokens) {
				Date productionDate = revocationToken.getProductionDate();
				if (productionDate.after(lastUsage)) {
					foundValidRevocationDataAfterLastUsage = true;
					break;
				}
				Date currentNextUpdate = revocationToken.getNextUpdate();
				if (nextUpdate == null || (currentNextUpdate != null && nextUpdate.before(currentNextUpdate))) {
					nextUpdate = currentNextUpdate;
				}
			}
			if (!foundValidRevocationDataAfterLastUsage) {
				errors.add(String.format("POE certificate '%s' not covered by a valid revocation data (nextUpdate : %s)",
						certificateToken.getDSSIdAsString(), nextUpdate));
			}
		}
	}
	if (!errors.isEmpty()) {
		Status status = new Status("Revocation data is missing for one or more POE(s).", errors);
		certificateVerifier.getAlertOnUncoveredPOE().alert(status);
	}
	return errors.isEmpty();
}
/**
 * Verifies the cryptographic validity of all processed timestamps (intact signature
 * and found/intact message imprint). Invalid timestamps trigger the verifier's
 * {@code alertOnInvalidTimestamp}.
 *
 * @return true when all processed timestamps are cryptographically valid
 */
@Override
public boolean checkAllTimestampsValid() {
	final Set<String> invalidTimestampIds = new HashSet<>();
	for (TimestampToken timestampToken : processedTimestamps) {
		final boolean valid = timestampToken.isSignatureIntact()
				&& timestampToken.isMessageImprintDataFound()
				&& timestampToken.isMessageImprintDataIntact();
		if (!valid) {
			invalidTimestampIds.add(timestampToken.getDSSIdAsString());
		}
	}
	if (invalidTimestampIds.isEmpty()) {
		return true;
	}
	certificateVerifier.getAlertOnInvalidTimestamp()
			.alert(new Status("Broken timestamp(s) detected.", invalidTimestampIds));
	return false;
}
/**
 * Verifies that no processed certificate is revoked, suspended, or has an unknown
 * revocation status at its lowest known POE time. Only available revocation data is
 * checked (presence is verified by {@code checkAllRequiredRevocationDataPresent()}).
 * Offending certificates trigger the verifier's {@code alertOnRevokedCertificate}.
 *
 * @return true when no revoked/suspended certificate is detected
 */
@Override
public boolean checkAllCertificatesValid() {
	Set<String> invalidCertificateIds = new HashSet<>();
	for (CertificateToken certificateToken : processedCertificates) {
		if (!isRevocationDataNotRequired(certificateToken)) {
			List<RevocationToken> relatedRevocationTokens = getRelatedRevocationTokens(certificateToken);
			// check only available revocation data in order to not duplicate
			// the method {@code checkAllRequiredRevocationDataPresent()}
			if (Utils.isCollectionNotEmpty(relatedRevocationTokens)) {
				// check if there is a best-signature-time before the revocation date
				Date lowestPOETime = getLowestPOETime(certificateToken);
				for (RevocationToken<Revocation> revocationToken : relatedRevocationTokens) {
					// invalid when revoked at/before the lowest POE, or when the status is unknown
					if ((revocationToken.getStatus().isRevoked() && lowestPOETime != null &&
							!lowestPOETime.before(revocationToken.getRevocationDate())) ||
							!revocationToken.getStatus().isKnown()) {
						invalidCertificateIds.add(certificateToken.getDSSIdAsString());
					}
				}
			}
		}
	}
	if (!invalidCertificateIds.isEmpty()) {
		Status status = new Status("Revoked/Suspended certificate(s) detected.", invalidCertificateIds);
		certificateVerifier.getAlertOnRevokedCertificate().alert(status);
	}
	return invalidCertificateIds.isEmpty();
}
/**
 * Tells whether revocation checking may be skipped for the given certificate:
 * trusted or self-signed certificates, and OCSP responder certificates carrying
 * the id-pkix-ocsp-nocheck extension.
 */
private boolean isRevocationDataNotRequired(CertificateToken certToken) {
	if (isSelfSignedOrTrusted(certToken)) {
		return true;
	}
	return isOCSPNoCheckExtension(certToken);
}

/** Tells whether the certificate is self-signed or anchored in a trusted source. */
private boolean isSelfSignedOrTrusted(CertificateToken certToken) {
	final boolean selfSigned = certToken.isSelfSigned();
	return selfSigned || isTrusted(certToken);
}

/** Tells whether the certificate carries the id-pkix-ocsp-nocheck extension. */
private boolean isOCSPNoCheckExtension(CertificateToken certToken) {
	return DSSASN1Utils.hasIdPkixOcspNoCheckExtension(certToken);
}
/**
 * Returns all processed revocation tokens that relate to the given certificate
 * (matched by the certificate's DSS identifier).
 *
 * @param certificateToken the certificate to find revocation data for
 * @return list of related revocation tokens (possibly empty, never null)
 */
private List<RevocationToken> getRelatedRevocationTokens(CertificateToken certificateToken) {
	final String certificateId = certificateToken.getDSSIdAsString();
	final List<RevocationToken> related = new ArrayList<>();
	for (RevocationToken<?> candidate : processedRevocations) {
		if (Utils.areStringsEqual(certificateId, candidate.getRelatedCertificateId())) {
			related.add(candidate);
		}
	}
	return related;
}
/**
 * Decides whether fresh revocation data must be fetched for the certificate:
 * true unless at least one known revocation token was produced after the relevant
 * reference time (last timestamp-chain usage, or the lowest POE otherwise), is not
 * a CERTIFICATE_HOLD, and is consistent with the certificate.
 *
 * @param certToken the certificate under validation
 * @param revocations the revocation tokens already known for this certificate
 * @return true when a revocation data refresh is needed
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private boolean isRevocationDataRefreshNeeded(CertificateToken certToken, Collection<RevocationToken> revocations) {
	// get last usage dates for the same timestamp certificate chain
	Date refreshNeededAfterTime = lastTimestampCertChainDates.get(certToken);
	if (refreshNeededAfterTime == null) {
		// the best signature time for other tokens (i.e. B-level and revocation data)
		// shall not return null
		refreshNeededAfterTime = getLowestPOETime(certToken);
	}
	boolean freshRevocationDataFound = false;
	for (RevocationToken<Revocation> revocationToken : revocations) {
		// fresh = produced after the reference time, not on hold, and consistent
		if (refreshNeededAfterTime != null && (refreshNeededAfterTime.before(revocationToken.getProductionDate()))
				&& (RevocationReason.CERTIFICATE_HOLD != revocationToken.getReason()
						&& isConsistent(revocationToken, certToken))) {
			freshRevocationDataFound = true;
			break;
		}
	}
	if (!freshRevocationDataFound) {
		LOG.debug("Revocation data refresh is needed");
		return true;
	}
	return false;
}
/**
 * Returns the earliest proof-of-existence time registered for the given token.
 *
 * @param token the token to look up
 * @return the lowest POE {@link Date}
 * @throws IllegalStateException when no POE has been registered for the token
 */
private Date getLowestPOETime(Token token) {
	final List<POE> poeList = poeTimes.get(token.getDSSIdAsString());
	if (Utils.isCollectionEmpty(poeList)) {
		throw new IllegalStateException("POE shall be defined before accessing the 'poeTimes' list!");
	}
	Date lowest = null;
	for (POE poe : poeList) {
		final Date candidate = poe.getTime();
		if (lowest == null || candidate.before(lowest)) {
			lowest = candidate;
		}
	}
	return lowest;
}
/**
 * Performs a set of consistency checks on the revocation data for the given certificate:
 * a known issuer chain, OCSP produced within the issuer's validity, CRL issued within the
 * certificate's validity, a POE between production and nextUpdate (when nextUpdate exists),
 * and a POE within the revocation issuer's validity for untrusted issuers.
 *
 * @param revocation the revocation token to check
 * @param certToken the certificate the revocation relates to
 * @return true when the revocation data passes all consistency checks
 */
private boolean isConsistent(RevocationToken<Revocation> revocation, CertificateToken certToken) {
	List<CertificateToken> certificateTokenChain = toCertificateTokenChain(getCertChain(revocation));
	if (Utils.isCollectionEmpty(certificateTokenChain)) {
		LOG.debug("The revocation {} is not consistent! Issuer CertificateToken is not found.",
				revocation.getDSSIdAsString());
		return false;
	}
	if (RevocationType.OCSP.equals(revocation.getRevocationType()) &&
			!DSSRevocationUtils.checkIssuerValidAtRevocationProductionTime(revocation)) {
		LOG.debug("The revocation {} is not consistent! The revocation has been produced outside " +
				"the issuer certificate's validity range!", revocation.getDSSIdAsString());
		return false;
	}
	if (RevocationType.CRL.equals(revocation.getRevocationType()) && (
			!isInCertificateValidityRange(revocation, certToken))) {
		LOG.debug("The revocation '{}' was not issued during the validity period of the certificate! Certificate: {}",
				revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
		return false;
	}
	if (revocation.getNextUpdate() != null && !hasPOEAfterProductionAndBeforeNextUpdate(revocation)) {
		LOG.debug("There is no POE for the revocation '{}' after its production time and before the nextUpdate! " +
				"Certificate: {}", revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
		return false;
	}
	// useful for short-life certificates (i.e. ocsp responder)
	CertificateToken revocationIssuer = certificateTokenChain.iterator().next();
	if (!isTrusted(revocationIssuer) && !hasPOEInTheValidityRange(revocationIssuer)) {
		LOG.debug("There is no POE for the revocation issuer '{}' for revocation '{}' within its validity range! " +
				"Certificate: {}", revocationIssuer.getDSSIdAsString(), revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
		return false;
	}
	LOG.debug("The revocation '{}' is consistent. Certificate: {}", revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
	return true;
}
/**
 * Checks whether the revocation data was issued within the certificate's validity
 * period: thisUpdate <= notAfter and nextUpdate >= notBefore.
 * NOTE(review): revocation data without a nextUpdate (nextUpdate == null) is treated
 * as out of range here — confirm this is intended for CRLs that omit nextUpdate.
 *
 * @param revocationToken the revocation data to check
 * @param certificateToken the related certificate
 * @return true when the revocation data falls within the certificate's validity range
 */
private boolean isInCertificateValidityRange(RevocationToken<?> revocationToken, CertificateToken certificateToken) {
	final Date thisUpdate = revocationToken.getThisUpdate();
	final Date nextUpdate = revocationToken.getNextUpdate();
	final Date notAfter = certificateToken.getNotAfter();
	final Date notBefore = certificateToken.getNotBefore();
	return thisUpdate.compareTo(notAfter) <= 0 && (nextUpdate != null && nextUpdate.compareTo(notBefore) >= 0);
}
/**
 * Tells whether at least one registered POE for the revocation data lies between
 * its production date and its nextUpdate.
 *
 * @param revocation the revocation token to check
 * @return true when such a POE exists
 */
private boolean hasPOEAfterProductionAndBeforeNextUpdate(RevocationToken<Revocation> revocation) {
	final List<POE> registeredPOEs = poeTimes.get(revocation.getDSSIdAsString());
	if (Utils.isCollectionEmpty(registeredPOEs)) {
		return false;
	}
	for (POE poe : registeredPOEs) {
		if (isConsistentOnTime(revocation, poe.getTime())) {
			return true;
		}
	}
	return false;
}
/**
 * Tells whether at least one registered POE for the certificate lies within
 * the certificate's validity period.
 *
 * @param certificateToken the certificate to check
 * @return true when such a POE exists
 */
private boolean hasPOEInTheValidityRange(CertificateToken certificateToken) {
	final List<POE> registeredPOEs = poeTimes.get(certificateToken.getDSSIdAsString());
	if (Utils.isCollectionEmpty(registeredPOEs)) {
		return false;
	}
	for (POE poe : registeredPOEs) {
		if (certificateToken.isValidOn(poe.getTime())) {
			return true;
		}
	}
	return false;
}
/**
 * Checks whether the given {@code date} lies within the revocation token's
 * validity window, i.e. between its production date (inclusive) and its
 * nextUpdate (inclusive).
 *
 * @param revocationToken the revocation data to check
 * @param date the time to verify against the revocation's window
 * @return true when {@code date} falls within [productionDate, nextUpdate]
 */
private boolean isConsistentOnTime(RevocationToken<Revocation> revocationToken, Date date) {
	Date productionDate = revocationToken.getProductionDate();
	Date nextUpdate = revocationToken.getNextUpdate();
	// Defensive guard: revocation data may lack a nextUpdate (e.g. some CRLs),
	// which previously caused a NullPointerException on compareTo. The only
	// in-file caller checks nextUpdate != null beforehand, so returning false
	// here does not change existing behavior for current call sites.
	if (productionDate == null || nextUpdate == null) {
		return false;
	}
	return date.compareTo(productionDate) >= 0 && date.compareTo(nextUpdate) <= 0;
}
/**
 * Verifies that each certificate chain has at least one revocation data issued after
 * the chain's best signature time (the earliest signature-covering timestamp for the
 * signing certificate's chain, or the last timestamp usage for other chains). When
 * missing, the verifier's {@code alertOnNoRevocationAfterBestSignatureTime} is triggered.
 *
 * @param signature {@link AdvancedSignature} whose chains are checked
 * @return true when fresh revocation data is present for all chains
 */
@Override
public boolean checkAtLeastOneRevocationDataPresentAfterBestSignatureTime(AdvancedSignature signature) {
	List<String> errors = new ArrayList<>();
	CertificateToken signingCertificateToken = signature.getSigningCertificateToken();
	Map<CertificateToken, List<CertificateToken>> orderedCertificateChains = getOrderedCertificateChains();
	for (Map.Entry<CertificateToken, List<CertificateToken>> entry : orderedCertificateChains.entrySet()) {
		CertificateToken firstChainCertificate = entry.getKey();
		Date bestSignatureTime = firstChainCertificate.equals(signingCertificateToken) ? getEarliestTimestampTime()
				: lastTimestampCertChainDates.get(firstChainCertificate);
		checkRevocationForCertificateChainAgainstBestSignatureTime(entry.getValue(), bestSignatureTime, errors);
	}
	if (!errors.isEmpty()) {
		Status status = new Status("Fresh revocation data is missing for one or more certificate(s).", errors);
		certificateVerifier.getAlertOnNoRevocationAfterBestSignatureTime().alert(status);
	}
	return errors.isEmpty();
}
/**
 * Returns the creation time of the earliest processed timestamp that covers
 * the signature.
 *
 * @return earliest signature-covering timestamp time, or null when none exists
 */
private Date getEarliestTimestampTime() {
	Date earliest = null;
	for (TimestampToken timestamp : getProcessedTimestamps()) {
		if (!timestamp.getTimeStampType().coversSignature()) {
			continue;
		}
		final Date creationDate = timestamp.getCreationDate();
		if (earliest == null || creationDate.before(earliest)) {
			earliest = creationDate;
		}
	}
	return earliest;
}
/**
 * Verifies that the signing certificate was valid at some registered POE of the
 * signature (recursively validating the POE-providing timestamps themselves).
 * When expired, the verifier's {@code alertOnExpiredSignature} is triggered.
 * A signature without a resolved signing certificate is considered not expired.
 *
 * @param signature {@link AdvancedSignature} to check
 * @return true when the signature is not (provably) expired
 */
@Override
public boolean checkSignatureNotExpired(AdvancedSignature signature) {
	CertificateToken signingCertificate = signature.getSigningCertificateToken();
	if (signingCertificate != null) {
		boolean signatureNotExpired = verifyCertificateTokenHasPOERecursively(signingCertificate, poeTimes.get(signature.getId()));
		if (!signatureNotExpired) {
			Status status = new Status("The signing certificate has been expired and " +
					"there is no POE during its validity range.", Arrays.asList(signingCertificate.getDSSIdAsString()));
			certificateVerifier.getAlertOnExpiredSignature().alert(status);
		}
		return signatureNotExpired;
	}
	return true;
}
/**
 * Checks whether the certificate was valid at one of the given POE times. When the POE
 * comes from a timestamp, the timestamp's issuer certificate is itself recursively
 * required to have a valid POE; a POE without a timestamp (current time) is accepted directly.
 *
 * @param certificateToken the certificate to verify
 * @param poeTimeList the POEs registered for the token providing the certificate
 * @return true when a (recursively) valid POE within the certificate's validity exists
 */
private boolean verifyCertificateTokenHasPOERecursively(CertificateToken certificateToken, List<POE> poeTimeList) {
	if (Utils.isCollectionNotEmpty(poeTimeList)) {
		for (POE poeTime : poeTimeList) {
			if (certificateToken.isValidOn(poeTime.getTime())) {
				TimestampToken timestampToken = poeTime.getTimestampToken();
				if (timestampToken != null) {
					// check if the timestamp is valid at validation time
					CertificateToken issuerCertificateToken = getIssuer(timestampToken);
					if (issuerCertificateToken != null &&
							verifyCertificateTokenHasPOERecursively(issuerCertificateToken, poeTimes.get(timestampToken.getDSSIdAsString()))) {
						return true;
					}
				} else {
					// the certificate is valid at the current time
					return true;
				}
			}
		}
	}
	return false;
}
/**
 * Returns an unmodifiable view of all certificate tokens processed so far.
 *
 * @return set of processed {@link CertificateToken}s
 */
@Override
public Set<CertificateToken> getProcessedCertificates() {
	return Collections.unmodifiableSet(processedCertificates);
}

/**
 * Returns an unmodifiable view of all revocation tokens processed so far.
 *
 * @return set of processed {@link RevocationToken}s
 */
@Override
public Set<RevocationToken> getProcessedRevocations() {
	return Collections.unmodifiableSet(processedRevocations);
}

/**
 * Returns an unmodifiable view of all timestamp tokens processed so far.
 *
 * @return set of processed {@link TimestampToken}s
 */
@Override
public Set<TimestampToken> getProcessedTimestamps() {
	return Collections.unmodifiableSet(processedTimestamps);
}
/**
 * Tells whether the token is a certificate anchored in one of the trusted sources.
 * Non-certificate tokens are never trusted.
 *
 * @param token the token to check
 * @param <T> sub-type of {@code Token}
 * @return true when the token is a trusted certificate
 */
private <T extends Token> boolean isTrusted(T token) {
	if (!(token instanceof CertificateToken)) {
		return false;
	}
	return trustedCertSources.isTrusted((CertificateToken) token);
}
/**
 * Collects the validation data (certificates and revocation data) for the signature,
 * starting from its signing certificate.
 *
 * @param signature {@link AdvancedSignature} to collect validation data for
 * @return the collected {@link ValidationData}
 */
@Override
public ValidationData getValidationData(final AdvancedSignature signature) {
	return getValidationData(signature.getSigningCertificateToken());
}

/**
 * Collects the validation data (certificates and revocation data) for the timestamp,
 * starting from its issuer certificate.
 *
 * @param timestampToken {@link TimestampToken} to collect validation data for
 * @return the collected {@link ValidationData}
 */
@Override
public ValidationData getValidationData(final TimestampToken timestampToken) {
	return getValidationData(getIssuer(timestampToken));
}

/**
 * Collects the validation data reachable from the given certificate (may be null,
 * in which case an empty {@link ValidationData} is returned).
 */
private ValidationData getValidationData(final CertificateToken certificateToken) {
	ValidationData validationData = new ValidationData();
	if (certificateToken != null) {
		populateValidationDataRecursively(certificateToken, validationData);
	}
	return validationData;
}
/**
 * Recursively collects the token, its related revocation data (for certificates)
 * and its issuer chain into the given {@code ValidationData}. Recursion stops when
 * a token was already collected ({@code addToken} returns false).
 *
 * @param token the token to collect
 * @param validationData the accumulator (modified in place)
 */
private void populateValidationDataRecursively(final Token token, final ValidationData validationData) {
	if (!validationData.addToken(token)) {
		return; // token already collected
	}
	if (token instanceof CertificateToken) {
		for (RevocationToken revocation : getRelatedRevocationTokens((CertificateToken) token)) {
			populateValidationDataRecursively(revocation, validationData);
		}
	}
	final CertificateToken issuer = getIssuer(token);
	if (issuer != null) {
		populateValidationDataRecursively(issuer, validationData);
	}
}
/**
* This class defines a POE provided to the validation process or obtained from processed timestamps
*/
/**
 * A proof of existence (POE): either provided externally to the validation
 * process or extracted from a processed timestamp token.
 */
private static class POE {

	/** Time of the proof of existence */
	private final Date time;

	/** Timestamp token the POE was extracted from (null for externally provided POEs) */
	private TimestampToken timestampToken;

	/**
	 * Creates a POE from an externally provided time.
	 *
	 * @param time {@link Date} the POE time
	 */
	public POE(final Date time) {
		this.time = time;
	}

	/**
	 * Creates a POE from a timestamp token, using its creation date as the POE time.
	 *
	 * @param timestampToken {@link TimestampToken} providing the POE
	 */
	public POE(TimestampToken timestampToken) {
		this.time = timestampToken.getCreationDate();
		this.timestampToken = timestampToken;
	}

	/**
	 * Gets the POE time.
	 *
	 * @return {@link Date} the time of the proof of existence
	 */
	public Date getTime() {
		return time;
	}

	/**
	 * Gets the timestamp token the POE originates from, if any.
	 *
	 * @return {@link TimestampToken} used for the POE, or null when provided externally
	 */
	public TimestampToken getTimestampToken() {
		return timestampToken;
	}
}
}
|
dss-document/src/main/java/eu/europa/esig/dss/validation/SignatureValidationContext.java
|
/**
* DSS - Digital Signature Services
* Copyright (C) 2015 European Commission, provided under the CEF programme
*
* This file is part of the "DSS - Digital Signature Services" project.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package eu.europa.esig.dss.validation;
import eu.europa.esig.dss.CertificateReorderer;
import eu.europa.esig.dss.alert.status.Status;
import eu.europa.esig.dss.enumerations.RevocationReason;
import eu.europa.esig.dss.enumerations.RevocationType;
import eu.europa.esig.dss.model.x509.CertificateToken;
import eu.europa.esig.dss.model.x509.Token;
import eu.europa.esig.dss.model.x509.X500PrincipalHelper;
import eu.europa.esig.dss.model.x509.revocation.Revocation;
import eu.europa.esig.dss.model.x509.revocation.crl.CRL;
import eu.europa.esig.dss.model.x509.revocation.ocsp.OCSP;
import eu.europa.esig.dss.spi.DSSASN1Utils;
import eu.europa.esig.dss.spi.DSSRevocationUtils;
import eu.europa.esig.dss.spi.x509.AlternateUrlsSourceAdapter;
import eu.europa.esig.dss.spi.x509.CandidatesForSigningCertificate;
import eu.europa.esig.dss.spi.x509.CertificateRef;
import eu.europa.esig.dss.spi.x509.CertificateSource;
import eu.europa.esig.dss.spi.x509.CertificateValidity;
import eu.europa.esig.dss.spi.x509.CommonTrustedCertificateSource;
import eu.europa.esig.dss.spi.x509.ListCertificateSource;
import eu.europa.esig.dss.spi.x509.ResponderId;
import eu.europa.esig.dss.spi.x509.aia.AIASource;
import eu.europa.esig.dss.spi.x509.revocation.OfflineRevocationSource;
import eu.europa.esig.dss.spi.x509.revocation.RevocationCertificateSource;
import eu.europa.esig.dss.spi.x509.revocation.RevocationSource;
import eu.europa.esig.dss.spi.x509.revocation.RevocationSourceAlternateUrlsSupport;
import eu.europa.esig.dss.spi.x509.revocation.RevocationToken;
import eu.europa.esig.dss.spi.x509.revocation.ocsp.OCSPToken;
import eu.europa.esig.dss.utils.Utils;
import eu.europa.esig.dss.validation.timestamp.TimestampToken;
import eu.europa.esig.dss.validation.timestamp.TimestampedReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
/**
 * During the validation of a signature, the software retrieves different X509 artifacts like Certificate, CRL and OCSP
 * Response. The SignatureValidationContext is a "cache" for a single validation request; it holds every object
 * retrieved so far.
 *
 */
public class SignatureValidationContext implements ValidationContext {
/** Logger for this class */
private static final Logger LOG = LoggerFactory.getLogger(SignatureValidationContext.class);
/**
 * A set of certificates to process
 */
private final Set<CertificateToken> processedCertificates = new HashSet<>();
/**
 * A set of revocation data to process
 */
private final Set<RevocationToken<?>> processedRevocations = new HashSet<>();
/**
 * A set of timestamps to process
 */
private final Set<TimestampToken> processedTimestamps = new HashSet<>();
/**
 * The CertificateVerifier to use
 */
private CertificateVerifier certificateVerifier;
/**
 * Used to access certificate by AIA.
 */
private AIASource aiaSource;
/** Map of tokens defining if they have been processed yet (null value = not yet processed) */
private final Map<Token, Boolean> tokensToProcess = new HashMap<>();
/** The last usage of a timestamp's certificate tokens */
private final Map<CertificateToken, Date> lastTimestampCertChainDates = new HashMap<>();
/** A map of token IDs and their corresponding POE times */
private final Map<String, List<POE>> poeTimes = new HashMap<>();
/** Cached map of tokens and their {@code CertificateToken} issuers */
private final Map<Token, CertificateToken> tokenIssuerMap = new HashMap<>();
/** External OCSP source */
private RevocationSource<OCSP> remoteOCSPSource;
/** External CRL source */
private RevocationSource<CRL> remoteCRLSource;
/** This strategy defines the revocation loading logic and returns OCSP or CRL token for a provided certificate */
private RevocationDataLoadingStrategy revocationDataLoadingStrategy;
/** External trusted certificate sources */
private ListCertificateSource trustedCertSources;
/** External adjunct certificate sources */
private ListCertificateSource adjunctCertSources;
/** CRLs from the document */
private ListRevocationSource<CRL> documentCRLSource = new ListRevocationSource<>();
/** OCSP from the document */
private ListRevocationSource<OCSP> documentOCSPSource = new ListRevocationSource<>();
/** Certificates from the document */
private ListCertificateSource documentCertificateSource = new ListCertificateSource();
/** Certificates collected from AIA */
private ListCertificateSource aiaCertificateSources = new ListCertificateSource();
/** Certificates collected from revocation tokens */
private ListCertificateSource revocationCertificateSources = new ListCertificateSource();
/**
 * This variable set the behavior to follow for revocation retrieving in case of
 * untrusted certificate chains.
 */
private boolean checkRevocationForUntrustedChains;
/**
 * This is the time at which the validation is carried out. The setter is used only for test purposes.
 */
protected Date currentTime = new Date();
/**
 * Initializes this validation context with the configuration of the given verifier:
 * revocation sources, AIA source, revocation loading strategy, and the trusted and
 * adjunct certificate sources.
 *
 * @param certificateVerifier
 *            The certificates verifier (eg: using the TSL as list of trusted certificates).
 */
@Override
public void initialize(final CertificateVerifier certificateVerifier) {
	Objects.requireNonNull(certificateVerifier);
	this.certificateVerifier = certificateVerifier;
	this.remoteCRLSource = certificateVerifier.getCrlSource();
	this.remoteOCSPSource = certificateVerifier.getOcspSource();
	this.aiaSource = certificateVerifier.getAIASource();
	this.revocationDataLoadingStrategy = certificateVerifier.getRevocationDataLoadingStrategy();
	this.adjunctCertSources = certificateVerifier.getAdjunctCertSources();
	this.trustedCertSources = certificateVerifier.getTrustedCertSources();
	this.checkRevocationForUntrustedChains = certificateVerifier.isCheckRevocationForUntrustedChains();
}
/**
 * Registers the given signature and all of its related material for verification:
 * embedded certificate/CRL/OCSP sources, a current-time POE, the signing certificate
 * (or all valid candidates when unresolved), its timestamps and counter-signatures.
 *
 * @param signature {@link AdvancedSignature} to be validated
 */
@Override
public void addSignatureForVerification(final AdvancedSignature signature) {
	addDocumentCertificateSource(signature.getCertificateSource());
	addDocumentCRLSource(signature.getCRLSource());
	addDocumentOCSPSource(signature.getOCSPSource());
	registerPOE(signature.getId(), currentTime);
	// Add resolved certificates
	CertificateToken signingCertificate = signature.getSigningCertificateToken();
	if (signingCertificate != null) {
		addCertificateTokenForVerification(signingCertificate);
	} else {
		// no single signing certificate resolved -> register every valid candidate
		List<CertificateValidity> certificateValidities = signature.getCandidatesForSigningCertificate().getCertificateValidityList();
		if (Utils.isCollectionNotEmpty(certificateValidities)) {
			for (CertificateValidity certificateValidity : certificateValidities) {
				if (certificateValidity.isValid() && certificateValidity.getCertificateToken() != null) {
					addCertificateTokenForVerification(certificateValidity.getCertificateToken());
				}
			}
		}
	}
	prepareTimestamps(signature.getAllTimestamps());
	prepareCounterSignatures(signature.getCounterSignatures());
}
/**
 * Adds a document certificate source to the context.
 *
 * @param certificateSource {@link CertificateSource} extracted from a document
 */
@Override
public void addDocumentCertificateSource(CertificateSource certificateSource) {
	addCertificateSource(documentCertificateSource, certificateSource);
}

/**
 * Adds each certificate source of the given list to the context.
 *
 * @param listCertificateSource {@link ListCertificateSource} extracted from a document
 */
@Override
public void addDocumentCertificateSource(ListCertificateSource listCertificateSource) {
	for (CertificateSource certificateSource : listCertificateSource.getSources()) {
		addDocumentCertificateSource(certificateSource);
	}
}
/**
 * Adds {@code certificateSourceToAdd} to the given {@code listCertificateSource}
 *
 * @param listCertificateSource {@link ListCertificateSource} to enrich
 * @param certificateSourceToAdd {@link CertificateSource} to add
 */
private void addCertificateSource(ListCertificateSource listCertificateSource, CertificateSource certificateSourceToAdd) {
	listCertificateSource.add(certificateSourceToAdd);
	// add all existing equivalent certificates for the validation
	ListCertificateSource allCertificateSources = getAllCertificateSources();
	for (CertificateToken certificateToken : certificateSourceToAdd.getCertificates()) {
		final Set<CertificateToken> equivalentCertificates = allCertificateSources.getByPublicKey(certificateToken.getPublicKey());
		for (CertificateToken equivalentCertificate : equivalentCertificates) {
			if (!certificateToken.getDSSIdAsString().equals(equivalentCertificate.getDSSIdAsString())) {
				// NOTE(review): the token registered here is 'certificateToken' (the newly
				// added one), not 'equivalentCertificate' — presumably intentional (register
				// the new token whenever an equivalent with a different ID exists), but it
				// is executed once per equivalent found; confirm this is the intended token.
				addCertificateTokenForVerification(certificateToken);
			}
		}
	}
}
/**
 * Adds a CRL source extracted from a document to the context.
 *
 * @param crlSource {@link OfflineRevocationSource} with CRLs
 */
@Override
public void addDocumentCRLSource(OfflineRevocationSource<CRL> crlSource) {
	documentCRLSource.add(crlSource);
}

/**
 * Adds all CRL sources of the given list to the context.
 *
 * @param crlSource {@link ListRevocationSource} with CRLs
 */
@Override
public void addDocumentCRLSource(ListRevocationSource<CRL> crlSource) {
	documentCRLSource.addAll(crlSource);
}

/**
 * Adds an OCSP source extracted from a document to the context.
 *
 * @param ocspSource {@link OfflineRevocationSource} with OCSP responses
 */
@Override
public void addDocumentOCSPSource(OfflineRevocationSource<OCSP> ocspSource) {
	documentOCSPSource.add(ocspSource);
}

/**
 * Adds all OCSP sources of the given list to the context.
 *
 * @param ocspSource {@link ListRevocationSource} with OCSP responses
 */
@Override
public void addDocumentOCSPSource(ListRevocationSource<OCSP> ocspSource) {
	documentOCSPSource.addAll(ocspSource);
}
/**
 * Registers each timestamp token of the signature for verification.
 *
 * @param timestampTokens the timestamps to register
 */
private void prepareTimestamps(final List<TimestampToken> timestampTokens) {
	timestampTokens.forEach(this::addTimestampTokenForVerification);
}

/**
 * Registers each counter-signature (recursively, with its own material) for verification.
 *
 * @param counterSignatures the counter-signatures to register
 */
private void prepareCounterSignatures(final List<AdvancedSignature> counterSignatures) {
	counterSignatures.forEach(this::addSignatureForVerification);
}
/**
 * Returns the time at which this validation is carried out.
 *
 * @return the validation time
 */
@Override
public Date getCurrentTime() {
	return currentTime;
}

/**
 * Sets the validation time. Intended for test purposes only.
 *
 * @param currentTime the validation time (must not be null)
 */
@Override
public void setCurrentTime(final Date currentTime) {
	Objects.requireNonNull(currentTime);
	this.currentTime = currentTime;
}
/**
 * This method returns a token to verify. If there is no more tokens to verify null is returned.
 *
 * @return token to verify or null
 */
private Token getNotYetVerifiedToken() {
	synchronized (tokensToProcess) {
		for (final Entry<Token, Boolean> entry : tokensToProcess.entrySet()) {
			// a null value marks a token that has not been dispatched yet
			if (entry.getValue() == null) {
				entry.setValue(true); // mark as dispatched before returning
				return entry.getKey();
			}
		}
		return null;
	}
}

/**
 * This method returns a timestamp token to verify. If there is no more tokens to verify null is returned.
 *
 * @return token to verify or null
 */
private TimestampToken getNotYetVerifiedTimestamp() {
	synchronized (tokensToProcess) {
		for (final Entry<Token, Boolean> entry : tokensToProcess.entrySet()) {
			// same dispatching contract as getNotYetVerifiedToken(), restricted to timestamps
			if (entry.getValue() == null && entry.getKey() instanceof TimestampToken) {
				entry.setValue(true);
				return (TimestampToken) entry.getKey();
			}
		}
		return null;
	}
}
/**
 * Orders the processed certificates into chains (leaf first), keyed by the first
 * certificate of each chain.
 *
 * @return map of chain-leading certificate to its ordered chain
 */
private final Map<CertificateToken, List<CertificateToken>> getOrderedCertificateChains() {
final CertificateReorderer order = new CertificateReorderer(processedCertificates);
return order.getOrderedCertificateChains();
}
/**
 * This method builds the complete certificate chain from the given token.
 * The chain starts with the token itself and follows issuer links until no
 * issuer is found or a loop is detected.
 *
 * @param token
 *            the token for which the certificate chain must be obtained.
 * @return the built certificate chain
 */
private List<Token> getCertChain(final Token token) {
    final List<Token> chain = new LinkedList<>();
    Token current = token;
    while (true) {
        chain.add(current);
        current = getIssuer(current);
        // stop when the chain is complete or would cycle
        if (current == null || chain.contains(current)) {
            break;
        }
    }
    return chain;
}
/**
 * Resolves the issuer certificate of the given token, trying in order: the local
 * cache, the token's own embedded certificate source (OCSP/timestamp), all known
 * certificate sources, AIA retrieval (certificates only), and finally OCSP
 * responder-id / TSA candidate resolution. A found issuer is registered for
 * verification and cached.
 *
 * @param token the token whose issuer is searched
 * @return the issuer certificate token, or {@code null} if none could be found
 */
private CertificateToken getIssuer(final Token token) {
// Return cached value
CertificateToken issuerCertificateToken = getIssuerFromProcessedCertificates(token);
if (issuerCertificateToken != null) {
return issuerCertificateToken;
}
// Find issuer candidates from a particular certificate source
// NOTE(review): issuerCertificateToken is necessarily null below the early return,
// so the (issuerCertificateToken == null) guards here are redundant but harmless.
Set<CertificateToken> candidates = Collections.emptySet();
if ((issuerCertificateToken == null) && (token instanceof OCSPToken)) {
candidates = getIssuersFromSource(token, ((OCSPToken) token).getCertificateSource());
}
if ((issuerCertificateToken == null) && (token instanceof TimestampToken)) {
candidates = getIssuersFromSource(token, ((TimestampToken) token).getCertificateSource());
}
ListCertificateSource allCertificateSources = getAllCertificateSources();
if (Utils.isCollectionEmpty(candidates)) {
// Find issuer candidates from all sources
candidates = getIssuersFromSources(token, allCertificateSources);
}
issuerCertificateToken = getTokenIssuerFromCandidates(token, candidates);
// Fallback: fetch the issuer via the Authority Information Access extension
if ((issuerCertificateToken == null) && (token instanceof CertificateToken) && aiaSource != null) {
final AIACertificateSource aiaCertificateSource = new AIACertificateSource((CertificateToken) token, aiaSource);
issuerCertificateToken = aiaCertificateSource.getIssuerFromAIA();
addCertificateSource(aiaCertificateSources, aiaCertificateSource);
}
// Fallback: resolve via OCSP responder-id or TSA signing-certificate candidates
if ((issuerCertificateToken == null) && (token instanceof OCSPToken)) {
issuerCertificateToken = getOCSPIssuer((OCSPToken) token, allCertificateSources);
}
if ((issuerCertificateToken == null) && (token instanceof TimestampToken)) {
issuerCertificateToken = getTSACertificate((TimestampToken) token, allCertificateSources);
}
if (issuerCertificateToken != null) {
// register the issuer for verification and cache the relation
addCertificateTokenForVerification(issuerCertificateToken);
tokenIssuerMap.put(token, issuerCertificateToken);
}
return issuerCertificateToken;
}
/**
 * Returns the cached issuer of the given token, provided the signer's public key
 * is already resolved or can be resolved against the cached issuer.
 *
 * @param token the token whose cached issuer is requested
 * @return the cached issuer, or {@code null} when absent or not verifiable
 */
private CertificateToken getIssuerFromProcessedCertificates(Token token) {
CertificateToken issuerCertificateToken = tokenIssuerMap.get(token);
// isSignedBy(...) check is required when a certificates is present in different sources
// in order to instantiate a public key of the signer
if (issuerCertificateToken != null &&
(token.getPublicKeyOfTheSigner() != null || token.isSignedBy(issuerCertificateToken))) {
return issuerCertificateToken;
}
return null;
}
/**
 * Aggregates every known certificate source into a single list source.
 * The addition order (document, revocation, AIA, adjunct, trusted) defines the
 * lookup order of the combined source.
 *
 * @return the combined certificate source
 */
@Override
public ListCertificateSource getAllCertificateSources() {
ListCertificateSource allCertificateSources = new ListCertificateSource();
allCertificateSources.addAll(documentCertificateSource);
allCertificateSources.addAll(revocationCertificateSources);
allCertificateSources.addAll(aiaCertificateSources);
allCertificateSources.addAll(adjunctCertSources);
allCertificateSources.addAll(trustedCertSources);
return allCertificateSources;
}
/**
 * Returns the pooled certificate source extracted from the validated document(s).
 *
 * @return the document certificate source
 */
@Override
public ListCertificateSource getDocumentCertificateSource() {
return documentCertificateSource;
}
/**
 * Returns the pooled CRL source extracted from the validated document(s).
 *
 * @return the document CRL source
 */
@Override
public ListRevocationSource<CRL> getDocumentCRLSource() {
return documentCRLSource;
}
/**
 * Returns the pooled OCSP source extracted from the validated document(s).
 *
 * @return the document OCSP source
 */
@Override
public ListRevocationSource<OCSP> getDocumentOCSPSource() {
return documentOCSPSource;
}
/**
 * Collects issuer candidates for the token from the combined certificate sources,
 * matching by the signer's public key when known, otherwise by the issuer subject name.
 *
 * @param token the token whose issuer candidates are searched
 * @param allCertificateSources the combined certificate sources to search
 * @return the matching candidates, possibly empty
 */
private Set<CertificateToken> getIssuersFromSources(Token token, ListCertificateSource allCertificateSources) {
    // prefer the exact public-key match when it is available
    if (token.getPublicKeyOfTheSigner() != null) {
        return allCertificateSources.getByPublicKey(token.getPublicKeyOfTheSigner());
    }
    // fall back to a subject-name match
    if (token.getIssuerX500Principal() != null) {
        return allCertificateSources.getBySubject(new X500PrincipalHelper(token.getIssuerX500Principal()));
    }
    return Collections.emptySet();
}
/**
 * Collects issuer candidates for the token from one particular certificate source,
 * matching by the signer's public key when known, otherwise by the issuer subject name.
 *
 * @param token the token whose issuer candidates are searched
 * @param certificateSource the single source to search
 * @return the matching candidates, possibly empty
 */
private Set<CertificateToken> getIssuersFromSource(Token token, CertificateSource certificateSource) {
    // prefer the exact public-key match when it is available
    if (token.getPublicKeyOfTheSigner() != null) {
        return certificateSource.getByPublicKey(token.getPublicKeyOfTheSigner());
    }
    // fall back to a subject-name match
    if (token.getIssuerX500Principal() != null) {
        return certificateSource.getBySubject(new X500PrincipalHelper(token.getIssuerX500Principal()));
    }
    return Collections.emptySet();
}
/**
 * Resolves the signing certificate of an OCSP response through its single
 * signing-certificate reference (ResponderId), matching by SKI and/or subject name.
 *
 * @param token the OCSP token whose signer is searched
 * @param allCertificateSources the combined certificate sources to search
 * @return the resolved signing certificate, or {@code null} if not found
 */
private CertificateToken getOCSPIssuer(OCSPToken token, ListCertificateSource allCertificateSources) {
Set<CertificateRef> signingCertificateRefs = token.getCertificateSource().getAllCertificateRefs();
// only an unambiguous (single) reference is used
if (Utils.collectionSize(signingCertificateRefs) == 1) {
CertificateRef signingCertificateRef = signingCertificateRefs.iterator().next();
ResponderId responderId = signingCertificateRef.getResponderId();
if (responderId != null) {
Set<CertificateToken> issuerCandidates = new HashSet<>();
if (responderId.getSki() != null) {
issuerCandidates.addAll(allCertificateSources.getBySki(responderId.getSki()));
}
if (responderId.getX500Principal() != null) {
issuerCandidates.addAll(allCertificateSources.getBySubject(new X500PrincipalHelper(responderId.getX500Principal())));
}
return getTokenIssuerFromCandidates(token, issuerCandidates);
}
}
LOG.warn("Signing certificate is not found for an OCSPToken with id '{}'.", token.getDSSIdAsString());
return null;
}
/**
 * Resolves the TSA certificate of a timestamp from its best signing-certificate
 * candidate; when the candidate carries no certificate token, a lookup by signer
 * identifier is performed against all certificate sources.
 *
 * @param timestamp the timestamp whose TSA certificate is searched
 * @param allCertificateSources the combined certificate sources to search
 * @return the resolved TSA certificate, or {@code null} if not found
 */
private CertificateToken getTSACertificate(TimestampToken timestamp, ListCertificateSource allCertificateSources) {
CandidatesForSigningCertificate candidatesForSigningCertificate = timestamp.getCandidatesForSigningCertificate();
CertificateValidity theBestCandidate = candidatesForSigningCertificate.getTheBestCandidate();
if (theBestCandidate != null) {
Set<CertificateToken> issuerCandidates = new HashSet<>();
CertificateToken timestampSigner = theBestCandidate.getCertificateToken();
if (timestampSigner == null) {
// no embedded certificate: resolve through the signer identifier
issuerCandidates.addAll(allCertificateSources.getByCertificateIdentifier(theBestCandidate.getSignerInfo()));
} else {
issuerCandidates.add(timestampSigner);
}
return getTokenIssuerFromCandidates(timestamp, issuerCandidates);
}
return null;
}
/**
 * Picks the token's issuer among the candidates: the first candidate that both
 * signed the token and was valid at the token's creation time wins; failing that,
 * the first candidate that merely signed the token is returned with a warning.
 *
 * @param token the token whose issuer is searched
 * @param candidates the candidate issuer certificates
 * @return the chosen issuer, or {@code null} when no candidate signed the token
 */
private CertificateToken getTokenIssuerFromCandidates(Token token, Collection<CertificateToken> candidates) {
    final List<CertificateToken> signingCandidates = new ArrayList<>();
    for (CertificateToken candidate : candidates) {
        if (!token.isSignedBy(candidate)) {
            continue;
        }
        if (candidate.isValidOn(token.getCreationDate())) {
            // best case: signer valid at the token creation time
            return candidate;
        }
        signingCandidates.add(candidate);
    }
    if (!signingCandidates.isEmpty()) {
        LOG.warn("No issuer found for the token creation date. The process continues with an issuer which has the same public key.");
        return signingCandidates.get(0);
    }
    return null;
}
/**
 * Adds a new token to the list of tokens to verify only if it was not already
 * verified. Also registers the current validation time as a POE for the token.
 * Thread-safe: the check-then-put sequence runs under the {@code tokensToProcess} lock.
 *
 * @param token
 *            token to verify
 * @return true if the token was not yet verified, false otherwise.
 */
private boolean addTokenForVerification(final Token token) {
if (token == null) {
return false;
}
final boolean traceEnabled = LOG.isTraceEnabled();
if (traceEnabled) {
LOG.trace("addTokenForVerification: trying to acquire synchronized block");
}
synchronized (tokensToProcess) {
try {
if (tokensToProcess.containsKey(token)) {
if (traceEnabled) {
LOG.trace("Token was already in the list {}:{}", token.getClass().getSimpleName(), token.getAbbreviation());
}
return false;
}
// null value marks the token as not-yet-processed (see getNotYetVerifiedToken)
tokensToProcess.put(token, null);
registerPOE(token.getDSSIdAsString(), currentTime);
if (traceEnabled) {
LOG.trace("+ New {} to check: {}", token.getClass().getSimpleName(), token.getAbbreviation());
}
return true;
} finally {
if (traceEnabled) {
LOG.trace("addTokenForVerification: almost left synchronized block");
}
}
}
}
/**
 * Registers a revocation token for verification, including its embedded
 * certificate source and its issuer certificate when available.
 *
 * @param revocationToken the revocation token to verify
 */
@Override
public void addRevocationTokenForVerification(RevocationToken revocationToken) {
if (addTokenForVerification(revocationToken)) {
RevocationCertificateSource revocationCertificateSource = revocationToken.getCertificateSource();
if (revocationCertificateSource != null) {
addCertificateSource(revocationCertificateSources, revocationCertificateSource);
}
CertificateToken issuerCertificateToken = revocationToken.getIssuerCertificateToken();
if (issuerCertificateToken != null) {
addCertificateTokenForVerification(issuerCertificateToken);
}
final boolean added = processedRevocations.add(revocationToken);
if (LOG.isTraceEnabled()) {
if (added) {
LOG.trace("RevocationToken added to processedRevocations: {} ", revocationToken);
} else {
LOG.trace("RevocationToken already present processedRevocations: {} ", revocationToken);
}
}
}
}
/**
 * Registers a certificate token for verification and records it in the set
 * of processed certificates.
 *
 * @param certificateToken the certificate token to verify
 */
@Override
public void addCertificateTokenForVerification(final CertificateToken certificateToken) {
if (addTokenForVerification(certificateToken)) {
final boolean added = processedCertificates.add(certificateToken);
if (LOG.isTraceEnabled()) {
if (added) {
LOG.trace("CertificateToken added to processedCertificates: {} ", certificateToken);
} else {
LOG.trace("CertificateToken already present processedCertificates: {} ", certificateToken);
}
}
}
}
/**
 * Registers a timestamp token for verification: its embedded certificate, CRL and
 * OCSP sources are pooled, every valid signing-certificate candidate is queued for
 * verification, and the token is recorded in the set of processed timestamps.
 *
 * @param timestampToken the timestamp token to verify
 */
@Override
public void addTimestampTokenForVerification(final TimestampToken timestampToken) {
    if (addTokenForVerification(timestampToken)) {
        addDocumentCertificateSource(timestampToken.getCertificateSource());
        addDocumentCRLSource(timestampToken.getCRLSource());
        addDocumentOCSPSource(timestampToken.getOCSPSource());
        List<CertificateValidity> certificateValidities = timestampToken.getCandidatesForSigningCertificate().getCertificateValidityList();
        if (Utils.isCollectionNotEmpty(certificateValidities)) {
            for (CertificateValidity certificateValidity : certificateValidities) {
                if (certificateValidity.isValid() && certificateValidity.getCertificateToken() != null) {
                    addCertificateTokenForVerification(certificateValidity.getCertificateToken());
                }
            }
        }
        final boolean added = processedTimestamps.add(timestampToken);
        if (LOG.isTraceEnabled()) {
            // FIX: log the token itself (not the whole processedTimestamps set),
            // consistent with addCertificateTokenForVerification/addRevocationTokenForVerification
            if (added) {
                LOG.trace("TimestampToken added to processedTimestamps: {} ", timestampToken);
            } else {
                LOG.trace("TimestampToken already present processedTimestamps: {} ", timestampToken);
            }
        }
    }
}
/**
 * Records the timestamp's creation date as the last-usage date for every
 * certificate of the TSA chain (up to, but excluding, the first self-signed or
 * trusted certificate), and registers the timestamp as a POE for every object
 * it covers.
 *
 * @param timestampToken the timestamp whose usage is recorded
 */
private void registerUsageDate(TimestampToken timestampToken) {
CertificateToken tsaCertificate = getTSACertificate(timestampToken, getAllCertificateSources());
if (tsaCertificate == null) {
LOG.warn("No Timestamp Certificate found. Chain is skipped.");
return;
}
List<CertificateToken> tsaCertificateChain = toCertificateTokenChain(getCertChain(tsaCertificate));
Date usageDate = timestampToken.getCreationDate();
for (CertificateToken cert : tsaCertificateChain) {
if (isSelfSignedOrTrusted(cert)) {
// trust anchor reached: nothing above it needs a usage date
break;
}
Date lastUsage = lastTimestampCertChainDates.get(cert);
// keep only the latest usage date per certificate
if (lastUsage == null || lastUsage.before(usageDate)) {
lastTimestampCertChainDates.put(cert, usageDate);
}
}
for (TimestampedReference timestampedReference : timestampToken.getTimestampedReferences()) {
registerPOE(timestampedReference.getObjectId(), timestampToken);
}
}
/**
 * Registers a timestamp-backed proof of existence (POE) for the object with the
 * given identifier.
 *
 * @param tokenId identifier of the object the POE applies to
 * @param timestampToken the timestamp providing the POE
 */
private void registerPOE(String tokenId, TimestampToken timestampToken) {
    List<POE> knownPoes = poeTimes.get(tokenId);
    if (Utils.isCollectionEmpty(knownPoes)) {
        // first POE for this object
        knownPoes = new ArrayList<>();
        poeTimes.put(tokenId, knownPoes);
    }
    knownPoes.add(new POE(timestampToken));
}
/**
 * Registers a time-based proof of existence (POE) for the object with the
 * given identifier.
 *
 * @param tokenId identifier of the object the POE applies to
 * @param poeTime the time providing the POE
 */
private void registerPOE(String tokenId, Date poeTime) {
    List<POE> knownPoes = poeTimes.get(tokenId);
    if (Utils.isCollectionEmpty(knownPoes)) {
        // first POE for this object
        knownPoes = new ArrayList<>();
        poeTimes.put(tokenId, knownPoes);
    }
    knownPoes.add(new POE(poeTime));
}
/**
 * Filters a generic token chain down to its certificate tokens, preserving order.
 *
 * @param tokens the mixed token chain
 * @return the certificate tokens of the chain, in the original order
 */
private List<CertificateToken> toCertificateTokenChain(List<Token> tokens) {
    final List<CertificateToken> certificateChain = new LinkedList<>();
    for (Token candidate : tokens) {
        if (candidate instanceof CertificateToken) {
            certificateChain.add((CertificateToken) candidate);
        }
    }
    return certificateChain;
}
/**
 * Runs the validation loop: first drains all timestamps (building their chains and
 * registering their POEs), then drains the remaining tokens, building certificate
 * chains and collecting revocation data for certificates. Processing a token may
 * enqueue new ones, hence the drain-until-empty loops.
 */
@Override
public void validate() {
TimestampToken timestampToken = getNotYetVerifiedTimestamp();
while (timestampToken != null) {
getCertChain(timestampToken);
registerUsageDate(timestampToken);
timestampToken = getNotYetVerifiedTimestamp();
}
Token token = getNotYetVerifiedToken();
while (token != null) {
// extract the certificate chain and add missing tokens for verification
List<Token> certChain = getCertChain(token);
if (token instanceof CertificateToken) {
getRevocationData((CertificateToken) token, certChain);
}
token = getNotYetVerifiedToken();
}
}
/**
 * Retrieves the revocation data from signature (if exists) or from the online
 * sources. The issuer certificate must be provided, the underlining library
 * (bouncy castle) needs it to build the request.
 *
 * @param certToken the current token
 * @param certChain the complete chain
 * @return a set of found {@link RevocationToken}s
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
private Set<RevocationToken> getRevocationData(final CertificateToken certToken, List<Token> certChain) {
if (LOG.isTraceEnabled()) {
LOG.trace("Checking revocation data for : {}", certToken.getDSSIdAsString());
}
// self-signed/trusted/ocsp-no-check certificates need no revocation data
if (isRevocationDataNotRequired(certToken)) {
LOG.debug("Revocation data is not required for certificate : {}", certToken.getDSSIdAsString());
return Collections.emptySet();
}
CertificateToken issuerToken = getIssuer(certToken);
if (issuerToken == null) {
LOG.warn("Issuer not found for certificate {}", certToken.getDSSIdAsString());
return Collections.emptySet();
}
Set<RevocationToken> revocations = new HashSet<>();
// ALL Embedded revocation data
if (documentCRLSource != null) {
List<RevocationToken<CRL>> revocationTokens = documentCRLSource.getRevocationTokens(certToken, issuerToken);
for (RevocationToken revocationToken : revocationTokens) {
revocations.add(revocationToken);
addRevocationTokenForVerification(revocationToken);
}
}
if (documentOCSPSource != null) {
List<RevocationToken<OCSP>> revocationTokens = documentOCSPSource.getRevocationTokens(certToken, issuerToken);
for (RevocationToken revocationToken : revocationTokens) {
revocations.add(revocationToken);
addRevocationTokenForVerification(revocationToken);
}
}
// add processed revocation tokens
revocations.addAll(getRelatedRevocationTokens(certToken));
// go online only when nothing usable was embedded or a refresh is needed
if (Utils.isCollectionEmpty(revocations) || isRevocationDataRefreshNeeded(certToken, revocations)) {
LOG.debug("The signature does not contain relative revocation data.");
if (checkRevocationForUntrustedChains || containsTrustAnchor(certChain)) {
LOG.trace("Revocation update is in progress for certificate : {}", certToken.getDSSIdAsString());
CertificateToken trustAnchor = (CertificateToken) getFirstTrustAnchor(certChain);
// Fetch OCSP or CRL from online sources
final RevocationToken<Revocation> onlineRevocationToken = getRevocationToken(
certToken, issuerToken, trustAnchor);
// Check if the obtained revocation is not yet present
if (onlineRevocationToken != null && !revocations.contains(onlineRevocationToken)) {
LOG.debug("Obtained a new revocation data : {}, for certificate : {}",
onlineRevocationToken.getDSSIdAsString(), certToken.getDSSIdAsString());
revocations.add(onlineRevocationToken);
addRevocationTokenForVerification(onlineRevocationToken);
}
} else {
LOG.warn("External revocation check is skipped for untrusted certificate : {}", certToken.getDSSIdAsString());
}
}
if (revocations.isEmpty()) {
LOG.warn("No revocation found for the certificate {}", certToken.getDSSIdAsString());
}
return revocations;
}
/**
 * Checks whether the given chain contains at least one trusted certificate.
 *
 * @param certChain the chain to inspect
 * @return {@code true} when a trust anchor is present
 */
private <T extends Token> boolean containsTrustAnchor(List<T> certChain) {
return getFirstTrustAnchor(certChain) != null;
}
/**
 * Returns the first trusted token of the chain, or {@code null} when the chain
 * contains no trust anchor.
 *
 * @param certChain the chain to inspect
 * @return the first trusted token, or {@code null}
 */
private <T extends Token> Token getFirstTrustAnchor(List<T> certChain) {
    for (T chainEntry : certChain) {
        if (isTrusted(chainEntry)) {
            return chainEntry;
        }
    }
    return null;
}
/**
 * Fetches fresh revocation data for the certificate through the configured
 * loading strategy. When a trust anchor is known, the OCSP/CRL sources are wrapped
 * to also use the trust-service alternative URLs.
 *
 * @param certificateToken the certificate to get revocation data for
 * @param issuerCertificate its issuer (required to build the request)
 * @param trustAnchor the chain's trust anchor, or {@code null} for untrusted chains
 * @return the fetched revocation token, or {@code null} when none could be obtained
 */
private RevocationToken getRevocationToken(CertificateToken certificateToken, CertificateToken issuerCertificate,
CertificateToken trustAnchor) {
// configure the CompositeRevocationSource
RevocationSource<OCSP> currentOCSPSource;
RevocationSource<CRL> currentCRLSource;
ListCertificateSource currentCertSource = null;
if (!trustedCertSources.isEmpty() && (trustAnchor != null)) {
LOG.trace("Initializing a revocation verifier for a trusted chain...");
currentOCSPSource = instantiateOCSPWithTrustServices(trustAnchor);
currentCRLSource = instantiateCRLWithTrustServices(trustAnchor);
currentCertSource = trustedCertSources;
} else {
LOG.trace("Initializing a revocation verifier for not trusted chain...");
currentOCSPSource = remoteOCSPSource;
currentCRLSource = remoteCRLSource;
}
revocationDataLoadingStrategy.setOcspSource(currentOCSPSource);
revocationDataLoadingStrategy.setCrlSource(currentCRLSource);
revocationDataLoadingStrategy.setTrustedCertificateSource(currentCertSource);
// fetch the data
return revocationDataLoadingStrategy.getRevocationToken(certificateToken, issuerCertificate);
}
/**
 * Returns the remote OCSP source, wrapped to also query the trust-service
 * alternative OCSP URLs when such URLs exist and the source supports them.
 *
 * @param trustAnchor the trust anchor whose alternative URLs are looked up
 * @return the OCSP source to use for this chain
 */
private RevocationSource<OCSP> instantiateOCSPWithTrustServices(CertificateToken trustAnchor) {
    List<String> alternativeOCSPUrls = getAlternativeOCSPUrls(trustAnchor);
    if (Utils.isCollectionEmpty(alternativeOCSPUrls)
            || !(remoteOCSPSource instanceof RevocationSourceAlternateUrlsSupport)) {
        // no alternative URLs (or unsupported): use the plain remote source
        return remoteOCSPSource;
    }
    return new AlternateUrlsSourceAdapter<>((RevocationSourceAlternateUrlsSupport) remoteOCSPSource, alternativeOCSPUrls);
}
/**
 * Returns the remote CRL source, wrapped to also query the trust-service
 * alternative CRL URLs when such URLs exist and the source supports them.
 *
 * @param trustAnchor the trust anchor whose alternative URLs are looked up
 * @return the CRL source to use for this chain
 */
private RevocationSource<CRL> instantiateCRLWithTrustServices(CertificateToken trustAnchor) {
    List<String> alternativeCRLUrls = getAlternativeCRLUrls(trustAnchor);
    if (Utils.isCollectionEmpty(alternativeCRLUrls)
            || !(remoteCRLSource instanceof RevocationSourceAlternateUrlsSupport)) {
        // no alternative URLs (or unsupported): use the plain remote source
        return remoteCRLSource;
    }
    return new AlternateUrlsSourceAdapter<>((RevocationSourceAlternateUrlsSupport) remoteCRLSource, alternativeCRLUrls);
}
/**
 * Collects the alternative OCSP URLs declared by the trusted certificate sources
 * for the given trust anchor.
 *
 * @param trustAnchor the trust anchor to look up
 * @return the alternative OCSP URLs, possibly empty
 */
private List<String> getAlternativeOCSPUrls(CertificateToken trustAnchor) {
    final List<String> urls = new ArrayList<>();
    for (CertificateSource source : trustedCertSources.getSources()) {
        if (source instanceof CommonTrustedCertificateSource) {
            urls.addAll(((CommonTrustedCertificateSource) source).getAlternativeOCSPUrls(trustAnchor));
        }
    }
    return urls;
}
/**
 * Collects the alternative CRL URLs declared by the trusted certificate sources
 * for the given trust anchor.
 *
 * @param trustAnchor the trust anchor to look up
 * @return the alternative CRL URLs, possibly empty
 */
private List<String> getAlternativeCRLUrls(CertificateToken trustAnchor) {
    final List<String> urls = new ArrayList<>();
    for (CertificateSource source : trustedCertSources.getSources()) {
        if (source instanceof CommonTrustedCertificateSource) {
            urls.addAll(((CommonTrustedCertificateSource) source).getAlternativeCRLUrls(trustAnchor));
        }
    }
    return urls;
}
/**
 * Verifies that every certificate chain has the required revocation data
 * (simple presence check, no best-signature-time constraint). Missing data is
 * reported through the configured alert.
 *
 * @return {@code true} when no revocation data is missing
 */
@Override
public boolean checkAllRequiredRevocationDataPresent() {
List<String> errors = new ArrayList<>();
Map<CertificateToken, List<CertificateToken>> orderedCertificateChains = getOrderedCertificateChains();
for (List<CertificateToken> orderedCertChain : orderedCertificateChains.values()) {
// null bestSignatureTime => presence-only check
checkRevocationForCertificateChainAgainstBestSignatureTime(orderedCertChain, null, errors);
}
if (!errors.isEmpty()) {
Status status = new Status("Revocation data is missing for one or more certificate(s).", errors);
certificateVerifier.getAlertOnMissingRevocationData().alert(status);
}
return errors.isEmpty();
}
/**
 * Checks each certificate of the chain (up to the first trusted/self-signed one)
 * for revocation data issued after {@code bestSignatureTime}; when the time is
 * {@code null}, any revocation data counts. Problems are appended to {@code errors}.
 *
 * @param certificates the ordered chain to check (leaf first)
 * @param bestSignatureTime the reference time, or {@code null} for a presence-only check
 * @param errors collector for human-readable error messages
 */
private void checkRevocationForCertificateChainAgainstBestSignatureTime(List<CertificateToken> certificates,
Date bestSignatureTime, List<String> errors) {
for (CertificateToken certificateToken : certificates) {
if (isSelfSignedOrTrusted(certificateToken)) {
// break on the first trusted entry
break;
} else if (isOCSPNoCheckExtension(certificateToken)) {
// skip the revocation check for OCSP certs if no check is specified
continue;
}
boolean found = false;
Date earliestNextUpdate = null; // used for informational purpose only
List<RevocationToken> relatedRevocationTokens = getRelatedRevocationTokens(certificateToken);
for (RevocationToken<Revocation> revocationToken : relatedRevocationTokens) {
if (bestSignatureTime == null || revocationToken.getThisUpdate().after(bestSignatureTime)) {
found = true;
break;
} else {
// track the closest nextUpdate to enrich the error message below
if (revocationToken.getNextUpdate() != null &&
(earliestNextUpdate == null || revocationToken.getNextUpdate().before(earliestNextUpdate))) {
earliestNextUpdate = revocationToken.getNextUpdate();
}
}
}
if (!found) {
if (!certificateVerifier.isCheckRevocationForUntrustedChains() && !containsTrustAnchor(certificates)) {
errors.add(String.format("Revocation data is skipped for untrusted certificate chain for the token : '%s'", certificateToken.getDSSIdAsString()));
} else if (bestSignatureTime == null) {
// simple revocation presence check
errors.add(String.format("No revocation data found for certificate : %s", certificateToken.getDSSIdAsString()));
} else if (earliestNextUpdate != null) {
errors.add(String.format(
"No revocation data found after the best signature time [%s] "
+ "for the certificate : %s. \n The nextUpdate available after : [%s]",
bestSignatureTime, certificateToken.getDSSIdAsString(), earliestNextUpdate));
} else {
errors.add(String.format("No revocation data found after the best signature time [%s] for the certificate : %s", bestSignatureTime,
certificateToken.getDSSIdAsString()));
}
}
}
}
/**
 * Verifies that every certificate used in a timestamp chain is covered by
 * revocation data produced after its last recorded usage date. Uncovered POEs
 * are reported through the configured alert.
 *
 * @return {@code true} when all POEs are covered by valid revocation data
 */
@Override
public boolean checkAllPOECoveredByRevocationData() {
List<String> errors = new ArrayList<>();
for (Entry<CertificateToken, Date> entry : lastTimestampCertChainDates.entrySet()) {
Date lastUsage = entry.getValue();
CertificateToken certificateToken = entry.getKey();
if (!isRevocationDataNotRequired(certificateToken)) {
boolean foundValidRevocationDataAfterLastUsage = false;
Date nextUpdate = null; // latest known nextUpdate, for the error message only
List<RevocationToken> relatedRevocationTokens = getRelatedRevocationTokens(certificateToken);
for (RevocationToken<Revocation> revocationToken : relatedRevocationTokens) {
Date productionDate = revocationToken.getProductionDate();
if (productionDate.after(lastUsage)) {
foundValidRevocationDataAfterLastUsage = true;
break;
}
Date currentNextUpdate = revocationToken.getNextUpdate();
if (nextUpdate == null || (currentNextUpdate != null && nextUpdate.before(currentNextUpdate))) {
nextUpdate = currentNextUpdate;
}
}
if (!foundValidRevocationDataAfterLastUsage) {
errors.add(String.format("POE certificate '%s' not covered by a valid revocation data (nextUpdate : %s)",
certificateToken.getDSSIdAsString(), nextUpdate));
}
}
}
if (!errors.isEmpty()) {
Status status = new Status("Revocation data is missing for one or more POE(s).", errors);
certificateVerifier.getAlertOnUncoveredPOE().alert(status);
}
return errors.isEmpty();
}
/**
 * Verifies that every processed timestamp has an intact signature and an intact,
 * found message imprint. Broken timestamps are reported through the configured alert.
 *
 * @return {@code true} when no broken timestamp was detected
 */
@Override
public boolean checkAllTimestampsValid() {
    final Set<String> brokenTimestampIds = new HashSet<>();
    for (TimestampToken timestamp : processedTimestamps) {
        if (!timestamp.isSignatureIntact() || !timestamp.isMessageImprintDataFound() ||
                !timestamp.isMessageImprintDataIntact()) {
            brokenTimestampIds.add(timestamp.getDSSIdAsString());
        }
    }
    if (!brokenTimestampIds.isEmpty()) {
        Status status = new Status("Broken timestamp(s) detected.", brokenTimestampIds);
        certificateVerifier.getAlertOnInvalidTimestamp().alert(status);
    }
    return brokenTimestampIds.isEmpty();
}
/**
 * Verifies that no processed certificate is revoked (before its lowest POE) or has
 * an unknown revocation status. Only already-available revocation data is used;
 * presence is checked separately by {@code checkAllRequiredRevocationDataPresent()}.
 * Problems are reported through the configured alert.
 *
 * @return {@code true} when no revoked/suspended certificate was detected
 */
@Override
public boolean checkAllCertificatesValid() {
Set<String> invalidCertificateIds = new HashSet<>();
for (CertificateToken certificateToken : processedCertificates) {
if (!isRevocationDataNotRequired(certificateToken)) {
List<RevocationToken> relatedRevocationTokens = getRelatedRevocationTokens(certificateToken);
// check only available revocation data in order to not duplicate
// the method {@code checkAllRequiredRevocationDataPresent()}
if (Utils.isCollectionNotEmpty(relatedRevocationTokens)) {
// check if there is a best-signature-time before the revocation date
Date lowestPOETime = getLowestPOETime(certificateToken);
for (RevocationToken<Revocation> revocationToken : relatedRevocationTokens) {
// invalid when revoked before the lowest POE, or when status is unknown
if ((revocationToken.getStatus().isRevoked() && lowestPOETime != null &&
!lowestPOETime.before(revocationToken.getRevocationDate())) ||
!revocationToken.getStatus().isKnown()) {
invalidCertificateIds.add(certificateToken.getDSSIdAsString());
}
}
}
}
}
if (!invalidCertificateIds.isEmpty()) {
Status status = new Status("Revoked/Suspended certificate(s) detected.", invalidCertificateIds);
certificateVerifier.getAlertOnRevokedCertificate().alert(status);
}
return invalidCertificateIds.isEmpty();
}
/**
 * Tells whether revocation data is not required for the certificate
 * (self-signed, trusted, or carrying the id-pkix-ocsp-nocheck extension).
 *
 * @param certToken the certificate to evaluate
 * @return {@code true} when revocation data is not required
 */
private boolean isRevocationDataNotRequired(CertificateToken certToken) {
return isSelfSignedOrTrusted(certToken) || isOCSPNoCheckExtension(certToken);
}
/**
 * Tells whether the certificate is self-signed or present in a trusted source.
 *
 * @param certToken the certificate to evaluate
 * @return {@code true} for self-signed or trusted certificates
 */
private boolean isSelfSignedOrTrusted(CertificateToken certToken) {
return certToken.isSelfSigned() || isTrusted(certToken);
}
/**
 * Tells whether the certificate carries the id-pkix-ocsp-nocheck extension
 * (RFC 6960), which exempts an OCSP responder certificate from revocation checks.
 *
 * @param certToken the certificate to evaluate
 * @return {@code true} when the extension is present
 */
private boolean isOCSPNoCheckExtension(CertificateToken certToken) {
return DSSASN1Utils.hasIdPkixOcspNoCheckExtension(certToken);
}
/**
 * Returns all processed revocation tokens that relate to the given certificate
 * (matched by certificate identifier).
 *
 * @param certificateToken the certificate whose revocation tokens are collected
 * @return the related revocation tokens, possibly empty
 */
private List<RevocationToken> getRelatedRevocationTokens(CertificateToken certificateToken) {
    final String certificateId = certificateToken.getDSSIdAsString();
    final List<RevocationToken> relatedTokens = new ArrayList<>();
    for (RevocationToken<?> processed : processedRevocations) {
        if (Utils.areStringsEqual(certificateId, processed.getRelatedCertificateId())) {
            relatedTokens.add(processed);
        }
    }
    return relatedTokens;
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private boolean isRevocationDataRefreshNeeded(CertificateToken certToken, Collection<RevocationToken> revocations) {
// get last usage dates for the same timestamp certificate chain
Date refreshNeededAfterTime = lastTimestampCertChainDates.get(certToken);
if (refreshNeededAfterTime == null) {
// the best signature time for other tokens (i.e. B-level and revocation data)
// shall not return null
refreshNeededAfterTime = getLowestPOETime(certToken);
}
boolean freshRevocationDataFound = false;
for (RevocationToken<Revocation> revocationToken : revocations) {
if (refreshNeededAfterTime != null && (refreshNeededAfterTime.before(revocationToken.getProductionDate()))
&& (RevocationReason.CERTIFICATE_HOLD != revocationToken.getReason()
&& isConsistent(revocationToken, certToken))) {
freshRevocationDataFound = true;
break;
}
}
if (!freshRevocationDataFound) {
LOG.debug("Revocation data refresh is needed");
return true;
}
return false;
}
/**
 * Returns the earliest proof-of-existence time recorded for the given token.
 *
 * @param token the token whose POEs are inspected
 * @return the earliest POE time
 * @throws IllegalStateException when no POE was registered for the token
 */
private Date getLowestPOETime(Token token) {
    final List<POE> poeList = poeTimes.get(token.getDSSIdAsString());
    if (Utils.isCollectionEmpty(poeList)) {
        throw new IllegalStateException("POE shall be defined before accessing the 'poeTimes' list!");
    }
    Date earliest = null;
    for (POE poe : poeList) {
        final Date candidate = poe.getTime();
        if (earliest == null || candidate.before(earliest)) {
            earliest = candidate;
        }
    }
    return earliest;
}
/**
 * Checks the consistency of a revocation token against the certificate it covers:
 * the issuer must be resolvable, OCSP must be produced within its issuer's
 * validity, CRL must be issued within the certificate's validity, a POE must exist
 * between production and nextUpdate, and the revocation issuer must have a POE
 * within its own validity range.
 *
 * @param revocation the revocation token to evaluate
 * @param certToken the certificate the revocation relates to
 * @return {@code true} when the revocation token is consistent
 */
private boolean isConsistent(RevocationToken<Revocation> revocation, CertificateToken certToken) {
List<CertificateToken> certificateTokenChain = toCertificateTokenChain(getCertChain(revocation));
if (Utils.isCollectionEmpty(certificateTokenChain)) {
LOG.debug("The revocation {} is not consistent! Issuer CertificateToken is not found.",
revocation.getDSSIdAsString());
return false;
}
if (RevocationType.OCSP.equals(revocation.getRevocationType()) &&
!DSSRevocationUtils.checkIssuerValidAtRevocationProductionTime(revocation)) {
LOG.debug("The revocation {} is not consistent! The revocation has been produced outside " +
"the issuer certificate's validity range!", revocation.getDSSIdAsString());
return false;
}
if (RevocationType.CRL.equals(revocation.getRevocationType()) && (
!isInCertificateValidityRange(revocation, certToken))) {
LOG.debug("The revocation '{}' was not issued during the validity period of the certificate! Certificate: {}",
revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
return false;
}
if (revocation.getNextUpdate() != null && !hasPOEAfterProductionAndBeforeNextUpdate(revocation)) {
LOG.debug("There is no POE for the revocation '{}' after its production time and before the nextUpdate! " +
"Certificate: {}", revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
return false;
}
// useful for short-life certificates (i.e. ocsp responder)
CertificateToken revocationIssuer = certificateTokenChain.iterator().next();
if (!hasPOEInTheValidityRange(revocationIssuer)) {
LOG.info("There is no POE for the revocation issuer '{}' for revocation '{}' within its validity range! " +
"Certificate: {}", revocationIssuer.getDSSIdAsString(), revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
return false;
}
LOG.info("The revocation '{}' is consistent. Certificate: {}", revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
return true;
}
/**
 * Tells whether the revocation's [thisUpdate, nextUpdate] window overlaps the
 * certificate's validity period. A {@code null} nextUpdate yields {@code false}.
 *
 * @param revocationToken the revocation token to evaluate
 * @param certificateToken the certificate providing the validity period
 * @return {@code true} when the revocation window overlaps the certificate validity
 */
private boolean isInCertificateValidityRange(RevocationToken<?> revocationToken, CertificateToken certificateToken) {
    final Date thisUpdate = revocationToken.getThisUpdate();
    final Date nextUpdate = revocationToken.getNextUpdate();
    // issued no later than the certificate expiration...
    final boolean issuedBeforeExpiration = !thisUpdate.after(certificateToken.getNotAfter());
    // ...and still applicable no earlier than the certificate activation
    final boolean applicableAfterActivation = nextUpdate != null && !nextUpdate.before(certificateToken.getNotBefore());
    return issuedBeforeExpiration && applicableAfterActivation;
}
/**
 * Tells whether at least one registered POE for the revocation falls within its
 * [productionDate, nextUpdate] window.
 *
 * @param revocation the revocation token to evaluate
 * @return {@code true} when such a POE exists
 */
private boolean hasPOEAfterProductionAndBeforeNextUpdate(RevocationToken<Revocation> revocation) {
    final List<POE> poeTimeList = poeTimes.get(revocation.getDSSIdAsString());
    if (Utils.isCollectionEmpty(poeTimeList)) {
        return false;
    }
    for (POE poe : poeTimeList) {
        if (isConsistentOnTime(revocation, poe.getTime())) {
            return true;
        }
    }
    return false;
}
/**
 * Tells whether at least one registered POE for the certificate falls within its
 * validity period.
 *
 * @param certificateToken the certificate to evaluate
 * @return {@code true} when such a POE exists
 */
private boolean hasPOEInTheValidityRange(CertificateToken certificateToken) {
    final List<POE> poeTimeList = poeTimes.get(certificateToken.getDSSIdAsString());
    if (Utils.isCollectionEmpty(poeTimeList)) {
        return false;
    }
    for (POE poe : poeTimeList) {
        if (certificateToken.isValidOn(poe.getTime())) {
            return true;
        }
    }
    return false;
}
/**
 * Tells whether the given date lies within the revocation's
 * [productionDate, nextUpdate] window (bounds inclusive).
 * NOTE(review): assumes {@code getNextUpdate()} is non-null here; the caller
 * checks it beforehand — confirm if new call sites are added.
 *
 * @param revocationToken the revocation token providing the window
 * @param date the date to test
 * @return {@code true} when the date is inside the window
 */
private boolean isConsistentOnTime(RevocationToken<Revocation> revocationToken, Date date) {
    final Date productionDate = revocationToken.getProductionDate();
    final Date nextUpdate = revocationToken.getNextUpdate();
    return !date.before(productionDate) && !date.after(nextUpdate);
}
/**
 * Verifies that every certificate chain has revocation data issued after its
 * best-signature-time: the earliest signature-covering timestamp for the signing
 * certificate's chain, or the chain's last timestamp usage date otherwise.
 * Missing fresh data is reported through the configured alert.
 *
 * @param signature the signature whose chains are checked
 * @return {@code true} when fresh revocation data exists for all chains
 */
@Override
public boolean checkAtLeastOneRevocationDataPresentAfterBestSignatureTime(AdvancedSignature signature) {
List<String> errors = new ArrayList<>();
CertificateToken signingCertificateToken = signature.getSigningCertificateToken();
Map<CertificateToken, List<CertificateToken>> orderedCertificateChains = getOrderedCertificateChains();
for (Map.Entry<CertificateToken, List<CertificateToken>> entry : orderedCertificateChains.entrySet()) {
CertificateToken firstChainCertificate = entry.getKey();
Date bestSignatureTime = firstChainCertificate.equals(signingCertificateToken) ? getEarliestTimestampTime()
: lastTimestampCertChainDates.get(firstChainCertificate);
checkRevocationForCertificateChainAgainstBestSignatureTime(entry.getValue(), bestSignatureTime, errors);
}
if (!errors.isEmpty()) {
Status status = new Status("Fresh revocation data is missing for one or more certificate(s).", errors);
certificateVerifier.getAlertOnNoRevocationAfterBestSignatureTime().alert(status);
}
return errors.isEmpty();
}
/**
 * Returns the creation time of the earliest processed timestamp that covers
 * the signature, or {@code null} when no such timestamp exists.
 *
 * @return the earliest signature-covering timestamp time, or {@code null}
 */
private Date getEarliestTimestampTime() {
    Date earliest = null;
    for (TimestampToken timestampToken : getProcessedTimestamps()) {
        if (!timestampToken.getTimeStampType().coversSignature()) {
            continue;
        }
        final Date creationDate = timestampToken.getCreationDate();
        if (earliest == null || creationDate.before(earliest)) {
            earliest = creationDate;
        }
    }
    return earliest;
}
/**
 * Verifies that the signing certificate had a POE within its validity range
 * (recursively through timestamp issuers). An expired signature without such a
 * POE is reported through the configured alert. Returns {@code true} when no
 * signing certificate is available.
 *
 * @param signature the signature to check
 * @return {@code true} when the signature is not considered expired
 */
@Override
public boolean checkSignatureNotExpired(AdvancedSignature signature) {
CertificateToken signingCertificate = signature.getSigningCertificateToken();
if (signingCertificate != null) {
boolean signatureNotExpired = verifyCertificateTokenHasPOERecursively(signingCertificate, poeTimes.get(signature.getId()));
if (!signatureNotExpired) {
Status status = new Status("The signing certificate has been expired and " +
"there is no POE during its validity range.", Arrays.asList(signingCertificate.getDSSIdAsString()));
certificateVerifier.getAlertOnExpiredSignature().alert(status);
}
return signatureNotExpired;
}
return true;
}
/**
 * Checks whether the given certificate has at least one proof of existence (POE)
 * falling within its validity range. A POE backed by a timestamp is only
 * accepted if the timestamp's issuer certificate itself recursively has a
 * valid POE; a POE without a timestamp token (externally provided time) is
 * accepted directly.
 *
 * @param certificateToken the certificate to check
 * @param poeTimeList POEs collected for the token; may be null or empty
 * @return true when a (recursively) valid POE exists in the validity range
 */
private boolean verifyCertificateTokenHasPOERecursively(CertificateToken certificateToken, List<POE> poeTimeList) {
    if (Utils.isCollectionNotEmpty(poeTimeList)) {
        for (POE poeTime : poeTimeList) {
            // Only POEs inside the certificate's validity period are relevant.
            if (certificateToken.isValidOn(poeTime.getTime())) {
                TimestampToken timestampToken = poeTime.getTimestampToken();
                if (timestampToken != null) {
                    // POE comes from a timestamp: check that the timestamp's issuer
                    // certificate is itself covered by a valid POE (recursion keyed
                    // by the timestamp's DSS id).
                    CertificateToken issuerCertificateToken = getIssuer(timestampToken);
                    if (issuerCertificateToken != null &&
                            verifyCertificateTokenHasPOERecursively(issuerCertificateToken, poeTimes.get(timestampToken.getDSSIdAsString()))) {
                        return true;
                    }
                } else {
                    // POE was provided externally (e.g. validation time):
                    // the certificate is valid at that time, no recursion needed.
                    return true;
                }
            }
        }
    }
    // No POE (or none within the validity range) was found.
    return false;
}
@Override
public Set<CertificateToken> getProcessedCertificates() {
    // Read-only view: callers must not mutate the internal certificate set.
    Set<CertificateToken> certificates = processedCertificates;
    return Collections.unmodifiableSet(certificates);
}
@Override
public Set<RevocationToken> getProcessedRevocations() {
    // Read-only view: callers must not mutate the internal revocation set.
    Set<RevocationToken> revocations = processedRevocations;
    return Collections.unmodifiableSet(revocations);
}
@Override
public Set<TimestampToken> getProcessedTimestamps() {
    // Read-only view: callers must not mutate the internal timestamp set.
    Set<TimestampToken> timestamps = processedTimestamps;
    return Collections.unmodifiableSet(timestamps);
}
/**
 * Tells whether the token is a certificate anchored in a trusted source.
 * Non-certificate tokens are never considered trusted.
 */
private <T extends Token> boolean isTrusted(T token) {
    if (!(token instanceof CertificateToken)) {
        return false;
    }
    return trustedCertSources.isTrusted((CertificateToken) token);
}
@Override
public ValidationData getValidationData(final AdvancedSignature signature) {
    // Validation data is gathered starting from the signing certificate.
    CertificateToken signingCertificateToken = signature.getSigningCertificateToken();
    return getValidationData(signingCertificateToken);
}
@Override
public ValidationData getValidationData(final TimestampToken timestampToken) {
    // Validation data is gathered starting from the timestamp's issuer certificate.
    CertificateToken issuerCertificateToken = getIssuer(timestampToken);
    return getValidationData(issuerCertificateToken);
}
/**
 * Collects the validation data (certificate chain and related revocation
 * tokens) reachable from the given certificate.
 *
 * @param certificateToken starting point; may be null
 * @return the collected validation data, empty when the certificate is null
 */
private ValidationData getValidationData(final CertificateToken certificateToken) {
    final ValidationData validationData = new ValidationData();
    if (certificateToken == null) {
        return validationData;
    }
    populateValidationDataRecursively(certificateToken, validationData);
    return validationData;
}
/**
 * Adds the given token to the validation data and recurses into its related
 * revocation tokens (for certificates) and its issuer chain.
 *
 * @param token the token (certificate, revocation, ...) to add
 * @param validationData accumulator being populated
 */
private void populateValidationDataRecursively(final Token token, final ValidationData validationData) {
    // addToken returns false for already-present tokens, which terminates
    // the recursion and protects against cycles in the issuer chain.
    boolean added = validationData.addToken(token);
    if (added) {
        if (token instanceof CertificateToken) {
            // Certificates pull in their revocation data as well.
            List<RevocationToken> revocationTokens = getRelatedRevocationTokens((CertificateToken) token);
            for (RevocationToken revocationToken : revocationTokens) {
                populateValidationDataRecursively(revocationToken, validationData);
            }
        }
        // Walk up the issuer chain until no issuer is found.
        CertificateToken issuerToken = getIssuer(token);
        if (issuerToken != null) {
            populateValidationDataRecursively(issuerToken, validationData);
        }
    }
}
/**
 * A proof of existence (POE): either an externally provided time, or the
 * production time of a processed timestamp token.
 */
private static class POE {

    /** Time of the proof of existence. */
    private final Date time;

    /** Timestamp backing this POE; null when the time was provided externally. */
    private TimestampToken timestampToken;

    /**
     * Creates a POE from an externally provided time.
     *
     * @param time {@link Date} the POE time
     */
    public POE(final Date time) {
        this.time = time;
    }

    /**
     * Creates a POE backed by a timestamp token; the POE time is the
     * timestamp's production (creation) date.
     *
     * @param timestampToken {@link TimestampToken} providing the POE
     */
    public POE(TimestampToken timestampToken) {
        this.timestampToken = timestampToken;
        this.time = timestampToken.getCreationDate();
    }

    /**
     * Gets the POE time.
     *
     * @return {@link Date}
     */
    public Date getTime() {
        return time;
    }

    /**
     * Gets the timestamp token that produced this POE, when present.
     *
     * @return {@link TimestampToken}, or null for an externally provided POE
     */
    public TimestampToken getTimestampToken() {
        return timestampToken;
    }
}
}
|
DSS-2513 : skip POE check for trusted certificates
|
dss-document/src/main/java/eu/europa/esig/dss/validation/SignatureValidationContext.java
|
DSS-2513 : skip POE check for trusted certificates
|
<ide><path>ss-document/src/main/java/eu/europa/esig/dss/validation/SignatureValidationContext.java
<ide>
<ide> // useful for short-life certificates (i.e. ocsp responder)
<ide> CertificateToken revocationIssuer = certificateTokenChain.iterator().next();
<del> if (!hasPOEInTheValidityRange(revocationIssuer)) {
<del> LOG.info("There is no POE for the revocation issuer '{}' for revocation '{}' within its validity range! " +
<add> if (!isTrusted(revocationIssuer) && !hasPOEInTheValidityRange(revocationIssuer)) {
<add> LOG.debug("There is no POE for the revocation issuer '{}' for revocation '{}' within its validity range! " +
<ide> "Certificate: {}", revocationIssuer.getDSSIdAsString(), revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
<ide> return false;
<ide> }
<ide>
<del> LOG.info("The revocation '{}' is consistent. Certificate: {}", revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
<add> LOG.debug("The revocation '{}' is consistent. Certificate: {}", revocation.getDSSIdAsString(), certToken.getDSSIdAsString());
<ide> return true;
<ide> }
<ide>
|
|
Java
|
bsd-3-clause
|
25ef7ada542ac0ae66216329ffa89c70e2056ea6
| 0 |
sirixdb/sirix,sirixdb/sirix,sirixdb/sirix,sirixdb/sirix
|
/**
* Copyright (c) 2018, Sirix
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package function.sdb.io;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import org.brackit.xquery.QueryContext;
import org.brackit.xquery.QueryException;
import org.brackit.xquery.XQuery;
import org.brackit.xquery.compiler.CompileChain;
import org.custommonkey.xmlunit.DetailedDiff;
import org.custommonkey.xmlunit.Diff;
import org.custommonkey.xmlunit.Difference;
import org.junit.Test;
import org.sirix.TestHelper;
import org.sirix.service.xml.serialize.XMLSerializer;
import org.sirix.service.xml.serialize.XMLSerializer.XMLSerializerBuilder;
import org.sirix.utils.SirixFiles;
import org.sirix.xquery.SirixCompileChain;
import org.sirix.xquery.SirixQueryContext;
import org.sirix.xquery.node.DBNode;
import org.sirix.xquery.node.DBStore;
import org.xml.sax.SAXException;
import junit.framework.TestCase;
/**
* @author Johannes Lichtenberger <[email protected]>
*
*/
public final class ImportTestCase extends TestCase {

    // Scratch directory backing the sirix store; created per test, removed in tearDown.
    private Path mTempDir;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Each run gets its own temporary storage location.
        mTempDir = Files.createTempDirectory("sirix");
    }

    @Override
    protected void tearDown() throws Exception {
        // Remove the storage before the superclass tears down the fixture.
        SirixFiles.recursiveRemove(mTempDir);
        super.tearDown();
    }

    // Placeholder so the JUnit runner finds at least one executable test method.
    public void test() {
    }

    // The real import test is kept below for reference but disabled.
    // @Test
    // public void test() throws QueryException, IOException, SAXException {
    // final Path doc = Paths.get("src", "test", "resources", "revXMLsAll");
    //
    // // Initialize query context and store.
    // try (final DBStore store =
    // DBStore.newBuilder().location(mTempDir).buildPathSummary(false).build()) {
    // final CompileChain cc = new SirixCompileChain(store);
    // final QueryContext ctx = new SirixQueryContext(store);
    //
    // // Use XQuery to load sample document into store.
    // final String xq1 = String.format(
    // "sdb:load('mydoc.col', 'mydoc.xml', '%s')", doc.resolve("1.xml").toString());
    // new XQuery(cc, xq1).evaluate(ctx);
    //
    // // final String xq = "sdb:doc('mydoc.col', 'mydoc.xml')";
    // // final DBNode node = (DBNode) new XQuery(cc, xq).evaluate(ctx);
    // // final OutputStream out = new ByteArrayOutputStream();
    // // final XMLSerializer serializer =
    // // new XMLSerializerBuilder(node.getTrx().getResourceManager(), out).prettyPrint().build();
    // // serializer.call();
    // // System.out.println(out.toString());
    //
    // // Use XQuery to import the differences.
    // final String xq2 = String.format(
    // "sdb:import('mydoc.col', 'mydoc.xml', '%s')", doc.resolve("2.xml").toString());
    // final DBNode node = (DBNode) new XQuery(cc, xq2).evaluate(ctx);
    //
    // final OutputStream out = new ByteArrayOutputStream();
    // final XMLSerializer serializer =
    // new XMLSerializerBuilder(node.getTrx().getResourceManager(), out).build();
    // serializer.call();
    // System.out.println(out.toString());
    //
    // final StringBuilder sBuilder = TestHelper.readFile(doc.resolve("2.xml"), false);
    //
    // final Diff diff = new Diff(sBuilder.toString(), out.toString());
    // final DetailedDiff detDiff = new DetailedDiff(diff);
    // @SuppressWarnings("unchecked")
    // final List<Difference> differences = detDiff.getAllDifferences();
    // for (final Difference difference : differences) {
    // System.err.println("***********************");
    // System.err.println(difference);
    // System.err.println("***********************");
    // }
    //
    // assertTrue("pieces of XML are similar " + diff, diff.similar());
    // assertTrue("but are they identical? " + diff, diff.identical());
    // }
    // }
}
|
bundles/sirix-xquery/src/test/java/function/sdb/io/ImportTestCase.java
|
/**
* Copyright (c) 2018, Sirix
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package function.sdb.io;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import org.brackit.xquery.QueryContext;
import org.brackit.xquery.QueryException;
import org.brackit.xquery.XQuery;
import org.brackit.xquery.compiler.CompileChain;
import org.custommonkey.xmlunit.DetailedDiff;
import org.custommonkey.xmlunit.Diff;
import org.custommonkey.xmlunit.Difference;
import org.junit.Test;
import org.sirix.TestHelper;
import org.sirix.service.xml.serialize.XMLSerializer;
import org.sirix.service.xml.serialize.XMLSerializer.XMLSerializerBuilder;
import org.sirix.utils.SirixFiles;
import org.sirix.xquery.SirixCompileChain;
import org.sirix.xquery.SirixQueryContext;
import org.sirix.xquery.node.DBNode;
import org.sirix.xquery.node.DBStore;
import org.xml.sax.SAXException;
import junit.framework.TestCase;
/**
* @author Johannes Lichtenberger <[email protected]>
*
*/
public final class ImportTestCase extends TestCase {

    // Scratch directory backing the sirix store; created per test, removed in tearDown.
    private Path mTempDir;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Each run gets its own temporary storage location.
        mTempDir = Files.createTempDirectory("sirix");
    }

    @Override
    protected void tearDown() throws Exception {
        // Remove the storage before the superclass tears down the fixture.
        SirixFiles.recursiveRemove(mTempDir);
        super.tearDown();
    }

    // The import test is kept below for reference but disabled.
    // public void test() throws QueryException, IOException, SAXException {
    // final Path doc = Paths.get("src", "test", "resources", "revXMLsAll");
    //
    // // Initialize query context and store.
    // try (final DBStore store =
    // DBStore.newBuilder().location(mTempDir).buildPathSummary(false).build()) {
    // final CompileChain cc = new SirixCompileChain(store);
    // final QueryContext ctx = new SirixQueryContext(store);
    //
    // // Use XQuery to load sample document into store.
    // final String xq1 = String.format(
    // "sdb:load('mydoc.col', 'mydoc.xml', '%s')", doc.resolve("1.xml").toString());
    // new XQuery(cc, xq1).evaluate(ctx);
    //
    // // final String xq = "sdb:doc('mydoc.col', 'mydoc.xml')";
    // // final DBNode node = (DBNode) new XQuery(cc, xq).evaluate(ctx);
    // // final OutputStream out = new ByteArrayOutputStream();
    // // final XMLSerializer serializer =
    // // new XMLSerializerBuilder(node.getTrx().getResourceManager(), out).prettyPrint().build();
    // // serializer.call();
    // // System.out.println(out.toString());
    //
    // // Use XQuery to import the differences.
    // final String xq2 = String.format(
    // "sdb:import('mydoc.col', 'mydoc.xml', '%s')", doc.resolve("2.xml").toString());
    // final DBNode node = (DBNode) new XQuery(cc, xq2).evaluate(ctx);
    //
    // final OutputStream out = new ByteArrayOutputStream();
    // final XMLSerializer serializer =
    // new XMLSerializerBuilder(node.getTrx().getResourceManager(), out).build();
    // serializer.call();
    // System.out.println(out.toString());
    //
    // final StringBuilder sBuilder = TestHelper.readFile(doc.resolve("2.xml"), false);
    //
    // final Diff diff = new Diff(sBuilder.toString(), out.toString());
    // final DetailedDiff detDiff = new DetailedDiff(diff);
    // @SuppressWarnings("unchecked")
    // final List<Difference> differences = detDiff.getAllDifferences();
    // for (final Difference difference : differences) {
    // System.err.println("***********************");
    // System.err.println(difference);
    // System.err.println("***********************");
    // }
    //
    // assertTrue("pieces of XML are similar " + diff, diff.similar());
    // assertTrue("but are they identical? " + diff, diff.identical());
    // }
    // }
}
|
[MOD] Added empty test.
|
bundles/sirix-xquery/src/test/java/function/sdb/io/ImportTestCase.java
|
[MOD] Added empty test.
|
<ide><path>undles/sirix-xquery/src/test/java/function/sdb/io/ImportTestCase.java
<ide> super.tearDown();
<ide> }
<ide>
<add> public void test() {
<add>
<add> }
<add>
<add> // @Test
<ide> // public void test() throws QueryException, IOException, SAXException {
<ide> // final Path doc = Paths.get("src", "test", "resources", "revXMLsAll");
<ide> //
|
|
Java
|
mit
|
e6be6befe85073628ff686af6ddaafc53b8e9d96
| 0 |
konmik/nucleus,hoanganhx86/nucleus,radityagumay/nucleus,nguyenhongson03/nucleus,hejunbinlan/nucleus,hanhailong/nucleus,b-cuts/nucleus,lenguyenthanh/nucleus,treejames/nucleus,jmreyes/nucleus,Aphoh/nucleus
|
package nucleus;
import android.app.Activity;
import android.test.ActivityInstrumentationTestCase2;
import android.util.Log;
import java.util.concurrent.atomic.AtomicBoolean;
public class BaseActivityTest<ActivityClass extends Activity> extends ActivityInstrumentationTestCase2<ActivityClass> {

    public BaseActivityTest(Class<ActivityClass> activityClass) {
        super(activityClass);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Launch the activity eagerly so every test starts from a created instance.
        getActivity();
    }

    /** Predicate evaluated on the UI thread by {@link #waitFor(Condition)}. */
    public interface Condition {
        boolean call();
    }

    /**
     * Waits for an expression to become true on the main thread.
     *
     * @param condition expression polled synchronously on the UI thread
     */
    public void waitFor(final Condition condition) {
        final AtomicBoolean satisfied = new AtomicBoolean();
        // AtomicBoolean starts false, so the condition is evaluated at least once.
        while (!satisfied.get()) {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    satisfied.set(condition.call());
                }
            });
        }
    }

    /** Recreates the current activity and re-fetches the new instance. */
    public void restartActivity() {
        final Activity current = getActivity();
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                current.recreate();
            }
        });
        setActivity(null);
        getActivity(); // DOES NOT WORK ON LOLLIPOP - has some bug in instrumentation
    }

    /** Busy-waits for the given number of milliseconds, then logs completion. */
    private void sleep(String description, final int ms) {
        final long startNanos = System.nanoTime();
        waitFor(new Condition() {
            @Override
            public boolean call() {
                return (System.nanoTime() - startNanos) / 1000000 > ms;
            }
        });
        Log.v(getClass().getSimpleName(), "WAIT " + description + " COMPLETE");
    }

    /** Runs the given runnable synchronously on the UI thread. */
    public void runOnUiThread(final Runnable runnable) {
        getInstrumentation().runOnMainSync(runnable);
    }

    // prevents mockito exception on some devices
    public void dexmakerWorkaround() {
        System.setProperty("dexmaker.dexcache", getInstrumentation().getTargetContext().getCacheDir().getPath());
    }
}
|
nucleus-test-kit/src/main/java/nucleus/BaseActivityTest.java
|
package nucleus;
import android.app.Activity;
import android.test.ActivityInstrumentationTestCase2;
import java.util.concurrent.atomic.AtomicBoolean;
public class BaseActivityTest<ActivityClass extends Activity> extends ActivityInstrumentationTestCase2<ActivityClass> {

    public BaseActivityTest(Class<ActivityClass> activityClass) {
        super(activityClass);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Launch the activity eagerly so every test starts from a created instance.
        getActivity();
    }

    /** Predicate evaluated on the UI thread by {@link #waitFor(Condition)}. */
    public interface Condition {
        boolean call();
    }

    /**
     * Waits for an expression to become true on the main thread.
     *
     * @param condition expression polled synchronously on the UI thread
     */
    public void waitFor(final Condition condition) {
        final AtomicBoolean satisfied = new AtomicBoolean();
        // AtomicBoolean starts false, so the condition is evaluated at least once.
        while (!satisfied.get()) {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    satisfied.set(condition.call());
                }
            });
        }
    }

    /** Recreates the current activity and re-fetches the new instance. */
    public void restartActivity() {
        final Activity current = getActivity();
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                current.recreate();
            }
        });
        setActivity(null);
        getActivity();
    }

    /** Runs the given runnable synchronously on the UI thread. */
    public void runOnUiThread(final Runnable runnable) {
        getInstrumentation().runOnMainSync(runnable);
    }

    // prevents mockito exception on some devices
    public void dexmakerWorkaround() {
        System.setProperty("dexmaker.dexcache", getInstrumentation().getTargetContext().getCacheDir().getPath());
    }
}
|
sleep for instrumentation test
|
nucleus-test-kit/src/main/java/nucleus/BaseActivityTest.java
|
sleep for instrumentation test
|
<ide><path>ucleus-test-kit/src/main/java/nucleus/BaseActivityTest.java
<ide>
<ide> import android.app.Activity;
<ide> import android.test.ActivityInstrumentationTestCase2;
<add>import android.util.Log;
<ide>
<ide> import java.util.concurrent.atomic.AtomicBoolean;
<ide>
<ide> done.set(condition.call());
<ide> }
<ide> });
<del> } while (!done.get());
<add> }
<add> while (!done.get());
<ide> }
<ide>
<ide> public void restartActivity() {
<ide> }
<ide> });
<ide> setActivity(null);
<del> getActivity();
<add> getActivity(); // DOES NOT WORK ON LOLLIPOP - has some bug in instrumentation
<add> }
<add>
<add> private void sleep(String description, final int ms) {
<add> final long time1 = System.nanoTime();
<add> waitFor(new Condition() {
<add> @Override
<add> public boolean call() {
<add> return (System.nanoTime() - time1) / 1000000 > ms;
<add> }
<add> });
<add> Log.v(getClass().getSimpleName(), "WAIT " + description + " COMPLETE");
<ide> }
<ide>
<ide> public void runOnUiThread(final Runnable runnable) {
|
|
Java
|
mit
|
f88c50ddac068dd0cafd00f3b5f10f448674d997
| 0 |
kzantow/git-plugin,kzantow/git-plugin,v1v/git-plugin,martinda/git-plugin,v1v/git-plugin,jenkinsci/git-plugin,recena/git-plugin,MarkEWaite/git-plugin,kzantow/git-plugin,jacob-keller/git-plugin,martinda/git-plugin,jacob-keller/git-plugin,MarkEWaite/git-plugin,recena/git-plugin,jacob-keller/git-plugin,jenkinsci/git-plugin,martinda/git-plugin,jenkinsci/git-plugin,v1v/git-plugin,jenkinsci/git-plugin,MarkEWaite/git-plugin,recena/git-plugin,MarkEWaite/git-plugin
|
package hudson.plugins.git.extensions.impl;
import hudson.AbortException;
import hudson.Extension;
import hudson.plugins.git.GitException;
import hudson.plugins.git.GitSCM;
import hudson.plugins.git.Revision;
import hudson.plugins.git.Branch;
import hudson.plugins.git.UserMergeOptions;
import hudson.plugins.git.extensions.GitClientType;
import hudson.plugins.git.extensions.GitSCMExtension;
import hudson.plugins.git.extensions.GitSCMExtensionDescriptor;
import hudson.plugins.git.util.Build;
import hudson.plugins.git.util.BuildData;
import hudson.plugins.git.util.GitUtils;
import hudson.plugins.git.util.MergeRecord;
import org.eclipse.jgit.lib.ObjectId;
import org.jenkinsci.plugins.gitclient.CheckoutCommand;
import org.jenkinsci.plugins.gitclient.GitClient;
import org.jenkinsci.plugins.gitclient.MergeCommand;
import org.kohsuke.stapler.DataBoundConstructor;
import java.io.IOException;
import static hudson.model.Result.FAILURE;
import hudson.model.Run;
import hudson.model.TaskListener;
import static org.eclipse.jgit.lib.Constants.HEAD;
/**
* Speculatively merge the selected commit with another branch before the build to answer the "what happens
* if I were to integrate this feature branch back to the master?" question.
*
* @author Nigel Magney
* @author Nicolas Deloof
* @author Andrew Bayer
* @author Kohsuke Kawaguchi
*/
public class PreBuildMerge extends GitSCMExtension {
    // Merge configuration (target ref, strategy, fast-forward mode) supplied by the user.
    private UserMergeOptions options;

    @DataBoundConstructor
    public PreBuildMerge(UserMergeOptions options) {
        if (options==null) throw new IllegalStateException();
        this.options = options;
    }

    /** Returns the user-supplied merge options. */
    public UserMergeOptions getOptions() {
        return options;
    }

    /**
     * Performs the speculative merge: checks out the configured target branch,
     * merges the revision to build into it, and returns the merge result as the
     * revision to build. On merge conflict the working tree is restored to a
     * clean state, the failure is recorded, and the build is aborted.
     */
    @Override
    public Revision decorateRevisionToBuild(GitSCM scm, Run<?, ?> build, GitClient git, TaskListener listener, Revision marked, Revision rev) throws IOException, InterruptedException {
        // Expand build parameters in the configured target ref (e.g. origin/master).
        String remoteBranchRef = GitSCM.getParameterString(options.getRef(), build.getEnvironment(listener));

        // if the branch we are merging is already at the commit being built, the entire merge becomes no-op
        // so there's nothing to do
        if (rev.containsBranchName(remoteBranchRef))
            return rev;

        // Only merge if there's a branch to merge that isn't us..
        listener.getLogger().println("Merging " + rev + " to " + remoteBranchRef + ", " + scm.getUserMergeOptions().toString());

        // checkout origin/blah
        ObjectId target = git.revParse(remoteBranchRef);
        String paramLocalBranch = scm.getParamLocalBranch(build, listener);
        // Check out the merge target onto the local branch, letting every
        // extension decorate the checkout (timeouts, sparse checkout, ...).
        CheckoutCommand checkoutCommand = git.checkout().branch(paramLocalBranch).ref(remoteBranchRef).deleteBranchIfExist(true);
        for (GitSCMExtension ext : scm.getExtensions())
            ext.decorateCheckoutCommand(scm, build, git, listener, checkoutCommand);
        checkoutCommand.execute();

        try {
            // Merge the candidate revision into the checked-out target branch.
            MergeCommand cmd = git.merge().setRevisionToMerge(rev.getSha1());
            for (GitSCMExtension ext : scm.getExtensions())
                ext.decorateMergeCommand(scm, build, git, listener, cmd);
            cmd.execute();
        } catch (GitException ex) {
            // merge conflict. First, avoid leaving any conflict markers in the working tree
            // by checking out some known clean state. We don't really mind what commit this is,
            // since the next build is going to pick its own commit to build, but 'rev' is as good any.
            checkoutCommand = git.checkout().branch(paramLocalBranch).ref(rev.getSha1String()).deleteBranchIfExist(true);
            for (GitSCMExtension ext : scm.getExtensions())
                ext.decorateCheckoutCommand(scm, build, git, listener, checkoutCommand);
            checkoutCommand.execute();
            // record the fact that we've tried building 'rev' and it failed, or else
            // BuildChooser in future builds will pick up this same 'rev' again and we'll see the exact same merge failure
            // all over again.
            BuildData bd = scm.getBuildData(build);
            if(bd != null){
                bd.saveBuild(new Build(marked,rev, build.getNumber(), FAILURE));
            } else {
                listener.getLogger().println("Was not possible to get build data");
            }
            throw new AbortException("Branch not suitable for integration as it does not merge cleanly: " + ex.getMessage());
        }
        // Record the merge target so downstream steps know what was merged against.
        build.addAction(new MergeRecord(remoteBranchRef,target.getName()));

        // HEAD now points at the merge commit; report it as the revision to build.
        Revision mergeRevision = new GitUtils(listener,git).getRevisionForSHA1(git.revParse(HEAD));
        mergeRevision.getBranches().add(new Branch(remoteBranchRef, target));
        return mergeRevision;
    }

    /**
     * Applies the user-selected merge strategy and fast-forward mode to the
     * merge command.
     */
    @Override
    public void decorateMergeCommand(GitSCM scm, Run<?, ?> build, GitClient git, TaskListener listener, MergeCommand cmd) throws IOException, InterruptedException, GitException {
        if (scm.getUserMergeOptions().getMergeStrategy() != null)
            cmd.setStrategy(scm.getUserMergeOptions().getMergeStrategy());
        cmd.setGitPluginFastForwardMode(scm.getUserMergeOptions().getFastForwardMode());
    }

    /** This extension requires the command-line git implementation. */
    @Override
    public GitClientType getRequiredClient() {
        return GitClientType.GITCLI;
    }

    @Extension
    public static class DescriptorImpl extends GitSCMExtensionDescriptor {
        @Override
        public String getDisplayName() {
            return "Merge before build";
        }
    }
}
|
src/main/java/hudson/plugins/git/extensions/impl/PreBuildMerge.java
|
package hudson.plugins.git.extensions.impl;
import hudson.AbortException;
import hudson.Extension;
import hudson.plugins.git.GitException;
import hudson.plugins.git.GitSCM;
import hudson.plugins.git.Revision;
import hudson.plugins.git.Branch;
import hudson.plugins.git.UserMergeOptions;
import hudson.plugins.git.extensions.GitClientType;
import hudson.plugins.git.extensions.GitSCMExtension;
import hudson.plugins.git.extensions.GitSCMExtensionDescriptor;
import hudson.plugins.git.util.Build;
import hudson.plugins.git.util.BuildData;
import hudson.plugins.git.util.GitUtils;
import hudson.plugins.git.util.MergeRecord;
import org.eclipse.jgit.lib.ObjectId;
import org.jenkinsci.plugins.gitclient.CheckoutCommand;
import org.jenkinsci.plugins.gitclient.GitClient;
import org.jenkinsci.plugins.gitclient.MergeCommand;
import org.kohsuke.stapler.DataBoundConstructor;
import java.io.IOException;
import static hudson.model.Result.FAILURE;
import hudson.model.Run;
import hudson.model.TaskListener;
import static org.eclipse.jgit.lib.Constants.HEAD;
/**
* Speculatively merge the selected commit with another branch before the build to answer the "what happens
* if I were to integrate this feature branch back to the master?" question.
*
* @author Nigel Magney
* @author Nicolas Deloof
* @author Andrew Bayer
* @author Kohsuke Kawaguchi
*/
public class PreBuildMerge extends GitSCMExtension {
    // Merge configuration (target ref, strategy, fast-forward mode) supplied by the user.
    private UserMergeOptions options;

    @DataBoundConstructor
    public PreBuildMerge(UserMergeOptions options) {
        if (options==null) throw new IllegalStateException();
        this.options = options;
    }

    /** Returns the user-supplied merge options. */
    public UserMergeOptions getOptions() {
        return options;
    }

    /**
     * Performs the speculative merge: checks out the configured target branch,
     * merges the revision to build into it, and returns the merge result as the
     * revision to build. On merge conflict the working tree is restored to a
     * clean state, the failure is recorded, and the build is aborted.
     */
    @Override
    public Revision decorateRevisionToBuild(GitSCM scm, Run<?, ?> build, GitClient git, TaskListener listener, Revision marked, Revision rev) throws IOException, InterruptedException {
        // Expand build parameters in the configured target ref (e.g. origin/master).
        String remoteBranchRef = GitSCM.getParameterString(options.getRef(), build.getEnvironment(listener));

        // if the branch we are merging is already at the commit being built, the entire merge becomes no-op
        // so there's nothing to do
        if (rev.containsBranchName(remoteBranchRef))
            return rev;

        // Only merge if there's a branch to merge that isn't us..
        listener.getLogger().println("Merging " + rev + " to " + remoteBranchRef + ", " + scm.getUserMergeOptions().toString());

        // checkout origin/blah
        ObjectId target = git.revParse(remoteBranchRef);
        String paramLocalBranch = scm.getParamLocalBranch(build, listener);
        // Check out the merge target onto the local branch, letting every
        // extension decorate the checkout (timeouts, sparse checkout, ...).
        CheckoutCommand checkoutCommand = git.checkout().branch(paramLocalBranch).ref(remoteBranchRef).deleteBranchIfExist(true);
        for (GitSCMExtension ext : scm.getExtensions())
            ext.decorateCheckoutCommand(scm, build, git, listener, checkoutCommand);
        checkoutCommand.execute();

        try {
            // Merge the candidate revision into the checked-out target branch.
            MergeCommand cmd = git.merge().setRevisionToMerge(rev.getSha1());
            for (GitSCMExtension ext : scm.getExtensions())
                ext.decorateMergeCommand(scm, build, git, listener, cmd);
            cmd.execute();
        } catch (GitException ex) {
            // merge conflict. First, avoid leaving any conflict markers in the working tree
            // by checking out some known clean state. We don't really mind what commit this is,
            // since the next build is going to pick its own commit to build, but 'rev' is as good any.
            checkoutCommand = git.checkout().branch(paramLocalBranch).ref(rev.getSha1String()).deleteBranchIfExist(true);
            for (GitSCMExtension ext : scm.getExtensions())
                ext.decorateCheckoutCommand(scm, build, git, listener, checkoutCommand);
            checkoutCommand.execute();
            // record the fact that we've tried building 'rev' and it failed, or else
            // BuildChooser in future builds will pick up this same 'rev' again and we'll see the exact same merge failure
            // all over again.
            BuildData bd = scm.getBuildData(build);
            if(bd != null){
                bd.saveBuild(new Build(marked,rev, build.getNumber(), FAILURE));
            }
            throw new AbortException("Branch not suitable for integration as it does not merge cleanly: " + ex.getMessage());
        }
        // Record the merge target so downstream steps know what was merged against.
        build.addAction(new MergeRecord(remoteBranchRef,target.getName()));

        // HEAD now points at the merge commit; report it as the revision to build.
        Revision mergeRevision = new GitUtils(listener,git).getRevisionForSHA1(git.revParse(HEAD));
        mergeRevision.getBranches().add(new Branch(remoteBranchRef, target));
        return mergeRevision;
    }

    /**
     * Applies the user-selected merge strategy and fast-forward mode to the
     * merge command.
     */
    @Override
    public void decorateMergeCommand(GitSCM scm, Run<?, ?> build, GitClient git, TaskListener listener, MergeCommand cmd) throws IOException, InterruptedException, GitException {
        if (scm.getUserMergeOptions().getMergeStrategy() != null)
            cmd.setStrategy(scm.getUserMergeOptions().getMergeStrategy());
        cmd.setGitPluginFastForwardMode(scm.getUserMergeOptions().getFastForwardMode());
    }

    /** This extension requires the command-line git implementation. */
    @Override
    public GitClientType getRequiredClient() {
        return GitClientType.GITCLI;
    }

    @Extension
    public static class DescriptorImpl extends GitSCMExtensionDescriptor {
        @Override
        public String getDisplayName() {
            return "Merge before build";
        }
    }
}
|
warning about unexpected behaviour
|
src/main/java/hudson/plugins/git/extensions/impl/PreBuildMerge.java
|
warning about unexpected behaviour
|
<ide><path>rc/main/java/hudson/plugins/git/extensions/impl/PreBuildMerge.java
<ide> BuildData bd = scm.getBuildData(build);
<ide> if(bd != null){
<ide> bd.saveBuild(new Build(marked,rev, build.getNumber(), FAILURE));
<add> } else {
<add> listener.getLogger().println("Was not possible to get build data");
<ide> }
<ide> throw new AbortException("Branch not suitable for integration as it does not merge cleanly: " + ex.getMessage());
<ide> }
|
|
Java
|
lgpl-2.1
|
c39a89778c21dee44e0956cf0c1ab304dcf12593
| 0 |
nuxeo-archives/nuxeo-features,deadcyclo/nuxeo-features,bjalon/nuxeo-features,nuxeo-archives/nuxeo-features,deadcyclo/nuxeo-features,deadcyclo/nuxeo-features,bjalon/nuxeo-features,nuxeo-archives/nuxeo-features,deadcyclo/nuxeo-features,nuxeo-archives/nuxeo-features,nuxeo-archives/nuxeo-features,bjalon/nuxeo-features,bjalon/nuxeo-features,bjalon/nuxeo-features,bjalon/nuxeo-features,deadcyclo/nuxeo-features,deadcyclo/nuxeo-features
|
/*
* (C) Copyright 2006-2007 Nuxeo SAS (http://nuxeo.com/) and contributors.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Contributors:
* Nuxeo - initial API and implementation
*
* $Id$
*/
package org.nuxeo.ecm.directory.ldap;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SimpleTimeZone;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.SizeLimitExceededException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.BasicAttribute;
import javax.naming.directory.BasicAttributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.nuxeo.ecm.core.api.ClientException;
import org.nuxeo.ecm.core.api.DataModel;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.DocumentModelList;
import org.nuxeo.ecm.core.api.impl.DataModelImpl;
import org.nuxeo.ecm.core.api.impl.DocumentModelImpl;
import org.nuxeo.ecm.core.api.impl.DocumentModelListImpl;
import org.nuxeo.ecm.core.schema.types.Field;
import org.nuxeo.ecm.core.schema.types.Type;
import org.nuxeo.ecm.core.utils.SIDGenerator;
import org.nuxeo.ecm.directory.Directory;
import org.nuxeo.ecm.directory.DirectoryException;
import org.nuxeo.ecm.directory.DirectoryFieldMapper;
import org.nuxeo.ecm.directory.EntrySource;
import org.nuxeo.ecm.directory.Reference;
import org.nuxeo.ecm.directory.Session;
/**
* This class represents a session against an LDAPDirectory.
*
* @author Olivier Grisel <[email protected]>
*
*/
public class LDAPSession implements Session, EntrySource {
protected static final String MISSING_ID_LOWER_CASE = "lower";
protected static final String MISSING_ID_UPPER_CASE = "upper";
// directory connection parameters
private static final Log log = LogFactory.getLog(LDAPSession.class);
protected final String schemaName;
protected final DirContext dirContext;
protected final String idAttribute;
protected final LDAPDirectory directory;
protected final String searchBaseDn;
protected final Set<String> emptySet = Collections.emptySet();
protected final String sid;
protected final Map<String, Field> schemaFieldMap;
protected final String substringMatchType;
    /**
     * Creates a session bound to the given directory and LDAP context.
     * Caches the directory configuration (id attribute, schema, search base,
     * substring match type) in final fields for the session's lifetime.
     *
     * @param directory the owning LDAP directory (provides config and mappers)
     * @param dirContext the already-opened JNDI context used for all operations
     */
    public LDAPSession(LDAPDirectory directory, DirContext dirContext) {
        this.directory = directory;
        this.dirContext = dirContext;
        DirectoryFieldMapper fieldMapper = directory.getFieldMapper();
        // LDAP attribute backing the directory's id field
        idAttribute = fieldMapper.getBackendField(directory.getConfig().getIdField());
        schemaName = directory.getSchema();
        schemaFieldMap = directory.getSchemaFieldMap();
        // unique session id used when building DocumentModel instances
        sid = String.valueOf(SIDGenerator.next());
        searchBaseDn = directory.getConfig().getSearchBaseDn();
        substringMatchType = directory.getConfig().getSubstringMatchType();
    }
public Directory getDirectory() {
return directory;
}
public DirContext getContext() {
return dirContext;
}
    /**
     * Creates a new LDAP entry from the given field map and binds it under the
     * configured creation base DN.
     * <p>
     * Mandatory attributes of the creation object classes are pre-filled with a
     * single space placeholder; static LDAP references are initialized with the
     * configured empty-reference marker, then actual reference links are added
     * after the bind. Returns {@code null} when the directory is read-only.
     *
     * @param fieldMap directory field name to value map; must contain the id field
     * @return a DocumentModel built from the field map, or null if read-only
     * @throws DirectoryException wrapping any failure during attribute
     *             construction, bind or reference link creation
     */
    public DocumentModel createEntry(Map<String, Object> fieldMap)
            throws DirectoryException {
        if (isReadOnly()) {
            return null;
        }
        List<String> referenceFieldList = new LinkedList<String>();
        try {
            // DN of the new entry: <idAttribute>=<id>,<creationBaseDn>
            String dn = String.format("%s=%s,%s", idAttribute,
                    fieldMap.get(getIdField()),
                    directory.getConfig().getCreationBaseDn());
            Attributes attrs = new BasicAttributes();
            Attribute attr;
            // pre-fill schema-mandatory attributes with a placeholder so the
            // server-side schema constraints are satisfied
            List<String> mandatoryAttributes = getMandatoryAttributes();
            for (String mandatoryAttribute : mandatoryAttributes) {
                attr = new BasicAttribute(mandatoryAttribute);
                attr.add(" ");
                attrs.put(attr);
            }
            String[] creationClasses = directory.getConfig().getCreationClasses();
            if (creationClasses.length != 0) {
                attr = new BasicAttribute("objectclass");
                for (String creationClasse : creationClasses) {
                    attr.add(creationClasse);
                }
                attrs.put(attr);
            }
            String backendFieldId;
            for (String fieldId : fieldMap.keySet()) {
                backendFieldId = directory.getFieldMapper().getBackendField(
                        fieldId);
                if (backendFieldId.equals(getPasswordField())) {
                    attr = new BasicAttribute(backendFieldId);
                    attr.add(fieldMap.get(fieldId)); // TODO: encode in ssha
                    // or md5
                    attrs.put(attr);
                } else if (directory.isReference(fieldId)) {
                    // static LDAP references get the empty-ref marker now;
                    // real links are added after the bind below
                    Reference reference = directory.getReference(fieldId);
                    if (reference instanceof LDAPReference) {
                        attr = new BasicAttribute(
                                ((LDAPReference) reference).getStaticAttributeId());
                        attr.add(directory.getConfig().getEmptyRefMarker());
                        attrs.put(attr);
                    }
                    referenceFieldList.add(fieldId);
                } else {
                    Object value = fieldMap.get(fieldId);
                    if ((value != null) && !value.equals("")) {
                        attrs.put(getAttributeValue(fieldId, value));
                    }
                }
            }
            if (log.isDebugEnabled()) {
                log.debug(String.format(
                        "LDAPSession.createEntry(%s): LDAP bind dn=%s attrs=%s",
                        fieldMap.toString(), dn, attrs.toString()));
            }
            dirContext.bind(dn, null, attrs);
            // now that the entry exists, create the actual reference links
            for (String referenceFieldName : referenceFieldList) {
                Reference reference = directory.getReference(referenceFieldName);
                List<String> targetIds = (List<String>) fieldMap.get(referenceFieldName);
                reference.addLinks((String) fieldMap.get(getIdField()),
                        targetIds);
            }
            directory.invalidateCaches();
            return fieldMapToDocumentModel(fieldMap);
        } catch (Exception e) {
            throw new DirectoryException("createEntry failed", e);
        }
    }
public DocumentModel getEntry(String id) throws DirectoryException {
return directory.getCache().getEntry(id, this);
}
public DocumentModel getEntryFromSource(String id)
throws DirectoryException {
try {
SearchResult result = getLdapEntry(id);
if (result == null) {
return null;
}
// fetch result with references
return ldapResultToDocumentModel(result, id, true);
} catch (NamingException e) {
throw new DirectoryException("getEntry failed: " + e.getMessage(),
e);
}
}
public boolean hasEntry(String id) throws DirectoryException {
try {
// TODO: check directory cache first
return getLdapEntry(id) != null;
} catch (NamingException e) {
throw new DirectoryException("hasEntry failed: " + e.getMessage(),
e);
}
}
protected SearchResult getLdapEntry(String id) throws NamingException,
DirectoryException {
return getLdapEntry(id, false);
}
    /**
     * Searches the LDAP server for the single entry whose id attribute matches
     * the given id, combined with the directory's base filter.
     *
     * @param id value matched against the id attribute (passed as a filter
     *            argument so special characters are escaped by JNDI)
     * @param fetchAllAttributes whether the search controls should request all
     *            attributes instead of just the mapped ones
     * @return the matching entry, or null if none was found
     * @throws DirectoryException if more than one entry matches the id
     */
    protected SearchResult getLdapEntry(String id, boolean fetchAllAttributes)
            throws NamingException, DirectoryException {
        String filterExpr;
        // the base filter may or may not already be parenthesized
        if (directory.getBaseFilter().startsWith("(")) {
            filterExpr = String.format("(&(%s={0})%s)", idAttribute,
                    directory.getBaseFilter());
        } else {
            filterExpr = String.format("(&(%s={0})(%s))", idAttribute,
                    directory.getBaseFilter());
        }
        String[] filterArgs = new String[] { id };
        SearchControls scts = directory.getSearchControls(fetchAllAttributes);
        if (log.isDebugEnabled()) {
            log.debug(String.format(
                    "LDAPSession.getLdapEntry(%s, %s): LDAP search base='%s' filter='%s' "
                            + " args='%s' scope='%s'", id, fetchAllAttributes,
                    searchBaseDn, filterExpr, id, scts.getSearchScope()));
        }
        NamingEnumeration<SearchResult> results = dirContext.search(
                searchBaseDn, filterExpr, filterArgs, scts);
        if (!results.hasMore()) {
            log.debug("Entry not found: " + id);
            return null;
        }
        SearchResult result = results.next();
        // ids are expected to be unique: a second hit is an error
        if (results.hasMore()) {
            log.debug("More than one entry found");
            throw new DirectoryException("more than one entry found for " + id);
        }
        return result;
    }
    /**
     * Returns all entries matching the directory's base filter.
     * Reference fields are NOT fetched (they come back as empty lists).
     *
     * @throws org.nuxeo.ecm.directory.SizeLimitExceededException when the
     *             server-side or client-side size limit is hit
     * @throws DirectoryException on any other naming failure
     */
    public DocumentModelList getEntries() throws DirectoryException {
        try {
            SearchControls scts = directory.getSearchControls();
            if (log.isDebugEnabled()) {
                log.debug(String.format(
                        "LDAPSession.getEntries(): LDAP search base='%s' filter='%s' "
                                + " args=* scope=%s", searchBaseDn,
                        directory.getBaseFilter(), scts.getSearchScope()));
            }
            NamingEnumeration<SearchResult> results = dirContext.search(
                    searchBaseDn, directory.getBaseFilter(), scts);
            // skip reference fetching
            return ldapResultsToDocumentModels(results, false);
        } catch (SizeLimitExceededException e) {
            throw new org.nuxeo.ecm.directory.SizeLimitExceededException(e);
        } catch (NamingException e) {
            throw new DirectoryException("getEntries failed", e);
        }
    }
    /**
     * Updates the LDAP entry backing the given document model.
     * <p>
     * Only dirty fields are considered. Non-reference fields are split into a
     * REPLACE_ATTRIBUTE set (new values, or a space placeholder for emptied
     * mandatory attributes) and a REMOVE_ATTRIBUTE set (emptied optional
     * attributes that currently exist on the server). Reference fields are
     * updated through their {@link Reference} regardless of read-only mode.
     *
     * @param docModel the entry to update; its id selects the LDAP entry
     * @throws DirectoryException wrapping any lookup or modify failure
     */
    public void updateEntry(DocumentModel docModel) throws DirectoryException {
        List<String> updateList = new ArrayList<String>();
        List<String> referenceFieldList = new LinkedList<String>();
        try {
            DataModel dataModel = docModel.getDataModel(schemaName);
            // partition the dirty fields: references vs plain attributes
            for (String fieldName : schemaFieldMap.keySet()) {
                if (!dataModel.isDirty(fieldName)) {
                    continue;
                }
                if (directory.isReference(fieldName)) {
                    referenceFieldList.add(fieldName);
                } else {
                    updateList.add(fieldName);
                }
            }
            if (!isReadOnly() && !updateList.isEmpty()) {
                Attributes attrs = new BasicAttributes();
                Attribute attr;
                SearchResult ldapEntry = getLdapEntry(docModel.getId());
                if (ldapEntry == null) {
                    throw new DirectoryException(docModel.getId()
                            + " not found");
                }
                Attributes oldattrs = ldapEntry.getAttributes();
                String dn = ldapEntry.getNameInNamespace();
                Attributes attrsToDel = new BasicAttributes();
                for (String f : updateList) {
                    // TODO: encode password
                    Object value = docModel.getProperty(schemaName, f);
                    String backendField = directory.getFieldMapper().getBackendField(
                            f);
                    if ((value == null) || (value.equals(""))) {
                        if (getMandatoryAttributes().contains(backendField)) {
                            // mandatory attributes cannot be removed: replace
                            // with a space placeholder instead
                            attr = new BasicAttribute(backendField);
                            attr.add(" ");
                            attrs.put(attr);
                        } else if (oldattrs.get(backendField) != null) {
                            attr = new BasicAttribute(backendField);
                            attr.add(oldattrs.get(backendField).get());
                            attrsToDel.put(attr);
                        }
                    } else {
                        attrs.put(getAttributeValue(f, value));
                    }
                }
                if (log.isDebugEnabled()) {
                    log.debug(String.format(
                            "LDAPSession.updateEntry(%s): LDAP modifyAttributes dn='%s' mod_op='REMOVE_ATTRIBUTE' attr='%s'",
                            docModel.toString(), dn, attrsToDel.toString()));
                }
                dirContext.modifyAttributes(dn, DirContext.REMOVE_ATTRIBUTE,
                        attrsToDel);
                if (log.isDebugEnabled()) {
                    log.debug(String.format(
                            "LDAPSession.updateEntry(%s): LDAP modifyAttributes dn='%s' mod_op='REPLACE_ATTRIBUTE' attr='%s'",
                            docModel.toString(), dn, attrs.toString()));
                }
                dirContext.modifyAttributes(dn, DirContext.REPLACE_ATTRIBUTE,
                        attrs);
            }
            // update reference fields
            for (String referenceFieldName : referenceFieldList) {
                Reference reference = directory.getReference(referenceFieldName);
                List<String> targetIds = (List<String>) docModel.getProperty(
                        schemaName, referenceFieldName);
                reference.setTargetIdsForSource(docModel.getId(), targetIds);
            }
        } catch (Exception e) {
            throw new DirectoryException("updateEntry failed: "
                    + e.getMessage(), e);
        }
        directory.invalidateCaches();
    }
public void deleteEntry(DocumentModel dm) throws DirectoryException {
deleteEntry(dm.getId());
}
public void deleteEntry(String id) throws DirectoryException {
if (isReadOnly()) {
return;
}
try {
for (String fieldName : schemaFieldMap.keySet()) {
if (directory.isReference(fieldName)) {
Reference reference = directory.getReference(fieldName);
reference.removeLinksForSource(id);
}
}
SearchResult result = getLdapEntry(id);
if (log.isDebugEnabled()) {
log.debug(String.format(
"LDAPSession.deleteEntry(%s): LDAP destroySubcontext dn='%s'",
id, result.getNameInNamespace()));
}
dirContext.destroySubcontext(result.getNameInNamespace());
} catch (Exception e) {
throw new DirectoryException("deleteEntry failed", e);
}
directory.invalidateCaches();
}
public void deleteEntry(String id, Map<String, String> map)
throws DirectoryException {
log.warn("Calling deleteEntry extended on LDAP directory");
deleteEntry(id);
}
    /**
     * Queries the directory with the given filter map.
     * <p>
     * Each filter entry becomes one LDAP sub-filter, combined with the base
     * filter under a conjunction. Values are passed as JNDI filter arguments
     * ({0}, {1}, ...) so special characters are escaped. Fields listed in
     * {@code fulltext} get substring matching according to the configured
     * substring match type (subinitial by default); a null value matches
     * entries missing the attribute; an empty string matches an empty value
     * (or any value, for fulltext fields). Reference fields are ignored as
     * criteria.
     *
     * @param filter field name to value criteria (AND-combined)
     * @param fulltext fields to match by substring instead of equality
     * @param fetchReferences whether to resolve reference fields of results
     * @param orderBy optional ordering applied in memory on the results
     * @return the matching entries
     */
    public DocumentModelList query(Map<String, Object> filter,
            Set<String> fulltext, boolean fetchReferences,
            Map<String, String> orderBy) throws DirectoryException {
        try {
            // building the query using filterExpr / filterArgs to
            // escape special characters and to fulltext search only on
            // the explicitly specified fields
            String[] filters = new String[filter.size()];
            String[] filterArgs = new String[filter.size()];
            if (fulltext == null) {
                fulltext = Collections.emptySet();
            }
            int index = 0;
            for (String fieldName : filter.keySet()) {
                if (directory.isReference(fieldName)) {
                    log.warn(fieldName
                            + " is a reference and will be ignored as a query criterion");
                    continue;
                }
                String backendFieldName = directory.getFieldMapper().getBackendField(
                        fieldName);
                Object fieldValue = filter.get(fieldName);
                StringBuilder currentFilter = new StringBuilder();
                currentFilter.append("(");
                if (fieldValue == null) {
                    // null criterion: match entries missing the attribute
                    currentFilter.append("!(" + backendFieldName + "=*)");
                } else if ("".equals(fieldValue)) {
                    if (fulltext.contains(fieldName)) {
                        currentFilter.append(backendFieldName + "=*");
                    } else {
                        currentFilter.append(backendFieldName + "=");
                    }
                } else {
                    currentFilter.append(backendFieldName + "=");
                    if (fulltext.contains(fieldName)) {
                        // wildcard placement depends on the substring policy
                        if (LDAPSubstringMatchType.SUBFINAL.equals(substringMatchType)) {
                            currentFilter.append("*{" + index + "}");
                        } else if (LDAPSubstringMatchType.SUBANY.equals(substringMatchType)) {
                            currentFilter.append("*{" + index + "}*");
                        } else {
                            // default behavior: subinitial
                            currentFilter.append("{" + index + "}*");
                        }
                    } else {
                        currentFilter.append("{" + index + "}");
                    }
                }
                currentFilter.append(")");
                filters[index] = currentFilter.toString();
                if (fieldValue != null && !"".equals(fieldValue)) {
                    // XXX: what kind of Objects can we get here? Is toString()
                    // enough?
                    filterArgs[index] = fieldValue.toString();
                }
                index++;
            }
            String filterExpr = "(&" + directory.getBaseFilter()
                    + StringUtils.join(filters) + ')';
            SearchControls scts = directory.getSearchControls();
            if (log.isDebugEnabled()) {
                log.debug(String.format(
                        "LDAPSession.query(): LDAP search base=%s filter=%s args=%s scope=%s",
                        searchBaseDn, filterExpr, StringUtils.join(filterArgs,
                                ","), scts.getSearchScope()));
            }
            NamingEnumeration<SearchResult> results = dirContext.search(
                    searchBaseDn, filterExpr, filterArgs, scts);
            DocumentModelList entries = ldapResultsToDocumentModels(results,
                    fetchReferences);
            if (orderBy != null && !orderBy.isEmpty()) {
                directory.orderEntries(entries, orderBy);
            }
            return entries;
        } catch (SizeLimitExceededException e) {
            throw new org.nuxeo.ecm.directory.SizeLimitExceededException(e);
        } catch (NamingException e) {
            throw new DirectoryException("executeQuery failed", e);
        }
    }
public DocumentModelList query(Map<String, Object> filter)
throws DirectoryException {
// by default, do not fetch references of result entries
return query(filter, emptySet, new HashMap<String, String>());
}
public DocumentModelList query(Map<String, Object> filter,
Set<String> fulltext, Map<String, String> orderBy)
throws DirectoryException {
return query(filter, fulltext, false, orderBy);
}
public DocumentModelList query(Map<String, Object> filter,
Set<String> fulltext) throws DirectoryException {
// by default, do not fetch references of result entries
return query(filter, fulltext, new HashMap<String, String>());
}
    /** No-op: LDAP has no transaction support, writes are immediate. */
    public void commit() {
        // No LDAP support for transactions
    }
    /** No-op: LDAP has no transaction support, writes cannot be undone. */
    public void rollback() {
        // No LDAP support for transactions
    }
public void close() throws DirectoryException {
try {
dirContext.close();
directory.removeSession(this);
} catch (NamingException e) {
throw new DirectoryException("close failed", e);
}
}
public List<String> getProjection(Map<String, Object> filter,
String columnName) throws DirectoryException {
return getProjection(filter, emptySet, columnName);
}
public List<String> getProjection(Map<String, Object> filter,
Set<String> fulltext, String columnName) throws DirectoryException {
// XXX: this suboptimal code should be either optimized for LDAP or
// moved to an abstract class
List<String> result = new ArrayList<String>();
DocumentModelList docList = query(filter, fulltext);
String columnNameinDocModel = directory.getFieldMapper().getDirectoryField(
columnName);
for (DocumentModel docModel : docList) {
Object obj = docModel.getProperty(schemaName, columnNameinDocModel);
String propValue;
if (obj instanceof String) {
propValue = (String) obj;
} else {
propValue = String.valueOf(obj);
}
result.add(propValue);
}
return result;
}
    // XXX: this should be moved to an abstract session class
    /**
     * Builds a DocumentModel carrying this session's schema from a plain
     * field map; the id is taken from the directory's configured id field.
     *
     * @param fieldMap directory field name to value map
     * @return the assembled document model
     */
    protected DocumentModel fieldMapToDocumentModel(Map<String, Object> fieldMap) {
        DataModel dataModel = new DataModelImpl(schemaName, fieldMap);
        String id = String.valueOf(fieldMap.get(getIdField()));
        DocumentModelImpl docModel = new DocumentModelImpl(sid, schemaName, id,
                null, null, null, new String[] { schemaName }, null);
        dataModel.setMap(fieldMap);
        docModel.addDataModel(dataModel);
        return docModel;
    }
    /**
     * Converts a raw LDAP attribute into the Java value expected by the schema
     * field: trimmed string, Long, List of trimmed strings, or a Calendar
     * parsed from the LDAP generalized time format (yyyyMMddHHmmss'Z', UTC).
     * Falls back on the field's default value when the attribute is absent,
     * empty or unparsable (logging an error in the unparsable cases).
     *
     * @param attribute the LDAP attribute, possibly null
     * @param fieldName the schema field the value is destined for
     * @param entryId id of the owning entry (currently unused here)
     * @param fetchReferences currently unused here
     * @throws DirectoryException for unsupported field types
     */
    @SuppressWarnings("unchecked")
    protected Object getFieldValue(Attribute attribute, String fieldName,
            String entryId, boolean fetchReferences) throws DirectoryException {
        Field field = schemaFieldMap.get(fieldName);
        Type type = field.getType();
        Object defaultValue = field.getDefaultValue();
        String typeName = type.getName();
        if (attribute == null) {
            return defaultValue;
        }
        Object value;
        try {
            value = attribute.get();
        } catch (NamingException e) {
            throw new DirectoryException("Could not fetch value for "
                    + attribute, e);
        }
        if (value == null) {
            return defaultValue;
        }
        String trimmedValue = value.toString().trim();
        if ("string".equals(typeName)) {
            return trimmedValue;
        } else if ("integer".equals(typeName) || "long".equals(typeName)) {
            if ("".equals(trimmedValue)) {
                return defaultValue;
            }
            try {
                return Long.valueOf(trimmedValue);
            } catch (NumberFormatException e) {
                log.error(String.format(
                        "field %s of type %s has non-numeric value found on server: '%s' (ignoring and using default value instead)",
                        fieldName, typeName, trimmedValue));
                return defaultValue;
            }
        } else if (type.isListType()) {
            // multi-valued attribute: collect every value, trimmed
            List<String> parsedItems = new LinkedList<String>();
            NamingEnumeration<Object> values = null;
            try {
                values = (NamingEnumeration<Object>) attribute.getAll();
                while (values.hasMore()) {
                    parsedItems.add(values.next().toString().trim());
                }
                return parsedItems;
            } catch (NamingException e) {
                log.error(String.format(
                        "field %s of type %s has non list value found on server: '%s' (ignoring and using default value instead)",
                        fieldName, typeName, values != null ? values.toString()
                                : trimmedValue));
                return defaultValue;
            }
        } else if ("date".equals(typeName)) {
            if ("".equals(trimmedValue)) {
                return defaultValue;
            }
            try {
                // LDAP generalized time, interpreted as UTC
                SimpleDateFormat dateFormat = new SimpleDateFormat(
                        "yyyyMMddHHmmss'Z'");
                dateFormat.setTimeZone(new SimpleTimeZone(0, "Z"));
                Date date = dateFormat.parse(trimmedValue);
                Calendar cal = Calendar.getInstance();
                cal.setTime(date);
                return cal;
            } catch (ParseException e) {
                log.error(String.format(
                        "field %s of type %s has invalid value found on server: '%s' (ignoring and using default value instead)",
                        fieldName, typeName, trimmedValue));
                return defaultValue;
            }
        } else {
            throw new DirectoryException(
                    "Field type not supported in directories: " + typeName);
        }
    }
    /**
     * Converts a schema field value into an LDAP attribute ready to be stored:
     * strings and numbers as-is (numbers via toString), lists/arrays as
     * multi-valued attributes, Calendars formatted as LDAP generalized time
     * (yyyyMMddHHmmss'Z', UTC).
     *
     * @param fieldName schema field name, mapped to its backend attribute id
     * @param value the non-null field value to convert
     * @throws DirectoryException for unsupported field types or a list value
     *             that is neither a String[] nor a Collection
     */
    @SuppressWarnings("unchecked")
    protected Attribute getAttributeValue(String fieldName, Object value)
            throws DirectoryException {
        Attribute attribute = new BasicAttribute(
                directory.getFieldMapper().getBackendField(fieldName));
        Type type = schemaFieldMap.get(fieldName).getType();
        String typeName = type.getName();
        if ("string".equals(typeName)) {
            attribute.add(value);
        } else if ("integer".equals(typeName) || "long".equals(typeName)) {
            attribute.add(value.toString());
        } else if (type.isListType()) {
            Collection<String> valueItems;
            if (value instanceof String[]) {
                valueItems = Arrays.asList((String[]) value);
            } else if (value instanceof Collection) {
                valueItems = (Collection<String>) value;
            } else {
                throw new DirectoryException(String.format(
                        "field %s with value %s does not match type %s",
                        fieldName, value.toString(), type.getName()));
            }
            for (String item : valueItems) {
                attribute.add(item);
            }
        } else if ("date".equals(typeName)) {
            // serialize as LDAP generalized time in UTC
            Calendar cal = (Calendar) value;
            Date date = cal.getTime();
            SimpleDateFormat dateFormat = new SimpleDateFormat(
                    "yyyyMMddHHmmss'Z'");
            dateFormat.setTimeZone(new SimpleTimeZone(0, "Z"));
            attribute.add(dateFormat.format(date));
        } else {
            throw new DirectoryException(
                    "Field type not supported in directories: " + typeName);
        }
        return attribute;
    }
protected DocumentModelList ldapResultsToDocumentModels(
NamingEnumeration<SearchResult> results, boolean fetchReferences)
throws DirectoryException, NamingException {
DocumentModelList list = new DocumentModelListImpl();
DocumentModel entry;
while (results.hasMore()) {
SearchResult result = results.next();
entry = ldapResultToDocumentModel(result, null, fetchReferences);
if (entry != null) {
list.add(entry);
}
}
log.debug("LDAP search returned " + list.size() + " results");
return list;
}
    /**
     * Converts one LDAP search result into a document model.
     * <p>
     * Reference fields are resolved through their {@link Reference} (or left
     * as empty lists when {@code fetchReferences} is false); the password
     * field is never fetched; the id field is filled from the result's id
     * attribute when it was not returned by the search, with its case
     * adjusted according to the missingIdFieldCase configuration.
     *
     * @param result the raw LDAP search result
     * @param entryId known entry id, or null to read it from the result
     * @param fetchReferences whether to resolve reference field target ids
     * @return the document model, or null when no id could be determined
     */
    protected DocumentModel ldapResultToDocumentModel(SearchResult result,
            String entryId, boolean fetchReferences) throws DirectoryException,
            NamingException {
        Attributes attributes = result.getAttributes();
        Attribute attribute;
        String attributeId;
        String passwordFieldId = getPasswordField();
        Map<String, Object> fieldMap = new HashMap<String, Object>();
        if (entryId == null) {
            // NXP-2461: check that id field is filled
            attribute = attributes.get(idAttribute);
            if (attribute != null) {
                Object entry = attribute.get();
                if (entry != null) {
                    entryId = entry.toString();
                }
            }
        }
        if (entryId == null) {
            // don't bother
            return null;
        }
        for (String fieldName : schemaFieldMap.keySet()) {
            Reference reference = directory.getReference(fieldName);
            if (reference != null) {
                // reference resolution
                List<String> referencedIds;
                if (!fetchReferences) {
                    referencedIds = new ArrayList<String>();
                } else {
                    if (reference instanceof LDAPReference) {
                        // optim: use the current LDAPSession directly to
                        // provide
                        // the LDAP reference with the needed backend entries
                        LDAPReference ldapReference = (LDAPReference) reference;
                        referencedIds = ldapReference.getLdapTargetIds(attributes);
                    } else {
                        try {
                            referencedIds = reference.getTargetIdsForSource(entryId);
                        } catch (ClientException e) {
                            throw new DirectoryException(e);
                        }
                    }
                }
                fieldMap.put(fieldName, referencedIds);
            } else {
                // manage directly stored fields
                attributeId = directory.getFieldMapper().getBackendField(
                        fieldName);
                attribute = attributes.get(attributeId);
                if (fieldName.equals(passwordFieldId)) {
                    // do not try to fetch the password attribute
                    continue;
                } else {
                    fieldMap.put(fieldName, getFieldValue(attribute, fieldName,
                            entryId, fetchReferences));
                }
            }
        }
        // check if the idAttribute was returned from the search. If not
        // set it anyway.
        String fieldId = directory.getFieldMapper().getDirectoryField(
                idAttribute);
        Object obj = fieldMap.get(fieldId);
        if (obj == null) {
            fieldMap.put(fieldId, changeEntryIdCase(entryId));
        }
        return fieldMapToDocumentModel(fieldMap);
    }
protected String changeEntryIdCase(String id) {
String idFieldCase = directory.getConfig().missingIdFieldCase;
if (MISSING_ID_LOWER_CASE.equals(idFieldCase)) {
return id.toLowerCase();
} else if (MISSING_ID_UPPER_CASE.equals(idFieldCase)) {
return id.toUpperCase();
}
// returns the unchanged id
return id;
}
public boolean authenticate(String username, String password)
throws DirectoryException {
if (password == null || "".equals(password.trim())) {
// never use anonymous bind as a way to authenticate a user in Nuxeo
// EP
return false;
}
// lookup the user: fetch its dn
SearchResult entry;
try {
entry = getLdapEntry(username);
} catch (NamingException e) {
throw new DirectoryException("failed to fetch the ldap entry for "
+ username, e);
}
if (entry == null) {
// no such user => authentication failed
return false;
}
String dn = entry.getNameInNamespace();
Properties env = (Properties) directory.getContextProperties().clone();
env.put(Context.SECURITY_PRINCIPAL, dn);
env.put(Context.SECURITY_CREDENTIALS, password);
try {
// creating a context does a bind
log.debug(String.format("LDAP bind dn='%s'", dn));
// noinspection ResultOfObjectAllocationIgnored
new InitialDirContext(env);
log.debug("Bind succeeded, authentication ok");
return true;
} catch (NamingException e) {
log.debug("Bind failed: " + e.getMessage());
// authentication failed
return false;
}
}
public String getIdField() {
return directory.getConfig().getIdField();
}
public String getPasswordField() {
return directory.getConfig().getPasswordField();
}
public boolean isAuthenticating() throws DirectoryException {
String password = getPasswordField();
return schemaFieldMap.containsKey(password);
}
public boolean isReadOnly() {
return directory.getConfig().getReadOnly();
}
public boolean rdnMatchesIdField() {
return directory.getConfig().rdnAttribute.equals(idAttribute);
}
    /**
     * Introspects the server-side LDAP schema and returns the names of all
     * attributes declared MUST by the directory's creation object classes
     * (ignoring "top", which carries no mandatory attributes of interest).
     *
     * @return the mandatory attribute names, possibly empty
     * @throws DirectoryException wrapping any schema lookup failure
     */
    protected List<String> getMandatoryAttributes() throws DirectoryException {
        try {
            List<String> mandatoryAttributes = new ArrayList<String>();
            DirContext schema = dirContext.getSchema("");
            Attributes attributes;
            Attribute attribute;
            NamingEnumeration<String> values;
            String value;
            List<String> creationClasses = new ArrayList<String>(
                    Arrays.asList(directory.getConfig().getCreationClasses()));
            creationClasses.remove("top");
            for (String creationClass : creationClasses) {
                // query the schema subtree for each object class definition
                attributes = schema.getAttributes("ClassDefinition/"
                        + creationClass);
                attribute = attributes.get("MUST");
                if (attribute != null) {
                    values = (NamingEnumeration<String>) attribute.getAll();
                    while (values.hasMore()) {
                        value = values.next();
                        mandatoryAttributes.add(value);
                    }
                }
            }
            return mandatoryAttributes;
        } catch (NamingException e) {
            throw new DirectoryException("getMandatoryAttributes failed", e);
        }
    }
}
|
nuxeo-platform-directory-ldap/src/main/java/org/nuxeo/ecm/directory/ldap/LDAPSession.java
|
/*
* (C) Copyright 2006-2007 Nuxeo SAS (http://nuxeo.com/) and contributors.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Contributors:
* Nuxeo - initial API and implementation
*
* $Id$
*/
package org.nuxeo.ecm.directory.ldap;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SimpleTimeZone;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.SizeLimitExceededException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.BasicAttribute;
import javax.naming.directory.BasicAttributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.nuxeo.ecm.core.api.ClientException;
import org.nuxeo.ecm.core.api.DataModel;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.DocumentModelList;
import org.nuxeo.ecm.core.api.impl.DataModelImpl;
import org.nuxeo.ecm.core.api.impl.DocumentModelImpl;
import org.nuxeo.ecm.core.api.impl.DocumentModelListImpl;
import org.nuxeo.ecm.core.schema.types.Field;
import org.nuxeo.ecm.core.schema.types.Type;
import org.nuxeo.ecm.core.utils.SIDGenerator;
import org.nuxeo.ecm.directory.Directory;
import org.nuxeo.ecm.directory.DirectoryException;
import org.nuxeo.ecm.directory.DirectoryFieldMapper;
import org.nuxeo.ecm.directory.EntrySource;
import org.nuxeo.ecm.directory.Reference;
import org.nuxeo.ecm.directory.Session;
/**
* This class represents a session against an LDAPDirectory.
*
* @author Olivier Grisel <[email protected]>
*
*/
public class LDAPSession implements Session, EntrySource {
protected static final String MISSING_ID_LOWER_CASE = "lower";
protected static final String MISSING_ID_UPPER_CASE = "upper";
// directory connection parameters
private static final Log log = LogFactory.getLog(LDAPSession.class);
protected final String schemaName;
protected final DirContext dirContext;
protected final String idAttribute;
protected final LDAPDirectory directory;
protected final String searchBaseDn;
protected final Set<String> emptySet = Collections.emptySet();
protected final String sid;
protected final Map<String, Field> schemaFieldMap;
protected final String substringMatchType;
public LDAPSession(LDAPDirectory directory, DirContext dirContext) {
this.directory = directory;
this.dirContext = dirContext;
DirectoryFieldMapper fieldMapper = directory.getFieldMapper();
idAttribute = fieldMapper.getBackendField(directory.getConfig().getIdField());
schemaName = directory.getSchema();
schemaFieldMap = directory.getSchemaFieldMap();
sid = String.valueOf(SIDGenerator.next());
searchBaseDn = directory.getConfig().getSearchBaseDn();
substringMatchType = directory.getConfig().getSubstringMatchType();
}
public Directory getDirectory() {
return directory;
}
public DirContext getContext() {
return dirContext;
}
public DocumentModel createEntry(Map<String, Object> fieldMap)
throws DirectoryException {
if (isReadOnly()) {
return null;
}
List<String> referenceFieldList = new LinkedList<String>();
try {
String dn = String.format("%s=%s,%s", idAttribute,
fieldMap.get(getIdField()),
directory.getConfig().getCreationBaseDn());
Attributes attrs = new BasicAttributes();
Attribute attr;
List<String> mandatoryAttributes = getMandatoryAttributes();
for (String mandatoryAttribute : mandatoryAttributes) {
attr = new BasicAttribute(mandatoryAttribute);
attr.add(" ");
attrs.put(attr);
}
String[] creationClasses = directory.getConfig().getCreationClasses();
if (creationClasses.length != 0) {
attr = new BasicAttribute("objectclass");
for (String creationClasse : creationClasses) {
attr.add(creationClasse);
}
attrs.put(attr);
}
String backendFieldId;
for (String fieldId : fieldMap.keySet()) {
backendFieldId = directory.getFieldMapper().getBackendField(
fieldId);
if (backendFieldId.equals(getPasswordField())) {
attr = new BasicAttribute(backendFieldId);
attr.add(fieldMap.get(fieldId)); // TODO: encode in ssha
// or md5
attrs.put(attr);
} else if (directory.isReference(fieldId)) {
Reference reference = directory.getReference(fieldId);
if (reference instanceof LDAPReference) {
attr = new BasicAttribute(
((LDAPReference) reference).getStaticAttributeId());
attr.add(directory.getConfig().getEmptyRefMarker());
attrs.put(attr);
}
referenceFieldList.add(fieldId);
} else {
Object value = fieldMap.get(fieldId);
if ((value != null) && !value.equals("")) {
attrs.put(getAttributeValue(fieldId, value));
}
}
}
if (log.isDebugEnabled()) {
log.debug(String.format(
"LDAPSession.createEntry(%s): LDAP bind dn=%s attrs=%s",
fieldMap.toString(), dn, attrs.toString()));
}
dirContext.bind(dn, null, attrs);
for (String referenceFieldName : referenceFieldList) {
Reference reference = directory.getReference(referenceFieldName);
List<String> targetIds = (List<String>) fieldMap.get(referenceFieldName);
reference.addLinks((String) fieldMap.get(getIdField()),
targetIds);
}
directory.invalidateCaches();
return fieldMapToDocumentModel(fieldMap);
} catch (Exception e) {
throw new DirectoryException("createEntry failed", e);
}
}
public DocumentModel getEntry(String id) throws DirectoryException {
return directory.getCache().getEntry(id, this);
}
public DocumentModel getEntryFromSource(String id)
throws DirectoryException {
try {
SearchResult result = getLdapEntry(id);
if (result == null) {
return null;
}
// fetch result with references
return ldapResultToDocumentModel(result, id, true);
} catch (NamingException e) {
throw new DirectoryException("getEntry failed: " + e.getMessage(),
e);
}
}
public boolean hasEntry(String id) throws DirectoryException {
try {
// TODO: check directory cache first
return getLdapEntry(id) != null;
} catch (NamingException e) {
throw new DirectoryException("hasEntry failed: " + e.getMessage(),
e);
}
}
protected SearchResult getLdapEntry(String id) throws NamingException,
DirectoryException {
return getLdapEntry(id, false);
}
protected SearchResult getLdapEntry(String id, boolean fetchAllAttributes)
throws NamingException, DirectoryException {
String filterExpr;
if (directory.getBaseFilter().startsWith("(")) {
filterExpr = String.format("(&(%s={0})%s)", idAttribute,
directory.getBaseFilter());
} else {
filterExpr = String.format("(&(%s={0})(%s))", idAttribute,
directory.getBaseFilter());
}
String[] filterArgs = new String[] { id };
SearchControls scts = directory.getSearchControls(fetchAllAttributes);
if (log.isDebugEnabled()) {
log.debug(String.format(
"LDAPSession.getLdapEntry(%s, %s): LDAP search base='%s' filter='%s' "
+ " args='%s' scope='%s'", id, fetchAllAttributes,
searchBaseDn, filterExpr, id, scts.getSearchScope()));
}
NamingEnumeration<SearchResult> results = dirContext.search(
searchBaseDn, filterExpr, filterArgs, scts);
if (!results.hasMore()) {
log.debug("Entry not found: " + id);
return null;
}
SearchResult result = results.next();
if (results.hasMore()) {
log.debug("More than one entry found");
throw new DirectoryException("more than one entry found for " + id);
}
return result;
}
public DocumentModelList getEntries() throws DirectoryException {
try {
SearchControls scts = directory.getSearchControls();
if (log.isDebugEnabled()) {
log.debug(String.format(
"LDAPSession.getEntries(): LDAP search base='%s' filter='%s' "
+ " args=* scope=%s", searchBaseDn,
directory.getBaseFilter(), scts.getSearchScope()));
}
NamingEnumeration<SearchResult> results = dirContext.search(
searchBaseDn, directory.getBaseFilter(), scts);
// skip reference fetching
return ldapResultsToDocumentModels(results, false);
} catch (SizeLimitExceededException e) {
throw new org.nuxeo.ecm.directory.SizeLimitExceededException(e);
} catch (NamingException e) {
throw new DirectoryException("getEntries failed", e);
}
}
    /**
     * Updates the LDAP entry backing the given document model: dirty
     * non-reference fields are written back to the server, dirty reference
     * fields are delegated to their {@link Reference} implementation.
     * All directory caches are invalidated afterwards.
     */
    public void updateEntry(DocumentModel docModel) throws DirectoryException {
        List<String> updateList = new ArrayList<String>();
        List<String> referenceFieldList = new LinkedList<String>();
        try {
            // collect dirty fields, separating references from plain attrs
            DataModel dataModel = docModel.getDataModel(schemaName);
            for (String fieldName : schemaFieldMap.keySet()) {
                if (!dataModel.isDirty(fieldName)) {
                    continue;
                }
                if (directory.isReference(fieldName)) {
                    referenceFieldList.add(fieldName);
                } else {
                    updateList.add(fieldName);
                }
            }
            if (!isReadOnly() && !updateList.isEmpty()) {
                Attributes attrs = new BasicAttributes();
                Attribute attr;
                SearchResult ldapEntry = getLdapEntry(docModel.getId());
                if (ldapEntry == null) {
                    throw new DirectoryException(docModel.getId()
                            + " not found");
                }
                Attributes oldattrs = ldapEntry.getAttributes();
                String dn = ldapEntry.getNameInNamespace();
                Attributes attrsToDel = new BasicAttributes();
                for (String f : updateList) {
                    // TODO: encode password
                    Object value = docModel.getProperty(schemaName, f);
                    String backendField = directory.getFieldMapper().getBackendField(
                            f);
                    if ((value == null) || (value.equals(""))) {
                        // emptied value: mandatory attributes are replaced by
                        // a single space (cannot be removed), others are
                        // scheduled for removal from the entry
                        if (getMandatoryAttributes().contains(backendField)) {
                            attr = new BasicAttribute(backendField);
                            attr.add(" ");
                            attrs.put(attr);
                        } else if (oldattrs.get(backendField) != null) {
                            attr = new BasicAttribute(backendField);
                            attr.add(oldattrs.get(backendField).get());
                            attrsToDel.put(attr);
                        }
                    } else {
                        attrs.put(getAttributeValue(f, value));
                    }
                }
                // removals are applied before replacements
                if (log.isDebugEnabled()) {
                    log.debug(String.format(
                            "LDAPSession.updateEntry(%s): LDAP modifyAttributes dn='%s' mod_op='REMOVE_ATTRIBUTE' attr='%s'",
                            docModel.toString(), dn, attrsToDel.toString()));
                }
                dirContext.modifyAttributes(dn, DirContext.REMOVE_ATTRIBUTE,
                        attrsToDel);
                if (log.isDebugEnabled()) {
                    log.debug(String.format(
                            "LDAPSession.updateEntry(%s): LDAP modifyAttributes dn='%s' mod_op='REPLACE_ATTRIBUTE' attr='%s'",
                            docModel.toString(), dn, attrs.toString()));
                }
                dirContext.modifyAttributes(dn, DirContext.REPLACE_ATTRIBUTE,
                        attrs);
            }
            // update reference fields
            for (String referenceFieldName : referenceFieldList) {
                Reference reference = directory.getReference(referenceFieldName);
                List<String> targetIds = (List<String>) docModel.getProperty(
                        schemaName, referenceFieldName);
                reference.setTargetIdsForSource(docModel.getId(), targetIds);
            }
        } catch (Exception e) {
            throw new DirectoryException("updateEntry failed: "
                    + e.getMessage(), e);
        }
        directory.invalidateCaches();
    }
    /**
     * Deletes the LDAP entry backing the given document model.
     */
    public void deleteEntry(DocumentModel dm) throws DirectoryException {
        deleteEntry(dm.getId());
    }
public void deleteEntry(String id) throws DirectoryException {
if (isReadOnly()) {
return;
}
try {
for (String fieldName : schemaFieldMap.keySet()) {
if (directory.isReference(fieldName)) {
Reference reference = directory.getReference(fieldName);
reference.removeLinksForSource(id);
}
}
SearchResult result = getLdapEntry(id);
if (log.isDebugEnabled()) {
log.debug(String.format(
"LDAPSession.deleteEntry(%s): LDAP destroySubcontext dn='%s'",
id, result.getNameInNamespace()));
}
dirContext.destroySubcontext(result.getNameInNamespace());
} catch (Exception e) {
throw new DirectoryException("deleteEntry failed", e);
}
directory.invalidateCaches();
}
    /**
     * Extended delete variant; the extra criteria map is not supported by
     * the LDAP backend and is ignored (a warning is logged).
     */
    public void deleteEntry(String id, Map<String, String> map)
            throws DirectoryException {
        log.warn("Calling deleteEntry extended on LDAP directory");
        deleteEntry(id);
    }
    /**
     * Queries entries matching the given field filter. Each criterion is
     * rendered as an LDAP filter clause, AND-combined with the directory
     * base filter; values are passed as positional args so the JNDI layer
     * escapes special characters.
     *
     * @param filter field name to value criteria (reference fields ignored)
     * @param fulltext field names to match as substrings instead of exactly
     * @param fetchReferences whether to resolve reference fields of results
     * @param orderBy optional ordering applied to the result list
     */
    public DocumentModelList query(Map<String, Object> filter,
            Set<String> fulltext, boolean fetchReferences,
            Map<String, String> orderBy) throws DirectoryException {
        try {
            // building the query using filterExpr / filterArgs to
            // escape special characters and to fulltext search only on
            // the explicitly specified fields
            String[] filters = new String[filter.size()];
            String[] filterArgs = new String[filter.size()];
            if (fulltext == null) {
                fulltext = Collections.emptySet();
            }
            int index = 0;
            for (String fieldName : filter.keySet()) {
                if (directory.isReference(fieldName)) {
                    // NOTE(review): skipped reference criteria leave trailing
                    // null slots in filters/filterArgs — verify
                    // StringUtils.join tolerates nulls
                    log.warn(fieldName
                            + " is a reference and will be ignored as a query criterion");
                    continue;
                }
                String backendFieldName = directory.getFieldMapper().getBackendField(
                        fieldName);
                Object fieldValue = filter.get(fieldName);
                StringBuilder currentFilter = new StringBuilder();
                currentFilter.append("(");
                if (fieldValue == null) {
                    // null criterion: match entries missing the attribute
                    currentFilter.append("!(" + backendFieldName + "=*)");
                } else if ("".equals(fieldValue)) {
                    // empty criterion: wildcard for fulltext fields,
                    // exact empty match otherwise
                    if (fulltext.contains(fieldName)) {
                        currentFilter.append(backendFieldName + "=*");
                    } else {
                        currentFilter.append(backendFieldName + "=");
                    }
                } else {
                    currentFilter.append(backendFieldName + "=");
                    if (fulltext.contains(fieldName)) {
                        // substring match position depends on the configured
                        // substringMatchType
                        if (LDAPSubstringMatchType.SUBFINAL.equals(substringMatchType)) {
                            currentFilter.append("*{" + index + "}");
                        } else if (LDAPSubstringMatchType.SUBANY.equals(substringMatchType)) {
                            currentFilter.append("*{" + index + "}*");
                        } else {
                            // default behavior: subinitial
                            currentFilter.append("{" + index + "}*");
                        }
                    } else {
                        currentFilter.append("{" + index + "}");
                    }
                }
                currentFilter.append(")");
                filters[index] = currentFilter.toString();
                if (fieldValue != null && !"".equals(fieldValue)) {
                    // XXX: what kind of Objects can we get here? Is toString()
                    // enough?
                    filterArgs[index] = fieldValue.toString();
                }
                index++;
            }
            String filterExpr = "(&" + directory.getBaseFilter()
                    + StringUtils.join(filters) + ')';
            SearchControls scts = directory.getSearchControls();
            if (log.isDebugEnabled()) {
                log.debug(String.format(
                        "LDAPSession.query(): LDAP search base=%s filter=%s args=%s scope=%s",
                        searchBaseDn, filterExpr, StringUtils.join(filterArgs,
                                ","), scts.getSearchScope()));
            }
            NamingEnumeration<SearchResult> results = dirContext.search(
                    searchBaseDn, filterExpr, filterArgs, scts);
            DocumentModelList entries = ldapResultsToDocumentModels(results,
                    fetchReferences);
            if (orderBy != null && !orderBy.isEmpty()) {
                directory.orderEntries(entries, orderBy);
            }
            return entries;
        } catch (SizeLimitExceededException e) {
            throw new org.nuxeo.ecm.directory.SizeLimitExceededException(e);
        } catch (NamingException e) {
            throw new DirectoryException("executeQuery failed", e);
        }
    }
public DocumentModelList query(Map<String, Object> filter)
throws DirectoryException {
// by default, do not fetch references of result entries
return query(filter, emptySet, new HashMap<String, String>());
}
public DocumentModelList query(Map<String, Object> filter,
Set<String> fulltext, Map<String, String> orderBy)
throws DirectoryException {
return query(filter, fulltext, false, orderBy);
}
public DocumentModelList query(Map<String, Object> filter,
Set<String> fulltext) throws DirectoryException {
// by default, do not fetch references of result entries
return query(filter, fulltext, new HashMap<String, String>());
}
    /**
     * No-op: the LDAP backend does not support transactions.
     */
    public void commit() {
        // No LDAP support for transactions
    }
    /**
     * No-op: the LDAP backend does not support transactions.
     */
    public void rollback() {
        // No LDAP support for transactions
    }
    /**
     * Closes the underlying JNDI context, then unregisters this session
     * from its directory.
     */
    public void close() throws DirectoryException {
        try {
            dirContext.close();
            directory.removeSession(this);
        } catch (NamingException e) {
            throw new DirectoryException("close failed", e);
        }
    }
    /**
     * Projects a query on a single column, without fulltext fields.
     */
    public List<String> getProjection(Map<String, Object> filter,
            String columnName) throws DirectoryException {
        return getProjection(filter, emptySet, columnName);
    }
public List<String> getProjection(Map<String, Object> filter,
Set<String> fulltext, String columnName) throws DirectoryException {
// XXX: this suboptimal code should be either optimized for LDAP or
// moved to an abstract class
List<String> result = new ArrayList<String>();
DocumentModelList docList = query(filter, fulltext);
String columnNameinDocModel = directory.getFieldMapper().getDirectoryField(
columnName);
for (DocumentModel docModel : docList) {
Object obj = docModel.getProperty(schemaName, columnNameinDocModel);
String propValue;
if (obj instanceof String) {
propValue = (String) obj;
} else {
propValue = String.valueOf(obj);
}
result.add(propValue);
}
return result;
}
    // XXX: this should be moved to an abstract session class
    /**
     * Wraps a raw field map into a DocumentModel bound to the directory
     * schema; the entry id is taken from the configured id field.
     */
    protected DocumentModel fieldMapToDocumentModel(Map<String, Object> fieldMap) {
        DataModel dataModel = new DataModelImpl(schemaName, fieldMap);
        String id = String.valueOf(fieldMap.get(getIdField()));
        DocumentModelImpl docModel = new DocumentModelImpl(sid, schemaName, id,
                null, null, null, new String[] { schemaName }, null);
        dataModel.setMap(fieldMap);
        docModel.addDataModel(dataModel);
        return docModel;
    }
    /**
     * Converts a raw JNDI attribute into the Java value expected by the
     * schema field type (string, integer/long, list or date). Falls back
     * to the field's default value when the attribute is absent, empty or
     * unparseable (logging an error in the latter case).
     *
     * @param attribute the JNDI attribute, may be null
     * @param fieldName the directory schema field name
     * @param entryId id of the entry being converted
     * @param fetchReferences unused for scalar fields
     * @throws DirectoryException on JNDI read failure or unsupported type
     */
    @SuppressWarnings("unchecked")
    protected Object getFieldValue(Attribute attribute, String fieldName,
            String entryId, boolean fetchReferences) throws DirectoryException {
        Field field = schemaFieldMap.get(fieldName);
        Type type = field.getType();
        Object defaultValue = field.getDefaultValue();
        String typeName = type.getName();
        if (attribute == null) {
            return defaultValue;
        }
        Object value;
        try {
            value = attribute.get();
        } catch (NamingException e) {
            throw new DirectoryException("Could not fetch value for "
                    + attribute, e);
        }
        if (value == null) {
            return defaultValue;
        }
        String trimmedValue = value.toString().trim();
        if ("string".equals(typeName)) {
            return trimmedValue;
        } else if ("integer".equals(typeName) || "long".equals(typeName)) {
            if ("".equals(trimmedValue)) {
                return defaultValue;
            }
            try {
                return Long.valueOf(trimmedValue);
            } catch (NumberFormatException e) {
                // non-numeric server data: log and use the default value
                log.error(String.format(
                        "field %s of type %s has non-numeric value found on server: '%s' (ignoring and using default value instead)",
                        fieldName, typeName, trimmedValue));
                return defaultValue;
            }
        } else if (type.isListType()) {
            // multi-valued attribute: collect all values, trimmed
            List<String> parsedItems = new LinkedList<String>();
            NamingEnumeration<Object> values = null;
            try {
                values = (NamingEnumeration<Object>) attribute.getAll();
                while (values.hasMore()) {
                    parsedItems.add(values.next().toString().trim());
                }
                return parsedItems;
            } catch (NamingException e) {
                log.error(String.format(
                        "field %s of type %s has non list value found on server: '%s' (ignoring and using default value instead)",
                        fieldName, typeName, values != null ? values.toString()
                                : trimmedValue));
                return defaultValue;
            }
        } else if ("date".equals(typeName)) {
            if ("".equals(trimmedValue)) {
                return defaultValue;
            }
            try {
                // LDAP generalized time, e.g. 20090101123000Z, parsed as UTC
                SimpleDateFormat dateFormat = new SimpleDateFormat(
                        "yyyyMMddHHmmss'Z'");
                dateFormat.setTimeZone(new SimpleTimeZone(0, "Z"));
                Date date = dateFormat.parse(trimmedValue);
                Calendar cal = Calendar.getInstance();
                cal.setTime(date);
                return cal;
            } catch (ParseException e) {
                log.error(String.format(
                        "field %s of type %s has invalid value found on server: '%s' (ignoring and using default value instead)",
                        fieldName, typeName, trimmedValue));
                return defaultValue;
            }
        } else {
            throw new DirectoryException(
                    "Field type not supported in directories: " + typeName);
        }
    }
    /**
     * Converts a schema field value into a JNDI attribute suitable for an
     * LDAP write, according to the field type (string, integer/long, list
     * or date). Dates are serialized as UTC generalized time.
     *
     * @throws DirectoryException on unsupported field type or a list value
     *             that is neither a String[] nor a Collection
     */
    @SuppressWarnings("unchecked")
    protected Attribute getAttributeValue(String fieldName, Object value)
            throws DirectoryException {
        Attribute attribute = new BasicAttribute(
                directory.getFieldMapper().getBackendField(fieldName));
        Type type = schemaFieldMap.get(fieldName).getType();
        String typeName = type.getName();
        if ("string".equals(typeName)) {
            attribute.add(value);
        } else if ("integer".equals(typeName) || "long".equals(typeName)) {
            attribute.add(value.toString());
        } else if (type.isListType()) {
            // multi-valued attribute: one value per list item
            Collection<String> valueItems;
            if (value instanceof String[]) {
                valueItems = Arrays.asList((String[]) value);
            } else if (value instanceof Collection) {
                valueItems = (Collection<String>) value;
            } else {
                throw new DirectoryException(String.format(
                        "field %s with value %s does not match type %s",
                        fieldName, value.toString(), type.getName()));
            }
            for (String item : valueItems) {
                attribute.add(item);
            }
        } else if ("date".equals(typeName)) {
            // serialize as LDAP generalized time in UTC
            Calendar cal = (Calendar) value;
            Date date = cal.getTime();
            SimpleDateFormat dateFormat = new SimpleDateFormat(
                    "yyyyMMddHHmmss'Z'");
            dateFormat.setTimeZone(new SimpleTimeZone(0, "Z"));
            attribute.add(dateFormat.format(date));
        } else {
            throw new DirectoryException(
                    "Field type not supported in directories: " + typeName);
        }
        return attribute;
    }
protected DocumentModelList ldapResultsToDocumentModels(
NamingEnumeration<SearchResult> results, boolean fetchReferences)
throws DirectoryException, NamingException {
DocumentModelList list = new DocumentModelListImpl();
DocumentModel entry;
while (results.hasMore()) {
SearchResult result = results.next();
entry = ldapResultToDocumentModel(result, null, fetchReferences);
if (entry != null) {
list.add(entry);
}
}
log.debug("LDAP search returned " + list.size() + " results");
return list;
}
protected DocumentModel ldapResultToDocumentModel(SearchResult result,
String entryId, boolean fetchReferences) throws DirectoryException,
NamingException {
Attributes attributes = result.getAttributes();
Attribute attribute;
String attributeId;
String passwordFieldId = getPasswordField();
Map<String, Object> fieldMap = new HashMap<String, Object>();
if (entryId == null) {
// NXP-2461: check that id field is filled
attribute = attributes.get(idAttribute);
if (attribute != null) {
Object entry = attribute.get();
if (entry != null) {
entryId = entry.toString();
}
}
}
if (entryId == null) {
// don't bother
return null;
}
for (String fieldName : schemaFieldMap.keySet()) {
Reference reference = directory.getReference(fieldName);
if (reference != null) {
// reference resolution
List<String> referencedIds;
if (!fetchReferences) {
referencedIds = new ArrayList<String>();
}
if (reference instanceof LDAPReference) {
// optim: use the current LDAPSession directly to provide
// the LDAP reference with the needed backend entries
LDAPReference ldapReference = (LDAPReference) reference;
referencedIds = ldapReference.getLdapTargetIds(attributes);
} else {
try {
referencedIds = reference.getTargetIdsForSource(entryId);
} catch (ClientException e) {
throw new DirectoryException(e);
}
}
fieldMap.put(fieldName, referencedIds);
} else {
// manage directly stored fields
attributeId = directory.getFieldMapper().getBackendField(
fieldName);
attribute = attributes.get(attributeId);
if (fieldName.equals(passwordFieldId)) {
// do not try to fetch the password attribute
continue;
} else {
fieldMap.put(fieldName, getFieldValue(attribute, fieldName,
entryId, fetchReferences));
}
}
}
// check if the idAttribute was returned from the search. If not
// set it anyway.
String fieldId = directory.getFieldMapper().getDirectoryField(
idAttribute);
Object obj = fieldMap.get(fieldId);
if (obj == null) {
fieldMap.put(fieldId, changeEntryIdCase(entryId));
}
return fieldMapToDocumentModel(fieldMap);
}
protected String changeEntryIdCase(String id) {
String idFieldCase = directory.getConfig().missingIdFieldCase;
if (MISSING_ID_LOWER_CASE.equals(idFieldCase)) {
return id.toLowerCase();
} else if (MISSING_ID_UPPER_CASE.equals(idFieldCase)) {
return id.toUpperCase();
}
// returns the unchanged id
return id;
}
    /**
     * Authenticates a user by looking up its dn and performing an LDAP
     * bind with the supplied password. Blank passwords are refused so an
     * anonymous bind can never pass as a successful authentication.
     *
     * @return true if the bind succeeded, false otherwise
     */
    public boolean authenticate(String username, String password)
            throws DirectoryException {
        if (password == null || "".equals(password.trim())) {
            // never use anonymous bind as a way to authenticate a user in Nuxeo
            // EP
            return false;
        }
        // lookup the user: fetch its dn
        SearchResult entry;
        try {
            entry = getLdapEntry(username);
        } catch (NamingException e) {
            throw new DirectoryException("failed to fetch the ldap entry for "
                    + username, e);
        }
        if (entry == null) {
            // no such user => authentication failed
            return false;
        }
        String dn = entry.getNameInNamespace();
        // rebuild the context environment with the user's own credentials
        Properties env = (Properties) directory.getContextProperties().clone();
        env.put(Context.SECURITY_PRINCIPAL, dn);
        env.put(Context.SECURITY_CREDENTIALS, password);
        try {
            // creating a context does a bind
            log.debug(String.format("LDAP bind dn='%s'", dn));
            // noinspection ResultOfObjectAllocationIgnored
            new InitialDirContext(env);
            log.debug("Bind succeeded, authentication ok");
            return true;
        } catch (NamingException e) {
            log.debug("Bind failed: " + e.getMessage());
            // authentication failed
            return false;
        }
    }
    /** Returns the id field name from the directory configuration. */
    public String getIdField() {
        return directory.getConfig().getIdField();
    }
    /** Returns the password field name from the directory configuration. */
    public String getPasswordField() {
        return directory.getConfig().getPasswordField();
    }
public boolean isAuthenticating() throws DirectoryException {
String password = getPasswordField();
return schemaFieldMap.containsKey(password);
}
    /** Tells whether the directory is configured read-only. */
    public boolean isReadOnly() {
        return directory.getConfig().getReadOnly();
    }
    /** Tells whether the configured RDN attribute is also the id attribute. */
    public boolean rdnMatchesIdField() {
        return directory.getConfig().rdnAttribute.equals(idAttribute);
    }
    /**
     * Queries the LDAP server schema for the MUST attributes of all
     * configured creation classes (except "top"), i.e. the attributes
     * that can never be removed from an entry.
     */
    protected List<String> getMandatoryAttributes() throws DirectoryException {
        try {
            List<String> mandatoryAttributes = new ArrayList<String>();
            DirContext schema = dirContext.getSchema("");
            Attributes attributes;
            Attribute attribute;
            NamingEnumeration<String> values;
            String value;
            List<String> creationClasses = new ArrayList<String>(
                    Arrays.asList(directory.getConfig().getCreationClasses()));
            creationClasses.remove("top");
            for (String creationClass : creationClasses) {
                attributes = schema.getAttributes("ClassDefinition/"
                        + creationClass);
                attribute = attributes.get("MUST");
                if (attribute != null) {
                    // NOTE(review): unchecked cast — presumably safe since
                    // schema MUST values are strings; no @SuppressWarnings here
                    values = (NamingEnumeration<String>) attribute.getAll();
                    while (values.hasMore()) {
                        value = values.next();
                        mandatoryAttributes.add(value);
                    }
                }
            }
            return mandatoryAttributes;
        } catch (NamingException e) {
            throw new DirectoryException("getMandatoryAttributes failed", e);
        }
    }
}
|
NXP-2666: Avoid fetching references when searching on ldap directory
|
nuxeo-platform-directory-ldap/src/main/java/org/nuxeo/ecm/directory/ldap/LDAPSession.java
|
NXP-2666: Avoid fetching references when searching on ldap directory
|
<ide><path>uxeo-platform-directory-ldap/src/main/java/org/nuxeo/ecm/directory/ldap/LDAPSession.java
<ide> List<String> referencedIds;
<ide> if (!fetchReferences) {
<ide> referencedIds = new ArrayList<String>();
<del> }
<del> if (reference instanceof LDAPReference) {
<del> // optim: use the current LDAPSession directly to provide
<del> // the LDAP reference with the needed backend entries
<del> LDAPReference ldapReference = (LDAPReference) reference;
<del> referencedIds = ldapReference.getLdapTargetIds(attributes);
<ide> } else {
<del> try {
<del> referencedIds = reference.getTargetIdsForSource(entryId);
<del> } catch (ClientException e) {
<del> throw new DirectoryException(e);
<add> if (reference instanceof LDAPReference) {
<add> // optim: use the current LDAPSession directly to
<add> // provide
<add> // the LDAP reference with the needed backend entries
<add> LDAPReference ldapReference = (LDAPReference) reference;
<add> referencedIds = ldapReference.getLdapTargetIds(attributes);
<add> } else {
<add> try {
<add> referencedIds = reference.getTargetIdsForSource(entryId);
<add> } catch (ClientException e) {
<add> throw new DirectoryException(e);
<add> }
<ide> }
<ide> }
<ide> fieldMap.put(fieldName, referencedIds);
|
|
Java
|
apache-2.0
|
a496f8958ddde5a6942f033c088fbf913cb0e3d5
| 0 |
eBaoTech/pinpoint,barneykim/pinpoint,eBaoTech/pinpoint,87439247/pinpoint,barneykim/pinpoint,chenguoxi1985/pinpoint,denzelsN/pinpoint,87439247/pinpoint,minwoo-jung/pinpoint,andyspan/pinpoint,emeroad/pinpoint,naver/pinpoint,jiaqifeng/pinpoint,koo-taejin/pinpoint,majinkai/pinpoint,87439247/pinpoint,majinkai/pinpoint,barneykim/pinpoint,Xylus/pinpoint,emeroad/pinpoint,koo-taejin/pinpoint,chenguoxi1985/pinpoint,minwoo-jung/pinpoint,jaehong-kim/pinpoint,suraj-raturi/pinpoint,minwoo-jung/pinpoint,naver/pinpoint,denzelsN/pinpoint,Xylus/pinpoint,suraj-raturi/pinpoint,denzelsN/pinpoint,87439247/pinpoint,suraj-raturi/pinpoint,minwoo-jung/pinpoint,Xylus/pinpoint,jaehong-kim/pinpoint,majinkai/pinpoint,denzelsN/pinpoint,emeroad/pinpoint,andyspan/pinpoint,jaehong-kim/pinpoint,minwoo-jung/pinpoint,andyspan/pinpoint,andyspan/pinpoint,naver/pinpoint,chenguoxi1985/pinpoint,87439247/pinpoint,Xylus/pinpoint,denzelsN/pinpoint,denzelsN/pinpoint,eBaoTech/pinpoint,jaehong-kim/pinpoint,barneykim/pinpoint,naver/pinpoint,naver/pinpoint,denzelsN/pinpoint,majinkai/pinpoint,koo-taejin/pinpoint,barneykim/pinpoint,koo-taejin/pinpoint,chenguoxi1985/pinpoint,andyspan/pinpoint,eBaoTech/pinpoint,Xylus/pinpoint,barneykim/pinpoint,emeroad/pinpoint,andyspan/pinpoint,chenguoxi1985/pinpoint,suraj-raturi/pinpoint,suraj-raturi/pinpoint,majinkai/pinpoint,chenguoxi1985/pinpoint,barneykim/pinpoint,jiaqifeng/pinpoint,jaehong-kim/pinpoint,jiaqifeng/pinpoint,jiaqifeng/pinpoint,eBaoTech/pinpoint,suraj-raturi/pinpoint,jaehong-kim/pinpoint,jiaqifeng/pinpoint,minwoo-jung/pinpoint,emeroad/pinpoint,majinkai/pinpoint,jiaqifeng/pinpoint,Xylus/pinpoint,Xylus/pinpoint,koo-taejin/pinpoint,87439247/pinpoint,emeroad/pinpoint,eBaoTech/pinpoint,koo-taejin/pinpoint
|
/*
* Copyright 2016 Naver Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.plugin.jdk7.activemq.client;
import com.navercorp.pinpoint.bootstrap.plugin.test.ExpectedTrace;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifier;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifierHolder;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.ActiveMQClientITHelper;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.AssertTextMessageListener;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.MessageConsumerBuilder;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.MessageProducerBuilder;
import org.apache.activemq.ActiveMQMessageConsumer;
import org.apache.activemq.ActiveMQSession;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.command.MessageDispatch;
import org.junit.Assert;
import org.junit.Test;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.TextMessage;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.Collection;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.annotation;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.event;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.root;
/**
* @author HyunGil Jeong
*/
public abstract class ActiveMQClientITBase {
    /** Service type code expected on ActiveMQ client producer/consumer traces. */
    public static final String ACTIVEMQ_CLIENT = "ACTIVEMQ_CLIENT";
    /** Service type code expected on internal ActiveMQ client trace events. */
    public static final String ACTIVEMQ_CLIENT_INTERNAL = "ACTIVEMQ_CLIENT_INTERNAL";
    // Broker coordinates are supplied by concrete subclasses so the same
    // scenarios can run against different broker topologies.
    protected abstract String getProducerBrokerName();
    protected abstract String getProducerBrokerUrl();
    protected abstract String getConsumerBrokerName();
    protected abstract String getConsumerBrokerUrl();
    /**
     * Producer sends a text message to a queue; a consumer synchronously
     * pulls it via receive(long). Verifies 1 producer trace and 4 consumer
     * pull traces.
     */
    @Test
    public void testQueuePull() throws Exception {
        // Given
        final String testQueueName = "TestPullQueue";
        final ActiveMQQueue testQueue = new ActiveMQQueue(testQueueName);
        final String testMessage = "Hello World for Queue!";
        // create producer
        ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
        MessageProducer producer = producerSession.createProducer(testQueue);
        final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
        // When
        ActiveMQSession consumerSession = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        MessageConsumer consumer = consumerSession.createConsumer(testQueue);
        // Then
        producer.send(expectedTextMessage);
        Message message = consumer.receive(1000L);
        Assert.assertEquals(testMessage, ((TextMessage) message).getText());
        // Wait till all traces are recorded (consumer traces are recorded from another thread)
        awaitAndVerifyTraceCount(5, 5000L);
        verifyProducerSendEvent(testQueue, producerSession); // trace count : 1
        verifyConsumerPullEvent(testQueue, consumerSession, consumer, expectedTextMessage); // trace count : 4
    }
    /**
     * Producer sends a text message to a topic; two consumers each pull it
     * synchronously. Verifies 1 producer trace plus 4 pull traces per
     * consumer (9 total).
     */
    @Test
    public void testTopicPull() throws Exception {
        // Given
        final String testTopicName = "TestPullTopic";
        final ActiveMQTopic testTopic = new ActiveMQTopic(testTopicName);
        final String testMessage = "Hello World for Topic!";
        // create producer
        ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
        MessageProducer producer = new MessageProducerBuilder(producerSession, testTopic).waitTillStarted().build();
        final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
        // create 2 consumers
        ActiveMQSession consumer1Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        MessageConsumer consumer1 = new MessageConsumerBuilder(consumer1Session, testTopic).waitTillStarted().build();
        ActiveMQSession consumer2Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        MessageConsumer consumer2 = new MessageConsumerBuilder(consumer2Session, testTopic).waitTillStarted().build();
        // When
        producer.send(expectedTextMessage);
        Message message1 = consumer1.receive(1000L);
        Message message2 = consumer2.receive(1000L);
        Assert.assertEquals(testMessage, ((TextMessage) message1).getText());
        Assert.assertEquals(testMessage, ((TextMessage) message2).getText());
        // Wait till all traces are recorded (consumer traces are recorded from another thread)
        awaitAndVerifyTraceCount(9, 5000L);
        verifyProducerSendEvent(testTopic, producerSession); // trace count : 1
        verifyConsumerPullEvent(testTopic, consumer1Session, consumer1, expectedTextMessage); // trace count : 4
        verifyConsumerPullEvent(testTopic, consumer2Session, consumer2, expectedTextMessage); // trace count : 4
    }
    /**
     * Producer sends a text message to a queue; a MessageListener consumes
     * it asynchronously (push). Verifies 1 producer trace and 1 push trace.
     */
    @Test
    public void testQueuePush() throws Exception {
        // Given
        final String testQueueName = "TestPushQueue";
        final ActiveMQQueue testQueue = new ActiveMQQueue(testQueueName);
        final String testMessage = "Hello World for Queue!";
        final CountDownLatch consumerLatch = new CountDownLatch(1);
        final Collection<Throwable> consumerThrowables = new CopyOnWriteArrayList<Throwable>();
        // create producer
        ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
        MessageProducer producer = producerSession.createProducer(testQueue);
        final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
        // create consumer
        ActiveMQSession consumerSession = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        MessageConsumer consumer = consumerSession.createConsumer(testQueue);
        consumer.setMessageListener(new AssertTextMessageListener(consumerLatch, consumerThrowables, expectedTextMessage));
        // When
        producer.send(expectedTextMessage);
        // NOTE(review): await() return value is ignored — a timeout would
        // surface through the assertions below anyway
        consumerLatch.await(1L, TimeUnit.SECONDS);
        // Then
        assertNoConsumerError(consumerThrowables);
        // Wait till all traces are recorded (consumer traces are recorded from another thread)
        awaitAndVerifyTraceCount(2, 5000L);
        verifyProducerSendEvent(testQueue, producerSession); // trace count : 1
        verifyConsumerPushEvent(testQueue, consumerSession); // trace count : 1
    }
    /**
     * Producer sends a text message to a topic; two MessageListener
     * consumers receive it asynchronously (push). Verifies 1 producer
     * trace plus 1 push trace per consumer (3 total).
     */
    @Test
    public void testTopicPush() throws Exception {
        // Given
        final String testTopicName = "TestPushTopic";
        final ActiveMQTopic testTopic = new ActiveMQTopic(testTopicName);
        final String testMessage = "Hello World for Topic!";
        final int numMessageConsumers = 2;
        final CountDownLatch consumerConsumeLatch = new CountDownLatch(numMessageConsumers);
        final Collection<Throwable> consumerThrowables = new CopyOnWriteArrayList<Throwable>();
        // create producer
        ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
        MessageProducer producer = new MessageProducerBuilder(producerSession, testTopic).waitTillStarted().build();
        final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
        // create 2 consumers
        ActiveMQSession consumer1Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        new MessageConsumerBuilder(consumer1Session, testTopic)
                .withMessageListener(new AssertTextMessageListener(consumerConsumeLatch, consumerThrowables, expectedTextMessage))
                .waitTillStarted()
                .build();
        ActiveMQSession consumer2Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        new MessageConsumerBuilder(consumer2Session, testTopic)
                .withMessageListener(new AssertTextMessageListener(consumerConsumeLatch, consumerThrowables, expectedTextMessage))
                .waitTillStarted()
                .build();
        // When
        producer.send(expectedTextMessage);
        consumerConsumeLatch.await(1L, TimeUnit.SECONDS);
        // Then
        // NOTE(review): unlike testQueuePush, consumerThrowables is not
        // asserted here — consider assertNoConsumerError(consumerThrowables)
        // Wait till all traces are recorded (consumer traces are recorded from another thread)
        awaitAndVerifyTraceCount(3, 1000L);
        verifyProducerSendEvent(testTopic, producerSession); // trace count : 1
        verifyConsumerPushEvent(testTopic, consumer1Session); // trace count : 1
        verifyConsumerPushEvent(testTopic, consumer2Session); // trace count : 1
    }
/**
* Verifies traced span event for when {@link org.apache.activemq.ActiveMQMessageProducer ActiveMQMessageProducer}
* sends the message. (trace count : 1)
*
* @param destination the destination to which the producer is sending the message
* @throws Exception
*/
private void verifyProducerSendEvent(ActiveMQDestination destination, ActiveMQSession session) throws Exception {
PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
verifier.printCache();
Class<?> messageProducerClass = Class.forName("org.apache.activemq.ActiveMQMessageProducer");
Method send = messageProducerClass.getDeclaredMethod("send", Destination.class, Message.class, int.class, int.class, long.class);
// URI producerBrokerUri = new URI(getProducerBrokerUrl());
// String expectedEndPoint = getProducerBrokerUri.getHost() + ":" + producerBrokerUri.getPort();
// String expectedEndPoint = producerBrokerUri.toString();
String expectedEndPoint = session.getConnection().getTransport().getRemoteAddress();
verifier.verifyDiscreteTrace(event(
ACTIVEMQ_CLIENT, // serviceType
send, // method
null, // rpc
expectedEndPoint, // endPoint
destination.getPhysicalName(), // destinationId
annotation("message.queue.url", destination.getQualifiedName()),
annotation("activemq.broker.address", expectedEndPoint)
));
}
/**
* Verifies spans and span events for when {@link ActiveMQMessageConsumer} receives the message and enqueues it to
* the {@link org.apache.activemq.MessageDispatchChannel MessageDispatchChannel}. The client then invokes any of
* {@link ActiveMQMessageConsumer#receive() receive()}, {@link ActiveMQMessageConsumer#receive(long) receive(long)},
* or {@link ActiveMQMessageConsumer#receiveNoWait() receiveNotWait()} to retrieve the message. (trace count : 4)
*
* @param destination the destination from which the consumer is receiving the message
* @param expectedMessage the message the consumer is expected to receive
* @throws Exception
*/
private void verifyConsumerPullEvent(ActiveMQDestination destination, ActiveMQSession session, MessageConsumer consumer, Message expectedMessage) throws Exception {
PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
verifier.printCache();
Class<?> messageConsumerClass = Class.forName("org.apache.activemq.ActiveMQMessageConsumer");
Method receiveWithTimeout = messageConsumerClass.getDeclaredMethod("receive", long.class);
// URI consumerBrokerUri = new URI(getConsumerBrokerUrl());
// String expectedEndPoint = consumerBrokerUri.toString();
String expectedEndPoint = session.getConnection().getTransport().getRemoteAddress();
ExpectedTrace consumerDispatchTrace = root(ACTIVEMQ_CLIENT, // serviceType
"ActiveMQ Consumer Invocation", // method
destination.getQualifiedName(), // rpc
null, // endPoint (collected but there's no easy way to retrieve local address)
expectedEndPoint);
ExpectedTrace consumerReceiveTrace = event(ACTIVEMQ_CLIENT_INTERNAL, // serviceType
receiveWithTimeout, // method
annotation("activemq.message", getMessageAsString(expectedMessage)));
Class<?> messageDispatchChannel = getMessageDispatchChannelClass(consumer);
if (messageDispatchChannel != null) {
Method enqueue = messageDispatchChannel.getDeclaredMethod("enqueue", MessageDispatch.class);
Method dequeueWithTimeout = messageDispatchChannel.getDeclaredMethod("dequeue", long.class);
// Consumer dispatches and enqueues the message to dispatch channel automatically
verifier.verifyDiscreteTrace(consumerDispatchTrace, event(ACTIVEMQ_CLIENT_INTERNAL, enqueue));
// Client receives the message by dequeueing it from the dispatch channel
verifier.verifyDiscreteTrace(consumerReceiveTrace, event(ACTIVEMQ_CLIENT_INTERNAL, dequeueWithTimeout));
} else {
// Consumer dispatches and enqueues the message to dispatch channel automatically
verifier.verifyDiscreteTrace(consumerDispatchTrace);
// Client receives the message by dequeueing it from the dispatch channel
verifier.verifyDiscreteTrace(consumerReceiveTrace);
}
}
/**
* Verifies spans and span events for when {@link ActiveMQMessageConsumer} receives the message and invokes it's
* {@link javax.jms.MessageListener MessageListener}. (trace count : 1)
*
* @param destination the destination from which the consumer is receiving the message
* @throws Exception
*/
private void verifyConsumerPushEvent(ActiveMQDestination destination, ActiveMQSession session) throws Exception {
PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
// URI consumerBrokerUri = new URI(getConsumerBrokerUrl());
// String expectedRemoteAddress = consumerBrokerUri.toString();
String expectedRemoteAddress = session.getConnection().getTransport().getRemoteAddress();
verifier.verifyDiscreteTrace(root(
ACTIVEMQ_CLIENT, // serviceType
"ActiveMQ Consumer Invocation", // method
destination.getQualifiedName(), // rpc
null, // endPoint (collected but there's no easy way to retrieve local address so skip check)
expectedRemoteAddress // remoteAddress
));
}
private Class<?> getMessageDispatchChannelClass(MessageConsumer consumer) throws NoSuchFieldException, IllegalAccessException {
final String messageDispatchChannelFieldName = "unconsumedMessages";
Class<?> consumerClass = consumer.getClass();
// Need a better way as field names could change in future versions. Comparing classes or class names doesn't
// work due to class loading issue, and some versions may not have certain implementations of
// MessageDispatchChannel.
// Test should be fixed if anything changes in future ActiveMQClient library
Field messageDispatchChannelField = consumerClass.getDeclaredField(messageDispatchChannelFieldName);
messageDispatchChannelField.setAccessible(true);
return messageDispatchChannelField.get(consumer).getClass();
}
private String getMessageAsString(Message message) throws JMSException {
StringBuilder messageStringBuilder = new StringBuilder(message.getClass().getSimpleName());
if (message instanceof TextMessage) {
messageStringBuilder.append('{').append(((TextMessage) message).getText()).append('}');
}
return messageStringBuilder.toString();
}
protected final void assertNoConsumerError(Collection<Throwable> consumerThrowables) {
Assert.assertTrue("Consumer Error : " + consumerThrowables.toString(), consumerThrowables.isEmpty());
}
protected final void awaitAndVerifyTraceCount(int expectedTraceCount, long maxWaitMs) throws InterruptedException {
PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
final long waitIntervalMs = 100L;
long maxWaitTime = maxWaitMs;
if (maxWaitMs < waitIntervalMs) {
maxWaitTime = waitIntervalMs;
}
long startTime = System.currentTimeMillis();
while (System.currentTimeMillis() - startTime < maxWaitTime) {
try {
verifier.verifyTraceCount(expectedTraceCount);
return;
} catch (AssertionError e) {
// ignore and retry
Thread.sleep(waitIntervalMs);
}
}
verifier.printCache();
verifier.verifyTraceCount(expectedTraceCount);
}
}
|
agent/src/test/java/com/navercorp/pinpoint/plugin/jdk7/activemq/client/ActiveMQClientITBase.java
|
/*
* Copyright 2016 Naver Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.plugin.jdk7.activemq.client;
import com.navercorp.pinpoint.bootstrap.plugin.test.ExpectedTrace;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifier;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifierHolder;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.ActiveMQClientITHelper;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.AssertTextMessageListener;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.MessageConsumerBuilder;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.MessageProducerBuilder;
import org.apache.activemq.ActiveMQMessageConsumer;
import org.apache.activemq.ActiveMQSession;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.command.MessageDispatch;
import org.junit.Assert;
import org.junit.Test;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.TextMessage;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.Collection;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.annotation;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.event;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.root;
/**
* @author HyunGil Jeong
*/
public abstract class ActiveMQClientITBase {
public static final String ACTIVEMQ_CLIENT = "ACTIVEMQ_CLIENT";
public static final String ACTIVEMQ_CLIENT_INTERNAL = "ACTIVEMQ_CLIENT_INTERNAL";
protected abstract String getProducerBrokerName();
protected abstract String getProducerBrokerUrl();
protected abstract String getConsumerBrokerName();
protected abstract String getConsumerBrokerUrl();
@Test
public void testQueuePull() throws Exception {
// Given
final String testQueueName = "TestPullQueue";
final ActiveMQQueue testQueue = new ActiveMQQueue(testQueueName);
final String testMessage = "Hello World for Queue!";
// create producer
ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
MessageProducer producer = producerSession.createProducer(testQueue);
final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
// When
ActiveMQSession consumerSession = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
MessageConsumer consumer = consumerSession.createConsumer(testQueue);
// Then
producer.send(expectedTextMessage);
Message message = consumer.receive(1000L);
Assert.assertEquals(testMessage, ((TextMessage) message).getText());
// Wait till all traces are recorded (consumer traces are recorded from another thread)
awaitAndVerifyTraceCount(5, 5000L);
verifyProducerSendEvent(testQueue); // trace count : 1
verifyConsumerPullEvent(testQueue, consumer, expectedTextMessage); // trace count : 4
}
@Test
public void testTopicPull() throws Exception {
// Given
final String testTopicName = "TestPullTopic";
final ActiveMQTopic testTopic = new ActiveMQTopic(testTopicName);
final String testMessage = "Hello World for Topic!";
// create producer
ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
MessageProducer producer = new MessageProducerBuilder(producerSession, testTopic).waitTillStarted().build();
final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
// create 2 consumers
ActiveMQSession consumer1Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
MessageConsumer consumer1 = new MessageConsumerBuilder(consumer1Session, testTopic).waitTillStarted().build();
ActiveMQSession consumer2Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
MessageConsumer consumer2 = new MessageConsumerBuilder(consumer2Session, testTopic).waitTillStarted().build();
// When
producer.send(expectedTextMessage);
Message message1 = consumer1.receive(1000L);
Message message2 = consumer2.receive(1000L);
Assert.assertEquals(testMessage, ((TextMessage) message1).getText());
Assert.assertEquals(testMessage, ((TextMessage) message2).getText());
// Wait till all traces are recorded (consumer traces are recorded from another thread)
awaitAndVerifyTraceCount(9, 5000L);
verifyProducerSendEvent(testTopic); // trace count : 1
verifyConsumerPullEvent(testTopic, consumer1, expectedTextMessage); // trace count : 4
verifyConsumerPullEvent(testTopic, consumer2, expectedTextMessage); // trace count : 4
}
@Test
public void testQueuePush() throws Exception {
// Given
final String testQueueName = "TestPushQueue";
final ActiveMQQueue testQueue = new ActiveMQQueue(testQueueName);
final String testMessage = "Hello World for Queue!";
final CountDownLatch consumerLatch = new CountDownLatch(1);
final Collection<Throwable> consumerThrowables = new CopyOnWriteArrayList<Throwable>();
// create producer
ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
MessageProducer producer = producerSession.createProducer(testQueue);
final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
// create consumer
ActiveMQSession consumerSession = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
MessageConsumer consumer = consumerSession.createConsumer(testQueue);
consumer.setMessageListener(new AssertTextMessageListener(consumerLatch, consumerThrowables, expectedTextMessage));
// When
producer.send(expectedTextMessage);
consumerLatch.await(1L, TimeUnit.SECONDS);
// Then
assertNoConsumerError(consumerThrowables);
// Wait till all traces are recorded (consumer traces are recorded from another thread)
awaitAndVerifyTraceCount(2, 5000L);
verifyProducerSendEvent(testQueue); // trace count : 1
verifyConsumerPushEvent(testQueue); // trace count : 1
}
@Test
public void testTopicPush() throws Exception {
// Given
final String testTopicName = "TestPushTopic";
final ActiveMQTopic testTopic = new ActiveMQTopic(testTopicName);
final String testMessage = "Hello World for Topic!";
final int numMessageConsumers = 2;
final CountDownLatch consumerConsumeLatch = new CountDownLatch(numMessageConsumers);
final Collection<Throwable> consumerThrowables = new CopyOnWriteArrayList<Throwable>();
// create producer
ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
MessageProducer producer = new MessageProducerBuilder(producerSession, testTopic).waitTillStarted().build();
final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
// create 2 consumers
ActiveMQSession consumer1Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
new MessageConsumerBuilder(consumer1Session, testTopic)
.withMessageListener(new AssertTextMessageListener(consumerConsumeLatch, consumerThrowables, expectedTextMessage))
.waitTillStarted()
.build();
ActiveMQSession consumer2Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
new MessageConsumerBuilder(consumer2Session, testTopic)
.withMessageListener(new AssertTextMessageListener(consumerConsumeLatch, consumerThrowables, expectedTextMessage))
.waitTillStarted()
.build();
// When
producer.send(expectedTextMessage);
consumerConsumeLatch.await(1L, TimeUnit.SECONDS);
// Then
// Wait till all traces are recorded (consumer traces are recorded from another thread)
awaitAndVerifyTraceCount(3, 1000L);
verifyProducerSendEvent(testTopic); // trace count : 1
verifyConsumerPushEvent(testTopic); // trace count : 1
verifyConsumerPushEvent(testTopic); // trace count : 1
}
/**
* Verifies traced span event for when {@link org.apache.activemq.ActiveMQMessageProducer ActiveMQMessageProducer}
* sends the message. (trace count : 1)
*
* @param destination the destination to which the producer is sending the message
* @throws Exception
*/
private void verifyProducerSendEvent(ActiveMQDestination destination) throws Exception {
PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
Class<?> messageProducerClass = Class.forName("org.apache.activemq.ActiveMQMessageProducer");
Method send = messageProducerClass.getDeclaredMethod("send", Destination.class, Message.class, int.class, int.class, long.class);
URI producerBrokerUri = new URI(getProducerBrokerUrl());
verifier.verifyDiscreteTrace(event(
ACTIVEMQ_CLIENT, // serviceType
send, // method
null, // rpc
producerBrokerUri.getHost() + ":" + producerBrokerUri.getPort(), // endPoint
destination.getPhysicalName(), // destinationId
annotation("message.queue.url", destination.getQualifiedName()),
annotation("activemq.broker.address", producerBrokerUri.getHost() + ":" + producerBrokerUri.getPort())
));
}
/**
* Verifies spans and span events for when {@link ActiveMQMessageConsumer} receives the message and enqueues it to
* the {@link org.apache.activemq.MessageDispatchChannel MessageDispatchChannel}. The client then invokes any of
* {@link ActiveMQMessageConsumer#receive() receive()}, {@link ActiveMQMessageConsumer#receive(long) receive(long)},
* or {@link ActiveMQMessageConsumer#receiveNoWait() receiveNotWait()} to retrieve the message. (trace count : 4)
*
* @param destination the destination from which the consumer is receiving the message
* @param expectedMessage the message the consumer is expected to receive
* @throws Exception
*/
private void verifyConsumerPullEvent(ActiveMQDestination destination, MessageConsumer consumer, Message expectedMessage) throws Exception {
PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
verifier.printCache();
Class<?> messageConsumerClass = Class.forName("org.apache.activemq.ActiveMQMessageConsumer");
Method receiveWithTimeout = messageConsumerClass.getDeclaredMethod("receive", long.class);
URI consumerBrokerUri = new URI(getConsumerBrokerUrl());
ExpectedTrace consumerDispatchTrace = root(ACTIVEMQ_CLIENT, // serviceType
"ActiveMQ Consumer Invocation", // method
destination.getQualifiedName(), // rpc
null, // endPoint (collected but there's no easy way to retrieve local address)
consumerBrokerUri.getHost() + ":" + consumerBrokerUri.getPort());
ExpectedTrace consumerReceiveTrace = event(ACTIVEMQ_CLIENT_INTERNAL, // serviceType
receiveWithTimeout, // method
annotation("activemq.message", getMessageAsString(expectedMessage)));
Class<?> messageDispatchChannel = getMessageDispatchChannelClass(consumer);
if (messageDispatchChannel != null) {
Method enqueue = messageDispatchChannel.getDeclaredMethod("enqueue", MessageDispatch.class);
Method dequeueWithTimeout = messageDispatchChannel.getDeclaredMethod("dequeue", long.class);
// Consumer dispatches and enqueues the message to dispatch channel automatically
verifier.verifyDiscreteTrace(consumerDispatchTrace, event(ACTIVEMQ_CLIENT_INTERNAL, enqueue));
// Client receives the message by dequeueing it from the dispatch channel
verifier.verifyDiscreteTrace(consumerReceiveTrace, event(ACTIVEMQ_CLIENT_INTERNAL, dequeueWithTimeout));
} else {
// Consumer dispatches and enqueues the message to dispatch channel automatically
verifier.verifyDiscreteTrace(consumerDispatchTrace);
// Client receives the message by dequeueing it from the dispatch channel
verifier.verifyDiscreteTrace(consumerReceiveTrace);
}
}
/**
* Verifies spans and span events for when {@link ActiveMQMessageConsumer} receives the message and invokes it's
* {@link javax.jms.MessageListener MessageListener}. (trace count : 1)
*
* @param destination the destination from which the consumer is receiving the message
* @throws Exception
*/
private void verifyConsumerPushEvent(ActiveMQDestination destination) throws Exception {
PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
URI consumerBrokerUri = new URI(getConsumerBrokerUrl());
verifier.verifyDiscreteTrace(root(
ACTIVEMQ_CLIENT, // serviceType
"ActiveMQ Consumer Invocation", // method
destination.getQualifiedName(), // rpc
null, // endPoint (collected but there's no easy way to retrieve local address so skip check)
consumerBrokerUri.getHost() + ":" + consumerBrokerUri.getPort() // remoteAddress
));
}
private Class<?> getMessageDispatchChannelClass(MessageConsumer consumer) throws NoSuchFieldException, IllegalAccessException {
final String messageDispatchChannelFieldName = "unconsumedMessages";
Class<?> consumerClass = consumer.getClass();
// Need a better way as field names could change in future versions. Comparing classes or class names doesn't
// work due to class loading issue, and some versions may not have certain implementations of
// MessageDispatchChannel.
// Test should be fixed if anything changes in future ActiveMQClient library
Field messageDispatchChannelField = consumerClass.getDeclaredField(messageDispatchChannelFieldName);
messageDispatchChannelField.setAccessible(true);
return messageDispatchChannelField.get(consumer).getClass();
}
private String getMessageAsString(Message message) throws JMSException {
StringBuilder messageStringBuilder = new StringBuilder(message.getClass().getSimpleName());
if (message instanceof TextMessage) {
messageStringBuilder.append('{').append(((TextMessage) message).getText()).append('}');
}
return messageStringBuilder.toString();
}
protected final void assertNoConsumerError(Collection<Throwable> consumerThrowables) {
Assert.assertTrue("Consumer Error : " + consumerThrowables.toString(), consumerThrowables.isEmpty());
}
protected final void awaitAndVerifyTraceCount(int expectedTraceCount, long maxWaitMs) throws InterruptedException {
PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
final long waitIntervalMs = 100L;
long maxWaitTime = maxWaitMs;
if (maxWaitMs < waitIntervalMs) {
maxWaitTime = waitIntervalMs;
}
long startTime = System.currentTimeMillis();
while (System.currentTimeMillis() - startTime < maxWaitTime) {
try {
verifier.verifyTraceCount(expectedTraceCount);
return;
} catch (AssertionError e) {
// ignore and retry
Thread.sleep(waitIntervalMs);
}
}
verifier.printCache();
verifier.verifyTraceCount(expectedTraceCount);
}
}
|
[#2702] Fix ActiveMQ integration tests
Protocol is now collected when tracing end points and remote address
|
agent/src/test/java/com/navercorp/pinpoint/plugin/jdk7/activemq/client/ActiveMQClientITBase.java
|
[#2702] Fix ActiveMQ integration tests
|
<ide><path>gent/src/test/java/com/navercorp/pinpoint/plugin/jdk7/activemq/client/ActiveMQClientITBase.java
<ide>
<ide> // Wait till all traces are recorded (consumer traces are recorded from another thread)
<ide> awaitAndVerifyTraceCount(5, 5000L);
<del> verifyProducerSendEvent(testQueue); // trace count : 1
<del> verifyConsumerPullEvent(testQueue, consumer, expectedTextMessage); // trace count : 4
<add> verifyProducerSendEvent(testQueue, producerSession); // trace count : 1
<add> verifyConsumerPullEvent(testQueue, consumerSession, consumer, expectedTextMessage); // trace count : 4
<ide> }
<ide>
<ide> @Test
<ide>
<ide> // Wait till all traces are recorded (consumer traces are recorded from another thread)
<ide> awaitAndVerifyTraceCount(9, 5000L);
<del> verifyProducerSendEvent(testTopic); // trace count : 1
<del> verifyConsumerPullEvent(testTopic, consumer1, expectedTextMessage); // trace count : 4
<del> verifyConsumerPullEvent(testTopic, consumer2, expectedTextMessage); // trace count : 4
<add> verifyProducerSendEvent(testTopic, producerSession); // trace count : 1
<add> verifyConsumerPullEvent(testTopic, consumer1Session, consumer1, expectedTextMessage); // trace count : 4
<add> verifyConsumerPullEvent(testTopic, consumer2Session, consumer2, expectedTextMessage); // trace count : 4
<ide> }
<ide>
<ide> @Test
<ide> assertNoConsumerError(consumerThrowables);
<ide> // Wait till all traces are recorded (consumer traces are recorded from another thread)
<ide> awaitAndVerifyTraceCount(2, 5000L);
<del> verifyProducerSendEvent(testQueue); // trace count : 1
<del> verifyConsumerPushEvent(testQueue); // trace count : 1
<add> verifyProducerSendEvent(testQueue, producerSession); // trace count : 1
<add> verifyConsumerPushEvent(testQueue, consumerSession); // trace count : 1
<ide> }
<ide>
<ide> @Test
<ide> // Then
<ide> // Wait till all traces are recorded (consumer traces are recorded from another thread)
<ide> awaitAndVerifyTraceCount(3, 1000L);
<del> verifyProducerSendEvent(testTopic); // trace count : 1
<del> verifyConsumerPushEvent(testTopic); // trace count : 1
<del> verifyConsumerPushEvent(testTopic); // trace count : 1
<add> verifyProducerSendEvent(testTopic, producerSession); // trace count : 1
<add> verifyConsumerPushEvent(testTopic, consumer1Session); // trace count : 1
<add> verifyConsumerPushEvent(testTopic, consumer2Session); // trace count : 1
<ide> }
<ide>
<ide> /**
<ide> * @param destination the destination to which the producer is sending the message
<ide> * @throws Exception
<ide> */
<del> private void verifyProducerSendEvent(ActiveMQDestination destination) throws Exception {
<add> private void verifyProducerSendEvent(ActiveMQDestination destination, ActiveMQSession session) throws Exception {
<ide> PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
<add> verifier.printCache();
<ide> Class<?> messageProducerClass = Class.forName("org.apache.activemq.ActiveMQMessageProducer");
<ide> Method send = messageProducerClass.getDeclaredMethod("send", Destination.class, Message.class, int.class, int.class, long.class);
<del> URI producerBrokerUri = new URI(getProducerBrokerUrl());
<add>// URI producerBrokerUri = new URI(getProducerBrokerUrl());
<add>// String expectedEndPoint = getProducerBrokerUri.getHost() + ":" + producerBrokerUri.getPort();
<add>// String expectedEndPoint = producerBrokerUri.toString();
<add> String expectedEndPoint = session.getConnection().getTransport().getRemoteAddress();
<ide> verifier.verifyDiscreteTrace(event(
<ide> ACTIVEMQ_CLIENT, // serviceType
<ide> send, // method
<ide> null, // rpc
<del> producerBrokerUri.getHost() + ":" + producerBrokerUri.getPort(), // endPoint
<add> expectedEndPoint, // endPoint
<ide> destination.getPhysicalName(), // destinationId
<ide> annotation("message.queue.url", destination.getQualifiedName()),
<del> annotation("activemq.broker.address", producerBrokerUri.getHost() + ":" + producerBrokerUri.getPort())
<add> annotation("activemq.broker.address", expectedEndPoint)
<ide> ));
<ide> }
<ide>
<ide> * @param expectedMessage the message the consumer is expected to receive
<ide> * @throws Exception
<ide> */
<del> private void verifyConsumerPullEvent(ActiveMQDestination destination, MessageConsumer consumer, Message expectedMessage) throws Exception {
<add> private void verifyConsumerPullEvent(ActiveMQDestination destination, ActiveMQSession session, MessageConsumer consumer, Message expectedMessage) throws Exception {
<ide> PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
<ide> verifier.printCache();
<ide> Class<?> messageConsumerClass = Class.forName("org.apache.activemq.ActiveMQMessageConsumer");
<ide> Method receiveWithTimeout = messageConsumerClass.getDeclaredMethod("receive", long.class);
<del> URI consumerBrokerUri = new URI(getConsumerBrokerUrl());
<add>// URI consumerBrokerUri = new URI(getConsumerBrokerUrl());
<add>// String expectedEndPoint = consumerBrokerUri.toString();
<add> String expectedEndPoint = session.getConnection().getTransport().getRemoteAddress();
<ide>
<ide> ExpectedTrace consumerDispatchTrace = root(ACTIVEMQ_CLIENT, // serviceType
<ide> "ActiveMQ Consumer Invocation", // method
<ide> destination.getQualifiedName(), // rpc
<ide> null, // endPoint (collected but there's no easy way to retrieve local address)
<del> consumerBrokerUri.getHost() + ":" + consumerBrokerUri.getPort());
<add> expectedEndPoint);
<ide> ExpectedTrace consumerReceiveTrace = event(ACTIVEMQ_CLIENT_INTERNAL, // serviceType
<ide> receiveWithTimeout, // method
<ide> annotation("activemq.message", getMessageAsString(expectedMessage)));
<ide> * @param destination the destination from which the consumer is receiving the message
<ide> * @throws Exception
<ide> */
<del> private void verifyConsumerPushEvent(ActiveMQDestination destination) throws Exception {
<add> private void verifyConsumerPushEvent(ActiveMQDestination destination, ActiveMQSession session) throws Exception {
<ide> PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
<del> URI consumerBrokerUri = new URI(getConsumerBrokerUrl());
<add>// URI consumerBrokerUri = new URI(getConsumerBrokerUrl());
<add>// String expectedRemoteAddress = consumerBrokerUri.toString();
<add> String expectedRemoteAddress = session.getConnection().getTransport().getRemoteAddress();
<ide> verifier.verifyDiscreteTrace(root(
<ide> ACTIVEMQ_CLIENT, // serviceType
<ide> "ActiveMQ Consumer Invocation", // method
<ide> destination.getQualifiedName(), // rpc
<ide> null, // endPoint (collected but there's no easy way to retrieve local address so skip check)
<del> consumerBrokerUri.getHost() + ":" + consumerBrokerUri.getPort() // remoteAddress
<add> expectedRemoteAddress // remoteAddress
<ide> ));
<ide> }
<ide>
|
|
JavaScript
|
mit
|
7cc7cb873bc7a4ad4499c2fb0d98ca5f51532e24
| 0 |
tehp/flofe
|
var Botkit = require('botkit')
var Witbot = require('witbot')
var slackToken = process.env.SLACK_TOKEN
var witToken = process.env.WIT_TOKEN
var openWeatherApiKey = process.env.OPENWEATHER_KEY
var controller = Botkit.slackbot({
debug: false
})
controller.spawn({
token: slackToken
}).startRTM(function (err, bot, payload) {
if (err) {
throw new Error('Error connecting to slack: ', err)
}
console.log('Connected to slack')
})
var witbot = Witbot(witToken)
controller.hears('.*', 'direct_message,direct_mention', function (bot, message) {
witbot.process(message.text, bot, message)
})
witbot.hears('hello', 0.5, function (bot, message, outcome) {
bot.reply(message, 'Hello to you as well!')
})
witbot.hears('marc_gay', 0.5, function (bot, message, outcome) {
bot.reply(message, 'Marc is so gay...')
})
witbot.hears('status', 0.5, function (bot, message, outcome) {
bot.reply(message, 'FLOFE BOT STATUS: Active. Power level: Over 9000. Target: http://github.com/tehp/flofe')
})
witbot.hears('fuck_off', 0.5, function (bot, message, outcome) {
bot.reply(message, 'Fuck you too, cock sucker.')
})
//witbot.hears('get_datetime', 0.5, function (bot, message, outcome) {
// console.log(outcome.entities.location)
// if (!outcome.entities.location || outcome.entities.location.length === 0) {
// bot.reply(message, 'I\'d love to give you the weather but for where?')
// return
// }
//
// var location = outcome.entities.location[0].value
// weather.get(location, function (error, msg) {
// if (error) {
// console.error(error)
// bot.reply(message, 'uh oh, there was a problem getting the weather')
// return
// }
// bot.reply(message, msg)
// })
//})
|
index.js
|
var Botkit = require('botkit')
var Witbot = require('witbot')
var slackToken = process.env.SLACK_TOKEN
var witToken = process.env.WIT_TOKEN
var openWeatherApiKey = process.env.OPENWEATHER_KEY
var controller = Botkit.slackbot({
debug: false
})
controller.spawn({
token: slackToken
}).startRTM(function (err, bot, payload) {
if (err) {
throw new Error('Error connecting to slack: ', err)
}
console.log('Connected to slack')
})
var witbot = Witbot(witToken)
controller.hears('.*', 'direct_message,direct_mention', function (bot, message) {
witbot.process(message.text, bot, message)
})
witbot.hears('hello', 0.5, function (bot, message, outcome) {
bot.reply(message, 'Hello to you as well!')
})
witbot.hears('marc_gay', 0.5, function (bot, message, outcome) {
bot.reply(message, 'Marc is so gay...')
})
witbot.hears('status', 0.5, function (bot, message, outcome) {
bot.reply(message, 'Status: running, Target: http://github.com/tehp/flofe')
})
witbot.hears('fuck_off', 0.5, function (bot, message, outcome) {
bot.reply(message, 'Fuck you too, cock sucker.')
})
//witbot.hears('get_datetime', 0.5, function (bot, message, outcome) {
// console.log(outcome.entities.location)
// if (!outcome.entities.location || outcome.entities.location.length === 0) {
// bot.reply(message, 'I\'d love to give you the weather but for where?')
// return
// }
//
// var location = outcome.entities.location[0].value
// weather.get(location, function (error, msg) {
// if (error) {
// console.error(error)
// bot.reply(message, 'uh oh, there was a problem getting the weather')
// return
// }
// bot.reply(message, msg)
// })
//})
|
status
|
index.js
|
status
|
<ide><path>ndex.js
<ide> })
<ide>
<ide> witbot.hears('status', 0.5, function (bot, message, outcome) {
<del> bot.reply(message, 'Status: running, Target: http://github.com/tehp/flofe')
<add> bot.reply(message, 'FLOFE BOT STATUS: Active. Power level: Over 9000. Target: http://github.com/tehp/flofe')
<ide> })
<ide>
<ide> witbot.hears('fuck_off', 0.5, function (bot, message, outcome) {
|
|
JavaScript
|
mit
|
e34d2de79327451b94f66ca2d58c8953522542e9
| 0 |
bmakuh/quip-mass-exporter
|
const request = require('request')
const { forEach } = require('lodash')
const fs = require('fs')
const toMarkdown = require('to-markdown')
const Authorization = `Bearer ${process.argv[2]}`
const fetchPrivateFolder = (error, response, body) => {
if (!error && response.statusCode == 200) {
const info = JSON.parse(body)
request({
url: `https://platform.quip.com/1/folders/${info.private_folder_id}`,
headers: { Authorization }
}, fetchDocs)
}
}
const fetchThreads = (folder_id) => {
request({
url: `https://platform.quip.com/1/folders/${folder_id}`,
headers: { Authorization }
}, (err, res, body) => {
forEach(JSON.parse(body), (folder) => {
if (!folder.title) return
fs.mkdirSync(`output/${folder.title}`, (err) => {
if (err) throw err
console.log(`successfully created output/${folder.title}`)
})
fetchDocs(err, res, body, folder.title)
})
})
}
const fetchDocs = (err, res, body, folder_name = 'output') => {
const { children } = JSON.parse(body)
const ids = children
.filter(({ thread_id }) => !!thread_id)
.map(({ thread_id }) => thread_id)
.join(',')
const folder_ids = children
.filter(({ folder_id }) => !!folder_id)
.map(({ folder_id }) => folder_id)
forEach(folder_ids, fetchThreads)
request({
url: `https://platform.quip.com/1/threads/?ids=${ids}`,
headers: { Authorization }
}, (err, res, body) => {
forEach(JSON.parse(body), (({ thread, html }) => {
const file = thread.title.replace(/\//g, '')
const path = folder_name === 'output'
? folder_name
: `output/${folder_name}`
fs.writeFile(`${path}/${file}.html`, html, (err) => {
if (err) throw err
console.log(`${path}/${file}.html saved successfully`)
})
fs.writeFile(`${path}/${file}.md`, toMarkdown(html), (err) => {
if (err) throw err
console.log(`${path}/${file}.md saved successfully`)
})
}))
})
}
request({
url: 'https://platform.quip.com/1/users/current',
headers: { Authorization }
}, fetchPrivateFolder)
|
index.js
|
const request = require('request')
const { forEach } = require('lodash')
const fs = require('fs')
const toMarkdown = require('to-markdown')
const Authorization = `Bearer ${process.argv[2]}`
function fetchPrivateFolder (error, response, body) {
if (!error && response.statusCode == 200) {
const info = JSON.parse(body)
request({
url: `https://platform.quip.com/1/folders/${info.private_folder_id}`,
headers: { Authorization }
}, fetchDocs)
}
}
function fetchThreads (folder_id) {
request({
url: `https://platform.quip.com/1/folders/${folder_id}`,
headers: { Authorization }
}, (err, res, body) => {
forEach(JSON.parse(body), (folder) => {
if (!folder.title) return
fs.mkdirSync(`output/${folder.title}`, (err) => {
if (err) throw err
console.log(`successfully created output/${folder.title}`)
})
fetchDocs(err, res, body, folder.title)
})
})
}
function fetchDocs (err, res, body, folder_name = 'output') {
const { children } = JSON.parse(body)
const ids = children
.filter(({ thread_id }) => !!thread_id)
.map(({ thread_id }) => thread_id)
.join(',')
const folder_ids = children
.filter(({ folder_id }) => !!folder_id)
.map(({ folder_id }) => folder_id)
forEach(folder_ids, fetchThreads)
request({
url: `https://platform.quip.com/1/threads/?ids=${ids}`,
headers: { Authorization }
}, (err, res, body) => {
forEach(JSON.parse(body), (({ thread, html }) => {
const file = thread.title.replace(/\//g, '')
const path = folder_name === 'output'
? folder_name
: `output/${folder_name}`
fs.writeFile(`${path}/${file}.html`, html, (err) => {
if (err) throw err
console.log(`${path}/${file}.html saved successfully`)
})
fs.writeFile(`${path}/${file}.md`, toMarkdown(html), (err) => {
if (err) throw err
console.log(`${path}/${file}.md saved successfully`)
})
}))
})
}
request({
url: 'https://platform.quip.com/1/users/current',
headers: { Authorization }
}, fetchPrivateFolder)
|
No more of those silly `function`s.
|
index.js
|
No more of those silly `function`s.
|
<ide><path>ndex.js
<ide>
<ide> const Authorization = `Bearer ${process.argv[2]}`
<ide>
<del>function fetchPrivateFolder (error, response, body) {
<add>const fetchPrivateFolder = (error, response, body) => {
<ide> if (!error && response.statusCode == 200) {
<ide> const info = JSON.parse(body)
<ide> request({
<ide> }
<ide> }
<ide>
<del>function fetchThreads (folder_id) {
<add>const fetchThreads = (folder_id) => {
<ide> request({
<ide> url: `https://platform.quip.com/1/folders/${folder_id}`,
<ide> headers: { Authorization }
<ide> })
<ide> }
<ide>
<del>function fetchDocs (err, res, body, folder_name = 'output') {
<add>const fetchDocs = (err, res, body, folder_name = 'output') => {
<ide> const { children } = JSON.parse(body)
<ide> const ids = children
<ide> .filter(({ thread_id }) => !!thread_id)
|
|
Java
|
apache-2.0
|
error: pathspec 'addressbook-selenium-tests/src/com/example/tests/Test1.java' did not match any file(s) known to git
|
103581879a74882441e682db516bf1baf7a16cfc
| 1 |
NPoberezhnaya/JavaAutomation,NPoberezhnaya/JavaAutomation,NPoberezhnaya/JavaAutomation
|
package com.example.tests;
import java.util.regex.Pattern;
import java.util.concurrent.TimeUnit;
import org.junit.*;
import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
import org.openqa.selenium.*;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.support.ui.Select;
public class Test1 {
private WebDriver driver;
private String baseUrl;
private boolean acceptNextAlert = true;
private StringBuffer verificationErrors = new StringBuffer();
@Before
public void setUp() throws Exception {
driver = new FirefoxDriver();
baseUrl = "http://localhost/";
driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
}
@Test
public void testUntitled2() throws Exception {
driver.get(baseUrl + "/addressbookv4.1.4/");
driver.findElement(By.linkText("add new")).click();
driver.findElement(By.name("firstname")).clear();
driver.findElement(By.name("firstname")).sendKeys("QQQ");
driver.findElement(By.name("lastname")).clear();
driver.findElement(By.name("lastname")).sendKeys("QQQQQ");
driver.findElement(By.name("address")).clear();
driver.findElement(By.name("address")).sendKeys("QQQQQQQQ");
driver.findElement(By.name("home")).clear();
driver.findElement(By.name("home")).sendKeys("QQQQQ");
driver.findElement(By.name("mobile")).clear();
driver.findElement(By.name("mobile")).sendKeys("QQQQ");
driver.findElement(By.name("work")).clear();
driver.findElement(By.name("work")).sendKeys("QQQQQQQQQQQQQQQQ");
driver.findElement(By.name("email")).clear();
driver.findElement(By.name("email")).sendKeys("QQQQQQQQ");
driver.findElement(By.name("email2")).clear();
driver.findElement(By.name("email2")).sendKeys("QQQQQQQQQQQQQQQQQQQ");
new Select(driver.findElement(By.name("bday"))).selectByVisibleText("7");
new Select(driver.findElement(By.name("bmonth"))).selectByVisibleText("June");
driver.findElement(By.name("byear")).clear();
driver.findElement(By.name("byear")).sendKeys("1980");
driver.findElement(By.name("address2")).clear();
driver.findElement(By.name("address2")).sendKeys("2222222222");
driver.findElement(By.name("phone2")).clear();
driver.findElement(By.name("phone2")).sendKeys("2222222");
driver.findElement(By.name("submit")).click();
driver.findElement(By.linkText("home")).click();
}
@After
public void tearDown() throws Exception {
driver.quit();
String verificationErrorString = verificationErrors.toString();
if (!"".equals(verificationErrorString)) {
fail(verificationErrorString);
}
}
private boolean isElementPresent(By by) {
try {
driver.findElement(by);
return true;
} catch (NoSuchElementException e) {
return false;
}
}
private boolean isAlertPresent() {
try {
driver.switchTo().alert();
return true;
} catch (NoAlertPresentException e) {
return false;
}
}
private String closeAlertAndGetItsText() {
try {
Alert alert = driver.switchTo().alert();
String alertText = alert.getText();
if (acceptNextAlert) {
alert.accept();
} else {
alert.dismiss();
}
return alertText;
} finally {
acceptNextAlert = true;
}
}
}
|
addressbook-selenium-tests/src/com/example/tests/Test1.java
|
Revert "first home task is done"
This reverts commit 3b60e71624b150c03e594caed229bb8d09bf9c63.
|
addressbook-selenium-tests/src/com/example/tests/Test1.java
|
Revert "first home task is done"
|
<ide><path>ddressbook-selenium-tests/src/com/example/tests/Test1.java
<add>package com.example.tests;
<add>
<add>import java.util.regex.Pattern;
<add>import java.util.concurrent.TimeUnit;
<add>import org.junit.*;
<add>import static org.junit.Assert.*;
<add>import static org.hamcrest.CoreMatchers.*;
<add>import org.openqa.selenium.*;
<add>import org.openqa.selenium.firefox.FirefoxDriver;
<add>import org.openqa.selenium.support.ui.Select;
<add>
<add>public class Test1 {
<add> private WebDriver driver;
<add> private String baseUrl;
<add> private boolean acceptNextAlert = true;
<add> private StringBuffer verificationErrors = new StringBuffer();
<add>
<add> @Before
<add> public void setUp() throws Exception {
<add> driver = new FirefoxDriver();
<add> baseUrl = "http://localhost/";
<add> driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
<add> }
<add>
<add> @Test
<add> public void testUntitled2() throws Exception {
<add> driver.get(baseUrl + "/addressbookv4.1.4/");
<add> driver.findElement(By.linkText("add new")).click();
<add> driver.findElement(By.name("firstname")).clear();
<add> driver.findElement(By.name("firstname")).sendKeys("QQQ");
<add> driver.findElement(By.name("lastname")).clear();
<add> driver.findElement(By.name("lastname")).sendKeys("QQQQQ");
<add> driver.findElement(By.name("address")).clear();
<add> driver.findElement(By.name("address")).sendKeys("QQQQQQQQ");
<add> driver.findElement(By.name("home")).clear();
<add> driver.findElement(By.name("home")).sendKeys("QQQQQ");
<add> driver.findElement(By.name("mobile")).clear();
<add> driver.findElement(By.name("mobile")).sendKeys("QQQQ");
<add> driver.findElement(By.name("work")).clear();
<add> driver.findElement(By.name("work")).sendKeys("QQQQQQQQQQQQQQQQ");
<add> driver.findElement(By.name("email")).clear();
<add> driver.findElement(By.name("email")).sendKeys("QQQQQQQQ");
<add> driver.findElement(By.name("email2")).clear();
<add> driver.findElement(By.name("email2")).sendKeys("QQQQQQQQQQQQQQQQQQQ");
<add> new Select(driver.findElement(By.name("bday"))).selectByVisibleText("7");
<add> new Select(driver.findElement(By.name("bmonth"))).selectByVisibleText("June");
<add> driver.findElement(By.name("byear")).clear();
<add> driver.findElement(By.name("byear")).sendKeys("1980");
<add> driver.findElement(By.name("address2")).clear();
<add> driver.findElement(By.name("address2")).sendKeys("2222222222");
<add> driver.findElement(By.name("phone2")).clear();
<add> driver.findElement(By.name("phone2")).sendKeys("2222222");
<add> driver.findElement(By.name("submit")).click();
<add> driver.findElement(By.linkText("home")).click();
<add> }
<add>
<add> @After
<add> public void tearDown() throws Exception {
<add> driver.quit();
<add> String verificationErrorString = verificationErrors.toString();
<add> if (!"".equals(verificationErrorString)) {
<add> fail(verificationErrorString);
<add> }
<add> }
<add>
<add> private boolean isElementPresent(By by) {
<add> try {
<add> driver.findElement(by);
<add> return true;
<add> } catch (NoSuchElementException e) {
<add> return false;
<add> }
<add> }
<add>
<add> private boolean isAlertPresent() {
<add> try {
<add> driver.switchTo().alert();
<add> return true;
<add> } catch (NoAlertPresentException e) {
<add> return false;
<add> }
<add> }
<add>
<add> private String closeAlertAndGetItsText() {
<add> try {
<add> Alert alert = driver.switchTo().alert();
<add> String alertText = alert.getText();
<add> if (acceptNextAlert) {
<add> alert.accept();
<add> } else {
<add> alert.dismiss();
<add> }
<add> return alertText;
<add> } finally {
<add> acceptNextAlert = true;
<add> }
<add> }
<add>}
|
|
Java
|
apache-2.0
|
0007407d959548c9f2a77f8e8a45e7682c52b5ee
| 0 |
ontop/ontop,ontop/ontop,ontop/ontop,ontop/ontop,ontop/ontop
|
package it.unibz.inf.ontop.spec.mapping.sqlparser;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import it.unibz.inf.ontop.dbschema.*;
import it.unibz.inf.ontop.dbschema.impl.OfflineMetadataProviderBuilder;
import it.unibz.inf.ontop.iq.node.ExtensionalDataNode;
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.model.type.DBTermType;
import it.unibz.inf.ontop.spec.mapping.sqlparser.exception.IllegalJoinException;
import net.sf.jsqlparser.expression.operators.relational.EqualsTo;
import net.sf.jsqlparser.schema.Column;
import net.sf.jsqlparser.schema.Table;
import org.junit.Before;
import org.junit.Test;
import static it.unibz.inf.ontop.utils.SQLMappingTestingTools.*;
import static org.junit.Assert.*;
/**
* Created by Roman Kontchakov on 01/11/2016.
*
*/
// TODO: REFACTOR
public class RelationalExpressionTest {
private static QuotedIDFactory MDFAC;
private DatabaseRelationDefinition P;
private ExtensionalDataNode f1, f2;
private ImmutableFunctionalTerm eq;
private Variable x, y, u, v;
private QualifiedAttributeID qaTx, qaTy, qaNx, qaNy, qaTu, qaTv, qaNu, qaNv;
private QuotedID attX, attY;
private RAExpression re1, re2, re1_1, re3;
private EqualsTo onExpression;
@Before
public void setupTest(){
OfflineMetadataProviderBuilder builder = createMetadataProviderBuilder();
MDFAC = builder.getQuotedIDFactory();
x = TERM_FACTORY.getVariable("x");
y = TERM_FACTORY.getVariable("y");
DBTermType integerDBType = builder.getDBTypeFactory().getDBLargeIntegerType();
P = builder.createDatabaseRelation("P",
"A", integerDBType, true,
"B", integerDBType, true);
attX = P.getAttribute(1).getID();
attY = P.getAttribute(2).getID();
f1 = IQ_FACTORY.createExtensionalDataNode(P, ImmutableMap.of(0, x, 1, y));
qaTx = new QualifiedAttributeID(P.getID(), attX);
qaTy = new QualifiedAttributeID(P.getID(), attY);
qaNx = new QualifiedAttributeID(null, attX);
qaNy = new QualifiedAttributeID(null, attY);
re1 = new RAExpression(ImmutableList.of(f1),
ImmutableList.of(),
new RAExpressionAttributes(
ImmutableMap.of(qaTx, x, qaTy, y, qaNx, x, qaNy, y),
ImmutableMap.of(attX, ImmutableSet.of(P.getID()), attY, P.getAllIDs())), TERM_FACTORY);
u = TERM_FACTORY.getVariable("u");
v = TERM_FACTORY.getVariable("v");
DatabaseRelationDefinition Q = builder.createDatabaseRelation("Q",
"A", integerDBType, true,
"C", integerDBType, true);
QuotedID attu = Q.getAttribute(1).getID();
QuotedID attv = Q.getAttribute(2).getID();
f2 = IQ_FACTORY.createExtensionalDataNode(Q, ImmutableMap.of(0, u, 1, v));
qaTu = new QualifiedAttributeID(Q.getID(), attu);
qaTv = new QualifiedAttributeID(Q.getID(), attv);
qaNu = new QualifiedAttributeID(null, attu);
qaNv = new QualifiedAttributeID(null, attv);
re2 = new RAExpression(ImmutableList.of(f2),
ImmutableList.of(),
new RAExpressionAttributes(
ImmutableMap.of(qaTu, u,qaTv, v, qaNu, u, qaNv, v),
ImmutableMap.of(attu, ImmutableSet.of(Q.getID()), attv, ImmutableSet.of(Q.getID()))), TERM_FACTORY);
Variable w = TERM_FACTORY.getVariable("u");
Variable z = TERM_FACTORY.getVariable("v");
ExtensionalDataNode f3 = IQ_FACTORY.createExtensionalDataNode(Q, ImmutableMap.of(0, w, 1, z));
RelationID table3 = MDFAC.createRelationID(null, "R");
QuotedID attW = MDFAC.createAttributeID("A");
QuotedID attZ = MDFAC.createAttributeID("B");
// This is used to simulate an ambiguity during the operation of natural join
re3 = new RAExpression(
ImmutableList.of(f3),
ImmutableList.of(),
RAExpressionAttributes.create(ImmutableMap.of(attW, w, attZ, z), ImmutableSet.of(table3)), TERM_FACTORY);
eq = TERM_FACTORY.getNotYetTypedEquality(x, u);
onExpression = new EqualsTo();
onExpression.setLeftExpression(new Column(new Table("P"), "A"));
onExpression.setRightExpression(new Column(new Table("Q"), "A"));
// this relation contains just a common attribute with the RAExpression "re1"
// and it is used to simulate an exception during the operations of:
// "cross join" and "join on" and "natural join"
re1_1 = new RAExpression(ImmutableList.of(f2),
ImmutableList.of(),
RAExpressionAttributes.create(ImmutableMap.of(attX, x), P.getAllIDs()), TERM_FACTORY);
System.out.println("****************************************************");
}
@Test
public void cross_join_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
RAExpression relationalExpression = re1.crossJoin(re2);
System.out.println(relationalExpression);
crossJoinAndJoinOnCommonAsserts(relationalExpression);
assertTrue(relationalExpression.getFilterAtoms().isEmpty());
}
@Test(expected = IllegalJoinException.class)
public void cross_join_exception_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re1_1);
re1.crossJoin(re1_1);
}
@Test
public void join_on_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
System.out.println(eq);
RAExpression relationalExpression = re1.joinOn(re2,
attributes -> new ExpressionParser(MDFAC, CORE_SINGLETONS)
.parseBooleanExpression(onExpression, attributes));
System.out.println(relationalExpression);
crossJoinAndJoinOnCommonAsserts(relationalExpression);
assertTrue(relationalExpression.getFilterAtoms().contains(eq));
}
@Test(expected = IllegalJoinException.class)
public void join_on_exception_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re1_1);
re1.joinOn(re1_1,
attributes -> new ExpressionParser(MDFAC, CORE_SINGLETONS)
.parseBooleanExpression(onExpression, attributes));
}
@Test
public void natural_join_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
System.out.println(eq);
RAExpression relationalExpression = re1.naturalJoin(re2);
System.out.println(relationalExpression);
naturalUsingCommonAsserts(relationalExpression);
}
@Test(expected = IllegalJoinException.class)
public void natural_join_exception_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re1_1);
RAExpression relationalExpression = re1.naturalJoin(re1_1);
System.out.println(relationalExpression);
}
@Test(expected = IllegalJoinException.class)
public void natural_join_ambiguity_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
RAExpression re = re1.joinOn(re2,
attributes -> new ExpressionParser(MDFAC, CORE_SINGLETONS)
.parseBooleanExpression(onExpression, attributes));
System.out.println(re);
System.out.println(re3);
re.naturalJoin(re3);
}
@Test()
public void join_using_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
System.out.println(eq);
RAExpression relationalExpression =
re1.joinUsing(re2, ImmutableSet.of(MDFAC.createAttributeID("A")));
System.out.println(relationalExpression);
naturalUsingCommonAsserts(relationalExpression);
}
@Test(expected = IllegalJoinException.class)
public void join_using_exception_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re1_1);
RAExpression relationalExpression = re1.joinUsing(re1_1,
ImmutableSet.of(MDFAC.createAttributeID("A")));
System.out.println(relationalExpression);
}
@Test(expected = IllegalJoinException.class)
public void join_using_no_commons_test() throws IllegalJoinException {
// a new relationId without any common attribute with the re1 is created to simulate an exception
RAExpression re2 = new RAExpression(ImmutableList.of(f2),
ImmutableList.of(),
RAExpressionAttributes.create(
ImmutableMap.of(MDFAC.createAttributeID("C"), u, MDFAC.createAttributeID("D"), v),
ImmutableSet.of(MDFAC.createRelationID(null, "Q"))), TERM_FACTORY);
System.out.println(re1);
System.out.println(re2);
re1.joinUsing(re2, ImmutableSet.of(MDFAC.createAttributeID("A")));
}
@Test(expected = IllegalJoinException.class)
public void join_using_ambiguity_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
RAExpression relationalExpression = re1.joinOn(re2,
attributes -> new ExpressionParser(MDFAC, CORE_SINGLETONS)
.parseBooleanExpression(onExpression, attributes));
System.out.println(relationalExpression);
System.out.println(re3);
relationalExpression.joinUsing(re3, ImmutableSet.of(MDFAC.createAttributeID("A")));
}
@Test
public void alias_test() {
RelationID tableAlias = MDFAC.createRelationID(null, "S");
QualifiedAttributeID qaAx = new QualifiedAttributeID(tableAlias, attX);
QualifiedAttributeID qaAy = new QualifiedAttributeID(tableAlias, attY);
System.out.println(re1);
RAExpression actual = re1.withAlias(tableAlias);
System.out.println(actual);
assertTrue(actual.getDataAtoms().contains(f1));
ImmutableMap<QualifiedAttributeID, ImmutableTerm> attrs = actual.getAttributes();
assertEquals(x, attrs.get(qaNx));
assertEquals(y, attrs.get(qaNy));
assertEquals(x, attrs.get(qaAx));
assertEquals(y, attrs.get(qaAy));
}
@Test
public void create_test(){
RAExpression actual = new RAExpression(re1.getDataAtoms(),
re1.getFilterAtoms(),
RAExpressionAttributes.create(ImmutableMap.of(attX, x, attY, y), P.getAllIDs()), TERM_FACTORY);
System.out.println(actual);
ImmutableMap<QualifiedAttributeID, ImmutableTerm> attrs = actual.getAttributes();
assertEquals(x, attrs.get(qaNx));
assertEquals(y, attrs.get(qaNy));
assertEquals(x, attrs.get(qaTx));
assertEquals(y, attrs.get(qaTy));
}
private void naturalUsingCommonAsserts(RAExpression relationalExpression){
assertTrue(relationalExpression.getDataAtoms().contains(f1));
assertTrue(relationalExpression.getDataAtoms().contains(f2));
assertTrue(relationalExpression.getFilterAtoms().contains(eq));
ImmutableMap<QualifiedAttributeID, ImmutableTerm> attrs = relationalExpression.getAttributes();
assertEquals(x, attrs.get(qaNx));
assertNull(attrs.get(qaTx));
assertEquals(y, attrs.get(qaTy));
assertEquals(y, attrs.get(qaNy));
assertNull(attrs.get(qaTu));
assertEquals(v, attrs.get(qaTv));
assertEquals(v, attrs.get(qaNv));
}
private void crossJoinAndJoinOnCommonAsserts(RAExpression relationalExpression ){
assertTrue(relationalExpression.getDataAtoms().contains(f1));
assertTrue(relationalExpression.getDataAtoms().contains(f2));
ImmutableMap<QualifiedAttributeID, ImmutableTerm> attrs = relationalExpression.getAttributes();
assertEquals(x, attrs.get(qaTx));
assertNull(attrs.get(qaNx));
assertEquals(y, attrs.get(qaTy));
assertEquals(y, attrs.get(qaNy));
assertEquals(u, attrs.get(qaTu));
assertNull(attrs.get(qaNu));
assertEquals(v, attrs.get(qaTv));
assertEquals(v, attrs.get(qaNv));
}
}
|
mapping/sql/core/src/test/java/it/unibz/inf/ontop/spec/mapping/sqlparser/RelationalExpressionTest.java
|
package it.unibz.inf.ontop.spec.mapping.sqlparser;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import it.unibz.inf.ontop.dbschema.*;
import it.unibz.inf.ontop.dbschema.impl.OfflineMetadataProviderBuilder;
import it.unibz.inf.ontop.iq.node.ExtensionalDataNode;
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.model.type.DBTermType;
import it.unibz.inf.ontop.spec.mapping.sqlparser.exception.IllegalJoinException;
import net.sf.jsqlparser.expression.operators.relational.EqualsTo;
import net.sf.jsqlparser.schema.Column;
import net.sf.jsqlparser.schema.Table;
import org.junit.Before;
import org.junit.Test;
import static it.unibz.inf.ontop.utils.SQLMappingTestingTools.*;
import static org.junit.Assert.*;
/**
* Created by Roman Kontchakov on 01/11/2016.
*
*/
// TODO: REFACTOR
public class RelationalExpressionTest {
private static QuotedIDFactory MDFAC;
private DatabaseRelationDefinition P;
private ExtensionalDataNode f1, f2;
private ImmutableFunctionalTerm eq;
private Variable x, y, u, v;
private QualifiedAttributeID qaTx, qaTy, qaNx, qaNy, qaTu, qaTv, qaNu, qaNv;
private QuotedID attX, attY;
private RAExpression re1, re2, re1_1, re3;
private EqualsTo onExpression;
@Before
public void setupTest(){
OfflineMetadataProviderBuilder builder = createMetadataProviderBuilder();
MDFAC = builder.getQuotedIDFactory();
x = TERM_FACTORY.getVariable("x");
y = TERM_FACTORY.getVariable("y");
DBTermType integerDBType = builder.getDBTypeFactory().getDBLargeIntegerType();
P = builder.createDatabaseRelation("P",
"A", integerDBType, true,
"B", integerDBType, true);
attX = P.getAttribute(1).getID();
attY = P.getAttribute(2).getID();
f1 = IQ_FACTORY.createExtensionalDataNode(P, ImmutableMap.of(0, x, 1, y));
qaTx = new QualifiedAttributeID(P.getID(), attX);
qaTy = new QualifiedAttributeID(P.getID(), attY);
qaNx = new QualifiedAttributeID(null, attX);
qaNy = new QualifiedAttributeID(null, attY);
re1 = new RAExpression(ImmutableList.of(f1),
ImmutableList.of(),
new RAExpressionAttributes(
ImmutableMap.of(qaTx, x, qaTy, y, qaNx, x, qaNy, y),
ImmutableMap.of(attX, ImmutableSet.of(P.getID()), attY, P.getAllIDs())), TERM_FACTORY);
u = TERM_FACTORY.getVariable("u");
v = TERM_FACTORY.getVariable("v");
DatabaseRelationDefinition Q = builder.createDatabaseRelation("Q",
"A", integerDBType, true,
"C", integerDBType, true);
QuotedID attu = Q.getAttribute(1).getID();
QuotedID attv = Q.getAttribute(2).getID();
f2 = IQ_FACTORY.createExtensionalDataNode(Q, ImmutableMap.of(0, u, 1, v));
qaTu = new QualifiedAttributeID(Q.getID(), attu);
qaTv = new QualifiedAttributeID(Q.getID(), attv);
qaNu = new QualifiedAttributeID(null, attu);
qaNv = new QualifiedAttributeID(null, attv);
re2 = new RAExpression(ImmutableList.of(f2),
ImmutableList.of(),
new RAExpressionAttributes(
ImmutableMap.of(qaTu, u,qaTv, v, qaNu, u, qaNv, v),
ImmutableMap.of(attu, ImmutableSet.of(Q.getID()), attv, ImmutableSet.of(Q.getID()))), TERM_FACTORY);
Variable w = TERM_FACTORY.getVariable("u");
Variable z = TERM_FACTORY.getVariable("v");
ExtensionalDataNode f3 = IQ_FACTORY.createExtensionalDataNode(Q, ImmutableMap.of(0, w, 1, z));
RelationID table3 = MDFAC.createRelationID(null, "R");
QuotedID attW = MDFAC.createAttributeID("A");
QuotedID attZ = MDFAC.createAttributeID("B");
// This is used to simulate an ambiguity during the operation of natural join
re3 = new RAExpression(
ImmutableList.of(f3),
ImmutableList.of(),
RAExpressionAttributes.create(ImmutableMap.of(attW, w, attZ, z), ImmutableSet.of(table3)), TERM_FACTORY);
eq = TERM_FACTORY.getNotYetTypedEquality(x, u);
onExpression = new EqualsTo();
onExpression.setLeftExpression(new Column(new Table("P"), "A"));
onExpression.setRightExpression(new Column(new Table("Q"), "A"));
// this relation contains just a common attribute with the RAExpression "re1"
// and it is used to simulate an exception during the operations of:
// "cross join" and "join on" and "natural join"
re1_1 = new RAExpression(ImmutableList.of(f2),
ImmutableList.of(),
RAExpressionAttributes.create(ImmutableMap.of(attX, x), P.getAllIDs()), TERM_FACTORY);
System.out.println("****************************************************");
}
@Test
public void cross_join_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
RAExpression relationalExpression = re1.crossJoin(re2);
System.out.println(relationalExpression);
crossJoinAndJoinOnCommonAsserts(relationalExpression);
assertTrue(relationalExpression.getFilterAtoms().isEmpty());
}
@Test(expected = IllegalJoinException.class)
public void cross_join_exception_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re1_1);
re1.crossJoin(re1_1);
}
@Test
public void join_on_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
System.out.println(eq);
RAExpression relationalExpression = re1.joinOn(re2,
attributes -> new ExpressionParser(MDFAC, CORE_SINGLETONS)
.parseBooleanExpression(onExpression, attributes));
System.out.println(relationalExpression);
crossJoinAndJoinOnCommonAsserts(relationalExpression);
assertTrue(relationalExpression.getFilterAtoms().contains(eq));
}
@Test(expected = IllegalJoinException.class)
public void join_on_exception_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re1_1);
re1.joinOn(re1_1,
attributes -> new ExpressionParser(MDFAC, CORE_SINGLETONS)
.parseBooleanExpression(onExpression, attributes));
}
@Test
public void natural_join_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
System.out.println(eq);
RAExpression relationalExpression = re1.joinUsing(re2, re1.getSharedAttributeNames(re2));
System.out.println(relationalExpression);
naturalUsingCommonAsserts(relationalExpression);
}
@Test(expected = IllegalJoinException.class)
public void natural_join_exception_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re1_1);
RAExpression relationalExpression = re1.joinUsing(re1_1, re1.getSharedAttributeNames(re1_1));
System.out.println(relationalExpression);
}
@Test(expected = IllegalJoinException.class)
public void natural_join_ambiguity_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
RAExpression re = re1.joinOn(re2,
attributes -> new ExpressionParser(MDFAC, CORE_SINGLETONS)
.parseBooleanExpression(onExpression, attributes));
System.out.println(re);
System.out.println(re3);
re.joinUsing(re3, re.getSharedAttributeNames(re3));
}
@Test()
public void join_using_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
System.out.println(eq);
RAExpression relationalExpression =
re1.joinUsing(re2, ImmutableSet.of(MDFAC.createAttributeID("A")));
System.out.println(relationalExpression);
naturalUsingCommonAsserts(relationalExpression);
}
@Test(expected = IllegalJoinException.class)
public void join_using_exception_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re1_1);
RAExpression relationalExpression = re1.joinUsing(re1_1,
ImmutableSet.of(MDFAC.createAttributeID("A")));
System.out.println(relationalExpression);
}
@Test(expected = IllegalJoinException.class)
public void join_using_no_commons_test() throws IllegalJoinException {
// a new relationId without any common attribute with the re1 is created to simulate an exception
RAExpression re2 = new RAExpression(ImmutableList.of(f2),
ImmutableList.of(),
RAExpressionAttributes.create(
ImmutableMap.of(MDFAC.createAttributeID("C"), u, MDFAC.createAttributeID("D"), v),
ImmutableSet.of(MDFAC.createRelationID(null, "Q"))), TERM_FACTORY);
System.out.println(re1);
System.out.println(re2);
re1.joinUsing(re2, ImmutableSet.of(MDFAC.createAttributeID("A")));
}
@Test(expected = IllegalJoinException.class)
public void join_using_ambiguity_test() throws IllegalJoinException {
System.out.println(re1);
System.out.println(re2);
RAExpression relationalExpression = re1.joinOn(re2,
attributes -> new ExpressionParser(MDFAC, CORE_SINGLETONS)
.parseBooleanExpression(onExpression, attributes));
System.out.println(relationalExpression);
System.out.println(re3);
relationalExpression.joinUsing(re3, ImmutableSet.of(MDFAC.createAttributeID("A")));
}
@Test
public void alias_test() {
RelationID tableAlias = MDFAC.createRelationID(null, "S");
QualifiedAttributeID qaAx = new QualifiedAttributeID(tableAlias, attX);
QualifiedAttributeID qaAy = new QualifiedAttributeID(tableAlias, attY);
System.out.println(re1);
RAExpression actual = re1.withAlias(tableAlias);
System.out.println(actual);
assertTrue(actual.getDataAtoms().contains(f1));
ImmutableMap<QualifiedAttributeID, ImmutableTerm> attrs = actual.getAttributes();
assertEquals(x, attrs.get(qaNx));
assertEquals(y, attrs.get(qaNy));
assertEquals(x, attrs.get(qaAx));
assertEquals(y, attrs.get(qaAy));
}
@Test
public void create_test(){
RAExpression actual = new RAExpression(re1.getDataAtoms(),
re1.getFilterAtoms(),
RAExpressionAttributes.create(ImmutableMap.of(attX, x, attY, y), P.getAllIDs()), TERM_FACTORY);
System.out.println(actual);
ImmutableMap<QualifiedAttributeID, ImmutableTerm> attrs = actual.getAttributes();
assertEquals(x, attrs.get(qaNx));
assertEquals(y, attrs.get(qaNy));
assertEquals(x, attrs.get(qaTx));
assertEquals(y, attrs.get(qaTy));
}
private void naturalUsingCommonAsserts(RAExpression relationalExpression){
assertTrue(relationalExpression.getDataAtoms().contains(f1));
assertTrue(relationalExpression.getDataAtoms().contains(f2));
assertTrue(relationalExpression.getFilterAtoms().contains(eq));
ImmutableMap<QualifiedAttributeID, ImmutableTerm> attrs = relationalExpression.getAttributes();
assertEquals(x, attrs.get(qaNx));
assertNull(attrs.get(qaTx));
assertEquals(y, attrs.get(qaTy));
assertEquals(y, attrs.get(qaNy));
assertNull(attrs.get(qaTu));
assertEquals(v, attrs.get(qaTv));
assertEquals(v, attrs.get(qaNv));
}
private void crossJoinAndJoinOnCommonAsserts(RAExpression relationalExpression ){
assertTrue(relationalExpression.getDataAtoms().contains(f1));
assertTrue(relationalExpression.getDataAtoms().contains(f2));
ImmutableMap<QualifiedAttributeID, ImmutableTerm> attrs = relationalExpression.getAttributes();
assertEquals(x, attrs.get(qaTx));
assertNull(attrs.get(qaNx));
assertEquals(y, attrs.get(qaTy));
assertEquals(y, attrs.get(qaNy));
assertEquals(u, attrs.get(qaTu));
assertNull(attrs.get(qaNu));
assertEquals(v, attrs.get(qaTv));
assertEquals(v, attrs.get(qaNv));
}
}
|
test fix
|
mapping/sql/core/src/test/java/it/unibz/inf/ontop/spec/mapping/sqlparser/RelationalExpressionTest.java
|
test fix
|
<ide><path>apping/sql/core/src/test/java/it/unibz/inf/ontop/spec/mapping/sqlparser/RelationalExpressionTest.java
<ide> System.out.println(re2);
<ide> System.out.println(eq);
<ide>
<del> RAExpression relationalExpression = re1.joinUsing(re2, re1.getSharedAttributeNames(re2));
<add> RAExpression relationalExpression = re1.naturalJoin(re2);
<ide> System.out.println(relationalExpression);
<ide>
<ide> naturalUsingCommonAsserts(relationalExpression);
<ide> System.out.println(re1);
<ide> System.out.println(re1_1);
<ide>
<del> RAExpression relationalExpression = re1.joinUsing(re1_1, re1.getSharedAttributeNames(re1_1));
<add> RAExpression relationalExpression = re1.naturalJoin(re1_1);
<ide> System.out.println(relationalExpression);
<ide> }
<ide>
<ide> System.out.println(re);
<ide> System.out.println(re3);
<ide>
<del> re.joinUsing(re3, re.getSharedAttributeNames(re3));
<add> re.naturalJoin(re3);
<ide> }
<ide>
<ide> @Test()
|
|
Java
|
mit
|
63f54c3fe128533016f62b303ccd44d53c650dd6
| 0 |
tjohn/robolectric,lexs/robolectric,zhongyu05/robolectric,karlicoss/robolectric,charlesmunger/robolectric,amarts/robolectric,tec27/robolectric,gruszczy/robolectric,ecgreb/robolectric,Omegaphora/external_robolectric,wyvx/robolectric,yuzhong-google/robolectric,daisy1754/robolectric,tuenti/robolectric,trevorrjohn/robolectric,ChameleonOS/android_external_robolectric,gruszczy/robolectric,davidsun/robolectric,fiower/robolectric,karlicoss/robolectric,VikingDen/robolectric,plackemacher/robolectric,upsight/playhaven-robolectric,daisy1754/robolectric,ChengCorp/robolectric,1zaman/robolectric,tyronen/robolectric,hgl888/robolectric,mag/robolectric,pivotal-oscar/robolectric,rburgst/robolectric,spotify/robolectric,diegotori/robolectric,yinquan529/platform-external-robolectric,IllusionRom-deprecated/android_platform_external_robolectric,spotify/robolectric,android-ia/platform_external_robolectric,kriegfrj/robolectric,Omegaphora/external_robolectric,macklinu/robolectric,wyvx/robolectric,Omegaphora/external_robolectric,gruszczy/robolectric,thiz11/platform_external_robolectric,gabrielduque/robolectric,toluju/robolectric,ocadotechnology/robolectric,xin3liang/platform_external_robolectric,hgl888/robolectric,jongerrish/robolectric,zhongyu05/robolectric,ChengCorp/robolectric,yuzhong-google/robolectric,cc12703/robolectric,fiower/robolectric,eric-kansas/robolectric,eric-kansas/robolectric,trevorrjohn/robolectric,thiz11/platform_external_robolectric,paulpv/robolectric,gb112211/robolectric,rongou/robolectric,tec27/robolectric,geekboxzone/mmallow_external_robolectric,lexs/robolectric,VikingDen/robolectric,BCGDV/robolectric,zbsz/robolectric,paulpv/robolectric,zhongyu05/robolectric,plackemacher/robolectric,geekboxzone/mmallow_external_robolectric,jongerrish/robolectric,toluju/robolectric,erichaugh/robolectric,MIPS/external-robolectric,tmrudick/robolectric,WonderCsabo/robolectric,yinquan529/platform-external-robolectric,android-ia/platform_external_robolectric,VikingDen/robolectric,jongerrish/robo
lectric,holmari/robolectric,tjohn/robolectric,Omegaphora/external_robolectric,tjohn/robolectric,zbsz/robolectric,android-ia/platform_external_robolectric,cc12703/robolectric,ecgreb/robolectric,mag/robolectric,daisy1754/robolectric,svenji/robolectric,diegotori/robolectric,ecgreb/robolectric,kriegfrj/robolectric,tuenti/robolectric,paulpv/robolectric,cesar1000/robolectric,pivotal-oscar/robolectric,diegotori/robolectric,charlesmunger/robolectric,yinquan529/platform-external-robolectric,davidsun/robolectric,gb112211/robolectric,plackemacher/robolectric,tmrudick/robolectric,gb112211/robolectric,macklinu/robolectric,android-ia/platform_external_robolectric,holmari/robolectric,tmrudick/robolectric,tec27/robolectric,MIPS/external-robolectric,charlesmunger/robolectric,xin3liang/platform_external_robolectric,erichaugh/robolectric,IllusionRom-deprecated/android_platform_external_robolectric,svenji/robolectric,macklinu/robolectric,jongerrish/robolectric,ChameleonOS/android_external_robolectric,rossimo/robolectric,yinquan529/platform-external-robolectric,mag/robolectric,gabrielduque/robolectric,ChengCorp/robolectric,wyvx/robolectric,geekboxzone/lollipop_external_robolectric,geekboxzone/mmallow_external_robolectric,lexs/robolectric,jingle1267/robolectric,rburgst/robolectric,jingle1267/robolectric,geekboxzone/lollipop_external_robolectric,erichaugh/robolectric,BCGDV/robolectric,cesar1000/robolectric,gabrielduque/robolectric,svenji/robolectric,amarts/robolectric,1zaman/robolectric,thiz11/platform_external_robolectric,cc12703/robolectric,davidsun/robolectric,kriegfrj/robolectric,toluju/robolectric,MIPS/external-robolectric,BCGDV/robolectric,geekboxzone/lollipop_external_robolectric,WonderCsabo/robolectric,IllusionRom-deprecated/android_platform_external_robolectric,xin3liang/platform_external_robolectric,fiower/robolectric,geekboxzone/mmallow_external_robolectric,holmari/robolectric,BCGDV/robolectric,rongou/robolectric,geekboxzone/lollipop_external_robolectric,pivotal-oscar/robolectr
ic,ocadotechnology/robolectric,spotify/robolectric,ocadotechnology/robolectric,trevorrjohn/robolectric,zbsz/robolectric,tyronen/robolectric,tyronen/robolectric,tuenti/robolectric,karlicoss/robolectric,eric-kansas/robolectric,WonderCsabo/robolectric,yuzhong-google/robolectric,jingle1267/robolectric,cesar1000/robolectric,rossimo/robolectric,ChameleonOS/android_external_robolectric,amarts/robolectric,rongou/robolectric,1zaman/robolectric,rossimo/robolectric,hgl888/robolectric,rburgst/robolectric
|
package com.xtremelabs.robolectric.tester.android.util;
import android.os.Bundle;
import android.support.v4.app.*;
import android.view.View;
import android.view.ViewGroup;
import com.xtremelabs.robolectric.Robolectric;
import com.xtremelabs.robolectric.shadows.SerializedFragmentState;
import com.xtremelabs.robolectric.shadows.ShadowFragment;
import com.xtremelabs.robolectric.shadows.ShadowFragmentActivity;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Map;
import static com.xtremelabs.robolectric.Robolectric.shadowOf;
/**
 * Robolectric test double for the support-library {@code FragmentManager}.
 * Fragments are tracked in two in-memory maps (by container view id and by
 * tag), and a simplified fragment lifecycle is driven directly; all
 * back-stack related operations are no-ops.
 */
public class TestFragmentManager extends FragmentManager {
    // Fragments registered via addFragment, keyed by container view id.
    private Map<Integer, Fragment> fragmentsById = new HashMap<Integer, Fragment>();
    // The same fragments (plus dialog fragments), keyed by tag.
    private Map<String, Fragment> fragmentsByTag = new HashMap<String, Fragment>();
    private FragmentActivity activity;

    public TestFragmentManager(FragmentActivity activity) {
        this.activity = activity;
    }

    /** Returns the activity this manager was created for. */
    public FragmentActivity getActivity() {
        return activity;
    }

    @Override
    public FragmentTransaction beginTransaction() {
        return new TestFragmentTransaction(this);
    }

    // Always reports that there was nothing pending to execute.
    @Override
    public boolean executePendingTransactions() {
        return false;
    }

    @Override
    public Fragment findFragmentById(int id) {
        return fragmentsById.get(id);
    }

    @Override
    public Fragment findFragmentByTag(String tag) {
        return fragmentsByTag.get(tag);
    }

    // --- Back stack support is not implemented in this test double: the
    // pop* methods do nothing and the stack always reports as empty. ---
    @Override
    public void popBackStack() {
    }

    @Override
    public boolean popBackStackImmediate() {
        return false;
    }

    @Override
    public void popBackStack(String name, int flags) {
    }

    @Override
    public boolean popBackStackImmediate(String name, int flags) {
        return false;
    }

    @Override
    public void popBackStack(int id, int flags) {
    }

    @Override
    public boolean popBackStackImmediate(int id, int flags) {
        return false;
    }

    @Override
    public int getBackStackEntryCount() {
        return 0;
    }

    @Override
    public BackStackEntry getBackStackEntryAt(int index) {
        return null;
    }

    @Override
    public void addOnBackStackChangedListener(OnBackStackChangedListener listener) {
    }

    @Override
    public void removeOnBackStackChangedListener(OnBackStackChangedListener listener) {
    }

    // Persisting a fragment reference into a Bundle is not supported (no-op).
    @Override
    public void putFragment(Bundle bundle, String key, Fragment fragment) {
    }

    /**
     * Looks through the fragment states serialized under
     * {@code ShadowFragmentActivity.FRAGMENTS_TAG} for one whose tag equals
     * {@code key} and returns a fresh instance of its fragment class
     * (the serialized state itself is not yet restored — see TODO).
     * Returns null when no entry matches.
     */
    @Override
    public Fragment getFragment(Bundle bundle, String key) {
        Object[] fragments = (Object[]) bundle.getSerializable(ShadowFragmentActivity.FRAGMENTS_TAG);
        for (Object object : fragments) {
            SerializedFragmentState fragment = (SerializedFragmentState) object;
            if (fragment.tag.equals(key)) {
                // TODO deserialize state
                return Robolectric.newInstanceOf(fragment.fragmentClass);
            }
        }
        return null;
    }

    @Override
    public Fragment.SavedState saveFragmentInstanceState(Fragment f) {
        return null;
    }

    @Override
    public void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
    }

    /** Registers a dialog fragment under its tag only (no container id). */
    public void addDialogFragment(String tag, DialogFragment fragment) {
        fragmentsByTag.put(tag, fragment);
    }

    /**
     * Registers a fragment under both its container id and its tag, wires it
     * to the activity through its shadow, and drives onAttach/onCreate.
     *
     * @param replace whether startFragment should clear the container's views
     *                before adding the fragment's view
     */
    public void addFragment(int containerViewId, String tag, Fragment fragment, boolean replace) {
        fragmentsById.put(containerViewId, fragment);
        fragmentsByTag.put(tag, fragment);
        shadowOf(fragment).setTag(tag);
        shadowOf(fragment).setContainerViewId(containerViewId);
        shadowOf(fragment).setShouldReplace(replace);
        shadowOf(fragment).setActivity(activity);
        fragment.onAttach(activity);
        fragment.onCreate(null);
    }

    /**
     * Drives the view-related part of the fragment lifecycle in order:
     * onCreateView -> onViewCreated -> onActivityCreated -> onStart,
     * attaching the created view to the fragment's container when one exists.
     */
    public void startFragment(Fragment fragment) {
        ViewGroup container = null;
        ShadowFragment shadowFragment = shadowOf(fragment);
        if (shadowOf(activity).getContentView() != null) {
            container = (ViewGroup) activity.findViewById(shadowFragment.getContainerViewId());
        }

        // The saved instance state is threaded through to onCreateView,
        // mirroring the real framework behavior.
        View view = fragment.onCreateView(activity.getLayoutInflater(), container, shadowFragment.getSavedInstanceState());
        shadowFragment.setView(view);

        fragment.onViewCreated(view, null);
        if (container != null) {
            // "replace" semantics: clear out whatever was in the container first.
            if (shadowFragment.getShouldReplace()) {
                container.removeAllViews();
            }
            container.addView(view);
        }

        fragment.onActivityCreated(shadowFragment.getSavedInstanceState());
        fragment.onStart();
    }

    /** Snapshot copy of the fragments registered by container id. */
    public HashMap<Integer, Fragment> getFragments() {
        return new HashMap<Integer, Fragment>(fragmentsById);
    }
}
|
src/main/java/com/xtremelabs/robolectric/tester/android/util/TestFragmentManager.java
|
package com.xtremelabs.robolectric.tester.android.util;
import android.os.Bundle;
import android.support.v4.app.*;
import android.view.View;
import android.view.ViewGroup;
import com.xtremelabs.robolectric.Robolectric;
import com.xtremelabs.robolectric.shadows.SerializedFragmentState;
import com.xtremelabs.robolectric.shadows.ShadowFragment;
import com.xtremelabs.robolectric.shadows.ShadowFragmentActivity;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Map;
import static com.xtremelabs.robolectric.Robolectric.shadowOf;
/**
 * Robolectric test double for the support-library {@code FragmentManager}.
 * Fragments are tracked in two in-memory maps (by container view id and by
 * tag), and a simplified fragment lifecycle is driven directly; all
 * back-stack related operations are no-ops.
 */
public class TestFragmentManager extends FragmentManager {
    // Fragments registered via addFragment, keyed by container view id.
    private Map<Integer, Fragment> fragmentsById = new HashMap<Integer, Fragment>();
    // The same fragments (plus dialog fragments), keyed by tag.
    private Map<String, Fragment> fragmentsByTag = new HashMap<String, Fragment>();
    private FragmentActivity activity;

    public TestFragmentManager(FragmentActivity activity) {
        this.activity = activity;
    }

    /** Returns the activity this manager was created for. */
    public FragmentActivity getActivity() {
        return activity;
    }

    @Override
    public FragmentTransaction beginTransaction() {
        return new TestFragmentTransaction(this);
    }

    // Always reports that there was nothing pending to execute.
    @Override
    public boolean executePendingTransactions() {
        return false;
    }

    @Override
    public Fragment findFragmentById(int id) {
        return fragmentsById.get(id);
    }

    @Override
    public Fragment findFragmentByTag(String tag) {
        return fragmentsByTag.get(tag);
    }

    // --- Back stack support is not implemented in this test double: the
    // pop* methods do nothing and the stack always reports as empty. ---
    @Override
    public void popBackStack() {
    }

    @Override
    public boolean popBackStackImmediate() {
        return false;
    }

    @Override
    public void popBackStack(String name, int flags) {
    }

    @Override
    public boolean popBackStackImmediate(String name, int flags) {
        return false;
    }

    @Override
    public void popBackStack(int id, int flags) {
    }

    @Override
    public boolean popBackStackImmediate(int id, int flags) {
        return false;
    }

    @Override
    public int getBackStackEntryCount() {
        return 0;
    }

    @Override
    public BackStackEntry getBackStackEntryAt(int index) {
        return null;
    }

    @Override
    public void addOnBackStackChangedListener(OnBackStackChangedListener listener) {
    }

    @Override
    public void removeOnBackStackChangedListener(OnBackStackChangedListener listener) {
    }

    // Persisting a fragment reference into a Bundle is not supported (no-op).
    @Override
    public void putFragment(Bundle bundle, String key, Fragment fragment) {
    }

    /**
     * Looks through the fragment states serialized under
     * {@code ShadowFragmentActivity.FRAGMENTS_TAG} for one whose tag equals
     * {@code key} and returns a fresh instance of its fragment class
     * (the serialized state itself is not yet restored — see TODO).
     * Returns null when no entry matches.
     */
    @Override
    public Fragment getFragment(Bundle bundle, String key) {
        Object[] fragments = (Object[]) bundle.getSerializable(ShadowFragmentActivity.FRAGMENTS_TAG);
        for (Object object : fragments) {
            SerializedFragmentState fragment = (SerializedFragmentState) object;
            if (fragment.tag.equals(key)) {
                // TODO deserialize state
                return Robolectric.newInstanceOf(fragment.fragmentClass);
            }
        }
        return null;
    }

    @Override
    public Fragment.SavedState saveFragmentInstanceState(Fragment f) {
        return null;
    }

    @Override
    public void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
    }

    /** Registers a dialog fragment under its tag only (no container id). */
    public void addDialogFragment(String tag, DialogFragment fragment) {
        fragmentsByTag.put(tag, fragment);
    }

    /**
     * Registers a fragment under both its container id and its tag, wires it
     * to the activity through its shadow, and drives onAttach/onCreate.
     *
     * @param replace whether startFragment should clear the container's views
     *                before adding the fragment's view
     */
    public void addFragment(int containerViewId, String tag, Fragment fragment, boolean replace) {
        fragmentsById.put(containerViewId, fragment);
        fragmentsByTag.put(tag, fragment);
        shadowOf(fragment).setTag(tag);
        shadowOf(fragment).setContainerViewId(containerViewId);
        shadowOf(fragment).setShouldReplace(replace);
        shadowOf(fragment).setActivity(activity);
        fragment.onAttach(activity);
        fragment.onCreate(null);
    }

    /**
     * Drives the view-related part of the fragment lifecycle in order:
     * onCreateView -> onViewCreated -> onActivityCreated -> onStart,
     * attaching the created view to the fragment's container when one exists.
     */
    public void startFragment(Fragment fragment) {
        ViewGroup container = null;
        ShadowFragment shadowFragment = shadowOf(fragment);
        if (shadowOf(activity).getContentView() != null) {
            container = (ViewGroup) activity.findViewById(shadowFragment.getContainerViewId());
        }

        // FIX: pass the saved instance state to onCreateView instead of null,
        // mirroring the real framework and matching the onActivityCreated call
        // below, which already receives it.
        View view = fragment.onCreateView(activity.getLayoutInflater(), container, shadowFragment.getSavedInstanceState());
        shadowFragment.setView(view);

        fragment.onViewCreated(view, null);
        if (container != null) {
            // "replace" semantics: clear out whatever was in the container first.
            if (shadowFragment.getShouldReplace()) {
                container.removeAllViews();
            }
            container.addView(view);
        }

        fragment.onActivityCreated(shadowFragment.getSavedInstanceState());
        fragment.onStart();
    }

    /** Snapshot copy of the fragments registered by container id. */
    public HashMap<Integer, Fragment> getFragments() {
        return new HashMap<Integer, Fragment>(fragmentsById);
    }
}
|
startFragment passes the saved instance state to the fragment's onCreateView() method instead of null
|
src/main/java/com/xtremelabs/robolectric/tester/android/util/TestFragmentManager.java
|
startFragment passes the saved instance state to the fragment's onCreateView() method instead of null
|
<ide><path>rc/main/java/com/xtremelabs/robolectric/tester/android/util/TestFragmentManager.java
<ide> container = (ViewGroup) activity.findViewById(shadowFragment.getContainerViewId());
<ide> }
<ide>
<del> View view = fragment.onCreateView(activity.getLayoutInflater(), container, null);
<add> View view = fragment.onCreateView(activity.getLayoutInflater(), container, shadowFragment.getSavedInstanceState());
<ide> shadowFragment.setView(view);
<ide>
<ide> fragment.onViewCreated(view, null);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.